| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 #include <stdio.h> | 10 #include <stdio.h> |
| (...skipping 733 matching lines...) |
| 744 std::deque<FrameComparison> comparisons_ GUARDED_BY(comparison_lock_); | 744 std::deque<FrameComparison> comparisons_ GUARDED_BY(comparison_lock_); |
| 745 rtc::Event done_; | 745 rtc::Event done_; |
| 746 }; | 746 }; |
| 747 | 747 |
| 748 VideoQualityTest::VideoQualityTest() : clock_(Clock::GetRealTimeClock()) {} | 748 VideoQualityTest::VideoQualityTest() : clock_(Clock::GetRealTimeClock()) {} |
| 749 | 749 |
| 750 void VideoQualityTest::TestBody() {} | 750 void VideoQualityTest::TestBody() {} |
| 751 | 751 |
| 752 std::string VideoQualityTest::GenerateGraphTitle() const { | 752 std::string VideoQualityTest::GenerateGraphTitle() const { |
| 753 std::stringstream ss; | 753 std::stringstream ss; |
| 754 ss << params_.video.codec; | 754 ss << params_.common.codec; |
| 755 ss << " (" << params_.video.target_bitrate_bps / 1000 << "kbps"; | 755 ss << " (" << params_.common.target_bitrate_bps / 1000 << "kbps"; |
| 756 ss << ", " << params_.video.fps << " FPS"; | 756 ss << ", " << params_.common.fps << " FPS"; |
| 757 if (params_.screenshare.scroll_duration) | 757 if (params_.screenshare.scroll_duration) |
| 758 ss << ", " << params_.screenshare.scroll_duration << "s scroll"; | 758 ss << ", " << params_.screenshare.scroll_duration << "s scroll"; |
| 759 if (params_.ss.streams.size() > 1) | 759 if (params_.ss.streams.size() > 1) |
| 760 ss << ", Stream #" << params_.ss.selected_stream; | 760 ss << ", Stream #" << params_.ss.selected_stream; |
| 761 if (params_.ss.num_spatial_layers > 1) | 761 if (params_.ss.num_spatial_layers > 1) |
| 762 ss << ", Layer #" << params_.ss.selected_sl; | 762 ss << ", Layer #" << params_.ss.selected_sl; |
| 763 ss << ")"; | 763 ss << ")"; |
| 764 return ss.str(); | 764 return ss.str(); |
| 765 } | 765 } |
| 766 | 766 |
| 767 void VideoQualityTest::CheckParams() { | 767 void VideoQualityTest::CheckParams() { |
| 768 // Add a default stream if none specified. | 768 // Add a default stream if none specified. |
| 769 if (params_.ss.streams.empty()) | 769 if (params_.ss.streams.empty()) |
| 770 params_.ss.streams.push_back(VideoQualityTest::DefaultVideoStream(params_)); | 770 params_.ss.streams.push_back(VideoQualityTest::DefaultVideoStream(params_)); |
| 771 if (params_.ss.num_spatial_layers == 0) | 771 if (params_.ss.num_spatial_layers == 0) |
| 772 params_.ss.num_spatial_layers = 1; | 772 params_.ss.num_spatial_layers = 1; |
| 773 | 773 |
| 774 if (params_.pipe.loss_percent != 0 || | 774 if (params_.pipe.loss_percent != 0 || |
| 775 params_.pipe.queue_length_packets != 0) { | 775 params_.pipe.queue_length_packets != 0) { |
| 776 // Since LayerFilteringTransport changes the sequence numbers, we can't | 776 // Since LayerFilteringTransport changes the sequence numbers, we can't |
| 777 // use that feature with packet loss, since the NACK request would end up | 777 // use that feature with packet loss, since the NACK request would end up |
| 778 // retransmitting the wrong packets. | 778 // retransmitting the wrong packets. |
| 779 RTC_CHECK(params_.ss.selected_sl == -1 || | 779 RTC_CHECK(params_.ss.selected_sl == -1 || |
| 780 params_.ss.selected_sl == params_.ss.num_spatial_layers - 1); | 780 params_.ss.selected_sl == params_.ss.num_spatial_layers - 1); |
| 781 RTC_CHECK(params_.video.selected_tl == -1 || | 781 RTC_CHECK(params_.common.selected_tl == -1 || |
| 782 params_.video.selected_tl == | 782 params_.common.selected_tl == |
| 783 params_.video.num_temporal_layers - 1); | 783 params_.common.num_temporal_layers - 1); |
| 784 } | 784 } |
| 785 | 785 |
| 786 // TODO(ivica): Should max_bitrate_bps == -1 represent inf max bitrate, as it | 786 // TODO(ivica): Should max_bitrate_bps == -1 represent inf max bitrate, as it |
| 787 // does in some parts of the code? | 787 // does in some parts of the code? |
| 788 RTC_CHECK_GE(params_.video.max_bitrate_bps, params_.video.target_bitrate_bps); | 788 RTC_CHECK_GE(params_.common.max_bitrate_bps, |
| 789 RTC_CHECK_GE(params_.video.target_bitrate_bps, params_.video.min_bitrate_bps); | 789 params_.common.target_bitrate_bps); |
| 790 RTC_CHECK_LT(params_.video.selected_tl, params_.video.num_temporal_layers); | 790 RTC_CHECK_GE(params_.common.target_bitrate_bps, |
|  | 791 params_.common.min_bitrate_bps); |
|  | 792 RTC_CHECK_LT(params_.common.selected_tl, params_.common.num_temporal_layers); |
| 791 RTC_CHECK_LT(params_.ss.selected_stream, params_.ss.streams.size()); | 793 RTC_CHECK_LT(params_.ss.selected_stream, params_.ss.streams.size()); |
| 792 for (const VideoStream& stream : params_.ss.streams) { | 794 for (const VideoStream& stream : params_.ss.streams) { |
| 793 RTC_CHECK_GE(stream.min_bitrate_bps, 0); | 795 RTC_CHECK_GE(stream.min_bitrate_bps, 0); |
| 794 RTC_CHECK_GE(stream.target_bitrate_bps, stream.min_bitrate_bps); | 796 RTC_CHECK_GE(stream.target_bitrate_bps, stream.min_bitrate_bps); |
| 795 RTC_CHECK_GE(stream.max_bitrate_bps, stream.target_bitrate_bps); | 797 RTC_CHECK_GE(stream.max_bitrate_bps, stream.target_bitrate_bps); |
| 796 RTC_CHECK_EQ(static_cast<int>(stream.temporal_layer_thresholds_bps.size()), | 798 RTC_CHECK_EQ(static_cast<int>(stream.temporal_layer_thresholds_bps.size()), |
| 797 params_.video.num_temporal_layers - 1); | 799 params_.common.num_temporal_layers - 1); |
| 798 } | 800 } |
| 799 // TODO(ivica): Should we check if the sum of all streams/layers is equal to | 801 // TODO(ivica): Should we check if the sum of all streams/layers is equal to |
| 800 // the total bitrate? We have to update them anyway in case the bitrate | 802 // the total bitrate? We have to update them anyway in case the bitrate |
| 801 // estimator changes the total bitrate. | 803 // estimator changes the total bitrate. |
| 802 RTC_CHECK_GE(params_.ss.num_spatial_layers, 1); | 804 RTC_CHECK_GE(params_.ss.num_spatial_layers, 1); |
| 803 RTC_CHECK_LE(params_.ss.selected_sl, params_.ss.num_spatial_layers); | 805 RTC_CHECK_LE(params_.ss.selected_sl, params_.ss.num_spatial_layers); |
| 804 RTC_CHECK(params_.ss.spatial_layers.empty() || | 806 RTC_CHECK(params_.ss.spatial_layers.empty() || |
| 805 params_.ss.spatial_layers.size() == | 807 params_.ss.spatial_layers.size() == |
| 806 static_cast<size_t>(params_.ss.num_spatial_layers)); | 808 static_cast<size_t>(params_.ss.num_spatial_layers)); |
| 807 if (params_.video.codec == "VP8") { | 809 if (params_.common.codec == "VP8") { |
| 808 RTC_CHECK_EQ(params_.ss.num_spatial_layers, 1); | 810 RTC_CHECK_EQ(params_.ss.num_spatial_layers, 1); |
| 809 } else if (params_.video.codec == "VP9") { | 811 } else if (params_.common.codec == "VP9") { |
| 810 RTC_CHECK_EQ(params_.ss.streams.size(), 1u); | 812 RTC_CHECK_EQ(params_.ss.streams.size(), 1u); |
| 811 } | 813 } |
| 812 } | 814 } |
| 813 | 815 |
| 814 // Static. | 816 // Static. |
| 815 std::vector<int> VideoQualityTest::ParseCSV(const std::string& str) { | 817 std::vector<int> VideoQualityTest::ParseCSV(const std::string& str) { |
| 816 // Parse comma separated nonnegative integers, where some elements may be | 818 // Parse comma separated nonnegative integers, where some elements may be |
| 817 // empty. The empty values are replaced with -1. | 819 // empty. The empty values are replaced with -1. |
| 818 // E.g. "10,-20,,30,40" --> {10, 20, -1, 30,40} | 820 // E.g. "10,-20,,30,40" --> {10, 20, -1, 30,40} |
| 819 // E.g. ",,10,,20," --> {-1, -1, 10, -1, 20, -1} | 821 // E.g. ",,10,,20," --> {-1, -1, 10, -1, 20, -1} |
| (...skipping 15 matching lines...) |
| 835 << "Unexpected non-number value."; | 837 << "Unexpected non-number value."; |
| 836 p += pos; | 838 p += pos; |
| 837 } | 839 } |
| 838 result.push_back(value); | 840 result.push_back(value); |
| 839 return result; | 841 return result; |
| 840 } | 842 } |
| 841 | 843 |
| 842 // Static. | 844 // Static. |
| 843 VideoStream VideoQualityTest::DefaultVideoStream(const Params& params) { | 845 VideoStream VideoQualityTest::DefaultVideoStream(const Params& params) { |
| 844 VideoStream stream; | 846 VideoStream stream; |
| 845 stream.width = params.video.width; | 847 stream.width = params.common.width; |
| 846 stream.height = params.video.height; | 848 stream.height = params.common.height; |
| 847 stream.max_framerate = params.video.fps; | 849 stream.max_framerate = params.common.fps; |
| 848 stream.min_bitrate_bps = params.video.min_bitrate_bps; | 850 stream.min_bitrate_bps = params.common.min_bitrate_bps; |
| 849 stream.target_bitrate_bps = params.video.target_bitrate_bps; | 851 stream.target_bitrate_bps = params.common.target_bitrate_bps; |
| 850 stream.max_bitrate_bps = params.video.max_bitrate_bps; | 852 stream.max_bitrate_bps = params.common.max_bitrate_bps; |
| 851 stream.max_qp = 52; | 853 stream.max_qp = 52; |
| 852 if (params.video.num_temporal_layers == 2) | 854 if (params.common.num_temporal_layers == 2) |
| 853 stream.temporal_layer_thresholds_bps.push_back(stream.target_bitrate_bps); | 855 stream.temporal_layer_thresholds_bps.push_back(stream.target_bitrate_bps); |
| 854 return stream; | 856 return stream; |
| 855 } | 857 } |
| 856 | 858 |
| 857 // Static. | 859 // Static. |
| 858 void VideoQualityTest::FillScalabilitySettings( | 860 void VideoQualityTest::FillScalabilitySettings( |
| 859 Params* params, | 861 Params* params, |
| 860 const std::vector<std::string>& stream_descriptors, | 862 const std::vector<std::string>& stream_descriptors, |
| 861 size_t selected_stream, | 863 size_t selected_stream, |
| 862 int num_spatial_layers, | 864 int num_spatial_layers, |
| (...skipping 22 matching lines...) |
| 885 if (v[5] != -1) | 887 if (v[5] != -1) |
| 886 stream.max_bitrate_bps = v[5]; | 888 stream.max_bitrate_bps = v[5]; |
| 887 if (v.size() > 6 && v[6] != -1) | 889 if (v.size() > 6 && v[6] != -1) |
| 888 stream.max_qp = v[6]; | 890 stream.max_qp = v[6]; |
| 889 if (v.size() > 7) { | 891 if (v.size() > 7) { |
| 890 stream.temporal_layer_thresholds_bps.clear(); | 892 stream.temporal_layer_thresholds_bps.clear(); |
| 891 stream.temporal_layer_thresholds_bps.insert( | 893 stream.temporal_layer_thresholds_bps.insert( |
| 892 stream.temporal_layer_thresholds_bps.end(), v.begin() + 7, v.end()); | 894 stream.temporal_layer_thresholds_bps.end(), v.begin() + 7, v.end()); |
| 893 } else { | 895 } else { |
| 894 // Automatic TL thresholds for more than two layers not supported. | 896 // Automatic TL thresholds for more than two layers not supported. |
| 895 RTC_CHECK_LE(params->video.num_temporal_layers, 2); | 897 RTC_CHECK_LE(params->common.num_temporal_layers, 2); |
| 896 } | 898 } |
| 897 params->ss.streams.push_back(stream); | 899 params->ss.streams.push_back(stream); |
| 898 } | 900 } |
| 899 params->ss.selected_stream = selected_stream; | 901 params->ss.selected_stream = selected_stream; |
| 900 | 902 |
| 901 params->ss.num_spatial_layers = num_spatial_layers ? num_spatial_layers : 1; | 903 params->ss.num_spatial_layers = num_spatial_layers ? num_spatial_layers : 1; |
| 902 params->ss.selected_sl = selected_sl; | 904 params->ss.selected_sl = selected_sl; |
| 903 RTC_CHECK(params->ss.spatial_layers.empty()); | 905 RTC_CHECK(params->ss.spatial_layers.empty()); |
| 904 for (auto descriptor : sl_descriptors) { | 906 for (auto descriptor : sl_descriptors) { |
| 905 if (descriptor.empty()) | 907 if (descriptor.empty()) |
| (...skipping 11 matching lines...) |
| 917 | 919 |
| 918 void VideoQualityTest::SetupCommon(Transport* send_transport, | 920 void VideoQualityTest::SetupCommon(Transport* send_transport, |
| 919 Transport* recv_transport) { | 921 Transport* recv_transport) { |
| 920 if (params_.logs) | 922 if (params_.logs) |
| 921 trace_to_stderr_.reset(new test::TraceToStderr); | 923 trace_to_stderr_.reset(new test::TraceToStderr); |
| 922 | 924 |
| 923 size_t num_streams = params_.ss.streams.size(); | 925 size_t num_streams = params_.ss.streams.size(); |
| 924 CreateSendConfig(num_streams, 0, send_transport); | 926 CreateSendConfig(num_streams, 0, send_transport); |
| 925 | 927 |
| 926 int payload_type; | 928 int payload_type; |
| 927 if (params_.video.codec == "H264") { | 929 if (params_.common.codec == "H264") { |
| 928 encoder_.reset(VideoEncoder::Create(VideoEncoder::kH264)); | 930 encoder_.reset(VideoEncoder::Create(VideoEncoder::kH264)); |
| 929 payload_type = kPayloadTypeH264; | 931 payload_type = kPayloadTypeH264; |
| 930 } else if (params_.video.codec == "VP8") { | 932 } else if (params_.common.codec == "VP8") { |
| 931 encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp8)); | 933 encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp8)); |
| 932 payload_type = kPayloadTypeVP8; | 934 payload_type = kPayloadTypeVP8; |
| 933 } else if (params_.video.codec == "VP9") { | 935 } else if (params_.common.codec == "VP9") { |
| 934 encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp9)); | 936 encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp9)); |
| 935 payload_type = kPayloadTypeVP9; | 937 payload_type = kPayloadTypeVP9; |
| 936 } else { | 938 } else { |
| 937 RTC_NOTREACHED() << "Codec not supported!"; | 939 RTC_NOTREACHED() << "Codec not supported!"; |
| 938 return; | 940 return; |
| 939 } | 941 } |
| 940 video_send_config_.encoder_settings.encoder = encoder_.get(); | 942 video_send_config_.encoder_settings.encoder = encoder_.get(); |
| 941 video_send_config_.encoder_settings.payload_name = params_.video.codec; | 943 video_send_config_.encoder_settings.payload_name = params_.common.codec; |
| 942 video_send_config_.encoder_settings.payload_type = payload_type; | 944 video_send_config_.encoder_settings.payload_type = payload_type; |
| 943 video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs; | 945 video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs; |
| 944 video_send_config_.rtp.rtx.payload_type = kSendRtxPayloadType; | 946 video_send_config_.rtp.rtx.payload_type = kSendRtxPayloadType; |
| 945 for (size_t i = 0; i < num_streams; ++i) | 947 for (size_t i = 0; i < num_streams; ++i) |
| 946 video_send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]); | 948 video_send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]); |
| 947 | 949 |
| 948 video_send_config_.rtp.extensions.clear(); | 950 video_send_config_.rtp.extensions.clear(); |
| 949 if (params_.call.send_side_bwe) { | 951 if (params_.common.send_side_bwe) { |
| 950 video_send_config_.rtp.extensions.push_back( | 952 video_send_config_.rtp.extensions.push_back( |
| 951 RtpExtension(RtpExtension::kTransportSequenceNumberUri, | 953 RtpExtension(RtpExtension::kTransportSequenceNumberUri, |
| 952 test::kTransportSequenceNumberExtensionId)); | 954 test::kTransportSequenceNumberExtensionId)); |
| 953 } else { | 955 } else { |
| 954 video_send_config_.rtp.extensions.push_back(RtpExtension( | 956 video_send_config_.rtp.extensions.push_back(RtpExtension( |
| 955 RtpExtension::kAbsSendTimeUri, test::kAbsSendTimeExtensionId)); | 957 RtpExtension::kAbsSendTimeUri, test::kAbsSendTimeExtensionId)); |
| 956 } | 958 } |
| 957 | 959 |
| 958 video_encoder_config_.min_transmit_bitrate_bps = | 960 video_encoder_config_.min_transmit_bitrate_bps = |
| 959 params_.video.min_transmit_bps; | 961 params_.common.min_transmit_bps; |
| 960 video_encoder_config_.streams = params_.ss.streams; | 962 video_encoder_config_.streams = params_.ss.streams; |
| 961 video_encoder_config_.spatial_layers = params_.ss.spatial_layers; | 963 video_encoder_config_.spatial_layers = params_.ss.spatial_layers; |
| 962 | 964 |
| 963 CreateMatchingReceiveConfigs(recv_transport); | 965 CreateMatchingReceiveConfigs(recv_transport); |
| 964 | 966 |
| 965 for (size_t i = 0; i < num_streams; ++i) { | 967 for (size_t i = 0; i < num_streams; ++i) { |
| 966 video_receive_configs_[i].rtp.nack.rtp_history_ms = kNackRtpHistoryMs; | 968 video_receive_configs_[i].rtp.nack.rtp_history_ms = kNackRtpHistoryMs; |
| 967 video_receive_configs_[i].rtp.rtx[payload_type].ssrc = kSendRtxSsrcs[i]; | 969 video_receive_configs_[i].rtp.rtx[payload_type].ssrc = kSendRtxSsrcs[i]; |
| 968 video_receive_configs_[i].rtp.rtx[payload_type].payload_type = | 970 video_receive_configs_[i].rtp.rtx[payload_type].payload_type = |
| 969 kSendRtxPayloadType; | 971 kSendRtxPayloadType; |
| 970 video_receive_configs_[i].rtp.transport_cc = params_.call.send_side_bwe; | 972 video_receive_configs_[i].rtp.transport_cc = params_.common.send_side_bwe; |
| 971 } | 973 } |
| 972 } | 974 } |
| 973 | 975 |
| 974 void VideoQualityTest::SetupScreenshare() { | 976 void VideoQualityTest::SetupScreenshare() { |
| 975 RTC_CHECK(params_.screenshare.enabled); | 977 RTC_CHECK(params_.screenshare.enabled); |
| 976 | 978 |
| 977 // Fill out codec settings. | 979 // Fill out codec settings. |
| 978 video_encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen; | 980 video_encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen; |
| 979 if (params_.video.codec == "VP8") { | 981 if (params_.common.codec == "VP8") { |
| 980 codec_settings_.VP8 = VideoEncoder::GetDefaultVp8Settings(); | 982 codec_settings_.VP8 = VideoEncoder::GetDefaultVp8Settings(); |
| 981 codec_settings_.VP8.denoisingOn = false; | 983 codec_settings_.VP8.denoisingOn = false; |
| 982 codec_settings_.VP8.frameDroppingOn = false; | 984 codec_settings_.VP8.frameDroppingOn = false; |
| 983 codec_settings_.VP8.numberOfTemporalLayers = | 985 codec_settings_.VP8.numberOfTemporalLayers = |
| 984 static_cast<unsigned char>(params_.video.num_temporal_layers); | 986 static_cast<unsigned char>(params_.common.num_temporal_layers); |
| 985 video_encoder_config_.encoder_specific_settings = &codec_settings_.VP8; | 987 video_encoder_config_.encoder_specific_settings = &codec_settings_.VP8; |
| 986 } else if (params_.video.codec == "VP9") { | 988 } else if (params_.common.codec == "VP9") { |
| 987 codec_settings_.VP9 = VideoEncoder::GetDefaultVp9Settings(); | 989 codec_settings_.VP9 = VideoEncoder::GetDefaultVp9Settings(); |
| 988 codec_settings_.VP9.denoisingOn = false; | 990 codec_settings_.VP9.denoisingOn = false; |
| 989 codec_settings_.VP9.frameDroppingOn = false; | 991 codec_settings_.VP9.frameDroppingOn = false; |
| 990 codec_settings_.VP9.numberOfTemporalLayers = | 992 codec_settings_.VP9.numberOfTemporalLayers = |
| 991 static_cast<unsigned char>(params_.video.num_temporal_layers); | 993 static_cast<unsigned char>(params_.common.num_temporal_layers); |
| 992 video_encoder_config_.encoder_specific_settings = &codec_settings_.VP9; | 994 video_encoder_config_.encoder_specific_settings = &codec_settings_.VP9; |
| 993 codec_settings_.VP9.numberOfSpatialLayers = | 995 codec_settings_.VP9.numberOfSpatialLayers = |
| 994 static_cast<unsigned char>(params_.ss.num_spatial_layers); | 996 static_cast<unsigned char>(params_.ss.num_spatial_layers); |
| 995 } | 997 } |
| 996 | 998 |
| 997 // Setup frame generator. | 999 // Setup frame generator. |
| 998 const size_t kWidth = 1850; | 1000 const size_t kWidth = 1850; |
| 999 const size_t kHeight = 1110; | 1001 const size_t kHeight = 1110; |
| 1000 std::vector<std::string> slides; | 1002 std::vector<std::string> slides; |
| 1001 slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv")); | 1003 slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv")); |
| 1002 slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv")); | 1004 slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv")); |
| 1003 slides.push_back(test::ResourcePath("photo_1850_1110", "yuv")); | 1005 slides.push_back(test::ResourcePath("photo_1850_1110", "yuv")); |
| 1004 slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv")); | 1006 slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv")); |
| 1005 | 1007 |
| 1006 if (params_.screenshare.scroll_duration == 0) { | 1008 if (params_.screenshare.scroll_duration == 0) { |
| 1007 // Cycle image every slide_change_interval seconds. | 1009 // Cycle image every slide_change_interval seconds. |
| 1008 frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile( | 1010 frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile( |
| 1009 slides, kWidth, kHeight, | 1011 slides, kWidth, kHeight, |
| 1010 params_.screenshare.slide_change_interval * params_.video.fps)); | 1012 params_.screenshare.slide_change_interval * params_.common.fps)); |
| 1011 } else { | 1013 } else { |
| 1012 RTC_CHECK_LE(params_.video.width, kWidth); | 1014 RTC_CHECK_LE(params_.common.width, kWidth); |
| 1013 RTC_CHECK_LE(params_.video.height, kHeight); | 1015 RTC_CHECK_LE(params_.common.height, kHeight); |
| 1014 RTC_CHECK_GT(params_.screenshare.slide_change_interval, 0); | 1016 RTC_CHECK_GT(params_.screenshare.slide_change_interval, 0); |
| 1015 const int kPauseDurationMs = (params_.screenshare.slide_change_interval - | 1017 const int kPauseDurationMs = (params_.screenshare.slide_change_interval - |
| 1016 params_.screenshare.scroll_duration) * | 1018 params_.screenshare.scroll_duration) * |
| 1017 1000; | 1019 1000; |
| 1018 RTC_CHECK_LE(params_.screenshare.scroll_duration, | 1020 RTC_CHECK_LE(params_.screenshare.scroll_duration, |
| 1019 params_.screenshare.slide_change_interval); | 1021 params_.screenshare.slide_change_interval); |
| 1020 | 1022 |
| 1021 frame_generator_.reset( | 1023 frame_generator_.reset( |
| 1022 test::FrameGenerator::CreateScrollingInputFromYuvFiles( | 1024 test::FrameGenerator::CreateScrollingInputFromYuvFiles( |
| 1023 clock_, slides, kWidth, kHeight, params_.video.width, | 1025 clock_, slides, kWidth, kHeight, params_.common.width, |
| 1024 params_.video.height, params_.screenshare.scroll_duration * 1000, | 1026 params_.common.height, params_.screenshare.scroll_duration * 1000, |
| 1025 kPauseDurationMs)); | 1027 kPauseDurationMs)); |
| 1026 } | 1028 } |
| 1027 } | 1029 } |
| 1028 | 1030 |
| 1029 void VideoQualityTest::CreateCapturer(VideoCaptureInput* input) { | 1031 void VideoQualityTest::CreateCapturer(VideoCaptureInput* input) { |
| 1030 if (params_.screenshare.enabled) { | 1032 if (params_.screenshare.enabled) { |
| 1031 test::FrameGeneratorCapturer* frame_generator_capturer = | 1033 test::FrameGeneratorCapturer* frame_generator_capturer = |
| 1032 new test::FrameGeneratorCapturer( | 1034 new test::FrameGeneratorCapturer( |
| 1033 clock_, input, frame_generator_.release(), params_.video.fps); | 1035 clock_, input, frame_generator_.release(), params_.common.fps); |
| 1034 EXPECT_TRUE(frame_generator_capturer->Init()); | 1036 EXPECT_TRUE(frame_generator_capturer->Init()); |
| 1035 capturer_.reset(frame_generator_capturer); | 1037 capturer_.reset(frame_generator_capturer); |
| 1036 } else { | 1038 } else { |
| 1037 if (params_.video.clip_name.empty()) { | 1039 if (params_.video.clip_name.empty()) { |
| 1038 capturer_.reset(test::VideoCapturer::Create(input, params_.video.width, | 1040 capturer_.reset(test::VideoCapturer::Create(input, params_.common.width, |
| 1039 params_.video.height, | 1041 params_.common.height, |
| 1040 params_.video.fps, clock_)); | 1042 params_.common.fps, clock_)); |
| 1041 } else { | 1043 } else { |
| 1042 capturer_.reset(test::FrameGeneratorCapturer::CreateFromYuvFile( | 1044 capturer_.reset(test::FrameGeneratorCapturer::CreateFromYuvFile( |
| 1043 input, test::ResourcePath(params_.video.clip_name, "yuv"), | 1045 input, test::ResourcePath(params_.video.clip_name, "yuv"), |
| 1044 params_.video.width, params_.video.height, params_.video.fps, | 1046 params_.common.width, params_.common.height, params_.common.fps, |
| 1045 clock_)); | 1047 clock_)); |
| 1046 ASSERT_TRUE(capturer_) << "Could not create capturer for " | 1048 ASSERT_TRUE(capturer_) << "Could not create capturer for " |
| 1047 << params_.video.clip_name | 1049 << params_.video.clip_name |
| 1048 << ".yuv. Is this resource file present?"; | 1050 << ".yuv. Is this resource file present?"; |
| 1049 } | 1051 } |
| 1050 } | 1052 } |
| 1051 } | 1053 } |
| 1052 | 1054 |
| 1053 void VideoQualityTest::RunWithAnalyzer(const Params& params) { | 1055 void VideoQualityTest::RunWithAnalyzer(const Params& params) { |
| 1054 params_ = params; | 1056 params_ = params; |
| 1055 | 1057 |
| 1056 RTC_CHECK(!params_.audio.enabled); | 1058 RTC_CHECK(!params_.audio); |
| 1057 // TODO(ivica): Merge with RunWithRenderer and use a flag / argument to | 1059 // TODO(ivica): Merge with RunWithRenderer and use a flag / argument to |
| 1058 // differentiate between the analyzer and the renderer case. | 1060 // differentiate between the analyzer and the renderer case. |
| 1059 CheckParams(); | 1061 CheckParams(); |
| 1060 | 1062 |
| 1061 FILE* graph_data_output_file = nullptr; | 1063 FILE* graph_data_output_file = nullptr; |
| 1062 if (!params_.analyzer.graph_data_output_filename.empty()) { | 1064 if (!params_.analyzer.graph_data_output_filename.empty()) { |
| 1063 graph_data_output_file = | 1065 graph_data_output_file = |
| 1064 fopen(params_.analyzer.graph_data_output_filename.c_str(), "w"); | 1066 fopen(params_.analyzer.graph_data_output_filename.c_str(), "w"); |
| 1065 RTC_CHECK(graph_data_output_file) | 1067 RTC_CHECK(graph_data_output_file) |
| 1066 << "Can't open the file " << params_.analyzer.graph_data_output_filename | 1068 << "Can't open the file " << params_.analyzer.graph_data_output_filename |
| 1067 << "!"; | 1069 << "!"; |
| 1068 } | 1070 } |
| 1069 | 1071 |
| 1070 Call::Config call_config; | 1072 Call::Config call_config; |
| 1071 call_config.bitrate_config = params_.call.call_bitrate_config; | 1073 call_config.bitrate_config = params.common.call_bitrate_config; |
| 1072 CreateCalls(call_config, call_config); | 1074 CreateCalls(call_config, call_config); |
| 1073 | 1075 |
| 1074 test::LayerFilteringTransport send_transport( | 1076 test::LayerFilteringTransport send_transport( |
| 1075 params_.pipe, sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9, | 1077 params.pipe, sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9, |
| 1076 params_.video.selected_tl, params_.ss.selected_sl); | 1078 params.common.selected_tl, params_.ss.selected_sl); |
| 1077 test::DirectTransport recv_transport(params_.pipe, receiver_call_.get()); | 1079 test::DirectTransport recv_transport(params.pipe, receiver_call_.get()); |
| 1078 | 1080 |
| 1079 std::string graph_title = params_.analyzer.graph_title; | 1081 std::string graph_title = params_.analyzer.graph_title; |
| 1080 if (graph_title.empty()) | 1082 if (graph_title.empty()) |
| 1081 graph_title = VideoQualityTest::GenerateGraphTitle(); | 1083 graph_title = VideoQualityTest::GenerateGraphTitle(); |
| 1082 | 1084 |
| 1083 // In the case of different resolutions, the functions calculating PSNR and | 1085 // In the case of different resolutions, the functions calculating PSNR and |
| 1084 // SSIM return -1.0, instead of a positive value as usual. VideoAnalyzer | 1086 // SSIM return -1.0, instead of a positive value as usual. VideoAnalyzer |
| 1085 // aborts if the average psnr/ssim are below the given threshold, which is | 1087 // aborts if the average psnr/ssim are below the given threshold, which is |
| 1086 // 0.0 by default. Setting the thresholds to -1.1 prevents the unnecessary | 1088 // 0.0 by default. Setting the thresholds to -1.1 prevents the unnecessary |
| 1087 // abort. | 1089 // abort. |
| 1088 VideoStream& selected_stream = params_.ss.streams[params_.ss.selected_stream]; | 1090 VideoStream& selected_stream = params_.ss.streams[params_.ss.selected_stream]; |
| 1089 int selected_sl = params_.ss.selected_sl != -1 | 1091 int selected_sl = params_.ss.selected_sl != -1 |
| 1090 ? params_.ss.selected_sl | 1092 ? params_.ss.selected_sl |
| 1091 : params_.ss.num_spatial_layers - 1; | 1093 : params_.ss.num_spatial_layers - 1; |
| 1092 bool disable_quality_check = | 1094 bool disable_quality_check = |
| 1093 selected_stream.width != params_.video.width || | 1095 selected_stream.width != params_.common.width || |
| 1094 selected_stream.height != params_.video.height || | 1096 selected_stream.height != params_.common.height || |
| 1095 (!params_.ss.spatial_layers.empty() && | 1097 (!params_.ss.spatial_layers.empty() && |
| 1096 params_.ss.spatial_layers[selected_sl].scaling_factor_num != | 1098 params_.ss.spatial_layers[selected_sl].scaling_factor_num != |
| 1097 params_.ss.spatial_layers[selected_sl].scaling_factor_den); | 1099 params_.ss.spatial_layers[selected_sl].scaling_factor_den); |
| 1098 if (disable_quality_check) { | 1100 if (disable_quality_check) { |
| 1099 fprintf(stderr, | 1101 fprintf(stderr, |
| 1100 "Warning: Calculating PSNR and SSIM for downsized resolution " | 1102 "Warning: Calculating PSNR and SSIM for downsized resolution " |
| 1101 "not implemented yet! Skipping PSNR and SSIM calculations!"); | 1103 "not implemented yet! Skipping PSNR and SSIM calculations!"); |
| 1102 } | 1104 } |
| 1103 | 1105 |
| 1104 VideoAnalyzer analyzer( | 1106 VideoAnalyzer analyzer( |
| 1105 &send_transport, params_.analyzer.test_label, | 1107 &send_transport, params_.analyzer.test_label, |
| 1106 disable_quality_check ? -1.1 : params_.analyzer.avg_psnr_threshold, | 1108 disable_quality_check ? -1.1 : params_.analyzer.avg_psnr_threshold, |
| 1107 disable_quality_check ? -1.1 : params_.analyzer.avg_ssim_threshold, | 1109 disable_quality_check ? -1.1 : params_.analyzer.avg_ssim_threshold, |
| 1108 params_.analyzer.test_durations_secs * params_.video.fps, | 1110 params_.analyzer.test_durations_secs * params_.common.fps, |
| 1109 graph_data_output_file, graph_title, | 1111 graph_data_output_file, graph_title, |
| 1110 kVideoSendSsrcs[params_.ss.selected_stream]); | 1112 kVideoSendSsrcs[params_.ss.selected_stream]); |
| 1111 | 1113 |
| 1112 analyzer.SetReceiver(receiver_call_->Receiver()); | 1114 analyzer.SetReceiver(receiver_call_->Receiver()); |
| 1113 send_transport.SetReceiver(&analyzer); | 1115 send_transport.SetReceiver(&analyzer); |
| 1114 recv_transport.SetReceiver(sender_call_->Receiver()); | 1116 recv_transport.SetReceiver(sender_call_->Receiver()); |
| 1115 | 1117 |
| 1116 SetupCommon(&analyzer, &recv_transport); | 1118 SetupCommon(&analyzer, &recv_transport); |
| 1117 video_receive_configs_[params_.ss.selected_stream].renderer = &analyzer; | 1119 video_receive_configs_[params_.ss.selected_stream].renderer = &analyzer; |
| 1118 video_send_config_.pre_encode_callback = analyzer.pre_encode_proxy(); | 1120 video_send_config_.pre_encode_callback = analyzer.pre_encode_proxy(); |
| (...skipping 30 matching lines...) |
| 1149 | 1151 |
| 1150 if (graph_data_output_file) | 1152 if (graph_data_output_file) |
| 1151 fclose(graph_data_output_file); | 1153 fclose(graph_data_output_file); |
| 1152 } | 1154 } |
| 1153 | 1155 |
| 1154 void VideoQualityTest::RunWithRenderers(const Params& params) { | 1156 void VideoQualityTest::RunWithRenderers(const Params& params) { |
| 1155 params_ = params; | 1157 params_ = params; |
| 1156 CheckParams(); | 1158 CheckParams(); |
| 1157 | 1159 |
| 1158 std::unique_ptr<test::VideoRenderer> local_preview( | 1160 std::unique_ptr<test::VideoRenderer> local_preview( |
| 1159 test::VideoRenderer::Create("Local Preview", params_.video.width, | 1161 test::VideoRenderer::Create("Local Preview", params_.common.width, |
| 1160 params_.video.height)); | 1162 params_.common.height)); |
| 1161 size_t stream_id = params_.ss.selected_stream; | 1163 size_t stream_id = params_.ss.selected_stream; |
| 1162 std::string title = "Loopback Video"; | 1164 std::string title = "Loopback Video"; |
| 1163 if (params_.ss.streams.size() > 1) { | 1165 if (params_.ss.streams.size() > 1) { |
| 1164 std::ostringstream s; | 1166 std::ostringstream s; |
| 1165 s << stream_id; | 1167 s << stream_id; |
| 1166 title += " - Stream #" + s.str(); | 1168 title += " - Stream #" + s.str(); |
| 1167 } | 1169 } |
| 1168 | 1170 |
| 1169 std::unique_ptr<test::VideoRenderer> loopback_video( | 1171 std::unique_ptr<test::VideoRenderer> loopback_video( |
| 1170 test::VideoRenderer::Create(title.c_str(), | 1172 test::VideoRenderer::Create(title.c_str(), |
| 1171 params_.ss.streams[stream_id].width, | 1173 params_.ss.streams[stream_id].width, |
| 1172 params_.ss.streams[stream_id].height)); | 1174 params_.ss.streams[stream_id].height)); |
| 1173 | 1175 |
| 1174 // TODO(ivica): Remove bitrate_config and use the default Call::Config(), to | 1176 // TODO(ivica): Remove bitrate_config and use the default Call::Config(), to |
| 1175 // match the full stack tests. | 1177 // match the full stack tests. |
| 1176 Call::Config call_config; | 1178 Call::Config call_config; |
| 1177 call_config.bitrate_config = params_.call.call_bitrate_config; | 1179 call_config.bitrate_config = params_.common.call_bitrate_config; |
| 1178 | 1180 |
| 1179 ::VoiceEngineState voe; | 1181 ::VoiceEngineState voe; |
| 1180 if (params_.audio.enabled) { | 1182 if (params_.audio) { |
| 1181 CreateVoiceEngine(&voe, decoder_factory_); | 1183 CreateVoiceEngine(&voe, decoder_factory_); |
| 1182 AudioState::Config audio_state_config; | 1184 AudioState::Config audio_state_config; |
| 1183 audio_state_config.voice_engine = voe.voice_engine; | 1185 audio_state_config.voice_engine = voe.voice_engine; |
| 1184 call_config.audio_state = AudioState::Create(audio_state_config); | 1186 call_config.audio_state = AudioState::Create(audio_state_config); |
| 1185 } | 1187 } |
| 1186 | 1188 |
| 1187 std::unique_ptr<Call> call(Call::Create(call_config)); | 1189 std::unique_ptr<Call> call(Call::Create(call_config)); |
| 1188 | 1190 |
| 1189 test::LayerFilteringTransport transport( | 1191 test::LayerFilteringTransport transport( |
| 1190 params.pipe, call.get(), kPayloadTypeVP8, kPayloadTypeVP9, | 1192 params.pipe, call.get(), kPayloadTypeVP8, kPayloadTypeVP9, |
| 1191 params.video.selected_tl, params_.ss.selected_sl); | 1193 params.common.selected_tl, params_.ss.selected_sl); |
| 1192 // TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at | 1194 // TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at |
| 1193 // least share as much code as possible. That way this test would also match | 1195 // least share as much code as possible. That way this test would also match |
| 1194 // the full stack tests better. | 1196 // the full stack tests better. |
| 1195 transport.SetReceiver(call->Receiver()); | 1197 transport.SetReceiver(call->Receiver()); |
| 1196 | 1198 |
| 1197 SetupCommon(&transport, &transport); | 1199 SetupCommon(&transport, &transport); |
| 1198 | 1200 |
| 1199 video_send_config_.pre_encode_callback = local_preview.get(); | 1201 video_send_config_.pre_encode_callback = local_preview.get(); |
| 1200 video_receive_configs_[stream_id].renderer = loopback_video.get(); | 1202 video_receive_configs_[stream_id].renderer = loopback_video.get(); |
| 1201 if (params_.audio.enabled && params_.audio.sync_video) | 1203 if (params_.audio && params_.audio_video_sync) |
| 1202 video_receive_configs_[stream_id].sync_group = kSyncGroup; | 1204 video_receive_configs_[stream_id].sync_group = kSyncGroup; |
| 1203 | 1205 |
| 1204 video_send_config_.suspend_below_min_bitrate = | 1206 video_send_config_.suspend_below_min_bitrate = |
| 1205 params_.video.suspend_below_min_bitrate; | 1207 params_.common.suspend_below_min_bitrate; |
| 1206 | 1208 |
| 1207 if (params.video.fec) { | 1209 if (params.common.fec) { |
| 1208 video_send_config_.rtp.fec.red_payload_type = kRedPayloadType; | 1210 video_send_config_.rtp.fec.red_payload_type = kRedPayloadType; |
| 1209 video_send_config_.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType; | 1211 video_send_config_.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType; |
| 1210 video_receive_configs_[stream_id].rtp.fec.red_payload_type = | 1212 video_receive_configs_[stream_id].rtp.fec.red_payload_type = |
| 1211 kRedPayloadType; | 1213 kRedPayloadType; |
| 1212 video_receive_configs_[stream_id].rtp.fec.ulpfec_payload_type = | 1214 video_receive_configs_[stream_id].rtp.fec.ulpfec_payload_type = |
| 1213 kUlpfecPayloadType; | 1215 kUlpfecPayloadType; |
| 1214 } | 1216 } |
| 1215 | 1217 |
| 1216 if (params_.screenshare.enabled) | 1218 if (params_.screenshare.enabled) |
| 1217 SetupScreenshare(); | 1219 SetupScreenshare(); |
| 1218 | 1220 |
| 1219 video_send_stream_ = call->CreateVideoSendStream( | 1221 video_send_stream_ = call->CreateVideoSendStream( |
| 1220 video_send_config_.Copy(), video_encoder_config_.Copy()); | 1222 video_send_config_.Copy(), video_encoder_config_.Copy()); |
| 1221 VideoReceiveStream* video_receive_stream = | 1223 VideoReceiveStream* video_receive_stream = |
| 1222 call->CreateVideoReceiveStream(video_receive_configs_[stream_id].Copy()); | 1224 call->CreateVideoReceiveStream(video_receive_configs_[stream_id].Copy()); |
| 1223 CreateCapturer(video_send_stream_->Input()); | 1225 CreateCapturer(video_send_stream_->Input()); |
| 1224 | 1226 |
| 1225 AudioReceiveStream* audio_receive_stream = nullptr; | 1227 AudioReceiveStream* audio_receive_stream = nullptr; |
| 1226 if (params_.audio.enabled) { | 1228 if (params_.audio) { |
| 1227 audio_send_config_ = AudioSendStream::Config(&transport); | 1229 audio_send_config_ = AudioSendStream::Config(&transport); |
| 1228 audio_send_config_.voe_channel_id = voe.send_channel_id; | 1230 audio_send_config_.voe_channel_id = voe.send_channel_id; |
| 1229 audio_send_config_.rtp.ssrc = kAudioSendSsrc; | 1231 audio_send_config_.rtp.ssrc = kAudioSendSsrc; |
| 1230 | 1232 |
| 1231 // Add extension to enable audio send side BWE, and allow audio bit rate | 1233 // Add extension to enable audio send side BWE, and allow audio bit rate |
| 1232 // adaptation. | 1234 // adaptation. |
| 1233 audio_send_config_.rtp.extensions.clear(); | 1235 audio_send_config_.rtp.extensions.clear(); |
| 1234 if (params_.call.send_side_bwe) { | 1236 if (params_.common.send_side_bwe) { |
| 1235 audio_send_config_.rtp.extensions.push_back(webrtc::RtpExtension( | 1237 audio_send_config_.rtp.extensions.push_back(webrtc::RtpExtension( |
| 1236 webrtc::RtpExtension::kTransportSequenceNumberUri, | 1238 webrtc::RtpExtension::kTransportSequenceNumberUri, |
| 1237 test::kTransportSequenceNumberExtensionId)); | 1239 test::kTransportSequenceNumberExtensionId)); |
| 1238 audio_send_config_.min_bitrate_kbps = kOpusMinBitrate / 1000; | 1240 audio_send_config_.min_bitrate_kbps = kOpusMinBitrate / 1000; |
| 1239 audio_send_config_.max_bitrate_kbps = kOpusBitrateFb / 1000; | 1241 audio_send_config_.max_bitrate_kbps = kOpusBitrateFb / 1000; |
| 1240 } | 1242 } |
| 1241 | 1243 |
| 1242 audio_send_stream_ = call->CreateAudioSendStream(audio_send_config_); | 1244 audio_send_stream_ = call->CreateAudioSendStream(audio_send_config_); |
| 1243 | 1245 |
| 1244 AudioReceiveStream::Config audio_config; | 1246 AudioReceiveStream::Config audio_config; |
| 1245 audio_config.rtp.local_ssrc = kReceiverLocalAudioSsrc; | 1247 audio_config.rtp.local_ssrc = kReceiverLocalAudioSsrc; |
| 1246 audio_config.rtcp_send_transport = &transport; | 1248 audio_config.rtcp_send_transport = &transport; |
| 1247 audio_config.voe_channel_id = voe.receive_channel_id; | 1249 audio_config.voe_channel_id = voe.receive_channel_id; |
| 1248 audio_config.rtp.remote_ssrc = audio_send_config_.rtp.ssrc; | 1250 audio_config.rtp.remote_ssrc = audio_send_config_.rtp.ssrc; |
| 1249 audio_config.rtp.transport_cc = params_.call.send_side_bwe; | 1251 audio_config.rtp.transport_cc = params_.common.send_side_bwe; |
| 1250 audio_config.rtp.extensions = audio_send_config_.rtp.extensions; | 1252 audio_config.rtp.extensions = audio_send_config_.rtp.extensions; |
| 1251 audio_config.decoder_factory = decoder_factory_; | 1253 audio_config.decoder_factory = decoder_factory_; |
| 1252 if (params_.audio.sync_video) | 1254 if (params_.audio_video_sync) |
| 1253 audio_config.sync_group = kSyncGroup; | 1255 audio_config.sync_group = kSyncGroup; |
| 1254 | 1256 |
| 1255 audio_receive_stream = call->CreateAudioReceiveStream(audio_config); | 1257 audio_receive_stream = call->CreateAudioReceiveStream(audio_config); |
| 1256 | 1258 |
| 1257 const CodecInst kOpusInst = {120, "OPUS", 48000, 960, 2, 64000}; | 1259 const CodecInst kOpusInst = {120, "OPUS", 48000, 960, 2, 64000}; |
| 1258 EXPECT_EQ(0, voe.codec->SetSendCodec(voe.send_channel_id, kOpusInst)); | 1260 EXPECT_EQ(0, voe.codec->SetSendCodec(voe.send_channel_id, kOpusInst)); |
| 1259 } | 1261 } |
| 1260 | 1262 |
| 1261 // Start sending and receiving video. | 1263 // Start sending and receiving video. |
| 1262 video_receive_stream->Start(); | 1264 video_receive_stream->Start(); |
| 1263 video_send_stream_->Start(); | 1265 video_send_stream_->Start(); |
| 1264 capturer_->Start(); | 1266 capturer_->Start(); |
| 1265 | 1267 |
| 1266 if (params_.audio.enabled) { | 1268 if (params_.audio) { |
| 1267 // Start receiving audio. | 1269 // Start receiving audio. |
| 1268 audio_receive_stream->Start(); | 1270 audio_receive_stream->Start(); |
| 1269 EXPECT_EQ(0, voe.base->StartPlayout(voe.receive_channel_id)); | 1271 EXPECT_EQ(0, voe.base->StartPlayout(voe.receive_channel_id)); |
| 1270 EXPECT_EQ(0, voe.base->StartReceive(voe.receive_channel_id)); | 1272 EXPECT_EQ(0, voe.base->StartReceive(voe.receive_channel_id)); |
| 1271 | 1273 |
| 1272 // Start sending audio. | 1274 // Start sending audio. |
| 1273 audio_send_stream_->Start(); | 1275 audio_send_stream_->Start(); |
| 1274 EXPECT_EQ(0, voe.base->StartSend(voe.send_channel_id)); | 1276 EXPECT_EQ(0, voe.base->StartSend(voe.send_channel_id)); |
| 1275 } | 1277 } |
| 1276 | 1278 |
| 1277 test::PressEnterToContinue(); | 1279 test::PressEnterToContinue(); |
| 1278 | 1280 |
| 1279 if (params_.audio.enabled) { | 1281 if (params_.audio) { |
| 1280 // Stop sending audio. | 1282 // Stop sending audio. |
| 1281 EXPECT_EQ(0, voe.base->StopSend(voe.send_channel_id)); | 1283 EXPECT_EQ(0, voe.base->StopSend(voe.send_channel_id)); |
| 1282 audio_send_stream_->Stop(); | 1284 audio_send_stream_->Stop(); |
| 1283 | 1285 |
| 1284 // Stop receiving audio. | 1286 // Stop receiving audio. |
| 1285 EXPECT_EQ(0, voe.base->StopReceive(voe.receive_channel_id)); | 1287 EXPECT_EQ(0, voe.base->StopReceive(voe.receive_channel_id)); |
| 1286 EXPECT_EQ(0, voe.base->StopPlayout(voe.receive_channel_id)); | 1288 EXPECT_EQ(0, voe.base->StopPlayout(voe.receive_channel_id)); |
| 1287 audio_receive_stream->Stop(); | 1289 audio_receive_stream->Stop(); |
| 1288 } | 1290 } |
| 1289 | 1291 |
| 1290 // Stop receiving and sending video. | 1292 // Stop receiving and sending video. |
| 1291 capturer_->Stop(); | 1293 capturer_->Stop(); |
| 1292 video_send_stream_->Stop(); | 1294 video_send_stream_->Stop(); |
| 1293 video_receive_stream->Stop(); | 1295 video_receive_stream->Stop(); |
| 1294 | 1296 |
| 1295 call->DestroyVideoReceiveStream(video_receive_stream); | 1297 call->DestroyVideoReceiveStream(video_receive_stream); |
| 1296 call->DestroyVideoSendStream(video_send_stream_); | 1298 call->DestroyVideoSendStream(video_send_stream_); |
| 1297 | 1299 |
| 1298 if (params_.audio.enabled) { | 1300 if (params_.audio) { |
| 1299 call->DestroyAudioSendStream(audio_send_stream_); | 1301 call->DestroyAudioSendStream(audio_send_stream_); |
| 1300 call->DestroyAudioReceiveStream(audio_receive_stream); | 1302 call->DestroyAudioReceiveStream(audio_receive_stream); |
| 1301 } | 1303 } |
| 1302 | 1304 |
| 1303 transport.StopSending(); | 1305 transport.StopSending(); |
| 1304 if (params_.audio.enabled) | 1306 if (params_.audio) |
| 1305 DestroyVoiceEngine(&voe); | 1307 DestroyVoiceEngine(&voe); |
| 1306 } | 1308 } |
| 1307 | 1309 |
| 1308 } // namespace webrtc | 1310 } // namespace webrtc |
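For orientation, a minimal sketch of how a caller might populate the renamed fields used on the right-hand (NEW) side of this change before running the test. It only references members that appear in the diff above (params.common.*, params.ss.*, params.screenshare.enabled, params.analyzer.*); the nesting of Params inside VideoQualityTest and all concrete values are illustrative assumptions, not taken from this CL.

    // Hypothetical caller; values are made up for illustration only.
    webrtc::VideoQualityTest::Params params;
    params.common.codec = "VP8";
    params.common.width = 640;
    params.common.height = 480;
    params.common.fps = 30;
    params.common.min_bitrate_bps = 50000;
    params.common.target_bitrate_bps = 500000;
    params.common.max_bitrate_bps = 2000000;
    params.common.num_temporal_layers = 2;
    params.common.selected_tl = -1;   // -1: no specific temporal layer (allowed by CheckParams()).
    params.common.send_side_bwe = true;
    params.screenshare.enabled = false;
    params.ss.selected_stream = 0;
    params.ss.selected_sl = -1;       // CheckParams() adds a default stream and one spatial layer.
    params.analyzer.test_label = "example";
    params.analyzer.test_durations_secs = 10;
    params.analyzer.avg_psnr_threshold = 30.0;
    params.analyzer.avg_ssim_threshold = 0.9;

    webrtc::VideoQualityTest test;
    test.RunWithAnalyzer(params);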