OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 #include <stdio.h> | 10 #include <stdio.h> |
(...skipping 727 matching lines...) | |
738 const double avg_ssim_threshold_; | 738 const double avg_ssim_threshold_; |
739 | 739 |
740 rtc::CriticalSection comparison_lock_; | 740 rtc::CriticalSection comparison_lock_; |
741 std::vector<rtc::PlatformThread*> comparison_thread_pool_; | 741 std::vector<rtc::PlatformThread*> comparison_thread_pool_; |
742 rtc::PlatformThread stats_polling_thread_; | 742 rtc::PlatformThread stats_polling_thread_; |
743 rtc::Event comparison_available_event_; | 743 rtc::Event comparison_available_event_; |
744 std::deque<FrameComparison> comparisons_ GUARDED_BY(comparison_lock_); | 744 std::deque<FrameComparison> comparisons_ GUARDED_BY(comparison_lock_); |
745 rtc::Event done_; | 745 rtc::Event done_; |
746 }; | 746 }; |
747 | 747 |
748 VideoQualityTest::Params::Params() | |
749 : call({}), | |
750 video({}), | |
751 audio({}), | |
752 screenshare({}), | |
753 analyzer({}), | |
754 pipe(), | |
755 logs(false), | |
756 ss({}) {} | |
stefan-webrtc 2016/09/09 11:42:09
Is this the right way to do this? It's not clear t…
minyue-webrtc 2016/09/09 14:01:32
This means that all fields will be filled with 0s.
stefan-webrtc 2016/09/09 14:18:05
But which parameter was used without being initial…
minyue-webrtc 2016/10/20 15:50:41
Fixing at the tests meaning adding many codes, yet…
| |
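A minimal sketch of what the `({})` member initializers in the new `Params()` constructor above do, using hypothetical stand-in structs (not the real `VideoQualityTest::Params` fields): value-initializing an aggregate member zero-fills its scalar fields, which is why "all fields will be filled with 0s" rather than being left indeterminate.

```cpp
// Hypothetical stand-ins for Params::Video / Params, illustrating the
// value-initialization pattern used in the constructor in the diff above.
#include <cassert>

struct Video {
  int width;
  int height;
  int fps;
  int target_bitrate_bps;
};

struct Params {
  Video video;
  bool logs;
  // video({}) value-initializes the aggregate: every scalar member becomes 0.
  Params() : video({}), logs(false) {}
};

int main() {
  Params p;
  assert(p.video.fps == 0 && p.video.target_bitrate_bps == 0);
  return 0;
}
```

The trade-off raised in the thread is that such zero defaults are silent: a test that forgets to set a parameter will run with 0 instead of failing, which is what the reviewer's question about uninitialized parameters points at.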
757 | |
758 VideoQualityTest::Params::~Params() = default; | |
759 | |
748 VideoQualityTest::VideoQualityTest() : clock_(Clock::GetRealTimeClock()) {} | 760 VideoQualityTest::VideoQualityTest() : clock_(Clock::GetRealTimeClock()) {} |
749 | 761 |
750 void VideoQualityTest::TestBody() {} | 762 void VideoQualityTest::TestBody() {} |
751 | 763 |
752 std::string VideoQualityTest::GenerateGraphTitle() const { | 764 std::string VideoQualityTest::GenerateGraphTitle() const { |
753 std::stringstream ss; | 765 std::stringstream ss; |
754 ss << params_.common.codec; | 766 ss << params_.video.codec; |
755 ss << " (" << params_.common.target_bitrate_bps / 1000 << "kbps"; | 767 ss << " (" << params_.video.target_bitrate_bps / 1000 << "kbps"; |
756 ss << ", " << params_.common.fps << " FPS"; | 768 ss << ", " << params_.video.fps << " FPS"; |
757 if (params_.screenshare.scroll_duration) | 769 if (params_.screenshare.scroll_duration) |
758 ss << ", " << params_.screenshare.scroll_duration << "s scroll"; | 770 ss << ", " << params_.screenshare.scroll_duration << "s scroll"; |
759 if (params_.ss.streams.size() > 1) | 771 if (params_.ss.streams.size() > 1) |
760 ss << ", Stream #" << params_.ss.selected_stream; | 772 ss << ", Stream #" << params_.ss.selected_stream; |
761 if (params_.ss.num_spatial_layers > 1) | 773 if (params_.ss.num_spatial_layers > 1) |
762 ss << ", Layer #" << params_.ss.selected_sl; | 774 ss << ", Layer #" << params_.ss.selected_sl; |
763 ss << ")"; | 775 ss << ")"; |
764 return ss.str(); | 776 return ss.str(); |
765 } | 777 } |
766 | 778 |
767 void VideoQualityTest::CheckParams() { | 779 void VideoQualityTest::CheckParams() { |
768 // Add a default stream if none specified. | 780 // Add a default stream if none specified. |
769 if (params_.ss.streams.empty()) | 781 if (params_.ss.streams.empty()) |
770 params_.ss.streams.push_back(VideoQualityTest::DefaultVideoStream(params_)); | 782 params_.ss.streams.push_back(VideoQualityTest::DefaultVideoStream(params_)); |
771 if (params_.ss.num_spatial_layers == 0) | 783 if (params_.ss.num_spatial_layers == 0) |
772 params_.ss.num_spatial_layers = 1; | 784 params_.ss.num_spatial_layers = 1; |
773 | 785 |
774 if (params_.pipe.loss_percent != 0 || | 786 if (params_.pipe.loss_percent != 0 || |
775 params_.pipe.queue_length_packets != 0) { | 787 params_.pipe.queue_length_packets != 0) { |
776 // Since LayerFilteringTransport changes the sequence numbers, we can't | 788 // Since LayerFilteringTransport changes the sequence numbers, we can't |
777 // use that feature with packet loss, since the NACK request would end up | 789 // use that feature with packet loss, since the NACK request would end up |
778 // retransmitting the wrong packets. | 790 // retransmitting the wrong packets. |
779 RTC_CHECK(params_.ss.selected_sl == -1 || | 791 RTC_CHECK(params_.ss.selected_sl == -1 || |
780 params_.ss.selected_sl == params_.ss.num_spatial_layers - 1); | 792 params_.ss.selected_sl == params_.ss.num_spatial_layers - 1); |
781 RTC_CHECK(params_.common.selected_tl == -1 || | 793 RTC_CHECK(params_.video.selected_tl == -1 || |
782 params_.common.selected_tl == | 794 params_.video.selected_tl == |
783 params_.common.num_temporal_layers - 1); | 795 params_.video.num_temporal_layers - 1); |
784 } | 796 } |
785 | 797 |
786 // TODO(ivica): Should max_bitrate_bps == -1 represent inf max bitrate, as it | 798 // TODO(ivica): Should max_bitrate_bps == -1 represent inf max bitrate, as it |
787 // does in some parts of the code? | 799 // does in some parts of the code? |
788 RTC_CHECK_GE(params_.common.max_bitrate_bps, | 800 RTC_CHECK_GE(params_.video.max_bitrate_bps, params_.video.target_bitrate_bps); |
789 params_.common.target_bitrate_bps); | 801 RTC_CHECK_GE(params_.video.target_bitrate_bps, params_.video.min_bitrate_bps); |
790 RTC_CHECK_GE(params_.common.target_bitrate_bps, | 802 RTC_CHECK_LT(params_.video.selected_tl, params_.video.num_temporal_layers); |
791 params_.common.min_bitrate_bps); | |
792 RTC_CHECK_LT(params_.common.selected_tl, params_.common.num_temporal_layers); | |
793 RTC_CHECK_LT(params_.ss.selected_stream, params_.ss.streams.size()); | 803 RTC_CHECK_LT(params_.ss.selected_stream, params_.ss.streams.size()); |
794 for (const VideoStream& stream : params_.ss.streams) { | 804 for (const VideoStream& stream : params_.ss.streams) { |
795 RTC_CHECK_GE(stream.min_bitrate_bps, 0); | 805 RTC_CHECK_GE(stream.min_bitrate_bps, 0); |
796 RTC_CHECK_GE(stream.target_bitrate_bps, stream.min_bitrate_bps); | 806 RTC_CHECK_GE(stream.target_bitrate_bps, stream.min_bitrate_bps); |
797 RTC_CHECK_GE(stream.max_bitrate_bps, stream.target_bitrate_bps); | 807 RTC_CHECK_GE(stream.max_bitrate_bps, stream.target_bitrate_bps); |
798 RTC_CHECK_EQ(static_cast<int>(stream.temporal_layer_thresholds_bps.size()), | 808 RTC_CHECK_EQ(static_cast<int>(stream.temporal_layer_thresholds_bps.size()), |
799 params_.common.num_temporal_layers - 1); | 809 params_.video.num_temporal_layers - 1); |
800 } | 810 } |
801 // TODO(ivica): Should we check if the sum of all streams/layers is equal to | 811 // TODO(ivica): Should we check if the sum of all streams/layers is equal to |
802 // the total bitrate? We anyway have to update them in the case bitrate | 812 // the total bitrate? We anyway have to update them in the case bitrate |
803 // estimator changes the total bitrates. | 813 // estimator changes the total bitrates. |
804 RTC_CHECK_GE(params_.ss.num_spatial_layers, 1); | 814 RTC_CHECK_GE(params_.ss.num_spatial_layers, 1); |
805 RTC_CHECK_LE(params_.ss.selected_sl, params_.ss.num_spatial_layers); | 815 RTC_CHECK_LE(params_.ss.selected_sl, params_.ss.num_spatial_layers); |
806 RTC_CHECK(params_.ss.spatial_layers.empty() || | 816 RTC_CHECK(params_.ss.spatial_layers.empty() || |
807 params_.ss.spatial_layers.size() == | 817 params_.ss.spatial_layers.size() == |
808 static_cast<size_t>(params_.ss.num_spatial_layers)); | 818 static_cast<size_t>(params_.ss.num_spatial_layers)); |
809 if (params_.common.codec == "VP8") { | 819 if (params_.video.codec == "VP8") { |
810 RTC_CHECK_EQ(params_.ss.num_spatial_layers, 1); | 820 RTC_CHECK_EQ(params_.ss.num_spatial_layers, 1); |
811 } else if (params_.common.codec == "VP9") { | 821 } else if (params_.video.codec == "VP9") { |
812 RTC_CHECK_EQ(params_.ss.streams.size(), 1u); | 822 RTC_CHECK_EQ(params_.ss.streams.size(), 1u); |
813 } | 823 } |
814 } | 824 } |
815 | 825 |
816 // Static. | 826 // Static. |
817 std::vector<int> VideoQualityTest::ParseCSV(const std::string& str) { | 827 std::vector<int> VideoQualityTest::ParseCSV(const std::string& str) { |
818 // Parse comma separated nonnegative integers, where some elements may be | 828 // Parse comma separated nonnegative integers, where some elements may be |
819 // empty. The empty values are replaced with -1. | 829 // empty. The empty values are replaced with -1. |
820 // E.g. "10,20,,30,40" --> {10, 20, -1, 30, 40} | 830 // E.g. "10,20,,30,40" --> {10, 20, -1, 30, 40} |
821 // E.g. ",,10,,20," --> {-1, -1, 10, -1, 20, -1} | 831 // E.g. ",,10,,20," --> {-1, -1, 10, -1, 20, -1} |
(...skipping 15 matching lines...) | |
837 << "Unexpected non-number value."; | 847 << "Unexpected non-number value."; |
838 p += pos; | 848 p += pos; |
839 } | 849 } |
840 result.push_back(value); | 850 result.push_back(value); |
841 return result; | 851 return result; |
842 } | 852 } |
843 | 853 |
844 // Static. | 854 // Static. |
845 VideoStream VideoQualityTest::DefaultVideoStream(const Params& params) { | 855 VideoStream VideoQualityTest::DefaultVideoStream(const Params& params) { |
846 VideoStream stream; | 856 VideoStream stream; |
847 stream.width = params.common.width; | 857 stream.width = params.video.width; |
848 stream.height = params.common.height; | 858 stream.height = params.video.height; |
849 stream.max_framerate = params.common.fps; | 859 stream.max_framerate = params.video.fps; |
850 stream.min_bitrate_bps = params.common.min_bitrate_bps; | 860 stream.min_bitrate_bps = params.video.min_bitrate_bps; |
851 stream.target_bitrate_bps = params.common.target_bitrate_bps; | 861 stream.target_bitrate_bps = params.video.target_bitrate_bps; |
852 stream.max_bitrate_bps = params.common.max_bitrate_bps; | 862 stream.max_bitrate_bps = params.video.max_bitrate_bps; |
853 stream.max_qp = 52; | 863 stream.max_qp = 52; |
854 if (params.common.num_temporal_layers == 2) | 864 if (params.video.num_temporal_layers == 2) |
855 stream.temporal_layer_thresholds_bps.push_back(stream.target_bitrate_bps); | 865 stream.temporal_layer_thresholds_bps.push_back(stream.target_bitrate_bps); |
856 return stream; | 866 return stream; |
857 } | 867 } |
858 | 868 |
859 // Static. | 869 // Static. |
860 void VideoQualityTest::FillScalabilitySettings( | 870 void VideoQualityTest::FillScalabilitySettings( |
861 Params* params, | 871 Params* params, |
862 const std::vector<std::string>& stream_descriptors, | 872 const std::vector<std::string>& stream_descriptors, |
863 size_t selected_stream, | 873 size_t selected_stream, |
864 int num_spatial_layers, | 874 int num_spatial_layers, |
(...skipping 22 matching lines...) | |
887 if (v[5] != -1) | 897 if (v[5] != -1) |
888 stream.max_bitrate_bps = v[5]; | 898 stream.max_bitrate_bps = v[5]; |
889 if (v.size() > 6 && v[6] != -1) | 899 if (v.size() > 6 && v[6] != -1) |
890 stream.max_qp = v[6]; | 900 stream.max_qp = v[6]; |
891 if (v.size() > 7) { | 901 if (v.size() > 7) { |
892 stream.temporal_layer_thresholds_bps.clear(); | 902 stream.temporal_layer_thresholds_bps.clear(); |
893 stream.temporal_layer_thresholds_bps.insert( | 903 stream.temporal_layer_thresholds_bps.insert( |
894 stream.temporal_layer_thresholds_bps.end(), v.begin() + 7, v.end()); | 904 stream.temporal_layer_thresholds_bps.end(), v.begin() + 7, v.end()); |
895 } else { | 905 } else { |
896 // Automatic TL thresholds for more than two layers not supported. | 906 // Automatic TL thresholds for more than two layers not supported. |
897 RTC_CHECK_LE(params->common.num_temporal_layers, 2); | 907 RTC_CHECK_LE(params->video.num_temporal_layers, 2); |
898 } | 908 } |
899 params->ss.streams.push_back(stream); | 909 params->ss.streams.push_back(stream); |
900 } | 910 } |
901 params->ss.selected_stream = selected_stream; | 911 params->ss.selected_stream = selected_stream; |
902 | 912 |
903 params->ss.num_spatial_layers = num_spatial_layers ? num_spatial_layers : 1; | 913 params->ss.num_spatial_layers = num_spatial_layers ? num_spatial_layers : 1; |
904 params->ss.selected_sl = selected_sl; | 914 params->ss.selected_sl = selected_sl; |
905 RTC_CHECK(params->ss.spatial_layers.empty()); | 915 RTC_CHECK(params->ss.spatial_layers.empty()); |
906 for (auto descriptor : sl_descriptors) { | 916 for (auto descriptor : sl_descriptors) { |
907 if (descriptor.empty()) | 917 if (descriptor.empty()) |
(...skipping 11 matching lines...) | |
919 | 929 |
920 void VideoQualityTest::SetupCommon(Transport* send_transport, | 930 void VideoQualityTest::SetupCommon(Transport* send_transport, |
921 Transport* recv_transport) { | 931 Transport* recv_transport) { |
922 if (params_.logs) | 932 if (params_.logs) |
923 trace_to_stderr_.reset(new test::TraceToStderr); | 933 trace_to_stderr_.reset(new test::TraceToStderr); |
924 | 934 |
925 size_t num_streams = params_.ss.streams.size(); | 935 size_t num_streams = params_.ss.streams.size(); |
926 CreateSendConfig(num_streams, 0, send_transport); | 936 CreateSendConfig(num_streams, 0, send_transport); |
927 | 937 |
928 int payload_type; | 938 int payload_type; |
929 if (params_.common.codec == "H264") { | 939 if (params_.video.codec == "H264") { |
930 encoder_.reset(VideoEncoder::Create(VideoEncoder::kH264)); | 940 encoder_.reset(VideoEncoder::Create(VideoEncoder::kH264)); |
931 payload_type = kPayloadTypeH264; | 941 payload_type = kPayloadTypeH264; |
932 } else if (params_.common.codec == "VP8") { | 942 } else if (params_.video.codec == "VP8") { |
933 encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp8)); | 943 encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp8)); |
934 payload_type = kPayloadTypeVP8; | 944 payload_type = kPayloadTypeVP8; |
935 } else if (params_.common.codec == "VP9") { | 945 } else if (params_.video.codec == "VP9") { |
936 encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp9)); | 946 encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp9)); |
937 payload_type = kPayloadTypeVP9; | 947 payload_type = kPayloadTypeVP9; |
938 } else { | 948 } else { |
939 RTC_NOTREACHED() << "Codec not supported!"; | 949 RTC_NOTREACHED() << "Codec not supported!"; |
940 return; | 950 return; |
941 } | 951 } |
942 video_send_config_.encoder_settings.encoder = encoder_.get(); | 952 video_send_config_.encoder_settings.encoder = encoder_.get(); |
943 video_send_config_.encoder_settings.payload_name = params_.common.codec; | 953 video_send_config_.encoder_settings.payload_name = params_.video.codec; |
944 video_send_config_.encoder_settings.payload_type = payload_type; | 954 video_send_config_.encoder_settings.payload_type = payload_type; |
945 video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs; | 955 video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs; |
946 video_send_config_.rtp.rtx.payload_type = kSendRtxPayloadType; | 956 video_send_config_.rtp.rtx.payload_type = kSendRtxPayloadType; |
947 for (size_t i = 0; i < num_streams; ++i) | 957 for (size_t i = 0; i < num_streams; ++i) |
948 video_send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]); | 958 video_send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]); |
949 | 959 |
950 video_send_config_.rtp.extensions.clear(); | 960 video_send_config_.rtp.extensions.clear(); |
951 if (params_.common.send_side_bwe) { | 961 if (params_.call.send_side_bwe) { |
952 video_send_config_.rtp.extensions.push_back( | 962 video_send_config_.rtp.extensions.push_back( |
953 RtpExtension(RtpExtension::kTransportSequenceNumberUri, | 963 RtpExtension(RtpExtension::kTransportSequenceNumberUri, |
954 test::kTransportSequenceNumberExtensionId)); | 964 test::kTransportSequenceNumberExtensionId)); |
955 } else { | 965 } else { |
956 video_send_config_.rtp.extensions.push_back(RtpExtension( | 966 video_send_config_.rtp.extensions.push_back(RtpExtension( |
957 RtpExtension::kAbsSendTimeUri, test::kAbsSendTimeExtensionId)); | 967 RtpExtension::kAbsSendTimeUri, test::kAbsSendTimeExtensionId)); |
958 } | 968 } |
959 | 969 |
960 video_encoder_config_.min_transmit_bitrate_bps = | 970 video_encoder_config_.min_transmit_bitrate_bps = |
961 params_.common.min_transmit_bps; | 971 params_.video.min_transmit_bps; |
962 video_encoder_config_.streams = params_.ss.streams; | 972 video_encoder_config_.streams = params_.ss.streams; |
963 video_encoder_config_.spatial_layers = params_.ss.spatial_layers; | 973 video_encoder_config_.spatial_layers = params_.ss.spatial_layers; |
964 | 974 |
965 CreateMatchingReceiveConfigs(recv_transport); | 975 CreateMatchingReceiveConfigs(recv_transport); |
966 | 976 |
967 for (size_t i = 0; i < num_streams; ++i) { | 977 for (size_t i = 0; i < num_streams; ++i) { |
968 video_receive_configs_[i].rtp.nack.rtp_history_ms = kNackRtpHistoryMs; | 978 video_receive_configs_[i].rtp.nack.rtp_history_ms = kNackRtpHistoryMs; |
969 video_receive_configs_[i].rtp.rtx[payload_type].ssrc = kSendRtxSsrcs[i]; | 979 video_receive_configs_[i].rtp.rtx[payload_type].ssrc = kSendRtxSsrcs[i]; |
970 video_receive_configs_[i].rtp.rtx[payload_type].payload_type = | 980 video_receive_configs_[i].rtp.rtx[payload_type].payload_type = |
971 kSendRtxPayloadType; | 981 kSendRtxPayloadType; |
972 video_receive_configs_[i].rtp.transport_cc = params_.common.send_side_bwe; | 982 video_receive_configs_[i].rtp.transport_cc = params_.call.send_side_bwe; |
973 } | 983 } |
974 } | 984 } |
975 | 985 |
976 void VideoQualityTest::SetupScreenshare() { | 986 void VideoQualityTest::SetupScreenshare() { |
977 RTC_CHECK(params_.screenshare.enabled); | 987 RTC_CHECK(params_.screenshare.enabled); |
978 | 988 |
979 // Fill out codec settings. | 989 // Fill out codec settings. |
980 video_encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen; | 990 video_encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen; |
981 if (params_.common.codec == "VP8") { | 991 if (params_.video.codec == "VP8") { |
982 codec_settings_.VP8 = VideoEncoder::GetDefaultVp8Settings(); | 992 codec_settings_.VP8 = VideoEncoder::GetDefaultVp8Settings(); |
983 codec_settings_.VP8.denoisingOn = false; | 993 codec_settings_.VP8.denoisingOn = false; |
984 codec_settings_.VP8.frameDroppingOn = false; | 994 codec_settings_.VP8.frameDroppingOn = false; |
985 codec_settings_.VP8.numberOfTemporalLayers = | 995 codec_settings_.VP8.numberOfTemporalLayers = |
986 static_cast<unsigned char>(params_.common.num_temporal_layers); | 996 static_cast<unsigned char>(params_.video.num_temporal_layers); |
987 video_encoder_config_.encoder_specific_settings = &codec_settings_.VP8; | 997 video_encoder_config_.encoder_specific_settings = &codec_settings_.VP8; |
988 } else if (params_.common.codec == "VP9") { | 998 } else if (params_.video.codec == "VP9") { |
989 codec_settings_.VP9 = VideoEncoder::GetDefaultVp9Settings(); | 999 codec_settings_.VP9 = VideoEncoder::GetDefaultVp9Settings(); |
990 codec_settings_.VP9.denoisingOn = false; | 1000 codec_settings_.VP9.denoisingOn = false; |
991 codec_settings_.VP9.frameDroppingOn = false; | 1001 codec_settings_.VP9.frameDroppingOn = false; |
992 codec_settings_.VP9.numberOfTemporalLayers = | 1002 codec_settings_.VP9.numberOfTemporalLayers = |
993 static_cast<unsigned char>(params_.common.num_temporal_layers); | 1003 static_cast<unsigned char>(params_.video.num_temporal_layers); |
994 video_encoder_config_.encoder_specific_settings = &codec_settings_.VP9; | 1004 video_encoder_config_.encoder_specific_settings = &codec_settings_.VP9; |
995 codec_settings_.VP9.numberOfSpatialLayers = | 1005 codec_settings_.VP9.numberOfSpatialLayers = |
996 static_cast<unsigned char>(params_.ss.num_spatial_layers); | 1006 static_cast<unsigned char>(params_.ss.num_spatial_layers); |
997 } | 1007 } |
998 | 1008 |
999 // Setup frame generator. | 1009 // Setup frame generator. |
1000 const size_t kWidth = 1850; | 1010 const size_t kWidth = 1850; |
1001 const size_t kHeight = 1110; | 1011 const size_t kHeight = 1110; |
1002 std::vector<std::string> slides; | 1012 std::vector<std::string> slides; |
1003 slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv")); | 1013 slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv")); |
1004 slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv")); | 1014 slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv")); |
1005 slides.push_back(test::ResourcePath("photo_1850_1110", "yuv")); | 1015 slides.push_back(test::ResourcePath("photo_1850_1110", "yuv")); |
1006 slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv")); | 1016 slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv")); |
1007 | 1017 |
1008 if (params_.screenshare.scroll_duration == 0) { | 1018 if (params_.screenshare.scroll_duration == 0) { |
1009 // Cycle image every slide_change_interval seconds. | 1019 // Cycle image every slide_change_interval seconds. |
1010 frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile( | 1020 frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile( |
1011 slides, kWidth, kHeight, | 1021 slides, kWidth, kHeight, |
1012 params_.screenshare.slide_change_interval * params_.common.fps)); | 1022 params_.screenshare.slide_change_interval * params_.video.fps)); |
1013 } else { | 1023 } else { |
1014 RTC_CHECK_LE(params_.common.width, kWidth); | 1024 RTC_CHECK_LE(params_.video.width, kWidth); |
1015 RTC_CHECK_LE(params_.common.height, kHeight); | 1025 RTC_CHECK_LE(params_.video.height, kHeight); |
1016 RTC_CHECK_GT(params_.screenshare.slide_change_interval, 0); | 1026 RTC_CHECK_GT(params_.screenshare.slide_change_interval, 0); |
1017 const int kPauseDurationMs = (params_.screenshare.slide_change_interval - | 1027 const int kPauseDurationMs = (params_.screenshare.slide_change_interval - |
1018 params_.screenshare.scroll_duration) * | 1028 params_.screenshare.scroll_duration) * |
1019 1000; | 1029 1000; |
1020 RTC_CHECK_LE(params_.screenshare.scroll_duration, | 1030 RTC_CHECK_LE(params_.screenshare.scroll_duration, |
1021 params_.screenshare.slide_change_interval); | 1031 params_.screenshare.slide_change_interval); |
1022 | 1032 |
1023 frame_generator_.reset( | 1033 frame_generator_.reset( |
1024 test::FrameGenerator::CreateScrollingInputFromYuvFiles( | 1034 test::FrameGenerator::CreateScrollingInputFromYuvFiles( |
1025 clock_, slides, kWidth, kHeight, params_.common.width, | 1035 clock_, slides, kWidth, kHeight, params_.video.width, |
1026 params_.common.height, params_.screenshare.scroll_duration * 1000, | 1036 params_.video.height, params_.screenshare.scroll_duration * 1000, |
1027 kPauseDurationMs)); | 1037 kPauseDurationMs)); |
1028 } | 1038 } |
1029 } | 1039 } |
1030 | 1040 |
1031 void VideoQualityTest::CreateCapturer(VideoCaptureInput* input) { | 1041 void VideoQualityTest::CreateCapturer(VideoCaptureInput* input) { |
1032 if (params_.screenshare.enabled) { | 1042 if (params_.screenshare.enabled) { |
1033 test::FrameGeneratorCapturer* frame_generator_capturer = | 1043 test::FrameGeneratorCapturer* frame_generator_capturer = |
1034 new test::FrameGeneratorCapturer( | 1044 new test::FrameGeneratorCapturer( |
1035 clock_, input, frame_generator_.release(), params_.common.fps); | 1045 clock_, input, frame_generator_.release(), params_.video.fps); |
1036 EXPECT_TRUE(frame_generator_capturer->Init()); | 1046 EXPECT_TRUE(frame_generator_capturer->Init()); |
1037 capturer_.reset(frame_generator_capturer); | 1047 capturer_.reset(frame_generator_capturer); |
1038 } else { | 1048 } else { |
1039 if (params_.video.clip_name.empty()) { | 1049 if (params_.video.clip_name.empty()) { |
1040 capturer_.reset(test::VideoCapturer::Create(input, params_.common.width, | 1050 capturer_.reset(test::VideoCapturer::Create(input, params_.video.width, |
1041 params_.common.height, | 1051 params_.video.height, |
1042 params_.common.fps, clock_)); | 1052 params_.video.fps, clock_)); |
1043 } else { | 1053 } else { |
1044 capturer_.reset(test::FrameGeneratorCapturer::CreateFromYuvFile( | 1054 capturer_.reset(test::FrameGeneratorCapturer::CreateFromYuvFile( |
1045 input, test::ResourcePath(params_.video.clip_name, "yuv"), | 1055 input, test::ResourcePath(params_.video.clip_name, "yuv"), |
1046 params_.common.width, params_.common.height, params_.common.fps, | 1056 params_.video.width, params_.video.height, params_.video.fps, |
1047 clock_)); | 1057 clock_)); |
1048 ASSERT_TRUE(capturer_) << "Could not create capturer for " | 1058 ASSERT_TRUE(capturer_) << "Could not create capturer for " |
1049 << params_.video.clip_name | 1059 << params_.video.clip_name |
1050 << ".yuv. Is this resource file present?"; | 1060 << ".yuv. Is this resource file present?"; |
1051 } | 1061 } |
1052 } | 1062 } |
1053 } | 1063 } |
1054 | 1064 |
1055 void VideoQualityTest::RunWithAnalyzer(const Params& params) { | 1065 void VideoQualityTest::RunWithAnalyzer(const Params& params) { |
1056 params_ = params; | 1066 params_ = params; |
1057 | 1067 |
1058 RTC_CHECK(!params_.audio); | 1068 RTC_CHECK(!params_.audio.enabled); |
1059 // TODO(ivica): Merge with RunWithRenderer and use a flag / argument to | 1069 // TODO(ivica): Merge with RunWithRenderer and use a flag / argument to |
1060 // differentiate between the analyzer and the renderer case. | 1070 // differentiate between the analyzer and the renderer case. |
1061 CheckParams(); | 1071 CheckParams(); |
1062 | 1072 |
1063 FILE* graph_data_output_file = nullptr; | 1073 FILE* graph_data_output_file = nullptr; |
1064 if (!params_.analyzer.graph_data_output_filename.empty()) { | 1074 if (!params_.analyzer.graph_data_output_filename.empty()) { |
1065 graph_data_output_file = | 1075 graph_data_output_file = |
1066 fopen(params_.analyzer.graph_data_output_filename.c_str(), "w"); | 1076 fopen(params_.analyzer.graph_data_output_filename.c_str(), "w"); |
1067 RTC_CHECK(graph_data_output_file) | 1077 RTC_CHECK(graph_data_output_file) |
1068 << "Can't open the file " << params_.analyzer.graph_data_output_filename | 1078 << "Can't open the file " << params_.analyzer.graph_data_output_filename |
1069 << "!"; | 1079 << "!"; |
1070 } | 1080 } |
1071 | 1081 |
1072 Call::Config call_config; | 1082 Call::Config call_config; |
1073 call_config.bitrate_config = params.common.call_bitrate_config; | 1083 call_config.bitrate_config = params_.call.call_bitrate_config; |
1074 CreateCalls(call_config, call_config); | 1084 CreateCalls(call_config, call_config); |
1075 | 1085 |
1076 test::LayerFilteringTransport send_transport( | 1086 test::LayerFilteringTransport send_transport( |
1077 params.pipe, sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9, | 1087 params_.pipe, sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9, |
1078 params.common.selected_tl, params_.ss.selected_sl); | 1088 params_.video.selected_tl, params_.ss.selected_sl); |
1079 test::DirectTransport recv_transport(params.pipe, receiver_call_.get()); | 1089 test::DirectTransport recv_transport(params_.pipe, receiver_call_.get()); |
1080 | 1090 |
1081 std::string graph_title = params_.analyzer.graph_title; | 1091 std::string graph_title = params_.analyzer.graph_title; |
1082 if (graph_title.empty()) | 1092 if (graph_title.empty()) |
1083 graph_title = VideoQualityTest::GenerateGraphTitle(); | 1093 graph_title = VideoQualityTest::GenerateGraphTitle(); |
1084 | 1094 |
1085 // In the case of different resolutions, the functions calculating PSNR and | 1095 // In the case of different resolutions, the functions calculating PSNR and |
1086 // SSIM return -1.0, instead of a positive value as usual. VideoAnalyzer | 1096 // SSIM return -1.0, instead of a positive value as usual. VideoAnalyzer |
1087 // aborts if the average psnr/ssim are below the given threshold, which is | 1097 // aborts if the average psnr/ssim are below the given threshold, which is |
1088 // 0.0 by default. Setting the thresholds to -1.1 prevents the unnecessary | 1098 // 0.0 by default. Setting the thresholds to -1.1 prevents the unnecessary |
1089 // abort. | 1099 // abort. |
1090 VideoStream& selected_stream = params_.ss.streams[params_.ss.selected_stream]; | 1100 VideoStream& selected_stream = params_.ss.streams[params_.ss.selected_stream]; |
1091 int selected_sl = params_.ss.selected_sl != -1 | 1101 int selected_sl = params_.ss.selected_sl != -1 |
1092 ? params_.ss.selected_sl | 1102 ? params_.ss.selected_sl |
1093 : params_.ss.num_spatial_layers - 1; | 1103 : params_.ss.num_spatial_layers - 1; |
1094 bool disable_quality_check = | 1104 bool disable_quality_check = |
1095 selected_stream.width != params_.common.width || | 1105 selected_stream.width != params_.video.width || |
1096 selected_stream.height != params_.common.height || | 1106 selected_stream.height != params_.video.height || |
1097 (!params_.ss.spatial_layers.empty() && | 1107 (!params_.ss.spatial_layers.empty() && |
1098 params_.ss.spatial_layers[selected_sl].scaling_factor_num != | 1108 params_.ss.spatial_layers[selected_sl].scaling_factor_num != |
1099 params_.ss.spatial_layers[selected_sl].scaling_factor_den); | 1109 params_.ss.spatial_layers[selected_sl].scaling_factor_den); |
1100 if (disable_quality_check) { | 1110 if (disable_quality_check) { |
1101 fprintf(stderr, | 1111 fprintf(stderr, |
1102 "Warning: Calculating PSNR and SSIM for downsized resolution " | 1112 "Warning: Calculating PSNR and SSIM for downsized resolution " |
1103 "not implemented yet! Skipping PSNR and SSIM calculations!"); | 1113 "not implemented yet! Skipping PSNR and SSIM calculations!"); |
1104 } | 1114 } |
1105 | 1115 |
1106 VideoAnalyzer analyzer( | 1116 VideoAnalyzer analyzer( |
1107 &send_transport, params_.analyzer.test_label, | 1117 &send_transport, params_.analyzer.test_label, |
1108 disable_quality_check ? -1.1 : params_.analyzer.avg_psnr_threshold, | 1118 disable_quality_check ? -1.1 : params_.analyzer.avg_psnr_threshold, |
1109 disable_quality_check ? -1.1 : params_.analyzer.avg_ssim_threshold, | 1119 disable_quality_check ? -1.1 : params_.analyzer.avg_ssim_threshold, |
1110 params_.analyzer.test_durations_secs * params_.common.fps, | 1120 params_.analyzer.test_durations_secs * params_.video.fps, |
1111 graph_data_output_file, graph_title, | 1121 graph_data_output_file, graph_title, |
1112 kVideoSendSsrcs[params_.ss.selected_stream]); | 1122 kVideoSendSsrcs[params_.ss.selected_stream]); |
1113 | 1123 |
1114 analyzer.SetReceiver(receiver_call_->Receiver()); | 1124 analyzer.SetReceiver(receiver_call_->Receiver()); |
1115 send_transport.SetReceiver(&analyzer); | 1125 send_transport.SetReceiver(&analyzer); |
1116 recv_transport.SetReceiver(sender_call_->Receiver()); | 1126 recv_transport.SetReceiver(sender_call_->Receiver()); |
1117 | 1127 |
1118 SetupCommon(&analyzer, &recv_transport); | 1128 SetupCommon(&analyzer, &recv_transport); |
1119 video_receive_configs_[params_.ss.selected_stream].renderer = &analyzer; | 1129 video_receive_configs_[params_.ss.selected_stream].renderer = &analyzer; |
1120 video_send_config_.pre_encode_callback = analyzer.pre_encode_proxy(); | 1130 video_send_config_.pre_encode_callback = analyzer.pre_encode_proxy(); |
(...skipping 30 matching lines...) | |
1151 | 1161 |
1152 if (graph_data_output_file) | 1162 if (graph_data_output_file) |
1153 fclose(graph_data_output_file); | 1163 fclose(graph_data_output_file); |
1154 } | 1164 } |
1155 | 1165 |
1156 void VideoQualityTest::RunWithRenderers(const Params& params) { | 1166 void VideoQualityTest::RunWithRenderers(const Params& params) { |
1157 params_ = params; | 1167 params_ = params; |
1158 CheckParams(); | 1168 CheckParams(); |
1159 | 1169 |
1160 std::unique_ptr<test::VideoRenderer> local_preview( | 1170 std::unique_ptr<test::VideoRenderer> local_preview( |
1161 test::VideoRenderer::Create("Local Preview", params_.common.width, | 1171 test::VideoRenderer::Create("Local Preview", params_.video.width, |
1162 params_.common.height)); | 1172 params_.video.height)); |
1163 size_t stream_id = params_.ss.selected_stream; | 1173 size_t stream_id = params_.ss.selected_stream; |
1164 std::string title = "Loopback Video"; | 1174 std::string title = "Loopback Video"; |
1165 if (params_.ss.streams.size() > 1) { | 1175 if (params_.ss.streams.size() > 1) { |
1166 std::ostringstream s; | 1176 std::ostringstream s; |
1167 s << stream_id; | 1177 s << stream_id; |
1168 title += " - Stream #" + s.str(); | 1178 title += " - Stream #" + s.str(); |
1169 } | 1179 } |
1170 | 1180 |
1171 std::unique_ptr<test::VideoRenderer> loopback_video( | 1181 std::unique_ptr<test::VideoRenderer> loopback_video( |
1172 test::VideoRenderer::Create(title.c_str(), | 1182 test::VideoRenderer::Create(title.c_str(), |
1173 params_.ss.streams[stream_id].width, | 1183 params_.ss.streams[stream_id].width, |
1174 params_.ss.streams[stream_id].height)); | 1184 params_.ss.streams[stream_id].height)); |
1175 | 1185 |
1176 // TODO(ivica): Remove bitrate_config and use the default Call::Config(), to | 1186 // TODO(ivica): Remove bitrate_config and use the default Call::Config(), to |
1177 // match the full stack tests. | 1187 // match the full stack tests. |
1178 Call::Config call_config; | 1188 Call::Config call_config; |
1179 call_config.bitrate_config = params_.common.call_bitrate_config; | 1189 call_config.bitrate_config = params_.call.call_bitrate_config; |
1180 | 1190 |
1181 ::VoiceEngineState voe; | 1191 ::VoiceEngineState voe; |
1182 if (params_.audio) { | 1192 if (params_.audio.enabled) { |
1183 CreateVoiceEngine(&voe, decoder_factory_); | 1193 CreateVoiceEngine(&voe, decoder_factory_); |
1184 AudioState::Config audio_state_config; | 1194 AudioState::Config audio_state_config; |
1185 audio_state_config.voice_engine = voe.voice_engine; | 1195 audio_state_config.voice_engine = voe.voice_engine; |
1186 call_config.audio_state = AudioState::Create(audio_state_config); | 1196 call_config.audio_state = AudioState::Create(audio_state_config); |
1187 } | 1197 } |
1188 | 1198 |
1189 std::unique_ptr<Call> call(Call::Create(call_config)); | 1199 std::unique_ptr<Call> call(Call::Create(call_config)); |
1190 | 1200 |
1191 test::LayerFilteringTransport transport( | 1201 test::LayerFilteringTransport transport( |
1192 params.pipe, call.get(), kPayloadTypeVP8, kPayloadTypeVP9, | 1202 params.pipe, call.get(), kPayloadTypeVP8, kPayloadTypeVP9, |
1193 params.common.selected_tl, params_.ss.selected_sl); | 1203 params.video.selected_tl, params_.ss.selected_sl); |
1194 // TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at | 1204 // TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at |
1195 // least share as much code as possible. That way this test would also match | 1205 // least share as much code as possible. That way this test would also match |
1196 // the full stack tests better. | 1206 // the full stack tests better. |
1197 transport.SetReceiver(call->Receiver()); | 1207 transport.SetReceiver(call->Receiver()); |
1198 | 1208 |
1199 SetupCommon(&transport, &transport); | 1209 SetupCommon(&transport, &transport); |
1200 | 1210 |
1201 video_send_config_.pre_encode_callback = local_preview.get(); | 1211 video_send_config_.pre_encode_callback = local_preview.get(); |
1202 video_receive_configs_[stream_id].renderer = loopback_video.get(); | 1212 video_receive_configs_[stream_id].renderer = loopback_video.get(); |
1203 if (params_.audio && params_.audio_video_sync) | 1213 if (params_.audio.enabled && params_.audio.sync_video) |
1204 video_receive_configs_[stream_id].sync_group = kSyncGroup; | 1214 video_receive_configs_[stream_id].sync_group = kSyncGroup; |
1205 | 1215 |
1206 video_send_config_.suspend_below_min_bitrate = | 1216 video_send_config_.suspend_below_min_bitrate = |
1207 params_.common.suspend_below_min_bitrate; | 1217 params_.video.suspend_below_min_bitrate; |
1208 | 1218 |
1209 if (params.common.fec) { | 1219 if (params.video.fec) { |
1210 video_send_config_.rtp.fec.red_payload_type = kRedPayloadType; | 1220 video_send_config_.rtp.fec.red_payload_type = kRedPayloadType; |
1211 video_send_config_.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType; | 1221 video_send_config_.rtp.fec.ulpfec_payload_type = kUlpfecPayloadType; |
1212 video_receive_configs_[stream_id].rtp.fec.red_payload_type = | 1222 video_receive_configs_[stream_id].rtp.fec.red_payload_type = |
1213 kRedPayloadType; | 1223 kRedPayloadType; |
1214 video_receive_configs_[stream_id].rtp.fec.ulpfec_payload_type = | 1224 video_receive_configs_[stream_id].rtp.fec.ulpfec_payload_type = |
1215 kUlpfecPayloadType; | 1225 kUlpfecPayloadType; |
1216 } | 1226 } |
1217 | 1227 |
1218 if (params_.screenshare.enabled) | 1228 if (params_.screenshare.enabled) |
1219 SetupScreenshare(); | 1229 SetupScreenshare(); |
1220 | 1230 |
1221 video_send_stream_ = call->CreateVideoSendStream( | 1231 video_send_stream_ = call->CreateVideoSendStream( |
1222 video_send_config_.Copy(), video_encoder_config_.Copy()); | 1232 video_send_config_.Copy(), video_encoder_config_.Copy()); |
1223 VideoReceiveStream* video_receive_stream = | 1233 VideoReceiveStream* video_receive_stream = |
1224 call->CreateVideoReceiveStream(video_receive_configs_[stream_id].Copy()); | 1234 call->CreateVideoReceiveStream(video_receive_configs_[stream_id].Copy()); |
1225 CreateCapturer(video_send_stream_->Input()); | 1235 CreateCapturer(video_send_stream_->Input()); |
1226 | 1236 |
1227 AudioReceiveStream* audio_receive_stream = nullptr; | 1237 AudioReceiveStream* audio_receive_stream = nullptr; |
1228 if (params_.audio) { | 1238 if (params_.audio.enabled) { |
1229 audio_send_config_ = AudioSendStream::Config(&transport); | 1239 audio_send_config_ = AudioSendStream::Config(&transport); |
1230 audio_send_config_.voe_channel_id = voe.send_channel_id; | 1240 audio_send_config_.voe_channel_id = voe.send_channel_id; |
1231 audio_send_config_.rtp.ssrc = kAudioSendSsrc; | 1241 audio_send_config_.rtp.ssrc = kAudioSendSsrc; |
1232 | 1242 |
1233 // Add extension to enable audio send side BWE, and allow audio bit rate | 1243 // Add extension to enable audio send side BWE, and allow audio bit rate |
1234 // adaptation. | 1244 // adaptation. |
1235 audio_send_config_.rtp.extensions.clear(); | 1245 audio_send_config_.rtp.extensions.clear(); |
1236 if (params_.common.send_side_bwe) { | 1246 if (params_.call.send_side_bwe) { |
1237 audio_send_config_.rtp.extensions.push_back(webrtc::RtpExtension( | 1247 audio_send_config_.rtp.extensions.push_back(webrtc::RtpExtension( |
1238 webrtc::RtpExtension::kTransportSequenceNumberUri, | 1248 webrtc::RtpExtension::kTransportSequenceNumberUri, |
1239 test::kTransportSequenceNumberExtensionId)); | 1249 test::kTransportSequenceNumberExtensionId)); |
1240 audio_send_config_.min_bitrate_kbps = kOpusMinBitrate / 1000; | 1250 audio_send_config_.min_bitrate_kbps = kOpusMinBitrate / 1000; |
1241 audio_send_config_.max_bitrate_kbps = kOpusBitrateFb / 1000; | 1251 audio_send_config_.max_bitrate_kbps = kOpusBitrateFb / 1000; |
1242 } | 1252 } |
1243 | 1253 |
1244 audio_send_stream_ = call->CreateAudioSendStream(audio_send_config_); | 1254 audio_send_stream_ = call->CreateAudioSendStream(audio_send_config_); |
1245 | 1255 |
1246 AudioReceiveStream::Config audio_config; | 1256 AudioReceiveStream::Config audio_config; |
1247 audio_config.rtp.local_ssrc = kReceiverLocalAudioSsrc; | 1257 audio_config.rtp.local_ssrc = kReceiverLocalAudioSsrc; |
1248 audio_config.rtcp_send_transport = &transport; | 1258 audio_config.rtcp_send_transport = &transport; |
1249 audio_config.voe_channel_id = voe.receive_channel_id; | 1259 audio_config.voe_channel_id = voe.receive_channel_id; |
1250 audio_config.rtp.remote_ssrc = audio_send_config_.rtp.ssrc; | 1260 audio_config.rtp.remote_ssrc = audio_send_config_.rtp.ssrc; |
1251 audio_config.rtp.transport_cc = params_.common.send_side_bwe; | 1261 audio_config.rtp.transport_cc = params_.call.send_side_bwe; |
1252 audio_config.rtp.extensions = audio_send_config_.rtp.extensions; | 1262 audio_config.rtp.extensions = audio_send_config_.rtp.extensions; |
1253 audio_config.decoder_factory = decoder_factory_; | 1263 audio_config.decoder_factory = decoder_factory_; |
1254 if (params_.audio_video_sync) | 1264 if (params_.audio.sync_video) |
1255 audio_config.sync_group = kSyncGroup; | 1265 audio_config.sync_group = kSyncGroup; |
1256 | 1266 |
1257 audio_receive_stream = call->CreateAudioReceiveStream(audio_config); | 1267 audio_receive_stream = call->CreateAudioReceiveStream(audio_config); |
1258 | 1268 |
1259 const CodecInst kOpusInst = {120, "OPUS", 48000, 960, 2, 64000}; | 1269 const CodecInst kOpusInst = {120, "OPUS", 48000, 960, 2, 64000}; |
1260 EXPECT_EQ(0, voe.codec->SetSendCodec(voe.send_channel_id, kOpusInst)); | 1270 EXPECT_EQ(0, voe.codec->SetSendCodec(voe.send_channel_id, kOpusInst)); |
1261 } | 1271 } |
1262 | 1272 |
1263 // Start sending and receiving video. | 1273 // Start sending and receiving video. |
1264 video_receive_stream->Start(); | 1274 video_receive_stream->Start(); |
1265 video_send_stream_->Start(); | 1275 video_send_stream_->Start(); |
1266 capturer_->Start(); | 1276 capturer_->Start(); |
1267 | 1277 |
1268 if (params_.audio) { | 1278 if (params_.audio.enabled) { |
1269 // Start receiving audio. | 1279 // Start receiving audio. |
1270 audio_receive_stream->Start(); | 1280 audio_receive_stream->Start(); |
1271 EXPECT_EQ(0, voe.base->StartPlayout(voe.receive_channel_id)); | 1281 EXPECT_EQ(0, voe.base->StartPlayout(voe.receive_channel_id)); |
1272 EXPECT_EQ(0, voe.base->StartReceive(voe.receive_channel_id)); | 1282 EXPECT_EQ(0, voe.base->StartReceive(voe.receive_channel_id)); |
1273 | 1283 |
1274 // Start sending audio. | 1284 // Start sending audio. |
1275 audio_send_stream_->Start(); | 1285 audio_send_stream_->Start(); |
1276 EXPECT_EQ(0, voe.base->StartSend(voe.send_channel_id)); | 1286 EXPECT_EQ(0, voe.base->StartSend(voe.send_channel_id)); |
1277 } | 1287 } |
1278 | 1288 |
1279 test::PressEnterToContinue(); | 1289 test::PressEnterToContinue(); |
1280 | 1290 |
1281 if (params_.audio) { | 1291 if (params_.audio.enabled) { |
1282 // Stop sending audio. | 1292 // Stop sending audio. |
1283 EXPECT_EQ(0, voe.base->StopSend(voe.send_channel_id)); | 1293 EXPECT_EQ(0, voe.base->StopSend(voe.send_channel_id)); |
1284 audio_send_stream_->Stop(); | 1294 audio_send_stream_->Stop(); |
1285 | 1295 |
1286 // Stop receiving audio. | 1296 // Stop receiving audio. |
1287 EXPECT_EQ(0, voe.base->StopReceive(voe.receive_channel_id)); | 1297 EXPECT_EQ(0, voe.base->StopReceive(voe.receive_channel_id)); |
1288 EXPECT_EQ(0, voe.base->StopPlayout(voe.receive_channel_id)); | 1298 EXPECT_EQ(0, voe.base->StopPlayout(voe.receive_channel_id)); |
1289 audio_receive_stream->Stop(); | 1299 audio_receive_stream->Stop(); |
1290 } | 1300 } |
1291 | 1301 |
1292 // Stop receiving and sending video. | 1302 // Stop receiving and sending video. |
1293 capturer_->Stop(); | 1303 capturer_->Stop(); |
1294 video_send_stream_->Stop(); | 1304 video_send_stream_->Stop(); |
1295 video_receive_stream->Stop(); | 1305 video_receive_stream->Stop(); |
1296 | 1306 |
1297 call->DestroyVideoReceiveStream(video_receive_stream); | 1307 call->DestroyVideoReceiveStream(video_receive_stream); |
1298 call->DestroyVideoSendStream(video_send_stream_); | 1308 call->DestroyVideoSendStream(video_send_stream_); |
1299 | 1309 |
1300 if (params_.audio) { | 1310 if (params_.audio.enabled) { |
1301 call->DestroyAudioSendStream(audio_send_stream_); | 1311 call->DestroyAudioSendStream(audio_send_stream_); |
1302 call->DestroyAudioReceiveStream(audio_receive_stream); | 1312 call->DestroyAudioReceiveStream(audio_receive_stream); |
1303 } | 1313 } |
1304 | 1314 |
1305 transport.StopSending(); | 1315 transport.StopSending(); |
1306 if (params_.audio) | 1316 if (params_.audio.enabled) |
1307 DestroyVoiceEngine(&voe); | 1317 DestroyVoiceEngine(&voe); |
1308 } | 1318 } |
1309 | 1319 |
1310 } // namespace webrtc | 1320 } // namespace webrtc |