| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 #include "webrtc/video/video_quality_test.h" | 10 #include "webrtc/video/video_quality_test.h" |
| (...skipping 95 matching lines...) |
| 106 // The highest layer must match the incoming resolution. | 106 // The highest layer must match the incoming resolution. |
| 107 std::vector<webrtc::VideoStream> streams = streams_; | 107 std::vector<webrtc::VideoStream> streams = streams_; |
| 108 streams[streams_.size() - 1].height = height; | 108 streams[streams_.size() - 1].height = height; |
| 109 streams[streams_.size() - 1].width = width; | 109 streams[streams_.size() - 1].width = width; |
| 110 return streams; | 110 return streams; |
| 111 } | 111 } |
| 112 | 112 |
| 113 std::vector<webrtc::VideoStream> streams_; | 113 std::vector<webrtc::VideoStream> streams_; |
| 114 }; | 114 }; |
| 115 | 115 |
| 116 bool IsFlexfec(int payload_type) { |
| 117 return payload_type == webrtc::VideoQualityTest::kFlexfecPayloadType; |
| 118 } |
| 119 |
| 116 } // namespace | 120 } // namespace |
| 117 | 121 |
| 118 namespace webrtc { | 122 namespace webrtc { |
| 119 | 123 |
| 120 class VideoAnalyzer : public PacketReceiver, | 124 class VideoAnalyzer : public PacketReceiver, |
| 121 public Transport, | 125 public Transport, |
| 122 public rtc::VideoSinkInterface<VideoFrame>, | 126 public rtc::VideoSinkInterface<VideoFrame>, |
| 123 public EncodedFrameObserver { | 127 public EncodedFrameObserver { |
| 124 public: | 128 public: |
| 125 VideoAnalyzer(test::LayerFilteringTransport* transport, | 129 VideoAnalyzer(test::LayerFilteringTransport* transport, |
| (...skipping 82 matching lines...) |
| 208 const PacketTime& packet_time) override { | 212 const PacketTime& packet_time) override { |
| 209 // Ignore timestamps of RTCP packets. They're not synchronized with | 213 // Ignore timestamps of RTCP packets. They're not synchronized with |
| 210 // RTP packet timestamps and so they would confuse wrap_handler_. | 214 // RTP packet timestamps and so they would confuse wrap_handler_. |
| 211 if (RtpHeaderParser::IsRtcp(packet, length)) { | 215 if (RtpHeaderParser::IsRtcp(packet, length)) { |
| 212 return receiver_->DeliverPacket(media_type, packet, length, packet_time); | 216 return receiver_->DeliverPacket(media_type, packet, length, packet_time); |
| 213 } | 217 } |
| 214 | 218 |
| 215 RtpUtility::RtpHeaderParser parser(packet, length); | 219 RtpUtility::RtpHeaderParser parser(packet, length); |
| 216 RTPHeader header; | 220 RTPHeader header; |
| 217 parser.Parse(&header); | 221 parser.Parse(&header); |
| 218 { | 222 if (!IsFlexfec(header.payloadType)) { |
| 223 // Ignore FlexFEC timestamps, to avoid collisions with media timestamps. |
| 224 // (FlexFEC and media are sent on different SSRCs, which have different |
| 225 // timestamp spaces.) |
| 219 rtc::CritScope lock(&crit_); | 226 rtc::CritScope lock(&crit_); |
| 220 int64_t timestamp = | 227 int64_t timestamp = |
| 221 wrap_handler_.Unwrap(header.timestamp - rtp_timestamp_delta_); | 228 wrap_handler_.Unwrap(header.timestamp - rtp_timestamp_delta_); |
| 222 recv_times_[timestamp] = | 229 recv_times_[timestamp] = |
| 223 Clock::GetRealTimeClock()->CurrentNtpInMilliseconds(); | 230 Clock::GetRealTimeClock()->CurrentNtpInMilliseconds(); |
| 224 } | 231 } |
| 225 | 232 |
| 226 return receiver_->DeliverPacket(media_type, packet, length, packet_time); | 233 return receiver_->DeliverPacket(media_type, packet, length, packet_time); |
| 227 } | 234 } |
| 228 | 235 |
| (...skipping 24 matching lines...) |
| 253 int64_t current_time = | 260 int64_t current_time = |
| 254 Clock::GetRealTimeClock()->CurrentNtpInMilliseconds(); | 261 Clock::GetRealTimeClock()->CurrentNtpInMilliseconds(); |
| 255 bool result = transport_->SendRtp(packet, length, options); | 262 bool result = transport_->SendRtp(packet, length, options); |
| 256 { | 263 { |
| 257 rtc::CritScope lock(&crit_); | 264 rtc::CritScope lock(&crit_); |
| 258 | 265 |
| 259 if (rtp_timestamp_delta_ == 0) { | 266 if (rtp_timestamp_delta_ == 0) { |
| 260 rtp_timestamp_delta_ = header.timestamp - *first_send_timestamp_; | 267 rtp_timestamp_delta_ = header.timestamp - *first_send_timestamp_; |
| 261 first_send_timestamp_ = rtc::Optional<uint32_t>(); | 268 first_send_timestamp_ = rtc::Optional<uint32_t>(); |
| 262 } | 269 } |
| 263 int64_t timestamp = | 270 if (!IsFlexfec(header.payloadType)) { |
| 264 wrap_handler_.Unwrap(header.timestamp - rtp_timestamp_delta_); | 271 // Ignore FlexFEC timestamps, to avoid collisions with media timestamps. |
| 265 send_times_[timestamp] = current_time; | 272 // (FlexFEC and media are sent on different SSRCs, which have different |
| 266 if (!transport_->DiscardedLastPacket() && | 273 // timestamp spaces.) |
| 267 header.ssrc == ssrc_to_analyze_) { | 274 int64_t timestamp = |
| 268 encoded_frame_sizes_[timestamp] += | 275 wrap_handler_.Unwrap(header.timestamp - rtp_timestamp_delta_); |
| 269 length - (header.headerLength + header.paddingLength); | 276 send_times_[timestamp] = current_time; |
| 277 if (!transport_->DiscardedLastPacket() && |
| 278 header.ssrc == ssrc_to_analyze_) { |
| 279 encoded_frame_sizes_[timestamp] += |
| 280 length - (header.headerLength + header.paddingLength); |
| 281 } |
| 270 } | 282 } |
| 271 } | 283 } |
| 272 return result; | 284 return result; |
| 273 } | 285 } |
| 274 | 286 |
| 275 bool SendRtcp(const uint8_t* packet, size_t length) override { | 287 bool SendRtcp(const uint8_t* packet, size_t length) override { |
| 276 return transport_->SendRtcp(packet, length); | 288 return transport_->SendRtcp(packet, length); |
| 277 } | 289 } |
| 278 | 290 |
| 279 void EncodedFrameCallback(const EncodedFrame& frame) override { | 291 void EncodedFrameCallback(const EncodedFrame& frame) override { |
| (...skipping 379 matching lines...) |
| 659 | 671 |
| 660 if (comparison.dropped) { | 672 if (comparison.dropped) { |
| 661 ++dropped_frames_; | 673 ++dropped_frames_; |
| 662 return; | 674 return; |
| 663 } | 675 } |
| 664 if (last_render_time_ != 0) | 676 if (last_render_time_ != 0) |
| 665 rendered_delta_.AddSample(comparison.render_time_ms - last_render_time_); | 677 rendered_delta_.AddSample(comparison.render_time_ms - last_render_time_); |
| 666 last_render_time_ = comparison.render_time_ms; | 678 last_render_time_ = comparison.render_time_ms; |
| 667 | 679 |
| 668 sender_time_.AddSample(comparison.send_time_ms - comparison.input_time_ms); | 680 sender_time_.AddSample(comparison.send_time_ms - comparison.input_time_ms); |
| 669 receiver_time_.AddSample(comparison.render_time_ms - | 681 if (comparison.recv_time_ms > 0) { |
| 670 comparison.recv_time_ms); | 682 // If recv_time_ms == 0, this frame consisted of packets which were all |
| 683 // lost in the transport. Since we were able to render the frame, however, |
| 684 // the dropped packets were recovered by FlexFEC. The FlexFEC recovery |
| 685 // happens internally in Call, and we therefore cannot know here which |
| 686 // FEC packets protected the lost media packets. Consequently, we |
| 687 // were not able to record a meaningful recv_time_ms. We therefore skip |
| 688 // this sample. |
| 689 // |
| 690 // The reasoning above does not hold for ULPFEC and RTX, as for those |
| 691 // strategies the timestamp of the received packets is set to the |
| 692 // timestamp of the protected/retransmitted media packet. I.e., then |
| 693 // recv_time_ms != 0, even though the media packets were lost. |
| 694 receiver_time_.AddSample(comparison.render_time_ms - |
| 695 comparison.recv_time_ms); |
| 696 } |
| 671 end_to_end_.AddSample(comparison.render_time_ms - comparison.input_time_ms); | 697 end_to_end_.AddSample(comparison.render_time_ms - comparison.input_time_ms); |
| 672 encoded_frame_size_.AddSample(comparison.encoded_frame_size); | 698 encoded_frame_size_.AddSample(comparison.encoded_frame_size); |
| 673 } | 699 } |
| 674 | 700 |
| 675 void PrintResult(const char* result_type, | 701 void PrintResult(const char* result_type, |
| 676 test::Statistics stats, | 702 test::Statistics stats, |
| 677 const char* unit) { | 703 const char* unit) { |
| 678 printf("RESULT %s: %s = {%f, %f}%s\n", | 704 printf("RESULT %s: %s = {%f, %f}%s\n", |
| 679 result_type, | 705 result_type, |
| 680 test_label_.c_str(), | 706 test_label_.c_str(), |
| (...skipping 818 matching lines...) |
| 1499 std::ostringstream str; | 1525 std::ostringstream str; |
| 1500 str << receive_logs_++; | 1526 str << receive_logs_++; |
| 1501 std::string path = | 1527 std::string path = |
| 1502 params_.video.encoded_frame_base_path + "." + str.str() + ".recv.ivf"; | 1528 params_.video.encoded_frame_base_path + "." + str.str() + ".recv.ivf"; |
| 1503 stream->EnableEncodedFrameRecording(rtc::CreatePlatformFile(path), | 1529 stream->EnableEncodedFrameRecording(rtc::CreatePlatformFile(path), |
| 1504 10000000); | 1530 10000000); |
| 1505 } | 1531 } |
| 1506 } | 1532 } |
| 1507 | 1533 |
| 1508 } // namespace webrtc | 1534 } // namespace webrtc |
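The timestamp bookkeeping in the change above keys the analyzer's send_times_ / recv_times_ / encoded_frame_sizes_ maps on an unwrapped 64-bit version of the 32-bit RTP timestamp, shifted by rtp_timestamp_delta_ so that the send and receive sides agree on the key, and it keeps FlexFEC packets out of those maps because FlexFEC lives in its own SSRC/timestamp space. The standalone sketch below illustrates that unwrapping-and-filtering idea only; it is not WebRTC's TimestampWrapAroundHandler, and the class name, payload type value, and timestamps are made up for illustration.

```cpp
// Standalone sketch (not WebRTC code). Illustrates unwrapping 32-bit RTP
// timestamps into monotonic 64-bit map keys and filtering out FlexFEC.
#include <cstdint>
#include <cstdio>
#include <map>

namespace {

constexpr int kFlexfecPayloadType = 118;  // Placeholder value, not WebRTC's.

bool IsFlexfec(int payload_type) {
  return payload_type == kFlexfecPayloadType;
}

// Minimal 32-bit timestamp unwrapper: each new timestamp is interpreted as
// the value closest to the previously unwrapped one, so the returned 64-bit
// key keeps increasing across 2^32 wraparounds.
class TimestampUnwrapper {
 public:
  int64_t Unwrap(uint32_t ts) {
    if (!initialized_) {
      last_unwrapped_ = ts;
      initialized_ = true;
      return last_unwrapped_;
    }
    // Signed 32-bit difference gives the shortest step forwards or backwards.
    int32_t delta =
        static_cast<int32_t>(ts - static_cast<uint32_t>(last_unwrapped_));
    last_unwrapped_ += delta;
    return last_unwrapped_;
  }

 private:
  bool initialized_ = false;
  int64_t last_unwrapped_ = 0;
};

}  // namespace

int main() {
  TimestampUnwrapper unwrapper;
  std::map<int64_t, int64_t> send_times;  // unwrapped timestamp -> send time

  // A media stream whose RTP timestamps cross the 32-bit boundary.
  const uint32_t kDelta = 0xFFFFF000u;  // Plays the role of rtp_timestamp_delta_.
  const uint32_t media_timestamps[] = {0xFFFFF000u, 0xFFFFFB40u, 0x00000680u};
  int64_t fake_clock_ms = 1000;

  for (uint32_t ts : media_timestamps) {
    const int payload_type = 100;  // Media payload (placeholder value).
    if (IsFlexfec(payload_type))
      continue;  // FlexFEC uses a separate timestamp space; keep it out.
    int64_t key = unwrapper.Unwrap(ts - kDelta);
    send_times[key] = fake_clock_ms;
    printf("timestamp %u -> key %lld\n", static_cast<unsigned>(ts),
           static_cast<long long>(key));
    fake_clock_ms += 33;
  }
  // Keys come out as 0, 2880, 5760: monotonic despite the 32-bit wrap.
  return 0;
}
```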
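The recv_time_ms guard added in PerformFrameComparison matters because a frame recovered entirely by FlexFEC has no recorded receive time; folding its zero value into the receiver-time statistic would add one huge bogus "render minus zero" delta per recovered frame. A small standalone sketch of that filtering, with made-up values rather than WebRTC code:

```cpp
// Standalone sketch (not WebRTC code): skip samples without a receive time.
#include <cstdio>
#include <vector>

struct FrameComparison {
  long long recv_time_ms;
  long long render_time_ms;
};

int main() {
  // Three frames; the second was fully recovered by FlexFEC inside Call, so
  // no receive time could be recorded for it (recv_time_ms == 0).
  std::vector<FrameComparison> comparisons = {
      {1000, 1040}, {0, 1075}, {1066, 1107}};

  double sum = 0.0;
  int samples = 0;
  for (const auto& c : comparisons) {
    if (c.recv_time_ms <= 0)
      continue;  // No meaningful receive time; including it would skew the mean.
    sum += static_cast<double>(c.render_time_ms - c.recv_time_ms);
    ++samples;
  }
  printf("mean receiver time: %.1f ms over %d samples\n", sum / samples,
         samples);
  return 0;
}
```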