| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 430 matching lines...) |
| 441 bool RTPSender::ActivateCVORtpHeaderExtension() { | 441 bool RTPSender::ActivateCVORtpHeaderExtension() { |
| 442 if (!video_rotation_active_) { | 442 if (!video_rotation_active_) { |
| 443 rtc::CritScope lock(&send_critsect_); | 443 rtc::CritScope lock(&send_critsect_); |
| 444 if (rtp_header_extension_map_.SetActive(kRtpExtensionVideoRotation, true)) { | 444 if (rtp_header_extension_map_.SetActive(kRtpExtensionVideoRotation, true)) { |
| 445 video_rotation_active_ = true; | 445 video_rotation_active_ = true; |
| 446 } | 446 } |
| 447 } | 447 } |
| 448 return video_rotation_active_; | 448 return video_rotation_active_; |
| 449 } | 449 } |
| 450 | 450 |
| 451 bool RTPSender::SendOutgoingData(FrameType frame_type, | 451 int32_t RTPSender::SendOutgoingData(FrameType frame_type, |
| 452 int8_t payload_type, | 452 int8_t payload_type, |
| 453 uint32_t capture_timestamp, | 453 uint32_t capture_timestamp, |
| 454 int64_t capture_time_ms, | 454 int64_t capture_time_ms, |
| 455 const uint8_t* payload_data, | 455 const uint8_t* payload_data, |
| 456 size_t payload_size, | 456 size_t payload_size, |
| 457 const RTPFragmentationHeader* fragmentation, | 457 const RTPFragmentationHeader* fragmentation, |
| 458 const RTPVideoHeader* rtp_header, | 458 const RTPVideoHeader* rtp_hdr) { |
| 459 uint32_t* transport_frame_id_out) { | |
| 460 uint32_t ssrc; | 459 uint32_t ssrc; |
| 461 uint16_t sequence_number; | 460 uint16_t sequence_number; |
| 462 { | 461 { |
| 463 // Drop this packet if we're not sending media packets. | 462 // Drop this packet if we're not sending media packets. |
| 464 rtc::CritScope lock(&send_critsect_); | 463 rtc::CritScope lock(&send_critsect_); |
| 465 ssrc = ssrc_; | 464 ssrc = ssrc_; |
| 466 sequence_number = sequence_number_; | 465 sequence_number = sequence_number_; |
| 467 if (!sending_media_) | 466 if (!sending_media_) { |
| 468 return true; | 467 return 0; |
| 468 } |
| 469 } | 469 } |
| 470 RtpVideoCodecTypes video_type = kRtpVideoGeneric; | 470 RtpVideoCodecTypes video_type = kRtpVideoGeneric; |
| 471 if (CheckPayloadType(payload_type, &video_type) != 0) { | 471 if (CheckPayloadType(payload_type, &video_type) != 0) { |
| 472 LOG(LS_ERROR) << "Don't send data with unknown payload type: " | 472 LOG(LS_ERROR) << "Don't send data with unknown payload type: " |
| 473 << static_cast<int>(payload_type) << "."; | 473 << static_cast<int>(payload_type) << "."; |
| 474 return false; | 474 return -1; |
| 475 } | 475 } |
| 476 | 476 |
| 477 bool result; | 477 int32_t ret_val; |
| 478 if (audio_configured_) { | 478 if (audio_configured_) { |
| 479 TRACE_EVENT_ASYNC_STEP1("webrtc", "Audio", capture_timestamp, | 479 TRACE_EVENT_ASYNC_STEP1("webrtc", "Audio", capture_timestamp, |
| 480 "Send", "type", FrameTypeToString(frame_type)); | 480 "Send", "type", FrameTypeToString(frame_type)); |
| 481 assert(frame_type == kAudioFrameSpeech || frame_type == kAudioFrameCN || | 481 assert(frame_type == kAudioFrameSpeech || frame_type == kAudioFrameCN || |
| 482 frame_type == kEmptyFrame); | 482 frame_type == kEmptyFrame); |
| 483 | 483 |
| 484 result = audio_->SendAudio(frame_type, payload_type, capture_timestamp, | 484 ret_val = audio_->SendAudio(frame_type, payload_type, capture_timestamp, |
| 485 payload_data, payload_size, fragmentation); | 485 payload_data, payload_size, fragmentation); |
| 486 } else { | 486 } else { |
| 487 TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", capture_time_ms, | 487 TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", capture_time_ms, |
| 488 "Send", "type", FrameTypeToString(frame_type)); | 488 "Send", "type", FrameTypeToString(frame_type)); |
| 489 assert(frame_type != kAudioFrameSpeech && frame_type != kAudioFrameCN); | 489 assert(frame_type != kAudioFrameSpeech && frame_type != kAudioFrameCN); |
| 490 | 490 |
| 491 if (frame_type == kEmptyFrame) | 491 if (frame_type == kEmptyFrame) |
| 492 return true; | 492 return 0; |
| 493 | 493 |
| 494 if (rtp_header) { | 494 if (rtp_hdr) { |
| 495 playout_delay_oracle_.UpdateRequest(ssrc, rtp_header->playout_delay, | 495 playout_delay_oracle_.UpdateRequest(ssrc, rtp_hdr->playout_delay, |
| 496 sequence_number); | 496 sequence_number); |
| 497 } | 497 } |
| 498 | 498 |
| 499 // Update the active/inactive status of playout delay extension based | 499 // Update the active/inactive status of playout delay extension based |
| 500 // on what the oracle indicates. | 500 // on what the oracle indicates. |
| 501 { | 501 { |
| 502 rtc::CritScope lock(&send_critsect_); | 502 rtc::CritScope lock(&send_critsect_); |
| 503 if (playout_delay_active_ != playout_delay_oracle_.send_playout_delay()) { | 503 if (playout_delay_active_ != playout_delay_oracle_.send_playout_delay()) { |
| 504 playout_delay_active_ = playout_delay_oracle_.send_playout_delay(); | 504 playout_delay_active_ = playout_delay_oracle_.send_playout_delay(); |
| 505 rtp_header_extension_map_.SetActive(kRtpExtensionPlayoutDelay, | 505 rtp_header_extension_map_.SetActive(kRtpExtensionPlayoutDelay, |
| 506 playout_delay_active_); | 506 playout_delay_active_); |
| 507 } | 507 } |
| 508 } | 508 } |
| 509 | 509 |
| 510 result = video_->SendVideo(video_type, frame_type, payload_type, | 510 ret_val = video_->SendVideo( |
| 511 capture_timestamp, capture_time_ms, payload_data, | 511 video_type, frame_type, payload_type, capture_timestamp, |
| 512 payload_size, fragmentation, rtp_header); | 512 capture_time_ms, payload_data, payload_size, fragmentation, rtp_hdr); |
| 513 } | |
| 514 | |
| 515 if (transport_frame_id_out) { | |
| 516 rtc::CritScope lock(&send_critsect_); | |
| 517 // TODO(sergeyu): Move RTP timestamp calculation from BuildRTPheader() to | |
| 518 // SendOutgoingData() and pass it to SendVideo()/SendAudio() calls. | |
| 519 *transport_frame_id_out = timestamp_; | |
| 520 } | 513 } |
| 521 | 514 |
| 522 rtc::CritScope cs(&statistics_crit_); | 515 rtc::CritScope cs(&statistics_crit_); |
| 523 // Note: This is currently only counting for video. | 516 // Note: This is currently only counting for video. |
| 524 if (frame_type == kVideoFrameKey) { | 517 if (frame_type == kVideoFrameKey) { |
| 525 ++frame_counts_.key_frames; | 518 ++frame_counts_.key_frames; |
| 526 } else if (frame_type == kVideoFrameDelta) { | 519 } else if (frame_type == kVideoFrameDelta) { |
| 527 ++frame_counts_.delta_frames; | 520 ++frame_counts_.delta_frames; |
| 528 } | 521 } |
| 529 if (frame_count_observer_) { | 522 if (frame_count_observer_) { |
| 530 frame_count_observer_->FrameCountUpdated(frame_counts_, ssrc); | 523 frame_count_observer_->FrameCountUpdated(frame_counts_, ssrc); |
| 531 } | 524 } |
| 532 | 525 |
| 533 return result; | 526 return ret_val; |
| 534 } | 527 } |
| 535 | 528 |
| 536 size_t RTPSender::TrySendRedundantPayloads(size_t bytes_to_send, | 529 size_t RTPSender::TrySendRedundantPayloads(size_t bytes_to_send, |
| 537 int probe_cluster_id) { | 530 int probe_cluster_id) { |
| 538 { | 531 { |
| 539 rtc::CritScope lock(&send_critsect_); | 532 rtc::CritScope lock(&send_critsect_); |
| 540 if (!sending_media_) | 533 if (!sending_media_) |
| 541 return 0; | 534 return 0; |
| 542 if ((rtx_ & kRtxRedundantPayloads) == 0) | 535 if ((rtx_ & kRtxRedundantPayloads) == 0) |
| 543 return 0; | 536 return 0; |
| (...skipping 401 matching lines...) |
| 945 if (audio_configured_ || bytes == 0) | 938 if (audio_configured_ || bytes == 0) |
| 946 return 0; | 939 return 0; |
| 947 size_t bytes_sent = TrySendRedundantPayloads(bytes, probe_cluster_id); | 940 size_t bytes_sent = TrySendRedundantPayloads(bytes, probe_cluster_id); |
| 948 if (bytes_sent < bytes) | 941 if (bytes_sent < bytes) |
| 949 bytes_sent += | 942 bytes_sent += |
| 950 SendPadData(bytes - bytes_sent, false, 0, 0, probe_cluster_id); | 943 SendPadData(bytes - bytes_sent, false, 0, 0, probe_cluster_id); |
| 951 return bytes_sent; | 944 return bytes_sent; |
| 952 } | 945 } |
| 953 | 946 |
| 954 // TODO(pwestin): send in the RtpHeaderParser to avoid parsing it again. | 947 // TODO(pwestin): send in the RtpHeaderParser to avoid parsing it again. |
| 955 bool RTPSender::SendToNetwork(uint8_t* buffer, | 948 int32_t RTPSender::SendToNetwork(uint8_t* buffer, |
| 956 size_t payload_length, | 949 size_t payload_length, |
| 957 size_t rtp_header_length, | 950 size_t rtp_header_length, |
| 958 int64_t capture_time_ms, | 951 int64_t capture_time_ms, |
| 959 StorageType storage, | 952 StorageType storage, |
| 960 RtpPacketSender::Priority priority) { | 953 RtpPacketSender::Priority priority) { |
| 961 size_t length = payload_length + rtp_header_length; | 954 size_t length = payload_length + rtp_header_length; |
| 962 RtpUtility::RtpHeaderParser rtp_parser(buffer, length); | 955 RtpUtility::RtpHeaderParser rtp_parser(buffer, length); |
| 963 | 956 |
| 964 RTPHeader rtp_header; | 957 RTPHeader rtp_header; |
| 965 rtp_parser.Parse(&rtp_header); | 958 rtp_parser.Parse(&rtp_header); |
| 966 | 959 |
| 967 int64_t now_ms = clock_->TimeInMilliseconds(); | 960 int64_t now_ms = clock_->TimeInMilliseconds(); |
| 968 | 961 |
| 969 // |capture_time_ms| <= 0 is considered invalid. | 962 // |capture_time_ms| <= 0 is considered invalid. |
| 970 // TODO(holmer): This should be changed all over Video Engine so that negative | 963 // TODO(holmer): This should be changed all over Video Engine so that negative |
| 971 // time is considered invalid, while 0 is considered a valid time. | 964 // time is considered invalid, while 0 is considered a valid time. |
| 972 if (capture_time_ms > 0) { | 965 if (capture_time_ms > 0) { |
| 973 UpdateTransmissionTimeOffset(buffer, length, rtp_header, | 966 UpdateTransmissionTimeOffset(buffer, length, rtp_header, |
| 974 now_ms - capture_time_ms); | 967 now_ms - capture_time_ms); |
| 975 } | 968 } |
| 976 | 969 |
| 977 UpdateAbsoluteSendTime(buffer, length, rtp_header, now_ms); | 970 UpdateAbsoluteSendTime(buffer, length, rtp_header, now_ms); |
| 978 | 971 |
| 979 // Used for NACK and to spread out the transmission of packets. | 972 // Used for NACK and to spread out the transmission of packets. |
| 980 if (packet_history_.PutRTPPacket(buffer, length, capture_time_ms, storage) != | 973 if (packet_history_.PutRTPPacket(buffer, length, capture_time_ms, storage) != |
| 981 0) { | 974 0) { |
| 982 return false; | 975 return -1; |
| 983 } | 976 } |
| 984 | 977 |
| 985 if (paced_sender_) { | 978 if (paced_sender_) { |
| 986 // Correct offset between implementations of millisecond time stamps in | 979 // Correct offset between implementations of millisecond time stamps in |
| 987 // TickTime and Clock. | 980 // TickTime and Clock. |
| 988 int64_t corrected_time_ms = capture_time_ms + clock_delta_ms_; | 981 int64_t corrected_time_ms = capture_time_ms + clock_delta_ms_; |
| 989 paced_sender_->InsertPacket(priority, rtp_header.ssrc, | 982 paced_sender_->InsertPacket(priority, rtp_header.ssrc, |
| 990 rtp_header.sequenceNumber, corrected_time_ms, | 983 rtp_header.sequenceNumber, corrected_time_ms, |
| 991 payload_length, false); | 984 payload_length, false); |
| 992 if (last_capture_time_ms_sent_ == 0 || | 985 if (last_capture_time_ms_sent_ == 0 || |
| 993 corrected_time_ms > last_capture_time_ms_sent_) { | 986 corrected_time_ms > last_capture_time_ms_sent_) { |
| 994 last_capture_time_ms_sent_ = corrected_time_ms; | 987 last_capture_time_ms_sent_ = corrected_time_ms; |
| 995 TRACE_EVENT_ASYNC_BEGIN1(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), | 988 TRACE_EVENT_ASYNC_BEGIN1(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), |
| 996 "PacedSend", corrected_time_ms, | 989 "PacedSend", corrected_time_ms, |
| 997 "capture_time_ms", corrected_time_ms); | 990 "capture_time_ms", corrected_time_ms); |
| 998 } | 991 } |
| 999 return true; | 992 return 0; |
| 1000 } | 993 } |
| 1001 | 994 |
| 1002 PacketOptions options; | 995 PacketOptions options; |
| 1003 if (UpdateTransportSequenceNumber(buffer, length, rtp_header, | 996 if (UpdateTransportSequenceNumber(buffer, length, rtp_header, |
| 1004 &options.packet_id)) { | 997 &options.packet_id)) { |
| 1005 if (transport_feedback_observer_) | 998 if (transport_feedback_observer_) |
| 1006 transport_feedback_observer_->AddPacket(options.packet_id, length, | 999 transport_feedback_observer_->AddPacket(options.packet_id, length, |
| 1007 PacketInfo::kNotAProbe); | 1000 PacketInfo::kNotAProbe); |
| 1008 } | 1001 } |
| 1009 UpdateDelayStatistics(capture_time_ms, now_ms); | 1002 UpdateDelayStatistics(capture_time_ms, now_ms); |
| 1010 UpdateOnSendPacket(options.packet_id, capture_time_ms, rtp_header.ssrc); | 1003 UpdateOnSendPacket(options.packet_id, capture_time_ms, rtp_header.ssrc); |
| 1011 | 1004 |
| 1012 bool sent = SendPacketToNetwork(buffer, length, options); | 1005 bool sent = SendPacketToNetwork(buffer, length, options); |
| 1013 | 1006 |
| 1014 // Mark the packet as sent in the history even if send failed. Dropping a | 1007 // Mark the packet as sent in the history even if send failed. Dropping a |
| 1015 // packet here should be treated as any other packet drop so we should be | 1008 // packet here should be treated as any other packet drop so we should be |
| 1016 // ready for a retransmission. | 1009 // ready for a retransmission. |
| 1017 packet_history_.SetSent(rtp_header.sequenceNumber); | 1010 packet_history_.SetSent(rtp_header.sequenceNumber); |
| 1018 | 1011 |
| 1019 if (!sent) | 1012 if (!sent) |
| 1020 return false; | 1013 return -1; |
| 1021 | 1014 |
| 1022 { | 1015 { |
| 1023 rtc::CritScope lock(&send_critsect_); | 1016 rtc::CritScope lock(&send_critsect_); |
| 1024 media_has_been_sent_ = true; | 1017 media_has_been_sent_ = true; |
| 1025 } | 1018 } |
| 1026 UpdateRtpStats(buffer, length, rtp_header, false, false); | 1019 UpdateRtpStats(buffer, length, rtp_header, false, false); |
| 1027 return true; | 1020 return 0; |
| 1028 } | 1021 } |
| 1029 | 1022 |
| 1030 void RTPSender::UpdateDelayStatistics(int64_t capture_time_ms, int64_t now_ms) { | 1023 void RTPSender::UpdateDelayStatistics(int64_t capture_time_ms, int64_t now_ms) { |
| 1031 if (!send_side_delay_observer_ || capture_time_ms <= 0) | 1024 if (!send_side_delay_observer_ || capture_time_ms <= 0) |
| 1032 return; | 1025 return; |
| 1033 | 1026 |
| 1034 uint32_t ssrc; | 1027 uint32_t ssrc; |
| 1035 int avg_delay_ms = 0; | 1028 int avg_delay_ms = 0; |
| 1036 int max_delay_ms = 0; | 1029 int max_delay_ms = 0; |
| 1037 { | 1030 { |
| (...skipping 863 matching lines...) |
| 1901 rtc::CritScope lock(&send_critsect_); | 1894 rtc::CritScope lock(&send_critsect_); |
| 1902 | 1895 |
| 1903 RtpState state; | 1896 RtpState state; |
| 1904 state.sequence_number = sequence_number_rtx_; | 1897 state.sequence_number = sequence_number_rtx_; |
| 1905 state.start_timestamp = start_timestamp_; | 1898 state.start_timestamp = start_timestamp_; |
| 1906 | 1899 |
| 1907 return state; | 1900 return state; |
| 1908 } | 1901 } |
| 1909 | 1902 |
| 1910 } // namespace webrtc | 1903 } // namespace webrtc |
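For context, the visible effect of this patch is that `SendOutgoingData` and `SendToNetwork` return `int32_t` again (0 on success, -1 on failure) instead of `bool`, and `SendOutgoingData` loses the `transport_frame_id_out` output parameter. The sketch below is a minimal, self-contained illustration of how a call site adapts to that return convention; `SendOutgoingDataStub` and its arguments are hypothetical stand-ins, not part of the WebRTC API.

```cpp
// Hypothetical stand-in mirroring the patched return convention:
// int32_t, where 0 means success and a negative value means failure.
#include <cstdint>
#include <cstdio>

// Stub that follows the same early-return structure as the NEW column:
// not sending media -> 0, unknown payload type -> -1, otherwise 0.
int32_t SendOutgoingDataStub(bool sending_media, bool known_payload_type) {
  if (!sending_media)
    return 0;
  if (!known_payload_type)
    return -1;
  return 0;
}

int main() {
  // With the bool version a caller wrote `if (!SendOutgoingData(...))`;
  // with the int32_t version it checks for a negative return instead.
  int32_t ret = SendOutgoingDataStub(/*sending_media=*/true,
                                     /*known_payload_type=*/false);
  if (ret < 0) {
    std::printf("SendOutgoingData failed: %d\n", ret);
    return 1;
  }
  std::printf("SendOutgoingData succeeded\n");
  return 0;
}
```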