| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 17 matching lines...) |
| 28 // Max in RFC 3550 is 255 bytes; we limit it to a multiple of 32 for SRTP. | 28 // Max in RFC 3550 is 255 bytes; we limit it to a multiple of 32 for SRTP. |
| 29 const size_t kMaxPaddingLength = 224; | 29 const size_t kMaxPaddingLength = 224; |
| 30 const int kSendSideDelayWindowMs = 1000; | 30 const int kSendSideDelayWindowMs = 1000; |
| 31 | 31 |
| 32 namespace { | 32 namespace { |
| 33 | 33 |
| 34 const size_t kRtpHeaderLength = 12; | 34 const size_t kRtpHeaderLength = 12; |
| 35 | 35 |
| 36 const char* FrameTypeToString(FrameType frame_type) { | 36 const char* FrameTypeToString(FrameType frame_type) { |
| 37 switch (frame_type) { | 37 switch (frame_type) { |
| 38 case kFrameEmpty: return "empty"; | 38 case kEmptyFrame: |
| | 39 return "empty"; |
| 39 case kAudioFrameSpeech: return "audio_speech"; | 40 case kAudioFrameSpeech: return "audio_speech"; |
| 40 case kAudioFrameCN: return "audio_cn"; | 41 case kAudioFrameCN: return "audio_cn"; |
| 41 case kVideoFrameKey: return "video_key"; | 42 case kVideoFrameKey: return "video_key"; |
| 42 case kVideoFrameDelta: return "video_delta"; | 43 case kVideoFrameDelta: return "video_delta"; |
| 43 } | 44 } |
| 44 return ""; | 45 return ""; |
| 45 } | 46 } |
| 46 | 47 |
| 47 } // namespace | 48 } // namespace |
| 48 | 49 |
| (...skipping 453 matching lines...) |
| 502 if (CheckPayloadType(payload_type, &video_type) != 0) { | 503 if (CheckPayloadType(payload_type, &video_type) != 0) { |
| 503 LOG(LS_ERROR) << "Don't send data with unknown payload type."; | 504 LOG(LS_ERROR) << "Don't send data with unknown payload type."; |
| 504 return -1; | 505 return -1; |
| 505 } | 506 } |
| 506 | 507 |
| 507 int32_t ret_val; | 508 int32_t ret_val; |
| 508 if (audio_configured_) { | 509 if (audio_configured_) { |
| 509 TRACE_EVENT_ASYNC_STEP1("webrtc", "Audio", capture_timestamp, | 510 TRACE_EVENT_ASYNC_STEP1("webrtc", "Audio", capture_timestamp, |
| 510 "Send", "type", FrameTypeToString(frame_type)); | 511 "Send", "type", FrameTypeToString(frame_type)); |
| 511 assert(frame_type == kAudioFrameSpeech || frame_type == kAudioFrameCN || | 512 assert(frame_type == kAudioFrameSpeech || frame_type == kAudioFrameCN || |
| 512 frame_type == kFrameEmpty); | 513 frame_type == kEmptyFrame); |
| 513 | 514 |
| 514 ret_val = audio_->SendAudio(frame_type, payload_type, capture_timestamp, | 515 ret_val = audio_->SendAudio(frame_type, payload_type, capture_timestamp, |
| 515 payload_data, payload_size, fragmentation); | 516 payload_data, payload_size, fragmentation); |
| 516 } else { | 517 } else { |
| 517 TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", capture_time_ms, | 518 TRACE_EVENT_ASYNC_STEP1("webrtc", "Video", capture_time_ms, |
| 518 "Send", "type", FrameTypeToString(frame_type)); | 519 "Send", "type", FrameTypeToString(frame_type)); |
| 519 assert(frame_type != kAudioFrameSpeech && frame_type != kAudioFrameCN); | 520 assert(frame_type != kAudioFrameSpeech && frame_type != kAudioFrameCN); |
| 520 | 521 |
| 521 if (frame_type == kFrameEmpty) | 522 if (frame_type == kEmptyFrame) |
| 522 return 0; | 523 return 0; |
| 523 | 524 |
| 524 ret_val = | 525 ret_val = |
| 525 video_->SendVideo(video_type, frame_type, payload_type, | 526 video_->SendVideo(video_type, frame_type, payload_type, |
| 526 capture_timestamp, capture_time_ms, payload_data, | 527 capture_timestamp, capture_time_ms, payload_data, |
| 527 payload_size, fragmentation, rtp_hdr); | 528 payload_size, fragmentation, rtp_hdr); |
| 528 } | 529 } |
| 529 | 530 |
| 530 CriticalSectionScoped cs(statistics_crit_.get()); | 531 CriticalSectionScoped cs(statistics_crit_.get()); |
| 531 // Note: This is currently only counting for video. | 532 // Note: This is currently only counting for video. |
| (...skipping 1359 matching lines...) |
| 1891 CriticalSectionScoped lock(send_critsect_.get()); | 1892 CriticalSectionScoped lock(send_critsect_.get()); |
| 1892 | 1893 |
| 1893 RtpState state; | 1894 RtpState state; |
| 1894 state.sequence_number = sequence_number_rtx_; | 1895 state.sequence_number = sequence_number_rtx_; |
| 1895 state.start_timestamp = start_timestamp_; | 1896 state.start_timestamp = start_timestamp_; |
| 1896 | 1897 |
| 1897 return state; | 1898 return state; |
| 1898 } | 1899 } |
| 1899 | 1900 |
| 1900 } // namespace webrtc | 1901 } // namespace webrtc |
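
A note on the padding constant at line 29 above: RFC 3550 stores the padding count in a single octet, so 255 bytes is the protocol ceiling, and keeping the length at a multiple of 32 leaves 224 (= 32 * 7) as the largest usable value. The sketch below only illustrates that clamping rule; the helper name and signature are hypothetical and not part of this change.

```cpp
#include <algorithm>
#include <cstddef>

// Hypothetical helper illustrating the constraint behind kMaxPaddingLength:
// padding stays a multiple of 32 bytes (SRTP-friendly) and below the
// RFC 3550 single-octet limit of 255, so 224 is the usable maximum.
size_t ClampPaddingLength(size_t requested_bytes) {
  const size_t kMaxPaddingLength = 224;
  const size_t aligned = (requested_bytes / 32) * 32;  // Round down to a multiple of 32.
  return std::min(aligned, kMaxPaddingLength);
}
```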
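On the kFrameEmpty → kEmptyFrame rename: the patch touches both the trace helper and the asserts in the send path, which encode that empty frames are legal input on the audio path but are dropped (with a successful return) before reaching the video packetizer. A minimal sketch of that invariant, assuming only the enum values visible in this diff; the two predicate functions are illustrative and not part of the WebRTC API.

```cpp
// Enum values as shown in the diff; the real definition lives elsewhere in
// the WebRTC tree.
enum FrameType {
  kEmptyFrame,
  kAudioFrameSpeech,
  kAudioFrameCN,
  kVideoFrameKey,
  kVideoFrameDelta,
};

// Audio accepts speech, comfort-noise, and empty frames; the send path
// asserts exactly this before calling SendAudio().
bool IsValidAudioFrameType(FrameType type) {
  return type == kAudioFrameSpeech || type == kAudioFrameCN ||
         type == kEmptyFrame;
}

// Video accepts only key and delta frames; an empty frame returns early
// (success, nothing sent) before SendVideo() is reached.
bool IsValidVideoFrameType(FrameType type) {
  return type == kVideoFrameKey || type == kVideoFrameDelta;
}
```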