Side by Side Diff: webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc

Issue 2067673004: Style cleanups in RtpSender. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@master
Patch Set: addressed feedback (created 4 years, 6 months ago)
1 /* 1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
11 #include "webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h" 11 #include "webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h"
12 12
13 #include <string.h> 13 #include <string.h>
14 14
15 #include "webrtc/base/logging.h" 15 #include "webrtc/base/logging.h"
16 #include "webrtc/base/timeutils.h" 16 #include "webrtc/base/timeutils.h"
17 #include "webrtc/base/trace_event.h" 17 #include "webrtc/base/trace_event.h"
18 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h" 18 #include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
19 #include "webrtc/modules/rtp_rtcp/source/byte_io.h" 19 #include "webrtc/modules/rtp_rtcp/source/byte_io.h"
20 20
21 namespace webrtc { 21 namespace webrtc {
22 22
23 static const int kDtmfFrequencyHz = 8000; 23 static const int kDtmfFrequencyHz = 8000;
24 24
25 RTPSenderAudio::RTPSenderAudio(Clock* clock, RTPSender* rtpSender) 25 RTPSenderAudio::RTPSenderAudio(Clock* clock, RTPSender* rtp_sender)
26 : _clock(clock), 26 : clock_(clock),
27 _rtpSender(rtpSender), 27 rtp_sender_(rtp_sender),
28 _packetSizeSamples(160), 28 packet_size_samples_(160),
29 _dtmfEventIsOn(false), 29 dtmf_event_is_on_(false),
30 _dtmfEventFirstPacketSent(false), 30 dtmf_event_first_packet_sent_(false),
31 _dtmfPayloadType(-1), 31 dtmf_payload_type_(-1),
32 _dtmfTimestamp(0), 32 dtmf_timestamp_(0),
33 _dtmfKey(0), 33 dtmf_key_(0),
34 _dtmfLengthSamples(0), 34 dtmf_length_samples_(0),
35 _dtmfLevel(0), 35 dtmf_level_(0),
36 _dtmfTimeLastSent(0), 36 dtmf_time_last_sent_(0),
37 _dtmfTimestampLastSent(0), 37 dtmf_timestamp_last_sent_(0),
38 _REDPayloadType(-1), 38 red_payload_type_(-1),
39 _inbandVADactive(false), 39 inband_vad_active_(false),
40 _cngNBPayloadType(-1), 40 cngnb_payload_type_(-1),
41 _cngWBPayloadType(-1), 41 cngwb_payload_type_(-1),
42 _cngSWBPayloadType(-1), 42 cngswb_payload_type_(-1),
43 _cngFBPayloadType(-1), 43 cngfb_payload_type_(-1),
44 _lastPayloadType(-1), 44 last_payload_type_(-1),
45 _audioLevel_dBov(0) {} 45 audio_level_dbov_(0) {}
46 46
47 RTPSenderAudio::~RTPSenderAudio() {} 47 RTPSenderAudio::~RTPSenderAudio() {}
48 48
49 int RTPSenderAudio::AudioFrequency() const { 49 int RTPSenderAudio::AudioFrequency() const {
50 return kDtmfFrequencyHz; 50 return kDtmfFrequencyHz;
51 } 51 }
52 52
53 // set audio packet size, used to determine when it's time to send a DTMF packet 53 // set audio packet size, used to determine when it's time to send a DTMF packet
54 // in silence (CNG) 54 // in silence (CNG)
55 int32_t RTPSenderAudio::SetAudioPacketSize(uint16_t packetSizeSamples) { 55 int32_t RTPSenderAudio::SetAudioPacketSize(uint16_t packet_size_samples) {
56 rtc::CritScope cs(&_sendAudioCritsect); 56 rtc::CritScope cs(&send_audio_critsect_);
57 57 packet_size_samples_ = packet_size_samples;
58 _packetSizeSamples = packetSizeSamples;
59 return 0; 58 return 0;
60 } 59 }
61 60
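The comment above is about pacing: with the default packet_size_samples_ of 160 and the fixed 8000 Hz DTMF clock, one audio packet spans 20 ms, which is the interval at which SendAudio() below paces DTMF packets while only comfort noise is flowing. A quick arithmetic check, illustrative only and not part of the CL:

#include <cstdio>

int main() {
  constexpr int kDtmfFrequencyHz = 8000;         // same constant as this file
  constexpr int kDefaultPacketSizeSamples = 160; // default set in constructor
  // 160 samples / 8000 Hz = 0.020 s, i.e. at most one DTMF packet per 20 ms
  // while kEmptyFrame packets drive the sender in CNG.
  std::printf("%.0f ms\n",
              1000.0 * kDefaultPacketSizeSamples / kDtmfFrequencyHz);
}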
62 int32_t RTPSenderAudio::RegisterAudioPayload( 61 int32_t RTPSenderAudio::RegisterAudioPayload(
63 const char payloadName[RTP_PAYLOAD_NAME_SIZE], 62 const char payloadName[RTP_PAYLOAD_NAME_SIZE],
64 const int8_t payloadType, 63 const int8_t payload_type,
65 const uint32_t frequency, 64 const uint32_t frequency,
66 const size_t channels, 65 const size_t channels,
67 const uint32_t rate, 66 const uint32_t rate,
68 RtpUtility::Payload** payload) { 67 RtpUtility::Payload** payload) {
69 if (RtpUtility::StringCompare(payloadName, "cn", 2)) { 68 if (RtpUtility::StringCompare(payloadName, "cn", 2)) {
70 rtc::CritScope cs(&_sendAudioCritsect); 69 rtc::CritScope cs(&send_audio_critsect_);
71 // we can have multiple CNG payload types 70 // we can have multiple CNG payload types
72 switch (frequency) { 71 switch (frequency) {
73 case 8000: 72 case 8000:
74 _cngNBPayloadType = payloadType; 73 cngnb_payload_type_ = payload_type;
75 break; 74 break;
76 case 16000: 75 case 16000:
77 _cngWBPayloadType = payloadType; 76 cngwb_payload_type_ = payload_type;
78 break; 77 break;
79 case 32000: 78 case 32000:
80 _cngSWBPayloadType = payloadType; 79 cngswb_payload_type_ = payload_type;
81 break; 80 break;
82 case 48000: 81 case 48000:
83 _cngFBPayloadType = payloadType; 82 cngfb_payload_type_ = payload_type;
84 break; 83 break;
85 default: 84 default:
86 return -1; 85 return -1;
87 } 86 }
88 } else if (RtpUtility::StringCompare(payloadName, "telephone-event", 15)) { 87 } else if (RtpUtility::StringCompare(payloadName, "telephone-event", 15)) {
89 rtc::CritScope cs(&_sendAudioCritsect); 88 rtc::CritScope cs(&send_audio_critsect_);
90 // Don't add it to the list 89 // Don't add it to the list
91 // we dont want to allow send with a DTMF payloadtype 90 // we dont want to allow send with a DTMF payloadtype
92 _dtmfPayloadType = payloadType; 91 dtmf_payload_type_ = payload_type;
93 return 0; 92 return 0;
94 // The default timestamp rate is 8000 Hz, but other rates may be defined. 93 // The default timestamp rate is 8000 Hz, but other rates may be defined.
95 } 94 }
96 *payload = new RtpUtility::Payload; 95 *payload = new RtpUtility::Payload;
97 (*payload)->typeSpecific.Audio.frequency = frequency; 96 (*payload)->typeSpecific.Audio.frequency = frequency;
98 (*payload)->typeSpecific.Audio.channels = channels; 97 (*payload)->typeSpecific.Audio.channels = channels;
99 (*payload)->typeSpecific.Audio.rate = rate; 98 (*payload)->typeSpecific.Audio.rate = rate;
100 (*payload)->audio = true; 99 (*payload)->audio = true;
101 (*payload)->name[RTP_PAYLOAD_NAME_SIZE - 1] = '\0'; 100 (*payload)->name[RTP_PAYLOAD_NAME_SIZE - 1] = '\0';
102 strncpy((*payload)->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1); 101 strncpy((*payload)->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
103 return 0; 102 return 0;
104 } 103 }
105 104
106 bool RTPSenderAudio::MarkerBit(FrameType frameType, int8_t payload_type) { 105 bool RTPSenderAudio::MarkerBit(FrameType frame_type, int8_t payload_type) {
107 rtc::CritScope cs(&_sendAudioCritsect); 106 rtc::CritScope cs(&send_audio_critsect_);
108 // for audio true for first packet in a speech burst 107 // for audio true for first packet in a speech burst
109 bool markerBit = false; 108 bool marker_bit = false;
110 if (_lastPayloadType != payload_type) { 109 if (last_payload_type_ != payload_type) {
111 if (payload_type != -1 && (_cngNBPayloadType == payload_type || 110 if (payload_type != -1 && (cngnb_payload_type_ == payload_type ||
112 _cngWBPayloadType == payload_type || 111 cngwb_payload_type_ == payload_type ||
113 _cngSWBPayloadType == payload_type || 112 cngswb_payload_type_ == payload_type ||
114 _cngFBPayloadType == payload_type)) { 113 cngfb_payload_type_ == payload_type)) {
115 // Only set a marker bit when we change payload type to a non CNG 114 // Only set a marker bit when we change payload type to a non CNG
116 return false; 115 return false;
117 } 116 }
118 117
119 // payload_type differ 118 // payload_type differ
120 if (_lastPayloadType == -1) { 119 if (last_payload_type_ == -1) {
121 if (frameType != kAudioFrameCN) { 120 if (frame_type != kAudioFrameCN) {
122 // first packet and NOT CNG 121 // first packet and NOT CNG
123 return true; 122 return true;
124 } else { 123 } else {
125 // first packet and CNG 124 // first packet and CNG
126 _inbandVADactive = true; 125 inband_vad_active_ = true;
127 return false; 126 return false;
128 } 127 }
129 } 128 }
130 129
131 // not first packet AND 130 // not first packet AND
132 // not CNG AND 131 // not CNG AND
133 // payload_type changed 132 // payload_type changed
134 133
135 // set a marker bit when we change payload type 134 // set a marker bit when we change payload type
136 markerBit = true; 135 marker_bit = true;
137 } 136 }
138 137
139 // For G.723 G.729, AMR etc we can have inband VAD 138 // For G.723 G.729, AMR etc we can have inband VAD
140 if (frameType == kAudioFrameCN) { 139 if (frame_type == kAudioFrameCN) {
141 _inbandVADactive = true; 140 inband_vad_active_ = true;
142 } else if (_inbandVADactive) { 141 } else if (inband_vad_active_) {
143 _inbandVADactive = false; 142 inband_vad_active_ = false;
144 markerBit = true; 143 marker_bit = true;
145 } 144 }
146 return markerBit; 145 return marker_bit;
147 } 146 }
148 147
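As the comments in MarkerBit() above describe, the marker bit for audio follows the RFC 3551 convention: it is set on the first packet of a talkspurt (the first non-CNG packet after comfort noise, or after a payload-type change) and never on CNG packets themselves. A simplified, illustrative restatement of that rule, using hypothetical names rather than the class above:

#include <cstdio>

enum class AudioFrame { kSpeech, kComfortNoise };

struct TalkspurtTracker {
  bool in_silence = true;       // assume the stream starts silent
  int last_payload_type = -1;

  // Returns the marker-bit value for the next outgoing packet.
  bool OnPacket(AudioFrame frame, int payload_type) {
    bool marker = false;
    if (frame == AudioFrame::kComfortNoise) {
      in_silence = true;                         // CNG never carries the marker
    } else {
      marker = in_silence ||                     // first speech after silence
               (last_payload_type != -1 &&
                payload_type != last_payload_type);  // codec switch
      in_silence = false;
    }
    last_payload_type = payload_type;
    return marker;
  }
};

int main() {
  TalkspurtTracker t;
  std::printf("%d\n", t.OnPacket(AudioFrame::kSpeech, 111));       // 1: talkspurt start
  std::printf("%d\n", t.OnPacket(AudioFrame::kSpeech, 111));       // 0
  std::printf("%d\n", t.OnPacket(AudioFrame::kComfortNoise, 13));  // 0
  std::printf("%d\n", t.OnPacket(AudioFrame::kSpeech, 111));       // 1: new talkspurt
}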
149 int32_t RTPSenderAudio::SendAudio(FrameType frameType, 148 int32_t RTPSenderAudio::SendAudio(FrameType frame_type,
150 int8_t payloadType, 149 int8_t payload_type,
151 uint32_t captureTimeStamp, 150 uint32_t capture_timestamp,
152 const uint8_t* payloadData, 151 const uint8_t* payload_data,
153 size_t dataSize, 152 size_t data_size,
154 const RTPFragmentationHeader* fragmentation) { 153 const RTPFragmentationHeader* fragmentation) {
155 // TODO(pwestin) Breakup function in smaller functions. 154 // TODO(pwestin) Breakup function in smaller functions.
156 size_t payloadSize = dataSize; 155 size_t payload_size = data_size;
157 size_t maxPayloadLength = _rtpSender->MaxPayloadLength(); 156 size_t max_payload_length = rtp_sender_->MaxPayloadLength();
158 uint16_t dtmfLengthMS = 0; 157 uint16_t dtmf_length_ms = 0;
159 uint8_t key = 0; 158 uint8_t key = 0;
160 int red_payload_type; 159 int red_payload_type;
161 uint8_t audio_level_dbov; 160 uint8_t audio_level_dbov;
162 int8_t dtmf_payload_type; 161 int8_t dtmf_payload_type;
163 uint16_t packet_size_samples; 162 uint16_t packet_size_samples;
164 { 163 {
165 rtc::CritScope cs(&_sendAudioCritsect); 164 rtc::CritScope cs(&send_audio_critsect_);
166 red_payload_type = _REDPayloadType; 165 red_payload_type = red_payload_type_;
167 audio_level_dbov = _audioLevel_dBov; 166 audio_level_dbov = audio_level_dbov_;
168 dtmf_payload_type = _dtmfPayloadType; 167 dtmf_payload_type = dtmf_payload_type_;
169 packet_size_samples = _packetSizeSamples; 168 packet_size_samples = packet_size_samples_;
170 } 169 }
171 170
172 // Check if we have pending DTMFs to send 171 // Check if we have pending DTMFs to send
173 if (!_dtmfEventIsOn && PendingDTMF()) { 172 if (!dtmf_event_is_on_ && PendingDTMF()) {
174 int64_t delaySinceLastDTMF = 173 int64_t delaySinceLastDTMF =
175 _clock->TimeInMilliseconds() - _dtmfTimeLastSent; 174 clock_->TimeInMilliseconds() - dtmf_time_last_sent_;
176 175
177 if (delaySinceLastDTMF > 100) { 176 if (delaySinceLastDTMF > 100) {
178 // New tone to play 177 // New tone to play
179 _dtmfTimestamp = captureTimeStamp; 178 dtmf_timestamp_ = capture_timestamp;
180 if (NextDTMF(&key, &dtmfLengthMS, &_dtmfLevel) >= 0) { 179 if (NextDTMF(&key, &dtmf_length_ms, &dtmf_level_) >= 0) {
181 _dtmfEventFirstPacketSent = false; 180 dtmf_event_first_packet_sent_ = false;
182 _dtmfKey = key; 181 dtmf_key_ = key;
183 _dtmfLengthSamples = (kDtmfFrequencyHz / 1000) * dtmfLengthMS; 182 dtmf_length_samples_ = (kDtmfFrequencyHz / 1000) * dtmf_length_ms;
184 _dtmfEventIsOn = true; 183 dtmf_event_is_on_ = true;
185 } 184 }
186 } 185 }
187 } 186 }
188 187
189 // A source MAY send events and coded audio packets for the same time 188 // A source MAY send events and coded audio packets for the same time
190 // but we don't support it 189 // but we don't support it
191 if (_dtmfEventIsOn) { 190 if (dtmf_event_is_on_) {
192 if (frameType == kEmptyFrame) { 191 if (frame_type == kEmptyFrame) {
193 // kEmptyFrame is used to drive the DTMF when in CN mode 192 // kEmptyFrame is used to drive the DTMF when in CN mode
194 // it can be triggered more frequently than we want to send the 193 // it can be triggered more frequently than we want to send the
195 // DTMF packets. 194 // DTMF packets.
196 if (packet_size_samples > (captureTimeStamp - _dtmfTimestampLastSent)) { 195 if (packet_size_samples >
196 (capture_timestamp - dtmf_timestamp_last_sent_)) {
197 // not time to send yet 197 // not time to send yet
198 return 0; 198 return 0;
199 } 199 }
200 } 200 }
201 _dtmfTimestampLastSent = captureTimeStamp; 201 dtmf_timestamp_last_sent_ = capture_timestamp;
202 uint32_t dtmfDurationSamples = captureTimeStamp - _dtmfTimestamp; 202 uint32_t dtmf_duration_samples = capture_timestamp - dtmf_timestamp_;
203 bool ended = false; 203 bool ended = false;
204 bool send = true; 204 bool send = true;
205 205
206 if (_dtmfLengthSamples > dtmfDurationSamples) { 206 if (dtmf_length_samples_ > dtmf_duration_samples) {
207 if (dtmfDurationSamples <= 0) { 207 if (dtmf_duration_samples <= 0) {
208 // Skip send packet at start, since we shouldn't use duration 0 208 // Skip send packet at start, since we shouldn't use duration 0
209 send = false; 209 send = false;
210 } 210 }
211 } else { 211 } else {
212 ended = true; 212 ended = true;
213 _dtmfEventIsOn = false; 213 dtmf_event_is_on_ = false;
214 _dtmfTimeLastSent = _clock->TimeInMilliseconds(); 214 dtmf_time_last_sent_ = clock_->TimeInMilliseconds();
215 } 215 }
216 if (send) { 216 if (send) {
217 if (dtmfDurationSamples > 0xffff) { 217 if (dtmf_duration_samples > 0xffff) {
218 // RFC 4733 2.5.2.3 Long-Duration Events 218 // RFC 4733 2.5.2.3 Long-Duration Events
219 SendTelephoneEventPacket(ended, dtmf_payload_type, _dtmfTimestamp, 219 SendTelephoneEventPacket(ended, dtmf_payload_type, dtmf_timestamp_,
220 static_cast<uint16_t>(0xffff), false); 220 static_cast<uint16_t>(0xffff), false);
221 221
222 // set new timestap for this segment 222 // set new timestap for this segment
223 _dtmfTimestamp = captureTimeStamp; 223 dtmf_timestamp_ = capture_timestamp;
224 dtmfDurationSamples -= 0xffff; 224 dtmf_duration_samples -= 0xffff;
225 _dtmfLengthSamples -= 0xffff; 225 dtmf_length_samples_ -= 0xffff;
226 226
227 return SendTelephoneEventPacket( 227 return SendTelephoneEventPacket(
228 ended, dtmf_payload_type, _dtmfTimestamp, 228 ended, dtmf_payload_type, dtmf_timestamp_,
229 static_cast<uint16_t>(dtmfDurationSamples), false); 229 static_cast<uint16_t>(dtmf_duration_samples), false);
230 } else { 230 } else {
231 if (SendTelephoneEventPacket(ended, dtmf_payload_type, _dtmfTimestamp, 231 if (SendTelephoneEventPacket(ended, dtmf_payload_type, dtmf_timestamp_,
232 static_cast<uint16_t>(dtmfDurationSamples), 232 dtmf_duration_samples,
233 !_dtmfEventFirstPacketSent) != 0) { 233 !dtmf_event_first_packet_sent_) != 0) {
234 return -1; 234 return -1;
235 } 235 }
236 _dtmfEventFirstPacketSent = true; 236 dtmf_event_first_packet_sent_ = true;
237 return 0; 237 return 0;
238 } 238 }
239 } 239 }
240 return 0; 240 return 0;
241 } 241 }
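The splitting above implements RFC 4733 section 2.5.2.3: the duration field of a telephone-event packet is only 16 bits, so at the fixed 8000 Hz event clock a single segment can describe at most 65535 samples (about 8.19 s), after which the code restarts the event on a fresh RTP timestamp and subtracts 0xffff from the remaining length. A small worked example, illustrative only:

#include <cstdint>
#include <cstdio>

int main() {
  constexpr int kDtmfFrequencyHz = 8000;            // event clock used above
  constexpr uint32_t kMaxSegmentSamples = 0xffff;   // 16-bit duration field
  // Longest single segment: 65535 / 8000 = ~8.19 s.
  std::printf("max segment: %.2f s\n",
              static_cast<double>(kMaxSegmentSamples) / kDtmfFrequencyHz);
  // A 20-second tone (160000 samples) is therefore sent as two full
  // 0xffff-sample segments plus a final 28930-sample segment.
  uint32_t tone_samples = 20 * kDtmfFrequencyHz;
  std::printf("%u full segments + %u samples\n",
              tone_samples / kMaxSegmentSamples,
              tone_samples % kMaxSegmentSamples);
}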
242 if (payloadSize == 0 || payloadData == NULL) { 242 if (payload_size == 0 || payload_data == NULL) {
243 if (frameType == kEmptyFrame) { 243 if (frame_type == kEmptyFrame) {
244 // we don't send empty audio RTP packets 244 // we don't send empty audio RTP packets
245 // no error since we use it to drive DTMF when we use VAD 245 // no error since we use it to drive DTMF when we use VAD
246 return 0; 246 return 0;
247 } 247 }
248 return -1; 248 return -1;
249 } 249 }
250 uint8_t dataBuffer[IP_PACKET_SIZE]; 250 uint8_t dataBuffer[IP_PACKET_SIZE];
251 bool markerBit = MarkerBit(frameType, payloadType); 251 bool marker_bit = MarkerBit(frame_type, payload_type);
252 252
253 int32_t rtpHeaderLength = 0; 253 int32_t rtpHeaderLength = 0;
254 uint16_t timestampOffset = 0; 254 uint16_t timestampOffset = 0;
255 255
256 if (red_payload_type >= 0 && fragmentation && !markerBit && 256 if (red_payload_type >= 0 && fragmentation && !marker_bit &&
257 fragmentation->fragmentationVectorSize > 1) { 257 fragmentation->fragmentationVectorSize > 1) {
258 // have we configured RED? use its payload type 258 // have we configured RED? use its payload type
259 // we need to get the current timestamp to calc the diff 259 // we need to get the current timestamp to calc the diff
260 uint32_t oldTimeStamp = _rtpSender->Timestamp(); 260 uint32_t oldTimeStamp = rtp_sender_->Timestamp();
danilchap 2016/06/15 20:04:21 old_timestamp
Sergey Ulanov 2016/06/15 20:53:24 Done.
261 rtpHeaderLength = _rtpSender->BuildRTPheader(dataBuffer, red_payload_type, 261 rtpHeaderLength = rtp_sender_->BuildRtpHeader(
262 markerBit, captureTimeStamp, 262 dataBuffer, red_payload_type, marker_bit, capture_timestamp,
263 _clock->TimeInMilliseconds()); 263 clock_->TimeInMilliseconds());
264 264
265 timestampOffset = uint16_t(_rtpSender->Timestamp() - oldTimeStamp); 265 timestampOffset = uint16_t(rtp_sender_->Timestamp() - oldTimeStamp);
266 } else { 266 } else {
267 rtpHeaderLength = _rtpSender->BuildRTPheader(dataBuffer, payloadType, 267 rtpHeaderLength = rtp_sender_->BuildRtpHeader(dataBuffer, payload_type,
268 markerBit, captureTimeStamp, 268 marker_bit, capture_timestamp,
269 _clock->TimeInMilliseconds()); 269 clock_->TimeInMilliseconds());
270 } 270 }
271 if (rtpHeaderLength <= 0) { 271 if (rtpHeaderLength <= 0) {
272 return -1; 272 return -1;
273 } 273 }
274 if (maxPayloadLength < (rtpHeaderLength + payloadSize)) { 274 if (max_payload_length < (rtpHeaderLength + payload_size)) {
275 // Too large payload buffer. 275 // Too large payload buffer.
276 return -1; 276 return -1;
277 } 277 }
278 if (red_payload_type >= 0 && // Have we configured RED? 278 if (red_payload_type >= 0 && // Have we configured RED?
279 fragmentation && fragmentation->fragmentationVectorSize > 1 && 279 fragmentation && fragmentation->fragmentationVectorSize > 1 &&
280 !markerBit) { 280 !marker_bit) {
281 if (timestampOffset <= 0x3fff) { 281 if (timestampOffset <= 0x3fff) {
282 if (fragmentation->fragmentationVectorSize != 2) { 282 if (fragmentation->fragmentationVectorSize != 2) {
283 // we only support 2 codecs when using RED 283 // we only support 2 codecs when using RED
284 return -1; 284 return -1;
285 } 285 }
286 // only 0x80 if we have multiple blocks 286 // only 0x80 if we have multiple blocks
287 dataBuffer[rtpHeaderLength++] = 287 dataBuffer[rtpHeaderLength++] =
288 0x80 + fragmentation->fragmentationPlType[1]; 288 0x80 + fragmentation->fragmentationPlType[1];
289 size_t blockLength = fragmentation->fragmentationLength[1]; 289 size_t blockLength = fragmentation->fragmentationLength[1];
290 290
291 // sanity blockLength 291 // sanity blockLength
292 if (blockLength > 0x3ff) { // block length 10 bits 1023 bytes 292 if (blockLength > 0x3ff) { // block length 10 bits 1023 bytes
293 return -1; 293 return -1;
294 } 294 }
295 uint32_t REDheader = (timestampOffset << 10) + blockLength; 295 uint32_t REDheader = (timestampOffset << 10) + blockLength;
296 ByteWriter<uint32_t>::WriteBigEndian(dataBuffer + rtpHeaderLength, 296 ByteWriter<uint32_t>::WriteBigEndian(dataBuffer + rtpHeaderLength,
297 REDheader); 297 REDheader);
298 rtpHeaderLength += 3; 298 rtpHeaderLength += 3;
299 299
300 dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0]; 300 dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0];
301 // copy the RED data 301 // copy the RED data
302 memcpy(dataBuffer + rtpHeaderLength, 302 memcpy(dataBuffer + rtpHeaderLength,
303 payloadData + fragmentation->fragmentationOffset[1], 303 payload_data + fragmentation->fragmentationOffset[1],
304 fragmentation->fragmentationLength[1]); 304 fragmentation->fragmentationLength[1]);
305 305
306 // copy the normal data 306 // copy the normal data
307 memcpy( 307 memcpy(
308 dataBuffer + rtpHeaderLength + fragmentation->fragmentationLength[1], 308 dataBuffer + rtpHeaderLength + fragmentation->fragmentationLength[1],
309 payloadData + fragmentation->fragmentationOffset[0], 309 payload_data + fragmentation->fragmentationOffset[0],
310 fragmentation->fragmentationLength[0]); 310 fragmentation->fragmentationLength[0]);
311 311
312 payloadSize = fragmentation->fragmentationLength[0] + 312 payload_size = fragmentation->fragmentationLength[0] +
313 fragmentation->fragmentationLength[1]; 313 fragmentation->fragmentationLength[1];
314 } else { 314 } else {
315 // silence for too long send only new data 315 // silence for too long send only new data
316 dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0]; 316 dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0];
317 memcpy(dataBuffer + rtpHeaderLength, 317 memcpy(dataBuffer + rtpHeaderLength,
318 payloadData + fragmentation->fragmentationOffset[0], 318 payload_data + fragmentation->fragmentationOffset[0],
319 fragmentation->fragmentationLength[0]); 319 fragmentation->fragmentationLength[0]);
320 320
321 payloadSize = fragmentation->fragmentationLength[0]; 321 payload_size = fragmentation->fragmentationLength[0];
322 } 322 }
323 } else { 323 } else {
324 if (fragmentation && fragmentation->fragmentationVectorSize > 0) { 324 if (fragmentation && fragmentation->fragmentationVectorSize > 0) {
325 // use the fragment info if we have one 325 // use the fragment info if we have one
326 dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0]; 326 dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0];
327 memcpy(dataBuffer + rtpHeaderLength, 327 memcpy(dataBuffer + rtpHeaderLength,
328 payloadData + fragmentation->fragmentationOffset[0], 328 payload_data + fragmentation->fragmentationOffset[0],
329 fragmentation->fragmentationLength[0]); 329 fragmentation->fragmentationLength[0]);
330 330
331 payloadSize = fragmentation->fragmentationLength[0]; 331 payload_size = fragmentation->fragmentationLength[0];
332 } else { 332 } else {
333 memcpy(dataBuffer + rtpHeaderLength, payloadData, payloadSize); 333 memcpy(dataBuffer + rtpHeaderLength, payload_data, payload_size);
334 } 334 }
335 } 335 }
336 336
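The RED branch above hand-builds the RFC 2198 headers: a 4-byte header for the redundant (older) block carrying an F=1 bit, the block payload type, a 14-bit timestamp offset and a 10-bit block length (hence the 0x3fff and 0x3ff bounds checked above), followed by a single F=0 byte with the primary payload type. A sketch of that layout with hypothetical helper names, illustrative rather than the CL's code:

#include <cstddef>
#include <cstdint>

// Writes the RFC 2198 block headers for one redundant block followed by the
// primary block, returning the number of header bytes written (5).
size_t WriteRedHeaders(uint8_t* out,
                       uint8_t redundant_payload_type,
                       uint16_t timestamp_offset,  // must fit in 14 bits
                       uint16_t block_length,      // must fit in 10 bits
                       uint8_t primary_payload_type) {
  out[0] = 0x80 | (redundant_payload_type & 0x7f);       // F=1 | block PT
  const uint32_t bits = (static_cast<uint32_t>(timestamp_offset) << 10) |
                        (block_length & 0x3ff);           // 14 + 10 = 24 bits
  out[1] = static_cast<uint8_t>(bits >> 16);
  out[2] = static_cast<uint8_t>(bits >> 8);
  out[3] = static_cast<uint8_t>(bits);
  out[4] = primary_payload_type & 0x7f;                   // F=0 | primary PT
  return 5;
}

The redundant block's data then follows immediately, with the primary encoding after it, which matches the two memcpy calls in the branch above.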
337 { 337 {
338 rtc::CritScope cs(&_sendAudioCritsect); 338 rtc::CritScope cs(&send_audio_critsect_);
339 _lastPayloadType = payloadType; 339 last_payload_type_ = payload_type;
340 } 340 }
341 // Update audio level extension, if included. 341 // Update audio level extension, if included.
342 size_t packetSize = payloadSize + rtpHeaderLength; 342 size_t packetSize = payload_size + rtpHeaderLength;
343 RtpUtility::RtpHeaderParser rtp_parser(dataBuffer, packetSize); 343 RtpUtility::RtpHeaderParser rtp_parser(dataBuffer, packetSize);
344 RTPHeader rtp_header; 344 RTPHeader rtp_header;
345 rtp_parser.Parse(&rtp_header); 345 rtp_parser.Parse(&rtp_header);
346 _rtpSender->UpdateAudioLevel(dataBuffer, packetSize, rtp_header, 346 rtp_sender_->UpdateAudioLevel(dataBuffer, packetSize, rtp_header,
347 (frameType == kAudioFrameSpeech), 347 (frame_type == kAudioFrameSpeech),
348 audio_level_dbov); 348 audio_level_dbov);
349 TRACE_EVENT_ASYNC_END2("webrtc", "Audio", captureTimeStamp, "timestamp", 349 TRACE_EVENT_ASYNC_END2("webrtc", "Audio", capture_timestamp, "timestamp",
350 _rtpSender->Timestamp(), "seqnum", 350 rtp_sender_->Timestamp(), "seqnum",
351 _rtpSender->SequenceNumber()); 351 rtp_sender_->SequenceNumber());
352 int32_t send_result = _rtpSender->SendToNetwork( 352 int32_t send_result = rtp_sender_->SendToNetwork(
353 dataBuffer, payloadSize, rtpHeaderLength, 353 dataBuffer, payload_size, rtpHeaderLength,
354 rtc::TimeMillis(), kAllowRetransmission, 354 rtc::TimeMillis(), kAllowRetransmission,
355 RtpPacketSender::kHighPriority); 355 RtpPacketSender::kHighPriority);
356 if (first_packet_sent_()) { 356 if (first_packet_sent_()) {
357 LOG(LS_INFO) << "First audio RTP packet sent to pacer"; 357 LOG(LS_INFO) << "First audio RTP packet sent to pacer";
358 } 358 }
359 return send_result; 359 return send_result;
360 } 360 }
361 361
362 // Audio level magnitude and voice activity flag are set for each RTP packet 362 // Audio level magnitude and voice activity flag are set for each RTP packet
363 int32_t RTPSenderAudio::SetAudioLevel(uint8_t level_dBov) { 363 int32_t RTPSenderAudio::SetAudioLevel(uint8_t level_dbov) {
364 if (level_dBov > 127) { 364 if (level_dbov > 127) {
365 return -1; 365 return -1;
366 } 366 }
367 rtc::CritScope cs(&_sendAudioCritsect); 367 rtc::CritScope cs(&send_audio_critsect_);
368 _audioLevel_dBov = level_dBov; 368 audio_level_dbov_ = level_dbov;
369 return 0; 369 return 0;
370 } 370 }
371 371
372 // Set payload type for Redundant Audio Data RFC 2198 372 // Set payload type for Redundant Audio Data RFC 2198
373 int32_t RTPSenderAudio::SetRED(int8_t payloadType) { 373 int32_t RTPSenderAudio::SetRED(int8_t payload_type) {
374 if (payloadType < -1) { 374 if (payload_type < -1) {
375 return -1; 375 return -1;
376 } 376 }
377 rtc::CritScope cs(&_sendAudioCritsect); 377 rtc::CritScope cs(&send_audio_critsect_);
378 _REDPayloadType = payloadType; 378 red_payload_type_ = payload_type;
379 return 0; 379 return 0;
380 } 380 }
381 381
382 // Get payload type for Redundant Audio Data RFC 2198 382 // Get payload type for Redundant Audio Data RFC 2198
383 int32_t RTPSenderAudio::RED(int8_t* payloadType) const { 383 int32_t RTPSenderAudio::RED(int8_t* payload_type) const {
384 rtc::CritScope cs(&_sendAudioCritsect); 384 rtc::CritScope cs(&send_audio_critsect_);
385 if (_REDPayloadType == -1) { 385 if (red_payload_type_ == -1) {
386 // not configured 386 // not configured
387 return -1; 387 return -1;
388 } 388 }
389 *payloadType = _REDPayloadType; 389 *payload_type = red_payload_type_;
390 return 0; 390 return 0;
391 } 391 }
392 392
393 // Send a TelephoneEvent tone using RFC 2833 (4733) 393 // Send a TelephoneEvent tone using RFC 2833 (4733)
394 int32_t RTPSenderAudio::SendTelephoneEvent(uint8_t key, 394 int32_t RTPSenderAudio::SendTelephoneEvent(uint8_t key,
395 uint16_t time_ms, 395 uint16_t time_ms,
396 uint8_t level) { 396 uint8_t level) {
397 { 397 {
398 rtc::CritScope lock(&_sendAudioCritsect); 398 rtc::CritScope lock(&send_audio_critsect_);
399 if (_dtmfPayloadType < 0) { 399 if (dtmf_payload_type_ < 0) {
400 // TelephoneEvent payloadtype not configured 400 // TelephoneEvent payloadtype not configured
401 return -1; 401 return -1;
402 } 402 }
403 } 403 }
404 return AddDTMF(key, time_ms, level); 404 return AddDTMF(key, time_ms, level);
405 } 405 }
406 406
407 int32_t RTPSenderAudio::SendTelephoneEventPacket(bool ended, 407 int32_t RTPSenderAudio::SendTelephoneEventPacket(bool ended,
408 int8_t dtmf_payload_type, 408 int8_t dtmf_payload_type,
409 uint32_t dtmfTimeStamp, 409 uint32_t dtmf_timestamp,
410 uint16_t duration, 410 uint16_t duration,
411 bool markerBit) { 411 bool marker_bit) {
412 uint8_t dtmfbuffer[IP_PACKET_SIZE]; 412 uint8_t dtmfbuffer[IP_PACKET_SIZE];
413 uint8_t sendCount = 1; 413 uint8_t sendCount = 1;
414 int32_t retVal = 0; 414 int32_t retVal = 0;
415 415
416 if (ended) { 416 if (ended) {
417 // resend last packet in an event 3 times 417 // resend last packet in an event 3 times
418 sendCount = 3; 418 sendCount = 3;
419 } 419 }
420 do { 420 do {
421 // Send DTMF data 421 // Send DTMF data
422 _rtpSender->BuildRTPheader(dtmfbuffer, dtmf_payload_type, markerBit, 422 rtp_sender_->BuildRtpHeader(dtmfbuffer, dtmf_payload_type, marker_bit,
423 dtmfTimeStamp, _clock->TimeInMilliseconds()); 423 dtmf_timestamp, clock_->TimeInMilliseconds());
424 424
425 // reset CSRC and X bit 425 // reset CSRC and X bit
426 dtmfbuffer[0] &= 0xe0; 426 dtmfbuffer[0] &= 0xe0;
427 427
428 // Create DTMF data 428 // Create DTMF data
429 /* From RFC 2833: 429 /* From RFC 2833:
430 430
431 0 1 2 3 431 0 1 2 3
432 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 432 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
433 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ 433 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
434 | event |E|R| volume | duration | 434 | event |E|R| volume | duration |
435 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ 435 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
436 */ 436 */
437 // R bit always cleared 437 // R bit always cleared
438 uint8_t R = 0x00; 438 uint8_t R = 0x00;
439 uint8_t volume = _dtmfLevel; 439 uint8_t volume = dtmf_level_;
440 440
441 // First packet un-ended 441 // First packet un-ended
442 uint8_t E = ended ? 0x80 : 0x00; 442 uint8_t E = ended ? 0x80 : 0x00;
443 443
444 // First byte is Event number, equals key number 444 // First byte is Event number, equals key number
445 dtmfbuffer[12] = _dtmfKey; 445 dtmfbuffer[12] = dtmf_key_;
446 dtmfbuffer[13] = E | R | volume; 446 dtmfbuffer[13] = E | R | volume;
447 ByteWriter<uint16_t>::WriteBigEndian(dtmfbuffer + 14, duration); 447 ByteWriter<uint16_t>::WriteBigEndian(dtmfbuffer + 14, duration);
448 448
449 TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), 449 TRACE_EVENT_INSTANT2(
450 "Audio::SendTelephoneEvent", "timestamp", 450 TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"), "Audio::SendTelephoneEvent",
451 dtmfTimeStamp, "seqnum", _rtpSender->SequenceNumber()); 451 "timestamp", dtmf_timestamp, "seqnum", rtp_sender_->SequenceNumber());
452 retVal = _rtpSender->SendToNetwork( 452 retVal = rtp_sender_->SendToNetwork(
453 dtmfbuffer, 4, 12, rtc::TimeMillis(), 453 dtmfbuffer, 4, 12, rtc::TimeMillis(),
454 kAllowRetransmission, RtpPacketSender::kHighPriority); 454 kAllowRetransmission, RtpPacketSender::kHighPriority);
455 sendCount--; 455 sendCount--;
456 } while (sendCount > 0 && retVal == 0); 456 } while (sendCount > 0 && retVal == 0);
457 457
458 return retVal; 458 return retVal;
459 } 459 }
460 } // namespace webrtc 460 } // namespace webrtc
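For reference, the 4-byte RFC 2833/4733 payload that SendTelephoneEventPacket() writes after the 12-byte RTP header (event number, E/R bits plus volume, and a big-endian duration, as in the diagram quoted in the source) can be sketched as follows; the names are hypothetical and not part of the CL:

#include <cstdint>

// Packs one telephone-event payload per the layout quoted above.
void PackTelephoneEvent(uint8_t payload[4],
                        uint8_t event,             // DTMF key number
                        bool end_of_event,         // E bit
                        uint8_t volume,            // 0-63, in -dBm0
                        uint16_t duration_samples) {
  payload[0] = event;
  payload[1] = static_cast<uint8_t>((end_of_event ? 0x80 : 0x00) |
                                    (volume & 0x3f));         // R bit stays 0
  payload[2] = static_cast<uint8_t>(duration_samples >> 8);   // big-endian
  payload[3] = static_cast<uint8_t>(duration_samples & 0xff);
}

As the loop above shows, the sender also transmits the final packet of an event three times, the usual robustness measure RFC 4733 suggests for the end-of-event packet.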