Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 127 matching lines...) | |
| 138 if (frame_type == kAudioFrameCN) { | 138 if (frame_type == kAudioFrameCN) { |
| 139 inband_vad_active_ = true; | 139 inband_vad_active_ = true; |
| 140 } else if (inband_vad_active_) { | 140 } else if (inband_vad_active_) { |
| 141 inband_vad_active_ = false; | 141 inband_vad_active_ = false; |
| 142 marker_bit = true; | 142 marker_bit = true; |
| 143 } | 143 } |
| 144 return marker_bit; | 144 return marker_bit; |
| 145 } | 145 } |
| 146 | 146 |
| 147 bool RTPSenderAudio::SendAudio(FrameType frame_type, | 147 bool RTPSenderAudio::SendAudio(FrameType frame_type, |
| 148 int8_t payload_type, | 148 int8_t payload_type, |
| 149 uint32_t capture_timestamp, | 149 uint32_t rtp_timestamp, |
| 150 const uint8_t* payload_data, | 150 const uint8_t* payload_data, |
| 151 size_t data_size, | 151 size_t data_size, |
| 152 const RTPFragmentationHeader* fragmentation) { | 152 const RTPFragmentationHeader* fragmentation) { |
| 153 // TODO(pwestin) Breakup function in smaller functions. | 153 // TODO(pwestin) Breakup function in smaller functions. |
| 154 size_t payload_size = data_size; | 154 size_t payload_size = data_size; |
| 155 size_t max_payload_length = rtp_sender_->MaxPayloadLength(); | 155 size_t max_payload_length = rtp_sender_->MaxPayloadLength(); |
| 156 uint16_t dtmf_length_ms = 0; | 156 uint16_t dtmf_length_ms = 0; |
| 157 uint8_t key = 0; | 157 uint8_t key = 0; |
| 158 uint8_t audio_level_dbov; | 158 uint8_t audio_level_dbov; |
| 159 int8_t dtmf_payload_type; | 159 int8_t dtmf_payload_type; |
| 160 uint16_t packet_size_samples; | 160 uint16_t packet_size_samples; |
| 161 { | 161 { |
| 162 rtc::CritScope cs(&send_audio_critsect_); | 162 rtc::CritScope cs(&send_audio_critsect_); |
| 163 audio_level_dbov = audio_level_dbov_; | 163 audio_level_dbov = audio_level_dbov_; |
| 164 dtmf_payload_type = dtmf_payload_type_; | 164 dtmf_payload_type = dtmf_payload_type_; |
| 165 packet_size_samples = packet_size_samples_; | 165 packet_size_samples = packet_size_samples_; |
| 166 } | 166 } |
| 167 | 167 |
| 168 // Check if we have pending DTMFs to send | 168 // Check if we have pending DTMFs to send |
| 169 if (!dtmf_event_is_on_ && PendingDTMF()) { | 169 if (!dtmf_event_is_on_ && PendingDTMF()) { |
| 170 int64_t delaySinceLastDTMF = | 170 int64_t delaySinceLastDTMF = |
| 171 clock_->TimeInMilliseconds() - dtmf_time_last_sent_; | 171 clock_->TimeInMilliseconds() - dtmf_time_last_sent_; |
| 172 | 172 |
| 173 if (delaySinceLastDTMF > 100) { | 173 if (delaySinceLastDTMF > 100) { |
| 174 // New tone to play | 174 // New tone to play |
| 175 dtmf_timestamp_ = capture_timestamp; | 175 dtmf_timestamp_ = rtp_timestamp; |
| 176 if (NextDTMF(&key, &dtmf_length_ms, &dtmf_level_) >= 0) { | 176 if (NextDTMF(&key, &dtmf_length_ms, &dtmf_level_) >= 0) { |
| 177 dtmf_event_first_packet_sent_ = false; | 177 dtmf_event_first_packet_sent_ = false; |
| 178 dtmf_key_ = key; | 178 dtmf_key_ = key; |
| 179 dtmf_length_samples_ = (kDtmfFrequencyHz / 1000) * dtmf_length_ms; | 179 dtmf_length_samples_ = (kDtmfFrequencyHz / 1000) * dtmf_length_ms; |
| 180 dtmf_event_is_on_ = true; | 180 dtmf_event_is_on_ = true; |
| 181 } | 181 } |
| 182 } | 182 } |
| 183 } | 183 } |
| 184 | 184 |
| 185 // A source MAY send events and coded audio packets for the same time | 185 // A source MAY send events and coded audio packets for the same time |
| 186 // but we don't support it | 186 // but we don't support it |
| 187 if (dtmf_event_is_on_) { | 187 if (dtmf_event_is_on_) { |
| 188 if (frame_type == kEmptyFrame) { | 188 if (frame_type == kEmptyFrame) { |
| 189 // kEmptyFrame is used to drive the DTMF when in CN mode | 189 // kEmptyFrame is used to drive the DTMF when in CN mode |
| 190 // it can be triggered more frequently than we want to send the | 190 // it can be triggered more frequently than we want to send the |
| 191 // DTMF packets. | 191 // DTMF packets. |
| 192 if (packet_size_samples > | 192 if (packet_size_samples > (rtp_timestamp - dtmf_timestamp_last_sent_)) { |
| 193 (capture_timestamp - dtmf_timestamp_last_sent_)) { | |
| 194 // not time to send yet | 193 // not time to send yet |
| 195 return true; | 194 return true; |
| 196 } | 195 } |
| 197 } | 196 } |
| 198 dtmf_timestamp_last_sent_ = capture_timestamp; | 197 dtmf_timestamp_last_sent_ = rtp_timestamp; |
| 199 uint32_t dtmf_duration_samples = capture_timestamp - dtmf_timestamp_; | 198 uint32_t dtmf_duration_samples = rtp_timestamp - dtmf_timestamp_; |
| 200 bool ended = false; | 199 bool ended = false; |
| 201 bool send = true; | 200 bool send = true; |
| 202 | 201 |
| 203 if (dtmf_length_samples_ > dtmf_duration_samples) { | 202 if (dtmf_length_samples_ > dtmf_duration_samples) { |
| 204 if (dtmf_duration_samples <= 0) { | 203 if (dtmf_duration_samples <= 0) { |
| 205 // Skip send packet at start, since we shouldn't use duration 0 | 204 // Skip send packet at start, since we shouldn't use duration 0 |
| 206 send = false; | 205 send = false; |
| 207 } | 206 } |
| 208 } else { | 207 } else { |
| 209 ended = true; | 208 ended = true; |
| 210 dtmf_event_is_on_ = false; | 209 dtmf_event_is_on_ = false; |
| 211 dtmf_time_last_sent_ = clock_->TimeInMilliseconds(); | 210 dtmf_time_last_sent_ = clock_->TimeInMilliseconds(); |
| 212 } | 211 } |
| 213 if (send) { | 212 if (send) { |
| 214 if (dtmf_duration_samples > 0xffff) { | 213 if (dtmf_duration_samples > 0xffff) { |
| 215 // RFC 4733 2.5.2.3 Long-Duration Events | 214 // RFC 4733 2.5.2.3 Long-Duration Events |
| 216 SendTelephoneEventPacket(ended, dtmf_payload_type, dtmf_timestamp_, | 215 SendTelephoneEventPacket(ended, dtmf_payload_type, dtmf_timestamp_, |
| 217 static_cast<uint16_t>(0xffff), false); | 216 static_cast<uint16_t>(0xffff), false); |
| 218 | 217 |
| 219 // set new timestap for this segment | 218 // set new timestap for this segment |
| 220 dtmf_timestamp_ = capture_timestamp; | 219 dtmf_timestamp_ = rtp_timestamp; |
| 221 dtmf_duration_samples -= 0xffff; | 220 dtmf_duration_samples -= 0xffff; |
| 222 dtmf_length_samples_ -= 0xffff; | 221 dtmf_length_samples_ -= 0xffff; |
| 223 | 222 |
| 224 return SendTelephoneEventPacket( | 223 return SendTelephoneEventPacket( |
| 225 ended, dtmf_payload_type, dtmf_timestamp_, | 224 ended, dtmf_payload_type, dtmf_timestamp_, |
| 226 static_cast<uint16_t>(dtmf_duration_samples), false); | 225 static_cast<uint16_t>(dtmf_duration_samples), false); |
| 227 } else { | 226 } else { |
| 228 if (!SendTelephoneEventPacket(ended, dtmf_payload_type, dtmf_timestamp_, | 227 if (!SendTelephoneEventPacket(ended, dtmf_payload_type, dtmf_timestamp_, |
| 229 dtmf_duration_samples, | 228 dtmf_duration_samples, |
| 230 !dtmf_event_first_packet_sent_)) { | 229 !dtmf_event_first_packet_sent_)) { |
| (...skipping 11 matching lines...) | |
| 242 // no error since we use it to drive DTMF when we use VAD | 241 // no error since we use it to drive DTMF when we use VAD |
| 243 return true; | 242 return true; |
| 244 } | 243 } |
| 245 return false; | 244 return false; |
| 246 } | 245 } |
| 247 uint8_t data_buffer[IP_PACKET_SIZE]; | 246 uint8_t data_buffer[IP_PACKET_SIZE]; |
| 248 bool marker_bit = MarkerBit(frame_type, payload_type); | 247 bool marker_bit = MarkerBit(frame_type, payload_type); |
| 249 | 248 |
| 250 int32_t rtpHeaderLength = 0; | 249 int32_t rtpHeaderLength = 0; |
| 251 | 250 |
| 252 rtpHeaderLength = rtp_sender_->BuildRtpHeader(data_buffer, payload_type, | 251 rtpHeaderLength = |
| 253 marker_bit, capture_timestamp, | 252 rtp_sender_->BuildRtpHeader(data_buffer, payload_type, marker_bit, |
| 254 clock_->TimeInMilliseconds()); | 253 rtp_timestamp, clock_->TimeInMilliseconds()); |
| 255 if (rtpHeaderLength <= 0) { | 254 if (rtpHeaderLength <= 0) { |
| 256 return false; | 255 return false; |
| 257 } | 256 } |
| 258 if (max_payload_length < (rtpHeaderLength + payload_size)) { | 257 if (max_payload_length < (rtpHeaderLength + payload_size)) { |
| 259 // Too large payload buffer. | 258 // Too large payload buffer. |
| 260 return false; | 259 return false; |
| 261 } | 260 } |
| 262 if (fragmentation && fragmentation->fragmentationVectorSize > 0) { | 261 if (fragmentation && fragmentation->fragmentationVectorSize > 0) { |
| 263 // use the fragment info if we have one | 262 // use the fragment info if we have one |
| 264 data_buffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0]; | 263 data_buffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0]; |
| (...skipping 11 matching lines...) | |
| 276 last_payload_type_ = payload_type; | 275 last_payload_type_ = payload_type; |
| 277 } | 276 } |
| 278 // Update audio level extension, if included. | 277 // Update audio level extension, if included. |
| 279 size_t packetSize = payload_size + rtpHeaderLength; | 278 size_t packetSize = payload_size + rtpHeaderLength; |
| 280 RtpUtility::RtpHeaderParser rtp_parser(data_buffer, packetSize); | 279 RtpUtility::RtpHeaderParser rtp_parser(data_buffer, packetSize); |
| 281 RTPHeader rtp_header; | 280 RTPHeader rtp_header; |
| 282 rtp_parser.Parse(&rtp_header); | 281 rtp_parser.Parse(&rtp_header); |
| 283 rtp_sender_->UpdateAudioLevel(data_buffer, packetSize, rtp_header, | 282 rtp_sender_->UpdateAudioLevel(data_buffer, packetSize, rtp_header, |
| 284 (frame_type == kAudioFrameSpeech), | 283 (frame_type == kAudioFrameSpeech), |
| 285 audio_level_dbov); | 284 audio_level_dbov); |
| 286 TRACE_EVENT_ASYNC_END2("webrtc", "Audio", capture_timestamp, "timestamp", | 285 TRACE_EVENT_ASYNC_END2("webrtc", "Audio", rtp_timestamp, "timestamp", |
| 287 rtp_sender_->Timestamp(), "seqnum", | 286 rtp_timestamp, "seqnum", |

Sergey Ulanov 2016/08/19 18:19:13
Is it necessary to pass the same timestamp twice here?

danilchap 2016/08/20 11:50:47
They have different meaning: first timestamp is a…

| 288 rtp_sender_->SequenceNumber()); | 287 rtp_sender_->SequenceNumber()); |
| 289 bool send_result = rtp_sender_->SendToNetwork( | 288 bool send_result = rtp_sender_->SendToNetwork( |
| 290 data_buffer, payload_size, rtpHeaderLength, rtc::TimeMillis(), | 289 data_buffer, payload_size, rtpHeaderLength, rtc::TimeMillis(), |
| 291 kAllowRetransmission, RtpPacketSender::kHighPriority); | 290 kAllowRetransmission, RtpPacketSender::kHighPriority); |
| 292 if (first_packet_sent_()) { | 291 if (first_packet_sent_()) { |
| 293 LOG(LS_INFO) << "First audio RTP packet sent to pacer"; | 292 LOG(LS_INFO) << "First audio RTP packet sent to pacer"; |
| 294 } | 293 } |
| 295 return send_result; | 294 return send_result; |
| 296 } | 295 } |
| 297 | 296 |
| (...skipping 71 matching lines...) | |
| 369 "timestamp", dtmf_timestamp, "seqnum", rtp_sender_->SequenceNumber()); | 368 "timestamp", dtmf_timestamp, "seqnum", rtp_sender_->SequenceNumber()); |
| 370 result = rtp_sender_->SendToNetwork(dtmfbuffer, 4, 12, rtc::TimeMillis(), | 369 result = rtp_sender_->SendToNetwork(dtmfbuffer, 4, 12, rtc::TimeMillis(), |
| 371 kAllowRetransmission, | 370 kAllowRetransmission, |
| 372 RtpPacketSender::kHighPriority); | 371 RtpPacketSender::kHighPriority); |
| 373 send_count--; | 372 send_count--; |
| 374 } while (send_count > 0 && result); | 373 } while (send_count > 0 && result); |
| 375 | 374 |
| 376 return result; | 375 return result; |
| 377 } | 376 } |
| 378 } // namespace webrtc | 377 } // namespace webrtc |
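A note on the exchange above about passing `rtp_timestamp` twice to `TRACE_EVENT_ASYNC_END2`: in the Chromium/WebRTC trace macros, the third argument is the async-event id that pairs this END with an earlier BEGIN of the same category and name, while the name/value pairs that follow are only recorded as event arguments. The fragment below is an illustrative sketch, not part of this CL; the `TRACE_EVENT_ASYNC_BEGIN0` call site is an assumption about where the matching event is opened earlier in the audio send path.

```cpp
// Illustrative fragment only (not part of this CL), in the spirit of
// rtp_sender_audio.cc. It shows why the same value can legitimately appear
// twice in TRACE_EVENT_ASYNC_END2: once as the async-event id and once as a
// plain logged argument.
#include "webrtc/base/trace_event.h"

// Hypothetical earlier point in the audio send path: open the async "Audio"
// event, using the RTP timestamp as the id.
TRACE_EVENT_ASYNC_BEGIN0("webrtc", "Audio", rtp_timestamp);

// In RTPSenderAudio::SendAudio (as in the patch above): close the event.
TRACE_EVENT_ASYNC_END2("webrtc", "Audio", rtp_timestamp,          // id, must match the BEGIN
                       "timestamp", rtp_timestamp,                // arg1: value shown in the trace
                       "seqnum", rtp_sender_->SequenceNumber());  // arg2: value shown in the trace
```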