OLD | NEW |
1 /* | 1 /* |
2 * libjingle | 2 * libjingle |
3 * Copyright 2012 Google Inc. | 3 * Copyright 2012 Google Inc. |
4 * | 4 * |
5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
7 * | 7 * |
8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
11 * this list of conditions and the following disclaimer in the documentation | 11 * this list of conditions and the following disclaimer in the documentation |
12 * and/or other materials provided with the distribution. | 12 * and/or other materials provided with the distribution. |
13 * 3. The name of the author may not be used to endorse or promote products | 13 * 3. The name of the author may not be used to endorse or promote products |
14 * derived from this software without specific prior written permission. | 14 * derived from this software without specific prior written permission. |
15 * | 15 * |
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED | 16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED |
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF | 17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF |
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO | 18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO |
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | 19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | 20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; | 21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; |
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, | 22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, |
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR | 23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR |
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF | 24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF |
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
26 */ | 26 */ |
27 | 27 |
28 #include "talk/app/webrtc/mediastreamsignaling.h" | 28 #include "talk/app/webrtc/mediastreamsignaling.h" |
29 | 29 |
30 // TODO(deadbeef): Remove this file once Chrome build files don't reference it. | 30 #include <vector> |
| 31 |
| 32 #include "talk/app/webrtc/audiotrack.h" |
| 33 #include "talk/app/webrtc/mediaconstraintsinterface.h" |
| 34 #include "talk/app/webrtc/mediastreamproxy.h" |
| 35 #include "talk/app/webrtc/mediastreamtrackproxy.h" |
| 36 #include "talk/app/webrtc/remoteaudiosource.h" |
| 37 #include "talk/app/webrtc/remotevideocapturer.h" |
| 38 #include "talk/app/webrtc/sctputils.h" |
| 39 #include "talk/app/webrtc/videosource.h" |
| 40 #include "talk/app/webrtc/videotrack.h" |
| 41 #include "talk/media/sctp/sctpdataengine.h" |
| 42 #include "webrtc/base/bytebuffer.h" |
| 43 #include "webrtc/base/stringutils.h" |
| 44 |
| 45 static const char kDefaultStreamLabel[] = "default"; |
| 46 static const char kDefaultAudioTrackLabel[] = "defaulta0"; |
| 47 static const char kDefaultVideoTrackLabel[] = "defaultv0"; |
| 48 |
| 49 namespace webrtc { |
| 50 |
| 51 using rtc::scoped_ptr; |
| 52 using rtc::scoped_refptr; |
| 53 |
| 54 static bool ParseConstraintsForAnswer( |
| 55 const MediaConstraintsInterface* constraints, |
| 56 cricket::MediaSessionOptions* options) { |
| 57 bool value = false; |
| 58 size_t mandatory_constraints_satisfied = 0; |
| 59 |
| 60 // kOfferToReceiveAudio defaults to true according to spec. |
| 61 if (!FindConstraint(constraints, |
| 62 MediaConstraintsInterface::kOfferToReceiveAudio, |
| 63 &value, &mandatory_constraints_satisfied) || value) { |
| 64 options->recv_audio = true; |
| 65 } |
| 66 |
| 67 // kOfferToReceiveVideo defaults to false according to spec. But |
| 68 // if it is an answer and video is offered, we should still accept video |
| 69 // per default. |
| 70 value = false; |
| 71 if (!FindConstraint(constraints, |
| 72 MediaConstraintsInterface::kOfferToReceiveVideo, |
| 73 &value, &mandatory_constraints_satisfied) || value) { |
| 74 options->recv_video = true; |
| 75 } |
| 76 |
| 77 if (FindConstraint(constraints, |
| 78 MediaConstraintsInterface::kVoiceActivityDetection, |
| 79 &value, &mandatory_constraints_satisfied)) { |
| 80 options->vad_enabled = value; |
| 81 } |
| 82 |
| 83 if (FindConstraint(constraints, |
| 84 MediaConstraintsInterface::kUseRtpMux, |
| 85 &value, &mandatory_constraints_satisfied)) { |
| 86 options->bundle_enabled = value; |
| 87 } else { |
| 88 // kUseRtpMux defaults to true according to spec. |
| 89 options->bundle_enabled = true; |
| 90 } |
| 91 if (FindConstraint(constraints, |
| 92 MediaConstraintsInterface::kIceRestart, |
| 93 &value, &mandatory_constraints_satisfied)) { |
| 94 options->transport_options.ice_restart = value; |
| 95 } else { |
| 96 // kIceRestart defaults to false according to spec. |
| 97 options->transport_options.ice_restart = false; |
| 98 } |
| 99 |
| 100 if (!constraints) { |
| 101 return true; |
| 102 } |
| 103 return mandatory_constraints_satisfied == constraints->GetMandatory().size(); |
| 104 } |
| 105 |
| 106 // Returns true if at least one media content is present and |
| 107 // |options.bundle_enabled| is true. |
| 108 // Bundle will be enabled by default if at least one media content is present |
| 109 // and the constraint kUseRtpMux has not disabled bundle. |
| 110 static bool EvaluateNeedForBundle(const cricket::MediaSessionOptions& options) { |
| 111 return options.bundle_enabled && |
| 112 (options.has_audio() || options.has_video() || options.has_data()); |
| 113 } |
| 114 |
| 115 static bool MediaContentDirectionHasSend(cricket::MediaContentDirection dir) { |
| 116 return dir == cricket::MD_SENDONLY || dir == cricket::MD_SENDRECV; |
| 117 } |
| 118 |
| 119 static bool IsValidOfferToReceiveMedia(int value) { |
| 120 typedef PeerConnectionInterface::RTCOfferAnswerOptions Options; |
| 121 return (value >= Options::kUndefined) && |
| 122 (value <= Options::kMaxOfferToReceiveMedia); |
| 123 } |
| 124 |
| 125 // Add the stream and RTP data channel info to |session_options|. |
| 126 static void SetStreams( |
| 127 cricket::MediaSessionOptions* session_options, |
| 128 rtc::scoped_refptr<StreamCollection> streams, |
| 129 const MediaStreamSignaling::RtpDataChannels& rtp_data_channels) { |
| 130 session_options->streams.clear(); |
| 131 if (streams != NULL) { |
| 132 for (size_t i = 0; i < streams->count(); ++i) { |
| 133 MediaStreamInterface* stream = streams->at(i); |
| 134 |
| 135 AudioTrackVector audio_tracks(stream->GetAudioTracks()); |
| 136 |
| 137 // For each audio track in the stream, add it to the MediaSessionOptions. |
| 138 for (size_t j = 0; j < audio_tracks.size(); ++j) { |
| 139 scoped_refptr<MediaStreamTrackInterface> track(audio_tracks[j]); |
| 140 session_options->AddSendStream( |
| 141 cricket::MEDIA_TYPE_AUDIO, track->id(), stream->label()); |
| 142 } |
| 143 |
| 144 VideoTrackVector video_tracks(stream->GetVideoTracks()); |
| 145 |
| 146 // For each video track in the stream, add it to the MediaSessionOptions. |
| 147 for (size_t j = 0; j < video_tracks.size(); ++j) { |
| 148 scoped_refptr<MediaStreamTrackInterface> track(video_tracks[j]); |
| 149 session_options->AddSendStream( |
| 150 cricket::MEDIA_TYPE_VIDEO, track->id(), stream->label()); |
| 151 } |
| 152 } |
| 153 } |
| 154 |
| 155 // Check for data channels. |
| 156 MediaStreamSignaling::RtpDataChannels::const_iterator data_channel_it = |
| 157 rtp_data_channels.begin(); |
| 158 for (; data_channel_it != rtp_data_channels.end(); ++data_channel_it) { |
| 159 const DataChannel* channel = data_channel_it->second; |
| 160 if (channel->state() == DataChannel::kConnecting || |
| 161 channel->state() == DataChannel::kOpen) { |
| 162 // |streamid| and |sync_label| are both set to the DataChannel label |
| 163 // here so they can be signaled the same way as MediaStreams and Tracks. |
| 164 // For MediaStreams, the sync_label is the MediaStream label and the |
| 165 // track label is the same as |streamid|. |
| 166 const std::string& streamid = channel->label(); |
| 167 const std::string& sync_label = channel->label(); |
| 168 session_options->AddSendStream( |
| 169 cricket::MEDIA_TYPE_DATA, streamid, sync_label); |
| 170 } |
| 171 } |
| 172 } |
| 173 |
| 174 // Factory class for creating remote MediaStreams and MediaStreamTracks. |
| 175 class RemoteMediaStreamFactory { |
| 176 public: |
| 177 explicit RemoteMediaStreamFactory(rtc::Thread* signaling_thread, |
| 178 cricket::ChannelManager* channel_manager) |
| 179 : signaling_thread_(signaling_thread), |
| 180 channel_manager_(channel_manager) { |
| 181 } |
| 182 |
| 183 rtc::scoped_refptr<MediaStreamInterface> CreateMediaStream( |
| 184 const std::string& stream_label) { |
| 185 return MediaStreamProxy::Create( |
| 186 signaling_thread_, MediaStream::Create(stream_label)); |
| 187 } |
| 188 |
| 189 AudioTrackInterface* AddAudioTrack(webrtc::MediaStreamInterface* stream, |
| 190 const std::string& track_id) { |
| 191 return AddTrack<AudioTrackInterface, AudioTrack, AudioTrackProxy>( |
| 192 stream, track_id, RemoteAudioSource::Create().get()); |
| 193 } |
| 194 |
| 195 VideoTrackInterface* AddVideoTrack(webrtc::MediaStreamInterface* stream, |
| 196 const std::string& track_id) { |
| 197 return AddTrack<VideoTrackInterface, VideoTrack, VideoTrackProxy>( |
| 198 stream, track_id, VideoSource::Create(channel_manager_, |
| 199 new RemoteVideoCapturer(), |
| 200 NULL).get()); |
| 201 } |
| 202 |
| 203 private: |
| 204 template <typename TI, typename T, typename TP, typename S> |
| 205 TI* AddTrack(MediaStreamInterface* stream, const std::string& track_id, |
| 206 S* source) { |
| 207 rtc::scoped_refptr<TI> track( |
| 208 TP::Create(signaling_thread_, T::Create(track_id, source))); |
| 209 track->set_state(webrtc::MediaStreamTrackInterface::kLive); |
| 210 if (stream->AddTrack(track)) { |
| 211 return track; |
| 212 } |
| 213 return NULL; |
| 214 } |
| 215 |
| 216 rtc::Thread* signaling_thread_; |
| 217 cricket::ChannelManager* channel_manager_; |
| 218 }; |
| 219 |
| 220 MediaStreamSignaling::MediaStreamSignaling( |
| 221 rtc::Thread* signaling_thread, |
| 222 MediaStreamSignalingObserver* stream_observer, |
| 223 cricket::ChannelManager* channel_manager) |
| 224 : signaling_thread_(signaling_thread), |
| 225 data_channel_factory_(NULL), |
| 226 stream_observer_(stream_observer), |
| 227 local_streams_(StreamCollection::Create()), |
| 228 remote_streams_(StreamCollection::Create()), |
| 229 remote_stream_factory_(new RemoteMediaStreamFactory(signaling_thread, |
| 230 channel_manager)), |
| 231 last_allocated_sctp_even_sid_(-2), |
| 232 last_allocated_sctp_odd_sid_(-1) { |
| 233 } |
| 234 |
| 235 MediaStreamSignaling::~MediaStreamSignaling() { |
| 236 } |
| 237 |
| 238 void MediaStreamSignaling::TearDown() { |
| 239 OnAudioChannelClose(); |
| 240 OnVideoChannelClose(); |
| 241 OnDataChannelClose(); |
| 242 } |
| 243 |
| 244 bool MediaStreamSignaling::IsSctpSidAvailable(int sid) const { |
| 245 if (sid < 0 || sid > static_cast<int>(cricket::kMaxSctpSid)) |
| 246 return false; |
| 247 |
| 248 return FindDataChannelBySid(sid) < 0; |
| 249 } |
| 250 |
| 251 // Gets the first unused odd/even id based on the DTLS role. If |role| is |
| 252 // SSL_CLIENT, the allocated id starts from 0 and takes even numbers; otherwise, |
| 253 // the id starts from 1 and takes odd numbers. Returns false if no id can be |
| 254 // allocated. |
| 255 bool MediaStreamSignaling::AllocateSctpSid(rtc::SSLRole role, int* sid) { |
| 256 int& last_id = (role == rtc::SSL_CLIENT) ? |
| 257 last_allocated_sctp_even_sid_ : last_allocated_sctp_odd_sid_; |
| 258 |
| 259 do { |
| 260 last_id += 2; |
| 261 } while (last_id <= static_cast<int>(cricket::kMaxSctpSid) && |
| 262 !IsSctpSidAvailable(last_id)); |
| 263 |
| 264 if (last_id > static_cast<int>(cricket::kMaxSctpSid)) { |
| 265 return false; |
| 266 } |
| 267 |
| 268 *sid = last_id; |
| 269 return true; |
| 270 } |
| 271 |
| 272 bool MediaStreamSignaling::HasDataChannels() const { |
| 273 return !rtp_data_channels_.empty() || !sctp_data_channels_.empty(); |
| 274 } |
| 275 |
| 276 bool MediaStreamSignaling::AddDataChannel(DataChannel* data_channel) { |
| 277 ASSERT(data_channel != NULL); |
| 278 if (data_channel->data_channel_type() == cricket::DCT_RTP) { |
| 279 if (rtp_data_channels_.find(data_channel->label()) != |
| 280 rtp_data_channels_.end()) { |
| 281 LOG(LS_ERROR) << "DataChannel with label " << data_channel->label() |
| 282 << " already exists."; |
| 283 return false; |
| 284 } |
| 285 rtp_data_channels_[data_channel->label()] = data_channel; |
| 286 } else { |
| 287 ASSERT(data_channel->data_channel_type() == cricket::DCT_SCTP); |
| 288 sctp_data_channels_.push_back(data_channel); |
| 289 } |
| 290 return true; |
| 291 } |
| 292 |
| 293 bool MediaStreamSignaling::AddDataChannelFromOpenMessage( |
| 294 const cricket::ReceiveDataParams& params, |
| 295 const rtc::Buffer& payload) { |
| 296 if (!data_channel_factory_) { |
| 297 LOG(LS_WARNING) << "Remote peer requested a DataChannel but DataChannels " |
| 298 << "are not supported."; |
| 299 return false; |
| 300 } |
| 301 |
| 302 std::string label; |
| 303 InternalDataChannelInit config; |
| 304 config.id = params.ssrc; |
| 305 if (!ParseDataChannelOpenMessage(payload, &label, &config)) { |
| 306 LOG(LS_WARNING) << "Failed to parse the OPEN message for sid " |
| 307 << params.ssrc; |
| 308 return false; |
| 309 } |
| 310 config.open_handshake_role = InternalDataChannelInit::kAcker; |
| 311 |
| 312 scoped_refptr<DataChannel> channel( |
| 313 data_channel_factory_->CreateDataChannel(label, &config)); |
| 314 if (!channel.get()) { |
| 315 LOG(LS_ERROR) << "Failed to create DataChannel from the OPEN message."; |
| 316 return false; |
| 317 } |
| 318 |
| 319 stream_observer_->OnAddDataChannel(channel); |
| 320 return true; |
| 321 } |
| 322 |
| 323 void MediaStreamSignaling::RemoveSctpDataChannel(int sid) { |
| 324 ASSERT(sid >= 0); |
| 325 for (SctpDataChannels::iterator iter = sctp_data_channels_.begin(); |
| 326 iter != sctp_data_channels_.end(); |
| 327 ++iter) { |
| 328 if ((*iter)->id() == sid) { |
| 329 sctp_data_channels_.erase(iter); |
| 330 |
| 331 if (rtc::IsEven(sid) && sid <= last_allocated_sctp_even_sid_) { |
| 332 last_allocated_sctp_even_sid_ = sid - 2; |
| 333 } else if (rtc::IsOdd(sid) && sid <= last_allocated_sctp_odd_sid_) { |
| 334 last_allocated_sctp_odd_sid_ = sid - 2; |
| 335 } |
| 336 return; |
| 337 } |
| 338 } |
| 339 } |
| 340 |
| 341 bool MediaStreamSignaling::AddLocalStream(MediaStreamInterface* local_stream) { |
| 342 if (local_streams_->find(local_stream->label()) != NULL) { |
| 343 LOG(LS_WARNING) << "MediaStream with label " << local_stream->label() |
| 344 << "already exist."; |
| 345 return false; |
| 346 } |
| 347 local_streams_->AddStream(local_stream); |
| 348 |
| 349 // Find tracks that have already been configured in SDP. This can occur if a |
| 350 // local session description that contains the MSID of these tracks is set |
| 351 // before AddLocalStream is called. It can also occur if the local session |
| 352 // description is not changed and RemoveLocalStream |
| 353 // is called and later AddLocalStream is called again with the same stream. |
| 354 AudioTrackVector audio_tracks = local_stream->GetAudioTracks(); |
| 355 for (AudioTrackVector::const_iterator it = audio_tracks.begin(); |
| 356 it != audio_tracks.end(); ++it) { |
| 357 const TrackInfo* track_info = FindTrackInfo(local_audio_tracks_, |
| 358 local_stream->label(), |
| 359 (*it)->id()); |
| 360 if (track_info) { |
| 361 OnLocalTrackSeen(track_info->stream_label, track_info->track_id, |
| 362 track_info->ssrc, cricket::MEDIA_TYPE_AUDIO); |
| 363 } |
| 364 } |
| 365 |
| 366 VideoTrackVector video_tracks = local_stream->GetVideoTracks(); |
| 367 for (VideoTrackVector::const_iterator it = video_tracks.begin(); |
| 368 it != video_tracks.end(); ++it) { |
| 369 const TrackInfo* track_info = FindTrackInfo(local_video_tracks_, |
| 370 local_stream->label(), |
| 371 (*it)->id()); |
| 372 if (track_info) { |
| 373 OnLocalTrackSeen(track_info->stream_label, track_info->track_id, |
| 374 track_info->ssrc, cricket::MEDIA_TYPE_VIDEO); |
| 375 } |
| 376 } |
| 377 return true; |
| 378 } |
| 379 |
| 380 void MediaStreamSignaling::RemoveLocalStream( |
| 381 MediaStreamInterface* local_stream) { |
| 382 AudioTrackVector audio_tracks = local_stream->GetAudioTracks(); |
| 383 for (AudioTrackVector::const_iterator it = audio_tracks.begin(); |
| 384 it != audio_tracks.end(); ++it) { |
| 385 const TrackInfo* track_info = FindTrackInfo(local_audio_tracks_, |
| 386 local_stream->label(), |
| 387 (*it)->id()); |
| 388 if (track_info) { |
| 389 stream_observer_->OnRemoveLocalAudioTrack(local_stream, *it, |
| 390 track_info->ssrc); |
| 391 } |
| 392 } |
| 393 VideoTrackVector video_tracks = local_stream->GetVideoTracks(); |
| 394 for (VideoTrackVector::const_iterator it = video_tracks.begin(); |
| 395 it != video_tracks.end(); ++it) { |
| 396 const TrackInfo* track_info = FindTrackInfo(local_video_tracks_, |
| 397 local_stream->label(), |
| 398 (*it)->id()); |
| 399 if (track_info) { |
| 400 stream_observer_->OnRemoveLocalVideoTrack(local_stream, *it); |
| 401 } |
| 402 } |
| 403 |
| 404 local_streams_->RemoveStream(local_stream); |
| 405 stream_observer_->OnRemoveLocalStream(local_stream); |
| 406 } |
| 407 |
| 408 bool MediaStreamSignaling::GetOptionsForOffer( |
| 409 const PeerConnectionInterface::RTCOfferAnswerOptions& rtc_options, |
| 410 cricket::MediaSessionOptions* session_options) { |
| 411 typedef PeerConnectionInterface::RTCOfferAnswerOptions RTCOfferAnswerOptions; |
| 412 if (!IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_audio) || |
| 413 !IsValidOfferToReceiveMedia(rtc_options.offer_to_receive_video)) { |
| 414 return false; |
| 415 } |
| 416 |
| 417 SetStreams(session_options, local_streams_, rtp_data_channels_); |
| 418 |
| 419 // According to the spec, offer to receive audio/video if the constraint is |
| 420 // not set and there are send streams. |
| 421 if (rtc_options.offer_to_receive_audio == RTCOfferAnswerOptions::kUndefined) { |
| 422 session_options->recv_audio = |
| 423 session_options->HasSendMediaStream(cricket::MEDIA_TYPE_AUDIO); |
| 424 } else { |
| 425 session_options->recv_audio = (rtc_options.offer_to_receive_audio > 0); |
| 426 } |
| 427 if (rtc_options.offer_to_receive_video == RTCOfferAnswerOptions::kUndefined) { |
| 428 session_options->recv_video = |
| 429 session_options->HasSendMediaStream(cricket::MEDIA_TYPE_VIDEO); |
| 430 } else { |
| 431 session_options->recv_video = (rtc_options.offer_to_receive_video > 0); |
| 432 } |
| 433 |
| 434 session_options->vad_enabled = rtc_options.voice_activity_detection; |
| 435 session_options->transport_options.ice_restart = rtc_options.ice_restart; |
| 436 session_options->bundle_enabled = rtc_options.use_rtp_mux; |
| 437 |
| 438 session_options->bundle_enabled = EvaluateNeedForBundle(*session_options); |
| 439 return true; |
| 440 } |
| 441 |
| 442 bool MediaStreamSignaling::GetOptionsForAnswer( |
| 443 const MediaConstraintsInterface* constraints, |
| 444 cricket::MediaSessionOptions* options) { |
| 445 SetStreams(options, local_streams_, rtp_data_channels_); |
| 446 |
| 447 options->recv_audio = false; |
| 448 options->recv_video = false; |
| 449 if (!ParseConstraintsForAnswer(constraints, options)) { |
| 450 return false; |
| 451 } |
| 452 options->bundle_enabled = EvaluateNeedForBundle(*options); |
| 453 return true; |
| 454 } |
| 455 |
| 456 // Updates or creates remote MediaStream objects given a |
| 457 // remote SessionDescription. |
| 458 // If the remote SessionDescription contains new remote MediaStreams |
| 459 // the observer OnAddStream method is called. If a remote MediaStream is missing |
| 460 // from the remote SessionDescription OnRemoveStream is called. |
| 461 void MediaStreamSignaling::OnRemoteDescriptionChanged( |
| 462 const SessionDescriptionInterface* desc) { |
| 463 const cricket::SessionDescription* remote_desc = desc->description(); |
| 464 rtc::scoped_refptr<StreamCollection> new_streams( |
| 465 StreamCollection::Create()); |
| 466 |
| 467 // Find all audio rtp streams and create corresponding remote AudioTracks |
| 468 // and MediaStreams. |
| 469 const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc); |
| 470 if (audio_content) { |
| 471 const cricket::AudioContentDescription* desc = |
| 472 static_cast<const cricket::AudioContentDescription*>( |
| 473 audio_content->description); |
| 474 UpdateRemoteStreamsList(desc->streams(), desc->type(), new_streams); |
| 475 remote_info_.default_audio_track_needed = |
| 476 MediaContentDirectionHasSend(desc->direction()) && |
| 477 desc->streams().empty(); |
| 478 } |
| 479 |
| 480 // Find all video rtp streams and create corresponding remote VideoTracks |
| 481 // and MediaStreams. |
| 482 const cricket::ContentInfo* video_content = GetFirstVideoContent(remote_desc); |
| 483 if (video_content) { |
| 484 const cricket::VideoContentDescription* desc = |
| 485 static_cast<const cricket::VideoContentDescription*>( |
| 486 video_content->description); |
| 487 UpdateRemoteStreamsList(desc->streams(), desc->type(), new_streams); |
| 488 remote_info_.default_video_track_needed = |
| 489 MediaContentDirectionHasSend(desc->direction()) && |
| 490 desc->streams().empty(); |
| 491 } |
| 492 |
| 493 // Update the DataChannels with the information from the remote peer. |
| 494 const cricket::ContentInfo* data_content = GetFirstDataContent(remote_desc); |
| 495 if (data_content) { |
| 496 const cricket::DataContentDescription* data_desc = |
| 497 static_cast<const cricket::DataContentDescription*>( |
| 498 data_content->description); |
| 499 if (rtc::starts_with( |
| 500 data_desc->protocol().data(), cricket::kMediaProtocolRtpPrefix)) { |
| 501 UpdateRemoteRtpDataChannels(data_desc->streams()); |
| 502 } |
| 503 } |
| 504 |
| 505 // Iterate new_streams and notify the observer about new MediaStreams. |
| 506 for (size_t i = 0; i < new_streams->count(); ++i) { |
| 507 MediaStreamInterface* new_stream = new_streams->at(i); |
| 508 stream_observer_->OnAddRemoteStream(new_stream); |
| 509 } |
| 510 |
| 511 // Find removed MediaStreams. |
| 512 if (remote_info_.IsDefaultMediaStreamNeeded() && |
| 513 remote_streams_->find(kDefaultStreamLabel) != NULL) { |
| 514 // The default media stream already exists. No need to do anything. |
| 515 } else { |
| 516 UpdateEndedRemoteMediaStreams(); |
| 517 remote_info_.msid_supported |= remote_streams_->count() > 0; |
| 518 } |
| 519 MaybeCreateDefaultStream(); |
| 520 } |
| 521 |
| 522 void MediaStreamSignaling::OnLocalDescriptionChanged( |
| 523 const SessionDescriptionInterface* desc) { |
| 524 const cricket::ContentInfo* audio_content = |
| 525 GetFirstAudioContent(desc->description()); |
| 526 if (audio_content) { |
| 527 if (audio_content->rejected) { |
| 528 RejectRemoteTracks(cricket::MEDIA_TYPE_AUDIO); |
| 529 } |
| 530 const cricket::AudioContentDescription* audio_desc = |
| 531 static_cast<const cricket::AudioContentDescription*>( |
| 532 audio_content->description); |
| 533 UpdateLocalTracks(audio_desc->streams(), audio_desc->type()); |
| 534 } |
| 535 |
| 536 const cricket::ContentInfo* video_content = |
| 537 GetFirstVideoContent(desc->description()); |
| 538 if (video_content) { |
| 539 if (video_content->rejected) { |
| 540 RejectRemoteTracks(cricket::MEDIA_TYPE_VIDEO); |
| 541 } |
| 542 const cricket::VideoContentDescription* video_desc = |
| 543 static_cast<const cricket::VideoContentDescription*>( |
| 544 video_content->description); |
| 545 UpdateLocalTracks(video_desc->streams(), video_desc->type()); |
| 546 } |
| 547 |
| 548 const cricket::ContentInfo* data_content = |
| 549 GetFirstDataContent(desc->description()); |
| 550 if (data_content) { |
| 551 const cricket::DataContentDescription* data_desc = |
| 552 static_cast<const cricket::DataContentDescription*>( |
| 553 data_content->description); |
| 554 if (rtc::starts_with( |
| 555 data_desc->protocol().data(), cricket::kMediaProtocolRtpPrefix)) { |
| 556 UpdateLocalRtpDataChannels(data_desc->streams()); |
| 557 } |
| 558 } |
| 559 } |
| 560 |
| 561 void MediaStreamSignaling::OnAudioChannelClose() { |
| 562 RejectRemoteTracks(cricket::MEDIA_TYPE_AUDIO); |
| 563 } |
| 564 |
| 565 void MediaStreamSignaling::OnVideoChannelClose() { |
| 566 RejectRemoteTracks(cricket::MEDIA_TYPE_VIDEO); |
| 567 } |
| 568 |
| 569 void MediaStreamSignaling::OnDataChannelClose() { |
| 570 // Use a temporary copy of the RTP/SCTP DataChannel list because the |
| 571 // DataChannel may callback to us and try to modify the list. |
| 572 RtpDataChannels temp_rtp_dcs; |
| 573 temp_rtp_dcs.swap(rtp_data_channels_); |
| 574 RtpDataChannels::iterator it1 = temp_rtp_dcs.begin(); |
| 575 for (; it1 != temp_rtp_dcs.end(); ++it1) { |
| 576 it1->second->OnDataEngineClose(); |
| 577 } |
| 578 |
| 579 SctpDataChannels temp_sctp_dcs; |
| 580 temp_sctp_dcs.swap(sctp_data_channels_); |
| 581 SctpDataChannels::iterator it2 = temp_sctp_dcs.begin(); |
| 582 for (; it2 != temp_sctp_dcs.end(); ++it2) { |
| 583 (*it2)->OnDataEngineClose(); |
| 584 } |
| 585 } |
| 586 |
| 587 void MediaStreamSignaling::UpdateRemoteStreamsList( |
| 588 const cricket::StreamParamsVec& streams, |
| 589 cricket::MediaType media_type, |
| 590 StreamCollection* new_streams) { |
| 591 TrackInfos* current_tracks = GetRemoteTracks(media_type); |
| 592 |
| 593 // Find removed tracks. I.e. tracks where the track id or ssrc don't match the |
| 594 // new StreamParam. |
| 595 TrackInfos::iterator track_it = current_tracks->begin(); |
| 596 while (track_it != current_tracks->end()) { |
| 597 const TrackInfo& info = *track_it; |
| 598 const cricket::StreamParams* params = |
| 599 cricket::GetStreamBySsrc(streams, info.ssrc); |
| 600 if (!params || params->id != info.track_id) { |
| 601 OnRemoteTrackRemoved(info.stream_label, info.track_id, media_type); |
| 602 track_it = current_tracks->erase(track_it); |
| 603 } else { |
| 604 ++track_it; |
| 605 } |
| 606 } |
| 607 |
| 608 // Find new and active tracks. |
| 609 for (cricket::StreamParamsVec::const_iterator it = streams.begin(); |
| 610 it != streams.end(); ++it) { |
| 611 // The sync_label is the MediaStream label and the |stream.id| is the |
| 612 // track id. |
| 613 const std::string& stream_label = it->sync_label; |
| 614 const std::string& track_id = it->id; |
| 615 uint32_t ssrc = it->first_ssrc(); |
| 616 |
| 617 rtc::scoped_refptr<MediaStreamInterface> stream = |
| 618 remote_streams_->find(stream_label); |
| 619 if (!stream) { |
| 620 // This is a new MediaStream. Create a new remote MediaStream. |
| 621 stream = remote_stream_factory_->CreateMediaStream(stream_label); |
| 622 remote_streams_->AddStream(stream); |
| 623 new_streams->AddStream(stream); |
| 624 } |
| 625 |
| 626 const TrackInfo* track_info = FindTrackInfo(*current_tracks, stream_label, |
| 627 track_id); |
| 628 if (!track_info) { |
| 629 current_tracks->push_back(TrackInfo(stream_label, track_id, ssrc)); |
| 630 OnRemoteTrackSeen(stream_label, track_id, it->first_ssrc(), media_type); |
| 631 } |
| 632 } |
| 633 } |
| 634 |
| 635 void MediaStreamSignaling::OnRemoteTrackSeen(const std::string& stream_label, |
| 636 const std::string& track_id, |
| 637 uint32_t ssrc, |
| 638 cricket::MediaType media_type) { |
| 639 MediaStreamInterface* stream = remote_streams_->find(stream_label); |
| 640 |
| 641 if (media_type == cricket::MEDIA_TYPE_AUDIO) { |
| 642 AudioTrackInterface* audio_track = |
| 643 remote_stream_factory_->AddAudioTrack(stream, track_id); |
| 644 stream_observer_->OnAddRemoteAudioTrack(stream, audio_track, ssrc); |
| 645 } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { |
| 646 VideoTrackInterface* video_track = |
| 647 remote_stream_factory_->AddVideoTrack(stream, track_id); |
| 648 stream_observer_->OnAddRemoteVideoTrack(stream, video_track, ssrc); |
| 649 } else { |
| 650 ASSERT(false && "Invalid media type"); |
| 651 } |
| 652 } |
| 653 |
| 654 void MediaStreamSignaling::OnRemoteTrackRemoved( |
| 655 const std::string& stream_label, |
| 656 const std::string& track_id, |
| 657 cricket::MediaType media_type) { |
| 658 MediaStreamInterface* stream = remote_streams_->find(stream_label); |
| 659 |
| 660 if (media_type == cricket::MEDIA_TYPE_AUDIO) { |
| 661 rtc::scoped_refptr<AudioTrackInterface> audio_track = |
| 662 stream->FindAudioTrack(track_id); |
| 663 if (audio_track) { |
| 664 audio_track->set_state(webrtc::MediaStreamTrackInterface::kEnded); |
| 665 stream->RemoveTrack(audio_track); |
| 666 stream_observer_->OnRemoveRemoteAudioTrack(stream, audio_track); |
| 667 } |
| 668 } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { |
| 669 rtc::scoped_refptr<VideoTrackInterface> video_track = |
| 670 stream->FindVideoTrack(track_id); |
| 671 if (video_track) { |
| 672 video_track->set_state(webrtc::MediaStreamTrackInterface::kEnded); |
| 673 stream->RemoveTrack(video_track); |
| 674 stream_observer_->OnRemoveRemoteVideoTrack(stream, video_track); |
| 675 } |
| 676 } else { |
| 677 ASSERT(false && "Invalid media type"); |
| 678 } |
| 679 } |
| 680 |
| 681 void MediaStreamSignaling::RejectRemoteTracks(cricket::MediaType media_type) { |
| 682 TrackInfos* current_tracks = GetRemoteTracks(media_type); |
| 683 for (TrackInfos::iterator track_it = current_tracks->begin(); |
| 684 track_it != current_tracks->end(); ++track_it) { |
| 685 const TrackInfo& info = *track_it; |
| 686 MediaStreamInterface* stream = remote_streams_->find(info.stream_label); |
| 687 if (media_type == cricket::MEDIA_TYPE_AUDIO) { |
| 688 AudioTrackInterface* track = stream->FindAudioTrack(info.track_id); |
| 689 // There's no guarantee the track is still available, e.g. the track may |
| 690 // have been removed from the stream by javascript. |
| 691 if (track) { |
| 692 track->set_state(webrtc::MediaStreamTrackInterface::kEnded); |
| 693 } |
| 694 } |
| 695 if (media_type == cricket::MEDIA_TYPE_VIDEO) { |
| 696 VideoTrackInterface* track = stream->FindVideoTrack(info.track_id); |
| 697 // There's no guarantee the track is still available, e.g. the track may |
| 698 // have been removed from the stream by javascript. |
| 699 if (track) { |
| 700 track->set_state(webrtc::MediaStreamTrackInterface::kEnded); |
| 701 } |
| 702 } |
| 703 } |
| 704 } |
| 705 |
| 706 void MediaStreamSignaling::UpdateEndedRemoteMediaStreams() { |
| 707 std::vector<scoped_refptr<MediaStreamInterface> > streams_to_remove; |
| 708 for (size_t i = 0; i < remote_streams_->count(); ++i) { |
| 709 MediaStreamInterface*stream = remote_streams_->at(i); |
| 710 if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) { |
| 711 streams_to_remove.push_back(stream); |
| 712 } |
| 713 } |
| 714 |
| 715 std::vector<scoped_refptr<MediaStreamInterface> >::const_iterator it; |
| 716 for (it = streams_to_remove.begin(); it != streams_to_remove.end(); ++it) { |
| 717 remote_streams_->RemoveStream(*it); |
| 718 stream_observer_->OnRemoveRemoteStream(*it); |
| 719 } |
| 720 } |
| 721 |
| 722 void MediaStreamSignaling::MaybeCreateDefaultStream() { |
| 723 if (!remote_info_.IsDefaultMediaStreamNeeded()) |
| 724 return; |
| 725 |
| 726 bool default_created = false; |
| 727 |
| 728 scoped_refptr<MediaStreamInterface> default_remote_stream = |
| 729 remote_streams_->find(kDefaultStreamLabel); |
| 730 if (default_remote_stream == NULL) { |
| 731 default_created = true; |
| 732 default_remote_stream = |
| 733 remote_stream_factory_->CreateMediaStream(kDefaultStreamLabel); |
| 734 remote_streams_->AddStream(default_remote_stream); |
| 735 } |
| 736 if (remote_info_.default_audio_track_needed && |
| 737 default_remote_stream->GetAudioTracks().size() == 0) { |
| 738 remote_audio_tracks_.push_back(TrackInfo(kDefaultStreamLabel, |
| 739 kDefaultAudioTrackLabel, 0)); |
| 740 |
| 741 OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0, |
| 742 cricket::MEDIA_TYPE_AUDIO); |
| 743 } |
| 744 if (remote_info_.default_video_track_needed && |
| 745 default_remote_stream->GetVideoTracks().size() == 0) { |
| 746 remote_video_tracks_.push_back(TrackInfo(kDefaultStreamLabel, |
| 747 kDefaultVideoTrackLabel, 0)); |
| 748 OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0, |
| 749 cricket::MEDIA_TYPE_VIDEO); |
| 750 } |
| 751 if (default_created) { |
| 752 stream_observer_->OnAddRemoteStream(default_remote_stream); |
| 753 } |
| 754 } |
| 755 |
| 756 MediaStreamSignaling::TrackInfos* MediaStreamSignaling::GetRemoteTracks( |
| 757 cricket::MediaType type) { |
| 758 if (type == cricket::MEDIA_TYPE_AUDIO) |
| 759 return &remote_audio_tracks_; |
| 760 else if (type == cricket::MEDIA_TYPE_VIDEO) |
| 761 return &remote_video_tracks_; |
| 762 ASSERT(false && "Unknown MediaType"); |
| 763 return NULL; |
| 764 } |
| 765 |
| 766 MediaStreamSignaling::TrackInfos* MediaStreamSignaling::GetLocalTracks( |
| 767 cricket::MediaType media_type) { |
| 768 ASSERT(media_type == cricket::MEDIA_TYPE_AUDIO || |
| 769 media_type == cricket::MEDIA_TYPE_VIDEO); |
| 770 |
| 771 return (media_type == cricket::MEDIA_TYPE_AUDIO) ? |
| 772 &local_audio_tracks_ : &local_video_tracks_; |
| 773 } |
| 774 |
| 775 void MediaStreamSignaling::UpdateLocalTracks( |
| 776 const std::vector<cricket::StreamParams>& streams, |
| 777 cricket::MediaType media_type) { |
| 778 TrackInfos* current_tracks = GetLocalTracks(media_type); |
| 779 |
| 780 // Find removed tracks. Ie tracks where the track id, stream label or ssrc |
| 781 // don't match the new StreamParam. |
| 782 TrackInfos::iterator track_it = current_tracks->begin(); |
| 783 while (track_it != current_tracks->end()) { |
| 784 const TrackInfo& info = *track_it; |
| 785 const cricket::StreamParams* params = |
| 786 cricket::GetStreamBySsrc(streams, info.ssrc); |
| 787 if (!params || params->id != info.track_id || |
| 788 params->sync_label != info.stream_label) { |
| 789 OnLocalTrackRemoved(info.stream_label, info.track_id, info.ssrc, |
| 790 media_type); |
| 791 track_it = current_tracks->erase(track_it); |
| 792 } else { |
| 793 ++track_it; |
| 794 } |
| 795 } |
| 796 |
| 797 // Find new and active tracks. |
| 798 for (cricket::StreamParamsVec::const_iterator it = streams.begin(); |
| 799 it != streams.end(); ++it) { |
| 800 // The sync_label is the MediaStream label and the |stream.id| is the |
| 801 // track id. |
| 802 const std::string& stream_label = it->sync_label; |
| 803 const std::string& track_id = it->id; |
| 804 uint32_t ssrc = it->first_ssrc(); |
| 805 const TrackInfo* track_info = FindTrackInfo(*current_tracks, |
| 806 stream_label, |
| 807 track_id); |
| 808 if (!track_info) { |
| 809 current_tracks->push_back(TrackInfo(stream_label, track_id, ssrc)); |
| 810 OnLocalTrackSeen(stream_label, track_id, it->first_ssrc(), media_type); |
| 811 } |
| 812 } |
| 813 } |
| 814 |
| 815 void MediaStreamSignaling::OnLocalTrackSeen(const std::string& stream_label, |
| 816 const std::string& track_id, |
| 817 uint32_t ssrc, |
| 818 cricket::MediaType media_type) { |
| 819 MediaStreamInterface* stream = local_streams_->find(stream_label); |
| 820 if (!stream) { |
| 821 LOG(LS_WARNING) << "An unknown local MediaStream with label " |
| 822 << stream_label << " has been configured."; |
| 823 return; |
| 824 } |
| 825 |
| 826 if (media_type == cricket::MEDIA_TYPE_AUDIO) { |
| 827 AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id); |
| 828 if (!audio_track) { |
| 829 LOG(LS_WARNING) << "An unknown local AudioTrack with id , " |
| 830 << track_id << " has been configured."; |
| 831 return; |
| 832 } |
| 833 stream_observer_->OnAddLocalAudioTrack(stream, audio_track, ssrc); |
| 834 } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { |
| 835 VideoTrackInterface* video_track = stream->FindVideoTrack(track_id); |
| 836 if (!video_track) { |
| 837 LOG(LS_WARNING) << "An unknown local VideoTrack with id , " |
| 838 << track_id << " has been configured."; |
| 839 return; |
| 840 } |
| 841 stream_observer_->OnAddLocalVideoTrack(stream, video_track, ssrc); |
| 842 } else { |
| 843 ASSERT(false && "Invalid media type"); |
| 844 } |
| 845 } |
| 846 |
| 847 void MediaStreamSignaling::OnLocalTrackRemoved(const std::string& stream_label, |
| 848 const std::string& track_id, |
| 849 uint32_t ssrc, |
| 850 cricket::MediaType media_type) { |
| 851 MediaStreamInterface* stream = local_streams_->find(stream_label); |
| 852 if (!stream) { |
| 853 // This is the normal case. Ie RemoveLocalStream has been called and the |
| 854 // SessionDescriptions has been renegotiated. |
| 855 return; |
| 856 } |
| 857 // A track has been removed from the SessionDescription but the MediaStream |
| 858 // is still associated with MediaStreamSignaling. This only occurs if the SDP |
| 859 // doesn't match with the calls to AddLocalStream and RemoveLocalStream. |
| 860 |
| 861 if (media_type == cricket::MEDIA_TYPE_AUDIO) { |
| 862 AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id); |
| 863 if (!audio_track) { |
| 864 return; |
| 865 } |
| 866 stream_observer_->OnRemoveLocalAudioTrack(stream, audio_track, ssrc); |
| 867 } else if (media_type == cricket::MEDIA_TYPE_VIDEO) { |
| 868 VideoTrackInterface* video_track = stream->FindVideoTrack(track_id); |
| 869 if (!video_track) { |
| 870 return; |
| 871 } |
| 872 stream_observer_->OnRemoveLocalVideoTrack(stream, video_track); |
| 873 } else { |
| 874 ASSERT(false && "Invalid media type."); |
| 875 } |
| 876 } |
| 877 |
| 878 void MediaStreamSignaling::UpdateLocalRtpDataChannels( |
| 879 const cricket::StreamParamsVec& streams) { |
| 880 std::vector<std::string> existing_channels; |
| 881 |
| 882 // Find new and active data channels. |
| 883 for (cricket::StreamParamsVec::const_iterator it =streams.begin(); |
| 884 it != streams.end(); ++it) { |
| 885 // |it->sync_label| is actually the data channel label. The reason is that |
| 886 // we use the same naming of data channels as we do for |
| 887 // MediaStreams and Tracks. |
| 888 // For MediaStreams, the sync_label is the MediaStream label and the |
| 889 // track label is the same as |streamid|. |
| 890 const std::string& channel_label = it->sync_label; |
| 891 RtpDataChannels::iterator data_channel_it = |
| 892 rtp_data_channels_.find(channel_label); |
| 893 if (!VERIFY(data_channel_it != rtp_data_channels_.end())) { |
| 894 continue; |
| 895 } |
| 896 // Set the SSRC the data channel should use for sending. |
| 897 data_channel_it->second->SetSendSsrc(it->first_ssrc()); |
| 898 existing_channels.push_back(data_channel_it->first); |
| 899 } |
| 900 |
| 901 UpdateClosingDataChannels(existing_channels, true); |
| 902 } |
| 903 |
| 904 void MediaStreamSignaling::UpdateRemoteRtpDataChannels( |
| 905 const cricket::StreamParamsVec& streams) { |
| 906 std::vector<std::string> existing_channels; |
| 907 |
| 908 // Find new and active data channels. |
| 909 for (cricket::StreamParamsVec::const_iterator it = streams.begin(); |
| 910 it != streams.end(); ++it) { |
| 911 // The data channel label is either the mslabel or the SSRC if the mslabel |
| 912 // does not exist. Ex a=ssrc:444330170 mslabel:test1. |
| 913 std::string label = it->sync_label.empty() ? |
| 914 rtc::ToString(it->first_ssrc()) : it->sync_label; |
| 915 RtpDataChannels::iterator data_channel_it = |
| 916 rtp_data_channels_.find(label); |
| 917 if (data_channel_it == rtp_data_channels_.end()) { |
| 918 // This is a new data channel. |
| 919 CreateRemoteDataChannel(label, it->first_ssrc()); |
| 920 } else { |
| 921 data_channel_it->second->SetReceiveSsrc(it->first_ssrc()); |
| 922 } |
| 923 existing_channels.push_back(label); |
| 924 } |
| 925 |
| 926 UpdateClosingDataChannels(existing_channels, false); |
| 927 } |
| 928 |
| 929 void MediaStreamSignaling::UpdateClosingDataChannels( |
| 930 const std::vector<std::string>& active_channels, bool is_local_update) { |
| 931 RtpDataChannels::iterator it = rtp_data_channels_.begin(); |
| 932 while (it != rtp_data_channels_.end()) { |
| 933 DataChannel* data_channel = it->second; |
| 934 if (std::find(active_channels.begin(), active_channels.end(), |
| 935 data_channel->label()) != active_channels.end()) { |
| 936 ++it; |
| 937 continue; |
| 938 } |
| 939 |
| 940 if (is_local_update) |
| 941 data_channel->SetSendSsrc(0); |
| 942 else |
| 943 data_channel->RemotePeerRequestClose(); |
| 944 |
| 945 if (data_channel->state() == DataChannel::kClosed) { |
| 946 rtp_data_channels_.erase(it); |
| 947 it = rtp_data_channels_.begin(); |
| 948 } else { |
| 949 ++it; |
| 950 } |
| 951 } |
| 952 } |
| 953 |
| 954 void MediaStreamSignaling::CreateRemoteDataChannel(const std::string& label, |
| 955 uint32_t remote_ssrc) { |
| 956 if (!data_channel_factory_) { |
| 957 LOG(LS_WARNING) << "Remote peer requested a DataChannel but DataChannels " |
| 958 << "are not supported."; |
| 959 return; |
| 960 } |
| 961 scoped_refptr<DataChannel> channel( |
| 962 data_channel_factory_->CreateDataChannel(label, NULL)); |
| 963 if (!channel.get()) { |
| 964 LOG(LS_WARNING) << "Remote peer requested a DataChannel but" |
| 965 << "CreateDataChannel failed."; |
| 966 return; |
| 967 } |
| 968 channel->SetReceiveSsrc(remote_ssrc); |
| 969 stream_observer_->OnAddDataChannel(channel); |
| 970 } |
| 971 |
| 972 void MediaStreamSignaling::OnDataTransportCreatedForSctp() { |
| 973 SctpDataChannels::iterator it = sctp_data_channels_.begin(); |
| 974 for (; it != sctp_data_channels_.end(); ++it) { |
| 975 (*it)->OnTransportChannelCreated(); |
| 976 } |
| 977 } |
| 978 |
| 979 void MediaStreamSignaling::OnDtlsRoleReadyForSctp(rtc::SSLRole role) { |
| 980 SctpDataChannels::iterator it = sctp_data_channels_.begin(); |
| 981 for (; it != sctp_data_channels_.end(); ++it) { |
| 982 if ((*it)->id() < 0) { |
| 983 int sid; |
| 984 if (!AllocateSctpSid(role, &sid)) { |
| 985 LOG(LS_ERROR) << "Failed to allocate SCTP sid."; |
| 986 continue; |
| 987 } |
| 988 (*it)->SetSctpSid(sid); |
| 989 } |
| 990 } |
| 991 } |
| 992 |
| 993 void MediaStreamSignaling::OnRemoteSctpDataChannelClosed(uint32_t sid) { |
| 994 int index = FindDataChannelBySid(sid); |
| 995 if (index < 0) { |
| 996 LOG(LS_WARNING) << "Unexpected sid " << sid |
| 997 << " of the remotely closed DataChannel."; |
| 998 return; |
| 999 } |
| 1000 sctp_data_channels_[index]->Close(); |
| 1001 } |
| 1002 |
| 1003 const MediaStreamSignaling::TrackInfo* |
| 1004 MediaStreamSignaling::FindTrackInfo( |
| 1005 const MediaStreamSignaling::TrackInfos& infos, |
| 1006 const std::string& stream_label, |
| 1007 const std::string track_id) const { |
| 1008 |
| 1009 for (TrackInfos::const_iterator it = infos.begin(); |
| 1010 it != infos.end(); ++it) { |
| 1011 if (it->stream_label == stream_label && it->track_id == track_id) |
| 1012 return &*it; |
| 1013 } |
| 1014 return NULL; |
| 1015 } |
| 1016 |
| 1017 int MediaStreamSignaling::FindDataChannelBySid(int sid) const { |
| 1018 for (size_t i = 0; i < sctp_data_channels_.size(); ++i) { |
| 1019 if (sctp_data_channels_[i]->id() == sid) { |
| 1020 return static_cast<int>(i); |
| 1021 } |
| 1022 } |
| 1023 return -1; |
| 1024 } |
| 1025 |
| 1026 } // namespace webrtc |
OLD | NEW |