OLD | NEW |
(Empty) | |
| 1 /* |
| 2 * libjingle |
| 3 * Copyright 2012 Google Inc. |
| 4 * |
| 5 * Redistribution and use in source and binary forms, with or without |
| 6 * modification, are permitted provided that the following conditions are met: |
| 7 * |
| 8 * 1. Redistributions of source code must retain the above copyright notice, |
| 9 * this list of conditions and the following disclaimer. |
| 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
| 11 * this list of conditions and the following disclaimer in the documentation |
| 12 * and/or other materials provided with the distribution. |
| 13 * 3. The name of the author may not be used to endorse or promote products |
| 14 * derived from this software without specific prior written permission. |
| 15 * |
| 16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED |
| 17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF |
| 18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO |
| 19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| 20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
| 21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; |
| 22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, |
| 23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR |
| 24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF |
| 25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 26 */ |
| 27 |
| 28 #include <string> |
| 29 #include <vector> |
| 30 |
| 31 #include "talk/app/webrtc/audiotrack.h" |
| 32 #include "talk/app/webrtc/mediastream.h" |
| 33 #include "talk/app/webrtc/mediastreamsignaling.h" |
| 34 #include "talk/app/webrtc/sctputils.h" |
| 35 #include "talk/app/webrtc/streamcollection.h" |
| 36 #include "talk/app/webrtc/test/fakeconstraints.h" |
| 37 #include "talk/app/webrtc/test/fakedatachannelprovider.h" |
| 38 #include "talk/app/webrtc/videotrack.h" |
| 39 #include "talk/media/base/fakemediaengine.h" |
| 40 #include "webrtc/p2p/base/constants.h" |
| 41 #include "webrtc/p2p/base/sessiondescription.h" |
| 42 #include "talk/session/media/channelmanager.h" |
| 43 #include "webrtc/base/gunit.h" |
| 44 #include "webrtc/base/scoped_ptr.h" |
| 45 #include "webrtc/base/stringutils.h" |
| 46 #include "webrtc/base/thread.h" |
| 47 |
// Fixture identifiers shared by all tests below: two MediaStream labels and,
// per stream index, one audio track id and one video track id. Index i of
// kAudioTracks/kVideoTracks belongs to stream kStreams[i].
static const char kStreams[][8] = {"stream1", "stream2"};
static const char kAudioTracks[][32] = {"audiotrack0", "audiotrack1"};
static const char kVideoTracks[][32] = {"videotrack0", "videotrack1"};
| 51 |
| 52 using webrtc::AudioTrack; |
| 53 using webrtc::AudioTrackInterface; |
| 54 using webrtc::AudioTrackVector; |
| 55 using webrtc::VideoTrack; |
| 56 using webrtc::VideoTrackInterface; |
| 57 using webrtc::VideoTrackVector; |
| 58 using webrtc::DataChannelInterface; |
| 59 using webrtc::FakeConstraints; |
| 60 using webrtc::IceCandidateInterface; |
| 61 using webrtc::MediaConstraintsInterface; |
| 62 using webrtc::MediaStreamInterface; |
| 63 using webrtc::MediaStreamTrackInterface; |
| 64 using webrtc::PeerConnectionInterface; |
| 65 using webrtc::SdpParseError; |
| 66 using webrtc::SessionDescriptionInterface; |
| 67 using webrtc::StreamCollection; |
| 68 using webrtc::StreamCollectionInterface; |
| 69 |
| 70 typedef PeerConnectionInterface::RTCOfferAnswerOptions RTCOfferAnswerOptions; |
| 71 |
// Reference SDP with one MediaStream with label "stream1", an audio track
// with id "audiotrack0" (ssrc 1) and a video track with id "videotrack0"
// (ssrc 2). Uses the legacy "mslabel"/"label" ssrc attributes instead of
// msid.
static const char kSdpStringWithStream1[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "a=ssrc:1 cname:stream1\r\n"
    "a=ssrc:1 mslabel:stream1\r\n"
    "a=ssrc:1 label:audiotrack0\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n"
    "a=ssrc:2 cname:stream1\r\n"
    "a=ssrc:2 mslabel:stream1\r\n"
    "a=ssrc:2 label:videotrack0\r\n";
| 91 |
// Reference SDP with two MediaStreams with labels "stream1" and "stream2".
// Each MediaStream has one audio track and one video track.
// This uses MSID (a=msid-semantic plus per-ssrc msid attributes).
static const char kSdpStringWith2Stream[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "a=msid-semantic: WMS stream1 stream2\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "a=ssrc:1 cname:stream1\r\n"
    "a=ssrc:1 msid:stream1 audiotrack0\r\n"
    "a=ssrc:3 cname:stream2\r\n"
    "a=ssrc:3 msid:stream2 audiotrack1\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    // VP8 always uses a 90000 Hz RTP clock rate; this previously read
    // "VP8/0", inconsistent with every other SDP fixture in this file.
    "a=rtpmap:120 VP8/90000\r\n"
    "a=ssrc:2 cname:stream1\r\n"
    "a=ssrc:2 msid:stream1 videotrack0\r\n"
    "a=ssrc:4 cname:stream2\r\n"
    "a=ssrc:4 msid:stream2 videotrack1\r\n";
| 115 |
// Reference SDP without MediaStreams. Msid is not supported: there is no
// a=msid-semantic line and no ssrc attributes, so any received tracks must
// be put in a default stream by the signaling code.
static const char kSdpStringWithoutStreams[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n";
| 128 |
// Reference SDP without MediaStreams. Msid is supported (the
// a=msid-semantic line is present) but the WMS list is empty, i.e. the
// remote end explicitly signals that no streams are sent.
static const char kSdpStringWithMsidWithoutStreams[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "a=msid-semantic: WMS\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n";
| 142 |
// Reference SDP without MediaStreams and with an audio m-line only (no
// video section at all).
static const char kSdpStringWithoutStreamsAudioOnly[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n";
| 152 |
// Reference SENDONLY SDP without MediaStreams. Msid is not supported.
// Note: the "a=sendonly" lines previously lacked their "\r\n" terminators,
// which fused them with the following a=rtpmap lines into malformed SDP.
static const char kSdpStringSendOnlyWithWithoutStreams[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=sendonly\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=sendonly\r\n"
    "a=rtpmap:120 VP8/90000\r\n";
| 167 |
// SDP building blocks used by CreateSessionDescriptionAndReference() to
// assemble a session description piece by piece:
//   kSdpStringInit       - session-level header with an empty WMS list.
//   kSdpStringAudio      - audio m-line (ISAC/16000, payload type 103).
//   kSdpStringVideo      - video m-line (VP8/90000, payload type 120).
//   kSdpStringMs1Audio0/1, kSdpStringMs1Video0/1 - per-track ssrc/msid
//       attributes binding ssrcs 1-4 to stream1's audio/video tracks.
static const char kSdpStringInit[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "a=msid-semantic: WMS\r\n";

static const char kSdpStringAudio[] =
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n";

static const char kSdpStringVideo[] =
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n";

static const char kSdpStringMs1Audio0[] =
    "a=ssrc:1 cname:stream1\r\n"
    "a=ssrc:1 msid:stream1 audiotrack0\r\n";

static const char kSdpStringMs1Video0[] =
    "a=ssrc:2 cname:stream1\r\n"
    "a=ssrc:2 msid:stream1 videotrack0\r\n";

static const char kSdpStringMs1Audio1[] =
    "a=ssrc:3 cname:stream1\r\n"
    "a=ssrc:3 msid:stream1 audiotrack1\r\n";

static const char kSdpStringMs1Video1[] =
    "a=ssrc:4 cname:stream1\r\n"
    "a=ssrc:4 msid:stream1 videotrack1\r\n";
| 200 |
| 201 // Verifies that |options| contain all tracks in |collection| and that |
| 202 // the |options| has set the the has_audio and has_video flags correct. |
| 203 static void VerifyMediaOptions(StreamCollectionInterface* collection, |
| 204 const cricket::MediaSessionOptions& options) { |
| 205 if (!collection) { |
| 206 return; |
| 207 } |
| 208 |
| 209 size_t stream_index = 0; |
| 210 for (size_t i = 0; i < collection->count(); ++i) { |
| 211 MediaStreamInterface* stream = collection->at(i); |
| 212 AudioTrackVector audio_tracks = stream->GetAudioTracks(); |
| 213 ASSERT_GE(options.streams.size(), stream_index + audio_tracks.size()); |
| 214 for (size_t j = 0; j < audio_tracks.size(); ++j) { |
| 215 webrtc::AudioTrackInterface* audio = audio_tracks[j]; |
| 216 EXPECT_EQ(options.streams[stream_index].sync_label, stream->label()); |
| 217 EXPECT_EQ(options.streams[stream_index++].id, audio->id()); |
| 218 EXPECT_TRUE(options.has_audio()); |
| 219 } |
| 220 VideoTrackVector video_tracks = stream->GetVideoTracks(); |
| 221 ASSERT_GE(options.streams.size(), stream_index + video_tracks.size()); |
| 222 for (size_t j = 0; j < video_tracks.size(); ++j) { |
| 223 webrtc::VideoTrackInterface* video = video_tracks[j]; |
| 224 EXPECT_EQ(options.streams[stream_index].sync_label, stream->label()); |
| 225 EXPECT_EQ(options.streams[stream_index++].id, video->id()); |
| 226 EXPECT_TRUE(options.has_video()); |
| 227 } |
| 228 } |
| 229 } |
| 230 |
| 231 static bool CompareStreamCollections(StreamCollectionInterface* s1, |
| 232 StreamCollectionInterface* s2) { |
| 233 if (s1 == NULL || s2 == NULL || s1->count() != s2->count()) |
| 234 return false; |
| 235 |
| 236 for (size_t i = 0; i != s1->count(); ++i) { |
| 237 if (s1->at(i)->label() != s2->at(i)->label()) |
| 238 return false; |
| 239 webrtc::AudioTrackVector audio_tracks1 = s1->at(i)->GetAudioTracks(); |
| 240 webrtc::AudioTrackVector audio_tracks2 = s2->at(i)->GetAudioTracks(); |
| 241 webrtc::VideoTrackVector video_tracks1 = s1->at(i)->GetVideoTracks(); |
| 242 webrtc::VideoTrackVector video_tracks2 = s2->at(i)->GetVideoTracks(); |
| 243 |
| 244 if (audio_tracks1.size() != audio_tracks2.size()) |
| 245 return false; |
| 246 for (size_t j = 0; j != audio_tracks1.size(); ++j) { |
| 247 if (audio_tracks1[j]->id() != audio_tracks2[j]->id()) |
| 248 return false; |
| 249 } |
| 250 if (video_tracks1.size() != video_tracks2.size()) |
| 251 return false; |
| 252 for (size_t j = 0; j != video_tracks1.size(); ++j) { |
| 253 if (video_tracks1[j]->id() != video_tracks2[j]->id()) |
| 254 return false; |
| 255 } |
| 256 } |
| 257 return true; |
| 258 } |
| 259 |
// DataChannelFactory test double: creates real webrtc::DataChannel objects
// backed by a FakeDataChannelProvider, registers each new channel with the
// MediaStreamSignaling under test, and remembers the last init config so a
// test can inspect what configuration was requested.
class FakeDataChannelFactory : public webrtc::DataChannelFactory {
 public:
  FakeDataChannelFactory(FakeDataChannelProvider* provider,
                         cricket::DataChannelType dct,
                         webrtc::MediaStreamSignaling* media_stream_signaling)
      : provider_(provider),
        type_(dct),
        media_stream_signaling_(media_stream_signaling) {}

  virtual rtc::scoped_refptr<webrtc::DataChannel> CreateDataChannel(
      const std::string& label,
      const webrtc::InternalDataChannelInit* config) {
    // Record the config before creating the channel so last_init() reflects
    // the most recent request even if creation side effects occur.
    last_init_ = *config;
    rtc::scoped_refptr<webrtc::DataChannel> data_channel =
        webrtc::DataChannel::Create(provider_, type_, label, *config);
    media_stream_signaling_->AddDataChannel(data_channel);
    return data_channel;
  }

  // Config passed to the most recent CreateDataChannel() call.
  const webrtc::InternalDataChannelInit& last_init() const {
    return last_init_;
  }

 private:
  FakeDataChannelProvider* provider_;            // Not owned.
  cricket::DataChannelType type_;
  webrtc::MediaStreamSignaling* media_stream_signaling_;  // Not owned.
  webrtc::InternalDataChannelInit last_init_;
};
| 289 |
| 290 class MockSignalingObserver : public webrtc::MediaStreamSignalingObserver { |
| 291 public: |
| 292 MockSignalingObserver() |
| 293 : remote_media_streams_(StreamCollection::Create()) { |
| 294 } |
| 295 |
| 296 virtual ~MockSignalingObserver() { |
| 297 } |
| 298 |
| 299 // New remote stream have been discovered. |
| 300 virtual void OnAddRemoteStream(MediaStreamInterface* remote_stream) { |
| 301 remote_media_streams_->AddStream(remote_stream); |
| 302 } |
| 303 |
| 304 // Remote stream is no longer available. |
| 305 virtual void OnRemoveRemoteStream(MediaStreamInterface* remote_stream) { |
| 306 remote_media_streams_->RemoveStream(remote_stream); |
| 307 } |
| 308 |
| 309 virtual void OnAddDataChannel(DataChannelInterface* data_channel) { |
| 310 } |
| 311 |
| 312 virtual void OnAddLocalAudioTrack(MediaStreamInterface* stream, |
| 313 AudioTrackInterface* audio_track, |
| 314 uint32_t ssrc) { |
| 315 AddTrack(&local_audio_tracks_, stream, audio_track, ssrc); |
| 316 } |
| 317 |
| 318 virtual void OnAddLocalVideoTrack(MediaStreamInterface* stream, |
| 319 VideoTrackInterface* video_track, |
| 320 uint32_t ssrc) { |
| 321 AddTrack(&local_video_tracks_, stream, video_track, ssrc); |
| 322 } |
| 323 |
| 324 virtual void OnRemoveLocalAudioTrack(MediaStreamInterface* stream, |
| 325 AudioTrackInterface* audio_track, |
| 326 uint32_t ssrc) { |
| 327 RemoveTrack(&local_audio_tracks_, stream, audio_track); |
| 328 } |
| 329 |
| 330 virtual void OnRemoveLocalVideoTrack(MediaStreamInterface* stream, |
| 331 VideoTrackInterface* video_track) { |
| 332 RemoveTrack(&local_video_tracks_, stream, video_track); |
| 333 } |
| 334 |
| 335 virtual void OnAddRemoteAudioTrack(MediaStreamInterface* stream, |
| 336 AudioTrackInterface* audio_track, |
| 337 uint32_t ssrc) { |
| 338 AddTrack(&remote_audio_tracks_, stream, audio_track, ssrc); |
| 339 } |
| 340 |
| 341 virtual void OnAddRemoteVideoTrack(MediaStreamInterface* stream, |
| 342 VideoTrackInterface* video_track, |
| 343 uint32_t ssrc) { |
| 344 AddTrack(&remote_video_tracks_, stream, video_track, ssrc); |
| 345 } |
| 346 |
| 347 virtual void OnRemoveRemoteAudioTrack(MediaStreamInterface* stream, |
| 348 AudioTrackInterface* audio_track) { |
| 349 RemoveTrack(&remote_audio_tracks_, stream, audio_track); |
| 350 } |
| 351 |
| 352 virtual void OnRemoveRemoteVideoTrack(MediaStreamInterface* stream, |
| 353 VideoTrackInterface* video_track) { |
| 354 RemoveTrack(&remote_video_tracks_, stream, video_track); |
| 355 } |
| 356 |
| 357 virtual void OnRemoveLocalStream(MediaStreamInterface* stream) { |
| 358 } |
| 359 |
| 360 MediaStreamInterface* RemoteStream(const std::string& label) { |
| 361 return remote_media_streams_->find(label); |
| 362 } |
| 363 |
| 364 StreamCollectionInterface* remote_streams() const { |
| 365 return remote_media_streams_; |
| 366 } |
| 367 |
| 368 size_t NumberOfRemoteAudioTracks() { return remote_audio_tracks_.size(); } |
| 369 |
| 370 void VerifyRemoteAudioTrack(const std::string& stream_label, |
| 371 const std::string& track_id, |
| 372 uint32_t ssrc) { |
| 373 VerifyTrack(remote_audio_tracks_, stream_label, track_id, ssrc); |
| 374 } |
| 375 |
| 376 size_t NumberOfRemoteVideoTracks() { return remote_video_tracks_.size(); } |
| 377 |
| 378 void VerifyRemoteVideoTrack(const std::string& stream_label, |
| 379 const std::string& track_id, |
| 380 uint32_t ssrc) { |
| 381 VerifyTrack(remote_video_tracks_, stream_label, track_id, ssrc); |
| 382 } |
| 383 |
| 384 size_t NumberOfLocalAudioTracks() { return local_audio_tracks_.size(); } |
| 385 void VerifyLocalAudioTrack(const std::string& stream_label, |
| 386 const std::string& track_id, |
| 387 uint32_t ssrc) { |
| 388 VerifyTrack(local_audio_tracks_, stream_label, track_id, ssrc); |
| 389 } |
| 390 |
| 391 size_t NumberOfLocalVideoTracks() { return local_video_tracks_.size(); } |
| 392 |
| 393 void VerifyLocalVideoTrack(const std::string& stream_label, |
| 394 const std::string& track_id, |
| 395 uint32_t ssrc) { |
| 396 VerifyTrack(local_video_tracks_, stream_label, track_id, ssrc); |
| 397 } |
| 398 |
| 399 private: |
| 400 struct TrackInfo { |
| 401 TrackInfo() {} |
| 402 TrackInfo(const std::string& stream_label, |
| 403 const std::string track_id, |
| 404 uint32_t ssrc) |
| 405 : stream_label(stream_label), track_id(track_id), ssrc(ssrc) {} |
| 406 std::string stream_label; |
| 407 std::string track_id; |
| 408 uint32_t ssrc; |
| 409 }; |
| 410 typedef std::vector<TrackInfo> TrackInfos; |
| 411 |
| 412 void AddTrack(TrackInfos* track_infos, |
| 413 MediaStreamInterface* stream, |
| 414 MediaStreamTrackInterface* track, |
| 415 uint32_t ssrc) { |
| 416 (*track_infos).push_back(TrackInfo(stream->label(), track->id(), ssrc)); |
| 417 } |
| 418 |
| 419 void RemoveTrack(TrackInfos* track_infos, MediaStreamInterface* stream, |
| 420 MediaStreamTrackInterface* track) { |
| 421 for (TrackInfos::iterator it = track_infos->begin(); |
| 422 it != track_infos->end(); ++it) { |
| 423 if (it->stream_label == stream->label() && it->track_id == track->id()) { |
| 424 track_infos->erase(it); |
| 425 return; |
| 426 } |
| 427 } |
| 428 ADD_FAILURE(); |
| 429 } |
| 430 |
| 431 const TrackInfo* FindTrackInfo(const TrackInfos& infos, |
| 432 const std::string& stream_label, |
| 433 const std::string track_id) const { |
| 434 for (TrackInfos::const_iterator it = infos.begin(); |
| 435 it != infos.end(); ++it) { |
| 436 if (it->stream_label == stream_label && it->track_id == track_id) |
| 437 return &*it; |
| 438 } |
| 439 return NULL; |
| 440 } |
| 441 |
| 442 void VerifyTrack(const TrackInfos& track_infos, |
| 443 const std::string& stream_label, |
| 444 const std::string& track_id, |
| 445 uint32_t ssrc) { |
| 446 const TrackInfo* track_info = FindTrackInfo(track_infos, |
| 447 stream_label, |
| 448 track_id); |
| 449 ASSERT_TRUE(track_info != NULL); |
| 450 EXPECT_EQ(ssrc, track_info->ssrc); |
| 451 } |
| 452 |
| 453 TrackInfos remote_audio_tracks_; |
| 454 TrackInfos remote_video_tracks_; |
| 455 TrackInfos local_audio_tracks_; |
| 456 TrackInfos local_video_tracks_; |
| 457 |
| 458 rtc::scoped_refptr<StreamCollection> remote_media_streams_; |
| 459 }; |
| 460 |
| 461 class MediaStreamSignalingForTest : public webrtc::MediaStreamSignaling { |
| 462 public: |
| 463 MediaStreamSignalingForTest(MockSignalingObserver* observer, |
| 464 cricket::ChannelManager* channel_manager) |
| 465 : webrtc::MediaStreamSignaling(rtc::Thread::Current(), observer, |
| 466 channel_manager) { |
| 467 }; |
| 468 |
| 469 using webrtc::MediaStreamSignaling::GetOptionsForOffer; |
| 470 using webrtc::MediaStreamSignaling::GetOptionsForAnswer; |
| 471 using webrtc::MediaStreamSignaling::OnRemoteDescriptionChanged; |
| 472 using webrtc::MediaStreamSignaling::remote_streams; |
| 473 }; |
| 474 |
// Test fixture: wires a MediaStreamSignalingForTest to a MockSignalingObserver
// and a ChannelManager driven by a FakeMediaEngine, plus helpers for building
// stream collections, session descriptions and data channels.
class MediaStreamSignalingTest: public testing::Test {
 protected:
  virtual void SetUp() {
    observer_.reset(new MockSignalingObserver());
    channel_manager_.reset(
        new cricket::ChannelManager(new cricket::FakeMediaEngine(),
                                    rtc::Thread::Current()));
    signaling_.reset(new MediaStreamSignalingForTest(observer_.get(),
                                                     channel_manager_.get()));
    data_channel_provider_.reset(new FakeDataChannelProvider());
  }

  // Create a collection of streams.
  // CreateStreamCollection(1) creates a collection that
  // correspond to kSdpString1.
  // CreateStreamCollection(2) correspond to kSdpString2.
  // Each stream gets one audio and one video track, with ids taken from
  // kAudioTracks/kVideoTracks.
  rtc::scoped_refptr<StreamCollection>
  CreateStreamCollection(int number_of_streams) {
    rtc::scoped_refptr<StreamCollection> local_collection(
        StreamCollection::Create());

    for (int i = 0; i < number_of_streams; ++i) {
      rtc::scoped_refptr<webrtc::MediaStreamInterface> stream(
          webrtc::MediaStream::Create(kStreams[i]));

      // Add a local audio track.
      rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
          webrtc::AudioTrack::Create(kAudioTracks[i], NULL));
      stream->AddTrack(audio_track);

      // Add a local video track.
      rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
          webrtc::VideoTrack::Create(kVideoTracks[i], NULL));
      stream->AddTrack(video_track);

      local_collection->AddStream(stream);
    }
    return local_collection;
  }

  // This function creates a MediaStream with label kStreams[0] and
  // |number_of_audio_tracks| and |number_of_video_tracks| tracks and the
  // corresponding SessionDescriptionInterface. The SessionDescriptionInterface
  // is returned in |desc| (caller takes ownership) and the MediaStream is
  // stored in |reference_collection_|. At most two tracks of each kind are
  // supported (ids come from kAudioTracks/kVideoTracks).
  void CreateSessionDescriptionAndReference(
      size_t number_of_audio_tracks,
      size_t number_of_video_tracks,
      SessionDescriptionInterface** desc) {
    ASSERT_TRUE(desc != NULL);
    ASSERT_LE(number_of_audio_tracks, 2u);
    ASSERT_LE(number_of_video_tracks, 2u);

    reference_collection_ = StreamCollection::Create();
    // The SDP is assembled from the kSdpString* fragments to match exactly
    // the tracks added to |reference_collection_| below.
    std::string sdp_ms1 = std::string(kSdpStringInit);

    std::string mediastream_label = kStreams[0];

    rtc::scoped_refptr<webrtc::MediaStreamInterface> stream(
        webrtc::MediaStream::Create(mediastream_label));
    reference_collection_->AddStream(stream);

    if (number_of_audio_tracks > 0) {
      sdp_ms1 += std::string(kSdpStringAudio);
      sdp_ms1 += std::string(kSdpStringMs1Audio0);
      AddAudioTrack(kAudioTracks[0], stream);
    }
    if (number_of_audio_tracks > 1) {
      sdp_ms1 += kSdpStringMs1Audio1;
      AddAudioTrack(kAudioTracks[1], stream);
    }

    if (number_of_video_tracks > 0) {
      sdp_ms1 += std::string(kSdpStringVideo);
      sdp_ms1 += std::string(kSdpStringMs1Video0);
      AddVideoTrack(kVideoTracks[0], stream);
    }
    if (number_of_video_tracks > 1) {
      sdp_ms1 += kSdpStringMs1Video1;
      AddVideoTrack(kVideoTracks[1], stream);
    }

    *desc = webrtc::CreateSessionDescription(
        SessionDescriptionInterface::kOffer, sdp_ms1, NULL);
  }

  // Creates an audio track with |track_id| and adds it to |stream|.
  void AddAudioTrack(const std::string& track_id,
                     MediaStreamInterface* stream) {
    rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
        webrtc::AudioTrack::Create(track_id, NULL));
    ASSERT_TRUE(stream->AddTrack(audio_track));
  }

  // Creates a video track with |track_id| and adds it to |stream|.
  void AddVideoTrack(const std::string& track_id,
                     MediaStreamInterface* stream) {
    rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
        webrtc::VideoTrack::Create(track_id, NULL));
    ASSERT_TRUE(stream->AddTrack(video_track));
  }

  // Creates a DataChannel of |type| with |label| and pre-assigned |id|,
  // backed by |data_channel_provider_|, and registers it with |signaling_|.
  rtc::scoped_refptr<webrtc::DataChannel> AddDataChannel(
      cricket::DataChannelType type, const std::string& label, int id) {
    webrtc::InternalDataChannelInit config;
    config.id = id;
    rtc::scoped_refptr<webrtc::DataChannel> data_channel(
        webrtc::DataChannel::Create(
            data_channel_provider_.get(), type, label, config));
    EXPECT_TRUE(data_channel.get() != NULL);
    EXPECT_TRUE(signaling_->AddDataChannel(data_channel.get()));
    return data_channel;
  }

  // ChannelManager is used by VideoSource, so it should be released after all
  // the video tracks. Put it as the first private variable should ensure that.
  rtc::scoped_ptr<cricket::ChannelManager> channel_manager_;
  rtc::scoped_refptr<StreamCollection> reference_collection_;
  rtc::scoped_ptr<MockSignalingObserver> observer_;
  rtc::scoped_ptr<MediaStreamSignalingForTest> signaling_;
  rtc::scoped_ptr<FakeDataChannelProvider> data_channel_provider_;
};
| 595 |
| 596 TEST_F(MediaStreamSignalingTest, GetOptionsForOfferWithInvalidAudioOption) { |
| 597 RTCOfferAnswerOptions rtc_options; |
| 598 rtc_options.offer_to_receive_audio = RTCOfferAnswerOptions::kUndefined - 1; |
| 599 |
| 600 cricket::MediaSessionOptions options; |
| 601 EXPECT_FALSE(signaling_->GetOptionsForOffer(rtc_options, &options)); |
| 602 |
| 603 rtc_options.offer_to_receive_audio = |
| 604 RTCOfferAnswerOptions::kMaxOfferToReceiveMedia + 1; |
| 605 EXPECT_FALSE(signaling_->GetOptionsForOffer(rtc_options, &options)); |
| 606 } |
| 607 |
| 608 |
| 609 TEST_F(MediaStreamSignalingTest, GetOptionsForOfferWithInvalidVideoOption) { |
| 610 RTCOfferAnswerOptions rtc_options; |
| 611 rtc_options.offer_to_receive_video = |
| 612 RTCOfferAnswerOptions::kUndefined - 1; |
| 613 |
| 614 cricket::MediaSessionOptions options; |
| 615 EXPECT_FALSE(signaling_->GetOptionsForOffer(rtc_options, &options)); |
| 616 |
| 617 rtc_options.offer_to_receive_video = |
| 618 RTCOfferAnswerOptions::kMaxOfferToReceiveMedia + 1; |
| 619 EXPECT_FALSE(signaling_->GetOptionsForOffer(rtc_options, &options)); |
| 620 } |
| 621 |
| 622 // Test that a MediaSessionOptions is created for an offer if |
| 623 // OfferToReceiveAudio and OfferToReceiveVideo options are set but no |
| 624 // MediaStreams are sent. |
| 625 TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithAudioVideo) { |
| 626 RTCOfferAnswerOptions rtc_options; |
| 627 rtc_options.offer_to_receive_audio = 1; |
| 628 rtc_options.offer_to_receive_video = 1; |
| 629 |
| 630 cricket::MediaSessionOptions options; |
| 631 EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options)); |
| 632 EXPECT_TRUE(options.has_audio()); |
| 633 EXPECT_TRUE(options.has_video()); |
| 634 EXPECT_TRUE(options.bundle_enabled); |
| 635 } |
| 636 |
| 637 // Test that a correct MediaSessionOptions is created for an offer if |
| 638 // OfferToReceiveAudio is set but no MediaStreams are sent. |
| 639 TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithAudio) { |
| 640 RTCOfferAnswerOptions rtc_options; |
| 641 rtc_options.offer_to_receive_audio = 1; |
| 642 |
| 643 cricket::MediaSessionOptions options; |
| 644 EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options)); |
| 645 EXPECT_TRUE(options.has_audio()); |
| 646 EXPECT_FALSE(options.has_video()); |
| 647 EXPECT_TRUE(options.bundle_enabled); |
| 648 } |
| 649 |
| 650 // Test that a correct MediaSessionOptions is created for an offer if |
| 651 // the default OfferOptons is used or MediaStreams are sent. |
| 652 TEST_F(MediaStreamSignalingTest, GetDefaultMediaSessionOptionsForOffer) { |
| 653 RTCOfferAnswerOptions rtc_options; |
| 654 |
| 655 cricket::MediaSessionOptions options; |
| 656 EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options)); |
| 657 EXPECT_FALSE(options.has_audio()); |
| 658 EXPECT_FALSE(options.has_video()); |
| 659 EXPECT_FALSE(options.bundle_enabled); |
| 660 EXPECT_TRUE(options.vad_enabled); |
| 661 EXPECT_FALSE(options.transport_options.ice_restart); |
| 662 } |
| 663 |
| 664 // Test that a correct MediaSessionOptions is created for an offer if |
| 665 // OfferToReceiveVideo is set but no MediaStreams are sent. |
| 666 TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithVideo) { |
| 667 RTCOfferAnswerOptions rtc_options; |
| 668 rtc_options.offer_to_receive_audio = 0; |
| 669 rtc_options.offer_to_receive_video = 1; |
| 670 |
| 671 cricket::MediaSessionOptions options; |
| 672 EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options)); |
| 673 EXPECT_FALSE(options.has_audio()); |
| 674 EXPECT_TRUE(options.has_video()); |
| 675 EXPECT_TRUE(options.bundle_enabled); |
| 676 } |
| 677 |
| 678 // Test that a correct MediaSessionOptions is created for an offer if |
| 679 // UseRtpMux is set to false. |
| 680 TEST_F(MediaStreamSignalingTest, |
| 681 GetMediaSessionOptionsForOfferWithBundleDisabled) { |
| 682 RTCOfferAnswerOptions rtc_options; |
| 683 rtc_options.offer_to_receive_audio = 1; |
| 684 rtc_options.offer_to_receive_video = 1; |
| 685 rtc_options.use_rtp_mux = false; |
| 686 |
| 687 cricket::MediaSessionOptions options; |
| 688 EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options)); |
| 689 EXPECT_TRUE(options.has_audio()); |
| 690 EXPECT_TRUE(options.has_video()); |
| 691 EXPECT_FALSE(options.bundle_enabled); |
| 692 } |
| 693 |
| 694 // Test that a correct MediaSessionOptions is created to restart ice if |
| 695 // IceRestart is set. It also tests that subsequent MediaSessionOptions don't |
| 696 // have |transport_options.ice_restart| set. |
| 697 TEST_F(MediaStreamSignalingTest, |
| 698 GetMediaSessionOptionsForOfferWithIceRestart) { |
| 699 RTCOfferAnswerOptions rtc_options; |
| 700 rtc_options.ice_restart = true; |
| 701 |
| 702 cricket::MediaSessionOptions options; |
| 703 EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options)); |
| 704 EXPECT_TRUE(options.transport_options.ice_restart); |
| 705 |
| 706 rtc_options = RTCOfferAnswerOptions(); |
| 707 EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options)); |
| 708 EXPECT_FALSE(options.transport_options.ice_restart); |
| 709 } |
| 710 |
| 711 // Test that a correct MediaSessionOptions are created for an offer if |
| 712 // a MediaStream is sent and later updated with a new track. |
| 713 // MediaConstraints are not used. |
| 714 TEST_F(MediaStreamSignalingTest, AddTrackToLocalMediaStream) { |
| 715 RTCOfferAnswerOptions rtc_options; |
| 716 rtc::scoped_refptr<StreamCollection> local_streams( |
| 717 CreateStreamCollection(1)); |
| 718 MediaStreamInterface* local_stream = local_streams->at(0); |
| 719 EXPECT_TRUE(signaling_->AddLocalStream(local_stream)); |
| 720 cricket::MediaSessionOptions options; |
| 721 EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options)); |
| 722 VerifyMediaOptions(local_streams, options); |
| 723 |
| 724 cricket::MediaSessionOptions updated_options; |
| 725 local_stream->AddTrack(AudioTrack::Create(kAudioTracks[1], NULL)); |
| 726 EXPECT_TRUE(signaling_->GetOptionsForOffer(rtc_options, &options)); |
| 727 VerifyMediaOptions(local_streams, options); |
| 728 } |
| 729 |
| 730 // Test that the MediaConstraints in an answer don't affect if audio and video |
| 731 // is offered in an offer but that if kOfferToReceiveAudio or |
| 732 // kOfferToReceiveVideo constraints are true in an offer, the media type will be |
| 733 // included in subsequent answers. |
| 734 TEST_F(MediaStreamSignalingTest, MediaConstraintsInAnswer) { |
| 735 FakeConstraints answer_c; |
| 736 answer_c.SetMandatoryReceiveAudio(true); |
| 737 answer_c.SetMandatoryReceiveVideo(true); |
| 738 |
| 739 cricket::MediaSessionOptions answer_options; |
| 740 EXPECT_TRUE(signaling_->GetOptionsForAnswer(&answer_c, &answer_options)); |
| 741 EXPECT_TRUE(answer_options.has_audio()); |
| 742 EXPECT_TRUE(answer_options.has_video()); |
| 743 |
| 744 RTCOfferAnswerOptions rtc_offer_optoins; |
| 745 |
| 746 cricket::MediaSessionOptions offer_options; |
| 747 EXPECT_TRUE( |
| 748 signaling_->GetOptionsForOffer(rtc_offer_optoins, &offer_options)); |
| 749 EXPECT_FALSE(offer_options.has_audio()); |
| 750 EXPECT_FALSE(offer_options.has_video()); |
| 751 |
| 752 RTCOfferAnswerOptions updated_rtc_offer_optoins; |
| 753 updated_rtc_offer_optoins.offer_to_receive_audio = 1; |
| 754 updated_rtc_offer_optoins.offer_to_receive_video = 1; |
| 755 |
| 756 cricket::MediaSessionOptions updated_offer_options; |
| 757 EXPECT_TRUE(signaling_->GetOptionsForOffer(updated_rtc_offer_optoins, |
| 758 &updated_offer_options)); |
| 759 EXPECT_TRUE(updated_offer_options.has_audio()); |
| 760 EXPECT_TRUE(updated_offer_options.has_video()); |
| 761 |
| 762 // Since an offer has been created with both audio and video, subsequent |
| 763 // offers and answers should contain both audio and video. |
| 764 // Answers will only contain the media types that exist in the offer |
| 765 // regardless of the value of |updated_answer_options.has_audio| and |
| 766 // |updated_answer_options.has_video|. |
| 767 FakeConstraints updated_answer_c; |
| 768 answer_c.SetMandatoryReceiveAudio(false); |
| 769 answer_c.SetMandatoryReceiveVideo(false); |
| 770 |
| 771 cricket::MediaSessionOptions updated_answer_options; |
| 772 EXPECT_TRUE(signaling_->GetOptionsForAnswer(&updated_answer_c, |
| 773 &updated_answer_options)); |
| 774 EXPECT_TRUE(updated_answer_options.has_audio()); |
| 775 EXPECT_TRUE(updated_answer_options.has_video()); |
| 776 |
| 777 RTCOfferAnswerOptions default_rtc_options; |
| 778 EXPECT_TRUE(signaling_->GetOptionsForOffer(default_rtc_options, |
| 779 &updated_offer_options)); |
| 780 // By default, |has_audio| or |has_video| are false if there is no media |
| 781 // track. |
| 782 EXPECT_FALSE(updated_offer_options.has_audio()); |
| 783 EXPECT_FALSE(updated_offer_options.has_video()); |
| 784 } |
| 785 |
| 786 // This test verifies that the remote MediaStreams corresponding to a received |
| 787 // SDP string is created. In this test the two separate MediaStreams are |
| 788 // signaled. |
| 789 TEST_F(MediaStreamSignalingTest, UpdateRemoteStreams) { |
| 790 rtc::scoped_ptr<SessionDescriptionInterface> desc( |
| 791 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 792 kSdpStringWithStream1, NULL)); |
| 793 EXPECT_TRUE(desc != NULL); |
| 794 signaling_->OnRemoteDescriptionChanged(desc.get()); |
| 795 |
| 796 rtc::scoped_refptr<StreamCollection> reference( |
| 797 CreateStreamCollection(1)); |
| 798 EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(), |
| 799 reference.get())); |
| 800 EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(), |
| 801 reference.get())); |
| 802 EXPECT_EQ(1u, observer_->NumberOfRemoteAudioTracks()); |
| 803 observer_->VerifyRemoteAudioTrack(kStreams[0], kAudioTracks[0], 1); |
| 804 EXPECT_EQ(1u, observer_->NumberOfRemoteVideoTracks()); |
| 805 observer_->VerifyRemoteVideoTrack(kStreams[0], kVideoTracks[0], 2); |
| 806 ASSERT_EQ(1u, observer_->remote_streams()->count()); |
| 807 MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); |
| 808 EXPECT_TRUE(remote_stream->GetVideoTracks()[0]->GetSource() != NULL); |
| 809 |
| 810 // Create a session description based on another SDP with another |
| 811 // MediaStream. |
| 812 rtc::scoped_ptr<SessionDescriptionInterface> update_desc( |
| 813 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 814 kSdpStringWith2Stream, NULL)); |
| 815 EXPECT_TRUE(update_desc != NULL); |
| 816 signaling_->OnRemoteDescriptionChanged(update_desc.get()); |
| 817 |
| 818 rtc::scoped_refptr<StreamCollection> reference2( |
| 819 CreateStreamCollection(2)); |
| 820 EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(), |
| 821 reference2.get())); |
| 822 EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(), |
| 823 reference2.get())); |
| 824 |
| 825 EXPECT_EQ(2u, observer_->NumberOfRemoteAudioTracks()); |
| 826 observer_->VerifyRemoteAudioTrack(kStreams[0], kAudioTracks[0], 1); |
| 827 observer_->VerifyRemoteAudioTrack(kStreams[1], kAudioTracks[1], 3); |
| 828 EXPECT_EQ(2u, observer_->NumberOfRemoteVideoTracks()); |
| 829 observer_->VerifyRemoteVideoTrack(kStreams[0], kVideoTracks[0], 2); |
| 830 observer_->VerifyRemoteVideoTrack(kStreams[1], kVideoTracks[1], 4); |
| 831 } |
| 832 |
| 833 // This test verifies that the remote MediaStreams corresponding to a received |
| 834 // SDP string is created. In this test the same remote MediaStream is signaled |
| 835 // but MediaStream tracks are added and removed. |
| 836 TEST_F(MediaStreamSignalingTest, AddRemoveTrackFromExistingRemoteMediaStream) { |
| 837 rtc::scoped_ptr<SessionDescriptionInterface> desc_ms1; |
| 838 CreateSessionDescriptionAndReference(1, 1, desc_ms1.use()); |
| 839 signaling_->OnRemoteDescriptionChanged(desc_ms1.get()); |
| 840 EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(), |
| 841 reference_collection_)); |
| 842 |
| 843 // Add extra audio and video tracks to the same MediaStream. |
| 844 rtc::scoped_ptr<SessionDescriptionInterface> desc_ms1_two_tracks; |
| 845 CreateSessionDescriptionAndReference(2, 2, desc_ms1_two_tracks.use()); |
| 846 signaling_->OnRemoteDescriptionChanged(desc_ms1_two_tracks.get()); |
| 847 EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(), |
| 848 reference_collection_)); |
| 849 EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(), |
| 850 reference_collection_)); |
| 851 |
| 852 // Remove the extra audio and video tracks again. |
| 853 rtc::scoped_ptr<SessionDescriptionInterface> desc_ms2; |
| 854 CreateSessionDescriptionAndReference(1, 1, desc_ms2.use()); |
| 855 signaling_->OnRemoteDescriptionChanged(desc_ms2.get()); |
| 856 EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(), |
| 857 reference_collection_)); |
| 858 EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(), |
| 859 reference_collection_)); |
| 860 } |
| 861 |
| 862 // This test that remote tracks are ended if a |
| 863 // local session description is set that rejects the media content type. |
| 864 TEST_F(MediaStreamSignalingTest, RejectMediaContent) { |
| 865 rtc::scoped_ptr<SessionDescriptionInterface> desc( |
| 866 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 867 kSdpStringWithStream1, NULL)); |
| 868 EXPECT_TRUE(desc != NULL); |
| 869 signaling_->OnRemoteDescriptionChanged(desc.get()); |
| 870 |
| 871 ASSERT_EQ(1u, observer_->remote_streams()->count()); |
| 872 MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); |
| 873 ASSERT_EQ(1u, remote_stream->GetVideoTracks().size()); |
| 874 ASSERT_EQ(1u, remote_stream->GetAudioTracks().size()); |
| 875 |
| 876 rtc::scoped_refptr<webrtc::VideoTrackInterface> remote_video = |
| 877 remote_stream->GetVideoTracks()[0]; |
| 878 EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_video->state()); |
| 879 rtc::scoped_refptr<webrtc::AudioTrackInterface> remote_audio = |
| 880 remote_stream->GetAudioTracks()[0]; |
| 881 EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state()); |
| 882 |
| 883 cricket::ContentInfo* video_info = |
| 884 desc->description()->GetContentByName("video"); |
| 885 ASSERT_TRUE(video_info != NULL); |
| 886 video_info->rejected = true; |
| 887 signaling_->OnLocalDescriptionChanged(desc.get()); |
| 888 EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_video->state()); |
| 889 EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state()); |
| 890 |
| 891 cricket::ContentInfo* audio_info = |
| 892 desc->description()->GetContentByName("audio"); |
| 893 ASSERT_TRUE(audio_info != NULL); |
| 894 audio_info->rejected = true; |
| 895 signaling_->OnLocalDescriptionChanged(desc.get()); |
| 896 EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_audio->state()); |
| 897 } |
| 898 |
| 899 // This test that it won't crash if the remote track as been removed outside |
| 900 // of MediaStreamSignaling and then MediaStreamSignaling tries to reject |
| 901 // this track. |
| 902 TEST_F(MediaStreamSignalingTest, RemoveTrackThenRejectMediaContent) { |
| 903 rtc::scoped_ptr<SessionDescriptionInterface> desc( |
| 904 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 905 kSdpStringWithStream1, NULL)); |
| 906 EXPECT_TRUE(desc != NULL); |
| 907 signaling_->OnRemoteDescriptionChanged(desc.get()); |
| 908 |
| 909 MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); |
| 910 remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]); |
| 911 remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]); |
| 912 |
| 913 cricket::ContentInfo* video_info = |
| 914 desc->description()->GetContentByName("video"); |
| 915 video_info->rejected = true; |
| 916 signaling_->OnLocalDescriptionChanged(desc.get()); |
| 917 |
| 918 cricket::ContentInfo* audio_info = |
| 919 desc->description()->GetContentByName("audio"); |
| 920 audio_info->rejected = true; |
| 921 signaling_->OnLocalDescriptionChanged(desc.get()); |
| 922 |
| 923 // No crash is a pass. |
| 924 } |
| 925 |
| 926 // This tests that a default MediaStream is created if a remote session |
| 927 // description doesn't contain any streams and no MSID support. |
| 928 // It also tests that the default stream is updated if a video m-line is added |
| 929 // in a subsequent session description. |
| 930 TEST_F(MediaStreamSignalingTest, SdpWithoutMsidCreatesDefaultStream) { |
| 931 rtc::scoped_ptr<SessionDescriptionInterface> desc_audio_only( |
| 932 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 933 kSdpStringWithoutStreamsAudioOnly, |
| 934 NULL)); |
| 935 ASSERT_TRUE(desc_audio_only != NULL); |
| 936 signaling_->OnRemoteDescriptionChanged(desc_audio_only.get()); |
| 937 |
| 938 EXPECT_EQ(1u, signaling_->remote_streams()->count()); |
| 939 ASSERT_EQ(1u, observer_->remote_streams()->count()); |
| 940 MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); |
| 941 |
| 942 EXPECT_EQ(1u, remote_stream->GetAudioTracks().size()); |
| 943 EXPECT_EQ(0u, remote_stream->GetVideoTracks().size()); |
| 944 EXPECT_EQ("default", remote_stream->label()); |
| 945 |
| 946 rtc::scoped_ptr<SessionDescriptionInterface> desc( |
| 947 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 948 kSdpStringWithoutStreams, NULL)); |
| 949 ASSERT_TRUE(desc != NULL); |
| 950 signaling_->OnRemoteDescriptionChanged(desc.get()); |
| 951 EXPECT_EQ(1u, signaling_->remote_streams()->count()); |
| 952 ASSERT_EQ(1u, remote_stream->GetAudioTracks().size()); |
| 953 EXPECT_EQ("defaulta0", remote_stream->GetAudioTracks()[0]->id()); |
| 954 ASSERT_EQ(1u, remote_stream->GetVideoTracks().size()); |
| 955 EXPECT_EQ("defaultv0", remote_stream->GetVideoTracks()[0]->id()); |
| 956 observer_->VerifyRemoteAudioTrack("default", "defaulta0", 0); |
| 957 observer_->VerifyRemoteVideoTrack("default", "defaultv0", 0); |
| 958 } |
| 959 |
| 960 // This tests that a default MediaStream is created if a remote session |
| 961 // description doesn't contain any streams and media direction is send only. |
| 962 TEST_F(MediaStreamSignalingTest, RecvOnlySdpWithoutMsidCreatesDefaultStream) { |
| 963 rtc::scoped_ptr<SessionDescriptionInterface> desc( |
| 964 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 965 kSdpStringSendOnlyWithWithoutStreams, |
| 966 NULL)); |
| 967 ASSERT_TRUE(desc != NULL); |
| 968 signaling_->OnRemoteDescriptionChanged(desc.get()); |
| 969 |
| 970 EXPECT_EQ(1u, signaling_->remote_streams()->count()); |
| 971 ASSERT_EQ(1u, observer_->remote_streams()->count()); |
| 972 MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); |
| 973 |
| 974 EXPECT_EQ(1u, remote_stream->GetAudioTracks().size()); |
| 975 EXPECT_EQ(1u, remote_stream->GetVideoTracks().size()); |
| 976 EXPECT_EQ("default", remote_stream->label()); |
| 977 } |
| 978 |
| 979 // This tests that it won't crash when MediaStreamSignaling tries to remove |
| 980 // a remote track that as already been removed from the mediastream. |
| 981 TEST_F(MediaStreamSignalingTest, RemoveAlreadyGoneRemoteStream) { |
| 982 rtc::scoped_ptr<SessionDescriptionInterface> desc_audio_only( |
| 983 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 984 kSdpStringWithoutStreams, |
| 985 NULL)); |
| 986 ASSERT_TRUE(desc_audio_only != NULL); |
| 987 signaling_->OnRemoteDescriptionChanged(desc_audio_only.get()); |
| 988 MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); |
| 989 remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]); |
| 990 remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]); |
| 991 |
| 992 rtc::scoped_ptr<SessionDescriptionInterface> desc( |
| 993 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 994 kSdpStringWithoutStreams, NULL)); |
| 995 ASSERT_TRUE(desc != NULL); |
| 996 signaling_->OnRemoteDescriptionChanged(desc.get()); |
| 997 |
| 998 // No crash is a pass. |
| 999 } |
| 1000 |
| 1001 // This tests that a default MediaStream is created if the remote session |
| 1002 // description doesn't contain any streams and don't contain an indication if |
| 1003 // MSID is supported. |
| 1004 TEST_F(MediaStreamSignalingTest, |
| 1005 SdpWithoutMsidAndStreamsCreatesDefaultStream) { |
| 1006 rtc::scoped_ptr<SessionDescriptionInterface> desc( |
| 1007 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 1008 kSdpStringWithoutStreams, |
| 1009 NULL)); |
| 1010 ASSERT_TRUE(desc != NULL); |
| 1011 signaling_->OnRemoteDescriptionChanged(desc.get()); |
| 1012 |
| 1013 ASSERT_EQ(1u, observer_->remote_streams()->count()); |
| 1014 MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0); |
| 1015 EXPECT_EQ(1u, remote_stream->GetAudioTracks().size()); |
| 1016 EXPECT_EQ(1u, remote_stream->GetVideoTracks().size()); |
| 1017 } |
| 1018 |
| 1019 // This tests that a default MediaStream is not created if the remote session |
| 1020 // description doesn't contain any streams but does support MSID. |
| 1021 TEST_F(MediaStreamSignalingTest, SdpWitMsidDontCreatesDefaultStream) { |
| 1022 rtc::scoped_ptr<SessionDescriptionInterface> desc_msid_without_streams( |
| 1023 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 1024 kSdpStringWithMsidWithoutStreams, |
| 1025 NULL)); |
| 1026 signaling_->OnRemoteDescriptionChanged(desc_msid_without_streams.get()); |
| 1027 EXPECT_EQ(0u, observer_->remote_streams()->count()); |
| 1028 } |
| 1029 |
| 1030 // This test that a default MediaStream is not created if a remote session |
| 1031 // description is updated to not have any MediaStreams. |
| 1032 TEST_F(MediaStreamSignalingTest, VerifyDefaultStreamIsNotCreated) { |
| 1033 rtc::scoped_ptr<SessionDescriptionInterface> desc( |
| 1034 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 1035 kSdpStringWithStream1, |
| 1036 NULL)); |
| 1037 ASSERT_TRUE(desc != NULL); |
| 1038 signaling_->OnRemoteDescriptionChanged(desc.get()); |
| 1039 rtc::scoped_refptr<StreamCollection> reference( |
| 1040 CreateStreamCollection(1)); |
| 1041 EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(), |
| 1042 reference.get())); |
| 1043 |
| 1044 rtc::scoped_ptr<SessionDescriptionInterface> desc_without_streams( |
| 1045 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 1046 kSdpStringWithoutStreams, |
| 1047 NULL)); |
| 1048 signaling_->OnRemoteDescriptionChanged(desc_without_streams.get()); |
| 1049 EXPECT_EQ(0u, observer_->remote_streams()->count()); |
| 1050 } |
| 1051 |
| 1052 // This test that the correct MediaStreamSignalingObserver methods are called |
| 1053 // when MediaStreamSignaling::OnLocalDescriptionChanged is called with an |
| 1054 // updated local session description. |
| 1055 TEST_F(MediaStreamSignalingTest, LocalDescriptionChanged) { |
| 1056 rtc::scoped_ptr<SessionDescriptionInterface> desc_1; |
| 1057 CreateSessionDescriptionAndReference(2, 2, desc_1.use()); |
| 1058 |
| 1059 signaling_->AddLocalStream(reference_collection_->at(0)); |
| 1060 signaling_->OnLocalDescriptionChanged(desc_1.get()); |
| 1061 EXPECT_EQ(2u, observer_->NumberOfLocalAudioTracks()); |
| 1062 EXPECT_EQ(2u, observer_->NumberOfLocalVideoTracks()); |
| 1063 observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1); |
| 1064 observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2); |
| 1065 observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[1], 3); |
| 1066 observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[1], 4); |
| 1067 |
| 1068 // Remove an audio and video track. |
| 1069 rtc::scoped_ptr<SessionDescriptionInterface> desc_2; |
| 1070 CreateSessionDescriptionAndReference(1, 1, desc_2.use()); |
| 1071 signaling_->OnLocalDescriptionChanged(desc_2.get()); |
| 1072 EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks()); |
| 1073 EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks()); |
| 1074 observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1); |
| 1075 observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2); |
| 1076 } |
| 1077 |
| 1078 // This test that the correct MediaStreamSignalingObserver methods are called |
| 1079 // when MediaStreamSignaling::AddLocalStream is called after |
| 1080 // MediaStreamSignaling::OnLocalDescriptionChanged is called. |
| 1081 TEST_F(MediaStreamSignalingTest, AddLocalStreamAfterLocalDescriptionChanged) { |
| 1082 rtc::scoped_ptr<SessionDescriptionInterface> desc_1; |
| 1083 CreateSessionDescriptionAndReference(2, 2, desc_1.use()); |
| 1084 |
| 1085 signaling_->OnLocalDescriptionChanged(desc_1.get()); |
| 1086 EXPECT_EQ(0u, observer_->NumberOfLocalAudioTracks()); |
| 1087 EXPECT_EQ(0u, observer_->NumberOfLocalVideoTracks()); |
| 1088 |
| 1089 signaling_->AddLocalStream(reference_collection_->at(0)); |
| 1090 EXPECT_EQ(2u, observer_->NumberOfLocalAudioTracks()); |
| 1091 EXPECT_EQ(2u, observer_->NumberOfLocalVideoTracks()); |
| 1092 observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1); |
| 1093 observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2); |
| 1094 observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[1], 3); |
| 1095 observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[1], 4); |
| 1096 } |
| 1097 |
| 1098 // This test that the correct MediaStreamSignalingObserver methods are called |
| 1099 // if the ssrc on a local track is changed when |
| 1100 // MediaStreamSignaling::OnLocalDescriptionChanged is called. |
| 1101 TEST_F(MediaStreamSignalingTest, ChangeSsrcOnTrackInLocalSessionDescription) { |
| 1102 rtc::scoped_ptr<SessionDescriptionInterface> desc; |
| 1103 CreateSessionDescriptionAndReference(1, 1, desc.use()); |
| 1104 |
| 1105 signaling_->AddLocalStream(reference_collection_->at(0)); |
| 1106 signaling_->OnLocalDescriptionChanged(desc.get()); |
| 1107 EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks()); |
| 1108 EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks()); |
| 1109 observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1); |
| 1110 observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2); |
| 1111 |
| 1112 // Change the ssrc of the audio and video track. |
| 1113 std::string sdp; |
| 1114 desc->ToString(&sdp); |
| 1115 std::string ssrc_org = "a=ssrc:1"; |
| 1116 std::string ssrc_to = "a=ssrc:97"; |
| 1117 rtc::replace_substrs(ssrc_org.c_str(), ssrc_org.length(), |
| 1118 ssrc_to.c_str(), ssrc_to.length(), |
| 1119 &sdp); |
| 1120 ssrc_org = "a=ssrc:2"; |
| 1121 ssrc_to = "a=ssrc:98"; |
| 1122 rtc::replace_substrs(ssrc_org.c_str(), ssrc_org.length(), |
| 1123 ssrc_to.c_str(), ssrc_to.length(), |
| 1124 &sdp); |
| 1125 rtc::scoped_ptr<SessionDescriptionInterface> updated_desc( |
| 1126 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 1127 sdp, NULL)); |
| 1128 |
| 1129 signaling_->OnLocalDescriptionChanged(updated_desc.get()); |
| 1130 EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks()); |
| 1131 EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks()); |
| 1132 observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 97); |
| 1133 observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 98); |
| 1134 } |
| 1135 |
| 1136 // This test that the correct MediaStreamSignalingObserver methods are called |
| 1137 // if a new session description is set with the same tracks but they are now |
| 1138 // sent on a another MediaStream. |
| 1139 TEST_F(MediaStreamSignalingTest, SignalSameTracksInSeparateMediaStream) { |
| 1140 rtc::scoped_ptr<SessionDescriptionInterface> desc; |
| 1141 CreateSessionDescriptionAndReference(1, 1, desc.use()); |
| 1142 |
| 1143 signaling_->AddLocalStream(reference_collection_->at(0)); |
| 1144 signaling_->OnLocalDescriptionChanged(desc.get()); |
| 1145 EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks()); |
| 1146 EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks()); |
| 1147 |
| 1148 std::string stream_label_0 = kStreams[0]; |
| 1149 observer_->VerifyLocalAudioTrack(stream_label_0, kAudioTracks[0], 1); |
| 1150 observer_->VerifyLocalVideoTrack(stream_label_0, kVideoTracks[0], 2); |
| 1151 |
| 1152 // Add a new MediaStream but with the same tracks as in the first stream. |
| 1153 std::string stream_label_1 = kStreams[1]; |
| 1154 rtc::scoped_refptr<webrtc::MediaStreamInterface> stream_1( |
| 1155 webrtc::MediaStream::Create(kStreams[1])); |
| 1156 stream_1->AddTrack(reference_collection_->at(0)->GetVideoTracks()[0]); |
| 1157 stream_1->AddTrack(reference_collection_->at(0)->GetAudioTracks()[0]); |
| 1158 signaling_->AddLocalStream(stream_1); |
| 1159 |
| 1160 // Replace msid in the original SDP. |
| 1161 std::string sdp; |
| 1162 desc->ToString(&sdp); |
| 1163 rtc::replace_substrs( |
| 1164 kStreams[0], strlen(kStreams[0]), kStreams[1], strlen(kStreams[1]), &sdp); |
| 1165 |
| 1166 rtc::scoped_ptr<SessionDescriptionInterface> updated_desc( |
| 1167 webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer, |
| 1168 sdp, NULL)); |
| 1169 |
| 1170 signaling_->OnLocalDescriptionChanged(updated_desc.get()); |
| 1171 observer_->VerifyLocalAudioTrack(kStreams[1], kAudioTracks[0], 1); |
| 1172 observer_->VerifyLocalVideoTrack(kStreams[1], kVideoTracks[0], 2); |
| 1173 EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks()); |
| 1174 EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks()); |
| 1175 } |
| 1176 |
| 1177 // Verifies that an even SCTP id is allocated for SSL_CLIENT and an odd id for |
| 1178 // SSL_SERVER. |
| 1179 TEST_F(MediaStreamSignalingTest, SctpIdAllocationBasedOnRole) { |
| 1180 int id; |
| 1181 ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_SERVER, &id)); |
| 1182 EXPECT_EQ(1, id); |
| 1183 ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_CLIENT, &id)); |
| 1184 EXPECT_EQ(0, id); |
| 1185 ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_SERVER, &id)); |
| 1186 EXPECT_EQ(3, id); |
| 1187 ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_CLIENT, &id)); |
| 1188 EXPECT_EQ(2, id); |
| 1189 } |
| 1190 |
| 1191 // Verifies that SCTP ids of existing DataChannels are not reused. |
| 1192 TEST_F(MediaStreamSignalingTest, SctpIdAllocationNoReuse) { |
| 1193 int old_id = 1; |
| 1194 AddDataChannel(cricket::DCT_SCTP, "a", old_id); |
| 1195 |
| 1196 int new_id; |
| 1197 ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_SERVER, &new_id)); |
| 1198 EXPECT_NE(old_id, new_id); |
| 1199 |
| 1200 // Creates a DataChannel with id 0. |
| 1201 old_id = 0; |
| 1202 AddDataChannel(cricket::DCT_SCTP, "a", old_id); |
| 1203 ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_CLIENT, &new_id)); |
| 1204 EXPECT_NE(old_id, new_id); |
| 1205 } |
| 1206 |
| 1207 // Verifies that SCTP ids of removed DataChannels can be reused. |
| 1208 TEST_F(MediaStreamSignalingTest, SctpIdReusedForRemovedDataChannel) { |
| 1209 int odd_id = 1; |
| 1210 int even_id = 0; |
| 1211 AddDataChannel(cricket::DCT_SCTP, "a", odd_id); |
| 1212 AddDataChannel(cricket::DCT_SCTP, "a", even_id); |
| 1213 |
| 1214 int allocated_id = -1; |
| 1215 ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_SERVER, |
| 1216 &allocated_id)); |
| 1217 EXPECT_EQ(odd_id + 2, allocated_id); |
| 1218 AddDataChannel(cricket::DCT_SCTP, "a", allocated_id); |
| 1219 |
| 1220 ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_CLIENT, |
| 1221 &allocated_id)); |
| 1222 EXPECT_EQ(even_id + 2, allocated_id); |
| 1223 AddDataChannel(cricket::DCT_SCTP, "a", allocated_id); |
| 1224 |
| 1225 signaling_->RemoveSctpDataChannel(odd_id); |
| 1226 signaling_->RemoveSctpDataChannel(even_id); |
| 1227 |
| 1228 // Verifies that removed DataChannel ids are reused. |
| 1229 ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_SERVER, |
| 1230 &allocated_id)); |
| 1231 EXPECT_EQ(odd_id, allocated_id); |
| 1232 |
| 1233 ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_CLIENT, |
| 1234 &allocated_id)); |
| 1235 EXPECT_EQ(even_id, allocated_id); |
| 1236 |
| 1237 // Verifies that used higher DataChannel ids are not reused. |
| 1238 ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_SERVER, |
| 1239 &allocated_id)); |
| 1240 EXPECT_NE(odd_id + 2, allocated_id); |
| 1241 |
| 1242 ASSERT_TRUE(signaling_->AllocateSctpSid(rtc::SSL_CLIENT, |
| 1243 &allocated_id)); |
| 1244 EXPECT_NE(even_id + 2, allocated_id); |
| 1245 |
| 1246 } |
| 1247 |
| 1248 // Verifies that duplicated label is not allowed for RTP data channel. |
| 1249 TEST_F(MediaStreamSignalingTest, RtpDuplicatedLabelNotAllowed) { |
| 1250 AddDataChannel(cricket::DCT_RTP, "a", -1); |
| 1251 |
| 1252 webrtc::InternalDataChannelInit config; |
| 1253 rtc::scoped_refptr<webrtc::DataChannel> data_channel = |
| 1254 webrtc::DataChannel::Create( |
| 1255 data_channel_provider_.get(), cricket::DCT_RTP, "a", config); |
| 1256 ASSERT_TRUE(data_channel.get() != NULL); |
| 1257 EXPECT_FALSE(signaling_->AddDataChannel(data_channel.get())); |
| 1258 } |
| 1259 |
// Verifies that duplicated label is allowed for SCTP data channel.
TEST_F(MediaStreamSignalingTest, SctpDuplicatedLabelAllowed) {
  // Two SCTP channels sharing the label "a"; presumably the AddDataChannel
  // helper verifies internally that each channel is accepted — see its
  // definition earlier in this file.
  AddDataChannel(cricket::DCT_SCTP, "a", -1);
  AddDataChannel(cricket::DCT_SCTP, "a", -1);
}
| 1265 |
| 1266 // Verifies the correct configuration is used to create DataChannel from an OPEN |
| 1267 // message. |
| 1268 TEST_F(MediaStreamSignalingTest, CreateDataChannelFromOpenMessage) { |
| 1269 FakeDataChannelFactory fake_factory(data_channel_provider_.get(), |
| 1270 cricket::DCT_SCTP, |
| 1271 signaling_.get()); |
| 1272 signaling_->SetDataChannelFactory(&fake_factory); |
| 1273 webrtc::DataChannelInit config; |
| 1274 config.id = 1; |
| 1275 rtc::Buffer payload; |
| 1276 webrtc::WriteDataChannelOpenMessage("a", config, &payload); |
| 1277 cricket::ReceiveDataParams params; |
| 1278 params.ssrc = config.id; |
| 1279 EXPECT_TRUE(signaling_->AddDataChannelFromOpenMessage(params, payload)); |
| 1280 EXPECT_EQ(config.id, fake_factory.last_init().id); |
| 1281 EXPECT_FALSE(fake_factory.last_init().negotiated); |
| 1282 EXPECT_EQ(webrtc::InternalDataChannelInit::kAcker, |
| 1283 fake_factory.last_init().open_handshake_role); |
| 1284 } |
| 1285 |
| 1286 // Verifies that duplicated label from OPEN message is allowed. |
| 1287 TEST_F(MediaStreamSignalingTest, DuplicatedLabelFromOpenMessageAllowed) { |
| 1288 AddDataChannel(cricket::DCT_SCTP, "a", -1); |
| 1289 |
| 1290 FakeDataChannelFactory fake_factory(data_channel_provider_.get(), |
| 1291 cricket::DCT_SCTP, |
| 1292 signaling_.get()); |
| 1293 signaling_->SetDataChannelFactory(&fake_factory); |
| 1294 webrtc::DataChannelInit config; |
| 1295 config.id = 0; |
| 1296 rtc::Buffer payload; |
| 1297 webrtc::WriteDataChannelOpenMessage("a", config, &payload); |
| 1298 cricket::ReceiveDataParams params; |
| 1299 params.ssrc = config.id; |
| 1300 EXPECT_TRUE(signaling_->AddDataChannelFromOpenMessage(params, payload)); |
| 1301 } |
| 1302 |
| 1303 // Verifies that a DataChannel closed remotely is closed locally. |
| 1304 TEST_F(MediaStreamSignalingTest, |
| 1305 SctpDataChannelClosedLocallyWhenClosedRemotely) { |
| 1306 webrtc::InternalDataChannelInit config; |
| 1307 config.id = 0; |
| 1308 |
| 1309 rtc::scoped_refptr<webrtc::DataChannel> data_channel = |
| 1310 webrtc::DataChannel::Create( |
| 1311 data_channel_provider_.get(), cricket::DCT_SCTP, "a", config); |
| 1312 ASSERT_TRUE(data_channel.get() != NULL); |
| 1313 EXPECT_EQ(webrtc::DataChannelInterface::kConnecting, |
| 1314 data_channel->state()); |
| 1315 |
| 1316 EXPECT_TRUE(signaling_->AddDataChannel(data_channel.get())); |
| 1317 |
| 1318 signaling_->OnRemoteSctpDataChannelClosed(config.id); |
| 1319 EXPECT_EQ(webrtc::DataChannelInterface::kClosed, data_channel->state()); |
| 1320 } |
| 1321 |
| 1322 // Verifies that DataChannel added from OPEN message is added to |
| 1323 // MediaStreamSignaling only once (webrtc issue 3778). |
| 1324 TEST_F(MediaStreamSignalingTest, DataChannelFromOpenMessageAddedOnce) { |
| 1325 FakeDataChannelFactory fake_factory(data_channel_provider_.get(), |
| 1326 cricket::DCT_SCTP, |
| 1327 signaling_.get()); |
| 1328 signaling_->SetDataChannelFactory(&fake_factory); |
| 1329 webrtc::DataChannelInit config; |
| 1330 config.id = 1; |
| 1331 rtc::Buffer payload; |
| 1332 webrtc::WriteDataChannelOpenMessage("a", config, &payload); |
| 1333 cricket::ReceiveDataParams params; |
| 1334 params.ssrc = config.id; |
| 1335 EXPECT_TRUE(signaling_->AddDataChannelFromOpenMessage(params, payload)); |
| 1336 EXPECT_TRUE(signaling_->HasDataChannels()); |
| 1337 |
| 1338 // Removes the DataChannel and verifies that no DataChannel is left. |
| 1339 signaling_->RemoveSctpDataChannel(config.id); |
| 1340 EXPECT_FALSE(signaling_->HasDataChannels()); |
| 1341 } |
OLD | NEW |