OLD | NEW |
(Empty) | |
| 1 /* |
| 2 * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. |
| 3 * |
| 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ |
| 10 |
| 11 #include "webrtc/examples/unityplugin/simple_peer_connection.h" |
| 12 |
| 13 #include <utility> |
| 14 |
| 15 #include "webrtc/api/test/fakeconstraints.h" |
| 16 #include "webrtc/base/json.h" |
| 17 #include "webrtc/media/engine/webrtcvideocapturerfactory.h" |
| 18 #include "webrtc/modules/video_capture/video_capture_factory.h" |
| 19 |
// Keys used when (de)serializing an ICE candidate to/from JSON.
const char kCandidateSdpMidName[] = "sdpMid";
const char kCandidateSdpMlineIndexName[] = "sdpMLineIndex";
const char kCandidateSdpName[] = "candidate";

// Keys used when (de)serializing a SessionDescription to/from JSON.
const char kSessionDescriptionTypeName[] = "type";
const char kSessionDescriptionSdpName[] = "sdp";

// Labels for the locally created media stream and its audio/video tracks.
const char kAudioLabel[] = "audio_label";
const char kVideoLabel[] = "video_label";
const char kStreamLabel[] = "stream_label";
// Returns the value of environment variable |env_var_name|, or
// |default_value| when the variable is unset or set to the empty string.
std::string GetEnvVarOrDefault(const char* env_var_name,
                               const char* default_value) {
  const char* env_value = getenv(env_var_name);
  if (env_value != nullptr && env_value[0] != '\0')
    return env_value;
  return default_value;
}
| 46 |
| 47 std::string GetPeerConnectionString() { |
| 48 return GetEnvVarOrDefault("WEBRTC_CONNECT", "stun:stun.l.google.com:19302"); |
| 49 } |
| 50 |
| 51 class DummySetSessionDescriptionObserver |
| 52 : public webrtc::SetSessionDescriptionObserver { |
| 53 public: |
| 54 static DummySetSessionDescriptionObserver* Create() { |
| 55 return new rtc::RefCountedObject<DummySetSessionDescriptionObserver>(); |
| 56 } |
| 57 virtual void OnSuccess() { LOG(INFO) << __FUNCTION__; } |
| 58 virtual void OnFailure(const std::string& error) { |
| 59 LOG(INFO) << __FUNCTION__ << " " << error; |
| 60 } |
| 61 |
| 62 protected: |
| 63 DummySetSessionDescriptionObserver() {} |
| 64 ~DummySetSessionDescriptionObserver() {} |
| 65 }; |
| 66 |
// State shared by all Conductor instances: the factory and its threads are
// created by the first InitializePeerConnection() call and torn down by
// DeletePeerConnection() once peer_count_ (live connections) returns to zero.
int Conductor::peer_count_ = 0;
std::unique_ptr<rtc::Thread> Conductor::worker_thread_ = nullptr;
std::unique_ptr<rtc::Thread> Conductor::signaling_thread_ = nullptr;
rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
    Conductor::peer_connection_factory_ = nullptr;
| 72 |
| 73 bool Conductor::InitializePeerConnection(bool is_receiver) { |
| 74 RTC_DCHECK(peer_connection_.get() == nullptr); |
| 75 |
| 76 if (peer_connection_factory_ == nullptr) { |
| 77 worker_thread_.reset(new rtc::Thread()); |
| 78 worker_thread_->Start(); |
| 79 signaling_thread_.reset(new rtc::Thread()); |
| 80 signaling_thread_->Start(); |
| 81 |
| 82 peer_connection_factory_ = webrtc::CreatePeerConnectionFactory( |
| 83 worker_thread_.get(), worker_thread_.get(), signaling_thread_.get(), |
| 84 nullptr, nullptr, nullptr); |
| 85 } |
| 86 if (!peer_connection_factory_.get()) { |
| 87 DeletePeerConnection(); |
| 88 return false; |
| 89 } |
| 90 |
| 91 peer_count_++; |
| 92 if (!CreatePeerConnection(is_receiver)) { |
| 93 DeletePeerConnection(); |
| 94 return false; |
| 95 } |
| 96 return peer_connection_.get() != nullptr; |
| 97 } |
| 98 |
// Creates the per-instance PeerConnection from the shared factory.
// Preconditions: factory exists, no connection yet. Returns true on success.
bool Conductor::CreatePeerConnection(bool is_receiver) {
  RTC_DCHECK(peer_connection_factory_.get() != nullptr);
  RTC_DCHECK(peer_connection_.get() == nullptr);

  webrtc::PeerConnectionInterface::RTCConfiguration config;
  webrtc::PeerConnectionInterface::IceServer server;
  // STUN server URI, overridable via the WEBRTC_CONNECT env var.
  server.uri = GetPeerConnectionString();
  config.servers.push_back(server);

  webrtc::FakeConstraints constraints;
  // Allow SCTP data channels over DTLS.
  constraints.SetAllowDtlsSctpDataChannels();

  if (is_receiver) {
    // A receiving peer must explicitly offer to receive both media kinds.
    constraints.SetMandatoryReceiveAudio(true);
    constraints.SetMandatoryReceiveVideo(true);
  }

  // |this| acts as the PeerConnectionObserver for the new connection.
  peer_connection_ = peer_connection_factory_->CreatePeerConnection(
      config, &constraints, nullptr, nullptr, this);

  return peer_connection_.get() != nullptr;
}
| 121 |
// Releases this instance's connection, data channel and local streams, and —
// when this was the last live instance — tears down the shared factory and
// its threads so the plugin can be fully unloaded.
void Conductor::DeletePeerConnection() {
  peer_count_--;

  CloseDataChannel();
  peer_connection_ = nullptr;
  active_streams_.clear();

  if (peer_count_ == 0) {
    // Last instance gone: drop the shared factory before stopping threads.
    peer_connection_factory_ = nullptr;
    signaling_thread_.reset();
    worker_thread_.reset();
  }
}
| 135 |
| 136 bool Conductor::CreateOffer() { |
| 137 if (!peer_connection_.get()) |
| 138 return false; |
| 139 |
| 140 peer_connection_->CreateOffer(this, nullptr); |
| 141 return true; |
| 142 } |
| 143 |
| 144 bool Conductor::CreateAnswer() { |
| 145 if (!peer_connection_.get()) |
| 146 return false; |
| 147 |
| 148 peer_connection_->CreateAnswer(this, nullptr); |
| 149 return true; |
| 150 } |
| 151 |
// CreateSessionDescriptionObserver: a local offer/answer is ready. Applies
// it as the local description, serializes it to JSON ({"type", "sdp"}) and
// hands the string to the app via the registered callback for signaling.
void Conductor::OnSuccess(webrtc::SessionDescriptionInterface* desc) {
  // SetLocalDescription takes ownership of |desc|.
  peer_connection_->SetLocalDescription(
      DummySetSessionDescriptionObserver::Create(), desc);

  std::string sdp;
  desc->ToString(&sdp);

  Json::StyledWriter writer;
  Json::Value jmessage;
  jmessage[kSessionDescriptionTypeName] = desc->type();
  jmessage[kSessionDescriptionSdpName] = sdp;

  if (OnLocalSdpReady)
    OnLocalSdpReady(writer.write(jmessage).c_str());
}
| 167 |
| 168 void Conductor::OnFailure(const std::string& error) { |
| 169 LOG(LERROR) << error; |
| 170 |
| 171 if (OnFailureMessage) |
| 172 OnFailureMessage(error.c_str()); |
| 173 } |
| 174 |
// PeerConnectionObserver: a new local ICE candidate was gathered. Serializes
// it to JSON ({sdpMid, sdpMLineIndex, candidate}) and forwards the string to
// the registered callback so the app can signal it to the remote peer.
void Conductor::OnIceCandidate(const webrtc::IceCandidateInterface* candidate) {
  LOG(INFO) << __FUNCTION__ << " " << candidate->sdp_mline_index();

  Json::StyledWriter writer;
  Json::Value jmessage;

  jmessage[kCandidateSdpMidName] = candidate->sdp_mid();
  jmessage[kCandidateSdpMlineIndexName] = candidate->sdp_mline_index();
  std::string sdp;
  if (!candidate->ToString(&sdp)) {
    LOG(LS_ERROR) << "Failed to serialize candidate";
    return;
  }
  jmessage[kCandidateSdpName] = sdp;

  if (OnIceCandiateReady)
    OnIceCandiateReady(writer.write(jmessage).c_str());
}
| 193 |
// Registers the callback invoked when a decoded video frame is available.
// NOTE(review): "FramReady" (missing 'e') matches the declaration in the
// header; renaming would break the public plugin interface.
void Conductor::RegisterOnVideoFramReady(VIDEOFRAMEREADY_CALLBACK callback) {
  OnVideoFrameReady = callback;
}
| 197 |
// Registers the callback invoked when the local data channel reaches kOpen.
void Conductor::RegisterOnLocalDataChannelReady(
    LOCALDATACHANNELREADY_CALLBACK callback) {
  OnLocalDataChannelReady = callback;
}
| 202 |
// Registers the callback invoked with each message received on the data
// channel (see OnMessage()).
void Conductor::RegisterOnDataFromDataChannelReady(
    DATAFROMEDATECHANNELREADY_CALLBACK callback) {
  OnDataFromDataChannelReady = callback;
}
| 207 |
// Registers the callback invoked when SDP creation fails (see OnFailure()).
void Conductor::RegisterOnFailure(FAILURE_CALLBACK callback) {
  OnFailureMessage = callback;
}
| 211 |
// Registers the callback invoked with raw remote audio data (see OnData()).
void Conductor::RegisterOnAudioBusReady(AUDIOBUSREADY_CALLBACK callback) {
  OnAudioReady = callback;
}
| 215 |
// Registers the callback that receives the serialized local SDP once an
// offer/answer has been created (see OnSuccess()).
void Conductor::RegisterOnLocalSdpReadytoSend(
    LOCALSDPREADYTOSEND_CALLBACK callback) {
  OnLocalSdpReady = callback;
}
| 220 |
// Registers the callback that receives serialized local ICE candidates
// (see OnIceCandidate()).
// NOTE(review): "Candiate" typo matches the declaration in the header;
// renaming would break the public plugin interface.
void Conductor::RegisterOnIceCandiateReadytoSend(
    ICECANDIDATEREADYTOSEND_CALLBACK callback) {
  OnIceCandiateReady = callback;
}
| 225 |
| 226 bool Conductor::ReceivedSdp(const char* msg) { |
| 227 if (!peer_connection_) |
| 228 return false; |
| 229 |
| 230 std::string message(msg); |
| 231 |
| 232 Json::Reader reader; |
| 233 Json::Value jmessage; |
| 234 if (!reader.parse(message, jmessage)) { |
| 235 LOG(WARNING) << "Received unknown message. " << message; |
| 236 return false; |
| 237 } |
| 238 std::string type; |
| 239 std::string json_object; |
| 240 |
| 241 rtc::GetStringFromJsonObject(jmessage, kSessionDescriptionTypeName, &type); |
| 242 if (type.empty()) |
| 243 return false; |
| 244 |
| 245 std::string sdp; |
| 246 if (!rtc::GetStringFromJsonObject(jmessage, kSessionDescriptionSdpName, |
| 247 &sdp)) { |
| 248 LOG(WARNING) << "Can't parse received session description message."; |
| 249 return false; |
| 250 } |
| 251 webrtc::SdpParseError error; |
| 252 webrtc::SessionDescriptionInterface* session_description( |
| 253 webrtc::CreateSessionDescription(type, sdp, &error)); |
| 254 if (!session_description) { |
| 255 LOG(WARNING) << "Can't parse received session description message. " |
| 256 << "SdpParseError was: " << error.description; |
| 257 return false; |
| 258 } |
| 259 LOG(INFO) << " Received session description :" << message; |
| 260 peer_connection_->SetRemoteDescription( |
| 261 DummySetSessionDescriptionObserver::Create(), session_description); |
| 262 |
| 263 return true; |
| 264 } |
| 265 |
| 266 bool Conductor::ReceivedIceCandidate(const char* ice_candidate) { |
| 267 if (!peer_connection_) |
| 268 return false; |
| 269 |
| 270 std::string message(ice_candidate); |
| 271 |
| 272 Json::Reader reader; |
| 273 Json::Value jmessage; |
| 274 if (!reader.parse(message, jmessage)) { |
| 275 LOG(WARNING) << "Received unknown message. " << message; |
| 276 return false; |
| 277 } |
| 278 std::string type; |
| 279 std::string json_object; |
| 280 |
| 281 rtc::GetStringFromJsonObject(jmessage, kSessionDescriptionTypeName, &type); |
| 282 if (!type.empty()) |
| 283 return false; |
| 284 |
| 285 std::string sdp_mid; |
| 286 int sdp_mlineindex = 0; |
| 287 std::string sdp; |
| 288 if (!rtc::GetStringFromJsonObject(jmessage, kCandidateSdpMidName, &sdp_mid) || |
| 289 !rtc::GetIntFromJsonObject(jmessage, kCandidateSdpMlineIndexName, |
| 290 &sdp_mlineindex) || |
| 291 !rtc::GetStringFromJsonObject(jmessage, kCandidateSdpName, &sdp)) { |
| 292 LOG(WARNING) << "Can't parse received message."; |
| 293 return false; |
| 294 } |
| 295 webrtc::SdpParseError error; |
| 296 std::unique_ptr<webrtc::IceCandidateInterface> candidate( |
| 297 webrtc::CreateIceCandidate(sdp_mid, sdp_mlineindex, sdp, &error)); |
| 298 if (!candidate.get()) { |
| 299 LOG(WARNING) << "Can't parse received candidate message. " |
| 300 << "SdpParseError was: " << error.description; |
| 301 return false; |
| 302 } |
| 303 if (!peer_connection_->AddIceCandidate(candidate.get())) { |
| 304 LOG(WARNING) << "Failed to apply the received candidate"; |
| 305 return false; |
| 306 } |
| 307 LOG(INFO) << " Received candidate :" << message; |
| 308 return true; |
| 309 } |
| 310 |
| 311 void Conductor::SetAudioControl(bool is_mute, bool is_record) { |
| 312 is_mute_audio_ = is_mute; |
| 313 is_record_audio_ = is_record; |
| 314 |
| 315 SetAudioControl(); |
| 316 } |
| 317 |
| 318 void Conductor::SetAudioControl() { |
| 319 if (!remote_stream_) |
| 320 return; |
| 321 webrtc::AudioTrackVector tracks = remote_stream_->GetAudioTracks(); |
| 322 if (tracks.empty()) |
| 323 return; |
| 324 |
| 325 webrtc::AudioTrackInterface* audio_track = tracks[0]; |
| 326 std::string id = audio_track->id(); |
| 327 if (is_record_audio_) |
| 328 audio_track->AddSink(this); |
| 329 else |
| 330 audio_track->RemoveSink(this); |
| 331 |
| 332 for (auto& track : tracks) { |
| 333 if (is_mute_audio_) |
| 334 track->set_enabled(false); |
| 335 else |
| 336 track->set_enabled(true); |
| 337 } |
| 338 } |
| 339 |
// PeerConnectionObserver: the remote peer added a media stream. Keeps a
// reference to it and (re)applies the current mute/record settings.
void Conductor::OnAddStream(
    rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) {
  LOG(INFO) << __FUNCTION__ << " " << stream->label();
  remote_stream_ = stream;

  SetAudioControl();
}
| 347 |
// Enumerates the system's capture devices and returns a capturer for the
// first one that can be opened, or nullptr when enumeration fails or no
// device opens successfully.
std::unique_ptr<cricket::VideoCapturer> Conductor::OpenVideoCaptureDevice() {
  std::vector<std::string> device_names;
  {
    std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> info(
        webrtc::VideoCaptureFactory::CreateDeviceInfo());
    if (!info) {
      return nullptr;
    }
    int num_devices = info->NumberOfDevices();
    for (int i = 0; i < num_devices; ++i) {
      const uint32_t kSize = 256;
      char name[kSize] = {0};
      char id[kSize] = {0};
      // GetDeviceName returns -1 on failure; skip devices we cannot query.
      if (info->GetDeviceName(i, name, kSize, id, kSize) != -1) {
        device_names.push_back(name);
      }
    }
  }

  cricket::WebRtcVideoDeviceCapturerFactory factory;
  std::unique_ptr<cricket::VideoCapturer> capturer;
  // Try each device in enumeration order until one opens.
  for (const auto& name : device_names) {
    capturer = factory.Create(cricket::Device(name, 0));
    if (capturer) {
      break;
    }
  }
  return capturer;
}
| 377 |
| 378 void Conductor::AddStreams(bool audio_only) { |
| 379 if (active_streams_.find(kStreamLabel) != active_streams_.end()) |
| 380 return; // Already added. |
| 381 |
| 382 rtc::scoped_refptr<webrtc::MediaStreamInterface> stream = |
| 383 peer_connection_factory_->CreateLocalMediaStream(kStreamLabel); |
| 384 |
| 385 rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track( |
| 386 peer_connection_factory_->CreateAudioTrack( |
| 387 kAudioLabel, peer_connection_factory_->CreateAudioSource(nullptr))); |
| 388 std::string id = audio_track->id(); |
| 389 stream->AddTrack(audio_track); |
| 390 |
| 391 if (!audio_only) { |
| 392 std::unique_ptr<cricket::VideoCapturer> capture = OpenVideoCaptureDevice(); |
| 393 if (capture) { |
| 394 rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track( |
| 395 peer_connection_factory_->CreateVideoTrack( |
| 396 kVideoLabel, peer_connection_factory_->CreateVideoSource( |
| 397 OpenVideoCaptureDevice(), nullptr))); |
| 398 |
| 399 stream->AddTrack(video_track); |
| 400 } |
| 401 } |
| 402 |
| 403 if (!peer_connection_->AddStream(stream)) { |
| 404 LOG(LS_ERROR) << "Adding stream to PeerConnection failed"; |
| 405 } |
| 406 |
| 407 typedef std::pair<std::string, |
| 408 rtc::scoped_refptr<webrtc::MediaStreamInterface>> |
| 409 MediaStreamPair; |
| 410 active_streams_.insert(MediaStreamPair(stream->label(), stream)); |
| 411 } |
| 412 |
| 413 bool Conductor::CreateDataChannel() { |
| 414 struct webrtc::DataChannelInit init; |
| 415 init.ordered = true; |
| 416 init.reliable = true; |
| 417 data_channel_ = peer_connection_->CreateDataChannel("Hello", &init); |
| 418 if (data_channel_.get()) { |
| 419 data_channel_->RegisterObserver(this); |
| 420 LOG(LS_INFO) << "Succeeds to create data channel"; |
| 421 return true; |
| 422 } else { |
| 423 LOG(LS_INFO) << "Fails to create data channel"; |
| 424 return false; |
| 425 } |
| 426 } |
| 427 |
| 428 void Conductor::CloseDataChannel() { |
| 429 if (data_channel_.get()) { |
| 430 data_channel_->UnregisterObserver(); |
| 431 data_channel_->Close(); |
| 432 } |
| 433 data_channel_ = nullptr; |
| 434 } |
| 435 |
| 436 bool Conductor::SendDataViaDataChannel(const std::string& data) { |
| 437 if (!data_channel_.get()) { |
| 438 LOG(LS_INFO) << "Data channel is not established"; |
| 439 return false; |
| 440 } |
| 441 webrtc::DataBuffer buffer(data); |
| 442 data_channel_->Send(buffer); |
| 443 return true; |
| 444 } |
| 445 |
| 446 // Peerconnection observer |
| 447 void Conductor::OnDataChannel( |
| 448 rtc::scoped_refptr<webrtc::DataChannelInterface> channel) { |
| 449 channel->RegisterObserver(this); |
| 450 } |
| 451 |
| 452 void Conductor::OnStateChange() { |
| 453 if (data_channel_) { |
| 454 webrtc::DataChannelInterface::DataState state = data_channel_->state(); |
| 455 if (state == webrtc::DataChannelInterface::kOpen) { |
| 456 if (OnLocalDataChannelReady) |
| 457 OnLocalDataChannelReady(); |
| 458 LOG(LS_INFO) << "Data channel is open"; |
| 459 } |
| 460 } |
| 461 } |
| 462 |
| 463 // A data buffer was successfully received. |
| 464 void Conductor::OnMessage(const webrtc::DataBuffer& buffer) { |
| 465 size_t size = buffer.data.size(); |
| 466 char* msg = new char[size + 1]; |
| 467 memcpy(msg, buffer.data.data(), size); |
| 468 msg[size] = 0; |
| 469 if (OnDataFromDataChannelReady) |
| 470 OnDataFromDataChannelReady(msg); |
| 471 delete[] msg; |
| 472 } |
| 473 |
| 474 // AudioTrackSinkInterface implementation. |
| 475 void Conductor::OnData(const void* audio_data, |
| 476 int bits_per_sample, |
| 477 int sample_rate, |
| 478 size_t number_of_channels, |
| 479 size_t number_of_frames) { |
| 480 if (OnAudioReady) |
| 481 OnAudioReady(audio_data, bits_per_sample, sample_rate, |
| 482 static_cast<int>(number_of_channels), |
| 483 static_cast<int>(number_of_frames)); |
| 484 } |
| 485 |
| 486 std::vector<uint32_t> Conductor::GetRemoteAudioTrackSsrcs() { |
| 487 std::vector<rtc::scoped_refptr<webrtc::RtpReceiverInterface>> receivers = |
| 488 peer_connection_->GetReceivers(); |
| 489 |
| 490 std::vector<uint32_t> ssrcs; |
| 491 for (const auto& receiver : receivers) { |
| 492 if (receiver->media_type() != cricket::MEDIA_TYPE_AUDIO) |
| 493 continue; |
| 494 |
| 495 std::vector<webrtc::RtpEncodingParameters> params = |
| 496 receiver->GetParameters().encodings; |
| 497 |
| 498 for (const auto& param : params) { |
| 499 uint32_t ssrc = param.ssrc.value_or(0); |
| 500 if (ssrc > 0) |
| 501 ssrcs.push_back(ssrc); |
| 502 } |
| 503 } |
| 504 |
| 505 return ssrcs; |
| 506 } |
OLD | NEW |