OLD | NEW |
(Empty) | |
| 1 /* |
| 2 * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. |
| 3 * |
| 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ |
| 10 |
| 11 #include "webrtc/examples/unityplugin/simple_peer_connection.h" |
| 12 |
| 13 #include <utility> |
| 14 |
| 15 #include "webrtc/api/test/fakeconstraints.h" |
| 16 #include "webrtc/base/json.h" |
| 17 #include "webrtc/media/engine/webrtcvideocapturerfactory.h" |
| 18 #include "webrtc/modules/video_capture/video_capture_factory.h" |
| 19 |
| 20 // Names used for a IceCandidate JSON object. |
| 21 const char kCandidateSdpMidName[] = "sdpMid"; |
| 22 const char kCandidateSdpMlineIndexName[] = "sdpMLineIndex"; |
| 23 const char kCandidateSdpName[] = "candidate"; |
| 24 |
| 25 // Names used for a SessionDescription JSON object. |
| 26 const char kSessionDescriptionTypeName[] = "type"; |
| 27 const char kSessionDescriptionSdpName[] = "sdp"; |
| 28 |
| 29 // Names used for media stream labels. |
| 30 const char kAudioLabel[] = "audio_label"; |
| 31 const char kVideoLabel[] = "video_label"; |
| 32 const char kStreamLabel[] = "stream_label"; |
| 33 |
| 34 namespace { |
| 35 static int g_peer_count = 0; |
| 36 static std::unique_ptr<rtc::Thread> g_worker_thread; |
| 37 static std::unique_ptr<rtc::Thread> g_signaling_thread; |
| 38 static rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> |
| 39 g_peer_connection_factory; |
| 40 |
| 41 std::string GetEnvVarOrDefault(const char* env_var_name, |
| 42 const char* default_value) { |
| 43 std::string value; |
| 44 const char* env_var = getenv(env_var_name); |
| 45 if (env_var) |
| 46 value = env_var; |
| 47 |
| 48 if (value.empty()) |
| 49 value = default_value; |
| 50 |
| 51 return value; |
| 52 } |
| 53 |
| 54 std::string GetPeerConnectionString() { |
| 55 return GetEnvVarOrDefault("WEBRTC_CONNECT", "stun:stun.l.google.com:19302"); |
| 56 } |
| 57 |
| 58 class DummySetSessionDescriptionObserver |
| 59 : public webrtc::SetSessionDescriptionObserver { |
| 60 public: |
| 61 static DummySetSessionDescriptionObserver* Create() { |
| 62 return new rtc::RefCountedObject<DummySetSessionDescriptionObserver>(); |
| 63 } |
| 64 virtual void OnSuccess() { LOG(INFO) << __FUNCTION__; } |
| 65 virtual void OnFailure(const std::string& error) { |
| 66 LOG(INFO) << __FUNCTION__ << " " << error; |
| 67 } |
| 68 |
| 69 protected: |
| 70 DummySetSessionDescriptionObserver() {} |
| 71 ~DummySetSessionDescriptionObserver() {} |
| 72 }; |
| 73 |
| 74 } // namespace |
| 75 |
// Lazily creates the shared factory/threads, registers this Conductor in the
// global peer count, and creates the underlying PeerConnection.
// Returns false (after cleaning up) if either the factory or the connection
// cannot be created.
bool Conductor::InitializePeerConnection(bool is_receiver) {
  RTC_DCHECK(peer_connection_.get() == nullptr);

  if (g_peer_connection_factory == nullptr) {
    g_worker_thread.reset(new rtc::Thread());
    g_worker_thread->Start();
    g_signaling_thread.reset(new rtc::Thread());
    g_signaling_thread->Start();

    g_peer_connection_factory = webrtc::CreatePeerConnectionFactory(
        g_worker_thread.get(), g_worker_thread.get(), g_signaling_thread.get(),
        nullptr, nullptr, nullptr);
  }

  // Count this instance BEFORE any failure path calls DeletePeerConnection():
  // its unconditional g_peer_count-- must balance this increment. (Previously
  // a factory failure decremented without a matching increment, driving the
  // count negative and breaking last-peer teardown.)
  g_peer_count++;

  if (!g_peer_connection_factory.get()) {
    DeletePeerConnection();
    return false;
  }

  if (!CreatePeerConnection(is_receiver)) {
    DeletePeerConnection();
    return false;
  }
  return peer_connection_.get() != nullptr;
}
| 101 |
| 102 bool Conductor::CreatePeerConnection(bool is_receiver) { |
| 103 RTC_DCHECK(g_peer_connection_factory.get() != nullptr); |
| 104 RTC_DCHECK(peer_connection_.get() == nullptr); |
| 105 |
| 106 webrtc::PeerConnectionInterface::RTCConfiguration config; |
| 107 webrtc::PeerConnectionInterface::IceServer server; |
| 108 server.uri = GetPeerConnectionString(); |
| 109 config.servers.push_back(server); |
| 110 |
| 111 webrtc::FakeConstraints constraints; |
| 112 constraints.SetAllowDtlsSctpDataChannels(); |
| 113 |
| 114 if (is_receiver) { |
| 115 constraints.SetMandatoryReceiveAudio(true); |
| 116 constraints.SetMandatoryReceiveVideo(true); |
| 117 } |
| 118 |
| 119 peer_connection_ = g_peer_connection_factory->CreatePeerConnection( |
| 120 config, &constraints, nullptr, nullptr, this); |
| 121 |
| 122 return peer_connection_.get() != nullptr; |
| 123 } |
| 124 |
// Releases this Conductor's connection state and, when this was the last
// live Conductor, the process-wide factory and threads as well.
void Conductor::DeletePeerConnection() {
  // Balances the increment done in InitializePeerConnection().
  g_peer_count--;

  // Tear down per-instance state first: channel, connection, local streams.
  CloseDataChannel();
  peer_connection_ = nullptr;
  active_streams_.clear();

  // Last peer gone: release the shared factory before stopping the threads
  // it runs on.
  if (g_peer_count == 0) {
    g_peer_connection_factory = nullptr;
    g_signaling_thread.reset();
    g_worker_thread.reset();
  }
}
| 138 |
| 139 bool Conductor::CreateOffer() { |
| 140 if (!peer_connection_.get()) |
| 141 return false; |
| 142 |
| 143 peer_connection_->CreateOffer(this, nullptr); |
| 144 return true; |
| 145 } |
| 146 |
| 147 bool Conductor::CreateAnswer() { |
| 148 if (!peer_connection_.get()) |
| 149 return false; |
| 150 |
| 151 peer_connection_->CreateAnswer(this, nullptr); |
| 152 return true; |
| 153 } |
| 154 |
| 155 void Conductor::OnSuccess(webrtc::SessionDescriptionInterface* desc) { |
| 156 peer_connection_->SetLocalDescription( |
| 157 DummySetSessionDescriptionObserver::Create(), desc); |
| 158 |
| 159 std::string sdp; |
| 160 desc->ToString(&sdp); |
| 161 |
| 162 Json::StyledWriter writer; |
| 163 Json::Value jmessage; |
| 164 jmessage[kSessionDescriptionTypeName] = desc->type(); |
| 165 jmessage[kSessionDescriptionSdpName] = sdp; |
| 166 |
| 167 if (OnLocalSdpReady) |
| 168 OnLocalSdpReady(writer.write(jmessage).c_str()); |
| 169 } |
| 170 |
| 171 void Conductor::OnFailure(const std::string& error) { |
| 172 LOG(LERROR) << error; |
| 173 |
| 174 if (OnFailureMessage) |
| 175 OnFailureMessage(error.c_str()); |
| 176 } |
| 177 |
| 178 void Conductor::OnIceCandidate(const webrtc::IceCandidateInterface* candidate) { |
| 179 LOG(INFO) << __FUNCTION__ << " " << candidate->sdp_mline_index(); |
| 180 |
| 181 Json::StyledWriter writer; |
| 182 Json::Value jmessage; |
| 183 |
| 184 jmessage[kCandidateSdpMidName] = candidate->sdp_mid(); |
| 185 jmessage[kCandidateSdpMlineIndexName] = candidate->sdp_mline_index(); |
| 186 std::string sdp; |
| 187 if (!candidate->ToString(&sdp)) { |
| 188 LOG(LS_ERROR) << "Failed to serialize candidate"; |
| 189 return; |
| 190 } |
| 191 jmessage[kCandidateSdpName] = sdp; |
| 192 |
| 193 if (OnIceCandiateReady) |
| 194 OnIceCandiateReady(writer.write(jmessage).c_str()); |
| 195 } |
| 196 |
// Stores the user callback for video-frame delivery. (The "Fram" spelling is
// part of the exported interface and must be preserved for existing callers.)
void Conductor::RegisterOnVideoFramReady(VIDEOFRAMEREADY_CALLBACK callback) {
  OnVideoFrameReady = callback;
}
| 200 |
// Stores the callback fired from OnStateChange() when the local data channel
// reaches the open state.
void Conductor::RegisterOnLocalDataChannelReady(
    LOCALDATACHANNELREADY_CALLBACK callback) {
  OnLocalDataChannelReady = callback;
}
| 205 |
// Stores the callback fired from OnMessage() with each NUL-terminated data
// channel payload.
void Conductor::RegisterOnDataFromDataChannelReady(
    DATAFROMEDATECHANNELREADY_CALLBACK callback) {
  OnDataFromDataChannelReady = callback;
}
| 210 |
// Stores the callback fired from OnFailure() with the SDP error text.
void Conductor::RegisterOnFailure(FAILURE_CALLBACK callback) {
  OnFailureMessage = callback;
}
| 214 |
// Stores the callback fired from OnData() with raw remote audio samples.
void Conductor::RegisterOnAudioBusReady(AUDIOBUSREADY_CALLBACK callback) {
  OnAudioReady = callback;
}
| 218 |
// Stores the callback fired from OnSuccess() with the JSON-serialized local
// session description, ready for the signaling channel.
void Conductor::RegisterOnLocalSdpReadytoSend(
    LOCALSDPREADYTOSEND_CALLBACK callback) {
  OnLocalSdpReady = callback;
}
| 223 |
// Stores the callback fired from OnIceCandidate() with the JSON-serialized
// local candidate. (The "Candiate" spelling is part of the exported
// interface and must be preserved for existing callers.)
void Conductor::RegisterOnIceCandiateReadytoSend(
    ICECANDIDATEREADYTOSEND_CALLBACK callback) {
  OnIceCandiateReady = callback;
}
| 228 |
| 229 bool Conductor::ReceivedSdp(const char* msg) { |
| 230 if (!peer_connection_) |
| 231 return false; |
| 232 |
| 233 std::string message(msg); |
| 234 |
| 235 Json::Reader reader; |
| 236 Json::Value jmessage; |
| 237 if (!reader.parse(message, jmessage)) { |
| 238 LOG(WARNING) << "Received unknown message. " << message; |
| 239 return false; |
| 240 } |
| 241 std::string type; |
| 242 std::string json_object; |
| 243 |
| 244 rtc::GetStringFromJsonObject(jmessage, kSessionDescriptionTypeName, &type); |
| 245 if (type.empty()) |
| 246 return false; |
| 247 |
| 248 std::string sdp; |
| 249 if (!rtc::GetStringFromJsonObject(jmessage, kSessionDescriptionSdpName, |
| 250 &sdp)) { |
| 251 LOG(WARNING) << "Can't parse received session description message."; |
| 252 return false; |
| 253 } |
| 254 webrtc::SdpParseError error; |
| 255 webrtc::SessionDescriptionInterface* session_description( |
| 256 webrtc::CreateSessionDescription(type, sdp, &error)); |
| 257 if (!session_description) { |
| 258 LOG(WARNING) << "Can't parse received session description message. " |
| 259 << "SdpParseError was: " << error.description; |
| 260 return false; |
| 261 } |
| 262 LOG(INFO) << " Received session description :" << message; |
| 263 peer_connection_->SetRemoteDescription( |
| 264 DummySetSessionDescriptionObserver::Create(), session_description); |
| 265 |
| 266 return true; |
| 267 } |
| 268 |
| 269 bool Conductor::ReceivedIceCandidate(const char* ice_candidate) { |
| 270 if (!peer_connection_) |
| 271 return false; |
| 272 |
| 273 std::string message(ice_candidate); |
| 274 |
| 275 Json::Reader reader; |
| 276 Json::Value jmessage; |
| 277 if (!reader.parse(message, jmessage)) { |
| 278 LOG(WARNING) << "Received unknown message. " << message; |
| 279 return false; |
| 280 } |
| 281 std::string type; |
| 282 std::string json_object; |
| 283 |
| 284 rtc::GetStringFromJsonObject(jmessage, kSessionDescriptionTypeName, &type); |
| 285 if (!type.empty()) |
| 286 return false; |
| 287 |
| 288 std::string sdp_mid; |
| 289 int sdp_mlineindex = 0; |
| 290 std::string sdp; |
| 291 if (!rtc::GetStringFromJsonObject(jmessage, kCandidateSdpMidName, &sdp_mid) || |
| 292 !rtc::GetIntFromJsonObject(jmessage, kCandidateSdpMlineIndexName, |
| 293 &sdp_mlineindex) || |
| 294 !rtc::GetStringFromJsonObject(jmessage, kCandidateSdpName, &sdp)) { |
| 295 LOG(WARNING) << "Can't parse received message."; |
| 296 return false; |
| 297 } |
| 298 webrtc::SdpParseError error; |
| 299 std::unique_ptr<webrtc::IceCandidateInterface> candidate( |
| 300 webrtc::CreateIceCandidate(sdp_mid, sdp_mlineindex, sdp, &error)); |
| 301 if (!candidate.get()) { |
| 302 LOG(WARNING) << "Can't parse received candidate message. " |
| 303 << "SdpParseError was: " << error.description; |
| 304 return false; |
| 305 } |
| 306 if (!peer_connection_->AddIceCandidate(candidate.get())) { |
| 307 LOG(WARNING) << "Failed to apply the received candidate"; |
| 308 return false; |
| 309 } |
| 310 LOG(INFO) << " Received candidate :" << message; |
| 311 return true; |
| 312 } |
| 313 |
| 314 void Conductor::SetAudioControl(bool is_mute, bool is_record) { |
| 315 is_mute_audio_ = is_mute; |
| 316 is_record_audio_ = is_record; |
| 317 |
| 318 SetAudioControl(); |
| 319 } |
| 320 |
| 321 void Conductor::SetAudioControl() { |
| 322 if (!remote_stream_) |
| 323 return; |
| 324 webrtc::AudioTrackVector tracks = remote_stream_->GetAudioTracks(); |
| 325 if (tracks.empty()) |
| 326 return; |
| 327 |
| 328 webrtc::AudioTrackInterface* audio_track = tracks[0]; |
| 329 std::string id = audio_track->id(); |
| 330 if (is_record_audio_) |
| 331 audio_track->AddSink(this); |
| 332 else |
| 333 audio_track->RemoveSink(this); |
| 334 |
| 335 for (auto& track : tracks) { |
| 336 if (is_mute_audio_) |
| 337 track->set_enabled(false); |
| 338 else |
| 339 track->set_enabled(true); |
| 340 } |
| 341 } |
| 342 |
| 343 void Conductor::OnAddStream( |
| 344 rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) { |
| 345 LOG(INFO) << __FUNCTION__ << " " << stream->label(); |
| 346 remote_stream_ = stream; |
| 347 |
| 348 SetAudioControl(); |
| 349 } |
| 350 |
| 351 std::unique_ptr<cricket::VideoCapturer> Conductor::OpenVideoCaptureDevice() { |
| 352 std::vector<std::string> device_names; |
| 353 { |
| 354 std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> info( |
| 355 webrtc::VideoCaptureFactory::CreateDeviceInfo()); |
| 356 if (!info) { |
| 357 return nullptr; |
| 358 } |
| 359 int num_devices = info->NumberOfDevices(); |
| 360 for (int i = 0; i < num_devices; ++i) { |
| 361 const uint32_t kSize = 256; |
| 362 char name[kSize] = {0}; |
| 363 char id[kSize] = {0}; |
| 364 if (info->GetDeviceName(i, name, kSize, id, kSize) != -1) { |
| 365 device_names.push_back(name); |
| 366 } |
| 367 } |
| 368 } |
| 369 |
| 370 cricket::WebRtcVideoDeviceCapturerFactory factory; |
| 371 std::unique_ptr<cricket::VideoCapturer> capturer; |
| 372 for (const auto& name : device_names) { |
| 373 capturer = factory.Create(cricket::Device(name, 0)); |
| 374 if (capturer) { |
| 375 break; |
| 376 } |
| 377 } |
| 378 return capturer; |
| 379 } |
| 380 |
| 381 void Conductor::AddStreams(bool audio_only) { |
| 382 if (active_streams_.find(kStreamLabel) != active_streams_.end()) |
| 383 return; // Already added. |
| 384 |
| 385 rtc::scoped_refptr<webrtc::MediaStreamInterface> stream = |
| 386 g_peer_connection_factory->CreateLocalMediaStream(kStreamLabel); |
| 387 |
| 388 rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track( |
| 389 g_peer_connection_factory->CreateAudioTrack( |
| 390 kAudioLabel, g_peer_connection_factory->CreateAudioSource(nullptr))); |
| 391 std::string id = audio_track->id(); |
| 392 stream->AddTrack(audio_track); |
| 393 |
| 394 if (!audio_only) { |
| 395 std::unique_ptr<cricket::VideoCapturer> capture = OpenVideoCaptureDevice(); |
| 396 if (capture) { |
| 397 rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track( |
| 398 g_peer_connection_factory->CreateVideoTrack( |
| 399 kVideoLabel, g_peer_connection_factory->CreateVideoSource( |
| 400 OpenVideoCaptureDevice(), nullptr))); |
| 401 |
| 402 stream->AddTrack(video_track); |
| 403 } |
| 404 } |
| 405 |
| 406 if (!peer_connection_->AddStream(stream)) { |
| 407 LOG(LS_ERROR) << "Adding stream to PeerConnection failed"; |
| 408 } |
| 409 |
| 410 typedef std::pair<std::string, |
| 411 rtc::scoped_refptr<webrtc::MediaStreamInterface>> |
| 412 MediaStreamPair; |
| 413 active_streams_.insert(MediaStreamPair(stream->label(), stream)); |
| 414 } |
| 415 |
| 416 bool Conductor::CreateDataChannel() { |
| 417 struct webrtc::DataChannelInit init; |
| 418 init.ordered = true; |
| 419 init.reliable = true; |
| 420 data_channel_ = peer_connection_->CreateDataChannel("Hello", &init); |
| 421 if (data_channel_.get()) { |
| 422 data_channel_->RegisterObserver(this); |
| 423 LOG(LS_INFO) << "Succeeds to create data channel"; |
| 424 return true; |
| 425 } else { |
| 426 LOG(LS_INFO) << "Fails to create data channel"; |
| 427 return false; |
| 428 } |
| 429 } |
| 430 |
| 431 void Conductor::CloseDataChannel() { |
| 432 if (data_channel_.get()) { |
| 433 data_channel_->UnregisterObserver(); |
| 434 data_channel_->Close(); |
| 435 } |
| 436 data_channel_ = nullptr; |
| 437 } |
| 438 |
| 439 bool Conductor::SendDataViaDataChannel(const std::string& data) { |
| 440 if (!data_channel_.get()) { |
| 441 LOG(LS_INFO) << "Data channel is not established"; |
| 442 return false; |
| 443 } |
| 444 webrtc::DataBuffer buffer(data); |
| 445 data_channel_->Send(buffer); |
| 446 return true; |
| 447 } |
| 448 |
// Peerconnection observer
// A remote peer opened a data channel: start observing it so OnStateChange()
// and OnMessage() fire for it.
// NOTE(review): the channel is not stored in data_channel_, so
// SendDataViaDataChannel()/CloseDataChannel() will not see a remotely
// created channel — confirm whether that is intentional.
void Conductor::OnDataChannel(
    rtc::scoped_refptr<webrtc::DataChannelInterface> channel) {
  channel->RegisterObserver(this);
}
| 454 |
| 455 void Conductor::OnStateChange() { |
| 456 if (data_channel_) { |
| 457 webrtc::DataChannelInterface::DataState state = data_channel_->state(); |
| 458 if (state == webrtc::DataChannelInterface::kOpen) { |
| 459 if (OnLocalDataChannelReady) |
| 460 OnLocalDataChannelReady(); |
| 461 LOG(LS_INFO) << "Data channel is open"; |
| 462 } |
| 463 } |
| 464 } |
| 465 |
| 466 // A data buffer was successfully received. |
| 467 void Conductor::OnMessage(const webrtc::DataBuffer& buffer) { |
| 468 size_t size = buffer.data.size(); |
| 469 char* msg = new char[size + 1]; |
| 470 memcpy(msg, buffer.data.data(), size); |
| 471 msg[size] = 0; |
| 472 if (OnDataFromDataChannelReady) |
| 473 OnDataFromDataChannelReady(msg); |
| 474 delete[] msg; |
| 475 } |
| 476 |
| 477 // AudioTrackSinkInterface implementation. |
| 478 void Conductor::OnData(const void* audio_data, |
| 479 int bits_per_sample, |
| 480 int sample_rate, |
| 481 size_t number_of_channels, |
| 482 size_t number_of_frames) { |
| 483 if (OnAudioReady) |
| 484 OnAudioReady(audio_data, bits_per_sample, sample_rate, |
| 485 static_cast<int>(number_of_channels), |
| 486 static_cast<int>(number_of_frames)); |
| 487 } |
| 488 |
| 489 std::vector<uint32_t> Conductor::GetRemoteAudioTrackSsrcs() { |
| 490 std::vector<rtc::scoped_refptr<webrtc::RtpReceiverInterface>> receivers = |
| 491 peer_connection_->GetReceivers(); |
| 492 |
| 493 std::vector<uint32_t> ssrcs; |
| 494 for (const auto& receiver : receivers) { |
| 495 if (receiver->media_type() != cricket::MEDIA_TYPE_AUDIO) |
| 496 continue; |
| 497 |
| 498 std::vector<webrtc::RtpEncodingParameters> params = |
| 499 receiver->GetParameters().encodings; |
| 500 |
| 501 for (const auto& param : params) { |
| 502 uint32_t ssrc = param.ssrc.value_or(0); |
| 503 if (ssrc > 0) |
| 504 ssrcs.push_back(ssrc); |
| 505 } |
| 506 } |
| 507 |
| 508 return ssrcs; |
| 509 } |
OLD | NEW |