OLD | NEW |
| (Empty) |
1 /* | |
2 * libjingle | |
3 * Copyright 2014 Google Inc. | |
4 * | |
5 * Redistribution and use in source and binary forms, with or without | |
6 * modification, are permitted provided that the following conditions are met: | |
7 * | |
8 * 1. Redistributions of source code must retain the above copyright notice, | |
9 * this list of conditions and the following disclaimer. | |
10 * 2. Redistributions in binary form must reproduce the above copyright notice, | |
11 * this list of conditions and the following disclaimer in the documentation | |
12 * and/or other materials provided with the distribution. | |
13 * 3. The name of the author may not be used to endorse or promote products | |
14 * derived from this software without specific prior written permission. | |
15 * | |
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED | |
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF | |
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO | |
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | |
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; | |
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, | |
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR | |
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF | |
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
26 */ | |
27 | |
28 package org.appspot.apprtc; | |
29 | |
30 import android.content.Context; | |
31 import android.opengl.EGLContext; | |
32 import android.util.Log; | |
33 | |
34 import org.appspot.apprtc.AppRTCClient.SignalingParameters; | |
35 import org.appspot.apprtc.util.LooperExecutor; | |
36 import org.webrtc.DataChannel; | |
37 import org.webrtc.IceCandidate; | |
38 import org.webrtc.Logging; | |
39 import org.webrtc.MediaCodecVideoEncoder; | |
40 import org.webrtc.MediaConstraints; | |
41 import org.webrtc.MediaConstraints.KeyValuePair; | |
42 import org.webrtc.MediaStream; | |
43 import org.webrtc.PeerConnection; | |
44 import org.webrtc.PeerConnection.IceConnectionState; | |
45 import org.webrtc.PeerConnectionFactory; | |
46 import org.webrtc.SdpObserver; | |
47 import org.webrtc.SessionDescription; | |
48 import org.webrtc.StatsObserver; | |
49 import org.webrtc.StatsReport; | |
50 import org.webrtc.VideoCapturerAndroid; | |
51 import org.webrtc.VideoRenderer; | |
52 import org.webrtc.VideoSource; | |
53 import org.webrtc.VideoTrack; | |
54 | |
55 import java.util.EnumSet; | |
56 import java.util.LinkedList; | |
57 import java.util.Timer; | |
58 import java.util.TimerTask; | |
59 import java.util.regex.Matcher; | |
60 import java.util.regex.Pattern; | |
61 | |
/**
 * Peer connection client implementation.
 *
 * <p>All public methods are routed to local looper thread.
 * All PeerConnectionEvents callbacks are invoked from the same looper thread.
 * This class is a singleton.
 */
public class PeerConnectionClient {
  // Labels of the local audio/video tracks added to the "ARDAMS" media stream
  // in createPeerConnectionInternal().
  public static final String VIDEO_TRACK_ID = "ARDAMSv0";
  public static final String AUDIO_TRACK_ID = "ARDAMSa0";
  private static final String TAG = "PCRTCClient";
  // Field trial string passed to PeerConnectionFactory when VP9 is requested.
  private static final String FIELD_TRIAL_VP9 = "WebRTC-SupportVP9/Enabled/";
  // Codec names as they appear in SDP "a=rtpmap" lines.
  private static final String VIDEO_CODEC_VP8 = "VP8";
  private static final String VIDEO_CODEC_VP9 = "VP9";
  private static final String VIDEO_CODEC_H264 = "H264";
  private static final String AUDIO_CODEC_OPUS = "opus";
  private static final String AUDIO_CODEC_ISAC = "ISAC";
  // SDP "a=fmtp" parameter names used by setStartBitrate().
  private static final String VIDEO_CODEC_PARAM_START_BITRATE =
      "x-google-start-bitrate";
  private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate";
  // MediaConstraints keys toggling audio processing (see
  // createMediaConstraintsInternal()).
  private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
  private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT= "googAutoGainControl";
  private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter";
  private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression";
  // MediaConstraints keys bounding video resolution and frame rate.
  private static final String MAX_VIDEO_WIDTH_CONSTRAINT = "maxWidth";
  private static final String MIN_VIDEO_WIDTH_CONSTRAINT = "minWidth";
  private static final String MAX_VIDEO_HEIGHT_CONSTRAINT = "maxHeight";
  private static final String MIN_VIDEO_HEIGHT_CONSTRAINT = "minHeight";
  private static final String MAX_VIDEO_FPS_CONSTRAINT = "maxFrameRate";
  private static final String MIN_VIDEO_FPS_CONSTRAINT = "minFrameRate";
  private static final String DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT = "DtlsSrtpKeyAgreement";
  // HD resolution forced when HW VP8 encode is available and no resolution
  // was requested; MAX_* values cap whatever the caller asks for.
  private static final int HD_VIDEO_WIDTH = 1280;
  private static final int HD_VIDEO_HEIGHT = 720;
  private static final int MAX_VIDEO_WIDTH = 1280;
  private static final int MAX_VIDEO_HEIGHT = 1280;
  private static final int MAX_VIDEO_FPS = 30;

  // Eagerly-created singleton instance; see getInstance().
  private static final PeerConnectionClient instance = new PeerConnectionClient();
  private final PCObserver pcObserver = new PCObserver();
  private final SDPObserver sdpObserver = new SDPObserver();
  // Single-threaded executor: every PeerConnection API call is posted here.
  private final LooperExecutor executor;

  // Per-call state; reset in createPeerConnectionFactory().
  private PeerConnectionFactory factory;
  private PeerConnection peerConnection;
  PeerConnectionFactory.Options options = null;
  private VideoSource videoSource;
  private boolean videoCallEnabled;
  private boolean preferIsac;
  private boolean preferH264;
  private boolean videoSourceStopped;
  private boolean isError;
  private Timer statsTimer;
  private VideoRenderer.Callbacks localRender;
  private VideoRenderer.Callbacks remoteRender;
  private SignalingParameters signalingParameters;
  private MediaConstraints pcConstraints;
  private MediaConstraints videoConstraints;
  private MediaConstraints audioConstraints;
  private MediaConstraints sdpMediaConstraints;
  private PeerConnectionParameters peerConnectionParameters;
  // Queued remote ICE candidates are consumed only after both local and
  // remote descriptions are set. Similarly local ICE candidates are sent to
  // remote peer after both local and remote description are set.
  private LinkedList<IceCandidate> queuedRemoteCandidates;
  private PeerConnectionEvents events;
  private boolean isInitiator;
  private SessionDescription localSdp; // either offer or answer SDP
  private MediaStream mediaStream;
  private int numberOfCameras;
  private VideoCapturerAndroid videoCapturer;
  // enableVideo is set to true if video should be rendered and sent.
  private boolean renderVideo;
  private VideoTrack localVideoTrack;
  private VideoTrack remoteVideoTrack;
137 /** | |
138 * Peer connection parameters. | |
139 */ | |
140 public static class PeerConnectionParameters { | |
141 public final boolean videoCallEnabled; | |
142 public final boolean loopback; | |
143 public final int videoWidth; | |
144 public final int videoHeight; | |
145 public final int videoFps; | |
146 public final int videoStartBitrate; | |
147 public final String videoCodec; | |
148 public final boolean videoCodecHwAcceleration; | |
149 public final int audioStartBitrate; | |
150 public final String audioCodec; | |
151 public final boolean noAudioProcessing; | |
152 public final boolean cpuOveruseDetection; | |
153 | |
154 public PeerConnectionParameters( | |
155 boolean videoCallEnabled, boolean loopback, | |
156 int videoWidth, int videoHeight, int videoFps, int videoStartBitrate, | |
157 String videoCodec, boolean videoCodecHwAcceleration, | |
158 int audioStartBitrate, String audioCodec, | |
159 boolean noAudioProcessing, boolean cpuOveruseDetection) { | |
160 this.videoCallEnabled = videoCallEnabled; | |
161 this.loopback = loopback; | |
162 this.videoWidth = videoWidth; | |
163 this.videoHeight = videoHeight; | |
164 this.videoFps = videoFps; | |
165 this.videoStartBitrate = videoStartBitrate; | |
166 this.videoCodec = videoCodec; | |
167 this.videoCodecHwAcceleration = videoCodecHwAcceleration; | |
168 this.audioStartBitrate = audioStartBitrate; | |
169 this.audioCodec = audioCodec; | |
170 this.noAudioProcessing = noAudioProcessing; | |
171 this.cpuOveruseDetection = cpuOveruseDetection; | |
172 } | |
173 } | |
174 | |
175 /** | |
176 * Peer connection events. | |
177 */ | |
178 public static interface PeerConnectionEvents { | |
179 /** | |
180 * Callback fired once local SDP is created and set. | |
181 */ | |
182 public void onLocalDescription(final SessionDescription sdp); | |
183 | |
184 /** | |
185 * Callback fired once local Ice candidate is generated. | |
186 */ | |
187 public void onIceCandidate(final IceCandidate candidate); | |
188 | |
189 /** | |
190 * Callback fired once connection is established (IceConnectionState is | |
191 * CONNECTED). | |
192 */ | |
193 public void onIceConnected(); | |
194 | |
195 /** | |
196 * Callback fired once connection is closed (IceConnectionState is | |
197 * DISCONNECTED). | |
198 */ | |
199 public void onIceDisconnected(); | |
200 | |
201 /** | |
202 * Callback fired once peer connection is closed. | |
203 */ | |
204 public void onPeerConnectionClosed(); | |
205 | |
206 /** | |
207 * Callback fired once peer connection statistics is ready. | |
208 */ | |
209 public void onPeerConnectionStatsReady(final StatsReport[] reports); | |
210 | |
211 /** | |
212 * Callback fired once peer connection error happened. | |
213 */ | |
214 public void onPeerConnectionError(final String description); | |
215 } | |
216 | |
  /** Private constructor: this class is a singleton (see {@link #getInstance}). */
  private PeerConnectionClient() {
    executor = new LooperExecutor();
    // Looper thread is started once in private ctor and is used for all
    // peer connection API calls to ensure new peer connection factory is
    // created on the same thread as previously destroyed factory.
    executor.requestStart();
  }

  /** Returns the single shared instance of this class. */
  public static PeerConnectionClient getInstance() {
    return instance;
  }

  /**
   * Stores factory options; they are applied when the factory is created in
   * createPeerConnectionFactoryInternal(), so set them before
   * createPeerConnectionFactory().
   */
  public void setPeerConnectionFactoryOptions(PeerConnectionFactory.Options options) {
    this.options = options;
  }
232 | |
  /**
   * Resets all per-call state and creates the peer connection factory
   * asynchronously on the executor thread. Call once per call, before
   * {@link #createPeerConnection}.
   *
   * @param context Android context passed to initializeAndroidGlobals.
   * @param renderEGLContext EGL context handed to the native factory
   *     (presumably for HW video codec paths — confirm against factory docs).
   * @param peerConnectionParameters call settings, retained for the call.
   * @param events callback sink; invoked from the looper thread.
   */
  public void createPeerConnectionFactory(
      final Context context,
      final EGLContext renderEGLContext,
      final PeerConnectionParameters peerConnectionParameters,
      final PeerConnectionEvents events) {
    this.peerConnectionParameters = peerConnectionParameters;
    this.events = events;
    videoCallEnabled = peerConnectionParameters.videoCallEnabled;
    // Reset variables to initial states.
    factory = null;
    peerConnection = null;
    preferIsac = false;
    preferH264 = false;
    videoSourceStopped = false;
    isError = false;
    queuedRemoteCandidates = null;
    localSdp = null; // either offer or answer SDP
    mediaStream = null;
    videoCapturer = null;
    renderVideo = true;
    localVideoTrack = null;
    remoteVideoTrack = null;
    statsTimer = new Timer();

    // Actual factory creation happens on the looper thread.
    executor.execute(new Runnable() {
      @Override
      public void run() {
        createPeerConnectionFactoryInternal(context, renderEGLContext);
      }
    });
  }
264 | |
  /**
   * Creates the peer connection asynchronously, using the factory prepared by
   * {@link #createPeerConnectionFactory}. Logs an error and does nothing if
   * the factory parameters were never supplied.
   *
   * @param localRender sink for the local video track.
   * @param remoteRender sink for the remote video track.
   * @param signalingParameters room parameters (ICE servers etc.).
   */
  public void createPeerConnection(
      final VideoRenderer.Callbacks localRender,
      final VideoRenderer.Callbacks remoteRender,
      final SignalingParameters signalingParameters) {
    if (peerConnectionParameters == null) {
      Log.e(TAG, "Creating peer connection without initializing factory.");
      return;
    }
    this.localRender = localRender;
    this.remoteRender = remoteRender;
    this.signalingParameters = signalingParameters;
    executor.execute(new Runnable() {
      @Override
      public void run() {
        // Constraints must exist before the connection is created.
        createMediaConstraintsInternal();
        createPeerConnectionInternal();
      }
    });
  }
284 | |
  /** Asynchronously tears down the connection, video source and factory. */
  public void close() {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        closeInternal();
      }
    });
  }

  /**
   * Returns whether video is enabled for this call. May become false after
   * createMediaConstraintsInternal() if the device has no camera.
   */
  public boolean isVideoCallEnabled() {
    return videoCallEnabled;
  }
297 | |
  /**
   * Runs on the executor thread. Configures field trials and codec
   * preferences from the call parameters, initializes the native Android
   * globals, then creates the PeerConnectionFactory. Failures are reported
   * through {@code events}.
   */
  private void createPeerConnectionFactoryInternal(
      Context context, EGLContext renderEGLContext) {
    Log.d(TAG, "Create peer connection factory with EGLContext "
        + renderEGLContext + ". Use video: "
        + peerConnectionParameters.videoCallEnabled);
    isError = false;
    // Check if VP9 is used by default.
    if (videoCallEnabled && peerConnectionParameters.videoCodec != null
        && peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_VP9)) {
      PeerConnectionFactory.initializeFieldTrials(FIELD_TRIAL_VP9);
    } else {
      PeerConnectionFactory.initializeFieldTrials(null);
    }
    // Check if H.264 is used by default.
    preferH264 = false;
    if (videoCallEnabled && peerConnectionParameters.videoCodec != null
        && peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_H264)) {
      preferH264 = true;
    }
    // Check if ISAC is used by default.
    preferIsac = false;
    if (peerConnectionParameters.audioCodec != null
        && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC)) {
      preferIsac = true;
    }
    // Globals must be initialized before the factory is constructed; on
    // failure the error is reported but execution continues.
    if (!PeerConnectionFactory.initializeAndroidGlobals(
        context, true, true,
        peerConnectionParameters.videoCodecHwAcceleration, renderEGLContext)) {
      events.onPeerConnectionError("Failed to initializeAndroidGlobals");
    }
    factory = new PeerConnectionFactory();
    if (options != null) {
      Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
      factory.setOptions(options);
    }
    Log.d(TAG, "Peer connection factory created.");
  }
335 | |
  /**
   * Runs on the executor thread. Builds the peer-connection, video, audio and
   * SDP constraint sets from the call parameters. Also downgrades the call to
   * audio-only when no camera is present.
   */
  private void createMediaConstraintsInternal() {
    // Create peer connection constraints.
    pcConstraints = new MediaConstraints();
    // Enable DTLS for normal calls and disable for loopback calls.
    if (peerConnectionParameters.loopback) {
      pcConstraints.optional.add(
          new MediaConstraints.KeyValuePair(DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT, "false"));
    } else {
      pcConstraints.optional.add(
          new MediaConstraints.KeyValuePair(DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT, "true"));
    }

    // Check if there is a camera on device and disable video call if not.
    numberOfCameras = VideoCapturerAndroid.getDeviceCount();
    if (numberOfCameras == 0) {
      Log.w(TAG, "No camera on device. Switch to audio only call.");
      videoCallEnabled = false;
    }
    // Create video constraints if video call is enabled.
    if (videoCallEnabled) {
      videoConstraints = new MediaConstraints();
      int videoWidth = peerConnectionParameters.videoWidth;
      int videoHeight = peerConnectionParameters.videoHeight;

      // If VP8 HW video encoder is supported and video resolution is not
      // specified force it to HD.
      if ((videoWidth == 0 || videoHeight == 0)
          && peerConnectionParameters.videoCodecHwAcceleration
          && MediaCodecVideoEncoder.isVp8HwSupported()) {
        videoWidth = HD_VIDEO_WIDTH;
        videoHeight = HD_VIDEO_HEIGHT;
      }

      // Add video resolution constraints, capped at MAX_VIDEO_WIDTH/HEIGHT.
      // min and max are set to the same value, pinning the resolution.
      if (videoWidth > 0 && videoHeight > 0) {
        videoWidth = Math.min(videoWidth, MAX_VIDEO_WIDTH);
        videoHeight = Math.min(videoHeight, MAX_VIDEO_HEIGHT);
        videoConstraints.mandatory.add(new KeyValuePair(
            MIN_VIDEO_WIDTH_CONSTRAINT, Integer.toString(videoWidth)));
        videoConstraints.mandatory.add(new KeyValuePair(
            MAX_VIDEO_WIDTH_CONSTRAINT, Integer.toString(videoWidth)));
        videoConstraints.mandatory.add(new KeyValuePair(
            MIN_VIDEO_HEIGHT_CONSTRAINT, Integer.toString(videoHeight)));
        videoConstraints.mandatory.add(new KeyValuePair(
            MAX_VIDEO_HEIGHT_CONSTRAINT, Integer.toString(videoHeight)));
      }

      // Add fps constraints, capped at MAX_VIDEO_FPS; min == max pins the rate.
      int videoFps = peerConnectionParameters.videoFps;
      if (videoFps > 0) {
        videoFps = Math.min(videoFps, MAX_VIDEO_FPS);
        videoConstraints.mandatory.add(new KeyValuePair(
            MIN_VIDEO_FPS_CONSTRAINT, Integer.toString(videoFps)));
        videoConstraints.mandatory.add(new KeyValuePair(
            MAX_VIDEO_FPS_CONSTRAINT, Integer.toString(videoFps)));
      }
    }

    // Create audio constraints.
    audioConstraints = new MediaConstraints();
    // added for audio performance measurements
    if (peerConnectionParameters.noAudioProcessing) {
      Log.d(TAG, "Disabling audio processing");
      audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
          AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
      audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
          AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
      audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
          AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
      audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
          AUDIO_NOISE_SUPPRESSION_CONSTRAINT , "false"));
    }
    // Create SDP constraints. Audio is always received; video reception is
    // offered only for video calls or loopback sessions.
    sdpMediaConstraints = new MediaConstraints();
    sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
        "OfferToReceiveAudio", "true"));
    if (videoCallEnabled || peerConnectionParameters.loopback) {
      sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
          "OfferToReceiveVideo", "true"));
    } else {
      sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
          "OfferToReceiveVideo", "false"));
    }
  }
420 | |
  /**
   * Runs on the executor thread. Creates the PeerConnection, enables native
   * tracing, opens the camera (for video calls) and attaches the local media
   * stream. Requires a factory from createPeerConnectionFactoryInternal() and
   * constraints from createMediaConstraintsInternal().
   */
  private void createPeerConnectionInternal() {
    if (factory == null || isError) {
      Log.e(TAG, "Peerconnection factory is not created");
      return;
    }
    Log.d(TAG, "Create peer connection");
    Log.d(TAG, "PCConstraints: " + pcConstraints.toString());
    if (videoConstraints != null) {
      Log.d(TAG, "VideoConstraints: " + videoConstraints.toString());
    }
    queuedRemoteCandidates = new LinkedList<IceCandidate>();

    PeerConnection.RTCConfiguration rtcConfig =
        new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
    // TCP candidates are only useful when connecting to a server that supports
    // ICE-TCP.
    rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
    rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
    rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;

    peerConnection = factory.createPeerConnection(
        rtcConfig, pcConstraints, pcObserver);
    isInitiator = false;

    // Set default WebRTC tracing and INFO libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    Logging.enableTracing(
        "logcat:",
        EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT),
        Logging.Severity.LS_INFO);

    mediaStream = factory.createLocalMediaStream("ARDAMS");
    if (videoCallEnabled) {
      // Prefer the front camera when more than one camera is available.
      String cameraDeviceName = VideoCapturerAndroid.getDeviceName(0);
      String frontCameraDeviceName =
          VideoCapturerAndroid.getNameOfFrontFacingDevice();
      if (numberOfCameras > 1 && frontCameraDeviceName != null) {
        cameraDeviceName = frontCameraDeviceName;
      }
      Log.d(TAG, "Opening camera: " + cameraDeviceName);
      videoCapturer = VideoCapturerAndroid.create(cameraDeviceName, null);
      if (videoCapturer == null) {
        reportError("Failed to open camera");
        return;
      }
      mediaStream.addTrack(createVideoTrack(videoCapturer));
    }

    // Audio track is always added, even for video calls.
    mediaStream.addTrack(factory.createAudioTrack(
        AUDIO_TRACK_ID,
        factory.createAudioSource(audioConstraints)));
    peerConnection.addStream(mediaStream);

    Log.d(TAG, "Peer connection created.");
  }
476 | |
  /**
   * Runs on the executor thread. Disposal order is deliberate: connection
   * first, then the video source, then the factory that created them. Fires
   * onPeerConnectionClosed() when done.
   */
  private void closeInternal() {
    Log.d(TAG, "Closing peer connection.");
    statsTimer.cancel();
    if (peerConnection != null) {
      peerConnection.dispose();
      peerConnection = null;
    }
    Log.d(TAG, "Closing video source.");
    if (videoSource != null) {
      videoSource.dispose();
      videoSource = null;
    }
    Log.d(TAG, "Closing peer connection factory.");
    if (factory != null) {
      factory.dispose();
      factory = null;
    }
    options = null;
    Log.d(TAG, "Closing peer connection done.");
    events.onPeerConnectionClosed();
  }
498 | |
499 public boolean isHDVideo() { | |
500 if (!videoCallEnabled) { | |
501 return false; | |
502 } | |
503 int minWidth = 0; | |
504 int minHeight = 0; | |
505 for (KeyValuePair keyValuePair : videoConstraints.mandatory) { | |
506 if (keyValuePair.getKey().equals("minWidth")) { | |
507 try { | |
508 minWidth = Integer.parseInt(keyValuePair.getValue()); | |
509 } catch (NumberFormatException e) { | |
510 Log.e(TAG, "Can not parse video width from video constraints"); | |
511 } | |
512 } else if (keyValuePair.getKey().equals("minHeight")) { | |
513 try { | |
514 minHeight = Integer.parseInt(keyValuePair.getValue()); | |
515 } catch (NumberFormatException e) { | |
516 Log.e(TAG, "Can not parse video height from video constraints"); | |
517 } | |
518 } | |
519 } | |
520 if (minWidth * minHeight >= 1280 * 720) { | |
521 return true; | |
522 } else { | |
523 return false; | |
524 } | |
525 } | |
526 | |
  /**
   * Requests a stats snapshot from the peer connection; results are delivered
   * asynchronously via onPeerConnectionStatsReady(). No-op if the connection
   * is gone or an error occurred.
   */
  private void getStats() {
    if (peerConnection == null || isError) {
      return;
    }
    boolean success = peerConnection.getStats(new StatsObserver() {
      @Override
      public void onComplete(final StatsReport[] reports) {
        events.onPeerConnectionStatsReady(reports);
      }
    }, null);
    if (!success) {
      Log.e(TAG, "getStats() returns false!");
    }
  }

  /**
   * Starts or stops periodic stats reporting.
   *
   * @param enable true to schedule getStats() every periodMs ms, false to
   *     cancel the timer.
   * @param periodMs reporting period in milliseconds.
   */
  public void enableStatsEvents(boolean enable, int periodMs) {
    if (enable) {
      try {
        statsTimer.schedule(new TimerTask() {
          @Override
          public void run() {
            // Hop from the Timer thread onto the executor thread.
            executor.execute(new Runnable() {
              @Override
              public void run() {
                getStats();
              }
            });
          }
        }, 0, periodMs);
      } catch (Exception e) {
        // schedule() throws IllegalStateException if the timer was cancelled.
        Log.e(TAG, "Can not schedule statistics timer", e);
      }
    } else {
      statsTimer.cancel();
    }
  }
563 | |
  /**
   * Enables/disables both the local and remote video tracks (rendering and
   * sending). Executed asynchronously on the looper thread.
   */
  public void setVideoEnabled(final boolean enable) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        renderVideo = enable;
        if (localVideoTrack != null) {
          localVideoTrack.setEnabled(renderVideo);
        }
        if (remoteVideoTrack != null) {
          remoteVideoTrack.setEnabled(renderVideo);
        }
      }
    });
  }
578 | |
  /**
   * Asynchronously creates an SDP offer; the result arrives via sdpObserver.
   * Marks this side as the initiator.
   */
  public void createOffer() {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        if (peerConnection != null && !isError) {
          Log.d(TAG, "PC Create OFFER");
          isInitiator = true;
          peerConnection.createOffer(sdpObserver, sdpMediaConstraints);
        }
      }
    });
  }

  /**
   * Asynchronously creates an SDP answer; the result arrives via sdpObserver.
   * Marks this side as the non-initiator.
   */
  public void createAnswer() {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        if (peerConnection != null && !isError) {
          Log.d(TAG, "PC create ANSWER");
          isInitiator = false;
          peerConnection.createAnswer(sdpObserver, sdpMediaConstraints);
        }
      }
    });
  }
604 | |
  /**
   * Adds a remote ICE candidate, queueing it while local/remote descriptions
   * are not both set yet (the queue is drained — and nulled — elsewhere once
   * they are; candidates are applied directly after that).
   */
  public void addRemoteIceCandidate(final IceCandidate candidate) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        if (peerConnection != null && !isError) {
          if (queuedRemoteCandidates != null) {
            queuedRemoteCandidates.add(candidate);
          } else {
            peerConnection.addIceCandidate(candidate);
          }
        }
      }
    });
  }
619 | |
  /**
   * Applies the remote session description asynchronously, after rewriting
   * its SDP: codec preference (ISAC / H.264) is moved to the front when
   * requested, and start-bitrate fmtp parameters are injected for all video
   * codecs and for Opus audio.
   */
  public void setRemoteDescription(final SessionDescription sdp) {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        if (peerConnection == null || isError) {
          return;
        }
        String sdpDescription = sdp.description;
        if (preferIsac) {
          sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
        }
        if (videoCallEnabled && preferH264) {
          sdpDescription = preferCodec(sdpDescription, VIDEO_CODEC_H264, false);
        }
        if (videoCallEnabled && peerConnectionParameters.videoStartBitrate > 0) {
          // Apply to every video codec; setStartBitrate() is a no-op for
          // codecs without an rtpmap entry in this SDP.
          sdpDescription = setStartBitrate(VIDEO_CODEC_VP8, true,
              sdpDescription, peerConnectionParameters.videoStartBitrate);
          sdpDescription = setStartBitrate(VIDEO_CODEC_VP9, true,
              sdpDescription, peerConnectionParameters.videoStartBitrate);
          sdpDescription = setStartBitrate(VIDEO_CODEC_H264, true,
              sdpDescription, peerConnectionParameters.videoStartBitrate);
        }
        if (peerConnectionParameters.audioStartBitrate > 0) {
          sdpDescription = setStartBitrate(AUDIO_CODEC_OPUS, false,
              sdpDescription, peerConnectionParameters.audioStartBitrate);
        }
        Log.d(TAG, "Set remote SDP.");
        SessionDescription sdpRemote = new SessionDescription(
            sdp.type, sdpDescription);
        peerConnection.setRemoteDescription(sdpObserver, sdpRemote);
      }
    });
  }
653 | |
  /**
   * Asynchronously pauses video capture (e.g. when the app goes to the
   * background). Idempotent: no-op if already stopped or no video source.
   */
  public void stopVideoSource() {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        if (videoSource != null && !videoSourceStopped) {
          Log.d(TAG, "Stop video source.");
          videoSource.stop();
          videoSourceStopped = true;
        }
      }
    });
  }

  /**
   * Asynchronously resumes video capture after stopVideoSource(). Idempotent:
   * no-op unless the source exists and was previously stopped.
   */
  public void startVideoSource() {
    executor.execute(new Runnable() {
      @Override
      public void run() {
        if (videoSource != null && videoSourceStopped) {
          Log.d(TAG, "Restart video source.");
          videoSource.restart();
          videoSourceStopped = false;
        }
      }
    });
  }
679 | |
  /**
   * Reports an error to the events sink (once — subsequent errors are only
   * logged) and latches the error flag, which short-circuits most later
   * operations.
   */
  private void reportError(final String errorMessage) {
    Log.e(TAG, "Peerconnection error: " + errorMessage);
    executor.execute(new Runnable() {
      @Override
      public void run() {
        if (!isError) {
          events.onPeerConnectionError(errorMessage);
          isError = true;
        }
      }
    });
  }

  /**
   * Creates the local video source and track from the capturer, wires the
   * track to the local renderer, and returns it for addition to the stream.
   */
  private VideoTrack createVideoTrack(VideoCapturerAndroid capturer) {
    videoSource = factory.createVideoSource(capturer, videoConstraints);

    localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
    localVideoTrack.setEnabled(renderVideo);
    localVideoTrack.addRenderer(new VideoRenderer(localRender));
    return localVideoTrack;
  }
701 | |
702 private static String setStartBitrate(String codec, boolean isVideoCodec, | |
703 String sdpDescription, int bitrateKbps) { | |
704 String[] lines = sdpDescription.split("\r\n"); | |
705 int rtpmapLineIndex = -1; | |
706 boolean sdpFormatUpdated = false; | |
707 String codecRtpMap = null; | |
708 // Search for codec rtpmap in format | |
709 // a=rtpmap:<payload type> <encoding name>/<clock rate> [/<encoding paramete
rs>] | |
710 String regex = "^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$"; | |
711 Pattern codecPattern = Pattern.compile(regex); | |
712 for (int i = 0; i < lines.length; i++) { | |
713 Matcher codecMatcher = codecPattern.matcher(lines[i]); | |
714 if (codecMatcher.matches()) { | |
715 codecRtpMap = codecMatcher.group(1); | |
716 rtpmapLineIndex = i; | |
717 break; | |
718 } | |
719 } | |
720 if (codecRtpMap == null) { | |
721 Log.w(TAG, "No rtpmap for " + codec + " codec"); | |
722 return sdpDescription; | |
723 } | |
724 Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap | |
725 + " at " + lines[rtpmapLineIndex]); | |
726 | |
727 // Check if a=fmtp string already exist in remote SDP for this codec and | |
728 // update it with new bitrate parameter. | |
729 regex = "^a=fmtp:" + codecRtpMap + " \\w+=\\d+.*[\r]?$"; | |
730 codecPattern = Pattern.compile(regex); | |
731 for (int i = 0; i < lines.length; i++) { | |
732 Matcher codecMatcher = codecPattern.matcher(lines[i]); | |
733 if (codecMatcher.matches()) { | |
734 Log.d(TAG, "Found " + codec + " " + lines[i]); | |
735 if (isVideoCodec) { | |
736 lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE | |
737 + "=" + bitrateKbps; | |
738 } else { | |
739 lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE | |
740 + "=" + (bitrateKbps * 1000); | |
741 } | |
742 Log.d(TAG, "Update remote SDP line: " + lines[i]); | |
743 sdpFormatUpdated = true; | |
744 break; | |
745 } | |
746 } | |
747 | |
748 StringBuilder newSdpDescription = new StringBuilder(); | |
749 for (int i = 0; i < lines.length; i++) { | |
750 newSdpDescription.append(lines[i]).append("\r\n"); | |
751 // Append new a=fmtp line if no such line exist for a codec. | |
752 if (!sdpFormatUpdated && i == rtpmapLineIndex) { | |
753 String bitrateSet; | |
754 if (isVideoCodec) { | |
755 bitrateSet = "a=fmtp:" + codecRtpMap + " " | |
756 + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps; | |
757 } else { | |
758 bitrateSet = "a=fmtp:" + codecRtpMap + " " | |
759 + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000); | |
760 } | |
761 Log.d(TAG, "Add remote SDP line: " + bitrateSet); | |
762 newSdpDescription.append(bitrateSet).append("\r\n"); | |
763 } | |
764 | |
765 } | |
766 return newSdpDescription.toString(); | |
767 } | |
768 | |
  /**
   * Returns a copy of {@code sdpDescription} in which the given codec's
   * payload type is moved to the front of the payload list on the matching
   * m= line, making it the preferred codec. Returns the input unchanged when
   * the m= line or the codec's rtpmap entry is missing.
   */
  private static String preferCodec(
      String sdpDescription, String codec, boolean isAudio) {
    String[] lines = sdpDescription.split("\r\n");
    int mLineIndex = -1;
    String codecRtpMap = null;
    // a=rtpmap:<payload type> <encoding name>/<clock rate> [/<encoding parameters>]
    String regex = "^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$";
    Pattern codecPattern = Pattern.compile(regex);
    String mediaDescription = "m=video ";
    if (isAudio) {
      mediaDescription = "m=audio ";
    }
    // Scan until both the m= line and the codec's payload type are found;
    // the loop condition terminates early once both are set.
    for (int i = 0; (i < lines.length)
        && (mLineIndex == -1 || codecRtpMap == null); i++) {
      if (lines[i].startsWith(mediaDescription)) {
        mLineIndex = i;
        continue;
      }
      Matcher codecMatcher = codecPattern.matcher(lines[i]);
      if (codecMatcher.matches()) {
        codecRtpMap = codecMatcher.group(1);
        continue;
      }
    }
    if (mLineIndex == -1) {
      Log.w(TAG, "No " + mediaDescription + " line, so can't prefer " + codec);
      return sdpDescription;
    }
    if (codecRtpMap == null) {
      Log.w(TAG, "No rtpmap for " + codec);
      return sdpDescription;
    }
    Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + ", prefer at "
        + lines[mLineIndex]);
    String[] origMLineParts = lines[mLineIndex].split(" ");
    if (origMLineParts.length > 3) {
      StringBuilder newMLine = new StringBuilder();
      int origPartIndex = 0;
      // Format is: m=<media> <port> <proto> <fmt> ...
      newMLine.append(origMLineParts[origPartIndex++]).append(" ");
      newMLine.append(origMLineParts[origPartIndex++]).append(" ");
      newMLine.append(origMLineParts[origPartIndex++]).append(" ");
      // Preferred payload type goes first; the rest keep their order.
      newMLine.append(codecRtpMap);
      for (; origPartIndex < origMLineParts.length; origPartIndex++) {
        if (!origMLineParts[origPartIndex].equals(codecRtpMap)) {
          newMLine.append(" ").append(origMLineParts[origPartIndex]);
        }
      }
      lines[mLineIndex] = newMLine.toString();
      Log.d(TAG, "Change media description: " + lines[mLineIndex]);
    } else {
      Log.e(TAG, "Wrong SDP media description format: " + lines[mLineIndex]);
    }
    StringBuilder newSdpDescription = new StringBuilder();
    for (String line : lines) {
      newSdpDescription.append(line).append("\r\n");
    }
    return newSdpDescription.toString();
  }
828 | |
829 private void drainCandidates() { | |
830 if (queuedRemoteCandidates != null) { | |
831 Log.d(TAG, "Add " + queuedRemoteCandidates.size() + " remote candidates"); | |
832 for (IceCandidate candidate : queuedRemoteCandidates) { | |
833 peerConnection.addIceCandidate(candidate); | |
834 } | |
835 queuedRemoteCandidates = null; | |
836 } | |
837 } | |
838 | |
839 private void switchCameraInternal() { | |
840 if (!videoCallEnabled || numberOfCameras < 2 || isError || videoCapturer ==
null) { | |
841 Log.e(TAG, "Failed to switch camera. Video: " + videoCallEnabled + ". Erro
r : " | |
842 + isError + ". Number of cameras: " + numberOfCameras); | |
843 return; // No video is sent or only one camera is available or error happ
ened. | |
844 } | |
845 Log.d(TAG, "Switch camera"); | |
846 videoCapturer.switchCamera(null); | |
847 } | |
848 | |
849 public void switchCamera() { | |
850 executor.execute(new Runnable() { | |
851 @Override | |
852 public void run() { | |
853 switchCameraInternal(); | |
854 } | |
855 }); | |
856 } | |
857 | |
858 // Implementation detail: observe ICE & stream changes and react accordingly. | |
859 private class PCObserver implements PeerConnection.Observer { | |
860 @Override | |
861 public void onIceCandidate(final IceCandidate candidate){ | |
862 executor.execute(new Runnable() { | |
863 @Override | |
864 public void run() { | |
865 events.onIceCandidate(candidate); | |
866 } | |
867 }); | |
868 } | |
869 | |
870 @Override | |
871 public void onSignalingChange( | |
872 PeerConnection.SignalingState newState) { | |
873 Log.d(TAG, "SignalingState: " + newState); | |
874 } | |
875 | |
876 @Override | |
877 public void onIceConnectionChange( | |
878 final PeerConnection.IceConnectionState newState) { | |
879 executor.execute(new Runnable() { | |
880 @Override | |
881 public void run() { | |
882 Log.d(TAG, "IceConnectionState: " + newState); | |
883 if (newState == IceConnectionState.CONNECTED) { | |
884 events.onIceConnected(); | |
885 } else if (newState == IceConnectionState.DISCONNECTED) { | |
886 events.onIceDisconnected(); | |
887 } else if (newState == IceConnectionState.FAILED) { | |
888 reportError("ICE connection failed."); | |
889 } | |
890 } | |
891 }); | |
892 } | |
893 | |
894 @Override | |
895 public void onIceGatheringChange( | |
896 PeerConnection.IceGatheringState newState) { | |
897 Log.d(TAG, "IceGatheringState: " + newState); | |
898 } | |
899 | |
900 @Override | |
901 public void onIceConnectionReceivingChange(boolean receiving) { | |
902 Log.d(TAG, "IceConnectionReceiving changed to " + receiving); | |
903 } | |
904 | |
905 @Override | |
906 public void onAddStream(final MediaStream stream){ | |
907 executor.execute(new Runnable() { | |
908 @Override | |
909 public void run() { | |
910 if (peerConnection == null || isError) { | |
911 return; | |
912 } | |
913 if (stream.audioTracks.size() > 1 || stream.videoTracks.size() > 1) { | |
914 reportError("Weird-looking stream: " + stream); | |
915 return; | |
916 } | |
917 if (stream.videoTracks.size() == 1) { | |
918 remoteVideoTrack = stream.videoTracks.get(0); | |
919 remoteVideoTrack.setEnabled(renderVideo); | |
920 remoteVideoTrack.addRenderer(new VideoRenderer(remoteRender)); | |
921 } | |
922 } | |
923 }); | |
924 } | |
925 | |
926 @Override | |
927 public void onRemoveStream(final MediaStream stream){ | |
928 executor.execute(new Runnable() { | |
929 @Override | |
930 public void run() { | |
931 if (peerConnection == null || isError) { | |
932 return; | |
933 } | |
934 remoteVideoTrack = null; | |
935 stream.videoTracks.get(0).dispose(); | |
936 } | |
937 }); | |
938 } | |
939 | |
940 @Override | |
941 public void onDataChannel(final DataChannel dc) { | |
942 reportError("AppRTC doesn't use data channels, but got: " + dc.label() | |
943 + " anyway!"); | |
944 } | |
945 | |
946 @Override | |
947 public void onRenegotiationNeeded() { | |
948 // No need to do anything; AppRTC follows a pre-agreed-upon | |
949 // signaling/negotiation protocol. | |
950 } | |
951 } | |
952 | |
953 // Implementation detail: handle offer creation/signaling and answer setting, | |
954 // as well as adding remote ICE candidates once the answer SDP is set. | |
955 private class SDPObserver implements SdpObserver { | |
956 @Override | |
957 public void onCreateSuccess(final SessionDescription origSdp) { | |
958 if (localSdp != null) { | |
959 reportError("Multiple SDP create."); | |
960 return; | |
961 } | |
962 String sdpDescription = origSdp.description; | |
963 if (preferIsac) { | |
964 sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true); | |
965 } | |
966 if (videoCallEnabled && preferH264) { | |
967 sdpDescription = preferCodec(sdpDescription, VIDEO_CODEC_H264, false); | |
968 } | |
969 final SessionDescription sdp = new SessionDescription( | |
970 origSdp.type, sdpDescription); | |
971 localSdp = sdp; | |
972 executor.execute(new Runnable() { | |
973 @Override | |
974 public void run() { | |
975 if (peerConnection != null && !isError) { | |
976 Log.d(TAG, "Set local SDP from " + sdp.type); | |
977 peerConnection.setLocalDescription(sdpObserver, sdp); | |
978 } | |
979 } | |
980 }); | |
981 } | |
982 | |
983 @Override | |
984 public void onSetSuccess() { | |
985 executor.execute(new Runnable() { | |
986 @Override | |
987 public void run() { | |
988 if (peerConnection == null || isError) { | |
989 return; | |
990 } | |
991 if (isInitiator) { | |
992 // For offering peer connection we first create offer and set | |
993 // local SDP, then after receiving answer set remote SDP. | |
994 if (peerConnection.getRemoteDescription() == null) { | |
995 // We've just set our local SDP so time to send it. | |
996 Log.d(TAG, "Local SDP set succesfully"); | |
997 events.onLocalDescription(localSdp); | |
998 } else { | |
999 // We've just set remote description, so drain remote | |
1000 // and send local ICE candidates. | |
1001 Log.d(TAG, "Remote SDP set succesfully"); | |
1002 drainCandidates(); | |
1003 } | |
1004 } else { | |
1005 // For answering peer connection we set remote SDP and then | |
1006 // create answer and set local SDP. | |
1007 if (peerConnection.getLocalDescription() != null) { | |
1008 // We've just set our local SDP so time to send it, drain | |
1009 // remote and send local ICE candidates. | |
1010 Log.d(TAG, "Local SDP set succesfully"); | |
1011 events.onLocalDescription(localSdp); | |
1012 drainCandidates(); | |
1013 } else { | |
1014 // We've just set remote SDP - do nothing for now - | |
1015 // answer will be created soon. | |
1016 Log.d(TAG, "Remote SDP set succesfully"); | |
1017 } | |
1018 } | |
1019 } | |
1020 }); | |
1021 } | |
1022 | |
1023 @Override | |
1024 public void onCreateFailure(final String error) { | |
1025 reportError("createSDP error: " + error); | |
1026 } | |
1027 | |
1028 @Override | |
1029 public void onSetFailure(final String error) { | |
1030 reportError("setSDP error: " + error); | |
1031 } | |
1032 } | |
1033 } | |
OLD | NEW |