Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright 2016 The WebRTC Project Authors. All rights reserved. | 2 * Copyright 2016 The WebRTC Project Authors. All rights reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 11 matching lines...) | |
| 22 #include "webrtc/base/timeutils.h" | 22 #include "webrtc/base/timeutils.h" |
| 23 #include "webrtc/media/base/mediachannel.h" | 23 #include "webrtc/media/base/mediachannel.h" |
| 24 #include "webrtc/p2p/base/candidate.h" | 24 #include "webrtc/p2p/base/candidate.h" |
| 25 #include "webrtc/p2p/base/p2pconstants.h" | 25 #include "webrtc/p2p/base/p2pconstants.h" |
| 26 #include "webrtc/p2p/base/port.h" | 26 #include "webrtc/p2p/base/port.h" |
| 27 | 27 |
| 28 namespace webrtc { | 28 namespace webrtc { |
| 29 | 29 |
| 30 namespace { | 30 namespace { |
| 31 | 31 |
| 32 // If the track is associated with multiple SSRCs, the |uint32_t| is the first | |
| 33 // one in the group, used as an identifier for that group. The SSRCs are listed | |
| 34 // in |[Voice/Video][Sender/Receiver]Info::ssrcs()|. | |
| 35 typedef std::map<MediaStreamTrackInterface*, uint32_t> TracksToSsrcs; | |
hbos, 2016/12/28 16:12:29:
There is also std::vector<SsrcGroup> ssrc_groups o…
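To make the intent of this map concrete, here is a minimal sketch of how the stored value is meant to be used: the uint32_t is only the first SSRC of the group, and the full list is recovered from the matching [Voice/Video][Sender/Receiver]Info, which is the lookup the CL later performs via GetMediaInfoFromSsrc. The helper below is illustrative only, with an invented name, and is not part of the change.

```cpp
// Illustration only, not part of the CL. Given the first-SSRC identifier
// stored in TracksToSsrcs, recover the full SSRC list of the group from the
// matching VoiceSenderInfo.
std::vector<uint32_t> SsrcGroupOfTrack(
    const TracksToSsrcs& tracks_to_ssrcs,
    const cricket::VoiceMediaInfo& voice_info,
    MediaStreamTrackInterface* track) {
  TracksToSsrcs::const_iterator it = tracks_to_ssrcs.find(track);
  if (it == tracks_to_ssrcs.end())
    return std::vector<uint32_t>();
  for (const cricket::VoiceSenderInfo& sender_info : voice_info.senders) {
    // |ssrc()| is the first SSRC in the group; |ssrcs()| is the full list.
    if (sender_info.ssrc() == it->second)
      return sender_info.ssrcs();
  }
  return std::vector<uint32_t>();
}
```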
| 36 | |
| 32 std::string RTCCertificateIDFromFingerprint(const std::string& fingerprint) { | 37 std::string RTCCertificateIDFromFingerprint(const std::string& fingerprint) { |
| 33 return "RTCCertificate_" + fingerprint; | 38 return "RTCCertificate_" + fingerprint; |
| 34 } | 39 } |
| 35 | 40 |
| 36 std::string RTCCodecStatsIDFromDirectionMediaAndPayload( | 41 std::string RTCCodecStatsIDFromDirectionMediaAndPayload( |
| 37 bool inbound, bool audio, uint32_t payload_type) { | 42 bool inbound, bool audio, uint32_t payload_type) { |
| 38 // TODO(hbos): When we are able to handle multiple m= lines of the same media | 43 // TODO(hbos): When we are able to handle multiple m= lines of the same media |
| 39 // type (and multiple BaseChannels for the same type is possible?) this needs | 44 // type (and multiple BaseChannels for the same type is possible?) this needs |
| 40 // to be updated to differentiate the transport being used, and stats need to | 45 // to be updated to differentiate the transport being used, and stats need to |
| 41 // be collected for all of them. crbug.com/659117 | 46 // be collected for all of them. crbug.com/659117 |
| 42 if (inbound) { | 47 if (inbound) { |
| 43 return audio ? "RTCCodec_InboundAudio_" + rtc::ToString<>(payload_type) | 48 return audio ? "RTCCodec_InboundAudio_" + rtc::ToString<>(payload_type) |
| 44 : "RTCCodec_InboundVideo_" + rtc::ToString<>(payload_type); | 49 : "RTCCodec_InboundVideo_" + rtc::ToString<>(payload_type); |
| 45 } | 50 } |
| 46 return audio ? "RTCCodec_OutboundAudio_" + rtc::ToString<>(payload_type) | 51 return audio ? "RTCCodec_OutboundAudio_" + rtc::ToString<>(payload_type) |
| 47 : "RTCCodec_OutboundVideo_" + rtc::ToString<>(payload_type); | 52 : "RTCCodec_OutboundVideo_" + rtc::ToString<>(payload_type); |
| 48 } | 53 } |
| 49 | 54 |
| 50 std::string RTCIceCandidatePairStatsIDFromConnectionInfo( | 55 std::string RTCIceCandidatePairStatsIDFromConnectionInfo( |
| 51 const cricket::ConnectionInfo& info) { | 56 const cricket::ConnectionInfo& info) { |
| 52 return "RTCIceCandidatePair_" + info.local_candidate.id() + "_" + | 57 return "RTCIceCandidatePair_" + info.local_candidate.id() + "_" + |
| 53 info.remote_candidate.id(); | 58 info.remote_candidate.id(); |
| 54 } | 59 } |
| 55 | 60 |
| 56 std::string RTCMediaStreamTrackStatsIDFromMediaStreamTrackInterface( | 61 std::string RTCMediaStreamTrackStatsIDFromMediaStreamTrackInterface( |
| 57 const MediaStreamTrackInterface& track) { | 62 const MediaStreamTrackInterface& track, bool is_local) { |
| 58 return "RTCMediaStreamTrack_" + track.id(); | 63 return (is_local ? "RTCMediaStreamTrack_local_" : |
| 64 "RTCMediaStreamTrack_remote_") + track.id(); | |
| 59 } | 65 } |
| 60 | 66 |
| 61 std::string RTCTransportStatsIDFromTransportChannel( | 67 std::string RTCTransportStatsIDFromTransportChannel( |
| 62 const std::string& transport_name, int channel_component) { | 68 const std::string& transport_name, int channel_component) { |
| 63 return "RTCTransport_" + transport_name + "_" + | 69 return "RTCTransport_" + transport_name + "_" + |
| 64 rtc::ToString<>(channel_component); | 70 rtc::ToString<>(channel_component); |
| 65 } | 71 } |
| 66 | 72 |
| 67 std::string RTCTransportStatsIDFromBaseChannel( | 73 std::string RTCTransportStatsIDFromBaseChannel( |
| 68 const ProxyTransportMap& proxy_to_transport, | 74 const ProxyTransportMap& proxy_to_transport, |
| (...skipping 204 matching lines...) | |
| 273 candidate_stats->priority = static_cast<int32_t>(candidate.priority()); | 279 candidate_stats->priority = static_cast<int32_t>(candidate.priority()); |
| 274 | 280 |
| 275 stats = candidate_stats.get(); | 281 stats = candidate_stats.get(); |
| 276 report->AddStats(std::move(candidate_stats)); | 282 report->AddStats(std::move(candidate_stats)); |
| 277 } | 283 } |
| 278 RTC_DCHECK_EQ(stats->type(), is_local ? RTCLocalIceCandidateStats::kType | 284 RTC_DCHECK_EQ(stats->type(), is_local ? RTCLocalIceCandidateStats::kType |
| 279 : RTCRemoteIceCandidateStats::kType); | 285 : RTCRemoteIceCandidateStats::kType); |
| 280 return stats->id(); | 286 return stats->id(); |
| 281 } | 287 } |
| 282 | 288 |
| 289 TracksToSsrcs GetTracksToSsrcs( | |
| 290 const std::vector<rtc::scoped_refptr<RtpSenderInterface>> senders) { | |
| 291 TracksToSsrcs tracks_to_ssrcs; | |
| 292 for (const rtc::scoped_refptr<RtpSenderInterface>& sender : senders) { | |
| 293 if (!sender->track()) | |
| 294 continue; | |
| 295 if (sender->ssrc() != 0) { | |
| 296 // TODO(hbos): What if multiple |RtpSenderInterface| have the same track | |
| 297 // attached yielding multiple SSRCs groups? Only one of them would | |
| 298 // currently be used. crbug.com/659137 | |
| 299 // TODO(hbos): What if |GetParameters().encodings.size() > 1| yields | |
| 300 // multiple SSRC groups per sender? crbug.com/659137 | |
hbos, 2016/12/28 16:12:30:
Looking for input about what to do in either of th…
| 301 tracks_to_ssrcs[sender->track().get()] = sender->ssrc(); | |
| 302 } | |
| 303 } | |
| 304 return tracks_to_ssrcs; | |
| 305 } | |
| 306 // TODO(hbos,deadbeef): With template argument |T| for both |RtpSenderInterface| | |
| 307 // and |RtpReceiverInterface|, this function should be able to cover both sender | |
| 308 // and receiver cases. But the |RtpSenderInterface| needs to be updated first; | |
| 309 // when this comment was written it could have an |ssrc()| but still no value | |
| 310 // for |GetParameters().encodings[0].ssrc|. | |
hbos, 2016/12/28 16:12:30:
(Discovered by debugging rtcstats_integrationtest.…
| 311 TracksToSsrcs GetTracksToSsrcs( | |
| 312 const std::vector<rtc::scoped_refptr<RtpReceiverInterface>> receivers) { | |
| 313 TracksToSsrcs tracks_to_ssrcs; | |
| 314 for (const rtc::scoped_refptr<RtpReceiverInterface>& receiver : receivers) { | |
| 315 if (!receiver->track()) | |
| 316 continue; | |
| 317 RTC_DCHECK( | |
| 318 tracks_to_ssrcs.find(receiver->track().get()) == tracks_to_ssrcs.end()); | |
| 319 RtpParameters parameters = receiver->GetParameters(); | |
| 320 if (parameters.encodings.empty()) | |
| 321 continue; | |
| 322 RTC_DCHECK_EQ(1u, parameters.encodings.size()); | |
| 323 rtc::Optional<uint32_t> ssrc = parameters.encodings[0].ssrc; | |
| 324 if (ssrc) | |
| 325 tracks_to_ssrcs[receiver->track().get()] = *ssrc; | |
| 326 } | |
| 327 return tracks_to_ssrcs; | |
| 328 } | |
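For reference, this is roughly what the unified helper envisioned by the TODO(hbos,deadbeef) above could look like, assuming |RtpSenderInterface::GetParameters()| eventually reports |encodings[0].ssrc| the way the receiver side already does, which per the TODO is not yet the case. Sketch only, not part of the CL:

```cpp
// Hypothetical sketch; |T| would be RtpSenderInterface or RtpReceiverInterface
// once both report encodings[0].ssrc reliably.
template <typename T>
TracksToSsrcs GetTracksToSsrcsFromParameters(
    const std::vector<rtc::scoped_refptr<T>>& senders_or_receivers) {
  TracksToSsrcs tracks_to_ssrcs;
  for (const rtc::scoped_refptr<T>& sender_or_receiver : senders_or_receivers) {
    if (!sender_or_receiver->track())
      continue;
    RtpParameters parameters = sender_or_receiver->GetParameters();
    if (parameters.encodings.empty())
      continue;
    rtc::Optional<uint32_t> ssrc = parameters.encodings[0].ssrc;
    if (ssrc)
      tracks_to_ssrcs[sender_or_receiver->track().get()] = *ssrc;
  }
  return tracks_to_ssrcs;
}
```

Until the sender side fills in |encodings[0].ssrc|, the two type-specific overloads above remain necessary.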
| 329 | |
| 330 // |T| can be any |MediaSenderInfo| or |MediaReceiverInfo|. | |
| 331 template<typename T> | |
| 332 const T* GetMediaInfoFromSsrc(const std::vector<T>& infos, uint32_t ssrc) { | |
| 333 for (const T& info : infos) { | |
| 334 if (info.ssrc() == ssrc) | |
| 335 return &info; | |
| 336 } | |
| 337 return nullptr; | |
| 338 } | |
| 339 | |
| 340 std::vector<std::string> StringSsrcsFromSsrcs( | |
| 341 const std::vector<uint32_t>& ssrcs) { | |
| 342 std::vector<std::string> string_ssrcs; | |
| 343 for (uint32_t ssrc : ssrcs) { | |
| 344 string_ssrcs.push_back(rtc::ToString<>(ssrc)); | |
| 345 } | |
| 346 return string_ssrcs; | |
| 347 } | |
| 348 | |
| 283 void ProduceMediaStreamAndTrackStats( | 349 void ProduceMediaStreamAndTrackStats( |
| 284 int64_t timestamp_us, | 350 int64_t timestamp_us, |
| 285 rtc::scoped_refptr<StreamCollectionInterface> streams, | 351 rtc::scoped_refptr<StreamCollectionInterface> streams, |
| 286 bool is_local, | 352 bool is_local, |
| 353 const TracksToSsrcs& tracks_to_ssrcs, | |
| 354 const rtc::Optional<cricket::VoiceMediaInfo>& voice_info, | |
| 355 const rtc::Optional<cricket::VideoMediaInfo>& video_info, | |
| 287 RTCStatsReport* report) { | 356 RTCStatsReport* report) { |
| 288 // TODO(hbos): When "AddTrack" is implemented we should iterate tracks to | 357 // TODO(hbos): When "AddTrack" is implemented we should iterate tracks to |
| 289 // find which streams exist, not iterate streams to find tracks. | 358 // find which streams exist, not iterate streams to find tracks. |
| 290 // crbug.com/659137 | 359 // crbug.com/659137 |
| 291 // TODO(hbos): Return stats of detached tracks. We have to perform stats | 360 // TODO(hbos): Return stats of detached tracks. We have to perform stats |
| 292 // gathering at the time of detachment to get accurate stats and timestamps. | 361 // gathering at the time of detachment to get accurate stats and timestamps. |
| 293 // crbug.com/659137 | 362 // crbug.com/659137 |
| 294 if (!streams) | 363 if (!streams) |
| 295 return; | 364 return; |
| 296 for (size_t i = 0; i < streams->count(); ++i) { | 365 for (size_t i = 0; i < streams->count(); ++i) { |
| 297 MediaStreamInterface* stream = streams->at(i); | 366 MediaStreamInterface* stream = streams->at(i); |
| 298 | 367 |
| 299 std::unique_ptr<RTCMediaStreamStats> stream_stats( | 368 std::unique_ptr<RTCMediaStreamStats> stream_stats( |
| 300 new RTCMediaStreamStats( | 369 new RTCMediaStreamStats( |
| 301 (is_local ? "RTCMediaStream_local_" : "RTCMediaStream_remote_") + | 370 (is_local ? "RTCMediaStream_local_" : "RTCMediaStream_remote_") + |
| 302 stream->label(), timestamp_us)); | 371 stream->label(), timestamp_us)); |
| 303 stream_stats->stream_identifier = stream->label(); | 372 stream_stats->stream_identifier = stream->label(); |
| 304 stream_stats->track_ids = std::vector<std::string>(); | 373 stream_stats->track_ids = std::vector<std::string>(); |
| 305 // Audio Tracks | 374 // Audio Tracks |
| 306 for (const rtc::scoped_refptr<AudioTrackInterface>& audio_track : | 375 for (const rtc::scoped_refptr<AudioTrackInterface>& audio_track : |
| 307 stream->GetAudioTracks()) { | 376 stream->GetAudioTracks()) { |
| 308 std::string id = RTCMediaStreamTrackStatsIDFromMediaStreamTrackInterface( | 377 std::string id = RTCMediaStreamTrackStatsIDFromMediaStreamTrackInterface( |
| 309 *audio_track.get()); | 378 *audio_track.get(), is_local); |
| 310 if (report->Get(id)) { | |
| 311 // Skip track, stats already exist for it. | |
| 312 continue; | |
| 313 } | |
| 314 std::unique_ptr<RTCMediaStreamTrackStats> audio_track_stats( | 379 std::unique_ptr<RTCMediaStreamTrackStats> audio_track_stats( |
| 315 new RTCMediaStreamTrackStats(id, timestamp_us)); | 380 new RTCMediaStreamTrackStats(id, timestamp_us)); |
| 316 stream_stats->track_ids->push_back(audio_track_stats->id()); | 381 stream_stats->track_ids->push_back(audio_track_stats->id()); |
| 317 SetMediaStreamTrackStatsFromMediaStreamTrackInterface( | 382 SetMediaStreamTrackStatsFromMediaStreamTrackInterface( |
| 318 *audio_track.get(), | 383 *audio_track.get(), |
| 319 audio_track_stats.get()); | 384 audio_track_stats.get()); |
| 385 TracksToSsrcs::const_iterator it = tracks_to_ssrcs.find( | |
| 386 audio_track.get()); | |
| 387 if (it != tracks_to_ssrcs.end() && voice_info) { | |
| 388 uint32_t ssrc = it->second; | |
| 389 if (is_local) { | |
hbos, 2016/12/29 09:53:55:
So I'm assuming it's either [Voice/Video]SenderInf…
| 390 const cricket::VoiceSenderInfo* sender_info = | |
| 391 GetMediaInfoFromSsrc(voice_info->senders, ssrc); | |
| 392 if (sender_info) { | |
| 393 audio_track_stats->ssrc_ids = StringSsrcsFromSsrcs( | |
| 394 sender_info->ssrcs()); | |
| 395 } | |
| 396 } else { | |
| 397 const cricket::VoiceReceiverInfo* receiver_info = | |
| 398 GetMediaInfoFromSsrc(voice_info->receivers, ssrc); | |
| 399 if (receiver_info) { | |
| 400 audio_track_stats->ssrc_ids = StringSsrcsFromSsrcs( | |
| 401 receiver_info->ssrcs()); | |
| 402 } | |
| 403 } | |
| 404 } | |
| 320 audio_track_stats->remote_source = !is_local; | 405 audio_track_stats->remote_source = !is_local; |
| 321 audio_track_stats->detached = false; | 406 audio_track_stats->detached = false; |
| 322 int signal_level; | 407 int signal_level; |
| 323 if (audio_track->GetSignalLevel(&signal_level)) { | 408 if (audio_track->GetSignalLevel(&signal_level)) { |
| 324 // Convert signal level from [0,32767] int to [0,1] double. | 409 // Convert signal level from [0,32767] int to [0,1] double. |
| 325 RTC_DCHECK_GE(signal_level, 0); | 410 RTC_DCHECK_GE(signal_level, 0); |
| 326 RTC_DCHECK_LE(signal_level, 32767); | 411 RTC_DCHECK_LE(signal_level, 32767); |
| 327 audio_track_stats->audio_level = signal_level / 32767.0; | 412 audio_track_stats->audio_level = signal_level / 32767.0; |
| 328 } | 413 } |
| 329 if (audio_track->GetAudioProcessor()) { | 414 if (audio_track->GetAudioProcessor()) { |
| 330 AudioProcessorInterface::AudioProcessorStats audio_processor_stats; | 415 AudioProcessorInterface::AudioProcessorStats audio_processor_stats; |
| 331 audio_track->GetAudioProcessor()->GetStats(&audio_processor_stats); | 416 audio_track->GetAudioProcessor()->GetStats(&audio_processor_stats); |
| 332 if (audio_processor_stats.echo_return_loss != -100) { | 417 if (audio_processor_stats.echo_return_loss != -100) { |
| 333 audio_track_stats->echo_return_loss = static_cast<double>( | 418 audio_track_stats->echo_return_loss = static_cast<double>( |
| 334 audio_processor_stats.echo_return_loss); | 419 audio_processor_stats.echo_return_loss); |
| 335 } | 420 } |
| 336 if (audio_processor_stats.echo_return_loss_enhancement != -100) { | 421 if (audio_processor_stats.echo_return_loss_enhancement != -100) { |
| 337 audio_track_stats->echo_return_loss_enhancement = static_cast<double>( | 422 audio_track_stats->echo_return_loss_enhancement = static_cast<double>( |
| 338 audio_processor_stats.echo_return_loss_enhancement); | 423 audio_processor_stats.echo_return_loss_enhancement); |
| 339 } | 424 } |
| 340 } | 425 } |
| 341 report->AddStats(std::move(audio_track_stats)); | 426 report->AddStats(std::move(audio_track_stats)); |
| 342 } | 427 } |
| 343 // Video Tracks | 428 // Video Tracks |
| 344 for (const rtc::scoped_refptr<VideoTrackInterface>& video_track : | 429 for (const rtc::scoped_refptr<VideoTrackInterface>& video_track : |
| 345 stream->GetVideoTracks()) { | 430 stream->GetVideoTracks()) { |
| 346 std::string id = RTCMediaStreamTrackStatsIDFromMediaStreamTrackInterface( | 431 std::string id = RTCMediaStreamTrackStatsIDFromMediaStreamTrackInterface( |
| 347 *video_track.get()); | 432 *video_track.get(), is_local); |
| 348 if (report->Get(id)) { | |
| 349 // Skip track, stats already exist for it. | |
| 350 continue; | |
| 351 } | |
| 352 std::unique_ptr<RTCMediaStreamTrackStats> video_track_stats( | 433 std::unique_ptr<RTCMediaStreamTrackStats> video_track_stats( |
| 353 new RTCMediaStreamTrackStats(id, timestamp_us)); | 434 new RTCMediaStreamTrackStats(id, timestamp_us)); |
| 354 stream_stats->track_ids->push_back(video_track_stats->id()); | 435 stream_stats->track_ids->push_back(video_track_stats->id()); |
| 355 SetMediaStreamTrackStatsFromMediaStreamTrackInterface( | 436 SetMediaStreamTrackStatsFromMediaStreamTrackInterface( |
| 356 *video_track.get(), | 437 *video_track.get(), |
| 357 video_track_stats.get()); | 438 video_track_stats.get()); |
| 439 TracksToSsrcs::const_iterator it = tracks_to_ssrcs.find( | |
| 440 video_track.get()); | |
| 441 if (it != tracks_to_ssrcs.end() && video_info) { | |
| 442 uint32_t ssrc = it->second; | |
| 443 if (is_local) { | |
| 444 const cricket::VideoSenderInfo* sender_info = | |
| 445 GetMediaInfoFromSsrc(video_info->senders, ssrc); | |
| 446 if (sender_info) { | |
| 447 video_track_stats->ssrc_ids = StringSsrcsFromSsrcs( | |
| 448 sender_info->ssrcs()); | |
| 449 } | |
| 450 } else { | |
| 451 const cricket::VideoReceiverInfo* receiver_info = | |
| 452 GetMediaInfoFromSsrc(video_info->receivers, ssrc); | |
| 453 if (receiver_info) { | |
| 454 video_track_stats->ssrc_ids = StringSsrcsFromSsrcs( | |
| 455 receiver_info->ssrcs()); | |
| 456 } | |
| 457 } | |
| 458 } | |
| 358 video_track_stats->remote_source = !is_local; | 459 video_track_stats->remote_source = !is_local; |
| 359 video_track_stats->detached = false; | 460 video_track_stats->detached = false; |
| 360 if (video_track->GetSource()) { | 461 if (video_track->GetSource()) { |
| 361 VideoTrackSourceInterface::Stats video_track_source_stats; | 462 VideoTrackSourceInterface::Stats video_track_source_stats; |
| 362 if (video_track->GetSource()->GetStats(&video_track_source_stats)) { | 463 if (video_track->GetSource()->GetStats(&video_track_source_stats)) { |
| 363 video_track_stats->frame_width = static_cast<uint32_t>( | 464 video_track_stats->frame_width = static_cast<uint32_t>( |
| 364 video_track_source_stats.input_width); | 465 video_track_source_stats.input_width); |
| 365 video_track_stats->frame_height = static_cast<uint32_t>( | 466 video_track_stats->frame_height = static_cast<uint32_t>( |
| 366 video_track_source_stats.input_height); | 467 video_track_source_stats.input_height); |
| 367 } | 468 } |
| (...skipping 52 matching lines...) | |
| 420 // case of already gathering stats, |callback_| will be invoked when there | 521 // case of already gathering stats, |callback_| will be invoked when there |
| 421 // are no more pending partial reports. | 522 // are no more pending partial reports. |
| 422 | 523 |
| 423 // "Now" using a system clock, relative to the UNIX epoch (Jan 1, 1970, | 524 // "Now" using a system clock, relative to the UNIX epoch (Jan 1, 1970, |
| 424 // UTC), in microseconds. The system clock could be modified and is not | 525 // UTC), in microseconds. The system clock could be modified and is not |
| 425 // necessarily monotonically increasing. | 526 // necessarily monotonically increasing. |
| 426 int64_t timestamp_us = rtc::TimeUTCMicros(); | 527 int64_t timestamp_us = rtc::TimeUTCMicros(); |
| 427 | 528 |
| 428 num_pending_partial_reports_ = 3; | 529 num_pending_partial_reports_ = 3; |
| 429 partial_report_timestamp_us_ = cache_now_us; | 530 partial_report_timestamp_us_ = cache_now_us; |
| 430 invoker_.AsyncInvoke<void>(RTC_FROM_HERE, signaling_thread_, | |
| 431 rtc::Bind(&RTCStatsCollector::ProducePartialResultsOnSignalingThread, | |
| 432 rtc::scoped_refptr<RTCStatsCollector>(this), timestamp_us)); | |
| 433 | 531 |
| 434 // TODO(hbos): No stats are gathered by | 532 // TODO(hbos): No stats are gathered by |
| 435 // |ProducePartialResultsOnWorkerThread|, remove it. | 533 // |ProducePartialResultsOnWorkerThread|, remove it. |
| 436 invoker_.AsyncInvoke<void>(RTC_FROM_HERE, worker_thread_, | 534 invoker_.AsyncInvoke<void>(RTC_FROM_HERE, worker_thread_, |
| 437 rtc::Bind(&RTCStatsCollector::ProducePartialResultsOnWorkerThread, | 535 rtc::Bind(&RTCStatsCollector::ProducePartialResultsOnWorkerThread, |
| 438 rtc::scoped_refptr<RTCStatsCollector>(this), timestamp_us)); | 536 rtc::scoped_refptr<RTCStatsCollector>(this), timestamp_us)); |
| 439 | 537 |
| 440 // Prepare |channel_names_| and |media_info_| for use in | 538 // Prepare |channel_names_| and |media_info_|. These are read in |
| 441 // |ProducePartialResultsOnNetworkThread|. | 539 // |ProducePartialResultsOnNetworkThread|, and |media_info_| is also used in |
| 540 // |ProducePartialResultsOnSignalingThread|. | |
| 442 channel_name_pairs_.reset(new ChannelNamePairs()); | 541 channel_name_pairs_.reset(new ChannelNamePairs()); |
| 443 if (pc_->session()->voice_channel()) { | 542 if (pc_->session()->voice_channel()) { |
| 444 channel_name_pairs_->voice = rtc::Optional<ChannelNamePair>( | 543 channel_name_pairs_->voice = rtc::Optional<ChannelNamePair>( |
| 445 ChannelNamePair(pc_->session()->voice_channel()->content_name(), | 544 ChannelNamePair(pc_->session()->voice_channel()->content_name(), |
| 446 pc_->session()->voice_channel()->transport_name())); | 545 pc_->session()->voice_channel()->transport_name())); |
| 447 } | 546 } |
| 448 if (pc_->session()->video_channel()) { | 547 if (pc_->session()->video_channel()) { |
| 449 channel_name_pairs_->video = rtc::Optional<ChannelNamePair>( | 548 channel_name_pairs_->video = rtc::Optional<ChannelNamePair>( |
| 450 ChannelNamePair(pc_->session()->video_channel()->content_name(), | 549 ChannelNamePair(pc_->session()->video_channel()->content_name(), |
| 451 pc_->session()->video_channel()->transport_name())); | 550 pc_->session()->video_channel()->transport_name())); |
| 452 } | 551 } |
| 453 if (pc_->session()->data_channel()) { | 552 if (pc_->session()->data_channel()) { |
| 454 channel_name_pairs_->data = rtc::Optional<ChannelNamePair>( | 553 channel_name_pairs_->data = rtc::Optional<ChannelNamePair>( |
| 455 ChannelNamePair(pc_->session()->data_channel()->content_name(), | 554 ChannelNamePair(pc_->session()->data_channel()->content_name(), |
| 456 pc_->session()->data_channel()->transport_name())); | 555 pc_->session()->data_channel()->transport_name())); |
| 457 } | 556 } |
| 458 media_info_.reset(PrepareMediaInfo_s().release()); | 557 media_info_.reset(PrepareMediaInfo_s().release()); |
| 459 invoker_.AsyncInvoke<void>(RTC_FROM_HERE, network_thread_, | 558 invoker_.AsyncInvoke<void>(RTC_FROM_HERE, network_thread_, |
| 460 rtc::Bind(&RTCStatsCollector::ProducePartialResultsOnNetworkThread, | 559 rtc::Bind(&RTCStatsCollector::ProducePartialResultsOnNetworkThread, |
| 461 rtc::scoped_refptr<RTCStatsCollector>(this), timestamp_us)); | 560 rtc::scoped_refptr<RTCStatsCollector>(this), timestamp_us)); |
| 561 ProducePartialResultsOnSignalingThread(timestamp_us); | |
| 462 } | 562 } |
| 463 } | 563 } |
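Because |ProducePartialResultsOnSignalingThread| is now invoked directly, after |channel_name_pairs_| and |media_info_| have been prepared, rather than posted via |AsyncInvoke| before them, a cheap guard could make that ordering assumption explicit. This is only a hypothetical suggestion, not part of the CL:

```cpp
// Hypothetical guard, not in the CL: the signaling-thread pass now relies on
// |media_info_| having been prepared by GetStatsReport() just before it runs.
void RTCStatsCollector::ProducePartialResultsOnSignalingThread(
    int64_t timestamp_us) {
  RTC_DCHECK(signaling_thread_->IsCurrent());
  RTC_DCHECK(media_info_);
  // ... existing signaling-thread stats production ...
}
```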
| 464 | 564 |
| 465 void RTCStatsCollector::ClearCachedStatsReport() { | 565 void RTCStatsCollector::ClearCachedStatsReport() { |
| 466 RTC_DCHECK(signaling_thread_->IsCurrent()); | 566 RTC_DCHECK(signaling_thread_->IsCurrent()); |
| 467 cached_report_ = nullptr; | 567 cached_report_ = nullptr; |
| 468 } | 568 } |
| 469 | 569 |
| 470 void RTCStatsCollector::WaitForPendingRequest() { | 570 void RTCStatsCollector::WaitForPendingRequest() { |
| 471 RTC_DCHECK(signaling_thread_->IsCurrent()); | 571 RTC_DCHECK(signaling_thread_->IsCurrent()); |
| (...skipping 223 matching lines...) | |
| 695 report->AddStats(std::move(candidate_pair_stats)); | 795 report->AddStats(std::move(candidate_pair_stats)); |
| 696 } | 796 } |
| 697 } | 797 } |
| 698 } | 798 } |
| 699 } | 799 } |
| 700 | 800 |
| 701 void RTCStatsCollector::ProduceMediaStreamAndTrackStats_s( | 801 void RTCStatsCollector::ProduceMediaStreamAndTrackStats_s( |
| 702 int64_t timestamp_us, RTCStatsReport* report) const { | 802 int64_t timestamp_us, RTCStatsReport* report) const { |
| 703 RTC_DCHECK(signaling_thread_->IsCurrent()); | 803 RTC_DCHECK(signaling_thread_->IsCurrent()); |
| 704 ProduceMediaStreamAndTrackStats( | 804 ProduceMediaStreamAndTrackStats( |
| 705 timestamp_us, pc_->local_streams(), true, report); | 805 timestamp_us, |
| 806 pc_->local_streams(), | |
| 807 true, | |
| 808 GetTracksToSsrcs(pc_->GetSenders()), | |
| 809 media_info_->voice, | |
| 810 media_info_->video, | |
| 811 report); | |
| 706 ProduceMediaStreamAndTrackStats( | 812 ProduceMediaStreamAndTrackStats( |
| 707 timestamp_us, pc_->remote_streams(), false, report); | 813 timestamp_us, |
| 814 pc_->remote_streams(), | |
| 815 false, | |
| 816 GetTracksToSsrcs(pc_->GetReceivers()), | |
| 817 media_info_->voice, | |
| 818 media_info_->video, | |
| 819 report); | |
| 708 } | 820 } |
| 709 | 821 |
| 710 void RTCStatsCollector::ProducePeerConnectionStats_s( | 822 void RTCStatsCollector::ProducePeerConnectionStats_s( |
| 711 int64_t timestamp_us, RTCStatsReport* report) const { | 823 int64_t timestamp_us, RTCStatsReport* report) const { |
| 712 RTC_DCHECK(signaling_thread_->IsCurrent()); | 824 RTC_DCHECK(signaling_thread_->IsCurrent()); |
| 713 std::unique_ptr<RTCPeerConnectionStats> stats( | 825 std::unique_ptr<RTCPeerConnectionStats> stats( |
| 714 new RTCPeerConnectionStats("RTCPeerConnection", timestamp_us)); | 826 new RTCPeerConnectionStats("RTCPeerConnection", timestamp_us)); |
| 715 stats->data_channels_opened = internal_record_.data_channels_opened; | 827 stats->data_channels_opened = internal_record_.data_channels_opened; |
| 716 stats->data_channels_closed = internal_record_.data_channels_closed; | 828 stats->data_channels_closed = internal_record_.data_channels_closed; |
| 717 report->AddStats(std::move(stats)); | 829 report->AddStats(std::move(stats)); |
| (...skipping 247 matching lines...) | |
| 965 const std::string& type) { | 1077 const std::string& type) { |
| 966 return CandidateTypeToRTCIceCandidateType(type); | 1078 return CandidateTypeToRTCIceCandidateType(type); |
| 967 } | 1079 } |
| 968 | 1080 |
| 969 const char* DataStateToRTCDataChannelStateForTesting( | 1081 const char* DataStateToRTCDataChannelStateForTesting( |
| 970 DataChannelInterface::DataState state) { | 1082 DataChannelInterface::DataState state) { |
| 971 return DataStateToRTCDataChannelState(state); | 1083 return DataStateToRTCDataChannelState(state); |
| 972 } | 1084 } |
| 973 | 1085 |
| 974 } // namespace webrtc | 1086 } // namespace webrtc |
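Finally, for context on how the new |ssrc_ids| member surfaces to API consumers, a small usage sketch. The function is hypothetical and assumes the |RTCStatsReport| iteration plus the |cast_to| and |is_defined| accessors of the RTCStats API, matching the |kType| comparison style used elsewhere in this file:

```cpp
// Hypothetical helper, not part of this CL: log the SSRCs attached to each
// track stats object in a delivered report.
void LogTrackSsrcs(const rtc::scoped_refptr<const RTCStatsReport>& report) {
  for (const RTCStats& stats : *report) {
    if (stats.type() != RTCMediaStreamTrackStats::kType)
      continue;
    const RTCMediaStreamTrackStats& track_stats =
        stats.cast_to<RTCMediaStreamTrackStats>();
    if (!track_stats.ssrc_ids.is_defined())
      continue;  // No SSRC was found for this track.
    for (const std::string& ssrc : *track_stats.ssrc_ids)
      LOG(LS_INFO) << track_stats.id() << " uses SSRC " << ssrc;
  }
}
```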