Index: webrtc/media/engine/webrtcvideoengine2.cc |
diff --git a/webrtc/media/engine/webrtcvideoengine2.cc b/webrtc/media/engine/webrtcvideoengine2.cc |
index c6bdafee5a336666a4fd46f5aab78c0590525105..d3ee4fa07954113c07a1e2c84a33d72cffd6970e 100644 |
--- a/webrtc/media/engine/webrtcvideoengine2.cc |
+++ b/webrtc/media/engine/webrtcvideoengine2.cc |
@@ -14,6 +14,7 @@ |
#include <set> |
#include <string> |
+#include "webrtc/base/asyncinvoker.h" |
#include "webrtc/base/buffer.h" |
#include "webrtc/base/logging.h" |
#include "webrtc/base/stringutils.h" |
@@ -314,6 +315,7 @@ static int GetMaxDefaultVideoBitrateKbps(int width, int height) { |
return 2500; |
} |
} |
+ |
} // namespace |
// Constants defined in webrtc/media/engine/constants.h |
@@ -1000,12 +1002,10 @@ bool WebRtcVideoChannel2::AddSendStream(const StreamParams& sp) { |
send_ssrcs_.insert(used_ssrc); |
webrtc::VideoSendStream::Config config(this); |
- config.overuse_callback = this; |
- |
- WebRtcVideoSendStream* stream = |
- new WebRtcVideoSendStream(call_, sp, config, external_encoder_factory_, |
- bitrate_config_.max_bitrate_bps, send_codec_, |
- send_rtp_extensions_, send_params_); |
+ WebRtcVideoSendStream* stream = new WebRtcVideoSendStream( |
+ call_, sp, config, external_encoder_factory_, signal_cpu_adaptation_, |
+ bitrate_config_.max_bitrate_bps, send_codec_, send_rtp_extensions_, |
+ send_params_); |
uint32_t ssrc = sp.first_ssrc(); |
RTC_DCHECK(ssrc != 0); |
@@ -1412,26 +1412,6 @@ void WebRtcVideoChannel2::SetInterface(NetworkInterface* iface) { |
kVideoRtpBufferSize); |
} |
-void WebRtcVideoChannel2::OnLoadUpdate(Load load) { |
- // OnLoadUpdate can not take any locks that are held while creating streams |
- // etc. Doing so establishes lock-order inversions between the webrtc process |
- // thread on stream creation and locks such as stream_crit_ while calling out. |
- rtc::CritScope stream_lock(&capturer_crit_); |
- if (!signal_cpu_adaptation_) |
- return; |
- // Do not adapt resolution for screen content as this will likely result in |
- // blurry and unreadable text. |
- for (auto& kv : capturers_) { |
- if (kv.second != nullptr |
- && !kv.second->IsScreencast() |
- && kv.second->video_adapter() != nullptr) { |
- kv.second->video_adapter()->OnCpuResolutionRequest( |
- load == kOveruse ? CoordinatedVideoAdapter::DOWNGRADE |
- : CoordinatedVideoAdapter::UPGRADE); |
- } |
- } |
-} |
- |
bool WebRtcVideoChannel2::SendRtp(const uint8_t* data, |
size_t len, |
const webrtc::PacketOptions& options) { |
@@ -1490,11 +1470,61 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::AllocatedEncoder::AllocatedEncoder( |
} |
} |
+// Proxy class used for marshalling calls to webrtc::LoadObserver::OnLoadUpdate |
+// from a media engine thread to the worker thread. |
+// This proxy is necessary to make sure VideoSourceInterface::AddOrUpdateSink is |
+// called from only one thread and also because |lock_| should not be held |
+// in a media engine thread since that may cause a deadlock if |stream_| is |
+// destroyed. |
pthatcher1
2016/02/25 20:20:05
Could we just implement WebRtcVideoChannel2::WebRt
perkj_webrtc
2016/02/26 14:08:25
// Invokes function objects (aka functors) asynchr
|
+class WebRtcVideoChannel2::WebRtcVideoSendStream::LoadObserverProxy { |
+ public: |
+ explicit LoadObserverProxy(webrtc::LoadObserver* observer) { |
+ helper_ = new rtc::RefCountedObject<Helper>(observer); |
+ } |
+ ~LoadObserverProxy() { helper_->Detach(); } |
+ |
+ webrtc::LoadObserver* proxy() { return helper_; } |
+ |
+ private: |
+ class Helper : public webrtc::LoadObserver, public rtc::RefCountInterface { |
+ public: |
+ explicit Helper(webrtc::LoadObserver* observer) |
+ : thread_(rtc::Thread::Current()), observer_(observer) {} |
+ void Detach() { |
+ RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
+ observer_ = nullptr; |
+ } |
+ void OnLoadUpdate(webrtc::LoadObserver::Load load) override { |
+ if (rtc::Thread::Current() == thread_) { |
+ observer_->OnLoadUpdate(load); |
+ return; |
+ } |
+ invoker_.AsyncInvoke<void>( |
+ thread_, rtc::Bind(&Helper::OnLoadUpdateOnCorrectThread, this, load)); |
+ } |
+ void OnLoadUpdateOnCorrectThread(webrtc::LoadObserver::Load load) { |
+ RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
+ if (observer_) { |
+ observer_->OnLoadUpdate(load); |
+ } |
+ } |
+ |
+ private: |
+ rtc::ThreadChecker thread_checker_; |
+ rtc::AsyncInvoker invoker_; |
+ rtc::Thread* thread_; |
+ webrtc::LoadObserver* observer_; |
+ }; |
+ |
+ rtc::scoped_refptr<Helper> helper_; |
+}; |
+ |
WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream( |
webrtc::Call* call, |
const StreamParams& sp, |
const webrtc::VideoSendStream::Config& config, |
WebRtcVideoEncoderFactory* external_encoder_factory, |
+ bool enable_cpu_overuse_detection, |
int max_bitrate_bps, |
const rtc::Optional<VideoCodecSettings>& codec_settings, |
const std::vector<webrtc::RtpExtension>& rtp_extensions, |
@@ -1504,15 +1534,18 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream( |
: ssrcs_(sp.ssrcs), |
ssrc_groups_(sp.ssrc_groups), |
call_(call), |
+ cpu_downgrade_counter_(0), |
+ number_of_cpu_adapt_changes_(0), |
+ load_proxy_(new LoadObserverProxy(this)), |
+ capturer_(NULL), |
external_encoder_factory_(external_encoder_factory), |
stream_(NULL), |
parameters_(config, send_params.options, max_bitrate_bps, codec_settings), |
pending_encoder_reconfiguration_(false), |
allocated_encoder_(NULL, webrtc::kVideoCodecUnknown, false), |
- capturer_(NULL), |
+ capturer_is_screencast_(false), |
sending_(false), |
muted_(false), |
- old_adapt_changes_(0), |
first_frame_timestamp_ms_(0), |
last_frame_timestamp_ms_(0) { |
parameters_.config.rtp.max_packet_size = kVideoMtu; |
@@ -1526,6 +1559,8 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::WebRtcVideoSendStream( |
parameters_.config.rtp.rtcp_mode = send_params.rtcp.reduced_size |
? webrtc::RtcpMode::kReducedSize |
: webrtc::RtcpMode::kCompound; |
+ parameters_.config.overuse_callback = |
+ enable_cpu_overuse_detection ? load_proxy_->proxy() : nullptr; |
if (codec_settings) { |
SetCodecAndOptions(*codec_settings, parameters_.options); |
@@ -1589,7 +1624,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame( |
video_frame.set_render_time_ms(last_frame_timestamp_ms_); |
// Reconfigure codec if necessary. |
SetDimensions(video_frame.width(), video_frame.height(), |
- capturer_->IsScreencast()); |
+ capturer_is_screencast_); |
last_rotation_ = video_frame.rotation(); |
stream_->Input()->IncomingCapturedFrame(video_frame); |
@@ -1598,6 +1633,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::OnFrame( |
bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer( |
VideoCapturer* capturer) { |
TRACE_EVENT0("webrtc", "WebRtcVideoSendStream::SetCapturer"); |
+ RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
if (!DisconnectCapturer() && capturer == NULL) { |
return false; |
} |
@@ -1630,10 +1666,10 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer( |
capturer_ = NULL; |
return true; |
} |
- |
- capturer_ = capturer; |
- capturer_->AddOrUpdateSink(this, sink_wants_); |
+ capturer_is_screencast_ = capturer->IsScreencast(); |
} |
+ capturer_ = capturer; |
+ capturer_->AddOrUpdateSink(this, sink_wants_); |
return true; |
} |
@@ -1649,9 +1685,6 @@ bool WebRtcVideoChannel2::WebRtcVideoSendStream::DisconnectCapturer() { |
if (capturer_ == NULL) |
return false; |
- if (capturer_->video_adapter() != nullptr) |
- old_adapt_changes_ += capturer_->video_adapter()->adaptation_changes(); |
- |
capturer = capturer_; |
capturer_ = NULL; |
} |
@@ -1822,8 +1855,7 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::SetSendParameters( |
} else { |
parameters_.options = *params.options; |
} |
- } |
- else if (params.conference_mode && parameters_.codec_settings) { |
+ } else if (params.conference_mode && parameters_.codec_settings) { |
SetCodecAndOptions(*parameters_.codec_settings, parameters_.options); |
return; |
} |
@@ -1950,10 +1982,52 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::Stop() { |
sending_ = false; |
} |
+void WebRtcVideoChannel2::WebRtcVideoSendStream::OnLoadUpdate(Load load) { |
+ RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
+ LOG(LS_INFO) << "OnLoadUpdate " << load; |
+ if (!capturer_) { |
+ return; |
+ } |
+ |
+ rtc::Optional<int> max_pixel_count; |
+ rtc::Optional<int> max_pixel_count_step_up; |
+ if (load == kOveruse) { |
+ { |
+ rtc::CritScope cs(&lock_); |
+ max_pixel_count = rtc::Optional<int>( |
+ (last_dimensions_.height * last_dimensions_.width) / 2); |
+ } |
+ if (!sink_wants_.max_pixel_count || |
+ *sink_wants_.max_pixel_count != *max_pixel_count) { |
+ ++number_of_cpu_adapt_changes_; |
pthatcher1
2016/02/25 20:20:05
If we track cpu_downgrades_ and cpu_upgrades_, wou
perkj_webrtc
2016/02/26 14:08:25
hum- I think the easiest is to move back the logic
|
+ ++cpu_downgrade_counter_; |
+ } |
+ } else { |
+ RTC_DCHECK(load == kUnderuse); |
+ if (cpu_downgrade_counter_ > 0) { |
+ --cpu_downgrade_counter_; |
+ } |
+ { |
+ rtc::CritScope cs(&lock_); |
+ max_pixel_count_step_up = |
+ rtc::Optional<int>(last_dimensions_.height * last_dimensions_.width); |
+ } |
+ if (sink_wants_.max_pixel_count || |
+ (sink_wants_.max_pixel_count_step_up && |
+ *sink_wants_.max_pixel_count_step_up != *max_pixel_count_step_up)) { |
+ ++number_of_cpu_adapt_changes_; |
+ } |
+ } |
+ sink_wants_.max_pixel_count = max_pixel_count; |
+ sink_wants_.max_pixel_count_step_up = max_pixel_count_step_up; |
+ capturer_->AddOrUpdateSink(this, sink_wants_); |
+} |
+ |
VideoSenderInfo |
WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo() { |
VideoSenderInfo info; |
webrtc::VideoSendStream::Stats stats; |
+ RTC_DCHECK(thread_checker_.CalledOnValidThread()); |
{ |
rtc::CritScope cs(&lock_); |
for (uint32_t ssrc : parameters_.config.rtp.ssrcs) |
@@ -1975,23 +2049,20 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::GetVideoSenderInfo() { |
return info; |
stats = stream_->GetStats(); |
- |
- info.adapt_changes = old_adapt_changes_; |
- info.adapt_reason = CoordinatedVideoAdapter::ADAPTREASON_NONE; |
- |
- if (capturer_ != NULL) { |
- if (!capturer_->IsMuted()) { |
- VideoFormat last_captured_frame_format; |
- capturer_->GetStats(&info.adapt_frame_drops, &info.effects_frame_drops, |
- &info.capturer_frame_time, |
- &last_captured_frame_format); |
- info.input_frame_width = last_captured_frame_format.width; |
- info.input_frame_height = last_captured_frame_format.height; |
- } |
- if (capturer_->video_adapter() != nullptr) { |
- info.adapt_changes += capturer_->video_adapter()->adaptation_changes(); |
- info.adapt_reason = capturer_->video_adapter()->adapt_reason(); |
- } |
+ } |
+ info.adapt_changes = number_of_cpu_adapt_changes_; |
+ info.adapt_reason = cpu_downgrade_counter_ == 0 |
+ ? CoordinatedVideoAdapter::ADAPTREASON_NONE |
+ : CoordinatedVideoAdapter::ADAPTREASON_CPU; |
+ |
+ if (capturer_ != NULL) { |
+ if (!capturer_->IsMuted()) { |
+ VideoFormat last_captured_frame_format; |
+ capturer_->GetStats(&info.adapt_frame_drops, &info.effects_frame_drops, |
+ &info.capturer_frame_time, |
+ &last_captured_frame_format); |
+ info.input_frame_width = last_captured_frame_format.width; |
+ info.input_frame_height = last_captured_frame_format.height; |
} |
} |