Chromium Code Reviews
Side by Side Diff: webrtc/video/vie_encoder.cc

Issue 2060403002: Add task queue to Call. (Closed) Base URL: https://chromium.googlesource.com/external/webrtc.git@move_getpadding
Patch Set: Revert fix for asan, protect instead. Added destruction observer to frames in ViEEncoder tests. Created 4 years, 5 months ago
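The substance of the patch is that ViEEncoder now owns an rtc::TaskQueue ("EncoderQueue") and marshals its public entry points onto it, replacing the data_cs_ critical section. A minimal sketch of that pattern, assuming only the PostTask/IsCurrent and rtc::Event APIs already visible in this diff; the class, members, and include paths below are illustrative, not part of the patch:

#include "webrtc/base/event.h"
#include "webrtc/base/task_queue.h"

class QueueOwner {
 public:
  QueueOwner()
      : shutdown_event_(true /* manual_reset */, false /* initially_signaled */),
        queue_("ExampleQueue") {}

  // May be called from any thread; re-post so the body runs serialized on
  // |queue_|.
  void DoWork(int value) {
    if (!queue_.IsCurrent()) {
      queue_.PostTask([this, value] { DoWork(value); });
      return;
    }
    last_value_ = value;  // State is only ever touched on |queue_|.
  }

  // Blocking stop, mirroring ViEEncoder::Stop() below: post the teardown and
  // wait for it to have run before returning.
  void Stop() {
    if (!queue_.IsCurrent()) {
      queue_.PostTask([this] { Stop(); });
      shutdown_event_.Wait(rtc::Event::kForever);
      return;
    }
    // Teardown that must happen on the queue goes here.
    shutdown_event_.Set();
  }

 private:
  rtc::Event shutdown_event_;
  int last_value_ = 0;
  rtc::TaskQueue queue_;  // Declared last: destroyed first, so no pending task
                          // runs against already-destroyed members.
};

Posting Stop() back to the queue and waiting on an event guarantees that nothing touching the object is still pending when the destructor runs; the RTC_DCHECK(shutdown_event_.Wait(0)) in ~ViEEncoder() below enforces that Stop() was called first.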
1 /* 1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 * 3 *
4 * Use of this source code is governed by a BSD-style license 4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source 5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found 6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may 7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree. 8 * be found in the AUTHORS file in the root of the source tree.
9 */ 9 */
10 10
11 #include "webrtc/video/vie_encoder.h" 11 #include "webrtc/video/vie_encoder.h"
12 12
13 #include <algorithm> 13 #include <algorithm>
14 #include <limits> 14 #include <limits>
15 15
16 #include "webrtc/base/checks.h" 16 #include "webrtc/base/checks.h"
17 #include "webrtc/base/logging.h" 17 #include "webrtc/base/logging.h"
18 #include "webrtc/base/trace_event.h" 18 #include "webrtc/base/trace_event.h"
19 #include "webrtc/base/timeutils.h" 19 #include "webrtc/base/timeutils.h"
20 #include "webrtc/modules/pacing/paced_sender.h" 20 #include "webrtc/modules/pacing/paced_sender.h"
21 #include "webrtc/modules/video_coding/include/video_coding.h" 21 #include "webrtc/modules/video_coding/include/video_coding.h"
22 #include "webrtc/modules/video_coding/include/video_coding_defines.h" 22 #include "webrtc/modules/video_coding/include/video_coding_defines.h"
23 #include "webrtc/system_wrappers/include/metrics.h" 23 #include "webrtc/system_wrappers/include/metrics.h"
24 #include "webrtc/video/overuse_frame_detector.h" 24 #include "webrtc/video/overuse_frame_detector.h"
25 #include "webrtc/video/send_statistics_proxy.h" 25 #include "webrtc/video/send_statistics_proxy.h"
26 #include "webrtc/video_frame.h" 26 #include "webrtc/video_frame.h"
27 27
28 namespace webrtc { 28 namespace webrtc {
29 29
30 namespace {
31
32 VideoCodecType PayloadNameToCodecType(const std::string& payload_name) {
33 if (payload_name == "VP8")
34 return kVideoCodecVP8;
35 if (payload_name == "VP9")
36 return kVideoCodecVP9;
37 if (payload_name == "H264")
38 return kVideoCodecH264;
39 return kVideoCodecGeneric;
40 }
41
42 VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config,
43 const std::string& payload_name,
44 int payload_type) {
45 const std::vector<VideoStream>& streams = config.streams;
46 static const int kEncoderMinBitrateKbps = 30;
47 RTC_DCHECK(!streams.empty());
48 RTC_DCHECK_GE(config.min_transmit_bitrate_bps, 0);
49
50 VideoCodec video_codec;
51 memset(&video_codec, 0, sizeof(video_codec));
52 video_codec.codecType = PayloadNameToCodecType(payload_name);
53
54 switch (config.content_type) {
55 case VideoEncoderConfig::ContentType::kRealtimeVideo:
56 video_codec.mode = kRealtimeVideo;
57 break;
58 case VideoEncoderConfig::ContentType::kScreen:
59 video_codec.mode = kScreensharing;
60 if (config.streams.size() == 1 &&
61 config.streams[0].temporal_layer_thresholds_bps.size() == 1) {
62 video_codec.targetBitrate =
63 config.streams[0].temporal_layer_thresholds_bps[0] / 1000;
64 }
65 break;
66 }
67
68 switch (video_codec.codecType) {
69 case kVideoCodecVP8: {
70 if (config.encoder_specific_settings) {
71 video_codec.codecSpecific.VP8 = *reinterpret_cast<const VideoCodecVP8*>(
72 config.encoder_specific_settings);
73 } else {
74 video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
75 }
76 video_codec.codecSpecific.VP8.numberOfTemporalLayers =
77 static_cast<unsigned char>(
78 streams.back().temporal_layer_thresholds_bps.size() + 1);
79 break;
80 }
81 case kVideoCodecVP9: {
82 if (config.encoder_specific_settings) {
83 video_codec.codecSpecific.VP9 = *reinterpret_cast<const VideoCodecVP9*>(
84 config.encoder_specific_settings);
85 if (video_codec.mode == kScreensharing) {
86 video_codec.codecSpecific.VP9.flexibleMode = true;
87 // For now VP9 screensharing uses 1 temporal and 2 spatial layers.
88 RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers,
89 1);
90 RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2);
91 }
92 } else {
93 video_codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings();
94 }
95 video_codec.codecSpecific.VP9.numberOfTemporalLayers =
96 static_cast<unsigned char>(
97 streams.back().temporal_layer_thresholds_bps.size() + 1);
98 break;
99 }
100 case kVideoCodecH264: {
101 if (config.encoder_specific_settings) {
102 video_codec.codecSpecific.H264 =
103 *reinterpret_cast<const VideoCodecH264*>(
104 config.encoder_specific_settings);
105 } else {
106 video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
107 }
108 break;
109 }
110 default:
111 // TODO(pbos): Support encoder_settings codec-agnostically.
112 RTC_DCHECK(!config.encoder_specific_settings)
113 << "Encoder-specific settings for codec type not wired up.";
114 break;
115 }
116
117 strncpy(video_codec.plName, payload_name.c_str(), kPayloadNameSize - 1);
118 video_codec.plName[kPayloadNameSize - 1] = '\0';
119 video_codec.plType = payload_type;
120 video_codec.numberOfSimulcastStreams =
121 static_cast<unsigned char>(streams.size());
122 video_codec.minBitrate = streams[0].min_bitrate_bps / 1000;
123 if (video_codec.minBitrate < kEncoderMinBitrateKbps)
124 video_codec.minBitrate = kEncoderMinBitrateKbps;
125 RTC_DCHECK_LE(streams.size(), static_cast<size_t>(kMaxSimulcastStreams));
126 if (video_codec.codecType == kVideoCodecVP9) {
127 // If the vector is empty, bitrates will be configured automatically.
128 RTC_DCHECK(config.spatial_layers.empty() ||
129 config.spatial_layers.size() ==
130 video_codec.codecSpecific.VP9.numberOfSpatialLayers);
131 RTC_DCHECK_LE(video_codec.codecSpecific.VP9.numberOfSpatialLayers,
132 kMaxSimulcastStreams);
133 for (size_t i = 0; i < config.spatial_layers.size(); ++i)
134 video_codec.spatialLayers[i] = config.spatial_layers[i];
135 }
136 for (size_t i = 0; i < streams.size(); ++i) {
137 SimulcastStream* sim_stream = &video_codec.simulcastStream[i];
138 RTC_DCHECK_GT(streams[i].width, 0u);
139 RTC_DCHECK_GT(streams[i].height, 0u);
140 RTC_DCHECK_GT(streams[i].max_framerate, 0);
141 // Different framerates not supported per stream at the moment.
142 RTC_DCHECK_EQ(streams[i].max_framerate, streams[0].max_framerate);
143 RTC_DCHECK_GE(streams[i].min_bitrate_bps, 0);
144 RTC_DCHECK_GE(streams[i].target_bitrate_bps, streams[i].min_bitrate_bps);
145 RTC_DCHECK_GE(streams[i].max_bitrate_bps, streams[i].target_bitrate_bps);
146 RTC_DCHECK_GE(streams[i].max_qp, 0);
147
148 sim_stream->width = static_cast<uint16_t>(streams[i].width);
149 sim_stream->height = static_cast<uint16_t>(streams[i].height);
150 sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000;
151 sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000;
152 sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000;
153 sim_stream->qpMax = streams[i].max_qp;
154 sim_stream->numberOfTemporalLayers = static_cast<unsigned char>(
155 streams[i].temporal_layer_thresholds_bps.size() + 1);
156
157 video_codec.width =
158 std::max(video_codec.width, static_cast<uint16_t>(streams[i].width));
159 video_codec.height =
160 std::max(video_codec.height, static_cast<uint16_t>(streams[i].height));
161 video_codec.minBitrate =
162 std::min(static_cast<uint16_t>(video_codec.minBitrate),
163 static_cast<uint16_t>(streams[i].min_bitrate_bps / 1000));
164 video_codec.maxBitrate += streams[i].max_bitrate_bps / 1000;
165 video_codec.qpMax = std::max(video_codec.qpMax,
166 static_cast<unsigned int>(streams[i].max_qp));
167 }
168
169 if (video_codec.maxBitrate == 0) {
170 // Unset max bitrate -> cap to one bit per pixel.
171 video_codec.maxBitrate =
172 (video_codec.width * video_codec.height * video_codec.maxFramerate) /
173 1000;
174 }
175 if (video_codec.maxBitrate < kEncoderMinBitrateKbps)
176 video_codec.maxBitrate = kEncoderMinBitrateKbps;
177
178 RTC_DCHECK_GT(streams[0].max_framerate, 0);
179 video_codec.maxFramerate = streams[0].max_framerate;
180 video_codec.expect_encode_from_texture = config.expect_encode_from_texture;
181
182 return video_codec;
183 }
184
185 // TODO(pbos): Lower these thresholds (to closer to 100%) when we handle
186 // pipelining encoders better (multiple input frames before something comes
187 // out). This should effectively turn off CPU adaptations for systems that
188 // remotely cope with the load right now.
189 CpuOveruseOptions GetCpuOveruseOptions(bool full_overuse_time) {
190 CpuOveruseOptions options;
191 if (full_overuse_time) {
192 options.low_encode_usage_threshold_percent = 150;
193 options.high_encode_usage_threshold_percent = 200;
194 }
195 return options;
196 }
197
198 } // namespace
199
200 class ViEEncoder::EncodeTask : public rtc::QueuedTask {
201 public:
202 EncodeTask(const VideoFrame& frame, ViEEncoder* vie_encoder)
203 : vie_encoder_(vie_encoder) {
204 frame_.ShallowCopy(frame);
205 ++vie_encoder_->posted_frames_waiting_for_encode_;
206 }
207
208 private:
209 bool Run() override {
210 if (--vie_encoder_->posted_frames_waiting_for_encode_ > 0) {
211 // There is a newer frame in flight. Do not encode this frame.
212 return true;
213 }
sprang_webrtc 2016/07/06 15:02:51 Can posted_frames_waiting_for_encode_ ever be 0 here?
perkj_webrtc 2016/07/07 08:37:12 ptal
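The question above concerns the stale-frame dropping scheme: posted_frames_waiting_for_encode_ is incremented once per posted EncodeTask and decremented once in Run(), so a decrement result above zero means at least one newer frame is already queued behind this task. A sketch of the idea with a hypothetical std::atomic counter (the real member is declared in vie_encoder.h, which is not part of this file's diff):

#include <atomic>

std::atomic<int> posted_frames_waiting_for_encode(0);

// Capture side, once per incoming frame:
//   ++posted_frames_waiting_for_encode;
//   encoder_queue.PostTask(new EncodeTask(frame, ...));

// Encoder queue, inside EncodeTask::Run():
bool RunEncodeTask(/* captured frame */) {
  if (--posted_frames_waiting_for_encode > 0) {
    // A newer frame was posted after this one; drop it and let the most
    // recently posted task do the encode.
    return true;
  }
  // Result is 0: this is the newest posted frame, so encode it. Because every
  // post pairs with exactly one decrement, the counter never goes negative.
  // EncodeVideoFrame(frame);
  return true;
}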
214 vie_encoder_->EncodeVideoFrame(frame_);
215 return true;
216 }
217 VideoFrame frame_;
218 ViEEncoder* vie_encoder_;
219 };
220
30 ViEEncoder::ViEEncoder(uint32_t number_of_cores, 221 ViEEncoder::ViEEncoder(uint32_t number_of_cores,
31 ProcessThread* module_process_thread,
32 SendStatisticsProxy* stats_proxy, 222 SendStatisticsProxy* stats_proxy,
33 OveruseFrameDetector* overuse_detector, 223 const VideoSendStream::Config::EncoderSettings& settings,
34 EncodedImageCallback* sink) 224 rtc::VideoSinkInterface<VideoFrame>* pre_encode_callback,
35 : number_of_cores_(number_of_cores), 225 LoadObserver* overuse_callback,
36 sink_(sink), 226 EncodedFrameObserver* encoder_timing)
227 : shutdown_event_(true /* manual_reset */, false),
228 number_of_cores_(number_of_cores),
229 settings_(settings),
37 vp_(VideoProcessing::Create()), 230 vp_(VideoProcessing::Create()),
38 video_sender_(Clock::GetRealTimeClock(), this, this, this), 231 video_sender_(Clock::GetRealTimeClock(), this, this, this),
232 overuse_detector_(Clock::GetRealTimeClock(),
233 GetCpuOveruseOptions(settings.full_overuse_time),
234 this,
235 encoder_timing,
236 stats_proxy),
237 load_observer_(overuse_callback),
39 stats_proxy_(stats_proxy), 238 stats_proxy_(stats_proxy),
40 overuse_detector_(overuse_detector), 239 pre_encode_callback_(pre_encode_callback),
41 time_of_last_frame_activity_ms_(std::numeric_limits<int64_t>::max()), 240 module_process_thread_(nullptr),
42 encoder_config_(), 241 encoder_config_(),
242 encoder_start_bitrate_bps_(0),
43 last_observed_bitrate_bps_(0), 243 last_observed_bitrate_bps_(0),
44 encoder_paused_and_dropped_frame_(false), 244 encoder_paused_and_dropped_frame_(false),
45 module_process_thread_(module_process_thread),
46 has_received_sli_(false), 245 has_received_sli_(false),
47 picture_id_sli_(0), 246 picture_id_sli_(0),
48 has_received_rpsi_(false), 247 has_received_rpsi_(false),
49 picture_id_rpsi_(0), 248 picture_id_rpsi_(0),
50 video_suspended_(false) { 249 clock_(Clock::GetRealTimeClock()),
250 last_captured_timestamp_(0),
251 delta_ntp_internal_ms_(clock_->CurrentNtpInMilliseconds() -
252 clock_->TimeInMilliseconds()),
253 encoder_queue_("EncoderQueue") {
254 vp_->EnableTemporalDecimation(false);
255 encoded_thread_checker_.DetachFromThread();
256
257 encoder_queue_.PostTask([this] {
258 video_sender_.RegisterExternalEncoder(
259 settings_.encoder, settings_.payload_type, settings_.internal_source);
260 });
261 }
262
263 ViEEncoder::~ViEEncoder() {
264 RTC_DCHECK(shutdown_event_.Wait(0))
265 << "Must call ::Stop() before destruction.";
266 }
267
268 void ViEEncoder::Stop() {
269 if (!encoder_queue_.IsCurrent()) {
270 encoder_queue_.PostTask([this] { Stop(); });
271 shutdown_event_.Wait(rtc::Event::kForever);
272 return;
273 }
274 RTC_DCHECK_RUN_ON(&encoder_queue_);
275 video_sender_.RegisterExternalEncoder(nullptr, settings_.payload_type, false);
276 shutdown_event_.Set();
277 }
278
279 void ViEEncoder::RegisterProcessThread(ProcessThread* module_process_thread) {
280 RTC_DCHECK(!module_process_thread_);
281 module_process_thread_ = module_process_thread;
282 module_process_thread_->RegisterModule(&overuse_detector_);
51 module_process_thread_->RegisterModule(&video_sender_); 283 module_process_thread_->RegisterModule(&video_sender_);
52 vp_->EnableTemporalDecimation(true); 284 module_process_thread_checker_.DetachFromThread();
53 } 285 }
54 286
55 vcm::VideoSender* ViEEncoder::video_sender() { 287 void ViEEncoder::DeRegisterProcessThread() {
56 return &video_sender_; 288 module_process_thread_->DeRegisterModule(&overuse_detector_);
57 }
58
59 ViEEncoder::~ViEEncoder() {
60 module_process_thread_->DeRegisterModule(&video_sender_); 289 module_process_thread_->DeRegisterModule(&video_sender_);
61 } 290 }
62 291
63 int32_t ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder, 292 void ViEEncoder::SetSink(EncodedImageCallback* sink) {
64 uint8_t pl_type, 293 encoder_queue_.PostTask([this, sink] {
65 bool internal_source) { 294 RTC_DCHECK_RUN_ON(&encoder_queue_);
66 video_sender_.RegisterExternalEncoder(encoder, pl_type, internal_source); 295 sink_ = sink;
67 return 0; 296 });
68 } 297 }
69 298
70 int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) { 299 void ViEEncoder::SetStartBitrate(int start_bitrate_bps) {
71 video_sender_.RegisterExternalEncoder(nullptr, pl_type, false); 300 encoder_queue_.PostTask([this, start_bitrate_bps] {
72 return 0; 301 RTC_DCHECK_RUN_ON(&encoder_queue_);
73 } 302 encoder_start_bitrate_bps_ = start_bitrate_bps;
74 303 });
75 void ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec, 304 }
76 size_t max_data_payload_length) { 305
306 void ViEEncoder::ConfigureEncoder(const VideoEncoderConfig& config,
307 size_t max_data_payload_length) {
308 VideoCodec video_codec = VideoEncoderConfigToVideoCodec(
309 config, settings_.payload_name, settings_.payload_type);
310 encoder_queue_.PostTask([this, video_codec, max_data_payload_length] {
311 ConfigureEncoderInternal(video_codec, max_data_payload_length);
312 });
313 return;
314 }
315
316 void ViEEncoder::ConfigureEncoderInternal(const VideoCodec& video_codec,
317 size_t max_data_payload_length) {
318 RTC_DCHECK_RUN_ON(&encoder_queue_);
319 RTC_DCHECK_GE(encoder_start_bitrate_bps_, 0);
320 RTC_DCHECK(sink_);
321
77 // Setting target width and height for VPM. 322 // Setting target width and height for VPM.
78 RTC_CHECK_EQ(VPM_OK, 323 RTC_CHECK_EQ(VPM_OK,
79 vp_->SetTargetResolution(video_codec.width, video_codec.height, 324 vp_->SetTargetResolution(video_codec.width, video_codec.height,
80 video_codec.maxFramerate)); 325 video_codec.maxFramerate));
81 { 326
82 rtc::CritScope lock(&data_cs_); 327 encoder_config_ = video_codec;
83 encoder_config_ = video_codec; 328 encoder_config_.startBitrate = encoder_start_bitrate_bps_ / 1000;
84 }
85 329
86 bool success = video_sender_.RegisterSendCodec( 330 bool success = video_sender_.RegisterSendCodec(
87 &video_codec, number_of_cores_, 331 &encoder_config_, number_of_cores_,
88 static_cast<uint32_t>(max_data_payload_length)) == VCM_OK; 332 static_cast<uint32_t>(max_data_payload_length)) == VCM_OK;
89 333
90 if (!success) { 334 if (!success) {
91 LOG(LS_ERROR) << "Failed to configure encoder."; 335 LOG(LS_ERROR) << "Failed to configure encoder.";
92 RTC_DCHECK(success); 336 RTC_DCHECK(success);
93 } 337 }
94 338
95 if (stats_proxy_) { 339 if (stats_proxy_) {
96 VideoEncoderConfig::ContentType content_type = 340 VideoEncoderConfig::ContentType content_type =
97 VideoEncoderConfig::ContentType::kRealtimeVideo; 341 VideoEncoderConfig::ContentType::kRealtimeVideo;
98 switch (video_codec.mode) { 342 switch (video_codec.mode) {
99 case kRealtimeVideo: 343 case kRealtimeVideo:
100 content_type = VideoEncoderConfig::ContentType::kRealtimeVideo; 344 content_type = VideoEncoderConfig::ContentType::kRealtimeVideo;
101 break; 345 break;
102 case kScreensharing: 346 case kScreensharing:
103 content_type = VideoEncoderConfig::ContentType::kScreen; 347 content_type = VideoEncoderConfig::ContentType::kScreen;
104 break; 348 break;
105 default: 349 default:
106 RTC_NOTREACHED(); 350 RTC_NOTREACHED();
107 break; 351 break;
108 } 352 }
109 stats_proxy_->SetContentType(content_type); 353 stats_proxy_->SetContentType(content_type);
110 } 354 }
111 } 355 }
112 356
357 void ViEEncoder::IncomingCapturedFrame(const VideoFrame& video_frame) {
358 // TODO(perkj): Timestamp handling is moved here from the removed
359 // implementation class VideoCaptureInput. nisse@ is looking into the
360 // timestamps in https://bugs.chromium.org/p/webrtc/issues/detail?id=5740.
361 stats_proxy_->OnIncomingFrame(video_frame.width(), video_frame.height());
362
363 VideoFrame incoming_frame = video_frame;
364
365 // Local time in webrtc time base.
366 int64_t current_time = clock_->TimeInMilliseconds();
367 incoming_frame.set_render_time_ms(current_time);
368
369 // Capture time may come from clock with an offset and drift from clock_.
370 int64_t capture_ntp_time_ms;
371 if (video_frame.ntp_time_ms() != 0) {
372 capture_ntp_time_ms = video_frame.ntp_time_ms();
373 } else if (video_frame.render_time_ms() != 0) {
374 capture_ntp_time_ms = video_frame.render_time_ms() + delta_ntp_internal_ms_;
375 } else {
376 capture_ntp_time_ms = current_time + delta_ntp_internal_ms_;
377 }
378 incoming_frame.set_ntp_time_ms(capture_ntp_time_ms);
379
380 // Convert NTP time, in ms, to RTP timestamp.
381 const int kMsToRtpTimestamp = 90;
382 incoming_frame.set_timestamp(
383 kMsToRtpTimestamp * static_cast<uint32_t>(incoming_frame.ntp_time_ms()));
384
385 if (incoming_frame.ntp_time_ms() <= last_captured_timestamp_) {
386 // We don't allow the same capture time for two frames, drop this one.
387 LOG(LS_WARNING) << "Same/old NTP timestamp ("
388 << incoming_frame.ntp_time_ms()
389 << " <= " << last_captured_timestamp_
390 << ") for incoming frame. Dropping.";
391 return;
392 }
393
394 last_captured_timestamp_ = incoming_frame.ntp_time_ms();
395 overuse_detector_.FrameCaptured(incoming_frame);
396 encoder_queue_.PostTask(
397 std::unique_ptr<rtc::QueuedTask>(new EncodeTask(incoming_frame, this)));
398 }
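An editorial note on the timestamp mapping just above (not part of the patch): delta_ntp_internal_ms_ is sampled once in the constructor as the offset between the NTP clock and the internal clock, a frame's capture time is lifted into NTP time with that offset, and the RTP timestamp is the NTP millisecond value scaled by the 90 kHz video clock. Roughly, with a hypothetical helper name:

#include <cstdint>

// 90 kHz RTP video clock: 90 ticks per millisecond. The 64-bit NTP value is
// truncated to 32 bits before scaling, matching the cast in the code above.
uint32_t NtpMsToRtpTimestamp(int64_t capture_ntp_time_ms) {
  const int kMsToRtpTimestamp = 90;
  return kMsToRtpTimestamp * static_cast<uint32_t>(capture_ntp_time_ms);
}

// A frame carrying only render_time_ms ends up with
//   capture_ntp_time_ms = render_time_ms + delta_ntp_internal_ms_,
// where delta_ntp_internal_ms_ = CurrentNtpInMilliseconds() -
// TimeInMilliseconds() is sampled once at construction, so drift between the
// capture clock and clock_ is not corrected here (hence the TODO pointing to
// webrtc issue 5740 above).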
399
113 bool ViEEncoder::EncoderPaused() const { 400 bool ViEEncoder::EncoderPaused() const {
401 RTC_DCHECK_RUN_ON(&encoder_queue_);
114 // Pause video if paused by caller or as long as the network is down or the 402 // Pause video if paused by caller or as long as the network is down or the
115 // pacer queue has grown too large in buffered mode. 403 // pacer queue has grown too large in buffered mode.
116 // If the pacer queue has grown too large or the network is down, 404 // If the pacer queue has grown too large or the network is down,
117 // last_observed_bitrate_bps_ will be 0. 405 // last_observed_bitrate_bps_ will be 0.
118 return video_suspended_ || last_observed_bitrate_bps_ == 0; 406 return last_observed_bitrate_bps_ == 0;
119 } 407 }
120 408
121 void ViEEncoder::TraceFrameDropStart() { 409 void ViEEncoder::TraceFrameDropStart() {
410 RTC_DCHECK_RUN_ON(&encoder_queue_);
122 // Start trace event only on the first frame after encoder is paused. 411 // Start trace event only on the first frame after encoder is paused.
123 if (!encoder_paused_and_dropped_frame_) { 412 if (!encoder_paused_and_dropped_frame_) {
124 TRACE_EVENT_ASYNC_BEGIN0("webrtc", "EncoderPaused", this); 413 TRACE_EVENT_ASYNC_BEGIN0("webrtc", "EncoderPaused", this);
125 } 414 }
126 encoder_paused_and_dropped_frame_ = true; 415 encoder_paused_and_dropped_frame_ = true;
127 return; 416 return;
128 } 417 }
129 418
130 void ViEEncoder::TraceFrameDropEnd() { 419 void ViEEncoder::TraceFrameDropEnd() {
420 RTC_DCHECK_RUN_ON(&encoder_queue_);
131 // End trace event on first frame after encoder resumes, if frame was dropped. 421 // End trace event on first frame after encoder resumes, if frame was dropped.
132 if (encoder_paused_and_dropped_frame_) { 422 if (encoder_paused_and_dropped_frame_) {
133 TRACE_EVENT_ASYNC_END0("webrtc", "EncoderPaused", this); 423 TRACE_EVENT_ASYNC_END0("webrtc", "EncoderPaused", this);
134 } 424 }
135 encoder_paused_and_dropped_frame_ = false; 425 encoder_paused_and_dropped_frame_ = false;
136 } 426 }
137 427
138 void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) { 428 void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame) {
139 VideoCodecType codec_type; 429 RTC_DCHECK_RUN_ON(&encoder_queue_);
140 { 430 if (pre_encode_callback_)
141 rtc::CritScope lock(&data_cs_); 431 pre_encode_callback_->OnFrame(video_frame);
142 time_of_last_frame_activity_ms_ = rtc::TimeMillis(); 432
143 if (EncoderPaused()) { 433 if (EncoderPaused()) {
144 TraceFrameDropStart(); 434 TraceFrameDropStart();
145 return; 435 return;
146 }
147 TraceFrameDropEnd();
148 codec_type = encoder_config_.codecType;
149 } 436 }
437 TraceFrameDropEnd();
150 438
151 TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(), 439 TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(),
152 "Encode"); 440 "Encode");
153 const VideoFrame* frame_to_send = &video_frame; 441 const VideoFrame* frame_to_send = &video_frame;
154 // TODO(wuchengli): support texture frames. 442 // TODO(wuchengli): support texture frames.
155 if (!video_frame.video_frame_buffer()->native_handle()) { 443 if (!video_frame.video_frame_buffer()->native_handle()) {
156 // Pass frame via preprocessor. 444 // Pass frame via preprocessor.
157 frame_to_send = vp_->PreprocessFrame(video_frame); 445 frame_to_send = vp_->PreprocessFrame(video_frame);
158 if (!frame_to_send) { 446 if (!frame_to_send) {
159 // Drop this frame, or there was an error processing it. 447 // Drop this frame, or there was an error processing it.
160 return; 448 return;
161 } 449 }
162 } 450 }
163 451
164 if (codec_type == webrtc::kVideoCodecVP8) { 452 if (encoder_config_.codecType == webrtc::kVideoCodecVP8) {
165 webrtc::CodecSpecificInfo codec_specific_info; 453 webrtc::CodecSpecificInfo codec_specific_info;
166 codec_specific_info.codecType = webrtc::kVideoCodecVP8; 454 codec_specific_info.codecType = webrtc::kVideoCodecVP8;
167 { 455
168 rtc::CritScope lock(&data_cs_);
169 codec_specific_info.codecSpecific.VP8.hasReceivedRPSI = 456 codec_specific_info.codecSpecific.VP8.hasReceivedRPSI =
170 has_received_rpsi_; 457 has_received_rpsi_;
171 codec_specific_info.codecSpecific.VP8.hasReceivedSLI = 458 codec_specific_info.codecSpecific.VP8.hasReceivedSLI =
172 has_received_sli_; 459 has_received_sli_;
173 codec_specific_info.codecSpecific.VP8.pictureIdRPSI = 460 codec_specific_info.codecSpecific.VP8.pictureIdRPSI =
174 picture_id_rpsi_; 461 picture_id_rpsi_;
175 codec_specific_info.codecSpecific.VP8.pictureIdSLI = 462 codec_specific_info.codecSpecific.VP8.pictureIdSLI =
176 picture_id_sli_; 463 picture_id_sli_;
177 has_received_sli_ = false; 464 has_received_sli_ = false;
178 has_received_rpsi_ = false; 465 has_received_rpsi_ = false;
179 }
180 466
181 video_sender_.AddVideoFrame(*frame_to_send, &codec_specific_info); 467 video_sender_.AddVideoFrame(*frame_to_send, &codec_specific_info);
182 return; 468 return;
183 } 469 }
184 video_sender_.AddVideoFrame(*frame_to_send, nullptr); 470 video_sender_.AddVideoFrame(*frame_to_send, nullptr);
185 } 471 }
186 472
187 void ViEEncoder::SendKeyFrame() { 473 void ViEEncoder::SendKeyFrame() {
474 if (!encoder_queue_.IsCurrent()) {
475 encoder_queue_.PostTask([this] { SendKeyFrame(); });
476 return;
477 }
478 RTC_DCHECK(encoder_queue_.IsCurrent());
188 video_sender_.IntraFrameRequest(0); 479 video_sender_.IntraFrameRequest(0);
189 } 480 }
190 481
191 int64_t ViEEncoder::time_of_last_frame_activity_ms() {
192 rtc::CritScope lock(&data_cs_);
193 return time_of_last_frame_activity_ms_;
194 }
195
196 void ViEEncoder::OnSetRates(uint32_t bitrate_bps, int framerate) { 482 void ViEEncoder::OnSetRates(uint32_t bitrate_bps, int framerate) {
197 if (stats_proxy_) 483 if (stats_proxy_)
198 stats_proxy_->OnSetRates(bitrate_bps, framerate); 484 stats_proxy_->OnSetRates(bitrate_bps, framerate);
199 } 485 }
200 486
201 int32_t ViEEncoder::Encoded(const EncodedImage& encoded_image, 487 int32_t ViEEncoder::Encoded(const EncodedImage& encoded_image,
202 const CodecSpecificInfo* codec_specific_info, 488 const CodecSpecificInfo* codec_specific_info,
203 const RTPFragmentationHeader* fragmentation) { 489 const RTPFragmentationHeader* fragmentation) {
204 { 490 // Encoded is called on whatever thread the real encoder implementation runs
205 rtc::CritScope lock(&data_cs_); 491 // on.
206 time_of_last_frame_activity_ms_ = rtc::TimeMillis(); 492 RTC_DCHECK(encoded_thread_checker_.CalledOnValidThread());
207 }
208 if (stats_proxy_) { 493 if (stats_proxy_) {
209 stats_proxy_->OnSendEncodedImage(encoded_image, codec_specific_info); 494 stats_proxy_->OnSendEncodedImage(encoded_image, codec_specific_info);
210 } 495 }
211 496
212 int success = 497 int success =
213 sink_->Encoded(encoded_image, codec_specific_info, fragmentation); 498 sink_->Encoded(encoded_image, codec_specific_info, fragmentation);
214 499
215 overuse_detector_->FrameSent(encoded_image._timeStamp); 500 overuse_detector_.FrameSent(encoded_image._timeStamp);
216 return success; 501 return success;
217 } 502 }
218 503
219 void ViEEncoder::SendStatistics(uint32_t bit_rate, 504 void ViEEncoder::SendStatistics(uint32_t bit_rate,
220 uint32_t frame_rate, 505 uint32_t frame_rate,
221 const std::string& encoder_name) { 506 const std::string& encoder_name) {
507 RTC_DCHECK(module_process_thread_checker_.CalledOnValidThread());
222 if (stats_proxy_) 508 if (stats_proxy_)
223 stats_proxy_->OnEncoderStatsUpdate(frame_rate, bit_rate, encoder_name); 509 stats_proxy_->OnEncoderStatsUpdate(frame_rate, bit_rate, encoder_name);
224 } 510 }
225 511
226 void ViEEncoder::OnReceivedSLI(uint8_t picture_id) { 512 void ViEEncoder::OnReceivedSLI(uint8_t picture_id) {
227 rtc::CritScope lock(&data_cs_); 513 if (!encoder_queue_.IsCurrent()) {
514 encoder_queue_.PostTask([this, picture_id] { OnReceivedSLI(picture_id); });
515 return;
516 }
517 RTC_DCHECK_RUN_ON(&encoder_queue_);
228 picture_id_sli_ = picture_id; 518 picture_id_sli_ = picture_id;
229 has_received_sli_ = true; 519 has_received_sli_ = true;
230 } 520 }
231 521
232 void ViEEncoder::OnReceivedRPSI(uint64_t picture_id) { 522 void ViEEncoder::OnReceivedRPSI(uint64_t picture_id) {
233 rtc::CritScope lock(&data_cs_); 523 if (!encoder_queue_.IsCurrent()) {
524 encoder_queue_.PostTask([this, picture_id] { OnReceivedRPSI(picture_id); });
525 return;
526 }
527 RTC_DCHECK_RUN_ON(&encoder_queue_);
234 picture_id_rpsi_ = picture_id; 528 picture_id_rpsi_ = picture_id;
235 has_received_rpsi_ = true; 529 has_received_rpsi_ = true;
236 } 530 }
237 531
238 void ViEEncoder::OnReceivedIntraFrameRequest(size_t stream_index) { 532 void ViEEncoder::OnReceivedIntraFrameRequest(size_t stream_index) {
533 if (!encoder_queue_.IsCurrent()) {
534 encoder_queue_.PostTask(
535 [this, stream_index] { OnReceivedIntraFrameRequest(stream_index); });
536 return;
537 }
538 RTC_DCHECK(encoder_queue_.IsCurrent());
239 // Key frame request from remote side, signal to VCM. 539 // Key frame request from remote side, signal to VCM.
240 TRACE_EVENT0("webrtc", "OnKeyFrameRequest"); 540 TRACE_EVENT0("webrtc", "OnKeyFrameRequest");
241 video_sender_.IntraFrameRequest(stream_index); 541 video_sender_.IntraFrameRequest(stream_index);
242 } 542 }
243 543
244 void ViEEncoder::OnBitrateUpdated(uint32_t bitrate_bps, 544 void ViEEncoder::OnBitrateUpdated(uint32_t bitrate_bps,
245 uint8_t fraction_lost, 545 uint8_t fraction_lost,
246 int64_t round_trip_time_ms) { 546 int64_t round_trip_time_ms) {
547 if (!encoder_queue_.IsCurrent()) {
548 encoder_queue_.PostTask(
549 [this, bitrate_bps, fraction_lost, round_trip_time_ms] {
550 OnBitrateUpdated(bitrate_bps, fraction_lost, round_trip_time_ms);
551 });
552 return;
553 }
554 RTC_DCHECK_RUN_ON(&encoder_queue_);
555 RTC_DCHECK(sink_) << "sink_ must be set before the encoder is active.";
556
247 LOG(LS_VERBOSE) << "OnBitrateUpdated, bitrate " << bitrate_bps 557 LOG(LS_VERBOSE) << "OnBitrateUpdated, bitrate " << bitrate_bps
248 << " packet loss " << static_cast<int>(fraction_lost) 558 << " packet loss " << static_cast<int>(fraction_lost)
249 << " rtt " << round_trip_time_ms; 559 << " rtt " << round_trip_time_ms;
560
250 video_sender_.SetChannelParameters(bitrate_bps, fraction_lost, 561 video_sender_.SetChannelParameters(bitrate_bps, fraction_lost,
251 round_trip_time_ms); 562 round_trip_time_ms);
252 bool video_suspension_changed; 563
564 encoder_start_bitrate_bps_ =
565 bitrate_bps != 0 ? bitrate_bps : encoder_start_bitrate_bps_;
253 bool video_is_suspended = bitrate_bps == 0; 566 bool video_is_suspended = bitrate_bps == 0;
254 { 567 bool video_suspension_changed =
255 rtc::CritScope lock(&data_cs_); 568 video_is_suspended != (last_observed_bitrate_bps_ == 0);
256 last_observed_bitrate_bps_ = bitrate_bps; 569 last_observed_bitrate_bps_ = bitrate_bps;
257 video_suspension_changed = video_suspended_ != video_is_suspended;
258 video_suspended_ = video_is_suspended;
259 // Set |time_of_last_frame_activity_ms_| to now if this is the first time
260 // the encoder is supposed to produce encoded frames.
261 // TODO(perkj): Remove this hack. It is here to avoid a race that the
262 // encoder report that it has timed out before it has processed the first
263 // frame.
264 if (last_observed_bitrate_bps_ != 0 &&
265 time_of_last_frame_activity_ms_ ==
266 std::numeric_limits<int64_t>::max()) {
267 time_of_last_frame_activity_ms_ = rtc::TimeMillis();
268 }
269 }
270 570
271 if (stats_proxy_ && video_suspension_changed) { 571 if (stats_proxy_ && video_suspension_changed) {
272 LOG(LS_INFO) << "Video suspend state changed to: " 572 LOG(LS_INFO) << "Video suspend state changed to: "
273 << (video_is_suspended ? "suspended" : "not suspended"); 573 << (video_is_suspended ? "suspended" : "not suspended");
274 stats_proxy_->OnSuspendChange(video_is_suspended); 574 stats_proxy_->OnSuspendChange(video_is_suspended);
275 } 575 }
276 } 576 }
277 577
578 void ViEEncoder::OveruseDetected() {
579 RTC_DCHECK(module_process_thread_checker_.CalledOnValidThread());
580 // TODO(perkj): When ViEEncoder inherit rtc::VideoSink instead of
581 // VideoCaptureInput |load_observer_| should be removed and overuse be
582 // expressed as rtc::VideoSinkWants instead.
583 if (load_observer_)
584 load_observer_->OnLoadUpdate(LoadObserver::kOveruse);
585 }
586
587 void ViEEncoder::NormalUsage() {
588 RTC_DCHECK(module_process_thread_checker_.CalledOnValidThread());
589 if (load_observer_)
590 load_observer_->OnLoadUpdate(LoadObserver::kUnderuse);
591 }
592
278 } // namespace webrtc 593 } // namespace webrtc