OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "webrtc/modules/video_coding/generic_encoder.h" | 11 #include "webrtc/modules/video_coding/generic_encoder.h" |
12 | 12 |
13 #include <vector> | 13 #include <vector> |
14 | 14 |
15 #include "webrtc/api/video/i420_buffer.h" | 15 #include "webrtc/api/video/i420_buffer.h" |
| 16 #include "webrtc/modules/pacing/alr_detector.h" |
16 #include "webrtc/modules/video_coding/encoded_frame.h" | 17 #include "webrtc/modules/video_coding/encoded_frame.h" |
17 #include "webrtc/modules/video_coding/media_optimization.h" | 18 #include "webrtc/modules/video_coding/media_optimization.h" |
18 #include "webrtc/rtc_base/checks.h" | 19 #include "webrtc/rtc_base/checks.h" |
19 #include "webrtc/rtc_base/logging.h" | 20 #include "webrtc/rtc_base/logging.h" |
20 #include "webrtc/rtc_base/optional.h" | 21 #include "webrtc/rtc_base/optional.h" |
21 #include "webrtc/rtc_base/timeutils.h" | 22 #include "webrtc/rtc_base/timeutils.h" |
22 #include "webrtc/rtc_base/trace_event.h" | 23 #include "webrtc/rtc_base/trace_event.h" |
| 24 #include "webrtc/system_wrappers/include/field_trial.h" |
23 | 25 |
24 namespace webrtc { | 26 namespace webrtc { |
25 | 27 |
26 VCMGenericEncoder::VCMGenericEncoder( | 28 VCMGenericEncoder::VCMGenericEncoder( |
27 VideoEncoder* encoder, | 29 VideoEncoder* encoder, |
28 VCMEncodedFrameCallback* encoded_frame_callback, | 30 VCMEncodedFrameCallback* encoded_frame_callback, |
29 bool internal_source) | 31 bool internal_source) |
30 : encoder_(encoder), | 32 : encoder_(encoder), |
31 vcm_encoded_frame_callback_(encoded_frame_callback), | 33 vcm_encoded_frame_callback_(encoded_frame_callback), |
32 internal_source_(internal_source), | 34 internal_source_(internal_source), |
(...skipping 143 matching lines...)
176 } | 178 } |
177 | 179 |
178 VCMEncodedFrameCallback::VCMEncodedFrameCallback( | 180 VCMEncodedFrameCallback::VCMEncodedFrameCallback( |
179 EncodedImageCallback* post_encode_callback, | 181 EncodedImageCallback* post_encode_callback, |
180 media_optimization::MediaOptimization* media_opt) | 182 media_optimization::MediaOptimization* media_opt) |
181 : internal_source_(false), | 183 : internal_source_(false), |
182 post_encode_callback_(post_encode_callback), | 184 post_encode_callback_(post_encode_callback), |
183 media_opt_(media_opt), | 185 media_opt_(media_opt), |
184 framerate_(1), | 186 framerate_(1), |
185 last_timing_frame_time_ms_(-1), | 187 last_timing_frame_time_ms_(-1), |
186 timing_frames_thresholds_({-1, 0}) {} | 188 timing_frames_thresholds_({-1, 0}) { |
| 189 rtc::Optional<AlrDetector::AlrExperimentSettings> experiment_settings = |
| 190 AlrDetector::ParseAlrSettingsFromFieldTrial( |
| 191 AlrDetector::kStrictPacingAndProbingExperimentName); |
| 192 if (experiment_settings) { |
| 193 experiment_groups_[0] = experiment_settings->group_id + 1; |
| 194 } else { |
| 195 experiment_groups_[0] = 0; |
| 196 } |
| 197 experiment_settings = AlrDetector::ParseAlrSettingsFromFieldTrial( |
| 198 AlrDetector::kScreenshareProbingBweExperimentName); |
| 199 if (experiment_settings) { |
| 200 experiment_groups_[1] = experiment_settings->group_id + 1; |
| 201 } else { |
| 202 experiment_groups_[1] = 0; |
| 203 } |
| 204 } |
187 | 205 |
188 VCMEncodedFrameCallback::~VCMEncodedFrameCallback() {} | 206 VCMEncodedFrameCallback::~VCMEncodedFrameCallback() {} |
189 | 207 |
190 void VCMEncodedFrameCallback::OnTargetBitrateChanged( | 208 void VCMEncodedFrameCallback::OnTargetBitrateChanged( |
191 size_t bitrate_bytes_per_second, | 209 size_t bitrate_bytes_per_second, |
192 size_t simulcast_svc_idx) { | 210 size_t simulcast_svc_idx) { |
193 rtc::CritScope crit(&timing_params_lock_); | 211 rtc::CritScope crit(&timing_params_lock_); |
194 if (timing_frames_info_.size() < simulcast_svc_idx + 1) | 212 if (timing_frames_info_.size() < simulcast_svc_idx + 1) |
195 timing_frames_info_.resize(simulcast_svc_idx + 1); | 213 timing_frames_info_.resize(simulcast_svc_idx + 1); |
196 timing_frames_info_[simulcast_svc_idx].target_bitrate_bytes_per_sec = | 214 timing_frames_info_[simulcast_svc_idx].target_bitrate_bytes_per_sec = |
(...skipping 27 matching lines...)
224 } else if (codec_specific->codecType == kVideoCodecVP8) { | 242 } else if (codec_specific->codecType == kVideoCodecVP8) { |
225 simulcast_svc_idx = codec_specific->codecSpecific.VP8.simulcastIdx; | 243 simulcast_svc_idx = codec_specific->codecSpecific.VP8.simulcastIdx; |
226 } else if (codec_specific->codecType == kVideoCodecGeneric) { | 244 } else if (codec_specific->codecType == kVideoCodecGeneric) { |
227 simulcast_svc_idx = codec_specific->codecSpecific.generic.simulcast_idx; | 245 simulcast_svc_idx = codec_specific->codecSpecific.generic.simulcast_idx; |
228 } else if (codec_specific->codecType == kVideoCodecH264) { | 246 } else if (codec_specific->codecType == kVideoCodecH264) { |
229 // TODO(ilnik): When h264 simulcast is landed, extract simulcast idx here. | 247 // TODO(ilnik): When h264 simulcast is landed, extract simulcast idx here. |
230 } | 248 } |
231 | 249 |
232 rtc::Optional<size_t> outlier_frame_size; | 250 rtc::Optional<size_t> outlier_frame_size; |
233 rtc::Optional<int64_t> encode_start_ms; | 251 rtc::Optional<int64_t> encode_start_ms; |
| 252 size_t num_simulcast_svc_streams = 1; |
234 uint8_t timing_flags = TimingFrameFlags::kInvalid; | 253 uint8_t timing_flags = TimingFrameFlags::kInvalid; |
235 { | 254 { |
236 rtc::CritScope crit(&timing_params_lock_); | 255 rtc::CritScope crit(&timing_params_lock_); |
237 | 256 |
238 // Encoders with internal sources do not call OnEncodeStarted and | 257 // Encoders with internal sources do not call OnEncodeStarted and |
239 // OnFrameRateChanged. |timing_frames_info_| may not be filled here. | 258 // OnFrameRateChanged. |timing_frames_info_| may not be filled here. |
240 if (simulcast_svc_idx < timing_frames_info_.size()) { | 259 num_simulcast_svc_streams = timing_frames_info_.size(); |
| 260 if (simulcast_svc_idx < num_simulcast_svc_streams) { |
241 auto encode_start_map = | 261 auto encode_start_map = |
242 &timing_frames_info_[simulcast_svc_idx].encode_start_time_ms; | 262 &timing_frames_info_[simulcast_svc_idx].encode_start_time_ms; |
243 auto it = encode_start_map->find(encoded_image.capture_time_ms_); | 263 auto it = encode_start_map->find(encoded_image.capture_time_ms_); |
244 if (it != encode_start_map->end()) { | 264 if (it != encode_start_map->end()) { |
245 encode_start_ms.emplace(it->second); | 265 encode_start_ms.emplace(it->second); |
246 // Assuming all encoders do not reorder frames within a single stream, | 266 // Assuming all encoders do not reorder frames within a single stream, |
247 // there may be some dropped frames with smaller timestamps. These | 267 // there may be some dropped frames with smaller timestamps. These |
248 // should be purged. | 268 // should be purged. |
249 encode_start_map->erase(encode_start_map->begin(), it); | 269 encode_start_map->erase(encode_start_map->begin(), it); |
250 encode_start_map->erase(it); | 270 encode_start_map->erase(it); |
(...skipping 41 matching lines...)
292 // drift relative to rtc::TimeMillis(). We can't use it for Timing frames, | 312 // drift relative to rtc::TimeMillis(). We can't use it for Timing frames, |
293 // because for a frame to be sent over the network its capture time must be | 313 // because for a frame to be sent over the network its capture time must be |
294 // less than all the other timestamps. | 314 // less than all the other timestamps. |
295 if (timing_flags != TimingFrameFlags::kInvalid && encode_start_ms) { | 315 if (timing_flags != TimingFrameFlags::kInvalid && encode_start_ms) { |
296 encoded_image.SetEncodeTime(*encode_start_ms, rtc::TimeMillis()); | 316 encoded_image.SetEncodeTime(*encode_start_ms, rtc::TimeMillis()); |
297 encoded_image.timing_.flags = timing_flags; | 317 encoded_image.timing_.flags = timing_flags; |
298 } else { | 318 } else { |
299 encoded_image.timing_.flags = TimingFrameFlags::kInvalid; | 319 encoded_image.timing_.flags = TimingFrameFlags::kInvalid; |
300 } | 320 } |
301 | 321 |
| 322 // Piggyback ALR experiment group id and simulcast id into the content type. |
| 323 uint8_t experiment_id = |
| 324 experiment_groups_[videocontenttypehelpers::IsScreenshare( |
| 325 encoded_image.content_type_)]; |
| 326 |
| 327 // TODO(ilnik): This will force content type extension to be present even |
| 328 // for realtime video. At the expense of minuscule overhead, we will get |
| 329 // sliced receive statistics. |
| 330 RTC_CHECK(videocontenttypehelpers::SetExperimentId( |
| 331 &encoded_image.content_type_, experiment_id)); |
| 332 // We count simulcast streams from 1 on the wire. That's why the simulcast |
| 333 // id in the content type is set to the actual simulcast index + 1, because |
| 334 // value 0 on the wire is reserved for 'no simulcast stream specified'. |
| 335 RTC_CHECK(videocontenttypehelpers::SetSimulcastId( |
| 336 &encoded_image.content_type_, |
| 337 static_cast<uint8_t>(simulcast_svc_idx + 1))); |
| 338 |
302 Result result = post_encode_callback_->OnEncodedImage( | 339 Result result = post_encode_callback_->OnEncodedImage( |
303 encoded_image, codec_specific, fragmentation_header); | 340 encoded_image, codec_specific, fragmentation_header); |
304 if (result.error != Result::OK) | 341 if (result.error != Result::OK) |
305 return result; | 342 return result; |
306 | 343 |
307 if (media_opt_) { | 344 if (media_opt_) { |
308 media_opt_->UpdateWithEncodedData(encoded_image); | 345 media_opt_->UpdateWithEncodedData(encoded_image); |
309 if (internal_source_) { | 346 if (internal_source_) { |
310 // Signal to encoder to drop next frame. | 347 // Signal to encoder to drop next frame. |
311 result.drop_next_frame = media_opt_->DropFrame(); | 348 result.drop_next_frame = media_opt_->DropFrame(); |
312 } | 349 } |
313 } | 350 } |
314 return result; | 351 return result; |
315 } | 352 } |
316 | 353 |
317 } // namespace webrtc | 354 } // namespace webrtc |
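Note: the standalone sketch below restates the two pieces of logic this patch adds, purely for illustration: mapping the optional ALR field-trial settings to a 1-based group id stored in experiment_groups_ (0 meaning the experiment is not configured), and piggybacking that id plus a 1-based simulcast index on each encoded frame's content type. ExperimentSettings, ParseAlrSettings and ContentTypeTag are hypothetical stand-ins for AlrDetector::AlrExperimentSettings, AlrDetector::ParseAlrSettingsFromFieldTrial and the videocontenttypehelpers setters used in the real code; the field-trial strings and group number are made up for the example.

// Standalone sketch of the added logic (illustration only, not WebRTC API).
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <string>

// Hypothetical stand-in for rtc::Optional<AlrDetector::AlrExperimentSettings>.
struct ExperimentSettings {
  bool enabled;
  int group_id;  // 0-based group id parsed from the field-trial string.
};

// Hypothetical stand-in for AlrDetector::ParseAlrSettingsFromFieldTrial();
// the real code passes AlrDetector::kStrictPacingAndProbingExperimentName or
// AlrDetector::kScreenshareProbingBweExperimentName. Returns enabled == false
// when the trial is not configured.
ExperimentSettings ParseAlrSettings(const std::string& trial_name) {
  // Made-up example: pretend only the screenshare trial is active, group 2.
  if (trial_name == "ScreenshareProbingBwe") return {true, 2};
  return {false, 0};
}

// Hypothetical stand-in for VideoContentType plus the
// videocontenttypehelpers::SetExperimentId()/SetSimulcastId() setters.
struct ContentTypeTag {
  uint8_t experiment_id;  // 0 means "no experiment configured".
  uint8_t simulcast_id;   // 0 means "no simulcast stream specified".
};

int main() {
  // Constructor logic: store group_id + 1 so that 0 stays free to mean
  // "experiment not configured", mirroring the new VCMEncodedFrameCallback
  // constructor.
  uint8_t experiment_groups[2];
  ExperimentSettings realtime = ParseAlrSettings("StrictPacingAndProbing");
  experiment_groups[0] = realtime.enabled ? realtime.group_id + 1 : 0;
  ExperimentSettings screenshare = ParseAlrSettings("ScreenshareProbingBwe");
  experiment_groups[1] = screenshare.enabled ? screenshare.group_id + 1 : 0;

  // OnEncodedImage() logic: pick the group by content type (index 0 for
  // realtime video, 1 for screenshare) and piggyback it together with a
  // 1-based simulcast index on the outgoing frame.
  bool is_screenshare = true;
  size_t simulcast_svc_idx = 0;
  ContentTypeTag tag;
  tag.experiment_id = experiment_groups[is_screenshare ? 1 : 0];
  tag.simulcast_id = static_cast<uint8_t>(simulcast_svc_idx + 1);

  std::cout << "experiment_id=" << static_cast<int>(tag.experiment_id)
            << " simulcast_id=" << static_cast<int>(tag.simulcast_id) << "\n";
  return 0;
}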