OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "webrtc/modules/video_coding/generic_encoder.h" | 11 #include "webrtc/modules/video_coding/generic_encoder.h" |
12 | 12 |
13 #include <vector> | 13 #include <vector> |
14 | 14 |
15 #include "webrtc/api/video/i420_buffer.h" | 15 #include "webrtc/api/video/i420_buffer.h" |
| 16 #include "webrtc/modules/pacing/alr_detector.h" |
16 #include "webrtc/modules/video_coding/encoded_frame.h" | 17 #include "webrtc/modules/video_coding/encoded_frame.h" |
17 #include "webrtc/modules/video_coding/media_optimization.h" | 18 #include "webrtc/modules/video_coding/media_optimization.h" |
18 #include "webrtc/rtc_base/checks.h" | 19 #include "webrtc/rtc_base/checks.h" |
19 #include "webrtc/rtc_base/logging.h" | 20 #include "webrtc/rtc_base/logging.h" |
20 #include "webrtc/rtc_base/optional.h" | 21 #include "webrtc/rtc_base/optional.h" |
21 #include "webrtc/rtc_base/timeutils.h" | 22 #include "webrtc/rtc_base/timeutils.h" |
22 #include "webrtc/rtc_base/trace_event.h" | 23 #include "webrtc/rtc_base/trace_event.h" |
| 24 #include "webrtc/system_wrappers/include/field_trial.h" |
23 | 25 |
24 namespace webrtc { | 26 namespace webrtc { |
25 | 27 |
26 VCMGenericEncoder::VCMGenericEncoder( | 28 VCMGenericEncoder::VCMGenericEncoder( |
27 VideoEncoder* encoder, | 29 VideoEncoder* encoder, |
28 VCMEncodedFrameCallback* encoded_frame_callback, | 30 VCMEncodedFrameCallback* encoded_frame_callback, |
29 bool internal_source) | 31 bool internal_source) |
30 : encoder_(encoder), | 32 : encoder_(encoder), |
31 vcm_encoded_frame_callback_(encoded_frame_callback), | 33 vcm_encoded_frame_callback_(encoded_frame_callback), |
32 internal_source_(internal_source), | 34 internal_source_(internal_source), |
(...skipping 191 matching lines...)
224 } else if (codec_specific->codecType == kVideoCodecVP8) { | 226 } else if (codec_specific->codecType == kVideoCodecVP8) { |
225 simulcast_svc_idx = codec_specific->codecSpecific.VP8.simulcastIdx; | 227 simulcast_svc_idx = codec_specific->codecSpecific.VP8.simulcastIdx; |
226 } else if (codec_specific->codecType == kVideoCodecGeneric) { | 228 } else if (codec_specific->codecType == kVideoCodecGeneric) { |
227 simulcast_svc_idx = codec_specific->codecSpecific.generic.simulcast_idx; | 229 simulcast_svc_idx = codec_specific->codecSpecific.generic.simulcast_idx; |
228 } else if (codec_specific->codecType == kVideoCodecH264) { | 230 } else if (codec_specific->codecType == kVideoCodecH264) { |
229 // TODO(ilnik): When h264 simulcast is landed, extract simulcast idx here. | 231 // TODO(ilnik): When h264 simulcast is landed, extract simulcast idx here. |
230 } | 232 } |
231 | 233 |
232 rtc::Optional<size_t> outlier_frame_size; | 234 rtc::Optional<size_t> outlier_frame_size; |
233 rtc::Optional<int64_t> encode_start_ms; | 235 rtc::Optional<int64_t> encode_start_ms; |
| 236 size_t num_simulcast_svc_streams = 1; |
234 uint8_t timing_flags = TimingFrameFlags::kInvalid; | 237 uint8_t timing_flags = TimingFrameFlags::kInvalid; |
235 { | 238 { |
236 rtc::CritScope crit(&timing_params_lock_); | 239 rtc::CritScope crit(&timing_params_lock_); |
237 | 240 |
238 // Encoders with internal sources do not call OnEncodeStarted and | 241 // Encoders with internal sources do not call OnEncodeStarted and |
239 // OnFrameRateChanged. |timing_frames_info_| may not be filled here. | 242 // OnFrameRateChanged. |timing_frames_info_| may not be filled here. |
240 if (simulcast_svc_idx < timing_frames_info_.size()) { | 243 num_simulcast_svc_streams = timing_frames_info_.size(); |
| 244 if (simulcast_svc_idx < num_simulcast_svc_streams) { |
241 auto encode_start_map = | 245 auto encode_start_map = |
242 &timing_frames_info_[simulcast_svc_idx].encode_start_time_ms; | 246 &timing_frames_info_[simulcast_svc_idx].encode_start_time_ms; |
243 auto it = encode_start_map->find(encoded_image.capture_time_ms_); | 247 auto it = encode_start_map->find(encoded_image.capture_time_ms_); |
244 if (it != encode_start_map->end()) { | 248 if (it != encode_start_map->end()) { |
245 encode_start_ms.emplace(it->second); | 249 encode_start_ms.emplace(it->second); |
246 // Assuming all encoders do not reorder frames within a single stream, | 250 // Assuming all encoders do not reorder frames within a single stream, |
247 // there may be some dropped frames with smaller timestamps. These | 251 // there may be some dropped frames with smaller timestamps. These |
248 // should be purged. | 252 // should be purged. |
249 encode_start_map->erase(encode_start_map->begin(), it); | 253 encode_start_map->erase(encode_start_map->begin(), it); |
250 encode_start_map->erase(it); | 254 encode_start_map->erase(it); |
(...skipping 41 matching lines...)
292 // drift relative to rtc::TimeMillis(). We can't use it for Timing frames, | 296 // drift relative to rtc::TimeMillis(). We can't use it for Timing frames, |
293 // because, in order to be sent over the network, the capture time must be | 297 // because, in order to be sent over the network, the capture time must be |
294 // less than all the other timestamps. | 298 // less than all the other timestamps. |
295 if (timing_flags != TimingFrameFlags::kInvalid && encode_start_ms) { | 299 if (timing_flags != TimingFrameFlags::kInvalid && encode_start_ms) { |
296 encoded_image.SetEncodeTime(*encode_start_ms, rtc::TimeMillis()); | 300 encoded_image.SetEncodeTime(*encode_start_ms, rtc::TimeMillis()); |
297 encoded_image.timing_.flags = timing_flags; | 301 encoded_image.timing_.flags = timing_flags; |
298 } else { | 302 } else { |
299 encoded_image.timing_.flags = TimingFrameFlags::kInvalid; | 303 encoded_image.timing_.flags = TimingFrameFlags::kInvalid; |
300 } | 304 } |
301 | 305 |
| 306 // Piggyback ALR experiment group id and simulcast id into the content type. |
| 307 uint8_t experiment_id = 0; |
| 308 rtc::Optional<AlrDetector::AlrExperimentSettings> experiment_settings; |
| 309 if (encoded_image.content_type_.IsScreenshare()) { |
| 310 experiment_settings = AlrDetector::ParseAlrSettingsFromFieldTrial( |
| 311 AlrDetector::kScreenshareProbingBweExperimentName); |
| 312 } else { |
| 313 experiment_settings = AlrDetector::ParseAlrSettingsFromFieldTrial( |
| 314 AlrDetector::kStrictPacingAndProbingExperimentName); |
| 315 } |
| 316 if (experiment_settings) { |
| 317 // A value of 0 means no experiment, so 1 is added here. It will be |
| 318 // subtracted on the receive side before reporting the experiment group. |
| 319 experiment_id = experiment_settings->group_id + 1; |
| 320 } |
| 321 |
| 322 // TODO(ilnik): This will force the content type extension to be present even |
| 323 // for realtime video. At the expense of minuscule overhead we will get |
| 324 // sliced receive statistics. |
| 325 RTC_CHECK(encoded_image.content_type_.SetExperimentId(experiment_id)); |
| 326 // We count simulcast streams from 1 on the wire. |
| 327 RTC_CHECK(encoded_image.content_type_.SetSimulcastId( |
| 328 static_cast<uint8_t>(simulcast_svc_idx + 1))); |
| 329 |
302 Result result = post_encode_callback_->OnEncodedImage( | 330 Result result = post_encode_callback_->OnEncodedImage( |
303 encoded_image, codec_specific, fragmentation_header); | 331 encoded_image, codec_specific, fragmentation_header); |
304 if (result.error != Result::OK) | 332 if (result.error != Result::OK) |
305 return result; | 333 return result; |
306 | 334 |
307 if (media_opt_) { | 335 if (media_opt_) { |
308 media_opt_->UpdateWithEncodedData(encoded_image); | 336 media_opt_->UpdateWithEncodedData(encoded_image); |
309 if (internal_source_) { | 337 if (internal_source_) { |
310 // Signal to encoder to drop next frame. | 338 // Signal to encoder to drop next frame. |
311 result.drop_next_frame = media_opt_->DropFrame(); | 339 result.drop_next_frame = media_opt_->DropFrame(); |
312 } | 340 } |
313 } | 341 } |
314 return result; | 342 return result; |
315 } | 343 } |
316 | 344 |
317 } // namespace webrtc | 345 } // namespace webrtc |
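
Note on the encode-start bookkeeping above: when a frame's encode start time is found, every older entry in the per-stream map is erased together with the match, since older entries can only belong to frames the encoder dropped. A minimal standalone sketch of that purge pattern, assuming a std::map keyed by capture time in milliseconds (the helper name is illustrative, not part of this CL):

  #include <cstdint>
  #include <map>

  // Remove the matched entry and everything older than it. Entries with
  // smaller keys correspond to frames the encoder dropped.
  void PurgeEncodeStartTimes(std::map<int64_t, int64_t>* encode_start_map,
                             int64_t capture_time_ms) {
    auto it = encode_start_map->find(capture_time_ms);
    if (it == encode_start_map->end())
      return;
    // erase(first, last) removes the half-open range [begin, it).
    encode_start_map->erase(encode_start_map->begin(), it);
    encode_start_map->erase(it);
  }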
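
Note on the ALR experiment piggybacking: the wire value 0 is reserved for "no experiment", so the sender stores group_id + 1 and the receiver subtracts 1 before reporting the group; the simulcast id is likewise counted from 1 on the wire. A hedged sketch of that receive-side convention, using a hypothetical helper name rather than the actual receiver code:

  #include <stdint.h>

  #include "webrtc/rtc_base/optional.h"

  // Hypothetical decode of the piggybacked experiment id: returns an empty
  // Optional when no ALR experiment was active on the send side (wire value
  // 0), otherwise the original group id.
  rtc::Optional<uint8_t> DecodeAlrExperimentGroup(uint8_t wire_experiment_id) {
    if (wire_experiment_id == 0)
      return rtc::Optional<uint8_t>();
    return rtc::Optional<uint8_t>(wire_experiment_id - 1);
  }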