OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "webrtc/modules/video_coding/generic_encoder.h" | 11 #include "webrtc/modules/video_coding/generic_encoder.h" |
12 | 12 |
13 #include <vector> | 13 #include <vector> |
14 | 14 |
15 #include "webrtc/api/video/i420_buffer.h" | 15 #include "webrtc/api/video/i420_buffer.h" |
16 #include "webrtc/modules/video_coding/encoded_frame.h" | 16 #include "webrtc/modules/video_coding/encoded_frame.h" |
17 #include "webrtc/modules/video_coding/media_optimization.h" | 17 #include "webrtc/modules/video_coding/media_optimization.h" |
18 #include "webrtc/rtc_base/checks.h" | 18 #include "webrtc/rtc_base/checks.h" |
19 #include "webrtc/rtc_base/logging.h" | 19 #include "webrtc/rtc_base/logging.h" |
| 20 #include "webrtc/rtc_base/optional.h" |
20 #include "webrtc/rtc_base/timeutils.h" | 21 #include "webrtc/rtc_base/timeutils.h" |
21 #include "webrtc/rtc_base/trace_event.h" | 22 #include "webrtc/rtc_base/trace_event.h" |
22 | 23 |
23 namespace webrtc { | 24 namespace webrtc { |
24 | 25 |
25 VCMGenericEncoder::VCMGenericEncoder( | 26 VCMGenericEncoder::VCMGenericEncoder( |
26 VideoEncoder* encoder, | 27 VideoEncoder* encoder, |
27 VCMEncodedFrameCallback* encoded_frame_callback, | 28 VCMEncodedFrameCallback* encoded_frame_callback, |
28 bool internal_source) | 29 bool internal_source) |
29 : encoder_(encoder), | 30 : encoder_(encoder), |
(...skipping 179 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
209 timing_frames_info_[simulcast_svc_idx].encode_start_time_ms[capture_time_ms] = | 210 timing_frames_info_[simulcast_svc_idx].encode_start_time_ms[capture_time_ms] = |
210 rtc::TimeMillis(); | 211 rtc::TimeMillis(); |
211 } | 212 } |
212 | 213 |
213 EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage( | 214 EncodedImageCallback::Result VCMEncodedFrameCallback::OnEncodedImage( |
214 const EncodedImage& encoded_image, | 215 const EncodedImage& encoded_image, |
215 const CodecSpecificInfo* codec_specific, | 216 const CodecSpecificInfo* codec_specific, |
216 const RTPFragmentationHeader* fragmentation_header) { | 217 const RTPFragmentationHeader* fragmentation_header) { |
217 TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded", | 218 TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded", |
218 "timestamp", encoded_image._timeStamp); | 219 "timestamp", encoded_image._timeStamp); |
219 bool is_timing_frame = false; | |
220 size_t outlier_frame_size = 0; | |
221 int64_t encode_start_ms = -1; | |
222 size_t simulcast_svc_idx = 0; | 220 size_t simulcast_svc_idx = 0; |
223 if (codec_specific->codecType == kVideoCodecVP9) { | 221 if (codec_specific->codecType == kVideoCodecVP9) { |
224 if (codec_specific->codecSpecific.VP9.num_spatial_layers > 1) | 222 if (codec_specific->codecSpecific.VP9.num_spatial_layers > 1) |
225 simulcast_svc_idx = codec_specific->codecSpecific.VP9.spatial_idx; | 223 simulcast_svc_idx = codec_specific->codecSpecific.VP9.spatial_idx; |
226 } else if (codec_specific->codecType == kVideoCodecVP8) { | 224 } else if (codec_specific->codecType == kVideoCodecVP8) { |
227 simulcast_svc_idx = codec_specific->codecSpecific.VP8.simulcastIdx; | 225 simulcast_svc_idx = codec_specific->codecSpecific.VP8.simulcastIdx; |
228 } else if (codec_specific->codecType == kVideoCodecGeneric) { | 226 } else if (codec_specific->codecType == kVideoCodecGeneric) { |
229 simulcast_svc_idx = codec_specific->codecSpecific.generic.simulcast_idx; | 227 simulcast_svc_idx = codec_specific->codecSpecific.generic.simulcast_idx; |
230 } else if (codec_specific->codecType == kVideoCodecH264) { | 228 } else if (codec_specific->codecType == kVideoCodecH264) { |
231 // TODO(ilnik): When h264 simulcast is landed, extract simulcast idx here. | 229 // TODO(ilnik): When h264 simulcast is landed, extract simulcast idx here. |
232 } | 230 } |
233 | 231 |
| 232 rtc::Optional<size_t> outlier_frame_size; |
| 233 rtc::Optional<int64_t> encode_start_ms; |
| 234 bool is_timing_frame = false; |
234 { | 235 { |
235 rtc::CritScope crit(&timing_params_lock_); | 236 rtc::CritScope crit(&timing_params_lock_); |
236 // TODO(ilnik): Workaround for hardware encoders, which do not call | 237 |
237 // |OnEncodeStarted| correctly. Once fixed, remove conditional check. | 238 // Encoders with internal sources do not call OnEncodeStarted and |
| 239 // OnFrameRateChanged. |timing_frames_info_| may be not filled here. |
238 if (simulcast_svc_idx < timing_frames_info_.size()) { | 240 if (simulcast_svc_idx < timing_frames_info_.size()) { |
239 RTC_CHECK_LT(simulcast_svc_idx, timing_frames_info_.size()); | |
240 | |
241 auto encode_start_map = | 241 auto encode_start_map = |
242 &timing_frames_info_[simulcast_svc_idx].encode_start_time_ms; | 242 &timing_frames_info_[simulcast_svc_idx].encode_start_time_ms; |
243 auto it = encode_start_map->find(encoded_image.capture_time_ms_); | 243 auto it = encode_start_map->find(encoded_image.capture_time_ms_); |
244 if (it != encode_start_map->end()) { | 244 if (it != encode_start_map->end()) { |
245 encode_start_ms = it->second; | 245 encode_start_ms.emplace(it->second); |
246 // Assuming all encoders do not reorder frames within single stream, | 246 // Assuming all encoders do not reorder frames within single stream, |
247 // there may be some dropped frames with smaller timestamps. These | 247 // there may be some dropped frames with smaller timestamps. These |
248 // should be purged. | 248 // should be purged. |
249 encode_start_map->erase(encode_start_map->begin(), it); | 249 encode_start_map->erase(encode_start_map->begin(), it); |
250 encode_start_map->erase(it); | 250 encode_start_map->erase(it); |
251 } else { | 251 } else { |
252 // Some chromium remoting unittests use generic encoder incorrectly | 252 // The encoder uses an internal source: clear our records of any frames |
253 // If timestamps do not match, purge them all. | 253 // just in case, to free memory. |
254 encode_start_map->erase(encode_start_map->begin(), | 254 encode_start_map->clear(); |
255 encode_start_map->end()); | |
256 } | 255 } |
257 | 256 |
258 int64_t timing_frame_delay_ms = | 257 size_t target_bitrate = |
259 encoded_image.capture_time_ms_ - last_timing_frame_time_ms_; | 258 timing_frames_info_[simulcast_svc_idx].target_bitrate_bytes_per_sec; |
260 if (last_timing_frame_time_ms_ == -1 || | 259 if (framerate_ > 0 && target_bitrate > 0) { |
261 timing_frame_delay_ms >= timing_frames_thresholds_.delay_ms || | 260 // framerate and target bitrate were reported by encoder. |
262 timing_frame_delay_ms == 0) { | 261 size_t average_frame_size = target_bitrate / framerate_; |
263 is_timing_frame = true; | 262 outlier_frame_size.emplace( |
264 last_timing_frame_time_ms_ = encoded_image.capture_time_ms_; | 263 average_frame_size * |
| 264 timing_frames_thresholds_.outlier_ratio_percent / 100); |
265 } | 265 } |
266 // TODO(ilnik): Once OnFramerateChanged is called correctly by hardware | 266 } |
267 // encoders, remove the conditional check below. | 267 |
268 if (framerate_ > 0) { | 268 // Check if it's time to send a timing frame. |
269 RTC_CHECK_GT(framerate_, 0); | 269 int64_t timing_frame_delay_ms = |
270 size_t average_frame_size = | 270 encoded_image.capture_time_ms_ - last_timing_frame_time_ms_; |
271 timing_frames_info_[simulcast_svc_idx].target_bitrate_bytes_per_sec | 271 // Trigger threshold if it's a first frame, too long passed since the last |
272 / framerate_; | 272 // timing frame, or we already sent timing frame on a different simulcast |
273 outlier_frame_size = average_frame_size * | 273 // stream with the same capture time. |
274 timing_frames_thresholds_.outlier_ratio_percent / | 274 if (last_timing_frame_time_ms_ == -1 || |
275 100; | 275 timing_frame_delay_ms >= timing_frames_thresholds_.delay_ms || |
276 } else { | 276 timing_frame_delay_ms == 0) { |
277 outlier_frame_size = encoded_image._length + 1; | 277 is_timing_frame = true; |
278 } | 278 last_timing_frame_time_ms_ = encoded_image.capture_time_ms_; |
279 } else { | 279 } |
280 // We don't have any information prior to encode start, thus we can't | 280 |
281 // reliably detect outliers. Set outlier size to anything larger than | 281 // Outliers trigger timing frames, but do not affect scheduled timing |
282 // current frame size. | 282 // frames. |
283 outlier_frame_size = encoded_image._length + 1; | 283 if (outlier_frame_size && encoded_image._length >= *outlier_frame_size) { |
| 284 is_timing_frame = true; |
284 } | 285 } |
285 } | 286 } |
286 | 287 |
287 if (encoded_image._length >= outlier_frame_size) { | 288 // If encode start is not available that means that encoder uses internal |
288 is_timing_frame = true; | 289 // source. In that case capture timestamp may be from a different clock with a |
289 } | 290 // drift relative to rtc::TimeMillis(). We can't use it for Timing frames, |
290 if (encode_start_ms >= 0 && is_timing_frame) { | 291 // because, to be sent over the network, the capture time is required to |
291 encoded_image.SetEncodeTime(encode_start_ms, rtc::TimeMillis()); | 292 // be less than all the other timestamps. |
| 293 if (is_timing_frame && encode_start_ms) { |
| 294 encoded_image.SetEncodeTime(*encode_start_ms, rtc::TimeMillis()); |
292 } | 295 } |
293 | 296 |
294 Result result = post_encode_callback_->OnEncodedImage( | 297 Result result = post_encode_callback_->OnEncodedImage( |
295 encoded_image, codec_specific, fragmentation_header); | 298 encoded_image, codec_specific, fragmentation_header); |
296 if (result.error != Result::OK) | 299 if (result.error != Result::OK) |
297 return result; | 300 return result; |
298 | 301 |
299 if (media_opt_) { | 302 if (media_opt_) { |
300 media_opt_->UpdateWithEncodedData(encoded_image); | 303 media_opt_->UpdateWithEncodedData(encoded_image); |
301 if (internal_source_) { | 304 if (internal_source_) { |
302 // Signal to encoder to drop next frame. | 305 // Signal to encoder to drop next frame. |
303 result.drop_next_frame = media_opt_->DropFrame(); | 306 result.drop_next_frame = media_opt_->DropFrame(); |
304 } | 307 } |
305 } | 308 } |
306 return result; | 309 return result; |
307 } | 310 } |
308 | 311 |
309 } // namespace webrtc | 312 } // namespace webrtc |
OLD | NEW |