OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 202 matching lines...) | |
213 } | 213 } |
214 | 214 |
215 rtc::scoped_refptr<VideoFrameBuffer> buffer(frame_reader_->ReadFrame()); | 215 rtc::scoped_refptr<VideoFrameBuffer> buffer(frame_reader_->ReadFrame()); |
216 if (buffer) { | 216 if (buffer) { |
217 // Use the frame number as "timestamp" to identify frames. | 217 // Use the frame number as "timestamp" to identify frames. |
218 VideoFrame source_frame(buffer, frame_number, 0, webrtc::kVideoRotation_0); | 218 VideoFrame source_frame(buffer, frame_number, 0, webrtc::kVideoRotation_0); |
219 | 219 |
220 // Ensure we have a new statistics data object we can fill. | 220 // Ensure we have a new statistics data object we can fill. |
221 FrameStatistic& stat = stats_->NewFrame(frame_number); | 221 FrameStatistic& stat = stats_->NewFrame(frame_number); |
222 | 222 |
223 encode_start_ns_ = rtc::TimeNanos(); | |
224 | |
225 // Decide if we are going to force a keyframe. | 223 // Decide if we are going to force a keyframe. |
226 std::vector<FrameType> frame_types(1, kVideoFrameDelta); | 224 std::vector<FrameType> frame_types(1, kVideoFrameDelta); |
227 if (config_.keyframe_interval > 0 && | 225 if (config_.keyframe_interval > 0 && |
228 frame_number % config_.keyframe_interval == 0) { | 226 frame_number % config_.keyframe_interval == 0) { |
229 frame_types[0] = kVideoFrameKey; | 227 frame_types[0] = kVideoFrameKey; |
230 } | 228 } |
231 | 229 |
232 // For dropped frames, we regard them as zero size encoded frames. | 230 // For dropped frames, we regard them as zero size encoded frames. |
233 encoded_frame_size_ = 0; | 231 encoded_frame_size_ = 0; |
234 encoded_frame_type_ = kVideoFrameDelta; | 232 encoded_frame_type_ = kVideoFrameDelta; |
235 | 233 |
| 234 encode_start_ns_ = rtc::TimeNanos(); |
236 int32_t encode_result = | 235 int32_t encode_result = |
237 encoder_->Encode(source_frame, nullptr, &frame_types); | 236 encoder_->Encode(source_frame, nullptr, &frame_types); |
238 | 237 |
239 if (encode_result != WEBRTC_VIDEO_CODEC_OK) { | 238 if (encode_result != WEBRTC_VIDEO_CODEC_OK) { |
240 fprintf(stderr, "Failed to encode frame %d, return code: %d\n", | 239 fprintf(stderr, "Failed to encode frame %d, return code: %d\n", |
241 frame_number, encode_result); | 240 frame_number, encode_result); |
242 } | 241 } |
243 stat.encode_return_code = encode_result; | 242 stat.encode_return_code = encode_result; |
244 | 243 |
245 return true; | 244 return true; |
246 } else { | 245 } else { |
247 // Last frame has been reached. | 246 // Last frame has been reached. |
248 return false; | 247 return false; |
249 } | 248 } |
250 } | 249 } |
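The hunk above moves the `encode_start_ns_` capture from the top of the per-frame setup (old line 223) to immediately before `encoder_->Encode()` (new line 234), so the keyframe decision and stats allocation no longer count toward the measured encode time. A minimal standalone sketch of that measurement pattern, assuming nothing beyond the standard library; `TimeNanos()` and `FakeEncode()` are hypothetical stand-ins for `rtc::TimeNanos()` and `encoder_->Encode()`, not the WebRTC APIs:

```cpp
#include <chrono>
#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for rtc::TimeNanos().
int64_t TimeNanos() {
  return std::chrono::duration_cast<std::chrono::nanoseconds>(
             std::chrono::steady_clock::now().time_since_epoch())
      .count();
}

// Hypothetical stand-in for the encoder_->Encode() call.
void FakeEncode() {}

int main() {
  // ... per-frame setup (keyframe decision, stats object) happens here ...

  // Start the clock as late as possible, right before the encode call, so
  // the setup cost above is excluded from the measured encode time.
  int64_t encode_start_ns = TimeNanos();
  FakeEncode();
  // In the real code the stop time is captured first thing in the
  // FrameEncoded() callback; here we read it right after the call returns.
  int64_t encode_stop_ns = TimeNanos();

  std::printf("encode time: %lld us\n",
              static_cast<long long>(
                  (encode_stop_ns - encode_start_ns) / 1000));
  return 0;
}
```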
251 | 250 |
252 void VideoProcessorImpl::FrameEncoded( | 251 void VideoProcessorImpl::FrameEncoded( |
253 webrtc::VideoCodecType codec, | 252 webrtc::VideoCodecType codec, |
254 const EncodedImage& encoded_image, | 253 const EncodedImage& encoded_image, |
255 const webrtc::RTPFragmentationHeader* fragmentation) { | 254 const webrtc::RTPFragmentationHeader* fragmentation) { |
| 255 int64_t encode_stop_ns = rtc::TimeNanos(); |
sprang_webrtc, 2017/02/15 10:50:59:
> Maybe add comments clarifying that timestamps shou…

brandtr, 2017/02/15 12:13:36:
> Good idea. Done.
| 256 |
256 // Timestamp is frame number, so this gives us #dropped frames. | 257 // Timestamp is frame number, so this gives us #dropped frames. |
257 int num_dropped_from_prev_encode = | 258 int num_dropped_from_prev_encode = |
258 encoded_image._timeStamp - prev_time_stamp_ - 1; | 259 encoded_image._timeStamp - prev_time_stamp_ - 1; |
259 num_dropped_frames_ += num_dropped_from_prev_encode; | 260 num_dropped_frames_ += num_dropped_from_prev_encode; |
260 prev_time_stamp_ = encoded_image._timeStamp; | 261 prev_time_stamp_ = encoded_image._timeStamp; |
261 if (num_dropped_from_prev_encode > 0) { | 262 if (num_dropped_from_prev_encode > 0) { |
262 // For dropped frames, we write out the last decoded frame to avoid getting | 263 // For dropped frames, we write out the last decoded frame to avoid getting |
263 // out of sync for the computation of PSNR and SSIM. | 264 // out of sync for the computation of PSNR and SSIM. |
264 for (int i = 0; i < num_dropped_from_prev_encode; i++) { | 265 for (int i = 0; i < num_dropped_from_prev_encode; i++) { |
265 frame_writer_->WriteFrame(last_successful_frame_buffer_.get()); | 266 frame_writer_->WriteFrame(last_successful_frame_buffer_.get()); |
266 } | 267 } |
267 } | 268 } |
268 // Frame is not dropped, so update the encoded frame size | 269 // Frame is not dropped, so update the encoded frame size |
269 // (encoder callback is only called for non-zero length frames). | 270 // (encoder callback is only called for non-zero length frames). |
270 encoded_frame_size_ = encoded_image._length; | 271 encoded_frame_size_ = encoded_image._length; |
271 encoded_frame_type_ = encoded_image._frameType; | 272 encoded_frame_type_ = encoded_image._frameType; |
272 | |
273 int64_t encode_stop_ns = rtc::TimeNanos(); | |
274 int frame_number = encoded_image._timeStamp; | 273 int frame_number = encoded_image._timeStamp; |
275 | 274 |
276 FrameStatistic& stat = stats_->stats_[frame_number]; | 275 FrameStatistic& stat = stats_->stats_[frame_number]; |
277 stat.encode_time_in_us = | 276 stat.encode_time_in_us = |
278 GetElapsedTimeMicroseconds(encode_start_ns_, encode_stop_ns); | 277 GetElapsedTimeMicroseconds(encode_start_ns_, encode_stop_ns); |
279 stat.encoding_successful = true; | 278 stat.encoding_successful = true; |
280 stat.encoded_frame_length_in_bytes = encoded_image._length; | 279 stat.encoded_frame_length_in_bytes = encoded_image._length; |
281 stat.frame_number = encoded_image._timeStamp; | 280 stat.frame_number = encoded_image._timeStamp; |
282 stat.frame_type = encoded_image._frameType; | 281 stat.frame_type = encoded_image._frameType; |
283 stat.bit_rate_in_kbps = encoded_image._length * bit_rate_factor_; | 282 stat.bit_rate_in_kbps = encoded_image._length * bit_rate_factor_; |
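Since the frame number doubles as the RTP timestamp here, the gap between consecutive timestamps seen by the encode callback directly yields the number of dropped frames, which is what the `num_dropped_from_prev_encode` computation above relies on. A small sketch of that arithmetic; `CountDropped()` is a hypothetical helper for illustration, not a `VideoProcessorImpl` member:

```cpp
#include <cassert>
#include <cstdint>

// Timestamps are consecutive frame numbers, so any gap larger than one
// means the encoder dropped the frames in between.
int CountDropped(uint32_t prev_time_stamp, uint32_t cur_time_stamp) {
  return static_cast<int>(cur_time_stamp - prev_time_stamp) - 1;
}

int main() {
  assert(CountDropped(3, 7) == 3);  // frames 4, 5 and 6 were dropped
  assert(CountDropped(3, 4) == 0);  // consecutive frames: nothing dropped
  return 0;
}
```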
(...skipping 119 matching lines...) | |
403 int VideoProcessorImpl::GetElapsedTimeMicroseconds(int64_t start, | 402 int VideoProcessorImpl::GetElapsedTimeMicroseconds(int64_t start, |
404 int64_t stop) { | 403 int64_t stop) { |
405 int64_t encode_time = (stop - start) / rtc::kNumNanosecsPerMicrosec; | 404 int64_t encode_time = (stop - start) / rtc::kNumNanosecsPerMicrosec; |
406 RTC_DCHECK_GE(encode_time, std::numeric_limits<int>::min()); | 405 RTC_DCHECK_GE(encode_time, std::numeric_limits<int>::min()); |
407 RTC_DCHECK_LE(encode_time, std::numeric_limits<int>::max()); | 406 RTC_DCHECK_LE(encode_time, std::numeric_limits<int>::max()); |
408 return static_cast<int>(encode_time); | 407 return static_cast<int>(encode_time); |
409 } | 408 } |
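`GetElapsedTimeMicroseconds()` divides the nanosecond delta by `rtc::kNumNanosecsPerMicrosec` (1000 ns per microsecond) and DCHECKs that the result fits in an `int` before the narrowing cast. A standalone sketch of the same conversion, with `assert()` as a stand-in for `RTC_DCHECK_GE`/`RTC_DCHECK_LE`:

```cpp
#include <cassert>
#include <cstdint>
#include <limits>

constexpr int64_t kNumNanosecsPerMicrosec = 1000;

int ElapsedMicroseconds(int64_t start_ns, int64_t stop_ns) {
  int64_t elapsed_us = (stop_ns - start_ns) / kNumNanosecsPerMicrosec;
  // Guard the narrowing int64_t -> int cast, mirroring the DCHECKs above.
  assert(elapsed_us >= std::numeric_limits<int>::min());
  assert(elapsed_us <= std::numeric_limits<int>::max());
  return static_cast<int>(elapsed_us);
}

int main() {
  assert(ElapsedMicroseconds(0, 5000000) == 5000);  // 5 ms -> 5000 us
  return 0;
}
```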
410 | 409 |
411 } // namespace test | 410 } // namespace test |
412 } // namespace webrtc | 411 } // namespace webrtc |