Index: webrtc/modules/video_coding/codecs/test/videoprocessor.cc
diff --git a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
index e64babd599f4c4a59c8d2bda576d3f0cfc383f3f..e43be97ae621086f0cbcde765fac7e5b8f529a1c 100644
--- a/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
+++ b/webrtc/modules/video_coding/codecs/test/videoprocessor.cc
@@ -17,6 +17,7 @@
 #include <memory>
 #include <vector>
 
+#include "webrtc/base/timeutils.h"
 #include "webrtc/system_wrappers/include/cpu_info.h"
 
 namespace webrtc {
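
The new include pulls in rtc::TimeNanos(), which returns a timestamp as
int64_t nanoseconds, plus conversion constants such as
rtc::kNumNanosecsPerMicrosec (both names appear in the hunks below). A
minimal sketch of the timing pattern this CL adopts; ElapsedMicrosDuring()
and do_work are hypothetical stand-ins, only the rtc:: names come from
timeutils.h:

    #include <cstdint>
    #include "webrtc/base/timeutils.h"

    // Times a callback and returns the elapsed wall time in microseconds.
    int64_t ElapsedMicrosDuring(void (*do_work)()) {
      int64_t start_ns = rtc::TimeNanos();
      do_work();  // Hypothetical workload being timed.
      return (rtc::TimeNanos() - start_ns) / rtc::kNumNanosecsPerMicrosec;
    }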

@@ -198,7 +199,7 @@ bool VideoProcessorImpl::ProcessFrame(int frame_number) {
   // Ensure we have a new statistics data object we can fill:
   FrameStatistic& stat = stats_->NewFrame(frame_number);
 
-  encode_start_ = TickTime::Now();
+  encode_start_ = rtc::TimeNanos();
   // Use the frame number as "timestamp" to identify frames
   source_frame_.set_timestamp(frame_number);
 
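Because encode_start_ now receives an int64_t, the matching member
declarations in videoprocessor.h (not shown in this diff) presumably change
type as well. A hypothetical sketch of that header change, abbreviated to
just the two members:

    #include <cstdint>

    class VideoProcessorImpl {
     private:
      // Start times in nanoseconds; previously webrtc::TickTime objects.
      int64_t encode_start_ = 0;
      int64_t decode_start_ = 0;
    };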

@@ -248,7 +249,7 @@ void VideoProcessorImpl::FrameEncoded(
 
   encoded_frame_type_ = encoded_image._frameType;
 
-  TickTime encode_stop = TickTime::Now();
+  int64_t encode_stop = rtc::TimeNanos();
   int frame_number = encoded_image._timeStamp;
   FrameStatistic& stat = stats_->stats_[frame_number];
   stat.encode_time_in_us =
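
encode_stop is sampled as the first statement of the callback, and the
frame number set in ProcessFrame() round-trips through the encoded image's
RTP timestamp to locate the right FrameStatistic. Since there is a single
encode_start_ member, this bookkeeping assumes at most one frame is in
flight between ProcessFrame() and FrameEncoded(). A stand-alone sketch of
that pairing (stand-in names, not the real class):

    #include <cstdint>

    int64_t g_encode_start_ns = 0;  // plays the role of encode_start_

    void StartEncodeTimer(int64_t now_ns) { g_encode_start_ns = now_ns; }

    // Returns elapsed microseconds; truncates sub-microsecond remainders.
    int StopEncodeTimerMicros(int64_t now_ns) {
      return static_cast<int>((now_ns - g_encode_start_ns) / 1000);
    }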

@@ -299,7 +300,7 @@ void VideoProcessorImpl::FrameEncoded(
 
   // Keep track of if frames are lost due to packet loss so we can tell
   // this to the encoder (this is handled by the RTP logic in the full stack)
-  decode_start_ = TickTime::Now();
+  decode_start_ = rtc::TimeNanos();
   // TODO(kjellander): Pass fragmentation header to the decoder when
   // CL 172001 has been submitted and PacketManipulator supports this.
   int32_t decode_result =

@@ -315,7 +316,7 @@ void VideoProcessorImpl::FrameEncoded(
 }
 
 void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
-  TickTime decode_stop = TickTime::Now();
+  int64_t decode_stop = rtc::TimeNanos();
   int frame_number = image.timestamp();
   // Report stats
   FrameStatistic& stat = stats_->stats_[frame_number];
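
The decode path mirrors the encode path: decode_start_ is set just before
the decode call and decode_stop is sampled at the top of FrameDecoded().
Because start and stop live in different methods, a scoped RAII timer could
not replace the paired members here; for single-scope measurements, though,
one could build such a helper on the same primitives. A hypothetical sketch
(not part of this CL):

    #include <cstdint>
    #include "webrtc/base/timeutils.h"

    // Writes the elapsed microseconds to *out_us when the scope exits.
    class ScopedMicrosTimer {
     public:
      explicit ScopedMicrosTimer(int64_t* out_us)
          : out_us_(out_us), start_ns_(rtc::TimeNanos()) {}
      ~ScopedMicrosTimer() {
        *out_us_ =
            (rtc::TimeNanos() - start_ns_) / rtc::kNumNanosecsPerMicrosec;
      }
     private:
      int64_t* const out_us_;
      const int64_t start_ns_;
    };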

@@ -379,9 +380,9 @@ void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
 }
 
 int VideoProcessorImpl::GetElapsedTimeMicroseconds(
-    const webrtc::TickTime& start,
-    const webrtc::TickTime& stop) {
-  uint64_t encode_time = (stop - start).Microseconds();
+    int64_t start,
+    int64_t stop) {
+  uint64_t encode_time = (stop - start) / rtc::kNumNanosecsPerMicrosec;
   assert(encode_time <
          static_cast<unsigned int>(std::numeric_limits<int>::max()));
   return static_cast<int>(encode_time);
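
GetElapsedTimeMicroseconds() now divides a nanosecond difference by
rtc::kNumNanosecsPerMicrosec instead of asking the old TickTime interval for
microseconds. The integer division truncates toward zero, and the
uint64_t-plus-assert guard catches grossly invalid inputs: a difference of
-1 us or less yields a negative quotient that wraps to a huge unsigned value
and trips the assert, while a sub-microsecond negative difference truncates
to zero and passes silently. A stand-alone illustration (plain C++; the
local constant mirrors the rtc:: one):

    #include <cassert>
    #include <cstdint>
    #include <limits>

    int ElapsedMicros(int64_t start_ns, int64_t stop_ns) {
      const int64_t kNumNanosecsPerMicrosec = 1000;
      uint64_t us = (stop_ns - start_ns) / kNumNanosecsPerMicrosec;
      assert(us < static_cast<unsigned int>(std::numeric_limits<int>::max()));
      return static_cast<int>(us);
    }

    int main() {
      assert(ElapsedMicros(5, 2504) == 2);  // 2499 ns -> 2 us (truncated).
      assert(ElapsedMicros(100, 50) == 0);  // -50 ns truncates to 0, unnoticed.
      return 0;
    }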