OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 67 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
78 } | 78 } |
79 | 79 |
80 const int64_t now_ms = _clock->TimeInMilliseconds(); | 80 const int64_t now_ms = _clock->TimeInMilliseconds(); |
81 if (!decode_time_ms) { | 81 if (!decode_time_ms) { |
82 decode_time_ms = | 82 decode_time_ms = |
83 rtc::Optional<int32_t>(now_ms - frameInfo->decodeStartTimeMs); | 83 rtc::Optional<int32_t>(now_ms - frameInfo->decodeStartTimeMs); |
84 } | 84 } |
85 _timing->StopDecodeTimer(decodedImage.timestamp(), *decode_time_ms, now_ms, | 85 _timing->StopDecodeTimer(decodedImage.timestamp(), *decode_time_ms, now_ms, |
86 frameInfo->renderTimeMs); | 86 frameInfo->renderTimeMs); |
87 | 87 |
88 // Report timing information. | |
89 if (frameInfo->timing.is_timing_frame) { | |
90 int64_t ntp_offset = | |
91 _clock->CurrentNtpInMilliseconds() - _clock->TimeInMilliseconds(); | |
sprang_webrtc
2017/05/31 11:12:55
This will be racy, in the sense that the offset ma
ilnik
2017/05/31 15:17:45
I think nothing can be done here, as we are using
sprang_webrtc
2017/06/05 14:39:20
Some imprecision is unavoidable, agreed, but we ca
ilnik
2017/06/07 14:25:03
Oh, I've got what you mean. I didn't find any cach
| |
92 // Convert remote timestamps to local time from ntp timestamps. | |
93 frameInfo->timing.encode_start_ms -= ntp_offset; | |
94 frameInfo->timing.encode_finish_ms -= ntp_offset; | |
95 frameInfo->timing.packetization_finish_ms -= ntp_offset; | |
96 frameInfo->timing.pacer_exit_ms -= ntp_offset; | |
97 frameInfo->timing.network_timestamp_ms -= ntp_offset; | |
98 // TODO(ilnik): Report timing information here. | |
99 // Capture time: decodedImage.ntp_time_ms() - ntp_offset | |
100 // Encode start: frameInfo->timing.encode_start_ms | |
101 // Encode finish: frameInfo->timing.encode_finish_ms | |
102 // Packetization done: frameInfo->timing.packetization_finish_ms | |
103 // Pacer exit: frameInfo->timing.pacer_exit_ms | |
104 // Network timestamp: frameInfo->timing.network_timestamp_ms | |
105 // Receive start: frameInfo->timing.receive_start_ms | |
106 // Receive finish: frameInfo->timing.receive_finish_ms | |
107 // Decode start: frameInfo->decodeStartTimeMs | |
108 // Decode finish: now_ms | |
109 // Render time: frameInfo->renderTimeMs | |
110 } | |
111 | |
88 decodedImage.set_timestamp_us( | 112 decodedImage.set_timestamp_us( |
89 frameInfo->renderTimeMs * rtc::kNumMicrosecsPerMillisec); | 113 frameInfo->renderTimeMs * rtc::kNumMicrosecsPerMillisec); |
90 decodedImage.set_rotation(frameInfo->rotation); | 114 decodedImage.set_rotation(frameInfo->rotation); |
91 _receiveCallback->FrameToRender(decodedImage, qp, frameInfo->content_type); | 115 _receiveCallback->FrameToRender(decodedImage, qp, frameInfo->content_type); |
92 } | 116 } |
93 | 117 |
94 int32_t VCMDecodedFrameCallback::ReceivedDecodedReferenceFrame( | 118 int32_t VCMDecodedFrameCallback::ReceivedDecodedReferenceFrame( |
95 const uint64_t pictureId) { | 119 const uint64_t pictureId) { |
96 return _receiveCallback->ReceivedDecodedReferenceFrame(pictureId); | 120 return _receiveCallback->ReceivedDecodedReferenceFrame(pictureId); |
97 } | 121 } |
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
144 | 168 |
145 return _decoder->InitDecode(settings, numberOfCores); | 169 return _decoder->InitDecode(settings, numberOfCores); |
146 } | 170 } |
147 | 171 |
148 int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, int64_t nowMs) { | 172 int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, int64_t nowMs) { |
149 TRACE_EVENT1("webrtc", "VCMGenericDecoder::Decode", "timestamp", | 173 TRACE_EVENT1("webrtc", "VCMGenericDecoder::Decode", "timestamp", |
150 frame.EncodedImage()._timeStamp); | 174 frame.EncodedImage()._timeStamp); |
151 _frameInfos[_nextFrameInfoIdx].decodeStartTimeMs = nowMs; | 175 _frameInfos[_nextFrameInfoIdx].decodeStartTimeMs = nowMs; |
152 _frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs(); | 176 _frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs(); |
153 _frameInfos[_nextFrameInfoIdx].rotation = frame.rotation(); | 177 _frameInfos[_nextFrameInfoIdx].rotation = frame.rotation(); |
178 _frameInfos[_nextFrameInfoIdx].timing = frame.video_timing(); | |
154 // Set correctly only for key frames. Thus, use latest key frame | 179 // Set correctly only for key frames. Thus, use latest key frame |
155 // content type. If the corresponding key frame was lost, decode will fail | 180 // content type. If the corresponding key frame was lost, decode will fail |
156 // and content type will be ignored. | 181 // and content type will be ignored. |
157 if (frame.FrameType() == kVideoFrameKey) { | 182 if (frame.FrameType() == kVideoFrameKey) { |
158 _frameInfos[_nextFrameInfoIdx].content_type = frame.contentType(); | 183 _frameInfos[_nextFrameInfoIdx].content_type = frame.contentType(); |
159 _last_keyframe_content_type = frame.contentType(); | 184 _last_keyframe_content_type = frame.contentType(); |
160 } else { | 185 } else { |
161 _frameInfos[_nextFrameInfoIdx].content_type = _last_keyframe_content_type; | 186 _frameInfos[_nextFrameInfoIdx].content_type = _last_keyframe_content_type; |
162 } | 187 } |
163 _callback->Map(frame.TimeStamp(), &_frameInfos[_nextFrameInfoIdx]); | 188 _callback->Map(frame.TimeStamp(), &_frameInfos[_nextFrameInfoIdx]); |
(...skipping 30 matching lines...) Expand all Loading... | |
194 | 219 |
195 bool VCMGenericDecoder::External() const { | 220 bool VCMGenericDecoder::External() const { |
196 return _isExternal; | 221 return _isExternal; |
197 } | 222 } |
198 | 223 |
199 bool VCMGenericDecoder::PrefersLateDecoding() const { | 224 bool VCMGenericDecoder::PrefersLateDecoding() const { |
200 return _decoder->PrefersLateDecoding(); | 225 return _decoder->PrefersLateDecoding(); |
201 } | 226 } |
202 | 227 |
203 } // namespace webrtc | 228 } // namespace webrtc |
OLD | NEW |