OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 49 matching lines...) | |
60 | 60 |
61 VideoCaptureInput::~VideoCaptureInput() { | 61 VideoCaptureInput::~VideoCaptureInput() { |
62 module_process_thread_->DeRegisterModule(overuse_detector_.get()); | 62 module_process_thread_->DeRegisterModule(overuse_detector_.get()); |
63 | 63 |
64 // Stop the thread. | 64 // Stop the thread. |
65 rtc::AtomicOps::ReleaseStore(&stop_, 1); | 65 rtc::AtomicOps::ReleaseStore(&stop_, 1); |
66 capture_event_.Set(); | 66 capture_event_.Set(); |
67 encoder_thread_.Stop(); | 67 encoder_thread_.Stop(); |
68 } | 68 } |
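Note: the destructor above follows a common worker-thread shutdown protocol: set an atomic stop flag, signal the event so the encoder thread wakes up and observes the flag, then join the thread. Below is a minimal sketch of the same pattern using only standard-library types; the real code uses rtc::AtomicOps, rtc::Event and the encoder thread wrapper, and the class and member names here are illustrative only.

#include <atomic>
#include <chrono>
#include <condition_variable>
#include <mutex>
#include <thread>

// Sketch only, not the WebRTC implementation: stop flag + wakeup + join.
class WorkerSketch {
 public:
  WorkerSketch() : thread_([this] { Run(); }) {}
  ~WorkerSketch() {
    stop_.store(true, std::memory_order_release);  // ~ ReleaseStore(&stop_, 1)
    cv_.notify_one();                              // ~ capture_event_.Set()
    thread_.join();                                // ~ encoder_thread_.Stop()
  }

 private:
  void Run() {
    std::unique_lock<std::mutex> lock(mutex_);
    while (!stop_.load(std::memory_order_acquire)) {
      // Wake up when work arrives or when the destructor signals stop; the
      // timeout bounds how long shutdown can be missed.
      cv_.wait_for(lock, std::chrono::milliseconds(100));
    }
  }

  std::atomic<bool> stop_{false};
  std::mutex mutex_;
  std::condition_variable cv_;
  std::thread thread_;
};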
69 | 69 |
70 void VideoCaptureInput::IncomingCapturedFrame(const VideoFrame& video_frame) { | 70 VideoFrame VideoCaptureInput::UpdateTimestamps(const VideoFrame& video_frame) { |
71 // TODO(pbos): Remove local rendering, it should be handled by the client code | |
72 // if required. | |
73 if (local_renderer_) | |
74 local_renderer_->RenderFrame(video_frame, 0); | |
75 | |
76 stats_proxy_->OnIncomingFrame(video_frame.width(), video_frame.height()); | |
77 | |
78 VideoFrame incoming_frame = video_frame; | 71 VideoFrame incoming_frame = video_frame; |
79 | 72 |
80 if (incoming_frame.ntp_time_ms() != 0) { | 73 if (incoming_frame.ntp_time_ms() != 0) { |
81 // If a NTP time stamp is set, this is the time stamp we will use. | 74 // If a NTP time stamp is set, this is the time stamp we will use. |
82 incoming_frame.set_render_time_ms(incoming_frame.ntp_time_ms() - | 75 incoming_frame.set_render_time_ms(incoming_frame.ntp_time_ms() - |
83 delta_ntp_internal_ms_); | 76 delta_ntp_internal_ms_); |
84 } else { // NTP time stamp not set. | 77 } else { // NTP time stamp not set. |
85 int64_t render_time = incoming_frame.render_time_ms() != 0 | 78 int64_t render_time = incoming_frame.render_time_ms() != 0 |
86 ? incoming_frame.render_time_ms() | 79 ? incoming_frame.render_time_ms() |
87 : TickTime::MillisecondTimestamp(); | 80 : TickTime::MillisecondTimestamp(); |
88 | 81 |
89 incoming_frame.set_render_time_ms(render_time); | 82 incoming_frame.set_render_time_ms(render_time); |
90 incoming_frame.set_ntp_time_ms(render_time + delta_ntp_internal_ms_); | 83 incoming_frame.set_ntp_time_ms(render_time + delta_ntp_internal_ms_); |
91 } | 84 } |
92 | 85 |
93 // Convert NTP time, in ms, to RTP timestamp. | 86 // Convert NTP time, in ms, to RTP timestamp. |
94 const int kMsToRtpTimestamp = 90; | 87 const int kMsToRtpTimestamp = 90; |
95 incoming_frame.set_timestamp( | 88 incoming_frame.set_timestamp( |
96 kMsToRtpTimestamp * static_cast<uint32_t>(incoming_frame.ntp_time_ms())); | 89 kMsToRtpTimestamp * static_cast<uint32_t>(incoming_frame.ntp_time_ms())); |
97 | 90 |
91 return incoming_frame; | |
92 } | |
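Note: the conversion in this function relies on video RTP media clocks running at 90 kHz, so one millisecond of NTP time corresponds to 90 RTP ticks. Below is a minimal standalone sketch of the same timestamp selection and conversion; SketchFrame, UpdateTimestampsSketch and the now_ms parameter are illustrative stand-ins for the WebRTC types and TickTime helper used above.

#include <cstdint>

struct SketchFrame {
  int64_t ntp_time_ms = 0;     // 0 means "not set".
  int64_t render_time_ms = 0;  // 0 means "not set".
  uint32_t rtp_timestamp = 0;
};

// Mirrors UpdateTimestamps() above: prefer the NTP capture time, fall back to
// the render time or to "now", then derive the 90 kHz RTP timestamp.
void UpdateTimestampsSketch(SketchFrame* frame,
                            int64_t now_ms,
                            int64_t delta_ntp_internal_ms) {
  if (frame->ntp_time_ms != 0) {
    // An NTP capture time is authoritative; translate it to local render time.
    frame->render_time_ms = frame->ntp_time_ms - delta_ntp_internal_ms;
  } else {
    // No NTP time: use the given render time, or "now" if that is unset too.
    int64_t render_time =
        frame->render_time_ms != 0 ? frame->render_time_ms : now_ms;
    frame->render_time_ms = render_time;
    frame->ntp_time_ms = render_time + delta_ntp_internal_ms;
  }
  // Video RTP timestamps use a 90 kHz clock: 1 ms == 90 ticks. The product
  // wraps modulo 2^32, which is expected for RTP timestamps.
  const int kMsToRtpTimestamp = 90;
  frame->rtp_timestamp =
      kMsToRtpTimestamp * static_cast<uint32_t>(frame->ntp_time_ms);
}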
93 | |
94 void VideoCaptureInput::IncomingCapturedFrame(const VideoFrame& video_frame) { | |
95 // TODO(pbos): Remove local rendering, it should be handled by the client code | |
96 // if required. | |
97 if (local_renderer_) | |
98 local_renderer_->RenderFrame(video_frame, 0); | |
pbos-webrtc 2015/12/18 16:23:16: Use this callback instead of wrapping input, but m
sprang_webrtc 2015/12/18 16:52:57: Done.
 | |
99 | |
100 stats_proxy_->OnIncomingFrame(video_frame.width(), video_frame.height()); | |
101 | |
102 VideoFrame incoming_frame = UpdateTimestamps(video_frame); | |
103 | |
98 CriticalSectionScoped cs(capture_cs_.get()); | 104 CriticalSectionScoped cs(capture_cs_.get()); |
99 if (incoming_frame.ntp_time_ms() <= last_captured_timestamp_) { | 105 if (incoming_frame.ntp_time_ms() <= last_captured_timestamp_) { |
100 // We don't allow the same capture time for two frames, drop this one. | 106 // We don't allow the same capture time for two frames, drop this one. |
101 LOG(LS_WARNING) << "Same/old NTP timestamp (" | 107 LOG(LS_WARNING) << "Same/old NTP timestamp (" |
102 << incoming_frame.ntp_time_ms() | 108 << incoming_frame.ntp_time_ms() |
103 << " <= " << last_captured_timestamp_ | 109 << " <= " << last_captured_timestamp_ |
104 << ") for incoming frame. Dropping."; | 110 << ") for incoming frame. Dropping."; |
105 return; | 111 return; |
106 } | 112 } |
107 | 113 |
(...skipping 49 matching lines...) | |
157 } | 163 } |
158 // We're done! | 164 // We're done! |
159 if (capture_time != -1) { | 165 if (capture_time != -1) { |
160 overuse_detector_->FrameSent(capture_time); | 166 overuse_detector_->FrameSent(capture_time); |
161 } | 167 } |
162 return true; | 168 return true; |
163 } | 169 } |
164 | 170 |
165 } // namespace internal | 171 } // namespace internal |
166 } // namespace webrtc | 172 } // namespace webrtc |