OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 15 matching lines...)
26 namespace webrtc { | 26 namespace webrtc { |
27 namespace videocapturemodule { | 27 namespace videocapturemodule { |
28 rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create( | 28 rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create( |
29 VideoCaptureExternal*& externalCapture) { | 29 VideoCaptureExternal*& externalCapture) { |
30 rtc::scoped_refptr<VideoCaptureImpl> implementation( | 30 rtc::scoped_refptr<VideoCaptureImpl> implementation( |
31 new rtc::RefCountedObject<VideoCaptureImpl>()); | 31 new rtc::RefCountedObject<VideoCaptureImpl>()); |
32 externalCapture = implementation.get(); | 32 externalCapture = implementation.get(); |
33 return implementation; | 33 return implementation; |
34 } | 34 } |
35 | 35 |
36 const char* VideoCaptureImpl::CurrentDeviceName() const | 36 const char* VideoCaptureImpl::CurrentDeviceName() const { |
37 { | 37 return _deviceUniqueId; |
38 return _deviceUniqueId; | |
39 } | 38 } |
40 | 39 |
41 // static | 40 // static |
42 int32_t VideoCaptureImpl::RotationFromDegrees(int degrees, | 41 int32_t VideoCaptureImpl::RotationFromDegrees(int degrees, |
43 VideoRotation* rotation) { | 42 VideoRotation* rotation) { |
44 switch (degrees) { | 43 switch (degrees) { |
45 case 0: | 44 case 0: |
46 *rotation = kVideoRotation_0; | 45 *rotation = kVideoRotation_0; |
47 return 0; | 46 return 0; |
48 case 90: | 47 case 90: |
(...skipping 40 matching lines...)
89 _lastProcessFrameTimeNanos(rtc::TimeNanos()), | 88 _lastProcessFrameTimeNanos(rtc::TimeNanos()), |
90 _rotateFrame(kVideoRotation_0), | 89 _rotateFrame(kVideoRotation_0), |
91 apply_rotation_(false) { | 90 apply_rotation_(false) { |
92 _requestedCapability.width = kDefaultWidth; | 91 _requestedCapability.width = kDefaultWidth; |
93 _requestedCapability.height = kDefaultHeight; | 92 _requestedCapability.height = kDefaultHeight; |
94 _requestedCapability.maxFPS = 30; | 93 _requestedCapability.maxFPS = 30; |
95 _requestedCapability.rawType = kVideoI420; | 94 _requestedCapability.rawType = kVideoI420; |
96 memset(_incomingFrameTimesNanos, 0, sizeof(_incomingFrameTimesNanos)); | 95 memset(_incomingFrameTimesNanos, 0, sizeof(_incomingFrameTimesNanos)); |
97 } | 96 } |
98 | 97 |
99 VideoCaptureImpl::~VideoCaptureImpl() | 98 VideoCaptureImpl::~VideoCaptureImpl() { |
100 { | 99 DeRegisterCaptureDataCallback(); |
101 DeRegisterCaptureDataCallback(); | 100 delete &_apiCs; |
102 delete &_apiCs; | |
103 | 101 |
104 if (_deviceUniqueId) | 102 if (_deviceUniqueId) |
105 delete[] _deviceUniqueId; | 103 delete[] _deviceUniqueId; |
106 } | 104 } |
107 | 105 |
108 void VideoCaptureImpl::RegisterCaptureDataCallback( | 106 void VideoCaptureImpl::RegisterCaptureDataCallback( |
109 rtc::VideoSinkInterface<VideoFrame>* dataCallBack) { | 107 rtc::VideoSinkInterface<VideoFrame>* dataCallBack) { |
110 CriticalSectionScoped cs(&_apiCs); | 108 CriticalSectionScoped cs(&_apiCs); |
111 _dataCallBack = dataCallBack; | 109 _dataCallBack = dataCallBack; |
112 } | 110 } |
113 | 111 |
114 void VideoCaptureImpl::DeRegisterCaptureDataCallback() { | 112 void VideoCaptureImpl::DeRegisterCaptureDataCallback() { |
115 CriticalSectionScoped cs(&_apiCs); | 113 CriticalSectionScoped cs(&_apiCs); |
116 _dataCallBack = NULL; | 114 _dataCallBack = NULL; |
117 } | 115 } |
118 int32_t VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame) { | 116 int32_t VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame) { |
119 UpdateFrameCount(); // frame count used for local frame rate callback. | 117 UpdateFrameCount(); // frame count used for local frame rate callback. |
120 | 118 |
121 if (_dataCallBack) { | 119 if (_dataCallBack) { |
122 _dataCallBack->OnFrame(captureFrame); | 120 _dataCallBack->OnFrame(captureFrame); |
123 } | 121 } |
124 | 122 |
125 return 0; | 123 return 0; |
126 } | 124 } |
127 | 125 |
128 int32_t VideoCaptureImpl::IncomingFrame( | 126 int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame, |
129 uint8_t* videoFrame, | 127 size_t videoFrameLength, |
130 size_t videoFrameLength, | 128 const VideoCaptureCapability& frameInfo, |
131 const VideoCaptureCapability& frameInfo, | 129 int64_t captureTime /*=0*/) { |
132 int64_t captureTime/*=0*/) | 130 CriticalSectionScoped cs(&_apiCs); |
133 { | |
134 CriticalSectionScoped cs(&_apiCs); | |
135 | 131 |
136 const int32_t width = frameInfo.width; | 132 const int32_t width = frameInfo.width; |
137 const int32_t height = frameInfo.height; | 133 const int32_t height = frameInfo.height; |
138 | 134 |
139 TRACE_EVENT1("webrtc", "VC::IncomingFrame", "capture_time", captureTime); | 135 TRACE_EVENT1("webrtc", "VC::IncomingFrame", "capture_time", captureTime); |
140 | 136 |
141 // Not encoded, convert to I420. | 137 // Not encoded, convert to I420. |
142 const VideoType commonVideoType = | 138 const VideoType commonVideoType = |
143 RawVideoTypeToCommonVideoVideoType(frameInfo.rawType); | 139 RawVideoTypeToCommonVideoVideoType(frameInfo.rawType); |
144 | 140 |
145 if (frameInfo.rawType != kVideoMJPEG && | 141 if (frameInfo.rawType != kVideoMJPEG && |
146 CalcBufferSize(commonVideoType, width, | 142 CalcBufferSize(commonVideoType, width, abs(height)) != videoFrameLength) { |
147 abs(height)) != videoFrameLength) | 143 LOG(LS_ERROR) << "Wrong incoming frame length."; |
148 { | 144 return -1; |
149 LOG(LS_ERROR) << "Wrong incoming frame length."; | 145 } |
150 return -1; | 146 |
| 147 int stride_y = width; |
| 148 int stride_uv = (width + 1) / 2; |
| 149 int target_width = width; |
| 150 int target_height = height; |
| 151 |
| 152 // SetApplyRotation doesn't take any lock. Make a local copy here. |
| 153 bool apply_rotation = apply_rotation_; |
| 154 |
| 155 if (apply_rotation) { |
| 156 // Rotating resolution for 90/270 degree rotations. |
| 157 if (_rotateFrame == kVideoRotation_90 || |
| 158 _rotateFrame == kVideoRotation_270) { |
| 159 target_width = abs(height); |
| 160 target_height = width; |
151 } | 161 } |
| 162 } |
152 | 163 |
153 int stride_y = width; | 164 // Setting absolute height (in case it was negative). |
154 int stride_uv = (width + 1) / 2; | 165 // In Windows, the image starts bottom left, instead of top left. |
155 int target_width = width; | 166 // Setting a negative source height inverts the image (within LibYuv). |
156 int target_height = height; | |
157 | 167 |
158 // SetApplyRotation doesn't take any lock. Make a local copy here. | 168 // TODO(nisse): Use a pool? |
159 bool apply_rotation = apply_rotation_; | 169 rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create( |
| 170 target_width, abs(target_height), stride_y, stride_uv, stride_uv); |
| 171 const int conversionResult = ConvertToI420( |
| 172 commonVideoType, videoFrame, 0, 0, // No cropping |
| 173 width, height, videoFrameLength, |
| 174 apply_rotation ? _rotateFrame : kVideoRotation_0, buffer.get()); |
| 175 if (conversionResult < 0) { |
| 176 LOG(LS_ERROR) << "Failed to convert capture frame from type " |
| 177 << frameInfo.rawType << " to I420."; |
| 178 return -1; |
| 179 } |
160 | 180 |
161 if (apply_rotation) { | 181 VideoFrame captureFrame(buffer, 0, rtc::TimeMillis(), |
162 // Rotating resolution for 90/270 degree rotations. | 182 !apply_rotation ? _rotateFrame : kVideoRotation_0); |
163 if (_rotateFrame == kVideoRotation_90 || | 183 captureFrame.set_ntp_time_ms(captureTime); |
164 _rotateFrame == kVideoRotation_270) { | |
165 target_width = abs(height); | |
166 target_height = width; | |
167 } | |
168 } | |
169 | 184 |
170 // Setting absolute height (in case it was negative). | 185 DeliverCapturedFrame(captureFrame); |
171 // In Windows, the image starts bottom left, instead of top left. | |
172 // Setting a negative source height inverts the image (within LibYuv). |
173 | 186 |
174 // TODO(nisse): Use a pool? | 187 return 0; |
175 rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create( | |
176 target_width, abs(target_height), stride_y, stride_uv, stride_uv); | |
177 const int conversionResult = ConvertToI420( | |
178 commonVideoType, videoFrame, 0, 0, // No cropping | |
179 width, height, videoFrameLength, | |
180 apply_rotation ? _rotateFrame : kVideoRotation_0, buffer.get()); | |
181 if (conversionResult < 0) | |
182 { | |
183 LOG(LS_ERROR) << "Failed to convert capture frame from type " | |
184 << frameInfo.rawType << " to I420."; |
185 return -1; | |
186 } | |
187 | |
188 VideoFrame captureFrame( | |
189 buffer, 0, rtc::TimeMillis(), | |
190 !apply_rotation ? _rotateFrame : kVideoRotation_0); | |
191 captureFrame.set_ntp_time_ms(captureTime); | |
192 | |
193 DeliverCapturedFrame(captureFrame); | |
194 | |
195 return 0; | |
196 } | 188 } |
197 | 189 |
198 int32_t VideoCaptureImpl::SetCaptureRotation(VideoRotation rotation) { | 190 int32_t VideoCaptureImpl::SetCaptureRotation(VideoRotation rotation) { |
199 CriticalSectionScoped cs(&_apiCs); | 191 CriticalSectionScoped cs(&_apiCs); |
200 _rotateFrame = rotation; | 192 _rotateFrame = rotation; |
201 return 0; | 193 return 0; |
202 } | 194 } |
203 | 195 |
204 bool VideoCaptureImpl::SetApplyRotation(bool enable) { | 196 bool VideoCaptureImpl::SetApplyRotation(bool enable) { |
205 // We can't take any lock here as it'll cause deadlock with IncomingFrame. | 197 // We can't take any lock here as it'll cause deadlock with IncomingFrame. |
206 | 198 |
207 // The effect of this is that the last caller wins. | 199 // The effect of this is that the last caller wins. |
208 apply_rotation_ = enable; | 200 apply_rotation_ = enable; |
209 return true; | 201 return true; |
210 } | 202 } |
211 | 203 |
212 void VideoCaptureImpl::UpdateFrameCount() | 204 void VideoCaptureImpl::UpdateFrameCount() { |
213 { | 205 if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0) { |
214 if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0) | 206 // first no shift |
215 { | 207 } else { |
216 // first no shift | 208 // shift |
| 209 for (int i = (kFrameRateCountHistorySize - 2); i >= 0; i--) { |
| 210 _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i]; |
217 } | 211 } |
218 else | 212 } |
219 { | 213 _incomingFrameTimesNanos[0] = rtc::TimeNanos(); |
220 // shift | |
221 for (int i = (kFrameRateCountHistorySize - 2); i >= 0; i--) | |
222 { | |
223 _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i]; | |
224 } | |
225 } | |
226 _incomingFrameTimesNanos[0] = rtc::TimeNanos(); | |
227 } | 214 } |
228 | 215 |
229 uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns) | 216 uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns) { |
230 { | 217 int32_t num = 0; |
231 int32_t num = 0; | 218 int32_t nrOfFrames = 0; |
232 int32_t nrOfFrames = 0; | 219 for (num = 1; num < (kFrameRateCountHistorySize - 1); num++) { |
233 for (num = 1; num < (kFrameRateCountHistorySize - 1); num++) | 220 if (_incomingFrameTimesNanos[num] <= 0 || |
234 { | 221 (now_ns - _incomingFrameTimesNanos[num]) / |
235 if (_incomingFrameTimesNanos[num] <= 0 || | 222 rtc::kNumNanosecsPerMillisec > |
236 (now_ns - _incomingFrameTimesNanos[num]) / | 223 kFrameRateHistoryWindowMs) { // don't use data older than 2sec |
237 rtc::kNumNanosecsPerMillisec > | 224 break; |
238 kFrameRateHistoryWindowMs) // don't use data older than 2sec | 225 } else { |
239 { | 226 nrOfFrames++; |
240 break; | |
241 } | |
242 else | |
243 { | |
244 nrOfFrames++; | |
245 } | |
246 } | 227 } |
247 if (num > 1) | 228 } |
248 { | 229 if (num > 1) { |
249 int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) / | 230 int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) / |
250 rtc::kNumNanosecsPerMillisec; | 231 rtc::kNumNanosecsPerMillisec; |
251 if (diff > 0) | 232 if (diff > 0) { |
252 { | 233 return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f); |
253 return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f); | |
254 } | |
255 } | 234 } |
| 235 } |
256 | 236 |
257 return nrOfFrames; | 237 return nrOfFrames; |
258 } | 238 } |
259 } // namespace videocapturemodule | 239 } // namespace videocapturemodule |
260 } // namespace webrtc | 240 } // namespace webrtc |
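
Note on the external-capture path reformatted above: Create() returns the module and hands back its VideoCaptureExternal pointer, platform code pushes raw buffers into IncomingFrame(), and the converted I420 frame reaches the registered sink through DeliverCapturedFrame() / OnFrame(). A minimal caller sketch, built only from the interfaces visible in this file; the include path, the 640x480 size, and the zero-filled buffer are illustrative assumptions, not part of this change:

  #include <cstdint>
  #include <vector>

  #include "webrtc/modules/video_capture/video_capture_impl.h"  // assumed path

  void PushOneFrame(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
    // Create() exposes the implementation through the external-capture interface.
    webrtc::VideoCaptureExternal* external = nullptr;
    rtc::scoped_refptr<webrtc::VideoCaptureModule> module =
        webrtc::videocapturemodule::VideoCaptureImpl::Create(external);
    module->RegisterCaptureDataCallback(sink);

    webrtc::VideoCaptureCapability capability;
    capability.width = 640;
    capability.height = 480;
    capability.maxFPS = 30;
    capability.rawType = webrtc::kVideoI420;

    // IncomingFrame() checks the length against CalcBufferSize(); for I420 that
    // is width * height * 3 / 2 bytes (full-size Y plus quarter-size U and V).
    std::vector<uint8_t> i420(640 * 480 * 3 / 2, 0);

    // Converts into an I420Buffer and delivers the frame to the sink's OnFrame().
    external->IncomingFrame(i420.data(), i420.size(), capability);
  }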
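On the frame-rate side, UpdateFrameCount() keeps a sliding history of incoming-frame timestamps and CalculateFrameRate() counts the entries younger than the 2-second window, then scales by the elapsed time. A quick worked check of the rounding in the return expression, using assumed example values rather than anything from this CL:

  // Assume 29 frames land inside the window and span diff = 967 ms:
  // uint32_t((29 * 1000.0f / 967) + 0.5f) == 30   // reported frames per second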