OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 12 matching lines...) |
23 | 23 |
24 #include "webrtc/base/refcount.h" | 24 #include "webrtc/base/refcount.h" |
25 #include "webrtc/base/scoped_ref_ptr.h" | 25 #include "webrtc/base/scoped_ref_ptr.h" |
26 #include "webrtc/modules/video_capture/linux/video_capture_linux.h" | 26 #include "webrtc/modules/video_capture/linux/video_capture_linux.h" |
27 #include "webrtc/system_wrappers/include/critical_section_wrapper.h" | 27 #include "webrtc/system_wrappers/include/critical_section_wrapper.h" |
28 #include "webrtc/system_wrappers/include/trace.h" | 28 #include "webrtc/system_wrappers/include/trace.h" |
29 | 29 |
30 namespace webrtc { | 30 namespace webrtc { |
31 namespace videocapturemodule { | 31 namespace videocapturemodule { |
32 rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create( | 32 rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create( |
33 const int32_t id, | |
34 const char* deviceUniqueId) { | 33 const char* deviceUniqueId) { |
35 rtc::scoped_refptr<VideoCaptureModuleV4L2> implementation( | 34 rtc::scoped_refptr<VideoCaptureModuleV4L2> implementation( |
36 new rtc::RefCountedObject<VideoCaptureModuleV4L2>(id)); | 35 new rtc::RefCountedObject<VideoCaptureModuleV4L2>()); |
37 | 36 |
38 if (implementation->Init(deviceUniqueId) != 0) | 37 if (implementation->Init(deviceUniqueId) != 0) |
39 return nullptr; | 38 return nullptr; |
40 | 39 |
41 return implementation; | 40 return implementation; |
42 } | 41 } |
43 | 42 |
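For context, a minimal caller sketch against the updated factory above; the unique id string is only a placeholder, and in practice it comes from the capture module's device enumeration:

    // Hypothetical caller: note there is no longer an id argument.
    rtc::scoped_refptr<webrtc::VideoCaptureModule> module =
        webrtc::videocapturemodule::VideoCaptureImpl::Create("<device-unique-id>");
    if (!module) {
      // Init() failed for the given unique id; the factory returns nullptr.
    }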
44 VideoCaptureModuleV4L2::VideoCaptureModuleV4L2(const int32_t id) | 43 VideoCaptureModuleV4L2::VideoCaptureModuleV4L2() |
45 : VideoCaptureImpl(id), | 44 : VideoCaptureImpl(), |
46 _captureCritSect(CriticalSectionWrapper::CreateCriticalSection()), | 45 _captureCritSect(CriticalSectionWrapper::CreateCriticalSection()), |
47 _deviceId(-1), | 46 _deviceId(-1), |
48 _deviceFd(-1), | 47 _deviceFd(-1), |
49 _buffersAllocatedByDevice(-1), | 48 _buffersAllocatedByDevice(-1), |
50 _currentWidth(-1), | 49 _currentWidth(-1), |
51 _currentHeight(-1), | 50 _currentHeight(-1), |
52 _currentFrameRate(-1), | 51 _currentFrameRate(-1), |
53 _captureStarted(false), | 52 _captureStarted(false), |
54 _captureVideoType(kVideoI420), | 53 _captureVideoType(kVideoI420), |
55 _pool(NULL) | 54 _pool(NULL) |
(...skipping 34 matching lines...) |
90 found = true; | 89 found = true; |
91 break; // fd matches with device unique id supplied | 90 break; // fd matches with device unique id supplied |
92 } | 91 } |
93 } | 92 } |
94 } | 93 } |
95 close(fd); // close since this is not the matching device | 94 close(fd); // close since this is not the matching device |
96 } | 95 } |
97 } | 96 } |
98 if (!found) | 97 if (!found) |
99 { | 98 { |
100 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "no matching device found"); | 99 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, |
| 100 0, "no matching device found"); |
101 return -1; | 101 return -1; |
102 } | 102 } |
103 _deviceId = n; //store the device id | 103 _deviceId = n; //store the device id |
104 return 0; | 104 return 0; |
105 } | 105 } |
106 | 106 |
107 VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() | 107 VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() |
108 { | 108 { |
109 StopCapture(); | 109 StopCapture(); |
110 if (_captureCritSect) | 110 if (_captureCritSect) |
(...skipping 21 matching lines...) |
132 } | 132 } |
133 } | 133 } |
134 | 134 |
135 CriticalSectionScoped cs(_captureCritSect); | 135 CriticalSectionScoped cs(_captureCritSect); |
135 136 // first open the /dev/video device | 135 136 // first open the /dev/video device |
137 char device[20]; | 137 char device[20]; |
138 sprintf(device, "/dev/video%d", (int) _deviceId); | 138 sprintf(device, "/dev/video%d", (int) _deviceId); |
139 | 139 |
140 if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) | 140 if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) |
141 { | 141 { |
142 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, | 142 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, |
143 "error in opening %s errono = %d", device, errno); | 143 "error in opening %s errono = %d", device, errno); |
144 return -1; | 144 return -1; |
145 } | 145 } |
146 | 146 |
147 // Supported video formats in preferred order. | 147 // Supported video formats in preferred order. |
148 // If the requested resolution is larger than VGA, we prefer MJPEG. Go for | 148 // If the requested resolution is larger than VGA, we prefer MJPEG. Go for |
149 // I420 otherwise. | 149 // I420 otherwise. |
150 const int nFormats = 5; | 150 const int nFormats = 5; |
151 unsigned int fmts[nFormats]; | 151 unsigned int fmts[nFormats]; |
152 if (capability.width > 640 || capability.height > 480) { | 152 if (capability.width > 640 || capability.height > 480) { |
153 fmts[0] = V4L2_PIX_FMT_MJPEG; | 153 fmts[0] = V4L2_PIX_FMT_MJPEG; |
154 fmts[1] = V4L2_PIX_FMT_YUV420; | 154 fmts[1] = V4L2_PIX_FMT_YUV420; |
155 fmts[2] = V4L2_PIX_FMT_YUYV; | 155 fmts[2] = V4L2_PIX_FMT_YUYV; |
156 fmts[3] = V4L2_PIX_FMT_UYVY; | 156 fmts[3] = V4L2_PIX_FMT_UYVY; |
157 fmts[4] = V4L2_PIX_FMT_JPEG; | 157 fmts[4] = V4L2_PIX_FMT_JPEG; |
158 } else { | 158 } else { |
159 fmts[0] = V4L2_PIX_FMT_YUV420; | 159 fmts[0] = V4L2_PIX_FMT_YUV420; |
160 fmts[1] = V4L2_PIX_FMT_YUYV; | 160 fmts[1] = V4L2_PIX_FMT_YUYV; |
161 fmts[2] = V4L2_PIX_FMT_UYVY; | 161 fmts[2] = V4L2_PIX_FMT_UYVY; |
162 fmts[3] = V4L2_PIX_FMT_MJPEG; | 162 fmts[3] = V4L2_PIX_FMT_MJPEG; |
163 fmts[4] = V4L2_PIX_FMT_JPEG; | 163 fmts[4] = V4L2_PIX_FMT_JPEG; |
164 } | 164 } |
165 | 165 |
166 // Enumerate image formats. | 166 // Enumerate image formats. |
167 struct v4l2_fmtdesc fmt; | 167 struct v4l2_fmtdesc fmt; |
168 int fmtsIdx = nFormats; | 168 int fmtsIdx = nFormats; |
169 memset(&fmt, 0, sizeof(fmt)); | 169 memset(&fmt, 0, sizeof(fmt)); |
170 fmt.index = 0; | 170 fmt.index = 0; |
171 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 171 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
172 WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, | 172 WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, 0, |
173 "Video Capture enumerats supported image formats:"); | 173 "Video Capture enumerats supported image formats:"); |
174 while (ioctl(_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) { | 174 while (ioctl(_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) { |
175 WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, | 175 WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, 0, |
176 " { pixelformat = %c%c%c%c, description = '%s' }", | 176 " { pixelformat = %c%c%c%c, description = '%s' }", |
177 fmt.pixelformat & 0xFF, (fmt.pixelformat>>8) & 0xFF, | 177 fmt.pixelformat & 0xFF, (fmt.pixelformat>>8) & 0xFF, |
178 (fmt.pixelformat>>16) & 0xFF, (fmt.pixelformat>>24) & 0xFF, | 178 (fmt.pixelformat>>16) & 0xFF, (fmt.pixelformat>>24) & 0xFF, |
179 fmt.description); | 179 fmt.description); |
180 // Match the preferred order. | 180 // Match the preferred order. |
181 for (int i = 0; i < nFormats; i++) { | 181 for (int i = 0; i < nFormats; i++) { |
182 if (fmt.pixelformat == fmts[i] && i < fmtsIdx) | 182 if (fmt.pixelformat == fmts[i] && i < fmtsIdx) |
183 fmtsIdx = i; | 183 fmtsIdx = i; |
184 } | 184 } |
185 // Keep enumerating. | 185 // Keep enumerating. |
186 fmt.index++; | 186 fmt.index++; |
187 } | 187 } |
188 | 188 |
189 if (fmtsIdx == nFormats) | 189 if (fmtsIdx == nFormats) |
190 { | 190 { |
191 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, | 191 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, |
192 "no supporting video formats found"); | 192 "no supporting video formats found"); |
193 return -1; | 193 return -1; |
194 } else { | 194 } else { |
195 WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, | 195 WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, 0, |
196 "We prefer format %c%c%c%c", | 196 "We prefer format %c%c%c%c", |
197 fmts[fmtsIdx] & 0xFF, (fmts[fmtsIdx]>>8) & 0xFF, | 197 fmts[fmtsIdx] & 0xFF, (fmts[fmtsIdx]>>8) & 0xFF, |
198 (fmts[fmtsIdx]>>16) & 0xFF, (fmts[fmtsIdx]>>24) & 0xFF); | 198 (fmts[fmtsIdx]>>16) & 0xFF, (fmts[fmtsIdx]>>24) & 0xFF); |
199 } | 199 } |
200 | 200 |
201 struct v4l2_format video_fmt; | 201 struct v4l2_format video_fmt; |
202 memset(&video_fmt, 0, sizeof(struct v4l2_format)); | 202 memset(&video_fmt, 0, sizeof(struct v4l2_format)); |
203 video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 203 video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
204 video_fmt.fmt.pix.sizeimage = 0; | 204 video_fmt.fmt.pix.sizeimage = 0; |
205 video_fmt.fmt.pix.width = capability.width; | 205 video_fmt.fmt.pix.width = capability.width; |
206 video_fmt.fmt.pix.height = capability.height; | 206 video_fmt.fmt.pix.height = capability.height; |
207 video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx]; | 207 video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx]; |
208 | 208 |
209 if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) | 209 if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) |
210 _captureVideoType = kVideoYUY2; | 210 _captureVideoType = kVideoYUY2; |
211 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420) | 211 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420) |
212 _captureVideoType = kVideoI420; | 212 _captureVideoType = kVideoI420; |
213 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY) | 213 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY) |
214 _captureVideoType = kVideoUYVY; | 214 _captureVideoType = kVideoUYVY; |
215 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG || | 215 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG || |
216 video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG) | 216 video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG) |
217 _captureVideoType = kVideoMJPEG; | 217 _captureVideoType = kVideoMJPEG; |
218 | 218 |
219 //set format and frame size now | 219 //set format and frame size now |
220 if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) | 220 if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) |
221 { | 221 { |
222 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, | 222 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, |
223 "error in VIDIOC_S_FMT, errno = %d", errno); | 223 "error in VIDIOC_S_FMT, errno = %d", errno); |
224 return -1; | 224 return -1; |
225 } | 225 } |
226 | 226 |
227 // initialize current width and height | 227 // initialize current width and height |
228 _currentWidth = video_fmt.fmt.pix.width; | 228 _currentWidth = video_fmt.fmt.pix.width; |
229 _currentHeight = video_fmt.fmt.pix.height; | 229 _currentHeight = video_fmt.fmt.pix.height; |
230 _captureDelay = 120; | 230 _captureDelay = 120; |
231 | 231 |
232 // Try to set the frame rate; first check the driver capability. | 232 // Try to set the frame rate; first check the driver capability. |
233 bool driver_framerate_support = true; | 233 bool driver_framerate_support = true; |
234 struct v4l2_streamparm streamparms; | 234 struct v4l2_streamparm streamparms; |
235 memset(&streamparms, 0, sizeof(streamparms)); | 235 memset(&streamparms, 0, sizeof(streamparms)); |
236 streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 236 streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
237 if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) { | 237 if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) { |
238 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, | 238 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, |
239 "error in VIDIOC_G_PARM errno = %d", errno); | 239 "error in VIDIOC_G_PARM errno = %d", errno); |
240 driver_framerate_support = false; | 240 driver_framerate_support = false; |
241 // continue | 241 // continue |
242 } else { | 242 } else { |
243 // Check whether the V4L2_CAP_TIMEPERFRAME capability flag is set. | 243 // Check whether the V4L2_CAP_TIMEPERFRAME capability flag is set. |
244 if (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { | 244 if (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { |
245 // driver supports the feature. Set required framerate. | 245 // driver supports the feature. Set required framerate. |
246 memset(&streamparms, 0, sizeof(streamparms)); | 246 memset(&streamparms, 0, sizeof(streamparms)); |
247 streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 247 streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
248 streamparms.parm.capture.timeperframe.numerator = 1; | 248 streamparms.parm.capture.timeperframe.numerator = 1; |
249 streamparms.parm.capture.timeperframe.denominator = capability.maxFPS; | 249 streamparms.parm.capture.timeperframe.denominator = capability.maxFPS; |
250 if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) { | 250 if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) { |
251 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, | 251 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, |
252 "Failed to set the framerate. errno=%d", errno); | 252 "Failed to set the framerate. errno=%d", errno); |
253 driver_framerate_support = false; | 253 driver_framerate_support = false; |
254 } else { | 254 } else { |
255 _currentFrameRate = capability.maxFPS; | 255 _currentFrameRate = capability.maxFPS; |
256 } | 256 } |
257 } | 257 } |
258 } | 258 } |
259 // If the driver doesn't support frame rate control, hardcode a value | 259 // If the driver doesn't support frame rate control, hardcode a value |
260 // based on the frame size. | 260 // based on the frame size. |
261 if (!driver_framerate_support) { | 261 if (!driver_framerate_support) { |
262 if(_currentWidth >= 800 && _captureVideoType != kVideoMJPEG) { | 262 if(_currentWidth >= 800 && _captureVideoType != kVideoMJPEG) { |
263 _currentFrameRate = 15; | 263 _currentFrameRate = 15; |
264 } else { | 264 } else { |
265 _currentFrameRate = 30; | 265 _currentFrameRate = 30; |
266 } | 266 } |
267 } | 267 } |
268 | 268 |
269 if (!AllocateVideoBuffers()) | 269 if (!AllocateVideoBuffers()) |
270 { | 270 { |
271 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, | 271 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, |
272 "failed to allocate video capture buffers"); | 272 "failed to allocate video capture buffers"); |
273 return -1; | 273 return -1; |
274 } | 274 } |
275 | 275 |
276 //start capture thread; | 276 //start capture thread; |
277 if (!_captureThread) | 277 if (!_captureThread) |
278 { | 278 { |
279 _captureThread.reset(new rtc::PlatformThread( | 279 _captureThread.reset(new rtc::PlatformThread( |
280 VideoCaptureModuleV4L2::CaptureThread, this, "CaptureThread")); | 280 VideoCaptureModuleV4L2::CaptureThread, this, "CaptureThread")); |
281 _captureThread->Start(); | 281 _captureThread->Start(); |
282 _captureThread->SetPriority(rtc::kHighPriority); | 282 _captureThread->SetPriority(rtc::kHighPriority); |
283 } | 283 } |
284 | 284 |
285 // Needed to start UVC camera - from the uvcview application | 285 // Needed to start UVC camera - from the uvcview application |
286 enum v4l2_buf_type type; | 286 enum v4l2_buf_type type; |
287 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 287 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
288 if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1) | 288 if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1) |
289 { | 289 { |
290 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, | 290 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, |
291 "Failed to turn on stream"); | 291 "Failed to turn on stream"); |
292 return -1; | 292 return -1; |
293 } | 293 } |
294 | 294 |
295 _captureStarted = true; | 295 _captureStarted = true; |
296 return 0; | 296 return 0; |
297 } | 297 } |
298 | 298 |
299 int32_t VideoCaptureModuleV4L2::StopCapture() | 299 int32_t VideoCaptureModuleV4L2::StopCapture() |
300 { | 300 { |
(...skipping 22 matching lines...) |
323 { | 323 { |
324 struct v4l2_requestbuffers rbuffer; | 324 struct v4l2_requestbuffers rbuffer; |
325 memset(&rbuffer, 0, sizeof(v4l2_requestbuffers)); | 325 memset(&rbuffer, 0, sizeof(v4l2_requestbuffers)); |
326 | 326 |
327 rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 327 rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
328 rbuffer.memory = V4L2_MEMORY_MMAP; | 328 rbuffer.memory = V4L2_MEMORY_MMAP; |
329 rbuffer.count = kNoOfV4L2Bufffers; | 329 rbuffer.count = kNoOfV4L2Bufffers; |
330 | 330 |
331 if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0) | 331 if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0) |
332 { | 332 { |
333 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, | 333 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, |
334 "Could not get buffers from device. errno = %d", errno); | 334 "Could not get buffers from device. errno = %d", errno); |
335 return false; | 335 return false; |
336 } | 336 } |
337 | 337 |
338 if (rbuffer.count > kNoOfV4L2Bufffers) | 338 if (rbuffer.count > kNoOfV4L2Bufffers) |
339 rbuffer.count = kNoOfV4L2Bufffers; | 339 rbuffer.count = kNoOfV4L2Bufffers; |
340 | 340 |
341 _buffersAllocatedByDevice = rbuffer.count; | 341 _buffersAllocatedByDevice = rbuffer.count; |
342 | 342 |
343 //Map the buffers | 343 //Map the buffers |
(...skipping 38 matching lines...) |
382 for (int i = 0; i < _buffersAllocatedByDevice; i++) | 382 for (int i = 0; i < _buffersAllocatedByDevice; i++) |
383 munmap(_pool[i].start, _pool[i].length); | 383 munmap(_pool[i].start, _pool[i].length); |
384 | 384 |
385 delete[] _pool; | 385 delete[] _pool; |
386 | 386 |
387 // turn off stream | 387 // turn off stream |
388 enum v4l2_buf_type type; | 388 enum v4l2_buf_type type; |
389 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 389 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
390 if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0) | 390 if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0) |
391 { | 391 { |
392 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, | 392 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, |
393 "VIDIOC_STREAMOFF error. errno: %d", errno); | 393 "VIDIOC_STREAMOFF error. errno: %d", errno); |
394 } | 394 } |
395 | 395 |
396 return true; | 396 return true; |
397 } | 397 } |
398 | 398 |
399 bool VideoCaptureModuleV4L2::CaptureStarted() | 399 bool VideoCaptureModuleV4L2::CaptureStarted() |
400 { | 400 { |
401 return _captureStarted; | 401 return _captureStarted; |
402 } | 402 } |
(...skipping 39 matching lines...) |
442 { | 442 { |
443 struct v4l2_buffer buf; | 443 struct v4l2_buffer buf; |
444 memset(&buf, 0, sizeof(struct v4l2_buffer)); | 444 memset(&buf, 0, sizeof(struct v4l2_buffer)); |
445 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 445 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
446 buf.memory = V4L2_MEMORY_MMAP; | 446 buf.memory = V4L2_MEMORY_MMAP; |
447 // dequeue a buffer - repeat until dequeued properly! | 447 // dequeue a buffer - repeat until dequeued properly! |
448 while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0) | 448 while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0) |
449 { | 449 { |
450 if (errno != EINTR) | 450 if (errno != EINTR) |
451 { | 451 { |
452 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, | 452 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, |
453 "could not sync on a buffer on device %s", strerror(errno)); | 453 "could not sync on a buffer on device %s", strerror(errno)); |
454 _captureCritSect->Leave(); | 454 _captureCritSect->Leave(); |
455 return true; | 455 return true; |
456 } | 456 } |
457 } | 457 } |
458 VideoCaptureCapability frameInfo; | 458 VideoCaptureCapability frameInfo; |
459 frameInfo.width = _currentWidth; | 459 frameInfo.width = _currentWidth; |
460 frameInfo.height = _currentHeight; | 460 frameInfo.height = _currentHeight; |
461 frameInfo.rawType = _captureVideoType; | 461 frameInfo.rawType = _captureVideoType; |
462 | 462 |
463 // convert to I420 if needed | 463 // convert to I420 if needed |
464 IncomingFrame((unsigned char*) _pool[buf.index].start, | 464 IncomingFrame((unsigned char*) _pool[buf.index].start, |
465 buf.bytesused, frameInfo); | 465 buf.bytesused, frameInfo); |
466 // enqueue the buffer again | 466 // enqueue the buffer again |
467 if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1) | 467 if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1) |
468 { | 468 { |
469 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id, | 469 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, 0, |
470 "Failed to enqueue capture buffer"); | 470 "Failed to enqueue capture buffer"); |
471 } | 471 } |
472 } | 472 } |
473 _captureCritSect->Leave(); | 473 _captureCritSect->Leave(); |
474 usleep(0); | 474 usleep(0); |
475 return true; | 475 return true; |
476 } | 476 } |
477 | 477 |
478 int32_t VideoCaptureModuleV4L2::CaptureSettings(VideoCaptureCapability& settings) | 478 int32_t VideoCaptureModuleV4L2::CaptureSettings(VideoCaptureCapability& settings) |
479 { | 479 { |
480 settings.width = _currentWidth; | 480 settings.width = _currentWidth; |
481 settings.height = _currentHeight; | 481 settings.height = _currentHeight; |
482 settings.maxFPS = _currentFrameRate; | 482 settings.maxFPS = _currentFrameRate; |
483 settings.rawType = _captureVideoType; | 483 settings.rawType = _captureVideoType; |
484 | 484 |
485 return 0; | 485 return 0; |
486 } | 486 } |
487 } // namespace videocapturemodule | 487 } // namespace videocapturemodule |
488 } // namespace webrtc | 488 } // namespace webrtc |