OLD | NEW |
1 /* | 1 /* |
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
(...skipping 30 matching lines...)
41 | 41 |
42 VideoCaptureModuleV4L2::VideoCaptureModuleV4L2() | 42 VideoCaptureModuleV4L2::VideoCaptureModuleV4L2() |
43 : VideoCaptureImpl(), | 43 : VideoCaptureImpl(), |
44 _deviceId(-1), | 44 _deviceId(-1), |
45 _deviceFd(-1), | 45 _deviceFd(-1), |
46 _buffersAllocatedByDevice(-1), | 46 _buffersAllocatedByDevice(-1), |
47 _currentWidth(-1), | 47 _currentWidth(-1), |
48 _currentHeight(-1), | 48 _currentHeight(-1), |
49 _currentFrameRate(-1), | 49 _currentFrameRate(-1), |
50 _captureStarted(false), | 50 _captureStarted(false), |
51 _captureVideoType(kVideoI420), | 51 _captureVideoType(VideoType::kI420), |
52 _pool(NULL) | 52 _pool(NULL) {} |
53 { | |
54 } | |
55 | 53 |
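Note: the constructor change above is part of the migration from the old unscoped kVideo* constants to the scoped webrtc::VideoType enum. A minimal sketch of the scoped enum assumed by the new code; only the enumerators used in this file are shown, the real definition lives elsewhere in WebRTC and may contain more entries:

    // Sketch only; not the authoritative WebRTC definition.
    enum class VideoType {
      kUnknown,  // assumed placeholder entry
      kI420,
      kYUY2,
      kUYVY,
      kMJPEG,
    };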
56 int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8) | 54 int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8) |
57 { | 55 { |
58 int len = strlen((const char*) deviceUniqueIdUTF8); | 56 int len = strlen((const char*) deviceUniqueIdUTF8); |
59 _deviceUniqueId = new (std::nothrow) char[len + 1]; | 57 _deviceUniqueId = new (std::nothrow) char[len + 1]; |
60 if (_deviceUniqueId) | 58 if (_deviceUniqueId) |
61 { | 59 { |
62 memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1); | 60 memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1); |
63 } | 61 } |
64 | 62 |
(...skipping 42 matching lines...)
107 StopCapture(); | 105 StopCapture(); |
108 if (_deviceFd != -1) | 106 if (_deviceFd != -1) |
109 close(_deviceFd); | 107 close(_deviceFd); |
110 } | 108 } |
111 | 109 |
112 int32_t VideoCaptureModuleV4L2::StartCapture( | 110 int32_t VideoCaptureModuleV4L2::StartCapture( |
113 const VideoCaptureCapability& capability) | 111 const VideoCaptureCapability& capability) |
114 { | 112 { |
115 if (_captureStarted) | 113 if (_captureStarted) |
116 { | 114 { |
117 if (capability.width == _currentWidth && | 115 if (capability.width == _currentWidth && |
118 capability.height == _currentHeight && | 116 capability.height == _currentHeight && |
119 _captureVideoType == capability.rawType) | 117 _captureVideoType == capability.videoType) { |
120 { | 118 return 0; |
121 return 0; | |
122 } | 119 } |
123 else | 120 else |
124 { | 121 { |
125 StopCapture(); | 122 StopCapture(); |
126 } | 123 } |
127 } | 124 } |
128 | 125 |
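Note: with the rename, callers request and compare formats through capability.videoType. A hypothetical caller sketch (the module pointer and the chosen values are illustrative, not taken from this CL):

    webrtc::VideoCaptureCapability capability;
    capability.width = 640;
    capability.height = 480;
    capability.maxFPS = 30;
    capability.videoType = webrtc::VideoType::kI420;
    // Re-issuing StartCapture with the current settings hits the early return
    // above and is a no-op; any other capability stops and restarts the stream.
    if (module->StartCapture(capability) != 0) {
      // handle failure
    }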
129 rtc::CritScope cs(&_captureCritSect); | 126 rtc::CritScope cs(&_captureCritSect); |
130 //first open /dev/video device | 127 //first open /dev/video device |
131 char device[20]; | 128 char device[20]; |
(...skipping 62 matching lines...)
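Note: the elided lines build the /dev/videoN path from _deviceId and open it. Roughly, using standard Linux APIs (a sketch, not the exact CL code; assumes <fcntl.h> and <cstdio>, and reuses the device[] buffer declared above):

    snprintf(device, sizeof(device), "/dev/video%d", _deviceId);
    _deviceFd = open(device, O_RDWR | O_NONBLOCK, 0);
    if (_deviceFd < 0)
      return -1;  // StartCapture reports the failure to the caller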
194 | 191 |
195 struct v4l2_format video_fmt; | 192 struct v4l2_format video_fmt; |
196 memset(&video_fmt, 0, sizeof(struct v4l2_format)); | 193 memset(&video_fmt, 0, sizeof(struct v4l2_format)); |
197 video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 194 video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
198 video_fmt.fmt.pix.sizeimage = 0; | 195 video_fmt.fmt.pix.sizeimage = 0; |
199 video_fmt.fmt.pix.width = capability.width; | 196 video_fmt.fmt.pix.width = capability.width; |
200 video_fmt.fmt.pix.height = capability.height; | 197 video_fmt.fmt.pix.height = capability.height; |
201 video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx]; | 198 video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx]; |
202 | 199 |
203 if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) | 200 if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) |
204 _captureVideoType = kVideoYUY2; | 201 _captureVideoType = VideoType::kYUY2; |
205 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420) | 202 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420) |
206 _captureVideoType = kVideoI420; | 203 _captureVideoType = VideoType::kI420; |
207 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY) | 204 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY) |
208 _captureVideoType = kVideoUYVY; | 205 _captureVideoType = VideoType::kUYVY; |
209 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG || | 206 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG || |
210 video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG) | 207 video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG) |
211 _captureVideoType = kVideoMJPEG; | 208 _captureVideoType = VideoType::kMJPEG; |
212 | 209 |
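Note: the if/else chain above maps the negotiated V4L2 FourCC to the scoped VideoType. The same mapping written as a standalone helper for reference (not part of this CL; the V4L2_PIX_FMT_* constants come from <linux/videodev2.h>):

    static webrtc::VideoType PixelFormatToVideoType(uint32_t pixelformat) {
      switch (pixelformat) {
        case V4L2_PIX_FMT_YUYV:    return webrtc::VideoType::kYUY2;
        case V4L2_PIX_FMT_YUV420:  return webrtc::VideoType::kI420;
        case V4L2_PIX_FMT_UYVY:    return webrtc::VideoType::kUYVY;
        case V4L2_PIX_FMT_MJPEG:
        case V4L2_PIX_FMT_JPEG:    return webrtc::VideoType::kMJPEG;
        default:                   return webrtc::VideoType::kI420;  // assumed fallback
      }
    }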
213 //set format and frame size now | 210 //set format and frame size now |
214 if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) | 211 if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) |
215 { | 212 { |
216 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, | 213 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, |
217 "error in VIDIOC_S_FMT, errno = %d", errno); | 214 "error in VIDIOC_S_FMT, errno = %d", errno); |
218 return -1; | 215 return -1; |
219 } | 216 } |
220 | 217 |
221 // initialize current width and height | 218 // initialize current width and height |
(...skipping 23 matching lines...)
245 "Failed to set the framerate. errno=%d", errno); | 242 "Failed to set the framerate. errno=%d", errno); |
246 driver_framerate_support = false; | 243 driver_framerate_support = false; |
247 } else { | 244 } else { |
248 _currentFrameRate = capability.maxFPS; | 245 _currentFrameRate = capability.maxFPS; |
249 } | 246 } |
250 } | 247 } |
251 } | 248 } |
252 // If driver doesn't support framerate control, need to hardcode. | 249 // If driver doesn't support framerate control, need to hardcode. |
253 // Hardcoding the value based on the frame size. | 250 // Hardcoding the value based on the frame size. |
254 if (!driver_framerate_support) { | 251 if (!driver_framerate_support) { |
255 if(_currentWidth >= 800 && _captureVideoType != kVideoMJPEG) { | 252 if (_currentWidth >= 800 && _captureVideoType != VideoType::kMJPEG) { |
256 _currentFrameRate = 15; | 253 _currentFrameRate = 15; |
257 } else { | 254 } else { |
258 _currentFrameRate = 30; | 255 _currentFrameRate = 30; |
259 } | 256 } |
260 } | 257 } |
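Note: the framerate negotiation elided above follows the standard V4L2 pattern: query VIDIOC_G_PARM, check V4L2_CAP_TIMEPERFRAME, then request the rate with VIDIOC_S_PARM. A generic sketch of that pattern (standard V4L2 usage, not copied from the CL; fd stands in for _deviceFd):

    struct v4l2_streamparm streamparms;
    memset(&streamparms, 0, sizeof(streamparms));
    streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_G_PARM, &streamparms) == 0 &&
        (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME)) {
      streamparms.parm.capture.timeperframe.numerator = 1;
      streamparms.parm.capture.timeperframe.denominator =
          static_cast<uint32_t>(capability.maxFPS);
      if (ioctl(fd, VIDIOC_S_PARM, &streamparms) < 0) {
        // Driver refused the rate: fall back to the hardcoded values above.
      }
    }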
261 | 258 |
262 if (!AllocateVideoBuffers()) | 259 if (!AllocateVideoBuffers()) |
263 { | 260 { |
264 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, | 261 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, |
265 "failed to allocate video capture buffers"); | 262 "failed to allocate video capture buffers"); |
(...skipping 174 matching lines...)
440 if (errno != EINTR) | 437 if (errno != EINTR) |
441 { | 438 { |
442 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, | 439 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, |
443 "could not sync on a buffer on device %s", strerror(e
rrno)); | 440 "could not sync on a buffer on device %s", strerror(e
rrno)); |
444 return true; | 441 return true; |
445 } | 442 } |
446 } | 443 } |
447 VideoCaptureCapability frameInfo; | 444 VideoCaptureCapability frameInfo; |
448 frameInfo.width = _currentWidth; | 445 frameInfo.width = _currentWidth; |
449 frameInfo.height = _currentHeight; | 446 frameInfo.height = _currentHeight; |
450 frameInfo.rawType = _captureVideoType; | 447 frameInfo.videoType = _captureVideoType; |
451 | 448 |
452 // convert to I420 if needed | 449 // convert to I420 if needed |
453 IncomingFrame((unsigned char*) _pool[buf.index].start, | 450 IncomingFrame((unsigned char*) _pool[buf.index].start, |
454 buf.bytesused, frameInfo); | 451 buf.bytesused, frameInfo); |
455 // enqueue the buffer again | 452 // enqueue the buffer again |
456 if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1) | 453 if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1) |
457 { | 454 { |
458 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, 0, | 455 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, 0, |
459 "Failed to enqueue capture buffer"); | 456 "Failed to enqueue capture buffer"); |
460 } | 457 } |
461 } | 458 } |
462 usleep(0); | 459 usleep(0); |
463 return true; | 460 return true; |
464 } | 461 } |
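Note: for context, the capture loop above is the usual memory-mapped V4L2 streaming cycle: dequeue a filled buffer, hand the bytes to IncomingFrame(), then queue the buffer back. A condensed sketch of one iteration (standard V4L2 calls; error handling trimmed):

    struct v4l2_buffer buf;
    memset(&buf, 0, sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    if (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) == 0) {
      IncomingFrame((unsigned char*) _pool[buf.index].start,
                    buf.bytesused, frameInfo);   // deliver to the base class
      ioctl(_deviceFd, VIDIOC_QBUF, &buf);       // return the buffer to the driver
    }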
465 | 462 |
466 int32_t VideoCaptureModuleV4L2::CaptureSettings(VideoCaptureCapability& settings) | 463 int32_t VideoCaptureModuleV4L2::CaptureSettings(VideoCaptureCapability& settings) |
467 { | 464 { |
468 settings.width = _currentWidth; | 465 settings.width = _currentWidth; |
469 settings.height = _currentHeight; | 466 settings.height = _currentHeight; |
470 settings.maxFPS = _currentFrameRate; | 467 settings.maxFPS = _currentFrameRate; |
471 settings.rawType=_captureVideoType; | 468 settings.videoType = _captureVideoType; |
472 | 469 |
473 return 0; | 470 return 0; |
474 } | 471 } |
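Note: a hypothetical usage sketch for the accessor above (module is an illustrative pointer to the running capture module):

    webrtc::VideoCaptureCapability settings;
    if (module->CaptureSettings(settings) == 0) {
      printf("capturing %dx%d @ %d fps\n",
             settings.width, settings.height, settings.maxFPS);
    }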
475 } // namespace videocapturemodule | 472 } // namespace videocapturemodule |
476 } // namespace webrtc | 473 } // namespace webrtc |