| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #include <errno.h> | 11 #include <errno.h> |
| 12 #include <fcntl.h> | 12 #include <fcntl.h> |
| 13 #include <linux/videodev2.h> | 13 #include <linux/videodev2.h> |
| 14 #include <stdio.h> | 14 #include <stdio.h> |
| 15 #include <string.h> | 15 #include <string.h> |
| 16 #include <sys/ioctl.h> | 16 #include <sys/ioctl.h> |
| 17 #include <sys/mman.h> | 17 #include <sys/mman.h> |
| 18 #include <sys/stat.h> | 18 #include <sys/stat.h> |
| 19 #include <unistd.h> | 19 #include <unistd.h> |
| 20 | 20 |
| 21 #include <iostream> | 21 #include <iostream> |
| 22 #include <new> | 22 #include <new> |
| 23 | 23 |
| 24 #include "webrtc/base/refcount.h" | 24 #include "webrtc/base/refcount.h" |
| 25 #include "webrtc/base/scoped_ref_ptr.h" | 25 #include "webrtc/base/scoped_ref_ptr.h" |
| 26 #include "webrtc/modules/video_capture/linux/video_capture_linux.h" | 26 #include "webrtc/modules/video_capture/linux/video_capture_linux.h" |
| 27 #include "webrtc/system_wrappers/include/critical_section_wrapper.h" | |
| 28 #include "webrtc/system_wrappers/include/trace.h" | 27 #include "webrtc/system_wrappers/include/trace.h" |
| 29 | 28 |
| 30 namespace webrtc { | 29 namespace webrtc { |
| 31 namespace videocapturemodule { | 30 namespace videocapturemodule { |
| 32 rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create( | 31 rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create( |
| 33 const char* deviceUniqueId) { | 32 const char* deviceUniqueId) { |
| 34 rtc::scoped_refptr<VideoCaptureModuleV4L2> implementation( | 33 rtc::scoped_refptr<VideoCaptureModuleV4L2> implementation( |
| 35 new rtc::RefCountedObject<VideoCaptureModuleV4L2>()); | 34 new rtc::RefCountedObject<VideoCaptureModuleV4L2>()); |
| 36 | 35 |
| 37 if (implementation->Init(deviceUniqueId) != 0) | 36 if (implementation->Init(deviceUniqueId) != 0) |
| 38 return nullptr; | 37 return nullptr; |
| 39 | 38 |
| 40 return implementation; | 39 return implementation; |
| 41 } | 40 } |
| 42 | 41 |
| 43 VideoCaptureModuleV4L2::VideoCaptureModuleV4L2() | 42 VideoCaptureModuleV4L2::VideoCaptureModuleV4L2() |
| 44 : VideoCaptureImpl(), | 43 : VideoCaptureImpl(), |
| 45 _captureCritSect(CriticalSectionWrapper::CreateCriticalSection()), | |
| 46 _deviceId(-1), | 44 _deviceId(-1), |
| 47 _deviceFd(-1), | 45 _deviceFd(-1), |
| 48 _buffersAllocatedByDevice(-1), | 46 _buffersAllocatedByDevice(-1), |
| 49 _currentWidth(-1), | 47 _currentWidth(-1), |
| 50 _currentHeight(-1), | 48 _currentHeight(-1), |
| 51 _currentFrameRate(-1), | 49 _currentFrameRate(-1), |
| 52 _captureStarted(false), | 50 _captureStarted(false), |
| 53 _captureVideoType(kVideoI420), | 51 _captureVideoType(kVideoI420), |
| 54 _pool(NULL) | 52 _pool(NULL) |
| 55 { | 53 { |
| (...skipping 44 matching lines...) |
| 100 0, "no matching device found"); | 98 0, "no matching device found"); |
| 101 return -1; | 99 return -1; |
| 102 } | 100 } |
| 103 _deviceId = n; //store the device id | 101 _deviceId = n; //store the device id |
| 104 return 0; | 102 return 0; |
| 105 } | 103 } |
| 106 | 104 |
| 107 VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() | 105 VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() |
| 108 { | 106 { |
| 109 StopCapture(); | 107 StopCapture(); |
| 110 if (_captureCritSect) | |
| 111 { | |
| 112 delete _captureCritSect; | |
| 113 } | |
| 114 if (_deviceFd != -1) | 108 if (_deviceFd != -1) |
| 115 close(_deviceFd); | 109 close(_deviceFd); |
| 116 } | 110 } |
| 117 | 111 |
| 118 int32_t VideoCaptureModuleV4L2::StartCapture( | 112 int32_t VideoCaptureModuleV4L2::StartCapture( |
| 119 const VideoCaptureCapability& capability) | 113 const VideoCaptureCapability& capability) |
| 120 { | 114 { |
| 121 if (_captureStarted) | 115 if (_captureStarted) |
| 122 { | 116 { |
| 123 if (capability.width == _currentWidth && | 117 if (capability.width == _currentWidth && |
| 124 capability.height == _currentHeight && | 118 capability.height == _currentHeight && |
| 125 _captureVideoType == capability.rawType) | 119 _captureVideoType == capability.rawType) |
| 126 { | 120 { |
| 127 return 0; | 121 return 0; |
| 128 } | 122 } |
| 129 else | 123 else |
| 130 { | 124 { |
| 131 StopCapture(); | 125 StopCapture(); |
| 132 } | 126 } |
| 133 } | 127 } |
| 134 | 128 |
| 135 CriticalSectionScoped cs(_captureCritSect); | 129 rtc::CritScope cs(&_captureCritSect); |
| 136 //first open /dev/video device | 130 //first open /dev/video device |
| 137 char device[20]; | 131 char device[20]; |
| 138 sprintf(device, "/dev/video%d", (int) _deviceId); | 132 sprintf(device, "/dev/video%d", (int) _deviceId); |
| 139 | 133 |
| 140 if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) | 134 if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) |
| 141 { | 135 { |
| 142 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, | 136 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, |
| 143 "error in opening %s errono = %d", device, errno); | 137 "error in opening %s errono = %d", device, errno); |
| 144 return -1; | 138 return -1; |
| 145 } | 139 } |
| (...skipping 150 matching lines...) |
| 296 } | 290 } |
| 297 | 291 |
| 298 int32_t VideoCaptureModuleV4L2::StopCapture() | 292 int32_t VideoCaptureModuleV4L2::StopCapture() |
| 299 { | 293 { |
| 300 if (_captureThread) { | 294 if (_captureThread) { |
| 301 // Make sure the capture thread stop stop using the critsect. | 295 // Make sure the capture thread stops using the critsect. |
| 302 _captureThread->Stop(); | 296 _captureThread->Stop(); |
| 303 _captureThread.reset(); | 297 _captureThread.reset(); |
| 304 } | 298 } |
| 305 | 299 |
| 306 CriticalSectionScoped cs(_captureCritSect); | 300 rtc::CritScope cs(&_captureCritSect); |
| 307 if (_captureStarted) | 301 if (_captureStarted) |
| 308 { | 302 { |
| 309 _captureStarted = false; | 303 _captureStarted = false; |
| 310 | 304 |
| 311 DeAllocateVideoBuffers(); | 305 DeAllocateVideoBuffers(); |
| 312 close(_deviceFd); | 306 close(_deviceFd); |
| 313 _deviceFd = -1; | 307 _deviceFd = -1; |
| 314 } | 308 } |
| 315 | 309 |
| 316 return 0; | 310 return 0; |
| (...skipping 86 matching lines...) |
| 403 bool VideoCaptureModuleV4L2::CaptureThread(void* obj) | 397 bool VideoCaptureModuleV4L2::CaptureThread(void* obj) |
| 404 { | 398 { |
| 405 return static_cast<VideoCaptureModuleV4L2*> (obj)->CaptureProcess(); | 399 return static_cast<VideoCaptureModuleV4L2*> (obj)->CaptureProcess(); |
| 406 } | 400 } |
| 407 bool VideoCaptureModuleV4L2::CaptureProcess() | 401 bool VideoCaptureModuleV4L2::CaptureProcess() |
| 408 { | 402 { |
| 409 int retVal = 0; | 403 int retVal = 0; |
| 410 fd_set rSet; | 404 fd_set rSet; |
| 411 struct timeval timeout; | 405 struct timeval timeout; |
| 412 | 406 |
| 413 _captureCritSect->Enter(); | 407 rtc::CritScope cs(&_captureCritSect); |
| 414 | 408 |
| 415 FD_ZERO(&rSet); | 409 FD_ZERO(&rSet); |
| 416 FD_SET(_deviceFd, &rSet); | 410 FD_SET(_deviceFd, &rSet); |
| 417 timeout.tv_sec = 1; | 411 timeout.tv_sec = 1; |
| 418 timeout.tv_usec = 0; | 412 timeout.tv_usec = 0; |
| 419 | 413 |
| 420 retVal = select(_deviceFd + 1, &rSet, NULL, NULL, &timeout); | 414 retVal = select(_deviceFd + 1, &rSet, NULL, NULL, &timeout); |
| 421 if (retVal < 0 && errno != EINTR) // continue if interrupted | 415 if (retVal < 0 && errno != EINTR) // continue if interrupted |
| 422 { | 416 { |
| 423 // select failed | 417 // select failed |
| 424 _captureCritSect->Leave(); | |
| 425 return false; | 418 return false; |
| 426 } | 419 } |
| 427 else if (retVal == 0) | 420 else if (retVal == 0) |
| 428 { | 421 { |
| 429 // select timed out | 422 // select timed out |
| 430 _captureCritSect->Leave(); | |
| 431 return true; | 423 return true; |
| 432 } | 424 } |
| 433 else if (!FD_ISSET(_deviceFd, &rSet)) | 425 else if (!FD_ISSET(_deviceFd, &rSet)) |
| 434 { | 426 { |
| 435 // not event on camera handle | 427 // no event on camera handle |
| 436 _captureCritSect->Leave(); | |
| 437 return true; | 428 return true; |
| 438 } | 429 } |
| 439 | 430 |
| 440 if (_captureStarted) | 431 if (_captureStarted) |
| 441 { | 432 { |
| 442 struct v4l2_buffer buf; | 433 struct v4l2_buffer buf; |
| 443 memset(&buf, 0, sizeof(struct v4l2_buffer)); | 434 memset(&buf, 0, sizeof(struct v4l2_buffer)); |
| 444 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 435 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
| 445 buf.memory = V4L2_MEMORY_MMAP; | 436 buf.memory = V4L2_MEMORY_MMAP; |
| 446 // dequeue a buffer - repeat until dequeued properly! | 437 // dequeue a buffer - repeat until dequeued properly! |
| 447 while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0) | 438 while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0) |
| 448 { | 439 { |
| 449 if (errno != EINTR) | 440 if (errno != EINTR) |
| 450 { | 441 { |
| 451 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, | 442 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0, |
| 452 "could not sync on a buffer on device %s", strerror(e
rrno)); | 443 "could not sync on a buffer on device %s", strerror(e
rrno)); |
| 453 _captureCritSect->Leave(); | |
| 454 return true; | 444 return true; |
| 455 } | 445 } |
| 456 } | 446 } |
| 457 VideoCaptureCapability frameInfo; | 447 VideoCaptureCapability frameInfo; |
| 458 frameInfo.width = _currentWidth; | 448 frameInfo.width = _currentWidth; |
| 459 frameInfo.height = _currentHeight; | 449 frameInfo.height = _currentHeight; |
| 460 frameInfo.rawType = _captureVideoType; | 450 frameInfo.rawType = _captureVideoType; |
| 461 | 451 |
| 462 // convert to to I420 if needed | 452 // convert to I420 if needed |
| 463 IncomingFrame((unsigned char*) _pool[buf.index].start, | 453 IncomingFrame((unsigned char*) _pool[buf.index].start, |
| 464 buf.bytesused, frameInfo); | 454 buf.bytesused, frameInfo); |
| 465 // enqueue the buffer again | 455 // enqueue the buffer again |
| 466 if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1) | 456 if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1) |
| 467 { | 457 { |
| 468 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, 0, | 458 WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, 0, |
| 469 "Failed to enqueue capture buffer"); | 459 "Failed to enqueue capture buffer"); |
| 470 } | 460 } |
| 471 } | 461 } |
| 472 _captureCritSect->Leave(); | |
| 473 usleep(0); | 462 usleep(0); |
| 474 return true; | 463 return true; |
| 475 } | 464 } |
| 476 | 465 |
| 477 int32_t VideoCaptureModuleV4L2::CaptureSettings(VideoCaptureCapability& settings) | 466 int32_t VideoCaptureModuleV4L2::CaptureSettings(VideoCaptureCapability& settings) |
| 478 { | 467 { |
| 479 settings.width = _currentWidth; | 468 settings.width = _currentWidth; |
| 480 settings.height = _currentHeight; | 469 settings.height = _currentHeight; |
| 481 settings.maxFPS = _currentFrameRate; | 470 settings.maxFPS = _currentFrameRate; |
| 482 settings.rawType=_captureVideoType; | 471 settings.rawType=_captureVideoType; |
| 483 | 472 |
| 484 return 0; | 473 return 0; |
| 485 } | 474 } |
| 486 } // namespace videocapturemodule | 475 } // namespace videocapturemodule |
| 487 } // namespace webrtc | 476 } // namespace webrtc |
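For context on the locking pattern this CL adopts: the old code kept a heap-allocated CriticalSectionWrapper and had to pair explicit Enter()/Leave() calls on every early-return path in CaptureProcess(), which is exactly the bookkeeping the diff above removes. The new code uses rtc::CriticalSection as a plain member together with the RAII guard rtc::CritScope, so the lock is released automatically on every return. Below is a minimal sketch of that pattern, assuming the rtc API from webrtc/base/criticalsection.h as used by the NEW side of this diff; the Grabber class and its members are made up for illustration only, and the corresponding member-declaration change would live in video_capture_linux.h, which is not shown here.

    #include "webrtc/base/criticalsection.h"

    class Grabber {
     public:
      bool Process() {
        rtc::CritScope cs(&crit_);  // locks on construction, unlocks on every return
        if (!started_)
          return true;              // early return: no explicit Leave() needed
        ++frames_;
        return true;
      }

     private:
      rtc::CriticalSection crit_;   // value member replaces CriticalSectionWrapper*
      bool started_ = false;
      int frames_ = 0;
    };

The same reasoning explains why the NEW column simply drops the _captureCritSect->Leave() calls before each return in CaptureProcess(): once the guard is a scoped object, those call sites no longer need any cleanup code.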