| OLD | NEW |
| (Empty) |
| 1 /* | |
| 2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. | |
| 3 * | |
| 4 * Use of this source code is governed by a BSD-style license | |
| 5 * that can be found in the LICENSE file in the root of the source | |
| 6 * tree. An additional intellectual property rights grant can be found | |
| 7 * in the file PATENTS. All contributing project authors may | |
| 8 * be found in the AUTHORS file in the root of the source tree. | |
| 9 */ | |
| 10 | |
| 11 #include "webrtc/modules/video_capture/android/video_capture_android.h" | |
| 12 | |
| 13 #include "webrtc/base/common.h" | |
| 14 #include "webrtc/modules/utility/interface/helpers_android.h" | |
| 15 #include "webrtc/modules/video_capture/android/device_info_android.h" | |
| 16 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h" | |
| 17 #include "webrtc/system_wrappers/interface/logcat_trace_context.h" | |
| 18 #include "webrtc/system_wrappers/interface/logging.h" | |
| 19 #include "webrtc/system_wrappers/interface/ref_count.h" | |
| 20 #include "webrtc/system_wrappers/interface/trace.h" | |
| 21 | |
// Process-wide JNI state, installed/torn down by SetCaptureAndroidVM() below.
static JavaVM* g_jvm = NULL;
static jclass g_java_capturer_class = NULL;  // VideoCaptureAndroid.class.
static jobject g_context = NULL;  // Owned android.content.Context.
| 25 | |
| 26 namespace webrtc { | |
| 27 | |
// Called by Java to get the global application context.
// |g_context| is set by SetCaptureAndroidVM(); the assert catches calls made
// before the VM/context have been registered.
jobject JNICALL GetContext(JNIEnv* env, jclass) {
  assert(g_context);
  return g_context;
}
| 33 | |
| 34 // Called by Java when the camera has a new frame to deliver. | |
| 35 void JNICALL ProvideCameraFrame( | |
| 36 JNIEnv* env, | |
| 37 jobject, | |
| 38 jbyteArray javaCameraFrame, | |
| 39 jint length, | |
| 40 jint rotation, | |
| 41 jlong timeStamp, | |
| 42 jlong context) { | |
| 43 webrtc::videocapturemodule::VideoCaptureAndroid* captureModule = | |
| 44 reinterpret_cast<webrtc::videocapturemodule::VideoCaptureAndroid*>( | |
| 45 context); | |
| 46 jbyte* cameraFrame = env->GetByteArrayElements(javaCameraFrame, NULL); | |
| 47 captureModule->OnIncomingFrame( | |
| 48 reinterpret_cast<uint8_t*>(cameraFrame), length, rotation, 0); | |
| 49 env->ReleaseByteArrayElements(javaCameraFrame, cameraFrame, JNI_ABORT); | |
| 50 } | |
| 51 | |
| 52 int32_t SetCaptureAndroidVM(JavaVM* javaVM, jobject context) { | |
| 53 if (javaVM) { | |
| 54 assert(!g_jvm); | |
| 55 g_jvm = javaVM; | |
| 56 AttachThreadScoped ats(g_jvm); | |
| 57 g_context = ats.env()->NewGlobalRef(context); | |
| 58 | |
| 59 videocapturemodule::DeviceInfoAndroid::Initialize(ats.env()); | |
| 60 | |
| 61 jclass j_capture_class = | |
| 62 ats.env()->FindClass("org/webrtc/videoengine/VideoCaptureAndroid"); | |
| 63 assert(j_capture_class); | |
| 64 g_java_capturer_class = | |
| 65 reinterpret_cast<jclass>(ats.env()->NewGlobalRef(j_capture_class)); | |
| 66 assert(g_java_capturer_class); | |
| 67 | |
| 68 JNINativeMethod native_methods[] = { | |
| 69 {"GetContext", | |
| 70 "()Landroid/content/Context;", | |
| 71 reinterpret_cast<void*>(&GetContext)}, | |
| 72 {"ProvideCameraFrame", | |
| 73 "([BIIJJ)V", | |
| 74 reinterpret_cast<void*>(&ProvideCameraFrame)}}; | |
| 75 if (ats.env()->RegisterNatives(g_java_capturer_class, | |
| 76 native_methods, 2) != 0) | |
| 77 assert(false); | |
| 78 } else { | |
| 79 if (g_jvm) { | |
| 80 AttachThreadScoped ats(g_jvm); | |
| 81 ats.env()->UnregisterNatives(g_java_capturer_class); | |
| 82 ats.env()->DeleteGlobalRef(g_java_capturer_class); | |
| 83 g_java_capturer_class = NULL; | |
| 84 ats.env()->DeleteGlobalRef(g_context); | |
| 85 g_context = NULL; | |
| 86 videocapturemodule::DeviceInfoAndroid::DeInitialize(); | |
| 87 g_jvm = NULL; | |
| 88 } | |
| 89 } | |
| 90 | |
| 91 return 0; | |
| 92 } | |
| 93 | |
| 94 namespace videocapturemodule { | |
| 95 | |
| 96 VideoCaptureModule* VideoCaptureImpl::Create( | |
| 97 const int32_t id, | |
| 98 const char* deviceUniqueIdUTF8) { | |
| 99 RefCountImpl<videocapturemodule::VideoCaptureAndroid>* implementation = | |
| 100 new RefCountImpl<videocapturemodule::VideoCaptureAndroid>(id); | |
| 101 if (implementation->Init(id, deviceUniqueIdUTF8) != 0) { | |
| 102 delete implementation; | |
| 103 implementation = NULL; | |
| 104 } | |
| 105 return implementation; | |
| 106 } | |
| 107 | |
| 108 int32_t VideoCaptureAndroid::OnIncomingFrame(uint8_t* videoFrame, | |
| 109 size_t videoFrameLength, | |
| 110 int32_t degrees, | |
| 111 int64_t captureTime) { | |
| 112 if (!_captureStarted) | |
| 113 return 0; | |
| 114 VideoRotation current_rotation = | |
| 115 (degrees <= 45 || degrees > 315) ? kVideoRotation_0 : | |
| 116 (degrees > 45 && degrees <= 135) ? kVideoRotation_90 : | |
| 117 (degrees > 135 && degrees <= 225) ? kVideoRotation_180 : | |
| 118 (degrees > 225 && degrees <= 315) ? kVideoRotation_270 : | |
| 119 kVideoRotation_0; // Impossible. | |
| 120 if (_rotation != current_rotation) { | |
| 121 LOG(LS_INFO) << "New camera rotation: " << degrees; | |
| 122 _rotation = current_rotation; | |
| 123 int32_t status = VideoCaptureImpl::SetCaptureRotation(_rotation); | |
| 124 if (status != 0) | |
| 125 return status; | |
| 126 } | |
| 127 return IncomingFrame( | |
| 128 videoFrame, videoFrameLength, _captureCapability, captureTime); | |
| 129 } | |
| 130 | |
// Constructs an uninitialized capturer; Init() must succeed before any
// capture call is made (the Java capturer object is created there).
VideoCaptureAndroid::VideoCaptureAndroid(const int32_t id)
    : VideoCaptureImpl(id),
      _deviceInfo(id),
      _jCapturer(NULL),
      _captureStarted(false) {
}
| 137 | |
| 138 int32_t VideoCaptureAndroid::Init(const int32_t id, | |
| 139 const char* deviceUniqueIdUTF8) { | |
| 140 const int nameLength = strlen(deviceUniqueIdUTF8); | |
| 141 if (nameLength >= kVideoCaptureUniqueNameLength) | |
| 142 return -1; | |
| 143 | |
| 144 // Store the device name | |
| 145 LOG(LS_INFO) << "VideoCaptureAndroid::Init: " << deviceUniqueIdUTF8; | |
| 146 size_t camera_id = 0; | |
| 147 if (!_deviceInfo.FindCameraIndex(deviceUniqueIdUTF8, &camera_id)) | |
| 148 return -1; | |
| 149 _deviceUniqueId = new char[nameLength + 1]; | |
| 150 memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1); | |
| 151 | |
| 152 AttachThreadScoped ats(g_jvm); | |
| 153 JNIEnv* env = ats.env(); | |
| 154 jmethodID ctor = env->GetMethodID(g_java_capturer_class, "<init>", "(IJ)V"); | |
| 155 assert(ctor); | |
| 156 jlong j_this = reinterpret_cast<intptr_t>(this); | |
| 157 _jCapturer = env->NewGlobalRef( | |
| 158 env->NewObject(g_java_capturer_class, ctor, camera_id, j_this)); | |
| 159 assert(_jCapturer); | |
| 160 _rotation = kVideoRotation_0; | |
| 161 return 0; | |
| 162 } | |
| 163 | |
VideoCaptureAndroid::~VideoCaptureAndroid() {
  // Ensure Java camera is released even if our caller didn't explicitly Stop.
  if (_captureStarted)
    StopCapture();
  // Drop our global ref so the Java capturer object can be collected.
  AttachThreadScoped ats(g_jvm);
  ats.env()->DeleteGlobalRef(_jCapturer);
}
| 171 | |
| 172 int32_t VideoCaptureAndroid::StartCapture( | |
| 173 const VideoCaptureCapability& capability) { | |
| 174 CriticalSectionScoped cs(&_apiCs); | |
| 175 AttachThreadScoped ats(g_jvm); | |
| 176 JNIEnv* env = ats.env(); | |
| 177 | |
| 178 if (_deviceInfo.GetBestMatchedCapability( | |
| 179 _deviceUniqueId, capability, _captureCapability) < 0) { | |
| 180 WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, | |
| 181 "%s: GetBestMatchedCapability failed: %dx%d", | |
| 182 __FUNCTION__, capability.width, capability.height); | |
| 183 return -1; | |
| 184 } | |
| 185 | |
| 186 _captureDelay = _captureCapability.expectedCaptureDelay; | |
| 187 | |
| 188 jmethodID j_start = | |
| 189 env->GetMethodID(g_java_capturer_class, "startCapture", "(IIII)Z"); | |
| 190 assert(j_start); | |
| 191 int min_mfps = 0; | |
| 192 int max_mfps = 0; | |
| 193 _deviceInfo.GetMFpsRange(_deviceUniqueId, _captureCapability.maxFPS, | |
| 194 &min_mfps, &max_mfps); | |
| 195 bool started = env->CallBooleanMethod(_jCapturer, j_start, | |
| 196 _captureCapability.width, | |
| 197 _captureCapability.height, | |
| 198 min_mfps, max_mfps); | |
| 199 if (started) { | |
| 200 _requestedCapability = capability; | |
| 201 _captureStarted = true; | |
| 202 } | |
| 203 return started ? 0 : -1; | |
| 204 } | |
| 205 | |
| 206 int32_t VideoCaptureAndroid::StopCapture() { | |
| 207 _apiCs.Enter(); | |
| 208 AttachThreadScoped ats(g_jvm); | |
| 209 JNIEnv* env = ats.env(); | |
| 210 | |
| 211 memset(&_requestedCapability, 0, sizeof(_requestedCapability)); | |
| 212 memset(&_captureCapability, 0, sizeof(_captureCapability)); | |
| 213 _captureStarted = false; | |
| 214 // Exit critical section to avoid blocking camera thread inside | |
| 215 // onIncomingFrame() call. | |
| 216 _apiCs.Leave(); | |
| 217 | |
| 218 jmethodID j_stop = | |
| 219 env->GetMethodID(g_java_capturer_class, "stopCapture", "()Z"); | |
| 220 return env->CallBooleanMethod(_jCapturer, j_stop) ? 0 : -1; | |
| 221 } | |
| 222 | |
// Returns true while capture is running; guarded by |_apiCs|.
bool VideoCaptureAndroid::CaptureStarted() {
  CriticalSectionScoped cs(&_apiCs);
  return _captureStarted;
}
| 227 | |
// Reports the capability the caller originally requested in StartCapture()
// (not the best-matched capability actually in use); guarded by |_apiCs|.
int32_t VideoCaptureAndroid::CaptureSettings(
    VideoCaptureCapability& settings) {
  CriticalSectionScoped cs(&_apiCs);
  settings = _requestedCapability;
  return 0;
}
| 234 | |
| 235 int32_t VideoCaptureAndroid::SetCaptureRotation(VideoRotation rotation) { | |
| 236 int32_t status = VideoCaptureImpl::SetCaptureRotation(rotation); | |
| 237 if (status != 0) | |
| 238 return status; | |
| 239 | |
| 240 AttachThreadScoped ats(g_jvm); | |
| 241 JNIEnv* env = ats.env(); | |
| 242 | |
| 243 jmethodID j_spr = | |
| 244 env->GetMethodID(g_java_capturer_class, "setPreviewRotation", "(I)V"); | |
| 245 assert(j_spr); | |
| 246 int rotation_degrees; | |
| 247 if (RotationInDegrees(rotation, &rotation_degrees) != 0) { | |
| 248 assert(false); | |
| 249 } | |
| 250 env->CallVoidMethod(_jCapturer, j_spr, rotation_degrees); | |
| 251 return 0; | |
| 252 } | |
| 253 | |
| 254 } // namespace videocapturemodule | |
| 255 } // namespace webrtc | |
| OLD | NEW |