OLD | NEW |
1 /* | 1 /* |
2 * libjingle | 2 * libjingle |
3 * Copyright 2015 Google Inc. | 3 * Copyright 2015 Google Inc. |
4 * | 4 * |
5 * Redistribution and use in source and binary forms, with or without | 5 * Redistribution and use in source and binary forms, with or without |
6 * modification, are permitted provided that the following conditions are met: | 6 * modification, are permitted provided that the following conditions are met: |
7 * | 7 * |
8 * 1. Redistributions of source code must retain the above copyright notice, | 8 * 1. Redistributions of source code must retain the above copyright notice, |
9 * this list of conditions and the following disclaimer. | 9 * this list of conditions and the following disclaimer. |
10 * 2. Redistributions in binary form must reproduce the above copyright notice, | 10 * 2. Redistributions in binary form must reproduce the above copyright notice, |
(...skipping 10 matching lines...) |
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; | 21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; |
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, | 22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, |
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR | 23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR |
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF | 24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF |
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
26 * | 26 * |
27 */ | 27 */ |
28 | 28 |
29 #include "talk/app/webrtc/java/jni/androidvideocapturer_jni.h" | 29 #include "talk/app/webrtc/java/jni/androidvideocapturer_jni.h" |
30 #include "talk/app/webrtc/java/jni/classreferenceholder.h" | 30 #include "talk/app/webrtc/java/jni/classreferenceholder.h" |
| 31 #include "talk/app/webrtc/java/jni/native_handle_impl.h" |
31 #include "webrtc/base/bind.h" | 32 #include "webrtc/base/bind.h" |
32 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" | 33 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h" |
33 | 34 |
34 namespace webrtc_jni { | 35 namespace webrtc_jni { |
35 | 36 |
| 37 namespace { |
| 38 |
| 39 class CameraTextureBuffer : public webrtc::NativeHandleBuffer { |
| 40 public: |
| 41 CameraTextureBuffer(int width, int height, |
| 42 const NativeHandleImpl& native_handle, |
| 43 const rtc::Callback0<void>& no_longer_used) |
| 44 : webrtc::NativeHandleBuffer(&native_handle_, width, height), |
| 45 native_handle_(native_handle), |
| 46 no_longer_used_cb_(no_longer_used) {} |
| 47 |
| 48 ~CameraTextureBuffer() { |
| 49 no_longer_used_cb_(); |
| 50 } |
| 51 |
| 52 rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override { |
| 53 RTC_NOTREACHED() |
| 54 << "CameraTextureBuffer::NativeToI420Buffer not implemented."; |
| 55 return nullptr; |
| 56 } |
| 57 |
| 58 private: |
| 59 NativeHandleImpl native_handle_; |
| 60 rtc::Callback0<void> no_longer_used_cb_; |
| 61 }; |
| 62 |
| 63 } // anonymous namespace |
| 64 |
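Note on the new class: CameraTextureBuffer ties the lifetime of the Java-side texture to the reference count of the frame buffer, since the bound callback only runs from the destructor. A minimal sketch of that lifetime, reusing the CL's own Bind pattern; capturer, handle, width, height and timestamp_ns are stand-ins for real values:

    // Illustration only (not part of this CL): the callback bound at
    // construction fires from ~CameraTextureBuffer once the last
    // scoped_refptr to the buffer has been released.
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame(
        new rtc::RefCountedObject<CameraTextureBuffer>(
            width, height, handle,
            rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, capturer,
                      timestamp_ns)));
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> second_ref = frame;
    frame = nullptr;       // One reference remains; the texture is still held.
    second_ref = nullptr;  // Last reference dropped; ReturnBuffer() runs.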
36 jobject AndroidVideoCapturerJni::application_context_ = nullptr; | 65 jobject AndroidVideoCapturerJni::application_context_ = nullptr; |
37 | 66 |
38 // static | 67 // static |
39 int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni, | 68 int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni, |
40 jobject application_context) { | 69 jobject application_context) { |
41 if (application_context_) { | 70 if (application_context_) { |
42 jni->DeleteGlobalRef(application_context_); | 71 jni->DeleteGlobalRef(application_context_); |
43 } | 72 } |
44 application_context_ = NewGlobalRef(jni, application_context); | 73 application_context_ = NewGlobalRef(jni, application_context); |
45 | 74 |
(...skipping 97 matching lines...) |
143 return JavaToStdString(jni(), j_json_caps); | 172 return JavaToStdString(jni(), j_json_caps); |
144 } | 173 } |
145 | 174 |
146 void AndroidVideoCapturerJni::OnCapturerStarted(bool success) { | 175 void AndroidVideoCapturerJni::OnCapturerStarted(bool success) { |
147 LOG(LS_INFO) << "AndroidVideoCapturerJni capture started: " << success; | 176 LOG(LS_INFO) << "AndroidVideoCapturerJni capture started: " << success; |
148 AsyncCapturerInvoke("OnCapturerStarted", | 177 AsyncCapturerInvoke("OnCapturerStarted", |
149 &webrtc::AndroidVideoCapturer::OnCapturerStarted, | 178 &webrtc::AndroidVideoCapturer::OnCapturerStarted, |
150 success); | 179 success); |
151 } | 180 } |
152 | 181 |
153 void AndroidVideoCapturerJni::OnIncomingFrame(void* video_frame, | 182 void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame, |
154 int length, | 183 int length, |
155 int width, | 184 int width, |
156 int height, | 185 int height, |
157 int rotation, | 186 int rotation, |
158 int64_t time_stamp) { | 187 int64_t timestamp_ns) { |
159 const uint8_t* y_plane = static_cast<uint8_t*>(video_frame); | 188 const uint8_t* y_plane = static_cast<uint8_t*>(video_frame); |
160 // Android guarantees that the stride is a multiple of 16. | 189 // Android guarantees that the stride is a multiple of 16. |
161 // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29 | 190 // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29 |
162 int y_stride; | 191 int y_stride; |
163 int uv_stride; | 192 int uv_stride; |
164 webrtc::Calc16ByteAlignedStride(width, &y_stride, &uv_stride); | 193 webrtc::Calc16ByteAlignedStride(width, &y_stride, &uv_stride); |
165 const uint8_t* v_plane = y_plane + y_stride * height; | 194 const uint8_t* v_plane = y_plane + y_stride * height; |
166 const uint8_t* u_plane = | 195 const uint8_t* u_plane = |
167 v_plane + uv_stride * webrtc::AlignInt(height, 2) / 2; | 196 v_plane + uv_stride * webrtc::AlignInt(height, 2) / 2; |
168 | 197 |
169 // Wrap the Java buffer, and call ReturnBuffer() in the wrapped | 198 // Wrap the Java buffer, and call ReturnBuffer() in the wrapped |
170 // VideoFrameBuffer destructor. | 199 // VideoFrameBuffer destructor. |
171 rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer( | 200 rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer( |
172 new rtc::RefCountedObject<webrtc::WrappedI420Buffer>( | 201 new rtc::RefCountedObject<webrtc::WrappedI420Buffer>( |
173 width, height, y_plane, y_stride, u_plane, uv_stride, v_plane, | 202 width, height, y_plane, y_stride, u_plane, uv_stride, v_plane, |
174 uv_stride, | 203 uv_stride, |
175 rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this, time_stamp))); | 204 rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this, |
| 205 timestamp_ns))); |
176 AsyncCapturerInvoke("OnIncomingFrame", | 206 AsyncCapturerInvoke("OnIncomingFrame", |
177 &webrtc::AndroidVideoCapturer::OnIncomingFrame, | 207 &webrtc::AndroidVideoCapturer::OnIncomingFrame, |
178 buffer, rotation, time_stamp); | 208 buffer, rotation, timestamp_ns); |
| 209 } |
| 210 |
| 211 void AndroidVideoCapturerJni::OnTextureFrame(int width, |
| 212 int height, |
| 213 int64_t timestamp_ns, |
| 214 const NativeHandleImpl& handle) { |
| 215 rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer( |
| 216 new rtc::RefCountedObject<CameraTextureBuffer>( |
| 217 width, height, handle, |
| 218 rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this, |
| 219 timestamp_ns))); |
| 220 AsyncCapturerInvoke("OnIncomingFrame", |
| 221 &webrtc::AndroidVideoCapturer::OnIncomingFrame, |
| 222 buffer, 0, timestamp_ns); |
179 } | 223 } |
180 | 224 |
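For reference, the plane arithmetic in OnMemoryBufferFrame assumes a YV12-style layout: the Y plane first, then V, then U, each row padded to a 16-byte-aligned stride. A self-contained sketch of that layout; the local Align16 helper is assumed to match what webrtc::Calc16ByteAlignedStride produces:

    #include <cstdio>

    // Assumed equivalent of webrtc::Calc16ByteAlignedStride: round the luma
    // stride and the (half-width) chroma stride up to multiples of 16.
    static int Align16(int v) { return (v + 15) & ~15; }

    int main() {
      const int width = 360, height = 240;
      const int y_stride = Align16(width);             // 368
      const int uv_stride = Align16((width + 1) / 2);  // 192
      // Offsets used by OnMemoryBufferFrame: V follows Y, U follows V.
      const int v_offset = y_stride * height;
      const int u_offset = v_offset + uv_stride * ((height + 1) / 2);
      const int total_size = u_offset + uv_stride * ((height + 1) / 2);
      std::printf("y_stride=%d uv_stride=%d v@%d u@%d size=%d\n",
                  y_stride, uv_stride, v_offset, u_offset, total_size);
      return 0;
    }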
181 void AndroidVideoCapturerJni::OnOutputFormatRequest(int width, | 225 void AndroidVideoCapturerJni::OnOutputFormatRequest(int width, |
182 int height, | 226 int height, |
183 int fps) { | 227 int fps) { |
184 AsyncCapturerInvoke("OnOutputFormatRequest", | 228 AsyncCapturerInvoke("OnOutputFormatRequest", |
185 &webrtc::AndroidVideoCapturer::OnOutputFormatRequest, | 229 &webrtc::AndroidVideoCapturer::OnOutputFormatRequest, |
186 width, height, fps); | 230 width, height, fps); |
187 } | 231 } |
188 | 232 |
189 JNIEnv* AndroidVideoCapturerJni::jni() { return AttachCurrentThreadIfNeeded(); } | 233 JNIEnv* AndroidVideoCapturerJni::jni() { return AttachCurrentThreadIfNeeded(); } |
190 | 234 |
191 JOW(void, | 235 JOW(void, |
192 VideoCapturerAndroid_00024NativeObserver_nativeOnByteBufferFrameCaptured) | 236 VideoCapturerAndroid_00024NativeObserver_nativeOnByteBufferFrameCaptured) |
193 (JNIEnv* jni, jclass, jlong j_capturer, jbyteArray j_frame, jint length, | 237 (JNIEnv* jni, jclass, jlong j_capturer, jbyteArray j_frame, jint length, |
194 jint width, jint height, jint rotation, jlong ts) { | 238 jint width, jint height, jint rotation, jlong timestamp) { |
195 jboolean is_copy = true; | 239 jboolean is_copy = true; |
196 jbyte* bytes = jni->GetByteArrayElements(j_frame, &is_copy); | 240 jbyte* bytes = jni->GetByteArrayElements(j_frame, &is_copy); |
197 // If this is a copy of the original frame, it means that the memory | 241 // If this is a copy of the original frame, it means that the memory |
198 // is not direct memory and thus VideoCapturerAndroid does not guarantee | 242 // is not direct memory and thus VideoCapturerAndroid does not guarantee |
199 // that the memory is valid when we have released |j_frame|. | 243 // that the memory is valid when we have released |j_frame|. |
200 // TODO(magjed): Move ReleaseByteArrayElements() into ReturnBuffer() and | 244 // TODO(magjed): Move ReleaseByteArrayElements() into ReturnBuffer() and |
201 // remove this check. | 245 // remove this check. |
202 RTC_CHECK(!is_copy) | 246 RTC_CHECK(!is_copy) |
203 << "NativeObserver_nativeOnFrameCaptured: frame is a copy"; | 247 << "NativeObserver_nativeOnFrameCaptured: frame is a copy"; |
204 reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer) | 248 reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer) |
205 ->OnIncomingFrame(bytes, length, width, height, rotation, ts); | 249 ->OnMemoryBufferFrame(bytes, length, width, height, rotation, timestamp); |
206 jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT); | 250 jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT); |
207 } | 251 } |
208 | 252 |
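The is_copy check above relies on standard JNI behavior: GetByteArrayElements may either pin the real array storage or hand back a temporary copy, and JNI_ABORT releases it without copying changes back. The "direct memory" mentioned in the comment refers to the guarantee a direct java.nio.ByteBuffer would give; a hedged sketch of that API for contrast (not what this CL uses; j_byte_buffer is a hypothetical direct-buffer jobject):

    // With a direct ByteBuffer the VM never copies; the address stays valid
    // for as long as the Java side keeps the buffer object alive.
    void* data = jni->GetDirectBufferAddress(j_byte_buffer);
    jlong capacity = jni->GetDirectBufferCapacity(j_byte_buffer);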
| 253 JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnTextureFrameCaptured) |
| 254 (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height, |
| 255 jint j_oes_texture_id, jfloatArray j_transform_matrix, |
| 256 jlong j_timestamp) { |
| 257 reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer) |
| 258 ->OnTextureFrame(j_width, j_height, j_timestamp, |
| 259 NativeHandleImpl(jni, j_oes_texture_id, |
| 260 j_transform_matrix)); |
| 261 } |
| 262 |
209 JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted) | 263 JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted) |
210 (JNIEnv* jni, jclass, jlong j_capturer, jboolean j_success) { | 264 (JNIEnv* jni, jclass, jlong j_capturer, jboolean j_success) { |
211 LOG(LS_INFO) << "NativeObserver_nativeCapturerStarted"; | 265 LOG(LS_INFO) << "NativeObserver_nativeCapturerStarted"; |
212 reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnCapturerStarted( | 266 reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnCapturerStarted( |
213 j_success); | 267 j_success); |
214 } | 268 } |
215 | 269 |
216 JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnOutputFormatRequest) | 270 JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnOutputFormatRequest) |
217 (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height, | 271 (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height, |
218 jint j_fps) { | 272 jint j_fps) { |
219 LOG(LS_INFO) << "NativeObserver_nativeOnOutputFormatRequest"; | 273 LOG(LS_INFO) << "NativeObserver_nativeOnOutputFormatRequest"; |
220 reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnOutputFormatRequest( | 274 reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnOutputFormatRequest( |
221 j_width, j_height, j_fps); | 275 j_width, j_height, j_fps); |
222 } | 276 } |
223 | 277 |
224 JOW(jlong, VideoCapturerAndroid_nativeCreateVideoCapturer) | 278 JOW(jlong, VideoCapturerAndroid_nativeCreateVideoCapturer) |
225 (JNIEnv* jni, jclass, jobject j_video_capturer) { | 279 (JNIEnv* jni, jclass, jobject j_video_capturer) { |
226 rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate = | 280 rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate = |
227 new rtc::RefCountedObject<AndroidVideoCapturerJni>(jni, j_video_capturer); | 281 new rtc::RefCountedObject<AndroidVideoCapturerJni>(jni, j_video_capturer); |
228 rtc::scoped_ptr<cricket::VideoCapturer> capturer( | 282 rtc::scoped_ptr<cricket::VideoCapturer> capturer( |
229 new webrtc::AndroidVideoCapturer(delegate)); | 283 new webrtc::AndroidVideoCapturer(delegate)); |
230 // Caller takes ownership of the cricket::VideoCapturer* pointer. | 284 // Caller takes ownership of the cricket::VideoCapturer* pointer. |
231 return jlongFromPointer(capturer.release()); | 285 return jlongFromPointer(capturer.release()); |
232 } | 286 } |
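One more note on the ownership comment: the jlong handed back to Java is just the raw pointer value from jlongFromPointer(), so whichever native entry point eventually consumes the handle must cast it back, exactly as the JOW functions above do with j_capturer. A hypothetical sketch of that round trip (free_native_capturer is a stand-in name, not a function in this CL):

    // Illustration of reclaiming the pointer that Java took ownership of.
    void free_native_capturer(jlong j_capturer_pointer) {
      delete reinterpret_cast<cricket::VideoCapturer*>(j_capturer_pointer);
    }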
233 | 287 |
234 } // namespace webrtc_jni | 288 } // namespace webrtc_jni |