| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| 11 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" | 11 #include "webrtc/sdk/android/src/jni/native_handle_impl.h" |
| 12 | 12 |
| 13 #include <memory> | 13 #include <memory> |
| 14 | 14 |
| 15 #include "webrtc/common_video/include/video_frame_buffer.h" | 15 #include "webrtc/common_video/include/video_frame_buffer.h" |
| 16 #include "webrtc/rtc_base/bind.h" | 16 #include "webrtc/rtc_base/bind.h" |
| 17 #include "webrtc/rtc_base/checks.h" | 17 #include "webrtc/rtc_base/checks.h" |
| 18 #include "webrtc/rtc_base/keep_ref_until_done.h" | 18 #include "webrtc/rtc_base/keep_ref_until_done.h" |
| 19 #include "webrtc/rtc_base/logging.h" | 19 #include "webrtc/rtc_base/logging.h" |
| 20 #include "webrtc/rtc_base/scoped_ref_ptr.h" | 20 #include "webrtc/rtc_base/scoped_ref_ptr.h" |
| 21 #include "webrtc/rtc_base/timeutils.h" | 21 #include "webrtc/rtc_base/timeutils.h" |
| 22 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" | 22 #include "webrtc/sdk/android/src/jni/classreferenceholder.h" |
| 23 #include "webrtc/sdk/android/src/jni/jni_helpers.h" | 23 #include "webrtc/sdk/android/src/jni/jni_helpers.h" |
| 24 #include "webrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h" | 24 #include "webrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h" |
| 25 #include "webrtc/system_wrappers/include/aligned_malloc.h" | 25 #include "webrtc/system_wrappers/include/aligned_malloc.h" |
| 26 | 26 |
| 27 namespace webrtc_jni { | 27 namespace webrtc { |
| 28 namespace jni { |
| 28 | 29 |
| 29 namespace { | 30 namespace { |
| 30 | 31 |
| 31 class AndroidVideoI420Buffer : public webrtc::I420BufferInterface { | 32 class AndroidVideoI420Buffer : public I420BufferInterface { |
| 32 public: | 33 public: |
| 33 // Wraps an existing reference to a Java VideoBuffer. Retain will not be | 34 // Wraps an existing reference to a Java VideoBuffer. Retain will not be |
| 34 // called but release will be called when the C++ object is destroyed. | 35 // called but release will be called when the C++ object is destroyed. |
| 35 static rtc::scoped_refptr<AndroidVideoI420Buffer> WrapReference( | 36 static rtc::scoped_refptr<AndroidVideoI420Buffer> WrapReference( |
| 36 JNIEnv* jni, | 37 JNIEnv* jni, |
| 37 jmethodID j_release_id, | 38 jmethodID j_release_id, |
| 38 int width, | 39 int width, |
| 39 int height, | 40 int height, |
| 40 jobject j_video_frame_buffer); | 41 jobject j_video_frame_buffer); |
| 41 | 42 |
| (...skipping 118 matching lines...) |
| 160 } | 161 } |
| 161 jni->ReleaseFloatArrayElements(a, ptr, 0); | 162 jni->ReleaseFloatArrayElements(a, ptr, 0); |
| 162 } | 163 } |
| 163 | 164 |
| 164 jfloatArray Matrix::ToJava(JNIEnv* jni) const { | 165 jfloatArray Matrix::ToJava(JNIEnv* jni) const { |
| 165 jfloatArray matrix = jni->NewFloatArray(16); | 166 jfloatArray matrix = jni->NewFloatArray(16); |
| 166 jni->SetFloatArrayRegion(matrix, 0, 16, elem_); | 167 jni->SetFloatArrayRegion(matrix, 0, 16, elem_); |
| 167 return matrix; | 168 return matrix; |
| 168 } | 169 } |
| 169 | 170 |
| 170 void Matrix::Rotate(webrtc::VideoRotation rotation) { | 171 void Matrix::Rotate(VideoRotation rotation) { |
| 171 // Texture coordinates are in the range 0 to 1. The transformation of the last | 172 // Texture coordinates are in the range 0 to 1. The transformation of the last |
| 172 // row in each rotation matrix is needed for proper translation, e.g., to | 173 // row in each rotation matrix is needed for proper translation, e.g., to |
| 173 // mirror x, we don't replace x by -x, but by 1-x. | 174 // mirror x, we don't replace x by -x, but by 1-x. |
| 174 switch (rotation) { | 175 switch (rotation) { |
| 175 case webrtc::kVideoRotation_0: | 176 case kVideoRotation_0: |
| 176 break; | 177 break; |
| 177 case webrtc::kVideoRotation_90: { | 178 case kVideoRotation_90: { |
| 178 const float ROTATE_90[16] = | 179 const float ROTATE_90[16] = |
| 179 { elem_[4], elem_[5], elem_[6], elem_[7], | 180 { elem_[4], elem_[5], elem_[6], elem_[7], |
| 180 -elem_[0], -elem_[1], -elem_[2], -elem_[3], | 181 -elem_[0], -elem_[1], -elem_[2], -elem_[3], |
| 181 elem_[8], elem_[9], elem_[10], elem_[11], | 182 elem_[8], elem_[9], elem_[10], elem_[11], |
| 182 elem_[0] + elem_[12], elem_[1] + elem_[13], | 183 elem_[0] + elem_[12], elem_[1] + elem_[13], |
| 183 elem_[2] + elem_[14], elem_[3] + elem_[15]}; | 184 elem_[2] + elem_[14], elem_[3] + elem_[15]}; |
| 184 memcpy(elem_, ROTATE_90, sizeof(elem_)); | 185 memcpy(elem_, ROTATE_90, sizeof(elem_)); |
| 185 } break; | 186 } break; |
| 186 case webrtc::kVideoRotation_180: { | 187 case kVideoRotation_180: { |
| 187 const float ROTATE_180[16] = | 188 const float ROTATE_180[16] = |
| 188 { -elem_[0], -elem_[1], -elem_[2], -elem_[3], | 189 { -elem_[0], -elem_[1], -elem_[2], -elem_[3], |
| 189 -elem_[4], -elem_[5], -elem_[6], -elem_[7], | 190 -elem_[4], -elem_[5], -elem_[6], -elem_[7], |
| 190 elem_[8], elem_[9], elem_[10], elem_[11], | 191 elem_[8], elem_[9], elem_[10], elem_[11], |
| 191 elem_[0] + elem_[4] + elem_[12], elem_[1] + elem_[5] + elem_[13], | 192 elem_[0] + elem_[4] + elem_[12], elem_[1] + elem_[5] + elem_[13], |
| 192 elem_[2] + elem_[6] + elem_[14], elem_[3] + elem_[7] + elem_[15]}; | 193 elem_[2] + elem_[6] + elem_[14], elem_[3] + elem_[7] + elem_[15]}; |
| 193 memcpy(elem_, ROTATE_180, sizeof(elem_)); | 194 memcpy(elem_, ROTATE_180, sizeof(elem_)); |
| 194 } break; | 195 } break; |
| 195 case webrtc::kVideoRotation_270: { | 196 case kVideoRotation_270: { |
| 196 const float ROTATE_270[16] = | 197 const float ROTATE_270[16] = |
| 197 { -elem_[4], -elem_[5], -elem_[6], -elem_[7], | 198 { -elem_[4], -elem_[5], -elem_[6], -elem_[7], |
| 198 elem_[0], elem_[1], elem_[2], elem_[3], | 199 elem_[0], elem_[1], elem_[2], elem_[3], |
| 199 elem_[8], elem_[9], elem_[10], elem_[11], | 200 elem_[8], elem_[9], elem_[10], elem_[11], |
| 200 elem_[4] + elem_[12], elem_[5] + elem_[13], | 201 elem_[4] + elem_[12], elem_[5] + elem_[13], |
| 201 elem_[6] + elem_[14], elem_[7] + elem_[15]}; | 202 elem_[6] + elem_[14], elem_[7] + elem_[15]}; |
| 202 memcpy(elem_, ROTATE_270, sizeof(elem_)); | 203 memcpy(elem_, ROTATE_270, sizeof(elem_)); |
| 203 } break; | 204 } break; |
| 204 } | 205 } |
| 205 } | 206 } |
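The comment above notes that mirroring x in texture space means replacing x with 1 - x rather than -x, which is what the translation row (elements 12-15) of each rotation matrix provides. Below is a minimal standalone sketch, not part of this change, checking that the kVideoRotation_180 construction maps (x, y) to (1 - x, 1 - y) when starting from the identity transform; the column-major layout (as produced by SurfaceTexture.getTransformMatrix()) is an assumption of the sketch.

```cpp
#include <cassert>
#include <cstring>

int main() {
  // Identity texture transform, column-major.
  float elem[16] = {1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1};
  // Same construction as the kVideoRotation_180 case above.
  const float rotate_180[16] = {
      -elem[0], -elem[1], -elem[2], -elem[3],
      -elem[4], -elem[5], -elem[6], -elem[7],
      elem[8],  elem[9],  elem[10], elem[11],
      elem[0] + elem[4] + elem[12], elem[1] + elem[5] + elem[13],
      elem[2] + elem[6] + elem[14], elem[3] + elem[7] + elem[15]};
  std::memcpy(elem, rotate_180, sizeof(elem));

  // Apply the column-major matrix to the texture coordinate (x, y, 0, 1).
  const float x = 0.25f, y = 0.75f;
  const float out_x = elem[0] * x + elem[4] * y + elem[12];
  const float out_y = elem[1] * x + elem[5] * y + elem[13];
  assert(out_x == 1.0f - x);  // Mirrored: 0.75.
  assert(out_y == 1.0f - y);  // Mirrored: 0.25.
  return 0;
}
```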
| (...skipping 48 matching lines...) |
| 254 : width_(width), | 255 : width_(width), |
| 255 height_(height), | 256 height_(height), |
| 256 native_handle_(native_handle), | 257 native_handle_(native_handle), |
| 257 surface_texture_helper_(surface_texture_helper), | 258 surface_texture_helper_(surface_texture_helper), |
| 258 no_longer_used_cb_(no_longer_used) {} | 259 no_longer_used_cb_(no_longer_used) {} |
| 259 | 260 |
| 260 AndroidTextureBuffer::~AndroidTextureBuffer() { | 261 AndroidTextureBuffer::~AndroidTextureBuffer() { |
| 261 no_longer_used_cb_(); | 262 no_longer_used_cb_(); |
| 262 } | 263 } |
| 263 | 264 |
| 264 webrtc::VideoFrameBuffer::Type AndroidTextureBuffer::type() const { | 265 VideoFrameBuffer::Type AndroidTextureBuffer::type() const { |
| 265 return Type::kNative; | 266 return Type::kNative; |
| 266 } | 267 } |
| 267 | 268 |
| 268 NativeHandleImpl AndroidTextureBuffer::native_handle_impl() const { | 269 NativeHandleImpl AndroidTextureBuffer::native_handle_impl() const { |
| 269 return native_handle_; | 270 return native_handle_; |
| 270 } | 271 } |
| 271 | 272 |
| 272 int AndroidTextureBuffer::width() const { | 273 int AndroidTextureBuffer::width() const { |
| 273 return width_; | 274 return width_; |
| 274 } | 275 } |
| 275 | 276 |
| 276 int AndroidTextureBuffer::height() const { | 277 int AndroidTextureBuffer::height() const { |
| 277 return height_; | 278 return height_; |
| 278 } | 279 } |
| 279 | 280 |
| 280 rtc::scoped_refptr<webrtc::I420BufferInterface> AndroidTextureBuffer::ToI420() { | 281 rtc::scoped_refptr<I420BufferInterface> AndroidTextureBuffer::ToI420() { |
| 281 int uv_width = (width() + 7) / 8; | 282 int uv_width = (width() + 7) / 8; |
| 282 int stride = 8 * uv_width; | 283 int stride = 8 * uv_width; |
| 283 int uv_height = (height() + 1) / 2; | 284 int uv_height = (height() + 1) / 2; |
| 284 size_t size = stride * (height() + uv_height); | 285 size_t size = stride * (height() + uv_height); |
| 285 // The data is owned by the frame, and the normal case is that the | 286 // The data is owned by the frame, and the normal case is that the |
| 286 // data is deleted by the frame's destructor callback. | 287 // data is deleted by the frame's destructor callback. |
| 287 // | 288 // |
| 288 // TODO(nisse): Use an I420BufferPool. We then need to extend that | 289 // TODO(nisse): Use an I420BufferPool. We then need to extend that |
| 289 // class, and I420Buffer, to support our memory layout. | 290 // class, and I420Buffer, to support our memory layout. |
| 290 // TODO(nisse): Depending on | 291 // TODO(nisse): Depending on |
| 291 // system_wrappers/include/aligned_malloc.h violates current DEPS | 292 // system_wrappers/include/aligned_malloc.h violates current DEPS |
| 292 // rules. We get away with it for now only because it is indirectly | 293 // rules. We get away with it for now only because it is indirectly |
| 293 // included by i420_buffer.h. | 294 // included by i420_buffer.h. |
| 294 std::unique_ptr<uint8_t, webrtc::AlignedFreeDeleter> yuv_data( | 295 std::unique_ptr<uint8_t, AlignedFreeDeleter> yuv_data( |
| 295 static_cast<uint8_t*>(webrtc::AlignedMalloc(size, kBufferAlignment))); | 296 static_cast<uint8_t*>(AlignedMalloc(size, kBufferAlignment))); |
| 296 // See YuvConverter.java for the required layout. | 297 // See YuvConverter.java for the required layout. |
| 297 uint8_t* y_data = yuv_data.get(); | 298 uint8_t* y_data = yuv_data.get(); |
| 298 uint8_t* u_data = y_data + height() * stride; | 299 uint8_t* u_data = y_data + height() * stride; |
| 299 uint8_t* v_data = u_data + stride/2; | 300 uint8_t* v_data = u_data + stride/2; |
| 300 | 301 |
| 301 rtc::scoped_refptr<webrtc::I420BufferInterface> copy = webrtc::WrapI420Buffer( | 302 rtc::scoped_refptr<I420BufferInterface> copy = webrtc::WrapI420Buffer( |
| 302 width(), height(), y_data, stride, u_data, stride, v_data, stride, | 303 width(), height(), y_data, stride, u_data, stride, v_data, stride, |
| 303 rtc::Bind(&webrtc::AlignedFree, yuv_data.release())); | 304 rtc::Bind(&AlignedFree, yuv_data.release())); |
| 304 | 305 |
| 305 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 306 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 306 ScopedLocalRefFrame local_ref_frame(jni); | 307 ScopedLocalRefFrame local_ref_frame(jni); |
| 307 | 308 |
| 308 jmethodID transform_mid = GetMethodID( | 309 jmethodID transform_mid = GetMethodID( |
| 309 jni, | 310 jni, |
| 310 GetObjectClass(jni, surface_texture_helper_), | 311 GetObjectClass(jni, surface_texture_helper_), |
| 311 "textureToYUV", | 312 "textureToYUV", |
| 312 "(Ljava/nio/ByteBuffer;IIII[F)V"); | 313 "(Ljava/nio/ByteBuffer;IIII[F)V"); |
| 313 | 314 |
| (...skipping 45 matching lines...) |
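The stride and plane offsets computed at the top of ToI420() follow the packed layout expected by YuvConverter.java: the Y plane occupies height rows of stride bytes, and each chroma row below it carries U in its first stride/2 bytes and V in the second half, so all three planes share the same row pitch. A standalone sketch of that arithmetic, not part of this change, using an illustrative 640x480 frame:

```cpp
#include <cstddef>
#include <cstdio>

int main() {
  const int width = 640, height = 480;     // Illustrative frame size only.
  const int uv_width = (width + 7) / 8;    // 80
  const int stride = 8 * uv_width;         // 640: width rounded up to a multiple of 8.
  const int uv_height = (height + 1) / 2;  // 240
  const size_t size = static_cast<size_t>(stride) * (height + uv_height);  // 460800

  const size_t y_offset = 0;                                     // Y plane start.
  const size_t u_offset = static_cast<size_t>(height) * stride;  // 307200
  const size_t v_offset = u_offset + stride / 2;                 // 307520
  // U and V interleave per row: [U: stride/2 bytes][V: stride/2 bytes], which
  // is why both chroma planes are wrapped with the full stride above.
  std::printf("size=%zu y=%zu u=%zu v=%zu\n", size, y_offset, u_offset, v_offset);
  return 0;
}
```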
| 359 | 360 |
| 360 AndroidVideoBuffer::~AndroidVideoBuffer() { | 361 AndroidVideoBuffer::~AndroidVideoBuffer() { |
| 361 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 362 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 362 jni->CallVoidMethod(*j_video_frame_buffer_, j_release_id_); | 363 jni->CallVoidMethod(*j_video_frame_buffer_, j_release_id_); |
| 363 } | 364 } |
| 364 | 365 |
| 365 jobject AndroidVideoBuffer::video_frame_buffer() const { | 366 jobject AndroidVideoBuffer::video_frame_buffer() const { |
| 366 return *j_video_frame_buffer_; | 367 return *j_video_frame_buffer_; |
| 367 } | 368 } |
| 368 | 369 |
| 369 webrtc::VideoFrameBuffer::Type AndroidVideoBuffer::type() const { | 370 VideoFrameBuffer::Type AndroidVideoBuffer::type() const { |
| 370 return Type::kNative; | 371 return Type::kNative; |
| 371 } | 372 } |
| 372 | 373 |
| 373 int AndroidVideoBuffer::width() const { | 374 int AndroidVideoBuffer::width() const { |
| 374 return width_; | 375 return width_; |
| 375 } | 376 } |
| 376 | 377 |
| 377 int AndroidVideoBuffer::height() const { | 378 int AndroidVideoBuffer::height() const { |
| 378 return height_; | 379 return height_; |
| 379 } | 380 } |
| 380 | 381 |
| 381 rtc::scoped_refptr<webrtc::I420BufferInterface> AndroidVideoBuffer::ToI420() { | 382 rtc::scoped_refptr<I420BufferInterface> AndroidVideoBuffer::ToI420() { |
| 382 JNIEnv* jni = AttachCurrentThreadIfNeeded(); | 383 JNIEnv* jni = AttachCurrentThreadIfNeeded(); |
| 383 ScopedLocalRefFrame local_ref_frame(jni); | 384 ScopedLocalRefFrame local_ref_frame(jni); |
| 384 | 385 |
| 385 jclass j_video_frame_buffer_class = | 386 jclass j_video_frame_buffer_class = |
| 386 FindClass(jni, "org/webrtc/VideoFrame$Buffer"); | 387 FindClass(jni, "org/webrtc/VideoFrame$Buffer"); |
| 387 jmethodID j_to_i420_id = | 388 jmethodID j_to_i420_id = |
| 388 jni->GetMethodID(j_video_frame_buffer_class, "toI420", | 389 jni->GetMethodID(j_video_frame_buffer_class, "toI420", |
| 389 "()Lorg/webrtc/VideoFrame$I420Buffer;"); | 390 "()Lorg/webrtc/VideoFrame$I420Buffer;"); |
| 390 | 391 |
| 391 jobject j_i420_buffer = | 392 jobject j_i420_buffer = |
| 392 jni->CallObjectMethod(*j_video_frame_buffer_, j_to_i420_id); | 393 jni->CallObjectMethod(*j_video_frame_buffer_, j_to_i420_id); |
| 393 | 394 |
| 394 // We don't need to retain the buffer because toI420 returns a new object that | 395 // We don't need to retain the buffer because toI420 returns a new object that |
| 395 // we are assumed to take ownership of. | 396 // we are assumed to take ownership of. |
| 396 return AndroidVideoI420Buffer::WrapReference(jni, j_release_id_, width_, | 397 return AndroidVideoI420Buffer::WrapReference(jni, j_release_id_, width_, |
| 397 height_, j_i420_buffer); | 398 height_, j_i420_buffer); |
| 398 } | 399 } |
| 399 | 400 |
| 400 jobject AndroidVideoBuffer::ToJavaI420Frame(JNIEnv* jni, int rotation) { | 401 jobject AndroidVideoBuffer::ToJavaI420Frame(JNIEnv* jni, int rotation) { |
| 401 jclass j_byte_buffer_class = jni->FindClass("java/nio/ByteBuffer"); | 402 jclass j_byte_buffer_class = jni->FindClass("java/nio/ByteBuffer"); |
| 402 jclass j_i420_frame_class = | 403 jclass j_i420_frame_class = |
| 403 FindClass(jni, "org/webrtc/VideoRenderer$I420Frame"); | 404 FindClass(jni, "org/webrtc/VideoRenderer$I420Frame"); |
| 404 jmethodID j_i420_frame_ctor_id = GetMethodID( | 405 jmethodID j_i420_frame_ctor_id = GetMethodID( |
| 405 jni, j_i420_frame_class, "<init>", "(ILorg/webrtc/VideoFrame$Buffer;J)V"); | 406 jni, j_i420_frame_class, "<init>", "(ILorg/webrtc/VideoFrame$Buffer;J)V"); |
| 406 // Java code just uses the native frame to hold a reference to the buffer so | 407 // Java code just uses the native frame to hold a reference to the buffer so |
| 407 // this is okay. | 408 // this is okay. |
| 408 webrtc::VideoFrame* native_frame = new webrtc::VideoFrame( | 409 VideoFrame* native_frame = |
| 409 this, 0 /* timestamp */, 0 /* render_time_ms */, | 410 new VideoFrame(this, 0 /* timestamp */, 0 /* render_time_ms */, |
| 410 webrtc::VideoRotation::kVideoRotation_0 /* rotation */); | 411 VideoRotation::kVideoRotation_0 /* rotation */); |
| 411 return jni->NewObject(j_i420_frame_class, j_i420_frame_ctor_id, rotation, | 412 return jni->NewObject(j_i420_frame_class, j_i420_frame_ctor_id, rotation, |
| 412 *j_video_frame_buffer_, jlongFromPointer(native_frame)); | 413 *j_video_frame_buffer_, jlongFromPointer(native_frame)); |
| 413 } | 414 } |
| 414 | 415 |
| 415 AndroidVideoBufferFactory::AndroidVideoBufferFactory(JNIEnv* jni) | 416 AndroidVideoBufferFactory::AndroidVideoBufferFactory(JNIEnv* jni) |
| 416 : j_video_frame_class_(jni, FindClass(jni, "org/webrtc/VideoFrame")), | 417 : j_video_frame_class_(jni, FindClass(jni, "org/webrtc/VideoFrame")), |
| 417 j_get_buffer_id_(GetMethodID(jni, | 418 j_get_buffer_id_(GetMethodID(jni, |
| 418 *j_video_frame_class_, | 419 *j_video_frame_class_, |
| 419 "getBuffer", | 420 "getBuffer", |
| 420 "()Lorg/webrtc/VideoFrame$Buffer;")), | 421 "()Lorg/webrtc/VideoFrame$Buffer;")), |
| 421 j_get_rotation_id_( | 422 j_get_rotation_id_( |
| 422 GetMethodID(jni, *j_video_frame_class_, "getRotation", "()I")), | 423 GetMethodID(jni, *j_video_frame_class_, "getRotation", "()I")), |
| 423 j_get_timestamp_ns_id_( | 424 j_get_timestamp_ns_id_( |
| 424 GetMethodID(jni, *j_video_frame_class_, "getTimestampNs", "()J")), | 425 GetMethodID(jni, *j_video_frame_class_, "getTimestampNs", "()J")), |
| 425 j_video_frame_buffer_class_( | 426 j_video_frame_buffer_class_( |
| 426 jni, | 427 jni, |
| 427 FindClass(jni, "org/webrtc/VideoFrame$Buffer")), | 428 FindClass(jni, "org/webrtc/VideoFrame$Buffer")), |
| 428 j_retain_id_( | 429 j_retain_id_( |
| 429 GetMethodID(jni, *j_video_frame_buffer_class_, "retain", "()V")), | 430 GetMethodID(jni, *j_video_frame_buffer_class_, "retain", "()V")), |
| 430 j_release_id_( | 431 j_release_id_( |
| 431 GetMethodID(jni, *j_video_frame_buffer_class_, "release", "()V")), | 432 GetMethodID(jni, *j_video_frame_buffer_class_, "release", "()V")), |
| 432 j_get_width_id_( | 433 j_get_width_id_( |
| 433 GetMethodID(jni, *j_video_frame_buffer_class_, "getWidth", "()I")), | 434 GetMethodID(jni, *j_video_frame_buffer_class_, "getWidth", "()I")), |
| 434 j_get_height_id_( | 435 j_get_height_id_( |
| 435 GetMethodID(jni, *j_video_frame_buffer_class_, "getHeight", "()I")) {} | 436 GetMethodID(jni, *j_video_frame_buffer_class_, "getHeight", "()I")) {} |
| 436 | 437 |
| 437 webrtc::VideoFrame AndroidVideoBufferFactory::CreateFrame( | 438 VideoFrame AndroidVideoBufferFactory::CreateFrame( |
| 438 JNIEnv* jni, | 439 JNIEnv* jni, |
| 439 jobject j_video_frame, | 440 jobject j_video_frame, |
| 440 uint32_t timestamp_rtp) const { | 441 uint32_t timestamp_rtp) const { |
| 441 jobject j_video_frame_buffer = | 442 jobject j_video_frame_buffer = |
| 442 jni->CallObjectMethod(j_video_frame, j_get_buffer_id_); | 443 jni->CallObjectMethod(j_video_frame, j_get_buffer_id_); |
| 443 int rotation = jni->CallIntMethod(j_video_frame, j_get_rotation_id_); | 444 int rotation = jni->CallIntMethod(j_video_frame, j_get_rotation_id_); |
| 444 int64_t timestamp_ns = | 445 int64_t timestamp_ns = |
| 445 jni->CallLongMethod(j_video_frame, j_get_timestamp_ns_id_); | 446 jni->CallLongMethod(j_video_frame, j_get_timestamp_ns_id_); |
| 446 rtc::scoped_refptr<AndroidVideoBuffer> buffer = | 447 rtc::scoped_refptr<AndroidVideoBuffer> buffer = |
| 447 CreateBuffer(jni, j_video_frame_buffer); | 448 CreateBuffer(jni, j_video_frame_buffer); |
| 448 return webrtc::VideoFrame(buffer, timestamp_rtp, | 449 return VideoFrame(buffer, timestamp_rtp, |
| 449 timestamp_ns / rtc::kNumNanosecsPerMillisec, | 450 timestamp_ns / rtc::kNumNanosecsPerMillisec, |
| 450 static_cast<webrtc::VideoRotation>(rotation)); | 451 static_cast<VideoRotation>(rotation)); |
| 451 } | 452 } |
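A hypothetical usage sketch, not part of this change, showing how a caller might hand a Java org.webrtc.VideoFrame to the native pipeline via AndroidVideoBufferFactory::CreateFrame(). The function name, the factory/sink parameters, and the surrounding wiring are illustrative only; the JNI helpers used (AttachCurrentThreadIfNeeded, ScopedLocalRefFrame) come from jni_helpers.h, which this file already includes, and rtc::VideoSinkInterface is assumed to be available from its usual WebRTC header.

```cpp
// Hypothetical helper; names and wiring are illustrative only.
void DeliverJavaFrame(const AndroidVideoBufferFactory& factory,
                      jobject j_video_frame,
                      uint32_t rtp_timestamp,
                      rtc::VideoSinkInterface<VideoFrame>* sink) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  // CreateFrame reads the buffer, rotation and capture time from the Java
  // frame and wraps the buffer without copying pixel data.
  VideoFrame frame = factory.CreateFrame(jni, j_video_frame, rtp_timestamp);
  sink->OnFrame(frame);
}
```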
| 452 | 453 |
| 453 rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::WrapBuffer( | 454 rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::WrapBuffer( |
| 454 JNIEnv* jni, | 455 JNIEnv* jni, |
| 455 jobject j_video_frame_buffer) const { | 456 jobject j_video_frame_buffer) const { |
| 456 int width = jni->CallIntMethod(j_video_frame_buffer, j_get_width_id_); | 457 int width = jni->CallIntMethod(j_video_frame_buffer, j_get_width_id_); |
| 457 int height = jni->CallIntMethod(j_video_frame_buffer, j_get_height_id_); | 458 int height = jni->CallIntMethod(j_video_frame_buffer, j_get_height_id_); |
| 458 return AndroidVideoBuffer::WrapReference(jni, j_release_id_, width, height, | 459 return AndroidVideoBuffer::WrapReference(jni, j_release_id_, width, height, |
| 459 j_video_frame_buffer); | 460 j_video_frame_buffer); |
| 460 } | 461 } |
| 461 | 462 |
| 462 rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::CreateBuffer( | 463 rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::CreateBuffer( |
| 463 JNIEnv* jni, | 464 JNIEnv* jni, |
| 464 jobject j_video_frame_buffer) const { | 465 jobject j_video_frame_buffer) const { |
| 465 int width = jni->CallIntMethod(j_video_frame_buffer, j_get_width_id_); | 466 int width = jni->CallIntMethod(j_video_frame_buffer, j_get_width_id_); |
| 466 int height = jni->CallIntMethod(j_video_frame_buffer, j_get_height_id_); | 467 int height = jni->CallIntMethod(j_video_frame_buffer, j_get_height_id_); |
| 467 return new rtc::RefCountedObject<AndroidVideoBuffer>( | 468 return new rtc::RefCountedObject<AndroidVideoBuffer>( |
| 468 jni, j_retain_id_, j_release_id_, width, height, j_video_frame_buffer); | 469 jni, j_retain_id_, j_release_id_, width, height, j_video_frame_buffer); |
| 469 } | 470 } |
| 470 | 471 |
| 471 JavaVideoFrameFactory::JavaVideoFrameFactory(JNIEnv* jni) | 472 JavaVideoFrameFactory::JavaVideoFrameFactory(JNIEnv* jni) |
| 472 : j_video_frame_class_(jni, FindClass(jni, "org/webrtc/VideoFrame")) { | 473 : j_video_frame_class_(jni, FindClass(jni, "org/webrtc/VideoFrame")) { |
| 473 j_video_frame_constructor_id_ = | 474 j_video_frame_constructor_id_ = |
| 474 GetMethodID(jni, *j_video_frame_class_, "<init>", | 475 GetMethodID(jni, *j_video_frame_class_, "<init>", |
| 475 "(Lorg/webrtc/VideoFrame$Buffer;IJ)V"); | 476 "(Lorg/webrtc/VideoFrame$Buffer;IJ)V"); |
| 476 } | 477 } |
| 477 | 478 |
| 478 static bool IsJavaVideoBuffer( | 479 static bool IsJavaVideoBuffer(rtc::scoped_refptr<VideoFrameBuffer> buffer) { |
| 479 rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer) { | 480 if (buffer->type() != VideoFrameBuffer::Type::kNative) { |
| 480 if (buffer->type() != webrtc::VideoFrameBuffer::Type::kNative) { | |
| 481 return false; | 481 return false; |
| 482 } | 482 } |
| 483 AndroidVideoFrameBuffer* android_buffer = | 483 AndroidVideoFrameBuffer* android_buffer = |
| 484 static_cast<AndroidVideoFrameBuffer*>(buffer.get()); | 484 static_cast<AndroidVideoFrameBuffer*>(buffer.get()); |
| 485 return android_buffer->android_type() == | 485 return android_buffer->android_type() == |
| 486 AndroidVideoFrameBuffer::AndroidType::kJavaBuffer; | 486 AndroidVideoFrameBuffer::AndroidType::kJavaBuffer; |
| 487 } | 487 } |
| 488 | 488 |
| 489 jobject JavaVideoFrameFactory::ToJavaFrame( | 489 jobject JavaVideoFrameFactory::ToJavaFrame(JNIEnv* jni, |
| 490 JNIEnv* jni, | 490 const VideoFrame& frame) const { |
| 491 const webrtc::VideoFrame& frame) const { | 491 rtc::scoped_refptr<VideoFrameBuffer> buffer = frame.video_frame_buffer(); |
| 492 rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer = | |
| 493 frame.video_frame_buffer(); | |
| 494 jobject j_buffer; | 492 jobject j_buffer; |
| 495 if (IsJavaVideoBuffer(buffer)) { | 493 if (IsJavaVideoBuffer(buffer)) { |
| 496 RTC_DCHECK(buffer->type() == webrtc::VideoFrameBuffer::Type::kNative); | 494 RTC_DCHECK(buffer->type() == VideoFrameBuffer::Type::kNative); |
| 497 AndroidVideoFrameBuffer* android_buffer = | 495 AndroidVideoFrameBuffer* android_buffer = |
| 498 static_cast<AndroidVideoFrameBuffer*>(buffer.get()); | 496 static_cast<AndroidVideoFrameBuffer*>(buffer.get()); |
| 499 RTC_DCHECK(android_buffer->android_type() == | 497 RTC_DCHECK(android_buffer->android_type() == |
| 500 AndroidVideoFrameBuffer::AndroidType::kJavaBuffer); | 498 AndroidVideoFrameBuffer::AndroidType::kJavaBuffer); |
| 501 AndroidVideoBuffer* android_video_buffer = | 499 AndroidVideoBuffer* android_video_buffer = |
| 502 static_cast<AndroidVideoBuffer*>(android_buffer); | 500 static_cast<AndroidVideoBuffer*>(android_buffer); |
| 503 j_buffer = android_video_buffer->video_frame_buffer(); | 501 j_buffer = android_video_buffer->video_frame_buffer(); |
| 504 } else { | 502 } else { |
| 505 j_buffer = WrapI420Buffer(jni, buffer->ToI420()); | 503 j_buffer = WrapI420Buffer(jni, buffer->ToI420()); |
| 506 } | 504 } |
| 507 return jni->NewObject( | 505 return jni->NewObject( |
| 508 *j_video_frame_class_, j_video_frame_constructor_id_, j_buffer, | 506 *j_video_frame_class_, j_video_frame_constructor_id_, j_buffer, |
| 509 static_cast<jint>(frame.rotation()), | 507 static_cast<jint>(frame.rotation()), |
| 510 static_cast<jlong>(frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec)); | 508 static_cast<jlong>(frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec)); |
| 511 } | 509 } |
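Conversely, a hypothetical sketch, not part of this change, of forwarding a native frame to a Java callback with JavaVideoFrameFactory::ToJavaFrame(). The sink object and method id parameters are illustrative; real callers would cache the method id once, as the factories above do.

```cpp
// Hypothetical helper; |j_sink| and |j_on_frame_id| are illustrative only.
void ForwardToJava(JNIEnv* jni,
                   const JavaVideoFrameFactory& factory,
                   jobject j_sink,
                   jmethodID j_on_frame_id,
                   const VideoFrame& frame) {
  ScopedLocalRefFrame local_ref_frame(jni);
  // Java-backed buffers are passed through unchanged; any other buffer type
  // is converted with ToI420() and wrapped before crossing into Java.
  jobject j_frame = factory.ToJavaFrame(jni, frame);
  jni->CallVoidMethod(j_sink, j_on_frame_id, j_frame);
}
```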
| 512 | 510 |
| 513 } // namespace webrtc_jni | 511 } // namespace jni |
| 512 } // namespace webrtc |