OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "webrtc/api/java/jni/androidvideocapturer_jni.h" | 11 #include "webrtc/api/java/jni/androidvideocapturer_jni.h" |
12 #include "webrtc/api/java/jni/classreferenceholder.h" | 12 #include "webrtc/api/java/jni/classreferenceholder.h" |
13 #include "webrtc/api/java/jni/native_handle_impl.h" | 13 #include "webrtc/api/java/jni/native_handle_impl.h" |
14 #include "webrtc/api/java/jni/surfacetexturehelper_jni.h" | 14 #include "webrtc/api/java/jni/surfacetexturehelper_jni.h" |
15 #include "third_party/libyuv/include/libyuv/convert.h" | 15 #include "third_party/libyuv/include/libyuv/convert.h" |
16 #include "third_party/libyuv/include/libyuv/scale.h" | |
17 #include "webrtc/base/bind.h" | 16 #include "webrtc/base/bind.h" |
18 | 17 |
19 namespace webrtc_jni { | 18 namespace webrtc_jni { |
20 | 19 |
21 jobject AndroidVideoCapturerJni::application_context_ = nullptr; | 20 jobject AndroidVideoCapturerJni::application_context_ = nullptr; |
22 | 21 |
23 // static | 22 // static |
24 int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni, | 23 int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni, |
25 jobject appliction_context) { | 24 jobject appliction_context) { |
26 if (application_context_) { | 25 if (application_context_) { |
(...skipping 183 matching lines...) | |
210 y_plane + width * crop_y + crop_x, width, | 209 y_plane + width * crop_y + crop_x, width, |
211 uv_plane + uv_width * crop_y + crop_x, width, | 210 uv_plane + uv_width * crop_y + crop_x, width, |
212 buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane), | 211 buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane), |
213 // Swap U and V, since we have NV21, not NV12. | 212 // Swap U and V, since we have NV21, not NV12. |
214 buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane), | 213 buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane), |
215 buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane), | 214 buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane), |
216 crop_width, crop_height, static_cast<libyuv::RotationMode>( | 215 crop_width, crop_height, static_cast<libyuv::RotationMode>( |
217 capturer_->apply_rotation() ? rotation : 0)); | 216 capturer_->apply_rotation() ? rotation : 0)); |
218 | 217 |
219 if (adapted_width != rotated_width || adapted_height != rotated_height) { | 218 if (adapted_width != rotated_width || adapted_height != rotated_height) { |
220 rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled = | 219 rtc::scoped_refptr<webrtc::I420Buffer> scaled = |
221 post_scale_pool_.CreateBuffer(adapted_width, adapted_height); | 220 post_scale_pool_.CreateBuffer(adapted_width, adapted_height); |
222 // TODO(nisse): This should be done by some Scale method in | 221 scaled->CropAndScale(buffer, 0, 0, rotated_width, rotated_height); |
magjed_webrtc 2016/05/30 11:58:26
You should use crop_x/crop_y instead of 0?
nisse-webrtc 2016/05/30 12:58:22
I don't think so. Cropping was done in the above c
223 // I420Buffer, but we can't do that right now, since | |
224 // I420BufferPool uses a wrapper object. | |
225 if (libyuv::I420Scale(buffer->DataY(), buffer->StrideY(), | |
226 buffer->DataU(), buffer->StrideU(), | |
227 buffer->DataV(), buffer->StrideV(), | |
228 rotated_width, rotated_height, | |
229 scaled->MutableDataY(), scaled->StrideY(), | |
230 scaled->MutableDataU(), scaled->StrideU(), | |
231 scaled->MutableDataV(), scaled->StrideV(), | |
232 adapted_width, adapted_height, | |
233 libyuv::kFilterBox) < 0) { | |
234 LOG(LS_WARNING) << "I420Scale failed"; | |
235 return; | |
236 } | |
237 buffer = scaled; | 222 buffer = scaled; |
238 } | 223 } |
239 // TODO(nisse): Use microsecond time instead. | 224 // TODO(nisse): Use microsecond time instead. |
240 capturer_->OnFrame(cricket::WebRtcVideoFrame( | 225 capturer_->OnFrame(cricket::WebRtcVideoFrame( |
241 buffer, timestamp_ns, | 226 buffer, timestamp_ns, |
242 capturer_->apply_rotation() | 227 capturer_->apply_rotation() |
243 ? webrtc::kVideoRotation_0 | 228 ? webrtc::kVideoRotation_0 |
244 : static_cast<webrtc::VideoRotation>(rotation)), | 229 : static_cast<webrtc::VideoRotation>(rotation)), |
245 width, height); | 230 width, height); |
246 } | 231 } |
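
Reviewer-side note on the thread above (not part of the CL): CropAndScale is called with offsets (0, 0) because the crop has already been consumed one step earlier, in the NV12ToI420Rotate call, whose source pointers are advanced by (crop_x, crop_y) and whose output is already the cropped, possibly rotated frame. The second stage therefore only scales from rotated_width x rotated_height down to the adapted size. Below is a minimal sketch of that two-stage flow, condensed from the hunk above; the geometry variables, `buffer`, and `post_scale_pool_` are assumed to be set up as in the skipped lines of OnMemoryBufferFrame(), and only calls that appear in the diff are used.

```cpp
// Sketch, not a drop-in replacement: assumes the WebRTC types already in
// scope in this file (I420Buffer, I420BufferPool, the kYPlane/kUPlane/kVPlane
// plane constants) and the local variables computed earlier in the function.
#include "third_party/libyuv/include/libyuv/convert.h"

// Stage 1: NV21 -> I420, cropping and rotating in a single pass. The source
// pointers are advanced by (crop_x, crop_y), so the crop offsets are used up
// here, before any scaling happens.
libyuv::NV12ToI420Rotate(
    y_plane + width * crop_y + crop_x, width,
    uv_plane + uv_width * crop_y + crop_x, width,
    buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
    // U and V destinations are swapped because the camera delivers NV21.
    buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
    buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
    crop_width, crop_height,
    static_cast<libyuv::RotationMode>(
        capturer_->apply_rotation() ? rotation : 0));

// Stage 2: scale only. CropAndScale gets offset (0, 0) and the full rotated
// size, i.e. it performs no further cropping, just the downscale to the
// adapted resolution. This replaces the old explicit libyuv::I420Scale call.
if (adapted_width != rotated_width || adapted_height != rotated_height) {
  rtc::scoped_refptr<webrtc::I420Buffer> scaled =
      post_scale_pool_.CreateBuffer(adapted_width, adapted_height);
  scaled->CropAndScale(buffer, 0, 0, rotated_width, rotated_height);
  buffer = scaled;
}
```
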
(...skipping 87 matching lines...) | |
334 | 319 |
335 JOW(void, VideoCapturer_00024NativeObserver_nativeOnOutputFormatRequest) | 320 JOW(void, VideoCapturer_00024NativeObserver_nativeOnOutputFormatRequest) |
336 (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height, | 321 (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height, |
337 jint j_fps) { | 322 jint j_fps) { |
338 LOG(LS_INFO) << "NativeObserver_nativeOnOutputFormatRequest"; | 323 LOG(LS_INFO) << "NativeObserver_nativeOnOutputFormatRequest"; |
339 reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnOutputFormatRequest( | 324 reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnOutputFormatRequest( |
340 j_width, j_height, j_fps); | 325 j_width, j_height, j_fps); |
341 } | 326 } |
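
Side note on the JNI glue, for context only (this is a recollection of the JOW macro from webrtc/api/java/jni/jni_helpers.h, not text from this CL): JOW prefixes the exported symbol with Java_org_webrtc_, and the _00024 in the name above is the JNI mangling of the '$' in the inner class VideoCapturer$NativeObserver.

```cpp
// Approximate expansion of the JOW helper macro (sketch from memory; see
// webrtc/api/java/jni/jni_helpers.h for the authoritative definition).
#define JOW(rettype, name) \
  extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_##name

// So the definition above exports the symbol
//   Java_org_webrtc_VideoCapturer_00024NativeObserver_nativeOnOutputFormatRequest
// which the JVM binds to the Java native method
//   org.webrtc.VideoCapturer.NativeObserver#nativeOnOutputFormatRequest(long, int, int, int)
```
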
342 | 327 |
343 } // namespace webrtc_jni | 328 } // namespace webrtc_jni |