Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. | 2 * Copyright 2015 The WebRTC project authors. All Rights Reserved. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ | 9 */ |
| 10 | 10 |
| (...skipping 151 matching lines...) | |
| 162 &webrtc::AndroidVideoCapturer::OnCapturerStarted, | 162 &webrtc::AndroidVideoCapturer::OnCapturerStarted, |
| 163 success); | 163 success); |
| 164 } | 164 } |
| 165 | 165 |
| 166 void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame, | 166 void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame, |
| 167 int length, | 167 int length, |
| 168 int width, | 168 int width, |
| 169 int height, | 169 int height, |
| 170 int rotation, | 170 int rotation, |
| 171 int64_t timestamp_ns) { | 171 int64_t timestamp_ns) { |
| 172 const uint8_t* y_plane = static_cast<uint8_t*>(video_frame); | 172 RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 || |
| 173 const uint8_t* vu_plane = y_plane + width * height; | 173 rotation == 270); |
| 174 rtc::CritScope cs(&capturer_lock_); | |
| 175 | |
| 176 int adapted_width; | |
| 177 int adapted_height; | |
| 178 int crop_width; | |
| 179 int crop_height; | |
| 180 int crop_x; | |
| 181 int crop_y; | |
| 182 | |
| 183 if (!capturer_->AdaptFrame(width, height, &adapted_width, &adapted_height, | |
| 184 &crop_width, &crop_height, &crop_x, &crop_y)) { | |
| 185 return; | |
| 186 } | |
| 187 | |
| 188 int rotated_width = crop_width; | |
| 189 int rotated_height = crop_height; | |
| 190 | |
| 191 if (capturer_->apply_rotation() && (rotation == 90 || rotation == 270)) { | |
| 192 std::swap(adapted_width, adapted_height); | |
| 193 std::swap(rotated_width, rotated_height); | |
| 194 } | |
| 174 | 195 |
| 175 rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer = | 196 rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer = |
| 176 buffer_pool_.CreateBuffer(width, height); | 197 buffer_pool_.CreateBuffer(rotated_width, rotated_height); |
| 177 libyuv::NV21ToI420( | 198 |
| 178 y_plane, width, | 199 const uint8_t* y_plane = static_cast<const uint8_t*>(video_frame); |
| 179 vu_plane, width, | 200 const uint8_t* uv_plane = y_plane + width * height; |
| 201 | |
| 202 // Can only crop at even pixels. | |
| 203 crop_x &= ~1; | |
|
magjed_webrtc  2016/05/18 15:45:30
The same is true for crop_y.
nisse-webrtc  2016/05/18 16:34:17
Done.
| |
| 204 | |
| 205 libyuv::NV12ToI420Rotate( | |
| 206 y_plane + width * crop_y + crop_x, width, | |
| 207 uv_plane + width * crop_y + crop_x, width, | |
|
magjed_webrtc  2016/05/18 15:45:30
crop_y is not used correctly, it should be roughly
nisse-webrtc  2016/05/18 16:34:17
If crop_y is forced to be even (fixed above), then
| |
| 180 buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane), | 208 buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane), |
| 209 // Swap U and V, since we have NV21, not NV12. | |
| 210 buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane), | |
| 181 buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane), | 211 buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane), |
| 182 buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane), | 212 crop_width, crop_height, static_cast<libyuv::RotationMode>( |
| 183 width, height); | 213 capturer_->apply_rotation() ? rotation : 0)); |
| 184 AsyncCapturerInvoke("OnIncomingFrame", | 214 |
| 185 &webrtc::AndroidVideoCapturer::OnIncomingFrame, | 215 if (adapted_width != rotated_width || adapted_height != rotated_height) { |
| 186 buffer, rotation, timestamp_ns); | 216 // TODO(nisse): Use buffer_pool_ here? |
|
magjed_webrtc  2016/05/18 15:45:30
Yes, use a buffer pool here.
nisse-webrtc  2016/05/18 16:34:17
Two buffer pools it is, then. But it would be nice
| |
| 217 buffer = webrtc::I420Buffer::CropAndScale(buffer, 0, 0, | |
| 218 rotated_width, rotated_height, | |
| 219 adapted_width, adapted_height); | |
| 220 } | |
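One possible shape of the two-pool arrangement discussed in the thread above, sketched under the assumption of a second webrtc::I420BufferPool member (here called post_scale_pool_, a hypothetical name) and plain libyuv scaling; the actual follow-up change may look different.

```cpp
// Sketch only: recycle the post-scale buffers through a second pool instead
// of allocating a fresh I420Buffer on every adapted frame. Assumes a member
//   webrtc::I420BufferPool post_scale_pool_;
// declared alongside buffer_pool_ (hypothetical name).
if (adapted_width != rotated_width || adapted_height != rotated_height) {
  rtc::scoped_refptr<webrtc::I420Buffer> scaled =
      post_scale_pool_.CreateBuffer(adapted_width, adapted_height);
  libyuv::I420Scale(
      buffer->data(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
      buffer->data(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
      buffer->data(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
      rotated_width, rotated_height,
      scaled->MutableData(webrtc::kYPlane), scaled->stride(webrtc::kYPlane),
      scaled->MutableData(webrtc::kUPlane), scaled->stride(webrtc::kUPlane),
      scaled->MutableData(webrtc::kVPlane), scaled->stride(webrtc::kVPlane),
      adapted_width, adapted_height, libyuv::kFilterBox);
  buffer = scaled;
}
```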
| 221 // TODO(nisse): Use microsecond time instead. | |
| 222 capturer_->OnFrame(cricket::WebRtcVideoFrame( | |
| 223 buffer, timestamp_ns, | |
| 224 capturer_->apply_rotation() | |
| 225 ? webrtc::kVideoRotation_0 | |
| 226 : static_cast<webrtc::VideoRotation>(rotation)), | |
| 227 width, height); | |
| 187 } | 228 } |
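The "Swap U and V" trick above is the standard way to feed NV21 (VU-interleaved) data through libyuv's NV12 routines. A minimal standalone sketch for a fixed 90-degree rotation, with hypothetical buffer handling and names:

```cpp
#include <cstdint>
#include <vector>
#include "libyuv/rotate.h"

// Minimal sketch: rotate an NV21 frame by 90 degrees and convert it to I420
// in one pass. NV21 stores V before U, so libyuv's "U" output is pointed at
// the destination V plane (and vice versa) to get correctly ordered I420.
// Assumes even width/height; names here are illustrative only.
void Nv21ToI420Rotate90(const uint8_t* nv21, int width, int height,
                        std::vector<uint8_t>* i420_out) {
  const int dst_width = height;   // a 90-degree rotation swaps the dimensions
  const int dst_height = width;
  i420_out->resize(dst_width * dst_height * 3 / 2);
  uint8_t* dst_y = i420_out->data();
  uint8_t* dst_u = dst_y + dst_width * dst_height;
  uint8_t* dst_v = dst_u + dst_width * dst_height / 4;
  const uint8_t* src_y = nv21;
  const uint8_t* src_vu = nv21 + width * height;
  libyuv::NV12ToI420Rotate(src_y, width,
                           src_vu, width,
                           dst_y, dst_width,
                           dst_v, dst_width / 2,  // swapped: NV21, not NV12
                           dst_u, dst_width / 2,
                           width, height, libyuv::kRotate90);
}
```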
| 188 | 229 |
| 189 void AndroidVideoCapturerJni::OnTextureFrame(int width, | 230 void AndroidVideoCapturerJni::OnTextureFrame(int width, |
| 190 int height, | 231 int height, |
| 191 int rotation, | 232 int rotation, |
| 192 int64_t timestamp_ns, | 233 int64_t timestamp_ns, |
| 193 const NativeHandleImpl& handle) { | 234 const NativeHandleImpl& handle) { |
| 194 rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer( | 235 RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 || |
| 195 surface_texture_helper_->CreateTextureFrame(width, height, handle)); | 236 rotation == 270); |
| 237 rtc::CritScope cs(&capturer_lock_); | |
| 196 | 238 |
| 197 AsyncCapturerInvoke("OnIncomingFrame", | 239 int adapted_width; |
| 198 &webrtc::AndroidVideoCapturer::OnIncomingFrame, | 240 int adapted_height; |
| 199 buffer, rotation, timestamp_ns); | 241 int crop_width; |
| 242 int crop_height; | |
| 243 int crop_x; | |
| 244 int crop_y; | |
| 245 | |
| 246 if (!capturer_->AdaptFrame(width, height, &adapted_width, &adapted_height, | |
| 247 &crop_width, &crop_height, &crop_x, &crop_y)) { | |
| 248 return; | |
| 249 } | |
| 250 | |
| 251 Matrix matrix = handle.sampling_matrix; | |
| 252 | |
| 253 matrix.Crop(crop_width / static_cast<float>(width), | |
| 254 crop_height / static_cast<float>(height), | |
| 255 crop_x / static_cast<float>(width), | |
| 256 crop_y / static_cast<float>(height)); | |
| 257 | |
| 258 if (capturer_->apply_rotation()) { | |
| 259 if (rotation == webrtc::kVideoRotation_90 || | |
| 260 rotation == webrtc::kVideoRotation_270) { | |
| 261 std::swap(adapted_width, adapted_height); | |
| 262 } | |
| 263 matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation)); | |
| 264 } | |
| 265 | |
| 266 // TODO(nisse): Use microsecond time instead. | |
| 267 capturer_->OnFrame( | |
| 268 cricket::WebRtcVideoFrame( | |
| 269 surface_texture_helper_->CreateTextureFrame( | |
| 270 adapted_width, adapted_height, | |
| 271 NativeHandleImpl(handle.oes_texture_id, matrix)), | |
| 272 timestamp_ns, capturer_->apply_rotation() | |
| 273 ? webrtc::kVideoRotation_0 | |
| 274 : static_cast<webrtc::VideoRotation>(rotation)), | |
| 275 width, height); | |
| 200 } | 276 } |
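For the texture path, the crop is applied in normalized texture coordinates rather than on pixel data; conceptually, Matrix::Crop composes the sampling matrix with a scale by the width/height fractions and a translation by the x/y offset fractions, so only the cropped region is sampled. A tiny sketch of just the fraction arithmetic; the struct and function below are illustrative and not part of NativeHandleImpl:

```cpp
// Illustrative only: the crop rectangle expressed as [0,1] texture fractions,
// in the order the code above passes them to Matrix::Crop (width fraction,
// height fraction, x offset fraction, y offset fraction).
struct TextureCropFractions {
  float width_frac;
  float height_frac;
  float x_frac;
  float y_frac;
};

inline TextureCropFractions ToTextureFractions(int width, int height,
                                               int crop_x, int crop_y,
                                               int crop_width,
                                               int crop_height) {
  return {crop_width / static_cast<float>(width),
          crop_height / static_cast<float>(height),
          crop_x / static_cast<float>(width),
          crop_y / static_cast<float>(height)};
}
```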
| 201 | 277 |
| 202 void AndroidVideoCapturerJni::OnOutputFormatRequest(int width, | 278 void AndroidVideoCapturerJni::OnOutputFormatRequest(int width, |
| 203 int height, | 279 int height, |
| 204 int fps) { | 280 int fps) { |
| 205 AsyncCapturerInvoke("OnOutputFormatRequest", | 281 AsyncCapturerInvoke("OnOutputFormatRequest", |
| 206 &webrtc::AndroidVideoCapturer::OnOutputFormatRequest, | 282 &webrtc::AndroidVideoCapturer::OnOutputFormatRequest, |
| 207 width, height, fps); | 283 width, height, fps); |
| 208 } | 284 } |
| 209 | 285 |
| (...skipping 29 matching lines...) | |
| 239 | 315 |
| 240 JOW(void, VideoCapturer_00024NativeObserver_nativeOnOutputFormatRequest) | 316 JOW(void, VideoCapturer_00024NativeObserver_nativeOnOutputFormatRequest) |
| 241 (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height, | 317 (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height, |
| 242 jint j_fps) { | 318 jint j_fps) { |
| 243 LOG(LS_INFO) << "NativeObserver_nativeOnOutputFormatRequest"; | 319 LOG(LS_INFO) << "NativeObserver_nativeOnOutputFormatRequest"; |
| 244 reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnOutputFormatRequest( | 320 reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnOutputFormatRequest( |
| 245 j_width, j_height, j_fps); | 321 j_width, j_height, j_fps); |
| 246 } | 322 } |
| 247 | 323 |
| 248 } // namespace webrtc_jni | 324 } // namespace webrtc_jni |
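A note on the JOW glue used above: the macro (defined in jni_helpers.h) is assumed to expand roughly as below, which is why the exported name carries the "_00024" escape for the '$' in the nested Java class VideoCapturer$NativeObserver; the real definition may differ in detail.

```cpp
#include <jni.h>

// Assumed shape of the export macro (see jni_helpers.h for the real one):
// the token pasted after Java_org_webrtc_ must match the JNI-mangled name of
// the Java class and native method, with '$' escaped as _00024.
#define JOW(rettype, name) \
  extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_##name
```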