OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. | 2 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license | 4 * Use of this source code is governed by a BSD-style license |
5 * that can be found in the LICENSE file in the root of the source | 5 * that can be found in the LICENSE file in the root of the source |
6 * tree. An additional intellectual property rights grant can be found | 6 * tree. An additional intellectual property rights grant can be found |
7 * in the file PATENTS. All contributing project authors may | 7 * in the file PATENTS. All contributing project authors may |
8 * be found in the AUTHORS file in the root of the source tree. | 8 * be found in the AUTHORS file in the root of the source tree. |
9 */ | 9 */ |
10 | 10 |
11 #include "webrtc/api/androidvideotracksource.h" | 11 #include "webrtc/api/androidvideotracksource.h" |
12 | 12 |
13 #include <utility> | 13 #include <utility> |
14 | 14 |
15 #include "third_party/libyuv/include/libyuv/rotate.h" | |
16 | |
17 namespace webrtc { | 15 namespace webrtc { |
18 | 16 |
19 AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread, | 17 AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread, |
20 JNIEnv* jni, | 18 JNIEnv* jni, |
21 jobject j_egl_context, | 19 jobject j_egl_context, |
22 bool is_screencast) | 20 bool is_screencast) |
23 : signaling_thread_(signaling_thread), | 21 : signaling_thread_(signaling_thread), |
24 surface_texture_helper_(webrtc_jni::SurfaceTextureHelper::create( | 22 surface_texture_helper_(webrtc_jni::SurfaceTextureHelper::create( |
25 jni, | 23 jni, |
26 "Camera SurfaceTextureHelper", | 24 "Camera SurfaceTextureHelper", |
27 j_egl_context)), | 25 j_egl_context)), |
28 is_screencast_(is_screencast) { | 26 is_screencast_(is_screencast) { |
29 LOG(LS_INFO) << "AndroidVideoTrackSource ctor"; | 27 LOG(LS_INFO) << "AndroidVideoTrackSource ctor"; |
30 worker_thread_checker_.DetachFromThread(); | |
31 camera_thread_checker_.DetachFromThread(); | 28 camera_thread_checker_.DetachFromThread(); |
32 } | 29 } |
33 | 30 |
34 bool AndroidVideoTrackSource::GetStats(AndroidVideoTrackSource::Stats* stats) { | |
35 rtc::CritScope lock(&stats_crit_); | |
36 | |
37 if (!stats_) { | |
38 return false; | |
39 } | |
40 | |
41 *stats = *stats_; | |
42 return true; | |
43 } | |
44 | |
45 void AndroidVideoTrackSource::SetState(SourceState state) { | 31 void AndroidVideoTrackSource::SetState(SourceState state) { |
46 if (rtc::Thread::Current() != signaling_thread_) { | 32 if (rtc::Thread::Current() != signaling_thread_) { |
47 invoker_.AsyncInvoke<void>( | 33 invoker_.AsyncInvoke<void>( |
48 RTC_FROM_HERE, signaling_thread_, | 34 RTC_FROM_HERE, signaling_thread_, |
49 rtc::Bind(&AndroidVideoTrackSource::SetState, this, state)); | 35 rtc::Bind(&AndroidVideoTrackSource::SetState, this, state)); |
50 return; | 36 return; |
51 } | 37 } |
52 | 38 |
53 if (state_ != state) { | 39 if (state_ != state) { |
54 state_ = state; | 40 state_ = state; |
55 FireOnChanged(); | 41 FireOnChanged(); |
56 } | 42 } |
57 } | 43 } |
58 | 44 |
59 void AndroidVideoTrackSource::AddOrUpdateSink( | |
60 rtc::VideoSinkInterface<cricket::VideoFrame>* sink, | |
61 const rtc::VideoSinkWants& wants) { | |
62 RTC_DCHECK(worker_thread_checker_.CalledOnValidThread()); | |
63 | |
64 broadcaster_.AddOrUpdateSink(sink, wants); | |
65 OnSinkWantsChanged(broadcaster_.wants()); | |
66 } | |
67 | |
68 void AndroidVideoTrackSource::RemoveSink( | |
69 rtc::VideoSinkInterface<cricket::VideoFrame>* sink) { | |
70 RTC_DCHECK(worker_thread_checker_.CalledOnValidThread()); | |
71 | |
72 broadcaster_.RemoveSink(sink); | |
73 OnSinkWantsChanged(broadcaster_.wants()); | |
74 } | |
75 | |
76 void AndroidVideoTrackSource::OnSinkWantsChanged( | |
77 const rtc::VideoSinkWants& wants) { | |
78 { | |
79 rtc::CritScope lock(&apply_rotation_crit_); | |
80 apply_rotation_ = wants.rotation_applied; | |
81 } | |
82 | |
83 video_adapter_.OnResolutionRequest(wants.max_pixel_count, | |
84 wants.max_pixel_count_step_up); | |
85 } | |
86 | |
87 void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data, | 45 void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data, |
88 int length, | 46 int length, |
89 int width, | 47 int width, |
90 int height, | 48 int height, |
91 int rotation, | 49 int rotation, |
92 int64_t timestamp_ns) { | 50 int64_t timestamp_ns) { |
93 RTC_DCHECK(camera_thread_checker_.CalledOnValidThread()); | 51 RTC_DCHECK(camera_thread_checker_.CalledOnValidThread()); |
94 RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 || | 52 RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 || |
95 rotation == 270); | 53 rotation == 270); |
96 | 54 |
| 55 int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec; |
| 56 int64_t translated_camera_time_us = |
| 57 timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros()); |
| 58 |
97 int adapted_width; | 59 int adapted_width; |
98 int adapted_height; | 60 int adapted_height; |
99 int crop_width; | 61 int crop_width; |
100 int crop_height; | 62 int crop_height; |
101 int crop_x; | 63 int crop_x; |
102 int crop_y; | 64 int crop_y; |
103 int64_t translated_camera_time_us; | |
104 | 65 |
105 if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec, | 66 if (!AdaptFrame(width, height, camera_time_us, |
106 &adapted_width, &adapted_height, &crop_width, &crop_height, | 67 &adapted_width, &adapted_height, &crop_width, &crop_height, |
107 &crop_x, &crop_y, &translated_camera_time_us)) { | 68 &crop_x, &crop_y)) { |
108 return; | 69 return; |
109 } | 70 } |
110 | 71 |
111 const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data); | 72 const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data); |
112 const uint8_t* uv_plane = y_plane + width * height; | 73 const uint8_t* uv_plane = y_plane + width * height; |
113 const int uv_width = (width + 1) / 2; | 74 const int uv_width = (width + 1) / 2; |
114 | 75 |
115 RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2)); | 76 RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2)); |
116 | 77 |
117 // Can only crop at even pixels. | 78 // Can only crop at even pixels. |
118 crop_x &= ~1; | 79 crop_x &= ~1; |
119 crop_y &= ~1; | 80 crop_y &= ~1; |
120 // Crop just by modifying pointers. | 81 // Crop just by modifying pointers. |
121 y_plane += width * crop_y + crop_x; | 82 y_plane += width * crop_y + crop_x; |
122 uv_plane += uv_width * crop_y + crop_x; | 83 uv_plane += uv_width * crop_y + crop_x; |
123 | 84 |
124 rtc::scoped_refptr<webrtc::I420Buffer> buffer = | 85 rtc::scoped_refptr<webrtc::I420Buffer> buffer = |
125 buffer_pool_.CreateBuffer(adapted_width, adapted_height); | 86 buffer_pool_.CreateBuffer(adapted_width, adapted_height); |
126 | 87 |
127 nv12toi420_scaler_.NV12ToI420Scale( | 88 nv12toi420_scaler_.NV12ToI420Scale( |
128 y_plane, width, | 89 y_plane, width, |
129 uv_plane, uv_width * 2, | 90 uv_plane, uv_width * 2, |
130 crop_width, crop_height, | 91 crop_width, crop_height, |
131 buffer->MutableDataY(), buffer->StrideY(), | 92 buffer->MutableDataY(), buffer->StrideY(), |
132 // Swap U and V, since we have NV21, not NV12. | 93 // Swap U and V, since we have NV21, not NV12. |
133 buffer->MutableDataV(), buffer->StrideV(), | 94 buffer->MutableDataV(), buffer->StrideV(), |
134 buffer->MutableDataU(), buffer->StrideU(), | 95 buffer->MutableDataU(), buffer->StrideU(), |
135 buffer->width(), buffer->height()); | 96 buffer->width(), buffer->height()); |
136 | 97 |
137 // Applying rotation is only supported for legacy reasons, and the performance | |
138 // for this path is not critical. | |
139 rtc::CritScope lock(&apply_rotation_crit_); | |
140 if (apply_rotation_ && rotation != 0) { | |
141 rtc::scoped_refptr<I420Buffer> rotated_buffer = | |
142 rotation == 180 ? I420Buffer::Create(buffer->width(), buffer->height()) | |
143 : I420Buffer::Create(buffer->height(), buffer->width()); | |
144 | |
145 libyuv::I420Rotate( | |
146 buffer->DataY(), buffer->StrideY(), | |
147 buffer->DataU(), buffer->StrideU(), | |
148 buffer->DataV(), buffer->StrideV(), | |
149 rotated_buffer->MutableDataY(), rotated_buffer->StrideY(), | |
150 rotated_buffer->MutableDataU(), rotated_buffer->StrideU(), | |
151 rotated_buffer->MutableDataV(), rotated_buffer->StrideV(), | |
152 buffer->width(), buffer->height(), | |
153 static_cast<libyuv::RotationMode>(rotation)); | |
154 | |
155 buffer = rotated_buffer; | |
156 } | |
157 | |
158 OnFrame(cricket::WebRtcVideoFrame( | 98 OnFrame(cricket::WebRtcVideoFrame( |

perkj_webrtc (2016/09/20 12:26:09):
How about we let OnFrame take VideoFrameBuffer as

nisse-webrtc (2016/09/20 13:17:47):
And rotation and timestamp arguments too? I think
159 buffer, | 99 buffer, static_cast<webrtc::VideoRotation>(rotation), |
160 apply_rotation_ ? webrtc::kVideoRotation_0 | 100 translated_camera_time_us, 0)); |
161 : static_cast<webrtc::VideoRotation>(rotation), | |
162 translated_camera_time_us, 0), | |
163 width, height); | |
164 } | 101 } |
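The "crop at even pixels" and "crop just by modifying pointers" comments above are easy to get wrong for NV21, where the chroma plane is interleaved V/U byte pairs at half resolution. Below is a standalone sketch of the same pointer math; `Nv21Crop` and `CropNv21` are hypothetical names for illustration, not part of this CL.

```cpp
#include <cstdint>
#include <cstdio>

// Standalone illustration of the pointer-based NV21 crop above. NV21 is a
// full-resolution Y plane followed by one half-resolution plane of
// interleaved V/U byte pairs, so the crop origin must be even in both x and
// y to keep each V/U pair and each chroma row intact.
struct Nv21Crop {
  const uint8_t* y;   // top-left of the cropped Y region
  const uint8_t* vu;  // top-left of the cropped interleaved V/U region
};

Nv21Crop CropNv21(const uint8_t* data, int width, int height,
                  int crop_x, int crop_y) {
  crop_x &= ~1;  // round down to even, as the code above does
  crop_y &= ~1;
  const uint8_t* y_plane = data;
  const uint8_t* vu_plane = data + width * height;
  const int uv_width = (width + 1) / 2;  // V/U pairs per chroma row
  // Chroma row crop_y/2 starts at byte (crop_y/2) * (uv_width * 2), which
  // simplifies to uv_width * crop_y; pair crop_x/2 starts at byte crop_x.
  return {y_plane + width * crop_y + crop_x,
          vu_plane + uv_width * crop_y + crop_x};
}

int main() {
  static uint8_t frame[640 * 480 * 3 / 2];
  Nv21Crop c = CropNv21(frame, 640, 480, 101, 51);  // rounds to (100, 50)
  std::printf("y offset = %td, vu offset = %td\n",
              c.y - frame, c.vu - (frame + 640 * 480));
  return 0;
}
```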
165 | 102 |
166 void AndroidVideoTrackSource::OnTextureFrameCaptured( | 103 void AndroidVideoTrackSource::OnTextureFrameCaptured( |
167 int width, | 104 int width, |
168 int height, | 105 int height, |
169 int rotation, | 106 int rotation, |
170 int64_t timestamp_ns, | 107 int64_t timestamp_ns, |
171 const webrtc_jni::NativeHandleImpl& handle) { | 108 const webrtc_jni::NativeHandleImpl& handle) { |
172 RTC_DCHECK(camera_thread_checker_.CalledOnValidThread()); | 109 RTC_DCHECK(camera_thread_checker_.CalledOnValidThread()); |
173 RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 || | 110 RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 || |
174 rotation == 270); | 111 rotation == 270); |
175 | 112 |
| 113 int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec; |
| 114 int64_t translated_camera_time_us = |
| 115 timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros()); |
| 116 |
176 int adapted_width; | 117 int adapted_width; |
177 int adapted_height; | 118 int adapted_height; |
178 int crop_width; | 119 int crop_width; |
179 int crop_height; | 120 int crop_height; |
180 int crop_x; | 121 int crop_x; |
181 int crop_y; | 122 int crop_y; |
182 int64_t translated_camera_time_us; | |
183 | 123 |
184 if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec, | 124 if (!AdaptFrame(width, height, camera_time_us, |
185 &adapted_width, &adapted_height, &crop_width, &crop_height, | 125 &adapted_width, &adapted_height, &crop_width, &crop_height, |
186 &crop_x, &crop_y, &translated_camera_time_us)) { | 126 &crop_x, &crop_y)) { |
187 surface_texture_helper_->ReturnTextureFrame(); | 127 surface_texture_helper_->ReturnTextureFrame(); |
188 return; | 128 return; |
189 } | 129 } |
190 | 130 |
191 webrtc_jni::Matrix matrix = handle.sampling_matrix; | 131 webrtc_jni::Matrix matrix = handle.sampling_matrix; |
192 | 132 |
193 matrix.Crop(crop_width / static_cast<float>(width), | 133 matrix.Crop(crop_width / static_cast<float>(width), |
194 crop_height / static_cast<float>(height), | 134 crop_height / static_cast<float>(height), |
195 crop_x / static_cast<float>(width), | 135 crop_x / static_cast<float>(width), |
196 crop_y / static_cast<float>(height)); | 136 crop_y / static_cast<float>(height)); |
197 | 137 |
198 rtc::CritScope lock(&apply_rotation_crit_); | 138 // Make a local copy, since value of apply_rotation() may change |
199 if (apply_rotation_) { | 139 // under our feet. |
| 140 bool do_rotate = apply_rotation(); |
| 141 |
| 142 if (do_rotate) { |
200 if (rotation == webrtc::kVideoRotation_90 || | 143 if (rotation == webrtc::kVideoRotation_90 || |
201 rotation == webrtc::kVideoRotation_270) { | 144 rotation == webrtc::kVideoRotation_270) { |
202 std::swap(adapted_width, adapted_height); | 145 std::swap(adapted_width, adapted_height); |
203 } | 146 } |
204 matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation)); | 147 matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation)); |
205 } | 148 } |
206 | 149 |
207 OnFrame(cricket::WebRtcVideoFrame( | 150 OnFrame(cricket::WebRtcVideoFrame( |
208 surface_texture_helper_->CreateTextureFrame( | 151 surface_texture_helper_->CreateTextureFrame( |
209 adapted_width, adapted_height, | 152 adapted_width, adapted_height, |
210 webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)), | 153 webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)), |
211 apply_rotation_ ? webrtc::kVideoRotation_0 | 154 do_rotate ? webrtc::kVideoRotation_0 |
212 : static_cast<webrtc::VideoRotation>(rotation), | 155 : static_cast<webrtc::VideoRotation>(rotation), |
213 translated_camera_time_us, 0), | 156 translated_camera_time_us, 0)); |
214 width, height); | |
215 } | |
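matrix.Crop() above takes its crop rectangle in normalized texture coordinates, as the four divisions at the call site show. A quick numeric check with hypothetical values (standalone, not WebRTC code): a centered 960x720 region of a 1280x720 frame becomes scale (0.75, 1.0) with offset (0.125, 0.0).

```cpp
#include <cstdio>

// Numeric check of the normalized crop passed to matrix.Crop() above:
// scale and offset are fractions of the full texture size.
int main() {
  const float width = 1280.0f, height = 720.0f;
  const float crop_width = 960.0f, crop_height = 720.0f;
  const float crop_x = (width - crop_width) / 2.0f;    // 160
  const float crop_y = (height - crop_height) / 2.0f;  // 0
  std::printf("Crop(%.3f, %.3f, %.3f, %.3f)\n",
              crop_width / width, crop_height / height,
              crop_x / width, crop_y / height);  // Crop(0.750, 1.000, 0.125, 0.000)
  return 0;
}
```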
216 | |
217 void AndroidVideoTrackSource::OnFrame(const cricket::VideoFrame& frame, | |
218 int width, | |
219 int height) { | |
220 { | |
221 rtc::CritScope lock(&stats_crit_); | |
222 stats_ = rtc::Optional<AndroidVideoTrackSource::Stats>({width, height}); | |
223 } | |
224 | |
225 broadcaster_.OnFrame(frame); | |
226 } | 157 } |
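The perkj/nisse thread above suggests passing the frame's pieces instead of a pre-built cricket::WebRtcVideoFrame. A sketch of what such an overload might look like, assembled from the constructor arguments already visible at the call sites; the signature is an assumption based on the comments, not an API this CL defines.

```cpp
// Hypothetical overload shape from the review thread; the CL as written
// still constructs a cricket::WebRtcVideoFrame at each call site.
// broadcaster_ is assumed to live wherever the shared sink plumbing moves.
void AndroidVideoTrackSource::OnFrame(
    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
    webrtc::VideoRotation rotation,
    int64_t timestamp_us) {
  broadcaster_.OnFrame(
      cricket::WebRtcVideoFrame(buffer, rotation, timestamp_us, 0));
}
```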
227 | 158 |
228 void AndroidVideoTrackSource::OnOutputFormatRequest(int width, | 159 void AndroidVideoTrackSource::OnOutputFormatRequest(int width, |
229 int height, | 160 int height, |
230 int fps) { | 161 int fps) { |
231 RTC_DCHECK(camera_thread_checker_.CalledOnValidThread()); | 162 RTC_DCHECK(camera_thread_checker_.CalledOnValidThread()); |
232 | 163 |
233 cricket::VideoFormat format(width, height, | 164 cricket::VideoFormat format(width, height, |
234 cricket::VideoFormat::FpsToInterval(fps), 0); | 165 cricket::VideoFormat::FpsToInterval(fps), 0); |
235 video_adapter_.OnOutputFormatRequest(format); | 166 video_adapter()->OnOutputFormatRequest(format); |
236 } | |
237 | |
238 bool AndroidVideoTrackSource::AdaptFrame(int width, | |
239 int height, | |
240 int64_t camera_time_us, | |
241 int* out_width, | |
242 int* out_height, | |
243 int* crop_width, | |
244 int* crop_height, | |
245 int* crop_x, | |
246 int* crop_y, | |
247 int64_t* translated_camera_time_us) { | |
248 RTC_DCHECK(camera_thread_checker_.CalledOnValidThread()); | |
249 | |
250 int64_t system_time_us = rtc::TimeMicros(); | |
251 *translated_camera_time_us = | |
252 timestamp_aligner_.TranslateTimestamp(camera_time_us, system_time_us); | |
253 | |
254 if (!broadcaster_.frame_wanted()) { | |
255 return false; | |
256 } | |
257 | |
258 if (!video_adapter_.AdaptFrameResolution( | |
259 width, height, camera_time_us * rtc::kNumNanosecsPerMicrosec, | |
260 crop_width, crop_height, out_width, out_height)) { | |
261 // VideoAdapter dropped the frame. | |
262 return false; | |
263 } | |
264 *crop_x = (width - *crop_width) / 2; | |
265 *crop_y = (height - *crop_height) / 2; | |
266 | |
267 return true; | |
268 } | 167 } |
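The deleted AdaptFrame() above matches, minus the timestamp translation, the narrower AdaptFrame() the new call sites use (timestamps are now translated up front by the callers). A sketch of that assumed shared helper follows; the class name and members are assumptions, since only the call signature is visible in this CL.

```cpp
// Assumed shape of the shared AdaptFrame() the new code calls: same adapter
// and center-crop logic as the deleted method, without timestamp handling.
bool AdaptedVideoTrackSource::AdaptFrame(int width,
                                         int height,
                                         int64_t camera_time_us,
                                         int* out_width,
                                         int* out_height,
                                         int* crop_width,
                                         int* crop_height,
                                         int* crop_x,
                                         int* crop_y) {
  if (!broadcaster_.frame_wanted()) {
    return false;  // No sink wants a frame right now.
  }
  if (!video_adapter_.AdaptFrameResolution(
          width, height, camera_time_us * rtc::kNumNanosecsPerMicrosec,
          crop_width, crop_height, out_width, out_height)) {
    // VideoAdapter dropped the frame.
    return false;
  }
  // Center the crop rectangle in the captured frame.
  *crop_x = (width - *crop_width) / 2;
  *crop_y = (height - *crop_height) / 2;
  return true;
}
```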
269 | 168 |
270 } // namespace webrtc | 169 } // namespace webrtc |