Chromium Code Reviews

Unified Diff: webrtc/api/androidvideotracksource.cc

Issue 2328333002: New class AdaptedVideoTrackSource. (Closed)
Patch Set: Drop access to apply_rotation lock. Created 4 years, 3 months ago
--- a/webrtc/api/androidvideotracksource.cc
+++ b/webrtc/api/androidvideotracksource.cc
@@ -1,259 +1,191 @@
 /*
  * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
  * tree. An additional intellectual property rights grant can be found
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  */

 #include "webrtc/api/androidvideotracksource.h"

 #include <utility>

 namespace webrtc {

 AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
                                                  JNIEnv* jni,
                                                  jobject j_egl_context,
                                                  bool is_screencast)
     : signaling_thread_(signaling_thread),
       surface_texture_helper_(webrtc_jni::SurfaceTextureHelper::create(
           jni,
           "Camera SurfaceTextureHelper",
           j_egl_context)),
       is_screencast_(is_screencast) {
   LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
-  worker_thread_checker_.DetachFromThread();
   camera_thread_checker_.DetachFromThread();
 }

-bool AndroidVideoTrackSource::GetStats(AndroidVideoTrackSource::Stats* stats) {
-  rtc::CritScope lock(&stats_crit_);
-
-  if (!stats_) {
-    return false;
-  }
-
-  *stats = *stats_;
-  return true;
-}
-
 void AndroidVideoTrackSource::SetState(SourceState state) {
   if (rtc::Thread::Current() != signaling_thread_) {
     invoker_.AsyncInvoke<void>(
         RTC_FROM_HERE, signaling_thread_,
         rtc::Bind(&AndroidVideoTrackSource::SetState, this, state));
     return;
   }

   if (state_ != state) {
     state_ = state;
     FireOnChanged();
   }
 }

-void AndroidVideoTrackSource::AddOrUpdateSink(
-    rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
-    const rtc::VideoSinkWants& wants) {
-  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
-
-  broadcaster_.AddOrUpdateSink(sink, wants);
-  OnSinkWantsChanged(broadcaster_.wants());
-}
-
-void AndroidVideoTrackSource::RemoveSink(
-    rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
-  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
-
-  broadcaster_.RemoveSink(sink);
-  OnSinkWantsChanged(broadcaster_.wants());
-}
-
-void AndroidVideoTrackSource::OnSinkWantsChanged(
-    const rtc::VideoSinkWants& wants) {
-  {
-    rtc::CritScope lock(&apply_rotation_crit_);
-    apply_rotation_ = wants.rotation_applied;
-  }
-
-  video_adapter_.OnResolutionRequest(wants.max_pixel_count,
-                                     wants.max_pixel_count_step_up);
-}
-
 void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
                                                         int length,
                                                         int width,
                                                         int height,
                                                         int rotation,
                                                         int64_t timestamp_ns) {
   RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
   RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
              rotation == 270);

+  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
+  int64_t translated_camera_time_us =
+      timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
+
   int adapted_width;
   int adapted_height;
   int crop_width;
   int crop_height;
   int crop_x;
   int crop_y;
-  int64_t translated_camera_time_us;

-  if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec,
+  if (!AdaptFrame(width, height, camera_time_us,
                   &adapted_width, &adapted_height, &crop_width, &crop_height,
-                  &crop_x, &crop_y, &translated_camera_time_us)) {
+                  &crop_x, &crop_y)) {
     return;
   }

   int rotated_width = crop_width;
   int rotated_height = crop_height;

-  rtc::CritScope lock(&apply_rotation_crit_);
-  if (apply_rotation_ && (rotation == 90 || rotation == 270)) {
+  // Make a local copy, since value of apply_rotation() may change
+  // under our feet.
+
+  // TODO(nisse, magjed): Don't rotate here, leave it to the base
    magjed_webrtc 2016/09/19 09:43:26: You need to rebase this function (I changed it rec…
    nisse-webrtc 2016/09/20 08:23:42: Done, hope I got the rebasing right.
+  // class instead. It's a fallback case and doesn't need to be well
+  // optimized.
+  bool do_rotate = apply_rotation();
+
+  if (do_rotate && (rotation == 90 || rotation == 270)) {
     std::swap(adapted_width, adapted_height);
     std::swap(rotated_width, rotated_height);
   }

   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
       pre_scale_pool_.CreateBuffer(rotated_width, rotated_height);

   const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data);
   const uint8_t* uv_plane = y_plane + width * height;
   int uv_width = (width + 1) / 2;

   RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2));

   // Can only crop at even pixels.
   crop_x &= ~1;
   crop_y &= ~1;

   libyuv::NV12ToI420Rotate(
       y_plane + width * crop_y + crop_x, width,
       uv_plane + uv_width * crop_y + crop_x, width, buffer->MutableDataY(),
       buffer->StrideY(),
       // Swap U and V, since we have NV21, not NV12.
       buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
       buffer->StrideU(), crop_width, crop_height,
-      static_cast<libyuv::RotationMode>(apply_rotation_ ? rotation : 0));
+      static_cast<libyuv::RotationMode>(do_rotate ? rotation : 0));

   if (adapted_width != buffer->width() || adapted_height != buffer->height()) {
     rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
         post_scale_pool_.CreateBuffer(adapted_width, adapted_height));
     scaled_buffer->ScaleFrom(buffer);
     buffer = scaled_buffer;
   }

   OnFrame(cricket::WebRtcVideoFrame(
-              buffer,
-              apply_rotation_ ? webrtc::kVideoRotation_0
-                              : static_cast<webrtc::VideoRotation>(rotation),
+              buffer, do_rotate ? webrtc::kVideoRotation_0
+                                : static_cast<webrtc::VideoRotation>(rotation),
               translated_camera_time_us, 0),
           width, height);
 }

 void AndroidVideoTrackSource::OnTextureFrameCaptured(
     int width,
     int height,
     int rotation,
     int64_t timestamp_ns,
     const webrtc_jni::NativeHandleImpl& handle) {
   RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
   RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
              rotation == 270);

+  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
+  int64_t translated_camera_time_us =
+      timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
+
   int adapted_width;
   int adapted_height;
   int crop_width;
   int crop_height;
   int crop_x;
   int crop_y;
-  int64_t translated_camera_time_us;

-  if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec,
+  if (!AdaptFrame(width, height, camera_time_us,
                   &adapted_width, &adapted_height, &crop_width, &crop_height,
-                  &crop_x, &crop_y, &translated_camera_time_us)) {
+                  &crop_x, &crop_y)) {
     surface_texture_helper_->ReturnTextureFrame();
     return;
   }

   webrtc_jni::Matrix matrix = handle.sampling_matrix;

   matrix.Crop(crop_width / static_cast<float>(width),
               crop_height / static_cast<float>(height),
               crop_x / static_cast<float>(width),
               crop_y / static_cast<float>(height));

-  rtc::CritScope lock(&apply_rotation_crit_);
-  if (apply_rotation_) {
+  // Make a local copy, since value of apply_rotation() may change
+  // under our feet.
+  bool do_rotate = apply_rotation();
+
+  if (do_rotate) {
     if (rotation == webrtc::kVideoRotation_90 ||
         rotation == webrtc::kVideoRotation_270) {
       std::swap(adapted_width, adapted_height);
     }
     matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation));
   }

   OnFrame(cricket::WebRtcVideoFrame(
               surface_texture_helper_->CreateTextureFrame(
                   adapted_width, adapted_height,
                   webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)),
-              apply_rotation_ ? webrtc::kVideoRotation_0
+              do_rotate ? webrtc::kVideoRotation_0
                         : static_cast<webrtc::VideoRotation>(rotation),
               translated_camera_time_us, 0),
           width, height);
 }

-void AndroidVideoTrackSource::OnFrame(const cricket::VideoFrame& frame,
-                                      int width,
-                                      int height) {
-  {
-    rtc::CritScope lock(&stats_crit_);
-    stats_ = rtc::Optional<AndroidVideoTrackSource::Stats>({width, height});
-  }
-
-  broadcaster_.OnFrame(frame);
-}
-
 void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
                                                     int height,
                                                     int fps) {
   RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());

   cricket::VideoFormat format(width, height,
                               cricket::VideoFormat::FpsToInterval(fps), 0);
-  video_adapter_.OnOutputFormatRequest(format);
+  video_adapter()->OnOutputFormatRequest(format);
-}
-
-bool AndroidVideoTrackSource::AdaptFrame(int width,
-                                         int height,
-                                         int64_t camera_time_us,
-                                         int* out_width,
-                                         int* out_height,
-                                         int* crop_width,
-                                         int* crop_height,
-                                         int* crop_x,
-                                         int* crop_y,
-                                         int64_t* translated_camera_time_us) {
-  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
-
-  int64_t system_time_us = rtc::TimeMicros();
-  *translated_camera_time_us =
-      timestamp_aligner_.TranslateTimestamp(camera_time_us, system_time_us);
-
-  if (!broadcaster_.frame_wanted()) {
-    return false;
-  }
-
-  if (!video_adapter_.AdaptFrameResolution(
-          width, height, camera_time_us * rtc::kNumNanosecsPerMicrosec,
-          crop_width, crop_height, out_width, out_height)) {
-    // VideoAdapter dropped the frame.
-    return false;
-  }
-  *crop_x = (width - *crop_width) / 2;
-  *crop_y = (height - *crop_height) / 2;
-
-  return true;
-}
 }

 }  // namespace webrtc
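
Note on the byte-buffer path above: OnByteBufferFrameCaptured receives NV21 frames from the Android camera, forces the crop offsets to even values (chroma is subsampled 2x2), and converts to I420 with libyuv::NV12ToI420Rotate, passing the U and V destination planes in swapped order because NV21 interleaves V before U. Below is a minimal standalone sketch of that conversion; it is not code from this CL. The I420Frame struct and the Nv21ToI420 helper are illustrative assumptions, and the libyuv include path may vary by checkout.

#include <cstdint>
#include <vector>

#include "libyuv/rotate.h"  // NV12ToI420Rotate; include path may vary.

// Hypothetical helper type, not part of the CL: one tightly packed I420 frame.
struct I420Frame {
  int width = 0;
  int height = 0;
  std::vector<uint8_t> y, u, v;
};

// Convert a tightly packed NV21 frame (width * height * 3 / 2 bytes) to I420,
// optionally rotating it upright, mirroring what OnByteBufferFrameCaptured does
// (without the cropping, scaling and buffer pooling).
I420Frame Nv21ToI420(const uint8_t* nv21, int width, int height,
                     int rotation /* 0, 90, 180 or 270 */) {
  const uint8_t* y_plane = nv21;
  const uint8_t* vu_plane = nv21 + width * height;  // Interleaved V/U pairs.

  // Rotation by 90 or 270 degrees swaps the output dimensions.
  const bool swap_dims = (rotation == 90 || rotation == 270);
  I420Frame out;
  out.width = swap_dims ? height : width;
  out.height = swap_dims ? width : height;
  const int chroma_width = (out.width + 1) / 2;
  const int chroma_height = (out.height + 1) / 2;
  out.y.resize(out.width * out.height);
  out.u.resize(chroma_width * chroma_height);
  out.v.resize(chroma_width * chroma_height);

  // NV21 stores V before U, so the V destination is passed in the "U"
  // argument slot and vice versa, exactly as the reviewed code does.
  libyuv::NV12ToI420Rotate(
      y_plane, width, vu_plane, width,
      out.y.data(), out.width,
      out.v.data(), chroma_width,  // dst_u slot receives the V plane.
      out.u.data(), chroma_width,  // dst_v slot receives the U plane.
      width, height,
      static_cast<libyuv::RotationMode>(rotation));
  return out;
}

If cropping were added, the source pointers and the width/height arguments would be offset first, with crop_x and crop_y rounded down to even values (as in the CL) so the subsampled chroma plane stays aligned.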