Chromium Code Reviews
/*
 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/api/androidvideotracksource.h"

namespace webrtc {

AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
                                                 JNIEnv* jni,
                                                 jobject j_egl_context)
    : signaling_thread_(signaling_thread),
      surface_texture_helper_(webrtc_jni::SurfaceTextureHelper::create(
          jni,
          "Camera SurfaceTextureHelper",
          j_egl_context)) {
  LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
  worker_thread_checker_.DetachFromThread();
  camera_thread_checker_.DetachFromThread();
}

bool AndroidVideoTrackSource::GetStats(AndroidVideoTrackSource::Stats* stats) {
  rtc::CritScope lock(&stats_crit_);

  if (!stats_) {
    return false;
  }

  *stats = *stats_;
  return true;
}

void AndroidVideoTrackSource::SetState(SourceState state) {
  if (rtc::Thread::Current() != signaling_thread_) {
    invoker_.AsyncInvoke<void>(
        RTC_FROM_HERE, signaling_thread_,
        rtc::Bind(&AndroidVideoTrackSource::SetState, this, state));
    return;
  }

  if (state_ != state) {
    state_ = state;
    FireOnChanged();
  }
}

void AndroidVideoTrackSource::AddOrUpdateSink(
    rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
    const rtc::VideoSinkWants& wants) {
  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());

  broadcaster_.AddOrUpdateSink(sink, wants);
  OnSinkWantsChanged(broadcaster_.wants());
}

void AndroidVideoTrackSource::RemoveSink(
    rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
  RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());

  broadcaster_.RemoveSink(sink);
  OnSinkWantsChanged(broadcaster_.wants());
}

void AndroidVideoTrackSource::OnSinkWantsChanged(
    const rtc::VideoSinkWants& wants) {
  rtc::CritScope lock(&apply_rotation_crit_);

  apply_rotation_ = wants.rotation_applied;
|
magjed_webrtc (2016/07/19 14:42:28):
Just lock this statement, and not video_adapter_.OnResolutionRequest().

sakal (2016/07/20 08:06:28):
Done.

  video_adapter_.OnResolutionRequest(wants.max_pixel_count,
                                     wants.max_pixel_count_step_up);
}
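
For reference, a minimal sketch of the narrowed lock that the thread above asks for; this is only an illustration based on the members visible in this file, not the patch set that actually landed:

// Sketch: the critical section covers only the |apply_rotation_| write;
// video_adapter_.OnResolutionRequest() runs outside the lock.
void AndroidVideoTrackSource::OnSinkWantsChanged(
    const rtc::VideoSinkWants& wants) {
  {
    rtc::CritScope lock(&apply_rotation_crit_);
    apply_rotation_ = wants.rotation_applied;
  }

  video_adapter_.OnResolutionRequest(wants.max_pixel_count,
                                     wants.max_pixel_count_step_up);
}

Keeping the video adapter call outside the lock shortens the window in which the capture path can block on |apply_rotation_crit_|.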

void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
                                                        int length,
                                                        int width,
                                                        int height,
                                                        int rotation,
                                                        int64_t timestamp_ns) {
  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
  rtc::CritScope lock(&apply_rotation_crit_);
|
magjed_webrtc (2016/07/19 14:42:28):
Move it as close as possible to the first use of |apply_rotation_|.

sakal (2016/07/20 08:06:28):
Done.

  RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
             rotation == 270);

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

  if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec,
                  &adapted_width, &adapted_height, &crop_width, &crop_height,
                  &crop_x, &crop_y, &translated_camera_time_us)) {
    return;
  }

  int rotated_width = crop_width;
  int rotated_height = crop_height;

  if (apply_rotation_ && (rotation == 90 || rotation == 270)) {
    std::swap(adapted_width, adapted_height);
    std::swap(rotated_width, rotated_height);
  }

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      pre_scale_pool_.CreateBuffer(rotated_width, rotated_height);

  const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data);
  const uint8_t* uv_plane = y_plane + width * height;
  int uv_width = (width + 1) / 2;

  RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2));

  // Can only crop at even pixels.
  crop_x &= ~1;
  crop_y &= ~1;

  libyuv::NV12ToI420Rotate(
      y_plane + width * crop_y + crop_x, width,
      uv_plane + uv_width * crop_y + crop_x, width, buffer->MutableDataY(),
      buffer->StrideY(),
      // Swap U and V, since we have NV21, not NV12.
      buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
      buffer->StrideU(), crop_width, crop_height,
      static_cast<libyuv::RotationMode>(apply_rotation_ ? rotation : 0));

  if (adapted_width != buffer->width() || adapted_height != buffer->height()) {
    rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
        post_scale_pool_.CreateBuffer(adapted_width, adapted_height));
    scaled_buffer->ScaleFrom(buffer);
    buffer = scaled_buffer;
  }

  OnFrame(cricket::WebRtcVideoFrame(
              buffer,
              apply_rotation_ ? webrtc::kVideoRotation_0
                              : static_cast<webrtc::VideoRotation>(rotation),
              translated_camera_time_us),
          width, height);
}
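
Along the same lines, one possible shape for the change acknowledged in the thread inside this function: the lock is taken only around the first read of |apply_rotation_| and the value is copied into a local (a name used here purely for illustration), so the conversion below runs unlocked. A sketch, not the landed code:

// Sketch: acquire the lock just before |apply_rotation_| is first needed and
// snapshot its value; the rest of the function uses the local copy.
bool apply_rotation;
{
  rtc::CritScope lock(&apply_rotation_crit_);
  apply_rotation = apply_rotation_;
}

if (apply_rotation && (rotation == 90 || rotation == 270)) {
  std::swap(adapted_width, adapted_height);
  std::swap(rotated_width, rotated_height);
}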

void AndroidVideoTrackSource::OnTextureFrameCaptured(
    int width,
    int height,
    int rotation,
    int64_t timestamp_ns,
    const webrtc_jni::NativeHandleImpl& handle) {
  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
  rtc::CritScope lock(&apply_rotation_crit_);

  RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
             rotation == 270);

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

  if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec,
                  &adapted_width, &adapted_height, &crop_width, &crop_height,
                  &crop_x, &crop_y, &translated_camera_time_us)) {
    surface_texture_helper_->ReturnTextureFrame();
    return;
  }

  webrtc_jni::Matrix matrix = handle.sampling_matrix;

  matrix.Crop(crop_width / static_cast<float>(width),
              crop_height / static_cast<float>(height),
              crop_x / static_cast<float>(width),
              crop_y / static_cast<float>(height));

  if (apply_rotation_) {
    if (rotation == webrtc::kVideoRotation_90 ||
        rotation == webrtc::kVideoRotation_270) {
      std::swap(adapted_width, adapted_height);
    }
    matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation));
  }

  OnFrame(cricket::WebRtcVideoFrame(
              surface_texture_helper_->CreateTextureFrame(
                  adapted_width, adapted_height,
                  webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)),
              apply_rotation_ ? webrtc::kVideoRotation_0
                              : static_cast<webrtc::VideoRotation>(rotation),
              translated_camera_time_us),
          width, height);
}

void AndroidVideoTrackSource::OnFrame(const cricket::VideoFrame& frame,
                                      int width,
                                      int height) {
  {
    rtc::CritScope lock(&stats_crit_);
    stats_ = rtc::Optional<AndroidVideoTrackSource::Stats>({width, height});
  }

  broadcaster_.OnFrame(frame);
}

void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
                                                    int height,
                                                    int fps) {
  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());

  cricket::VideoFormat format(width, height,
                              cricket::VideoFormat::FpsToInterval(fps), 0);
  video_adapter_.OnOutputFormatRequest(format);
}

bool AndroidVideoTrackSource::AdaptFrame(int width,
                                         int height,
                                         int64_t camera_time_us,
                                         int* out_width,
                                         int* out_height,
                                         int* crop_width,
                                         int* crop_height,
                                         int* crop_x,
                                         int* crop_y,
                                         int64_t* translated_camera_time_us) {
  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
  RTC_CHECK(translated_camera_time_us != nullptr);
|
magjed_webrtc (2016/07/19 14:42:28):
No need for this, it will crash regardless.

sakal (2016/07/20 08:06:28):
I think crashing is undefined behavior but sure it

  int64_t system_time_us = rtc::TimeMicros();

  int64_t offset_us =
      timestamp_aligner_.UpdateOffset(camera_time_us, system_time_us);
|
magjed_webrtc (2016/07/19 14:42:28):
I don't understand why we have to make two calls to the timestamp aligner here.

sakal (2016/07/20 08:06:28):
Acknowledged.

  if (!broadcaster_.frame_wanted()) {
    return false;
  }

  if (!video_adapter_.AdaptFrameResolution(
          width, height, camera_time_us * rtc::kNumNanosecsPerMicrosec,
          crop_width, crop_height, out_width, out_height)) {
    // VideoAdapter dropped the frame.
    return false;
  }
  *crop_x = (width - *crop_width) / 2;
  *crop_y = (height - *crop_height) / 2;

  *translated_camera_time_us = timestamp_aligner_.ClipTimestamp(
      camera_time_us + offset_us, system_time_us);
  return true;
}
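
On the question above about the two aligner calls: a hypothetical free-standing helper (not part of the reviewed patch, and not an existing method of the aligner class) that folds UpdateOffset() and ClipTimestamp() into a single translation step could look roughly like this. The template parameter stands in for the type of |timestamp_aligner_|, which is not visible in this file:

// Hypothetical helper: performs the offset update and the clipping step in
// one call, returning the translated camera timestamp in system time.
template <typename Aligner>
int64_t TranslateCameraTime(Aligner* aligner,
                            int64_t camera_time_us,
                            int64_t system_time_us) {
  const int64_t offset_us =
      aligner->UpdateOffset(camera_time_us, system_time_us);
  return aligner->ClipTimestamp(camera_time_us + offset_us, system_time_us);
}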

}  // namespace webrtc