OLD | NEW |
(Empty) | |
| 1 /* |
| 2 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. |
| 3 * |
| 4 * Use of this source code is governed by a BSD-style license |
| 5 * that can be found in the LICENSE file in the root of the source |
| 6 * tree. An additional intellectual property rights grant can be found |
| 7 * in the file PATENTS. All contributing project authors may |
| 8 * be found in the AUTHORS file in the root of the source tree. |
| 9 */ |
| 10 |
| 11 #include "webrtc/api/androidvideotracksource.h" |
| 12 |
| 13 #include <utility> |
| 14 |
| 15 namespace webrtc { |
| 16 |
// Constructs the track source. The SurfaceTextureHelper is created eagerly
// here so that texture frames can be received as soon as a capturer attaches.
//
// |signaling_thread| - thread on which state-change notifications
//     (FireOnChanged) must be delivered; retained, not owned.
// |jni| - JNI environment of the constructing thread, used only while
//     creating the SurfaceTextureHelper.
// |j_egl_context| - Java-side EGL context shared with the camera's texture
//     producer.
AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
                                                 JNIEnv* jni,
                                                 jobject j_egl_context)
    : signaling_thread_(signaling_thread),
      surface_texture_helper_(webrtc_jni::SurfaceTextureHelper::create(
          jni,
          "Camera SurfaceTextureHelper",
          j_egl_context)) {
  LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
  // The worker and camera threads are not known at construction time; detach
  // the checkers so each binds to whichever thread first calls into it.
  worker_thread_checker_.DetachFromThread();
  camera_thread_checker_.DetachFromThread();
}
| 29 |
| 30 bool AndroidVideoTrackSource::GetStats(AndroidVideoTrackSource::Stats* stats) { |
| 31 rtc::CritScope lock(&stats_crit_); |
| 32 |
| 33 if (!stats_) { |
| 34 return false; |
| 35 } |
| 36 |
| 37 *stats = *stats_; |
| 38 return true; |
| 39 } |
| 40 |
| 41 void AndroidVideoTrackSource::SetState(SourceState state) { |
| 42 if (rtc::Thread::Current() != signaling_thread_) { |
| 43 invoker_.AsyncInvoke<void>( |
| 44 RTC_FROM_HERE, signaling_thread_, |
| 45 rtc::Bind(&AndroidVideoTrackSource::SetState, this, state)); |
| 46 return; |
| 47 } |
| 48 |
| 49 if (state_ != state) { |
| 50 state_ = state; |
| 51 FireOnChanged(); |
| 52 } |
| 53 } |
| 54 |
| 55 void AndroidVideoTrackSource::AddOrUpdateSink( |
| 56 rtc::VideoSinkInterface<cricket::VideoFrame>* sink, |
| 57 const rtc::VideoSinkWants& wants) { |
| 58 RTC_DCHECK(worker_thread_checker_.CalledOnValidThread()); |
| 59 |
| 60 broadcaster_.AddOrUpdateSink(sink, wants); |
| 61 OnSinkWantsChanged(broadcaster_.wants()); |
| 62 } |
| 63 |
| 64 void AndroidVideoTrackSource::RemoveSink( |
| 65 rtc::VideoSinkInterface<cricket::VideoFrame>* sink) { |
| 66 RTC_DCHECK(worker_thread_checker_.CalledOnValidThread()); |
| 67 |
| 68 broadcaster_.RemoveSink(sink); |
| 69 OnSinkWantsChanged(broadcaster_.wants()); |
| 70 } |
| 71 |
| 72 void AndroidVideoTrackSource::OnSinkWantsChanged( |
| 73 const rtc::VideoSinkWants& wants) { |
| 74 { |
| 75 rtc::CritScope lock(&apply_rotation_crit_); |
| 76 apply_rotation_ = wants.rotation_applied; |
| 77 } |
| 78 |
| 79 video_adapter_.OnResolutionRequest(wants.max_pixel_count, |
| 80 wants.max_pixel_count_step_up); |
| 81 } |
| 82 |
// Delivers one NV21 byte-buffer camera frame (full-resolution Y plane
// followed by an interleaved VU plane). Runs on the camera thread. The frame
// is cropped/scaled per the video adapter, converted to I420 and — when
// rotation must be applied locally — rotated upright before delivery.
//
// |frame_data|   - pointer to the NV21 pixel data.
// |length|       - byte length of |frame_data|; checked against the layout.
// |width|/|height| - captured frame dimensions in pixels.
// |rotation|     - clockwise rotation in degrees (0/90/180/270).
// |timestamp_ns| - capture time on the camera clock, in nanoseconds.
void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
                                                        int length,
                                                        int width,
                                                        int height,
                                                        int rotation,
                                                        int64_t timestamp_ns) {
  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
  RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
             rotation == 270);

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

  // Ask the adapter how to crop/scale this frame; false means drop it
  // (no sink wants frames, or the adapter is rate/resolution limiting).
  if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec,
                  &adapted_width, &adapted_height, &crop_width, &crop_height,
                  &crop_x, &crop_y, &translated_camera_time_us)) {
    return;
  }

  int rotated_width = crop_width;
  int rotated_height = crop_height;

  // NOTE: this lock is intentionally held for the remainder of the function,
  // including the conversion and OnFrame() below, so that the rotate step and
  // the rotation tag on the delivered frame see the same |apply_rotation_|.
  rtc::CritScope lock(&apply_rotation_crit_);
  if (apply_rotation_ && (rotation == 90 || rotation == 270)) {
    // A 90/270 rotation transposes the image, so swap both the destination
    // buffer dimensions and the adapter's requested output dimensions.
    std::swap(adapted_width, adapted_height);
    std::swap(rotated_width, rotated_height);
  }

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      pre_scale_pool_.CreateBuffer(rotated_width, rotated_height);

  // NV21 layout: Y plane of width*height bytes, then the interleaved chroma
  // plane, addressed below with the same stride as Y.
  const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data);
  const uint8_t* uv_plane = y_plane + width * height;
  int uv_width = (width + 1) / 2;

  // Reject buffers too small for the declared dimensions.
  RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2));

  // Can only crop at even pixels.
  crop_x &= ~1;
  crop_y &= ~1;

  libyuv::NV12ToI420Rotate(
      y_plane + width * crop_y + crop_x, width,
      uv_plane + uv_width * crop_y + crop_x, width, buffer->MutableDataY(),
      buffer->StrideY(),
      // Swap U and V, since we have NV21, not NV12.
      buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
      buffer->StrideU(), crop_width, crop_height,
      static_cast<libyuv::RotationMode>(apply_rotation_ ? rotation : 0));

  // If the adapter wants a different size than the (possibly rotated) crop,
  // scale into a second buffer taken from the post-scale pool.
  if (adapted_width != buffer->width() || adapted_height != buffer->height()) {
    rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
        post_scale_pool_.CreateBuffer(adapted_width, adapted_height));
    scaled_buffer->ScaleFrom(buffer);
    buffer = scaled_buffer;
  }

  // When rotation was applied above, the frame is already upright and tagged
  // kVideoRotation_0; otherwise the sink is expected to apply |rotation|.
  OnFrame(cricket::WebRtcVideoFrame(
              buffer,
              apply_rotation_ ? webrtc::kVideoRotation_0
                              : static_cast<webrtc::VideoRotation>(rotation),
              translated_camera_time_us),
          width, height);
}
| 152 |
// Delivers one camera frame backed by an OES texture. Runs on the camera
// thread. Unlike the byte-buffer path, cropping and rotation are expressed by
// adjusting the texture sampling matrix instead of copying pixels.
//
// |width|/|height| - captured frame dimensions in pixels.
// |rotation|       - clockwise rotation in degrees (0/90/180/270).
// |timestamp_ns|   - capture time on the camera clock, in nanoseconds.
// |handle|         - texture id plus its current sampling matrix.
void AndroidVideoTrackSource::OnTextureFrameCaptured(
    int width,
    int height,
    int rotation,
    int64_t timestamp_ns,
    const webrtc_jni::NativeHandleImpl& handle) {
  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
  RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
             rotation == 270);

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;
  int64_t translated_camera_time_us;

  if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec,
                  &adapted_width, &adapted_height, &crop_width, &crop_height,
                  &crop_x, &crop_y, &translated_camera_time_us)) {
    // Dropped frames must hand the texture back to the helper so it can be
    // reused for the next capture.
    surface_texture_helper_->ReturnTextureFrame();
    return;
  }

  webrtc_jni::Matrix matrix = handle.sampling_matrix;

  // Fold the crop rectangle into the sampling matrix, normalized to [0, 1]
  // texture coordinates.
  matrix.Crop(crop_width / static_cast<float>(width),
              crop_height / static_cast<float>(height),
              crop_x / static_cast<float>(width),
              crop_y / static_cast<float>(height));

  // NOTE: held through OnFrame() below so the matrix rotation and the
  // rotation tag on the delivered frame use the same |apply_rotation_|.
  rtc::CritScope lock(&apply_rotation_crit_);
  if (apply_rotation_) {
    if (rotation == webrtc::kVideoRotation_90 ||
        rotation == webrtc::kVideoRotation_270) {
      // 90/270 degrees transposes the image; swap the output dimensions.
      std::swap(adapted_width, adapted_height);
    }
    matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation));
  }

  OnFrame(cricket::WebRtcVideoFrame(
              surface_texture_helper_->CreateTextureFrame(
                  adapted_width, adapted_height,
                  webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)),
              apply_rotation_ ? webrtc::kVideoRotation_0
                              : static_cast<webrtc::VideoRotation>(rotation),
              translated_camera_time_us),
          width, height);
}
| 203 |
| 204 void AndroidVideoTrackSource::OnFrame(const cricket::VideoFrame& frame, |
| 205 int width, |
| 206 int height) { |
| 207 { |
| 208 rtc::CritScope lock(&stats_crit_); |
| 209 stats_ = rtc::Optional<AndroidVideoTrackSource::Stats>({width, height}); |
| 210 } |
| 211 |
| 212 broadcaster_.OnFrame(frame); |
| 213 } |
| 214 |
| 215 void AndroidVideoTrackSource::OnOutputFormatRequest(int width, |
| 216 int height, |
| 217 int fps) { |
| 218 RTC_DCHECK(camera_thread_checker_.CalledOnValidThread()); |
| 219 |
| 220 cricket::VideoFormat format(width, height, |
| 221 cricket::VideoFormat::FpsToInterval(fps), 0); |
| 222 video_adapter_.OnOutputFormatRequest(format); |
| 223 } |
| 224 |
// Decides whether an incoming frame should be delivered and, if so, how it
// should be cropped and scaled. Runs on the camera thread.
//
// Returns false when the frame should be dropped (no sink wants frames, or
// the adapter drops it for rate/resolution reasons). On success fills:
//   |out_width|/|out_height|   - dimensions to deliver after scaling.
//   |crop_width|/|crop_height| - centered crop of the source frame.
//   |crop_x|/|crop_y|          - top-left corner of that crop.
//   |translated_camera_time_us| - capture time translated from the camera
//       clock to the system monotonic clock.
bool AndroidVideoTrackSource::AdaptFrame(int width,
                                         int height,
                                         int64_t camera_time_us,
                                         int* out_width,
                                         int* out_height,
                                         int* crop_width,
                                         int* crop_height,
                                         int* crop_x,
                                         int* crop_y,
                                         int64_t* translated_camera_time_us) {
  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());

  int64_t system_time_us = rtc::TimeMicros();

  // Deliberately done before the drop checks below: the clock-offset
  // estimate is updated on every frame, dropped or not, so it stays fresh.
  int64_t offset_us =
      timestamp_aligner_.UpdateOffset(camera_time_us, system_time_us);

  if (!broadcaster_.frame_wanted()) {
    return false;
  }

  if (!video_adapter_.AdaptFrameResolution(
          width, height, camera_time_us * rtc::kNumNanosecsPerMicrosec,
          crop_width, crop_height, out_width, out_height)) {
    // VideoAdapter dropped the frame.
    return false;
  }
  // Center the crop rectangle within the source frame.
  *crop_x = (width - *crop_width) / 2;
  *crop_y = (height - *crop_height) / 2;

  *translated_camera_time_us = timestamp_aligner_.ClipTimestamp(
      camera_time_us + offset_us, system_time_us);
  return true;
}
| 259 |
| 260 } // namespace webrtc |
OLD | NEW |