OLD | NEW |
| (Empty) |
1 /* | |
2 * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. | |
3 * | |
4 * Use of this source code is governed by a BSD-style license | |
5 * that can be found in the LICENSE file in the root of the source | |
6 * tree. An additional intellectual property rights grant can be found | |
7 * in the file PATENTS. All contributing project authors may | |
8 * be found in the AUTHORS file in the root of the source tree. | |
9 */ | |
10 | |
11 #include "webrtc/api/androidvideotracksource.h" | |
12 | |
13 #include <utility> | |
14 | |
15 namespace webrtc { | |
16 | |
// Constructs the track source. |signaling_thread| is where observer state
// changes are delivered (see SetState); |jni| and |j_egl_context| are used to
// create the SurfaceTextureHelper that produces/recycles texture frames;
// |is_screencast| is stored for the is_screencast() accessor.
// Note: |signaling_thread| and the EGL context must outlive this object —
// TODO confirm against the owning PeerConnectionFactory.
AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
                                                 JNIEnv* jni,
                                                 jobject j_egl_context,
                                                 bool is_screencast)
    : signaling_thread_(signaling_thread),
      surface_texture_helper_(webrtc_jni::SurfaceTextureHelper::create(
          jni,
          "Camera SurfaceTextureHelper",
          j_egl_context)),
      is_screencast_(is_screencast) {
  LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
  // Capture callbacks arrive on a camera thread that is not the thread
  // running this constructor; detach so the checker re-binds to the first
  // thread that calls into OnByteBufferFrameCaptured/OnTextureFrameCaptured.
  camera_thread_checker_.DetachFromThread();
}
30 | |
31 void AndroidVideoTrackSource::SetState(SourceState state) { | |
32 if (rtc::Thread::Current() != signaling_thread_) { | |
33 invoker_.AsyncInvoke<void>( | |
34 RTC_FROM_HERE, signaling_thread_, | |
35 rtc::Bind(&AndroidVideoTrackSource::SetState, this, state)); | |
36 return; | |
37 } | |
38 | |
39 if (state_ != state) { | |
40 state_ = state; | |
41 FireOnChanged(); | |
42 } | |
43 } | |
44 | |
// Delivers one NV21 byte-buffer frame from the Java camera callback into the
// WebRTC pipeline: translates the camera timestamp to the local monotonic
// clock, asks the adapter how to crop/scale, converts NV21 to I420 (swapping
// the chroma planes, since NV21 stores V before U), and emits the result via
// OnFrame().
//
// |frame_data|   NV21 pixel data: width*height Y bytes followed by
//                interleaved VU chroma at half resolution.
// |length|       Byte size of |frame_data|; checked below against the minimum
//                NV21 size for width x height.
// |width|, |height|  Captured frame dimensions in pixels.
// |rotation|     Clockwise rotation in degrees; must be 0/90/180/270.
// |timestamp_ns| Capture time in nanoseconds on the camera's clock.
void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
                                                        int length,
                                                        int width,
                                                        int height,
                                                        int rotation,
                                                        int64_t timestamp_ns) {
  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
  RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
             rotation == 270);

  // Map the camera clock onto rtc::TimeMicros() so downstream timestamps are
  // comparable with the rest of the stack.
  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
  int64_t translated_camera_time_us =
      timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;

  // AdaptFrame decides the output size and crop region (and may drop the
  // frame entirely, e.g. for frame-rate reduction).
  if (!AdaptFrame(width, height, camera_time_us,
                  &adapted_width, &adapted_height, &crop_width, &crop_height,
                  &crop_x, &crop_y)) {
    return;
  }

  // NV21 layout: full-resolution Y plane, then a single interleaved VU plane
  // with (width+1)/2 chroma pairs per row, i.e. a stride of 2*uv_width bytes.
  const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data);
  const uint8_t* uv_plane = y_plane + width * height;
  const int uv_width = (width + 1) / 2;

  RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2));

  // Can only crop at even pixels.
  crop_x &= ~1;
  crop_y &= ~1;
  // Crop just by modifying pointers. For the chroma plane this works out to
  // stride * (crop_y / 2) + 2 * (crop_x / 2) == uv_width * crop_y + crop_x,
  // since crop_x and crop_y are even.
  y_plane += width * crop_y + crop_x;
  uv_plane += uv_width * crop_y + crop_x;

  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
      buffer_pool_.CreateBuffer(adapted_width, adapted_height);

  nv12toi420_scaler_.NV12ToI420Scale(
      y_plane, width,
      uv_plane, uv_width * 2,
      crop_width, crop_height,
      buffer->MutableDataY(), buffer->StrideY(),
      // Swap U and V, since we have NV21, not NV12.
      buffer->MutableDataV(), buffer->StrideV(),
      buffer->MutableDataU(), buffer->StrideU(),
      buffer->width(), buffer->height());

  // Rotation is passed through as metadata rather than baked into the pixels.
  OnFrame(VideoFrame(buffer, static_cast<webrtc::VideoRotation>(rotation),
                     translated_camera_time_us));
}
101 | |
102 void AndroidVideoTrackSource::OnTextureFrameCaptured( | |
103 int width, | |
104 int height, | |
105 int rotation, | |
106 int64_t timestamp_ns, | |
107 const webrtc_jni::NativeHandleImpl& handle) { | |
108 RTC_DCHECK(camera_thread_checker_.CalledOnValidThread()); | |
109 RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 || | |
110 rotation == 270); | |
111 | |
112 int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec; | |
113 int64_t translated_camera_time_us = | |
114 timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros()); | |
115 | |
116 int adapted_width; | |
117 int adapted_height; | |
118 int crop_width; | |
119 int crop_height; | |
120 int crop_x; | |
121 int crop_y; | |
122 | |
123 if (!AdaptFrame(width, height, camera_time_us, | |
124 &adapted_width, &adapted_height, &crop_width, &crop_height, | |
125 &crop_x, &crop_y)) { | |
126 surface_texture_helper_->ReturnTextureFrame(); | |
127 return; | |
128 } | |
129 | |
130 webrtc_jni::Matrix matrix = handle.sampling_matrix; | |
131 | |
132 matrix.Crop(crop_width / static_cast<float>(width), | |
133 crop_height / static_cast<float>(height), | |
134 crop_x / static_cast<float>(width), | |
135 crop_y / static_cast<float>(height)); | |
136 | |
137 // Make a local copy, since value of apply_rotation() may change | |
138 // under our feet. | |
139 bool do_rotate = apply_rotation(); | |
140 | |
141 if (do_rotate) { | |
142 if (rotation == webrtc::kVideoRotation_90 || | |
143 rotation == webrtc::kVideoRotation_270) { | |
144 std::swap(adapted_width, adapted_height); | |
145 } | |
146 matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation)); | |
147 } | |
148 | |
149 OnFrame(VideoFrame( | |
150 surface_texture_helper_->CreateTextureFrame( | |
151 adapted_width, adapted_height, | |
152 webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)), | |
153 do_rotate ? webrtc::kVideoRotation_0 | |
154 : static_cast<webrtc::VideoRotation>(rotation), | |
155 translated_camera_time_us)); | |
156 } | |
157 | |
158 void AndroidVideoTrackSource::OnOutputFormatRequest(int width, | |
159 int height, | |
160 int fps) { | |
161 cricket::VideoFormat format(width, height, | |
162 cricket::VideoFormat::FpsToInterval(fps), 0); | |
163 video_adapter()->OnOutputFormatRequest(format); | |
164 } | |
165 | |
166 } // namespace webrtc | |
OLD | NEW |