Chromium Code Reviews
Diff: webrtc/api/android/jni/androidvideotracksource.cc

Issue 2514383002: Move androidvideotracksource from api under api/android/jni. (Closed)
Patch Set: Created 4 years, 1 month ago
 /*
  * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
  * tree. An additional intellectual property rights grant can be found
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  */

-#include "webrtc/api/androidvideotracksource.h"
+#include "webrtc/api/android/jni/androidvideotracksource.h"

 #include <utility>

 namespace webrtc {

 AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
                                                  JNIEnv* jni,
                                                  jobject j_egl_context,
                                                  bool is_screencast)
     : signaling_thread_(signaling_thread),

(... skipping 34 matching lines ...)

   int64_t translated_camera_time_us =
       timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());

   int adapted_width;
   int adapted_height;
   int crop_width;
   int crop_height;
   int crop_x;
   int crop_y;

-  if (!AdaptFrame(width, height, camera_time_us,
-                  &adapted_width, &adapted_height, &crop_width, &crop_height,
-                  &crop_x, &crop_y)) {
+  if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
+                  &adapted_height, &crop_width, &crop_height, &crop_x,
+                  &crop_y)) {
     return;
   }

   const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data);
   const uint8_t* uv_plane = y_plane + width * height;
   const int uv_width = (width + 1) / 2;

   RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2));

   // Can only crop at even pixels.
   crop_x &= ~1;
   crop_y &= ~1;
   // Crop just by modifying pointers.
   y_plane += width * crop_y + crop_x;
   uv_plane += uv_width * crop_y + crop_x;

   rtc::scoped_refptr<webrtc::I420Buffer> buffer =
       buffer_pool_.CreateBuffer(adapted_width, adapted_height);

-  nv12toi420_scaler_.NV12ToI420Scale(
-      y_plane, width,
-      uv_plane, uv_width * 2,
-      crop_width, crop_height,
-      buffer->MutableDataY(), buffer->StrideY(),
-      // Swap U and V, since we have NV21, not NV12.
-      buffer->MutableDataV(), buffer->StrideV(),
-      buffer->MutableDataU(), buffer->StrideU(),
-      buffer->width(), buffer->height());
+  nv12toi420_scaler_.NV12ToI420Scale(
+      y_plane, width, uv_plane, uv_width * 2, crop_width, crop_height,
+      buffer->MutableDataY(), buffer->StrideY(),
+      // Swap U and V, since we have NV21, not NV12.
+      buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
+      buffer->StrideU(), buffer->width(), buffer->height());

   OnFrame(VideoFrame(buffer, static_cast<webrtc::VideoRotation>(rotation),
                      translated_camera_time_us));
 }

 void AndroidVideoTrackSource::OnTextureFrameCaptured(
     int width,
     int height,
     int rotation,
     int64_t timestamp_ns,
     const webrtc_jni::NativeHandleImpl& handle) {
   RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
   RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
              rotation == 270);

   int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
   int64_t translated_camera_time_us =
       timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());

   int adapted_width;
   int adapted_height;
   int crop_width;
   int crop_height;
   int crop_x;
   int crop_y;

-  if (!AdaptFrame(width, height, camera_time_us,
-                  &adapted_width, &adapted_height, &crop_width, &crop_height,
-                  &crop_x, &crop_y)) {
+  if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
+                  &adapted_height, &crop_width, &crop_height, &crop_x,
+                  &crop_y)) {
     surface_texture_helper_->ReturnTextureFrame();
     return;
   }

   webrtc_jni::Matrix matrix = handle.sampling_matrix;

   matrix.Crop(crop_width / static_cast<float>(width),
               crop_height / static_cast<float>(height),
               crop_x / static_cast<float>(width),
               crop_y / static_cast<float>(height));

(... skipping 21 matching lines ...)

 void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
                                                     int height,
                                                     int fps) {
   cricket::VideoFormat format(width, height,
                               cricket::VideoFormat::FpsToInterval(fps), 0);
   video_adapter()->OnOutputFormatRequest(format);
 }

 }  // namespace webrtc
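To make the byte-buffer path above easier to follow, here is a minimal, self-contained sketch of the NV21 cropping arithmetic it relies on: the crop offset is rounded down to even pixels (NV21 chroma is subsampled 2x2), the crop itself is done purely by advancing the plane pointers while keeping the full-frame strides, and the buffer length is checked against the minimum NV21 size, mirroring the RTC_CHECK_GE above. The CropNv21 helper and Nv21View struct below are illustrative names only, not part of the WebRTC API.

#include <cassert>
#include <cstddef>
#include <cstdint>

// Pointers and strides describing a cropped, zero-copy view into an NV21 frame.
struct Nv21View {
  const uint8_t* y;   // Top-left of the cropped Y plane.
  const uint8_t* vu;  // Top-left of the cropped interleaved VU plane.
  int y_stride;       // Full-frame Y stride (== width).
  int vu_stride;      // Full-frame VU stride (== 2 * ((width + 1) / 2)).
};

// Hypothetical helper mirroring the pointer arithmetic in
// OnByteBufferFrameCaptured(): crop an NV21 frame without copying any pixels.
Nv21View CropNv21(const uint8_t* frame_data, size_t length,
                  int width, int height, int crop_x, int crop_y) {
  const int uv_width = (width + 1) / 2;

  // Minimum NV21 size: a full-resolution Y plane plus one interleaved VU plane
  // at half resolution in both dimensions (what the RTC_CHECK_GE enforces).
  assert(length >= static_cast<size_t>(width) * height +
                       2 * static_cast<size_t>(uv_width) * ((height + 1) / 2));

  // Each chroma sample covers a 2x2 block of luma pixels, so cropping is only
  // possible at even coordinates; round the offset down.
  crop_x &= ~1;
  crop_y &= ~1;

  // "Crop" by advancing the plane pointers; the strides stay those of the
  // full frame.
  const uint8_t* y = frame_data + width * crop_y + crop_x;
  // One VU row serves two luma rows and each VU pair is 2 bytes wide, so the
  // byte offset is (2 * uv_width) * (crop_y / 2) + crop_x, which simplifies to
  // uv_width * crop_y + crop_x for even crop_x and crop_y.
  const uint8_t* vu = frame_data + width * height + uv_width * crop_y + crop_x;

  return {y, vu, width, 2 * uv_width};
}

The resulting view corresponds to what the code above hands to NV12ToI420Scale; because the Android camera delivers NV21 (VU interleaved) rather than NV12 (UV interleaved), the call swaps the destination chroma planes, which is why MutableDataV() is passed before MutableDataU().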