Chromium Code Reviews

Side by Side Diff: chrome/browser/android/vr_shell/vr_shell_gl.cc

Issue 2584343002: WIP: working copy-no-compositor path
Patch Set: StatTracker destructor, delete old magic numbers, mojo export Created 3 years, 11 months ago
1 // Copyright 2016 The Chromium Authors. All rights reserved. 1 // Copyright 2016 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h" 5 #include "chrome/browser/android/vr_shell/vr_shell_gl.h"
6 6
7 #include "base/android/jni_android.h"
7 #include "base/memory/ptr_util.h" 8 #include "base/memory/ptr_util.h"
8 #include "base/metrics/histogram_macros.h" 9 #include "base/metrics/histogram_macros.h"
9 #include "base/threading/thread_task_runner_handle.h" 10 #include "base/threading/thread_task_runner_handle.h"
10 #include "chrome/browser/android/vr_shell/ui_elements.h" 11 #include "chrome/browser/android/vr_shell/ui_elements.h"
11 #include "chrome/browser/android/vr_shell/ui_scene.h" 12 #include "chrome/browser/android/vr_shell/ui_scene.h"
12 #include "chrome/browser/android/vr_shell/vr_controller.h" 13 #include "chrome/browser/android/vr_shell/vr_controller.h"
13 #include "chrome/browser/android/vr_shell/vr_gl_util.h" 14 #include "chrome/browser/android/vr_shell/vr_gl_util.h"
14 #include "chrome/browser/android/vr_shell/vr_input_manager.h" 15 #include "chrome/browser/android/vr_shell/vr_input_manager.h"
15 #include "chrome/browser/android/vr_shell/vr_math.h" 16 #include "chrome/browser/android/vr_shell/vr_math.h"
16 #include "chrome/browser/android/vr_shell/vr_shell.h" 17 #include "chrome/browser/android/vr_shell/vr_shell.h"
17 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h" 18 #include "chrome/browser/android/vr_shell/vr_shell_renderer.h"
18 #include "third_party/WebKit/public/platform/WebInputEvent.h" 19 #include "third_party/WebKit/public/platform/WebInputEvent.h"
19 #include "ui/gfx/vsync_provider.h" 20 #include "ui/gfx/vsync_provider.h"
20 #include "ui/gl/android/scoped_java_surface.h" 21 #include "ui/gl/android/scoped_java_surface.h"
21 #include "ui/gl/android/surface_texture.h" 22 #include "ui/gl/android/surface_texture.h"
22 #include "ui/gl/gl_bindings.h" 23 #include "ui/gl/gl_bindings.h"
23 #include "ui/gl/gl_context.h" 24 #include "ui/gl/gl_context.h"
24 #include "ui/gl/gl_surface.h" 25 #include "ui/gl/gl_surface.h"
25 #include "ui/gl/init/gl_factory.h" 26 #include "ui/gl/init/gl_factory.h"
27 #include "gpu/ipc/common/gpu_surface_tracker.h"
28 #include "gpu/ipc/common/surface_handle.h"
26 29
27 namespace vr_shell { 30 namespace vr_shell {
28 31
29 namespace { 32 namespace {
30 // Constant taken from treasure_hunt demo. 33 // Constant taken from treasure_hunt demo.
31 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000; 34 static constexpr long kPredictionTimeWithoutVsyncNanos = 50000000;
32 35
33 static constexpr float kZNear = 0.1f; 36 static constexpr float kZNear = 0.1f;
34 static constexpr float kZFar = 1000.0f; 37 static constexpr float kZFar = 1000.0f;
38 #include <android/native_window.h>
39 #include <android/native_window_jni.h>
40
41 #include <iomanip>
42
35 43
36 // Screen angle in degrees. 0 = vertical, positive = top closer. 44 // Screen angle in degrees. 0 = vertical, positive = top closer.
37 static constexpr float kDesktopScreenTiltDefault = 0; 45 static constexpr float kDesktopScreenTiltDefault = 0;
38 46
39 static constexpr float kReticleWidth = 0.025f; 47 static constexpr float kReticleWidth = 0.025f;
40 static constexpr float kReticleHeight = 0.025f; 48 static constexpr float kReticleHeight = 0.025f;
41 49
42 static constexpr float kLaserWidth = 0.01f; 50 static constexpr float kLaserWidth = 0.01f;
43 51
44 // Angle (radians) the beam down from the controller axis, for wrist comfort. 52 // Angle (radians) the beam down from the controller axis, for wrist comfort.
(...skipping 29 matching lines...)
74 // is currently sized to fit the WebVR "insecure transport" warnings, 82 // is currently sized to fit the WebVR "insecure transport" warnings,
75 // adjust it as needed if there is additional content. 83 // adjust it as needed if there is additional content.
76 static constexpr gvr::Sizei kHeadlockedBufferDimensions = {1024, 1024}; 84 static constexpr gvr::Sizei kHeadlockedBufferDimensions = {1024, 1024};
77 static constexpr gvr::Rectf kHeadlockedBufferFov = {20.f, 20.f, 20.f, 20.f}; 85 static constexpr gvr::Rectf kHeadlockedBufferFov = {20.f, 20.f, 20.f, 20.f};
78 86
79 // The GVR viewport list has two entries (left eye and right eye) for each 87 // The GVR viewport list has two entries (left eye and right eye) for each
80 // GVR buffer. 88 // GVR buffer.
81 static constexpr int kViewportListPrimaryOffset = 0; 89 static constexpr int kViewportListPrimaryOffset = 0;
82 static constexpr int kViewportListHeadlockedOffset = 2; 90 static constexpr int kViewportListHeadlockedOffset = 2;
83 91
84 // Magic numbers used to mark valid pose index values encoded in frame
85 // data. Must match the magic numbers used in blink's VRDisplay.cpp.
86 static constexpr std::array<uint8_t, 2> kWebVrPosePixelMagicNumbers{{42, 142}};
87
88 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) { 92 float Distance(const gvr::Vec3f& vec1, const gvr::Vec3f& vec2) {
89 float xdiff = (vec1.x - vec2.x); 93 float xdiff = (vec1.x - vec2.x);
90 float ydiff = (vec1.y - vec2.y); 94 float ydiff = (vec1.y - vec2.y);
91 float zdiff = (vec1.z - vec2.z); 95 float zdiff = (vec1.z - vec2.z);
92 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff; 96 float scale = xdiff * xdiff + ydiff * ydiff + zdiff * zdiff;
93 return std::sqrt(scale); 97 return std::sqrt(scale);
94 } 98 }
95 99
96 // Generate a quaternion representing the rotation from the negative Z axis 100 // Generate a quaternion representing the rotation from the negative Z axis
97 // (0, 0, -1) to a specified vector. This is an optimized version of a more 101 // (0, 0, -1) to a specified vector. This is an optimized version of a more
(...skipping 36 matching lines...)
134 return mouse_event; 138 return mouse_event;
135 } 139 }
136 140
137 enum class ViewerType { 141 enum class ViewerType {
138 UNKNOWN_TYPE = 0, 142 UNKNOWN_TYPE = 0,
139 CARDBOARD = 1, 143 CARDBOARD = 1,
140 DAYDREAM = 2, 144 DAYDREAM = 2,
141 VIEWER_TYPE_MAX, 145 VIEWER_TYPE_MAX,
142 }; 146 };
143 147
144 int GetPixelEncodedPoseIndexByte() { 148 } // namespace
145 TRACE_EVENT0("gpu", "VrShellGl::GetPixelEncodedPoseIndex");
146 // Read the pose index encoded in a bottom left pixel as color values.
147 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which
148 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc
149 // which tracks poses. Returns the low byte (0..255) if valid, or -1
150 // if not valid due to bad magic number.
151 uint8_t pixels[4];
152 // Assume we're reading from the framebuffer we just wrote to.
153 // That's true currently, we may need to use glReadBuffer(GL_BACK)
154 // or equivalent if the rendering setup changes in the future.
155 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
156
157 // Check for the magic number written by VRDevice.cpp on submit.
158 // This helps avoid glitches from garbage data in the render
159 // buffer that can appear during initialization or resizing. These
160 // often appear as flashes of all-black or all-white pixels.
161 if (pixels[1] == kWebVrPosePixelMagicNumbers[0] &&
162 pixels[2] == kWebVrPosePixelMagicNumbers[1]) {
163 // Pose is good.
164 return pixels[0];
165 }
166 VLOG(1) << "WebVR: reject decoded pose index " << (int)pixels[0] <<
167 ", bad magic number " << (int)pixels[1] << ", " << (int)pixels[2];
168 return -1;
169 }
170 149
171 int64_t TimeInMicroseconds() { 150 int64_t TimeInMicroseconds() {
172 return std::chrono::duration_cast<std::chrono::microseconds>( 151 return std::chrono::duration_cast<std::chrono::microseconds>(
173 std::chrono::steady_clock::now().time_since_epoch()).count(); 152 std::chrono::steady_clock::now().time_since_epoch()).count();
174 } 153 }
175 154
176 } // namespace 155 uint32_t GetPixelEncodedPoseIndex() {
156 TRACE_EVENT0("gpu", "VrShell::GetPixelEncodedPoseIndex");
157 // Read the pose index encoded in a bottom left pixel as color values.
158 // See also third_party/WebKit/Source/modules/vr/VRDisplay.cpp which
159 // encodes the pose index, and device/vr/android/gvr/gvr_device.cc
160 // which tracks poses.
161 uint8_t pixels[4];
162 // Assume we're reading from the framebuffer we just wrote to.
163 // That's true currently, we may need to use glReadBuffer(GL_BACK)
164 // or equivalent if the rendering setup changes in the future.
165 glReadPixels(0, 0, 1, 1, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
166 return pixels[0] | (pixels[1] << 8) | (pixels[2] << 16);
167 }
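For reference, a minimal sketch (not part of the patch) of the inverse mapping that GetPixelEncodedPoseIndex() above assumes; the real encoder lives in third_party/WebKit/Source/modules/vr/VRDisplay.cpp and may differ in detail:

// Hypothetical illustration: pack a 24-bit pose index into the R/G/B channels
// of one RGBA pixel so that pixels[0] | (pixels[1] << 8) | (pixels[2] << 16)
// recovers it.
void EncodePoseIndexPixel(uint32_t pose_index, uint8_t rgba[4]) {
  rgba[0] = pose_index & 0xFF;          // low byte -> red
  rgba[1] = (pose_index >> 8) & 0xFF;   // middle byte -> green
  rgba[2] = (pose_index >> 16) & 0xFF;  // high byte -> blue
  rgba[3] = 0xFF;                       // opaque alpha, unused by the decoder
}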
177 168
178 VrShellGl::VrShellGl( 169 VrShellGl::VrShellGl(
179 const base::WeakPtr<VrShell>& weak_vr_shell, 170 const base::WeakPtr<VrShell>& weak_vr_shell,
180 const base::WeakPtr<VrInputManager>& content_input_manager, 171 const base::WeakPtr<VrInputManager>& content_input_manager,
181 const base::WeakPtr<VrInputManager>& ui_input_manager, 172 const base::WeakPtr<VrInputManager>& ui_input_manager,
182 scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner, 173 scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner,
183 gvr_context* gvr_api, 174 gvr_context* gvr_api,
184 bool initially_web_vr) 175 bool initially_web_vr)
185 : web_vr_mode_(initially_web_vr), 176 : web_vr_mode_(initially_web_vr),
186 task_runner_(base::ThreadTaskRunnerHandle::Get()), 177 task_runner_(base::ThreadTaskRunnerHandle::Get()),
187 weak_vr_shell_(weak_vr_shell), 178 weak_vr_shell_(weak_vr_shell),
188 content_input_manager_(content_input_manager), 179 content_input_manager_(content_input_manager),
189 ui_input_manager_(ui_input_manager), 180 ui_input_manager_(ui_input_manager),
190 main_thread_task_runner_(std::move(main_thread_task_runner)), 181 main_thread_task_runner_(std::move(main_thread_task_runner)),
191 weak_ptr_factory_(this) { 182 weak_ptr_factory_(this) {
192 GvrInit(gvr_api); 183 GvrInit(gvr_api);
193 } 184 }
194 185
195 VrShellGl::~VrShellGl() { 186 VrShellGl::~VrShellGl() {
196 draw_task_.Cancel(); 187 draw_task_.Cancel();
197 } 188 }
artem.bolgar 2017/02/28 20:15:52 It seems like WebVR surface (maybe others too) is
mthiesse 2017/02/28 20:22:48 Our other surfaces are likely okay, we go through
198 189
199 bool VrShellGl::Initialize() { 190 bool VrShellGl::Initialize() {
200 if (!InitializeGl()) return false; 191 if (!InitializeGl()) return false;
201 192
202 gvr::Mat4f identity; 193 gvr::Mat4f identity;
203 SetIdentityM(identity); 194 SetIdentityM(identity);
204 webvr_head_pose_.resize(kPoseRingBufferSize, identity); 195 webvr_head_pose_.resize(kPoseRingBufferSize, identity);
205 webvr_head_pose_valid_.resize(kPoseRingBufferSize, false); 196 webvr_time_frame_start_.resize(kPoseRingBufferSize, 0.0);
197 webvr_time_get_pose_.resize(kPoseRingBufferSize, 0.0);
198 webvr_time_got_pose_.resize(kPoseRingBufferSize, 0.0);
199 webvr_time_submit_.resize(kPoseRingBufferSize, 0.0);
200 webvr_time_surfaced_.resize(kPoseRingBufferSize, 0.0);
201 webvr_time_acquired_.resize(kPoseRingBufferSize, 0.0);
206 202
207 draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this))); 203 draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this), 0));
208 204
209 scene_.reset(new UiScene); 205 scene_.reset(new UiScene);
210 206
211 InitializeRenderer(); 207 InitializeRenderer();
208 VLOG(1) << __FUNCTION__ << ": Destructor for presenting delegate and its gvr_api";
212 209
213 ScheduleNextDrawFrame(); 210 ScheduleNextDrawFrame();
214 return true; 211 return true;
215 } 212 }
216 213
217 bool VrShellGl::InitializeGl() { 214 bool VrShellGl::InitializeGl() {
218 if (gl::GetGLImplementation() == gl::kGLImplementationNone && 215 if (gl::GetGLImplementation() == gl::kGLImplementationNone &&
219 !gl::init::InitializeGLOneOff()) { 216 !gl::init::InitializeGLOneOff()) {
220 LOG(ERROR) << "gl::init::InitializeGLOneOff failed"; 217 LOG(ERROR) << "gl::init::InitializeGLOneOff failed";
221 ForceExitVr(); 218 ForceExitVr();
(...skipping 21 matching lines...)
243 // TODO(mthiesse): We don't appear to have a VSync provider ever here. This is 240 // TODO(mthiesse): We don't appear to have a VSync provider ever here. This is
244 // sort of okay, because the GVR swap chain will block if we render too fast, 241 // sort of okay, because the GVR swap chain will block if we render too fast,
245 // but we should address this properly. 242 // but we should address this properly.
246 if (surface_->GetVSyncProvider()) { 243 if (surface_->GetVSyncProvider()) {
247 surface_->GetVSyncProvider()->GetVSyncParameters(base::Bind( 244 surface_->GetVSyncProvider()->GetVSyncParameters(base::Bind(
248 &VrShellGl::UpdateVSyncParameters, weak_ptr_factory_.GetWeakPtr())); 245 &VrShellGl::UpdateVSyncParameters, weak_ptr_factory_.GetWeakPtr()));
249 } else { 246 } else {
250 LOG(ERROR) << "No VSync Provider"; 247 LOG(ERROR) << "No VSync Provider";
251 } 248 }
252 249
253 unsigned int textures[2]; 250 unsigned int textures[3];
254 glGenTextures(2, textures); 251 glGenTextures(3, textures);
255 ui_texture_id_ = textures[0]; 252 ui_texture_id_ = textures[0];
256 content_texture_id_ = textures[1]; 253 content_texture_id_ = textures[1];
254 webvr_texture_id_ = textures[2];
255
257 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_); 256 ui_surface_texture_ = gl::SurfaceTexture::Create(ui_texture_id_);
258 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_); 257 content_surface_texture_ = gl::SurfaceTexture::Create(content_texture_id_);
258 webvr_surface_texture_ = gl::SurfaceTexture::Create(webvr_texture_id_);
259
259 ui_surface_.reset(new gl::ScopedJavaSurface(ui_surface_texture_.get())); 260 ui_surface_.reset(new gl::ScopedJavaSurface(ui_surface_texture_.get()));
260 content_surface_.reset(new gl::ScopedJavaSurface( 261 content_surface_.reset(new gl::ScopedJavaSurface(
261 content_surface_texture_.get())); 262 content_surface_texture_.get()));
263 webvr_surface_.reset(new gl::ScopedJavaSurface(
264 webvr_surface_texture_.get()));
265
262 ui_surface_texture_->SetFrameAvailableCallback(base::Bind( 266 ui_surface_texture_->SetFrameAvailableCallback(base::Bind(
263 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr())); 267 &VrShellGl::OnUIFrameAvailable, weak_ptr_factory_.GetWeakPtr()));
264 content_surface_texture_->SetFrameAvailableCallback(base::Bind( 268 content_surface_texture_->SetFrameAvailableCallback(base::Bind(
265 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr())); 269 &VrShellGl::OnContentFrameAvailable, weak_ptr_factory_.GetWeakPtr()));
270 webvr_surface_texture_->SetFrameAvailableCallback(base::Bind(
271 &VrShellGl::OnWebVrFrameAvailable, weak_ptr_factory_.GetWeakPtr()));
266 272
267 content_surface_texture_->SetDefaultBufferSize( 273 content_surface_texture_->SetDefaultBufferSize(
268 content_tex_physical_size_.width, content_tex_physical_size_.height); 274 content_tex_physical_size_.width, content_tex_physical_size_.height);
269 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width, 275 ui_surface_texture_->SetDefaultBufferSize(ui_tex_physical_size_.width,
270 ui_tex_physical_size_.height); 276 ui_tex_physical_size_.height);
277 // Set a small default surface size for WebVR since we may not need it.
278 // Will be resized as needed.
279 webvr_surface_texture_->SetDefaultBufferSize(1, 1);
271 280
272 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind( 281 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind(
273 &VrShell::SurfacesChanged, weak_vr_shell_, 282 &VrShell::SurfacesChanged, weak_vr_shell_,
274 content_surface_->j_surface().obj(), 283 content_surface_->j_surface().obj(),
275 ui_surface_->j_surface().obj())); 284 ui_surface_->j_surface().obj()));
276 return true; 285 return true;
277 } 286 }
278 287
279 void VrShellGl::OnUIFrameAvailable() { 288 void VrShellGl::OnUIFrameAvailable() {
280 ui_surface_texture_->UpdateTexImage(); 289 ui_surface_texture_->UpdateTexImage();
281 } 290 }
282 291
283 void VrShellGl::OnContentFrameAvailable() { 292 void VrShellGl::OnContentFrameAvailable() {
284 content_surface_texture_->UpdateTexImage(); 293 content_surface_texture_->UpdateTexImage();
285 } 294 }
286 295
296 void VrShellGl::OnWebVrFrameAvailable() {
297 if (!webvr_pending_poses_.size()) {
298 VLOG(2) << __FUNCTION__ << ": No pending pose, saving this frame for later";
299 ++webvr_already_available_frames_;
300 return;
301 }
302 uint32_t pose_index = webvr_pending_poses_.front();
303 webvr_pending_poses_.pop_front();
304 VLOG(2) << __FUNCTION__ << ": receiving WebVR frame " << pose_index;
305 webvr_surface_texture_->UpdateTexImage();
306 auto remaining_poses = webvr_pending_poses_.size();
307 if (remaining_poses > 0) {
308 VLOG(2) << __FUNCTION__ << ": still have " << remaining_poses << " pose(s) queued up.";
309 }
310 //long frameTimestamp = mWebVrSurfaceTexture.getTimestamp();
311 //if (frameTimestamp == 0) {
312 // Log.w(TAG, "Invalid timestamp for frame on WebVR waiting list. This should not happen.");
313 // return;
314 //}
315 DrawFrame(pose_index);
316 }
317
287 void VrShellGl::GvrInit(gvr_context* gvr_api) { 318 void VrShellGl::GvrInit(gvr_context* gvr_api) {
288 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api); 319 gvr_api_ = gvr::GvrApi::WrapNonOwned(gvr_api);
289 controller_.reset(new VrController(gvr_api)); 320 controller_.reset(new VrController(gvr_api));
290 321
291 ViewerType viewerType; 322 ViewerType viewerType;
292 switch (gvr_api_->GetViewerType()) { 323 switch (gvr_api_->GetViewerType()) {
293 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM: 324 case gvr::ViewerType::GVR_VIEWER_TYPE_DAYDREAM:
294 viewerType = ViewerType::DAYDREAM; 325 viewerType = ViewerType::DAYDREAM;
295 break; 326 break;
296 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD: 327 case gvr::ViewerType::GVR_VIEWER_TYPE_CARDBOARD:
297 viewerType = ViewerType::CARDBOARD; 328 viewerType = ViewerType::CARDBOARD;
298 break; 329 break;
299 default: 330 default:
300 NOTREACHED(); 331 NOTREACHED();
301 viewerType = ViewerType::UNKNOWN_TYPE; 332 viewerType = ViewerType::UNKNOWN_TYPE;
302 break; 333 break;
303 } 334 }
304 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType), 335 UMA_HISTOGRAM_ENUMERATION("VRViewerType", static_cast<int>(viewerType),
305 static_cast<int>(ViewerType::VIEWER_TYPE_MAX)); 336 static_cast<int>(ViewerType::VIEWER_TYPE_MAX));
306 } 337 }
307 338
308 void VrShellGl::InitializeRenderer() { 339 void VrShellGl::InitializeRenderer() {
309 // While WebVR is going through the compositor path, it shares
310 // the same texture ID. This will change once it gets its own
311 // surface, but store it separately to avoid future confusion.
312 // TODO(klausw,crbug.com/655722): remove this.
313 webvr_texture_id_ = content_texture_id_;
314 // Out of paranoia, explicitly reset the "pose valid" flags to false
315 // from the GL thread. The constructor ran in the UI thread.
316 // TODO(klausw,crbug.com/655722): remove this.
317 webvr_head_pose_valid_.assign(kPoseRingBufferSize, false);
318
319 gvr_api_->InitializeGl(); 340 gvr_api_->InitializeGl();
320 std::vector<gvr::BufferSpec> specs; 341 std::vector<gvr::BufferSpec> specs;
321 // For kFramePrimaryBuffer (primary VrShell and WebVR content) 342 // For kFramePrimaryBuffer (primary VrShell and WebVR content)
322 specs.push_back(gvr_api_->CreateBufferSpec()); 343 specs.push_back(gvr_api_->CreateBufferSpec());
323 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize(); 344 render_size_primary_ = specs[kFramePrimaryBuffer].GetSize();
345 render_size_primary_vrshell_ = render_size_primary_;
324 346
325 // For kFrameHeadlockedBuffer (for WebVR insecure content warning). 347 // For kFrameHeadlockedBuffer (for WebVR insecure content warning).
326 // Set this up at fixed resolution, the (smaller) FOV gets set below. 348 // Set this up at fixed resolution, the (smaller) FOV gets set below.
327 specs.push_back(gvr_api_->CreateBufferSpec()); 349 specs.push_back(gvr_api_->CreateBufferSpec());
328 specs.back().SetSize(kHeadlockedBufferDimensions); 350 specs.back().SetSize(kHeadlockedBufferDimensions);
329 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize(); 351 render_size_headlocked_ = specs[kFrameHeadlockedBuffer].GetSize();
330 352
331 swap_chain_.reset(new gvr::SwapChain(gvr_api_->CreateSwapChain(specs))); 353 swap_chain_.reset(new gvr::SwapChain(gvr_api_->CreateSwapChain(specs)));
332 354
333 vr_shell_renderer_.reset(new VrShellRenderer()); 355 vr_shell_renderer_.reset(new VrShellRenderer());
(...skipping 59 matching lines...)
393 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) { 415 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_APP)) {
394 main_thread_task_runner_->PostTask( 416 main_thread_task_runner_->PostTask(
395 FROM_HERE, base::Bind(&VrShell::AppButtonPressed, weak_vr_shell_)); 417 FROM_HERE, base::Bind(&VrShell::AppButtonPressed, weak_vr_shell_));
396 } 418 }
397 419
398 if (web_vr_mode_) { 420 if (web_vr_mode_) {
399 // Process screen touch events for Cardboard button compatibility. 421 // Process screen touch events for Cardboard button compatibility.
400 // Also send tap events for controller "touchpad click" events. 422 // Also send tap events for controller "touchpad click" events.
401 if (touch_pending_ || controller_->ButtonUpHappened( 423 if (touch_pending_ || controller_->ButtonUpHappened(
402 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) { 424 gvr::ControllerButton::GVR_CONTROLLER_BUTTON_CLICK)) {
425 VLOG(1) << __FUNCTION__ << ": WebVR emulating Cardboard button";
403 touch_pending_ = false; 426 touch_pending_ = false;
404 std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent()); 427 std::unique_ptr<WebGestureEvent> gesture(new WebGestureEvent());
405 gesture->sourceDevice = blink::WebGestureDeviceTouchpad; 428 gesture->sourceDevice = blink::WebGestureDeviceTouchpad;
406 gesture->timeStampSeconds = 429 gesture->timeStampSeconds =
407 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF(); 430 (base::TimeTicks::Now() - base::TimeTicks()).InSecondsF();
408 gesture->type = WebInputEvent::GestureTapDown; 431 gesture->type = WebInputEvent::GestureTapDown;
409 gesture->x = 0; 432 gesture->x = 0;
410 gesture->y = 0; 433 gesture->y = 0;
411 SendGesture(InputTarget::CONTENT, std::move(gesture)); 434 SendGesture(InputTarget::CONTENT, std::move(gesture));
412 } 435 }
(...skipping 145 matching lines...)
558 } 581 }
559 current_input_target_ = input_target; 582 current_input_target_ = input_target;
560 if (current_input_target_ != InputTarget::NONE) { 583 if (current_input_target_ != InputTarget::NONE) {
561 WebInputEvent::Type type = 584 WebInputEvent::Type type =
562 new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove; 585 new_target ? WebInputEvent::MouseEnter : WebInputEvent::MouseMove;
563 SendGesture(input_target, 586 SendGesture(input_target,
564 MakeMouseEvent(type, timestamp, pixel_x, pixel_y)); 587 MakeMouseEvent(type, timestamp, pixel_x, pixel_y));
565 } 588 }
566 } 589 }
567 590
591 static double getMonotonicTimestampMs() {
592 // TODO(klausw): replace with TimeInMicroseconds()?
593 #if defined(OS_ANDROID)
594 // Android surfacetexture timestamp compatible timer? See:
595 // http://androidxref.com/7.0.0_r1/xref/frameworks/native/libs/gui/Surface.cpp#370
596 // http://androidxref.com/7.0.0_r1/xref/frameworks/rs/rsCppUtils.h#162
597 struct timespec t;
598 t.tv_sec = t.tv_nsec = 0;
599 clock_gettime(CLOCK_MONOTONIC, &t);
600 return t.tv_sec * 1e3 + t.tv_nsec * 1e-6;
601 #else
602 return 0.0;
603 #endif
604 }
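The TODO above asks whether the raw clock_gettime call could be replaced with Chromium's time utilities. A minimal sketch of that alternative, assuming base::TimeTicks (CLOCK_MONOTONIC-based on POSIX/Android, and already used elsewhere in this file) stays comparable to the SurfaceTexture timestamps referenced above:

// Sketch only, not part of the patch: the same millisecond value expressed
// via base::TimeTicks instead of a raw clock_gettime call.
static double GetMonotonicTimestampMsViaTimeTicks() {
  return (base::TimeTicks::Now() - base::TimeTicks()).InMillisecondsF();
}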
605
568 void VrShellGl::SendGesture(InputTarget input_target, 606 void VrShellGl::SendGesture(InputTarget input_target,
569 std::unique_ptr<blink::WebInputEvent> event) { 607 std::unique_ptr<blink::WebInputEvent> event) {
570 DCHECK(input_target != InputTarget::NONE); 608 DCHECK(input_target != InputTarget::NONE);
571 const base::WeakPtr<VrInputManager>& weak_ptr = 609 const base::WeakPtr<VrInputManager>& weak_ptr =
572 input_target == InputTarget::CONTENT ? content_input_manager_ 610 input_target == InputTarget::CONTENT ? content_input_manager_
573 : ui_input_manager_; 611 : ui_input_manager_;
574 main_thread_task_runner_->PostTask( 612 main_thread_task_runner_->PostTask(
575 FROM_HERE, 613 FROM_HERE,
576 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr, 614 base::Bind(&VrInputManager::ProcessUpdatedGesture, weak_ptr,
577 base::Passed(std::move(event)))); 615 base::Passed(std::move(event))));
578 } 616 }
579 617
580 void VrShellGl::SetGvrPoseForWebVr(const gvr::Mat4f& pose, uint32_t pose_num) { 618 void VrShellGl::SetWebVRGvrPose(
581 webvr_head_pose_[pose_num % kPoseRingBufferSize] = pose; 619 const gvr::Mat4f& pose, uint32_t pose_index, int64_t pose_time_nanos) {
582 webvr_head_pose_valid_[pose_num % kPoseRingBufferSize] = true; 620 webvr_head_pose_[pose_index % kPoseRingBufferSize] = pose;
621 webvr_time_submit_[pose_index % kPoseRingBufferSize] = 0.0;
583 } 622 }
584 623
585 bool VrShellGl::WebVrPoseByteIsValid(int pose_index_byte) { 624 void VrShellGl::DrawFrame(uint32_t pose_index) {
586 if (pose_index_byte < 0) { 625 TRACE_EVENT1("gpu", "VrShellGl::DrawFrame", "frame", pose_index);
587 return false;
588 }
589 if (!webvr_head_pose_valid_[pose_index_byte % kPoseRingBufferSize]) {
590 VLOG(1) << "WebVR: reject decoded pose index " << pose_index_byte <<
591 ", not a valid pose";
592 return false;
593 }
594 return true;
595 }
596
597 void VrShellGl::DrawFrame() {
598 TRACE_EVENT0("gpu", "VrShellGl::DrawFrame");
599 // Reset the viewport list to just the pair of viewports for the 626 // Reset the viewport list to just the pair of viewports for the
600 // primary buffer each frame. Head-locked viewports get added by 627 // primary buffer each frame. Head-locked viewports get added by
601 // DrawVrShell if needed. 628 // DrawVrShell if needed.
602 buffer_viewport_list_->SetToRecommendedBufferViewports(); 629 buffer_viewport_list_->SetToRecommendedBufferViewports();
603 630
604 gvr::Frame frame = swap_chain_->AcquireFrame(); 631 if (web_vr_mode_) {
605 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow(); 632 if (!pose_index) {
606 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos; 633 LOG(INFO) << "klausw:DrawFrame: no pose index, not drawing WebVR.";
634 return;
635 }
607 636
608 gvr::Mat4f head_pose = 637 webvr_time_surfaced_[pose_index % kPoseRingBufferSize] = getMonotonicTimestampMs();
609 gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time);
610 638
611 gvr::Vec3f position = GetTranslation(head_pose); 639 // If needed, resize the primary buffer for use with WebVR.
612 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) { 640 if (render_size_primary_ != render_size_primary_webvr_) {
613 // This appears to be a 3DOF pose without a neck model. Add one. 641 if (!render_size_primary_webvr_.width) {
614 // The head pose has redundant data. Assume we're only using the 642 VLOG(2) << "WebVR rendering size not known yet, dropping frame";
615 // object_from_reference_matrix, we're not updating position_external. 643 return;
616 // TODO: Not sure what object_from_reference_matrix is. The new api removed 644 }
617 // it. For now, removing it seems working fine. 645 VLOG(1) << "WebVR set size " << render_size_primary_webvr_.width << "x" << render_size_primary_webvr_.height;
618 gvr_api_->ApplyNeckModel(head_pose, 1.0f); 646 render_size_primary_ = render_size_primary_webvr_;
647 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
648 }
649 } else {
650 if (render_size_primary_ != render_size_primary_vrshell_) {
651 VLOG(1) << "WebVR restore size " << render_size_primary_vrshell_.width << "x" << render_size_primary_vrshell_.height;
652 render_size_primary_ = render_size_primary_vrshell_;
653 swap_chain_->ResizeBuffer(kFramePrimaryBuffer, render_size_primary_);
654 }
655 }
656
657 gvr::Mat4f head_pose;
658 gvr::Frame frame = gvr::Frame(nullptr);
659 {
660 TRACE_EVENT0("gpu", "AcquireFrame");
661 frame = swap_chain_->AcquireFrame();
662 }
663
664 if (web_vr_mode_) {
665 int idx = pose_index % kPoseRingBufferSize;
666 webvr_time_acquired_[idx] = getMonotonicTimestampMs();
667 // TODO(klausw): report acquire time back to JS? If it blocks,
668 // completion time == VSYNC time or a close approximation which
669 // may be useful. But not helpful if it doesn't block.
670 head_pose = webvr_head_pose_[idx];
671 } else {
672 gvr::ClockTimePoint target_time = gvr::GvrApi::GetTimePointNow();
673 target_time.monotonic_system_time_nanos += kPredictionTimeWithoutVsyncNanos;
674
675 head_pose = gvr_api_->GetHeadSpaceFromStartSpaceRotation(target_time);
676
677 gvr::Vec3f position = GetTranslation(head_pose);
678 if (position.x == 0.0f && position.y == 0.0f && position.z == 0.0f) {
679 // This appears to be a 3DOF pose without a neck model. Add one.
680 // The head pose has redundant data. Assume we're only using the
681 // object_from_reference_matrix, we're not updating position_external.
682 // TODO: Not sure what object_from_reference_matrix is. The new api
683 // removed it. For now, removing it seems working fine.
684 gvr_api_->ApplyNeckModel(head_pose, 1.0f);
685 }
619 } 686 }
620 687
621 frame.BindBuffer(kFramePrimaryBuffer); 688 frame.BindBuffer(kFramePrimaryBuffer);
622 689
623 // Update the render position of all UI elements (including desktop). 690 // Update the render position of all UI elements (including desktop).
624 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f; 691 const float screen_tilt = kDesktopScreenTiltDefault * M_PI / 180.0f;
625 scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds()); 692 scene_->UpdateTransforms(screen_tilt, TimeInMicroseconds());
626 693
627 UpdateController(GetForwardVector(head_pose)); 694 UpdateController(GetForwardVector(head_pose));
628 695
629 if (web_vr_mode_) { 696 if (web_vr_mode_) {
630 DrawWebVr(); 697 DrawWebVr(pose_index);
631 698 #define CHECK_FRAME_COUNTER_PIXEL_FOR_DEBUGGING 0
632 // When using async reprojection, we need to know which pose was used in 699 #if CHECK_FRAME_COUNTER_PIXEL_FOR_DEBUGGING
633 // the WebVR app for drawing this frame. Due to unknown amounts of 700 uint32_t pixel_pose = GetPixelEncodedPoseIndex();
634 // buffering in the compositor and SurfaceTexture, we read the pose number 701 if (pixel_pose != pose_index) {
635 // from a corner pixel. There's no point in doing this for legacy 702 LOG(ERROR) << __FUNCTION__ << ": poses got out of sync, pixel=" << pixel_pose << " != pose_index=" << pose_index;
636 // distortion rendering since that doesn't need a pose, and reading back
637 // pixels is an expensive operation. TODO(klausw,crbug.com/655722): stop
638 // doing this once we have working no-compositor rendering for WebVR.
639 if (gvr_api_->GetAsyncReprojectionEnabled()) {
640 int pose_index_byte = GetPixelEncodedPoseIndexByte();
641 if (WebVrPoseByteIsValid(pose_index_byte)) {
642 // We have a valid pose, use it for reprojection.
643 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_FULL);
644 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_FULL);
645 head_pose = webvr_head_pose_[pose_index_byte % kPoseRingBufferSize];
646 // We can't mark the used pose as invalid since unfortunately
647 // we have to reuse them. The compositor will re-submit stale
648 // frames on vsync, and we can't tell that this has happened
649 // until we've read the pose index from it, and at that point
650 // it's too late to skip rendering.
651 } else {
652 // If we don't get a valid frame ID back we shouldn't attempt
653 // to reproject by an invalid matrix, so turn off reprojection
654 // instead. Invalid poses can permanently break reprojection
655 // for this GVR instance: http://crbug.com/667327
656 webvr_left_viewport_->SetReprojection(GVR_REPROJECTION_NONE);
657 webvr_right_viewport_->SetReprojection(GVR_REPROJECTION_NONE);
658 }
659 } 703 }
704 #endif
660 } 705 }
661 706
662 DrawVrShell(head_pose, frame); 707 DrawVrShell(head_pose, frame);
663 708
664 frame.Unbind(); 709 frame.Unbind();
710 glFlush();
665 frame.Submit(*buffer_viewport_list_, head_pose); 711 frame.Submit(*buffer_viewport_list_, head_pose);
666 712
667 // No need to SwapBuffers for an offscreen surface. 713 // No need to SwapBuffers for an offscreen surface.
668 ScheduleNextDrawFrame(); 714 // TODO(klausw): is this true? Test with async reprojection off.
715 #if 0
716 if (web_vr_mode_ && !gvr_api_->GetAsyncReprojectionEnabled()) {
717 // WebVR uses RENDERMODE_WHEN_DIRTY for the Java glSurfaceView,
718 // and never actually marks frames as dirty. We need to manually
719 // swap buffers if not using reprojection since the GvrLayout
720 // won't do it for us.
721 eglSwapBuffers(eglGetDisplay(EGL_DEFAULT_DISPLAY),
722 eglGetCurrentSurface(EGL_DRAW));
723 }
724 #endif
725
726 if (web_vr_mode_) {
727 double submit_time = webvr_time_submit_[pose_index % kPoseRingBufferSize];
728
729 double prev_submit_time = 0.0;
730 for (int i = 1; i < kPoseRingBufferSize; ++i) {
731 int offset = kPoseRingBufferSize - i;
732 prev_submit_time = webvr_time_submit_[
733 (pose_index + offset) % kPoseRingBufferSize];
734 if (prev_submit_time != 0.0)
735 break;
736 }
737
738 int frameI = 1;
739 if (prev_submit_time != 0.0) {
740 // Rounded integer "frames taken" assuming 60Hz base rate.
741 frameI = ((submit_time - prev_submit_time) * 60 / 1000 + 0.5);
742 if (!frameI) frameI = 1;
743 }
744
745 int idx = pose_index % kPoseRingBufferSize;
746 double frame_start_time = webvr_time_frame_start_[idx];
747 double get_pose_time = webvr_time_get_pose_[idx];
748 double got_pose_time = webvr_time_got_pose_[idx];
749 double surfaced_time = webvr_time_surfaced_[idx];
750 double acquired_time = webvr_time_acquired_[idx];
751 double drawn_time = getMonotonicTimestampMs();
752
753 LOG(INFO) << "timing for frame " << pose_index <<
754 ", frameI " << frameI <<
755 ", rAF " << std::fixed << std::setprecision(1) <<
756 (get_pose_time - frame_start_time) << " getPose " <<
757 (got_pose_time - get_pose_time) << " gotPose " <<
758 (submit_time - got_pose_time) << " submit " <<
759 (surfaced_time - submit_time) << " surfaced " <<
760 (acquired_time - surfaced_time) << " acquired " <<
761 (drawn_time - acquired_time) << " drawn ";
762
763 // TODO(klausw): can completion reporting be moved earlier? I
764 // tried doing so right after AcquireFrame, but then framerate was
765 // very wobbly. Try moving the callback after DrawWebVr +
766 // glFlush() (not finish) to see if that helps?
767 main_thread_task_runner_->PostTask(FROM_HERE, base::Bind(
768 &VrShell::OnWebVRFrameSubmitted, weak_vr_shell_,
769 webvr_surface_handle_,
770 pose_index,
771 acquired_time - submit_time));
772 } else {
773 // Only request a new scheduled frame in non-WebVR mode. In WebVR mode,
774 // the next frame will be drawn in response to SubmitFrame.
775 ScheduleNextDrawFrame();
776 }
669 } 777 }
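For reference, the "frames taken" rounding in the timing block above maps the gap between consecutive submit timestamps (in milliseconds) onto whole 60 Hz frame intervals; a worked example, not part of the patch:

// Example of the frameI computation above (times in milliseconds):
//   16.7 ms gap -> 16.7 * 60 / 1000 + 0.5 = 1.502 -> truncates to 1 frame
//   33.4 ms gap -> 33.4 * 60 / 1000 + 0.5 = 2.504 -> truncates to 2 frames
int FramesTakenAt60Hz(double submit_ms, double prev_submit_ms) {
  int frames = static_cast<int>((submit_ms - prev_submit_ms) * 60 / 1000 + 0.5);
  return frames > 0 ? frames : 1;  // mirrors the "if (!frameI) frameI = 1" guard
}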
670 778
671 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose, 779 void VrShellGl::DrawVrShell(const gvr::Mat4f& head_pose,
672 gvr::Frame &frame) { 780 gvr::Frame &frame) {
673 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell"); 781 TRACE_EVENT0("gpu", "VrShellGl::DrawVrShell");
674 std::vector<const ContentRectangle*> head_locked_elements; 782 std::vector<const ContentRectangle*> head_locked_elements;
675 std::vector<const ContentRectangle*> world_elements; 783 std::vector<const ContentRectangle*> world_elements;
676 for (const auto& rect : scene_->GetUiElements()) { 784 for (const auto& rect : scene_->GetUiElements()) {
677 if (!rect->visible) { 785 if (!rect->visible) {
678 continue; 786 continue;
(...skipping 41 matching lines...)
720 // TODO(mthiesse): We don't unbind this? Maybe some cleanup is in order 828 // TODO(mthiesse): We don't unbind this? Maybe some cleanup is in order
721 // here. 829 // here.
722 frame.BindBuffer(kFrameHeadlockedBuffer); 830 frame.BindBuffer(kFrameHeadlockedBuffer);
723 glClearColor(0.0f, 0.0f, 0.0f, 0.0f); 831 glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
724 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); 832 glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
725 DrawUiView(nullptr, head_locked_elements, render_size_headlocked_, 833 DrawUiView(nullptr, head_locked_elements, render_size_headlocked_,
726 kViewportListHeadlockedOffset); 834 kViewportListHeadlockedOffset);
727 } 835 }
728 } 836 }
729 837
730 gvr::Sizei VrShellGl::GetWebVRCompositorSurfaceSize() { 838 void VrShellGl::GetWebVRSurfaceHandle(int32_t width, int32_t height, const device::mojom::VRDisplay::GetSurfaceHandleCallback& callback) {
731 // This is a stopgap while we're using the WebVR compositor rendering path. 839 VLOG(2) << __FUNCTION__ << ": size=" << width << "x" << height;
732 // TODO(klausw,crbug.com/655722): Remove this method and member once we're 840
733 // using a separate WebVR render surface. 841 if (!webvr_surface_texture_.get()) {
734 return content_tex_physical_size_; 842 // We can't set up a surface due to not having a SurfaceTexture.
843 VLOG(1) << __FUNCTION__ << ": Failed, don't have a SurfaceTexture";
844 callback.Run(0);
artem.bolgar 2017/02/14 05:04:24 You can't do this here. The callback may be execut
845 return;
846 }
847
848 if (webvr_surface_handle_) {
849 // We have a surface, resize if needed.
850 if (render_size_primary_webvr_.width == width && render_size_primary_webvr_.height == height) {
851 VLOG(1) << __FUNCTION__ << ": Ignoring redundant call, this matches the current size.";
852 } else {
853 render_size_primary_webvr_.width = width;
854 render_size_primary_webvr_.height = height;
855 // The size is a bit tricky to change after the fact, see
856 // SurfaceTexture.setDefaultBufferSize documentation:
857 //
858 // For OpenGL ES, the EGLSurface should be destroyed
859 // (via eglDestroySurface), made not-current (via
860 // eglMakeCurrent), and then recreated (via
861 // eglCreateWindowSurface) to ensure that the new
862 // default size has taken effect.
863 webvr_surface_texture_->SetDefaultBufferSize(width, height);
864 }
865 } else {
866 // Create a new surface.
867 render_size_primary_webvr_.width = width;
868 render_size_primary_webvr_.height = height;
869 webvr_surface_texture_->SetDefaultBufferSize(width, height);
870 // Assume we've already created the Surface.
871 //Java_VrShellImpl_createWebVrRenderSurface(env, j_vr_shell_.obj(), width, height);
872 SetWebVrSurface();
873 }
874 callback.Run(webvr_surface_handle_);
artem.bolgar 2017/02/14 05:04:24 You can't do this here. The callback may be execut
875 }
876
877 void VrShellGl::SetWebVrSurface() {
878 VLOG(1) << __FUNCTION__ << ": size=" << render_size_primary_webvr_.width << "x" << render_size_primary_webvr_.height << " webvr_texture_id_=" << webvr_texture_id_ << " webvr_surface_handle_=" << webvr_surface_handle_;
879
880 if (webvr_surface_handle_) {
881 VLOG(1) << __FUNCTION__ << ": ignoring redundant call, already have webvr_surface_handle_=" << webvr_surface_handle_;
882 return;
883 }
884
885 // Note: This ensures that any local references used by
886 // ANativeWindow_fromSurface are released immediately. This is needed as a
887 // workaround for https://code.google.com/p/android/issues/detail?id=68174
888 JNIEnv* env = base::android::AttachCurrentThread();
889 base::android::ScopedJavaLocalFrame scoped_local_reference_frame(env);
890 ANativeWindow* window = ANativeWindow_fromSurface(
891 env, webvr_surface_->j_surface().obj());
892
893 // This variant doesn't seem to be working - lookup fails?!
894 //ANativeWindow* window = webvr_surface_texture_->CreateSurface();
895
896 gpu::GpuSurfaceTracker* tracker = gpu::GpuSurfaceTracker::Get();
897 ANativeWindow_acquire(window);
898
899 // TODO(klausw): is setBuffersGeometry necessary?
900 ANativeWindow_setBuffersGeometry(window, render_size_primary_webvr_.width, render_size_primary_webvr_.height, WINDOW_FORMAT_RGBA_8888);
901
902 auto handle = tracker->AddSurfaceForNativeWidget(window);
903
904 tracker->RegisterViewSurface(handle, webvr_surface_->j_surface().obj());
905
906 webvr_surface_handle_ = handle;
907
908 // Now we're ready for child_process_service_impl.cc to fetch it via
909 // AIDL/Binder by ID.
910
911 ANativeWindow_release(window);
912
913 VLOG(1) << __FUNCTION__ << ": size=" << render_size_primary_webvr_.width << "x" << render_size_primary_webvr_.height << ", webvr_surface_handle_=" << handle;
914 // TODO(klausw): add cleanup to avoid leaking surfaces:
915 //
916 // The caller must release the underlying reference when done with the handle
917 // by calling ANativeWindow_release().
918 }
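The TODO above (and the reviewer comment near the destructor about leaked WebVR surfaces) points at missing teardown. A hypothetical sketch of that cleanup, assuming GpuSurfaceTracker's UnregisterViewSurface()/RemoveSurface() are the inverses of the RegisterViewSurface()/AddSurfaceForNativeWidget() calls used above; the method name is illustrative and not declared anywhere in the patch:

// Hypothetical cleanup sketch, not part of the patch; verify the tracker API
// before relying on these calls.
void VrShellGl::ReleaseWebVrSurfaceHandle() {
  if (!webvr_surface_handle_)
    return;
  gpu::GpuSurfaceTracker* tracker = gpu::GpuSurfaceTracker::Get();
  tracker->UnregisterViewSurface(webvr_surface_handle_);
  tracker->RemoveSurface(webvr_surface_handle_);
  webvr_surface_handle_ = 0;
}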
919
920 void VrShellGl::SubmitWebVRFrame(int32_t surface_handle, const device::mojom::VRPosePtr& pose) {
921 // uint32_t pose_index, double frameStart, double serviceStart, double getPose, double gotPose, double submit) {
922 uint32_t pose_index = pose->poseIndex;
923
924 if (surface_handle != webvr_surface_handle_) {
925 VLOG(2) << __FUNCTION__ << ": ignoring submitted frame for surface " << surface_handle << ", ours is " << webvr_surface_handle_;
926 }
927
928 TRACE_EVENT1("media", "klausw:VrShell SubmitWebVRFrame", "frame", pose_index);
929 VLOG(2) << __FUNCTION__ << ": frame " << pose_index;
930
931 webvr_last_submitted_ = pose_index;
932
933 int idx = pose_index % kPoseRingBufferSize;
934 double submit_time = getMonotonicTimestampMs();
935 webvr_time_submit_[idx] = submit_time;
936
937 #if 0
938 // Align clocks. TODO(klausw): this assumes submit times being equal, this ignores RPC lag.
939 double submitTs = pose->ts_submit;
940 auto fromJS = [=](double t) { return t - submitTs + submit_time; };
941 webvr_time_frame_start_[idx] = fromJS(pose->ts_frameStart);
942 webvr_time_get_pose_[idx] = fromJS(pose->ts_getPose);
943 webvr_time_got_pose_[idx] = fromJS(pose->ts_gotPose);
944 #else
945 webvr_time_frame_start_[idx] = pose->ts_frameStart;
946 webvr_time_get_pose_[idx] = pose->ts_getPose;
947 webvr_time_got_pose_[idx] = pose->ts_gotPose;
948 #endif
949
950 //Java_VrShellImpl_expectWebVrFrame(env, j_vr_shell_.obj(), static_cast<jlong>(pose_index));
951 VLOG(1) << __FUNCTION__ << ": expecting WebVR frame " << pose_index;
952 webvr_pending_poses_.push_back(pose_index);
953 if (webvr_already_available_frames_ > 0) {
954 // If the "frame available" was already triggered, draw now.
955 VLOG(2) << __FUNCTION__ << ": Drawing saved frame now";
956 --webvr_already_available_frames_;
957 OnWebVrFrameAvailable();
958 }
959
960 {
961 TRACE_EVENT1("gpu", "glFinish", "before frame", pose_index);
962 // This is a load-bearing glFinish. I'm not entirely sure what's
963 // going on since we haven't actually emitted any GL commands on
964 // this context since the glFlush at the end of the previous
965 // frame, but this measurably reduces stalls in AcquireFrame and
966 // steadies the framerate, at the cost of reducing throughput. It
967 // effectively aligns rAF calls to be in sync with frame
968 // completion.
969 //
970 // Without the glFinish here, rAF calls stay aligned to vsync,
971 // with dropped frames to catch up as needed. This looks jankier.
972 //
973 // Putting the glFinish at the end of vr_shell's DrawFrame causes
974 // a larger latency gap than doing it here.
975 //
976 // TODO(klausw): try adjusting rAF timing offsets to keep timing
977 // steady? Or is it possible to tweak pose prediction to handle
978 // this better? May need cooperation from the JS app to handle
979 // uneven timing.
980 //
981 //if (pose_index % 20 >= 10)
982 glFinish();
983 }
735 } 984 }
736 985
737 void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose, 986 void VrShellGl::DrawUiView(const gvr::Mat4f* head_pose,
738 const std::vector<const ContentRectangle*>& elements, 987 const std::vector<const ContentRectangle*>& elements,
739 const gvr::Sizei& render_size, 988 const gvr::Sizei& render_size,
740 int viewport_offset) { 989 int viewport_offset) {
741 TRACE_EVENT0("gpu", "VrShellGl::DrawUiView"); 990 TRACE_EVENT0("gpu", "VrShellGl::DrawUiView");
742 for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) { 991 for (auto eye : {GVR_LEFT_EYE, GVR_RIGHT_EYE}) {
743 buffer_viewport_list_->GetBufferViewport( 992 buffer_viewport_list_->GetBufferViewport(
744 eye + viewport_offset, buffer_viewport_.get()); 993 eye + viewport_offset, buffer_viewport_.get());
(...skipping 111 matching lines...)
856 1105
857 // Move the beam origin to the hand. 1106 // Move the beam origin to the hand.
858 TranslateM(face_transform, face_transform, kHandPosition.x, kHandPosition.y, 1107 TranslateM(face_transform, face_transform, kHandPosition.x, kHandPosition.y,
859 kHandPosition.z); 1108 kHandPosition.z);
860 1109
861 transform = MatrixMul(render_matrix, face_transform); 1110 transform = MatrixMul(render_matrix, face_transform);
862 vr_shell_renderer_->GetLaserRenderer()->Draw(transform); 1111 vr_shell_renderer_->GetLaserRenderer()->Draw(transform);
863 } 1112 }
864 } 1113 }
865 1114
866 void VrShellGl::DrawWebVr() { 1115 void VrShellGl::DrawWebVr(uint32_t pose_index) {
867 TRACE_EVENT0("gpu", "VrShellGl::DrawWebVr"); 1116 TRACE_EVENT1("gpu", "VrShellGl::DrawWebVr", "frame", pose_index);
868 // Don't need face culling, depth testing, blending, etc. Turn it all off. 1117 // Don't need face culling, depth testing, blending, etc. Turn it all off.
869 glDisable(GL_CULL_FACE); 1118 glDisable(GL_CULL_FACE);
870 glDepthMask(GL_FALSE); 1119 glDepthMask(GL_FALSE);
871 glDisable(GL_DEPTH_TEST); 1120 glDisable(GL_DEPTH_TEST);
872 glDisable(GL_SCISSOR_TEST); 1121 glDisable(GL_SCISSOR_TEST);
873 glDisable(GL_BLEND); 1122 glDisable(GL_BLEND);
874 glDisable(GL_POLYGON_OFFSET_FILL); 1123 glDisable(GL_POLYGON_OFFSET_FILL);
875 1124
876 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height); 1125 glViewport(0, 0, render_size_primary_.width, render_size_primary_.height);
877 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_); 1126 vr_shell_renderer_->GetWebVrRenderer()->Draw(webvr_texture_id_);
878 1127
1128 if (!webvr_texture_bounds_need_update_at_.empty()) {
1129 uint32_t next_at = webvr_texture_bounds_need_update_at_.front();
1130 VLOG(2) << __FUNCTION__ << ": bounds update at " << next_at;
1131 if (next_at <= pose_index && pose_index - next_at < 0x40000000) {
1132 auto left_bounds = webvr_texture_bounds_left_.front();
1133 auto right_bounds = webvr_texture_bounds_right_.front();
1134 webvr_texture_bounds_need_update_at_.pop_front();
1135 webvr_texture_bounds_left_.pop_front();
1136 webvr_texture_bounds_right_.pop_front();
1137 VLOG(2) << __FUNCTION__ << ": Update texture bounds, left l=" << left_bounds.left << ",r=" << left_bounds.right << ",t=" << left_bounds.top << ",b=" << left_bounds.bottom;
1138 webvr_left_viewport_->SetSourceUv(left_bounds);
1139 webvr_right_viewport_->SetSourceUv(right_bounds);
1140 }
1141 }
1142
879 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE, 1143 buffer_viewport_list_->SetBufferViewport(GVR_LEFT_EYE,
880 *webvr_left_viewport_); 1144 *webvr_left_viewport_);
881 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE, 1145 buffer_viewport_list_->SetBufferViewport(GVR_RIGHT_EYE,
882 *webvr_right_viewport_); 1146 *webvr_right_viewport_);
883 } 1147 }
884 1148
885 void VrShellGl::OnTriggerEvent() { 1149 void VrShellGl::OnTriggerEvent() {
886 // Set a flag to handle this on the render thread at the next frame. 1150 // Set a flag to handle this on the render thread at the next frame.
887 touch_pending_ = true; 1151 touch_pending_ = true;
888 } 1152 }
889 1153
890 void VrShellGl::OnPause() { 1154 void VrShellGl::OnPause() {
891 draw_task_.Cancel(); 1155 draw_task_.Cancel();
892 controller_->OnPause(); 1156 controller_->OnPause();
893 gvr_api_->PauseTracking(); 1157 gvr_api_->PauseTracking();
894 } 1158 }
895 1159
896 void VrShellGl::OnResume() { 1160 void VrShellGl::OnResume() {
897 gvr_api_->RefreshViewerProfile(); 1161 gvr_api_->RefreshViewerProfile();
898 gvr_api_->ResumeTracking(); 1162 gvr_api_->ResumeTracking();
899 controller_->OnResume(); 1163 controller_->OnResume();
900 draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this))); 1164 draw_task_.Reset(base::Bind(&VrShellGl::DrawFrame, base::Unretained(this), 0));
901 ScheduleNextDrawFrame(); 1165 ScheduleNextDrawFrame();
902 } 1166 }
903 1167
904 void VrShellGl::SetWebVrMode(bool enabled) { 1168 void VrShellGl::SetWebVrMode(bool enabled) {
1169 VLOG(1) << __FUNCTION__ << ": enabled=" << enabled;
905 web_vr_mode_ = enabled; 1170 web_vr_mode_ = enabled;
906 } 1171 }
907 1172
908 void VrShellGl::UpdateWebVRTextureBounds(const gvr::Rectf& left_bounds, 1173 void VrShellGl::UpdateWebVRTextureBounds(uint32_t for_pose_index,
1174 const gvr::Rectf& left_bounds,
909 const gvr::Rectf& right_bounds) { 1175 const gvr::Rectf& right_bounds) {
910 webvr_left_viewport_->SetSourceUv(left_bounds); 1176 VLOG(2) << __FUNCTION__ << ": for_pose_index=" << for_pose_index << " left_bounds l=" << left_bounds.left << ",r=" << left_bounds.right << ",t=" << left_bounds.top << ",b=" << left_bounds.bottom;
911 webvr_right_viewport_->SetSourceUv(right_bounds); 1177 webvr_texture_bounds_need_update_at_.push_back(for_pose_index);
1178 webvr_texture_bounds_left_.push_back(left_bounds);
1179 webvr_texture_bounds_right_.push_back(right_bounds);
912 } 1180 }
913 1181
914 gvr::GvrApi* VrShellGl::gvr_api() { 1182 gvr::GvrApi* VrShellGl::gvr_api() {
915 return gvr_api_.get(); 1183 return gvr_api_.get();
916 } 1184 }
917 1185
918 void VrShellGl::ContentBoundsChanged(int width, int height) { 1186 void VrShellGl::ContentBoundsChanged(int width, int height) {
919 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged"); 1187 TRACE_EVENT0("gpu", "VrShellGl::ContentBoundsChanged");
920 content_tex_css_width_ = width; 1188 content_tex_css_width_ = width;
921 content_tex_css_height_ = height; 1189 content_tex_css_height_ = height;
(...skipping 46 matching lines...)
968 void VrShellGl::ForceExitVr() { 1236 void VrShellGl::ForceExitVr() {
969 main_thread_task_runner_->PostTask( 1237 main_thread_task_runner_->PostTask(
970 FROM_HERE, base::Bind(&VrShell::ForceExitVr, weak_vr_shell_)); 1238 FROM_HERE, base::Bind(&VrShell::ForceExitVr, weak_vr_shell_));
971 } 1239 }
972 1240
973 void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) { 1241 void VrShellGl::UpdateScene(std::unique_ptr<base::ListValue> commands) {
974 scene_->HandleCommands(std::move(commands), TimeInMicroseconds()); 1242 scene_->HandleCommands(std::move(commands), TimeInMicroseconds());
975 } 1243 }
976 1244
977 } // namespace vr_shell 1245 } // namespace vr_shell