ovr_sdk

view LibOVR/Src/Util/Util_Render_Stereo.h @ 0:1b39a1b46319

initial 0.4.4
author John Tsiombikas <nuclear@member.fsf.org>
date Wed, 14 Jan 2015 06:51:16 +0200
parents
children
line source
1 /************************************************************************************
3 Filename : Util_Render_Stereo.h
4 Content : Sample stereo rendering configuration classes.
5 Created : October 22, 2012
6 Authors : Michael Antonov, Tom Forsyth
8 Copyright : Copyright 2014 Oculus VR, LLC All Rights reserved.
10 Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License");
11 you may not use the Oculus VR Rift SDK except in compliance with the License,
12 which is provided at the time of installation or download, or which
13 otherwise accompanies this software in either electronic or hard copy form.
15 You may obtain a copy of the License at
17 http://www.oculusvr.com/licenses/LICENSE-3.2
19 Unless required by applicable law or agreed to in writing, the Oculus VR SDK
20 distributed under the License is distributed on an "AS IS" BASIS,
21 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
22 See the License for the specific language governing permissions and
23 limitations under the License.
25 *************************************************************************************/
27 #ifndef OVR_Util_Render_Stereo_h
28 #define OVR_Util_Render_Stereo_h
30 #include "../OVR_Stereo.h"
31 #include "../Tracking/Tracking_SensorStateReader.h"
33 namespace OVR { namespace Util { namespace Render {
//-----------------------------------------------------------------------------------
// **** Useful debug functions.
//
// Purely for debugging - the results are not very end-user-friendly.

// Returns a human-readable name for the given eye-cup type (for logs/tools).
char const* GetDebugNameEyeCupType ( EyeCupType eyeCupType );
// Returns a human-readable name for the given HMD type (for logs/tools).
char const* GetDebugNameHmdType ( HmdTypeEnum hmdType );
//-----------------------------------------------------------------------------------
// **** Higher-level utility functions.

// Returns the suggested rendertarget size for the given HMD.
// If bRendertargetSharedByBothEyes is true, the result is the size of the single
// combined buffer; otherwise it is the size of each individual per-eye buffer.
// pixelDensityInCenter scales rendered resolution relative to the physical display
// resolution at the center of the image (1.0 = match the display).
Sizei CalculateRecommendedTextureSize ( HmdRenderInfo const &hmd,
                                        bool bRendertargetSharedByBothEyes,
                                        float pixelDensityInCenter = 1.0f );

// Returns the recommended rendered field of view for the given eye.
// bMakeFovSymmetrical forces the returned FOV to be symmetric.
FovPort CalculateRecommendedFov ( HmdRenderInfo const &hmd,
                                  StereoEye eyeType,
                                  bool bMakeFovSymmetrical = false);

// Computes the full set of per-eye rendering parameters for the given eye and
// rendertarget configuration (see StereoEyeParams in OVR_Stereo.h).
// pOverrideRenderedPixelSize / pOverrideFovport optionally replace the computed
// render size / FOV (pass NULL for defaults); zoomFactor scales the projection.
StereoEyeParams CalculateStereoEyeParams ( HmdRenderInfo const &hmd,
                                           StereoEye eyeType,
                                           Sizei const &actualRendertargetSurfaceSize,
                                           bool bRendertargetSharedByBothEyes,
                                           bool bRightHanded = true,
                                           float zNear = 0.01f, float zFar = 10000.0f,
                                           Sizei const *pOverrideRenderedPixelSize = NULL,
                                           FovPort const *pOverrideFovport = NULL,
                                           float zoomFactor = 1.0f );

// Returns the offset of the given eye's virtual camera from the HMD-centered
// view origin. bMonoRenderingMode requests a single shared ("zero IPD") view.
// NOTE(review): exact mono-mode semantics live in the .cpp - confirm there.
Vector3f CalculateEyeVirtualCameraOffset(HmdRenderInfo const &hmd,
                                         StereoEye eyeType, bool bMonoRenderingMode );
// These are two components from StereoEyeParams that can be changed
// very easily without full recomputation of everything.
struct ViewportScaleAndOffset
{
    Recti            RenderedViewport;  // Pixel rectangle of the rendertarget actually rendered to.
    ScaleAndOffset2D EyeToSourceUV;     // Mapping from eye space to source-texture UV coordinates.
};
// Three ways to override the size of the render view dynamically.
// None of these require changing the distortion parameters or regenerating the
// distortion mesh, and can be called every frame if desired.

// Override with an explicit viewport rectangle.
// NOTE(review): presumably not clamped to the rendertarget, matching
// StereoConfig::SetRenderViewport below - confirm in the .cpp.
ViewportScaleAndOffset ModifyRenderViewport ( StereoEyeParams const &params,
                                              Sizei const &actualRendertargetSurfaceSize,
                                              Recti const &renderViewport );

// Override with a requested render size.
ViewportScaleAndOffset ModifyRenderSize ( StereoEyeParams const &params,
                                          Sizei const &actualRendertargetSurfaceSize,
                                          Sizei const &requestedRenderSize,
                                          bool bRendertargetSharedByBothEyes = false );

// Override with a pixel density - rendered pixels per physical display pixel
// (1.0 = match the display resolution in the center of the image).
ViewportScaleAndOffset ModifyRenderDensity ( StereoEyeParams const &params,
                                             Sizei const &actualRendertargetSurfaceSize,
                                             float pixelDensity = 1.0f,
                                             bool bRendertargetSharedByBothEyes = false );
97 //-----------------------------------------------------------------------------------
98 // ***** StereoConfig
100 // StereoConfig maintains a scene stereo state and allows switching between different
101 // stereo rendering modes. To support rendering, StereoConfig keeps track of HMD
102 // variables such as screen size, eye-to-screen distance and distortion, and computes
103 // extra data such as FOV and distortion center offsets based on it. Rendering
104 // parameters are returned through StereoEyeParams for each eye.
105 //
106 // Beyond regular 3D projection, this class supports rendering a 2D orthographic
107 // surface for UI and text. The 2D surface will be defined by CreateOrthoSubProjection().
108 // The (0,0) coordinate corresponds to eye center location.
109 //
110 // Applications are not required to use this class, but they should be doing very
111 // similar sequences of operations, and it may be useful to start with this class
112 // and modify it.
// Full per-eye rendering parameters plus the 2D orthographic projection used
// for UI/text rendering (see the StereoConfig comments above).
struct StereoEyeParamsWithOrtho
{
    StereoEyeParams StereoEye;        // 3D projection/viewport/distortion parameters for this eye.
    Matrix4f        OrthoProjection;  // 2D ortho projection; (0,0) corresponds to the eye center.
};
// Convenience pair of per-eye viewport/UV data, returned by the
// StereoConfig::SetRender* functions below.
struct ViewportScaleAndOffsetBothEyes
{
    ViewportScaleAndOffset Left;
    ViewportScaleAndOffset Right;
};
class StereoConfig
{
public:

    // StereoMode describes rendering modes that can be used by StereoConfig.
    // These modes control whether stereo rendering is used or not (Stereo_None),
    // and how it is implemented.
    enum StereoMode
    {
        Stereo_None                = 0,  // Single eye
        Stereo_LeftRight_Multipass = 1,  // One frustum per eye
    };

    StereoConfig(StereoMode mode = Stereo_LeftRight_Multipass);

    //---------------------------------------------------------------------------------------------
    // *** Core functions - every app MUST call these functions at least once.

    // Sets HMD parameters; also initializes distortion coefficients.
    void SetHmdRenderInfo(const HmdRenderInfo& hmd);

    // Set the physical size of the rendertarget surface the app created,
    // and whether one RT is shared by both eyes, or each eye has its own RT:
    //   true: both eyes are rendered to the same RT. Left eye starts at top-left, right eye starts at top-middle.
    //   false: each eye is rendered to its own RT. Some GPU architectures prefer this arrangement.
    // Typically, the app would call CalculateRecommendedTextureSize() to suggest the choice of RT size.
    // This setting must be exactly the size of the actual RT created, or the UVs produced will be incorrect.
    // If the app wants to render to a subsection of the RT, it should use SetRenderSize().
    void SetRendertargetSize (Size<int> const rendertargetSize,
                              bool rendertargetIsSharedByBothEyes );

    // Returns full set of Stereo rendering parameters for the specified eye.
    // Recomputes internal state first if it is dirty (see UpdateComputedState).
    const StereoEyeParamsWithOrtho& GetEyeRenderParams(StereoEye eye);

    //---------------------------------------------------------------------------------------------
    // *** Optional functions - an app may call these to override default behaviours.

    const HmdRenderInfo& GetHmdRenderInfo() const { return Hmd; }

    // Returns the recommended size of rendertargets.
    // If rendertargetIsSharedByBothEyes is true, this is the size of the combined buffer.
    // If rendertargetIsSharedByBothEyes is false, this is the size of each individual buffer.
    // pixelDensityInCenter may be set to any number - by default it will match the HMD resolution in the center of the image.
    // After creating the rendertargets, the application MUST call SetRendertargetSize() with the actual size created
    // (which can be larger or smaller as the app wishes, but StereoConfig needs to know either way).
    Sizei CalculateRecommendedTextureSize ( bool rendertargetSharedByBothEyes,
                                            float pixelDensityInCenter = 1.0f );

    // Sets a stereo rendering mode and updates internal cached
    // state (matrices, per-eye view) based on it.
    void       SetStereoMode(StereoMode mode) { Mode = mode; DirtyFlag = true; }
    StereoMode GetStereoMode() const          { return Mode; }

    // Sets the fieldOfView that the 2D coordinate area stretches to.
    void Set2DAreaFov(float fovRadians);

    // Really only for science experiments - no normal app should ever need to override
    // the HMD's lens descriptors. Passing NULL removes the override.
    //   Supply both    = set left and right.
    //   Supply just left = set both to the same.
    //   Supply neither = remove override.
    void SetLensOverride ( LensConfig const *pLensOverrideLeft = NULL,
                           LensConfig const *pLensOverrideRight = NULL );

    // Override the rendered FOV in various ways. All angles in tangent units.
    // This is not clamped to the physical FOV of the display - you'll need to do that yourself!
    //   Supply both    = set left and right.
    //   Supply just left = set both to the same.
    //   Supply neither = remove override.
    void SetFov ( FovPort const *pfovLeft = NULL,
                  FovPort const *pfovRight = NULL );

    // Convenience wrapper: sets a symmetric FOV (given in radians) for both eyes.
    void SetFovPortRadians ( float horizontal, float vertical )
    {
        FovPort fov = FovPort::CreateFromRadians(horizontal, vertical);
        SetFov( &fov, &fov );
    }

    // This forces a "zero IPD" mode where there is just a single render with an FOV that
    // is the union of the two calculated FOVs.
    // The calculated render is for the left eye. Any size & FOV overrides for the right
    // eye will be ignored.
    // If you query the right eye's size, you will get the same render
    // size & position as the left eye - you should not actually do the render of course!
    // The distortion values will be different, because it goes to a different place on the framebuffer.
    // Note that if you do this, the rendertarget does not need to be twice the width of
    // the render size any more.
    void SetZeroVirtualIpdOverride ( bool enableOverride );

    // Allows the app to specify near and far clip planes and the right/left-handedness of the projection matrix.
    void SetZClipPlanesAndHandedness ( float zNear = 0.01f, float zFar = 10000.0f,
                                       bool rightHandedProjection = true );

    // Allows the app to specify how much extra eye rotation to allow when determining the visible FOV.
    void SetExtraEyeRotation ( float extraEyeRotationInRadians = 0.0f );

    // The dirty flag is set by any of the above calls. Just handy for the app to know
    // if e.g. the distortion mesh needs regeneration.
    void SetDirty() { DirtyFlag = true; }
    bool IsDirty()  { return DirtyFlag; }

    // An app never needs to call this - GetEyeRenderParams will call it internally if
    // the state is dirty. However apps can call this explicitly to control when and where
    // computation is performed (e.g. not inside critical loops).
    void UpdateComputedState();

    // This returns the projection matrix with a "zoom". Does not modify any internal state.
    Matrix4f GetProjectionWithZoom ( StereoEye eye, float fovZoom ) const;

    //---------------------------------------------------------------------------------------------
    // The SetRender* functions are special.
    //
    // They do not require a full recalculation of state, and they do not change anything but the
    // ViewportScaleAndOffset data for the eyes (which they return), and do not set the dirty flag!
    // This means they can be called without regenerating the distortion mesh, and thus
    // can happily be called every frame without causing performance problems. Dynamic rescaling
    // of the rendertarget can help keep framerate up in demanding VR applications.
    // See the documentation for more details on their use.

    // Specify a pixel density - how many rendered pixels per pixel in the physical display.
    ViewportScaleAndOffsetBothEyes SetRenderDensity ( float pixelsPerDisplayPixel );

    // Supply the size directly. Will be clamped to the physical rendertarget size.
    ViewportScaleAndOffsetBothEyes SetRenderSize ( Sizei const &renderSizeLeft, Sizei const &renderSizeRight );

    // Supply the viewport directly. This is not clamped to the physical rendertarget - careful now!
    ViewportScaleAndOffsetBothEyes SetRenderViewport ( Recti const &renderViewportLeft, Recti const &renderViewportRight );

private:

    // *** Modifiable State

    StereoMode    Mode;
    HmdRenderInfo Hmd;

    float Area2DFov;  // FOV range mapping to the 2D area.

    // Only one of these three overrides can be true!
    enum SetViewportModeEnum
    {
        SVPM_Density,
        SVPM_Size,
        SVPM_Viewport,
    } SetViewportMode;
    // ...and depending which it is, one of the following are used.
    float SetViewportPixelsPerDisplayPixel;  // Used when SetViewportMode == SVPM_Density.
    Sizei SetViewportSize[2];                // Used when SetViewportMode == SVPM_Size (0/1 = left/right).
    Recti SetViewport[2];                    // Used when SetViewportMode == SVPM_Viewport (0/1 = left/right).

    // Other overrides.
    bool       OverrideLens;               // True when LensOverrideLeft/Right are in effect (SetLensOverride).
    LensConfig LensOverrideLeft;
    LensConfig LensOverrideRight;
    Sizei      RendertargetSize;           // Actual RT size supplied by SetRendertargetSize().
    bool       OverrideTanHalfFov;         // True when FovOverrideLeft/Right are in effect (SetFov).
    FovPort    FovOverrideLeft;
    FovPort    FovOverrideRight;
    bool       OverrideZeroIpd;            // See SetZeroVirtualIpdOverride().
    float      ZNear;
    float      ZFar;
    float      ExtraEyeRotationInRadians;  // See SetExtraEyeRotation().
    bool       IsRendertargetSharedByBothEyes;
    bool       RightHandedProjection;

    bool DirtyFlag;  // Set when any of the modifiable state changed. Does NOT get set by SetRender*().

    // Utility function shared by the SetRender* overloads.
    ViewportScaleAndOffsetBothEyes setupViewportScaleAndOffsets();

    // *** Computed State

public:  // Small hack for the config tool. Normal code should never read EyeRenderParams directly - use GetEyeRenderParams() instead.
    // 0/1 = left/right main views.
    StereoEyeParamsWithOrtho EyeRenderParams[2];
};
//-----------------------------------------------------------------------------------
// ***** Distortion Mesh Rendering
//

// Stores screen-position, fade and per-channel tan(angle) data for one
// distortion-mesh vertex. Use whichever set of data the specific distortion
// algorithm requires.
// This struct *must* be binary compatible with CAPI ovrDistortionVertex.
struct DistortionMeshVertexData
{
    // [-1,+1],[-1,+1] over the entire framebuffer.
    Vector2f ScreenPosNDC;
    // [0.0-1.0] interpolation value for timewarping - see documentation for details.
    float    TimewarpLerp;
    // [0.0-1.0] fade-to-black at the edges to reduce peripheral vision noise.
    float    Shade;
    // The red, green, and blue vectors in tan(angle) space (separate channels
    // allow chromatic-aberration correction).
    // Scale and offset by the values in StereoEyeParams.EyeToSourceUV.Scale
    // and StereoParams.EyeToSourceUV.Offset to get to real texture UV coords.
    Vector2f TanEyeAnglesR;
    Vector2f TanEyeAnglesG;
    Vector2f TanEyeAnglesB;
};
// If you just want a single point on the screen transformed.
DistortionMeshVertexData DistortionMeshMakeVertex ( Vector2f screenNDC,
                                                    bool rightEye,
                                                    const HmdRenderInfo &hmdRenderInfo,
                                                    const DistortionRenderDesc &distortion, const ScaleAndOffset2D &eyeToSourceNDC );

// Generates the full distortion mesh (vertex array + triangle-list indices) for
// the eye described by stereoParams. The arrays are allocated by this call and
// must be released with DistortionMeshDestroy().
void DistortionMeshCreate ( DistortionMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
                            int *pNumVertices, int *pNumTriangles,
                            const StereoEyeParams &stereoParams, const HmdRenderInfo &hmdRenderInfo );

// Generate distortion mesh for an eye.
// This version requires less data than the stereoParams one, supporting dynamic change in render target viewport.
void DistortionMeshCreate( DistortionMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
                           int *pNumVertices, int *pNumTriangles,
                           bool rightEye,
                           const HmdRenderInfo &hmdRenderInfo,
                           const DistortionRenderDesc &distortion, const ScaleAndOffset2D &eyeToSourceNDC );

// Frees the arrays allocated by DistortionMeshCreate().
void DistortionMeshDestroy ( DistortionMeshVertexData *pVertices, uint16_t *pTriangleMeshIndices );
//-----------------------------------------------------------------------------------
// ***** Heightmap Mesh Rendering
//

// Stores screen-position and tan(angle) data for one heightmap-mesh vertex.
// This struct *must* be binary compatible with CAPI ovrHeightmapVertex.
struct HeightmapMeshVertexData
{
    // [-1,+1],[-1,+1] over the entire framebuffer.
    Vector2f ScreenPosNDC;
    // [0.0-1.0] interpolation value for timewarping - see documentation for details.
    float    TimewarpLerp;
    // The vectors in tan(angle) space.
    // Scale and offset by the values in StereoEyeParams.EyeToSourceUV.Scale
    // and StereoParams.EyeToSourceUV.Offset to get to real texture UV coords.
    Vector2f TanEyeAngles;
};
// Generates the full heightmap mesh (vertex array + triangle-list indices) for
// the eye described by stereoParams. The arrays are allocated by this call and
// must be released with HeightmapMeshDestroy().
void HeightmapMeshCreate ( HeightmapMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
                           int *pNumVertices, int *pNumTriangles,
                           const StereoEyeParams &stereoParams, const HmdRenderInfo &hmdRenderInfo );

// Generate heightmap mesh for an eye. This version requires less data than the stereoParams
// one, supporting dynamic change in render target viewport.
void HeightmapMeshCreate( HeightmapMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
                          int *pNumVertices, int *pNumTriangles, bool rightEye,
                          const HmdRenderInfo &hmdRenderInfo, const ScaleAndOffset2D &eyeToSourceNDC );

// Frees the arrays allocated by HeightmapMeshCreate().
void HeightmapMeshDestroy ( HeightmapMeshVertexData *pVertices, uint16_t *pTriangleMeshIndices );
//-----------------------------------------------------------------------------------
// ***** Prediction and timewarp.
//

// Display-timing latencies used for pose prediction and timewarp.
struct PredictionValues
{
    // All values in seconds.
    // These are the times in seconds from a present+flush to the relevant display element.
    // The time is measured to the middle of that element's visibility window,
    // e.g. if the device is a full-persistence display, the element will be visible for
    // an entire frame, so the time measures to the middle of that period, i.e. half the frame time.
    float PresentFlushToRenderedScene;  // To the overall rendered 3D scene being visible.
    float PresentFlushToTimewarpStart;  // To when the first timewarped scanline will be visible.
    float PresentFlushToTimewarpEnd;    // To when the last timewarped scanline will be visible.
    float PresentFlushToPresentFlush;   // To the next present+flush, i.e. the ideal framerate.

    bool WithTimewarp;  // True if the values were computed with timewarp enabled.
    bool WithVsync;     // True if the values were computed with vsync enabled.
};
// Calculates the values from the HMD info.
PredictionValues PredictionGetDeviceValues ( const HmdRenderInfo &hmdRenderInfo,
                                             bool withTimewarp = true,
                                             bool withVsync = true );

// Pass in an orientation used to render the scene, and then the predicted orientation
// (which may have been computed later on, and thus is more accurate), and this
// will return the matrix to pass to the timewarp distortion shader.
// TODO: deal with different handedness?
Matrix4f TimewarpComputePoseDelta ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const &hmdToEyeViewOffset );
// Variant of the above; presumably also accounts for positional (translation)
// changes between the two poses - confirm against the .cpp implementation.
Matrix4f TimewarpComputePoseDeltaPosition ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const &hmdToEyeViewOffset );
// TimewarpMachine helps keep track of rendered frame timing and
// handles predictions for time-warp rendering.
class TimewarpMachine
{
public:
    TimewarpMachine();

    // Call this once, and every time something about the setup changes.
    void Reset ( HmdRenderInfo& renderInfo, bool vsyncEnabled, double timeNow );

    // The only reliable time in most engines is directly after the frame-present and GPU flush-and-wait.
    // This call should be done right after that to give this system the timing info it needs.
    void AfterPresentAndFlush(double timeNow);
    // But some engines queue up the frame-present and only later find out when it actually happened.
    // They should call these two at those times.
    void AfterPresentWithoutFlush();
    void AfterPresentFinishes(double timeNow);

    // The "average" time the rendered frame will show up,
    // and the predicted pose of the HMD at that time.
    // You usually only need to call one of these functions.
    double GetViewRenderPredictionTime();
    bool   GetViewRenderPredictionPose(Tracking::SensorStateReader* reader, Posef& transform);

    // Timewarp prediction functions. You usually only need to call one of these three sets of functions.

    // The predicted times that the first and last pixel will be visible on-screen.
    double GetVisiblePixelTimeStart();
    double GetVisiblePixelTimeEnd();
    // Predicted poses of the HMD at those first and last pixels.
    bool GetPredictedVisiblePixelPoseStart(Tracking::SensorStateReader* reader, Posef& transform);
    bool GetPredictedVisiblePixelPoseEnd(Tracking::SensorStateReader* reader, Posef& transform);
    // The delta matrices to feed to the timewarp distortion code,
    // given the pose that was used for rendering.
    // (usually the one returned by GetViewRenderPredictionPose() earlier)
    bool GetTimewarpDeltaStart(Tracking::SensorStateReader* reader, Posef const &renderedPose, Matrix4f& transform);
    bool GetTimewarpDeltaEnd(Tracking::SensorStateReader* reader, Posef const &renderedPose, Matrix4f& transform);

    // Just-In-Time distortion aims to delay the second sensor reading & distortion
    // until the very last moment to improve prediction. However, it is a little scary,
    // since the delay might wait too long and miss the vsync completely!
    // Use of the JustInTime_* functions is entirely optional, and we advise allowing
    // users to turn it off in their video options to cope with odd machine configurations.

    // What time should the app wait until before starting distortion?
    double JustInTime_GetDistortionWaitUntilTime();

    // Used to time the distortion rendering.
    bool   JustInTime_NeedDistortionTimeMeasurement() const;
    void   JustInTime_BeforeDistortionTimeMeasurement(double timeNow);
    void   JustInTime_AfterDistortionTimeMeasurement(double timeNow);
    double JustInTime_AverageDistortionTime();  // Just for profiling - use JustInTime_GetDistortionWaitUntilTime() for functionality.

private:
    bool             VsyncEnabled;             // Set by Reset().
    HmdRenderInfo    RenderInfo;               // Copy of the HMD info supplied to Reset().
    PredictionValues CurrentPredictionValues;  // Display-timing latencies for the current setup.

    // Rolling buffer of measured distortion-render times (for JIT distortion).
    enum { NumDistortionTimes = 100 };
    int    DistortionTimeCount;
    double DistortionTimeCurrentStart;
    float  DistortionTimes[NumDistortionTimes];
    float  DistortionTimeAverage;

    // Pose at which last time the eye was rendered (0/1 = left/right).
    Posef EyeRenderPoses[2];

    // Absolute time of the last present+flush.
    double LastFramePresentFlushTime;
    // Seconds between present+flushes.
    float  PresentFlushToPresentFlushSeconds;
    // Predicted absolute time of the next present+flush.
    double NextFramePresentFlushTime;

};
498 }}} // OVR::Util::Render
500 #endif