ovr_sdk

diff LibOVR/Src/Util/Util_Render_Stereo.h @ 0:1b39a1b46319

initial 0.4.4
author John Tsiombikas <nuclear@member.fsf.org>
date Wed, 14 Jan 2015 06:51:16 +0200
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/LibOVR/Src/Util/Util_Render_Stereo.h	Wed Jan 14 06:51:16 2015 +0200
     1.3 @@ -0,0 +1,500 @@
     1.4 +/************************************************************************************
     1.5 +
     1.6 +Filename    :   Util_Render_Stereo.h
     1.7 +Content     :   Sample stereo rendering configuration classes.
     1.8 +Created     :   October 22, 2012
     1.9 +Authors     :   Michael Antonov, Tom Forsyth
    1.10 +
    1.11 +Copyright   :   Copyright 2014 Oculus VR, LLC All Rights reserved.
    1.12 +
    1.13 +Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License"); 
    1.14 +you may not use the Oculus VR Rift SDK except in compliance with the License, 
    1.15 +which is provided at the time of installation or download, or which 
    1.16 +otherwise accompanies this software in either electronic or hard copy form.
    1.17 +
    1.18 +You may obtain a copy of the License at
    1.19 +
    1.20 +http://www.oculusvr.com/licenses/LICENSE-3.2 
    1.21 +
    1.22 +Unless required by applicable law or agreed to in writing, the Oculus VR SDK 
    1.23 +distributed under the License is distributed on an "AS IS" BASIS,
    1.24 +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    1.25 +See the License for the specific language governing permissions and
    1.26 +limitations under the License.
    1.27 +
    1.28 +*************************************************************************************/
    1.29 +
    1.30 +#ifndef OVR_Util_Render_Stereo_h
    1.31 +#define OVR_Util_Render_Stereo_h
    1.32 +
    1.33 +#include "../OVR_Stereo.h"
    1.34 +#include "../Tracking/Tracking_SensorStateReader.h"
    1.35 +
    1.36 +namespace OVR { namespace Util { namespace Render {
    1.37 +
    1.38 +
    1.39 +
    1.40 +//-----------------------------------------------------------------------------------
    1.41 +// **** Useful debug functions.
    1.42 +//
    1.43 +// Purely for debugging - the results are not very end-user-friendly.
    1.44 +char const* GetDebugNameEyeCupType ( EyeCupType eyeCupType );
    1.45 +char const* GetDebugNameHmdType ( HmdTypeEnum hmdType );
    1.46 +
    1.47 +
    1.48 +
    1.49 +//-----------------------------------------------------------------------------------
    1.50 +// **** Higher-level utility functions.
    1.51 +
    1.52 +Sizei CalculateRecommendedTextureSize    ( HmdRenderInfo const &hmd,
    1.53 +                                           bool bRendertargetSharedByBothEyes,
    1.54 +                                           float pixelDensityInCenter = 1.0f );
    1.55 +
    1.56 +FovPort CalculateRecommendedFov          ( HmdRenderInfo const &hmd,
    1.57 +                                           StereoEye eyeType,
    1.58 +                                           bool bMakeFovSymmetrical = false);
    1.59 +
    1.60 +StereoEyeParams CalculateStereoEyeParams ( HmdRenderInfo const &hmd,
    1.61 +                                           StereoEye eyeType,
    1.62 +                                           Sizei const &actualRendertargetSurfaceSize,
    1.63 +                                           bool bRendertargetSharedByBothEyes,
    1.64 +                                           bool bRightHanded = true,
    1.65 +                                           float zNear = 0.01f, float zFar = 10000.0f,
     1.66 +                                           Sizei const *pOverrideRenderedPixelSize = NULL,
    1.67 +                                           FovPort const *pOverrideFovport = NULL,
    1.68 +                                           float zoomFactor = 1.0f );
    1.69 +
    1.70 +Vector3f CalculateEyeVirtualCameraOffset(HmdRenderInfo const &hmd,
    1.71 +                                         StereoEye eyeType, bool bMonoRenderingMode );
    1.72 +
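// A minimal sketch of how the helpers above fit together, assuming the app
// already has an HmdRenderInfo ("hmdInfo") and shares one rendertarget
// between both eyes; the function and variable names here are illustrative,
// not part of the SDK.
inline StereoEyeParams Example_BuildEyeParams ( HmdRenderInfo const &hmdInfo, StereoEye eye )
{
    const bool sharedRT = true;
    // Size the rendertarget first, then derive the full per-eye parameters.
    Sizei rtSize = CalculateRecommendedTextureSize ( hmdInfo, sharedRT, 1.0f );
    StereoEyeParams params = CalculateStereoEyeParams ( hmdInfo, eye, rtSize, sharedRT );
    // The virtual camera for this eye is the center camera moved by this offset.
    Vector3f cameraOffset = CalculateEyeVirtualCameraOffset ( hmdInfo, eye, false );
    (void)cameraOffset;
    return params;
}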
    1.73 +
    1.74 +// These are two components from StereoEyeParams that can be changed
    1.75 +// very easily without full recomputation of everything.
    1.76 +struct ViewportScaleAndOffset
    1.77 +{
    1.78 +    Recti               RenderedViewport;
    1.79 +    ScaleAndOffset2D    EyeToSourceUV;
    1.80 +};
    1.81 +
    1.82 +// Three ways to override the size of the render view dynamically.
     1.83 +// None of these require changing the distortion parameters or regenerating the distortion mesh,
    1.84 +// and can be called every frame if desired.
    1.85 +ViewportScaleAndOffset ModifyRenderViewport ( StereoEyeParams const &params,
    1.86 +                                              Sizei const &actualRendertargetSurfaceSize,
    1.87 +                                              Recti const &renderViewport );
    1.88 +
    1.89 +ViewportScaleAndOffset ModifyRenderSize ( StereoEyeParams const &params,
    1.90 +                                          Sizei const &actualRendertargetSurfaceSize,
    1.91 +                                          Sizei const &requestedRenderSize,
    1.92 +                                          bool bRendertargetSharedByBothEyes = false );
    1.93 +
    1.94 +ViewportScaleAndOffset ModifyRenderDensity ( StereoEyeParams const &params,
    1.95 +                                             Sizei const &actualRendertargetSurfaceSize,
    1.96 +                                             float pixelDensity = 1.0f,
    1.97 +                                             bool bRendertargetSharedByBothEyes = false );
    1.98 +
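// A per-frame sketch of ModifyRenderDensity: dropping rendered pixel density
// under load, assuming "eyeParams" and "rtSize" come from the app's earlier
// setup, and that StereoEyeParams exposes the two members named in
// ViewportScaleAndOffset above (illustrative names, not SDK code).
inline void Example_DropRenderDensity ( StereoEyeParams &eyeParams, Sizei const &rtSize )
{
    // Render at 80% density this frame; no distortion mesh regeneration is needed.
    ViewportScaleAndOffset vso = ModifyRenderDensity ( eyeParams, rtSize, 0.8f, true );
    eyeParams.RenderedViewport = vso.RenderedViewport;
    eyeParams.EyeToSourceUV    = vso.EyeToSourceUV;
}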
    1.99 +
   1.100 +//-----------------------------------------------------------------------------------
   1.101 +// *****  StereoConfig
   1.102 +
    1.103 +// StereoConfig maintains a scene stereo state and allows switching between different
   1.104 +// stereo rendering modes. To support rendering, StereoConfig keeps track of HMD
   1.105 +// variables such as screen size, eye-to-screen distance and distortion, and computes
   1.106 +// extra data such as FOV and distortion center offsets based on it. Rendering
    1.107 +// parameters are returned through StereoEyeParams for each eye.
   1.108 +//
   1.109 +// Beyond regular 3D projection, this class supports rendering a 2D orthographic
   1.110 +// surface for UI and text. The 2D surface will be defined by CreateOrthoSubProjection().
   1.111 +// The (0,0) coordinate corresponds to eye center location.
   1.112 +// 
   1.113 +// Applications are not required to use this class, but they should be doing very
   1.114 +// similar sequences of operations, and it may be useful to start with this class
    1.115 +// and modify it. (A usage sketch follows the class declaration below.)
   1.116 +
   1.117 +struct StereoEyeParamsWithOrtho
   1.118 +{
   1.119 +    StereoEyeParams         StereoEye;
   1.120 +    Matrix4f                OrthoProjection;
   1.121 +};
   1.122 +
   1.123 +struct ViewportScaleAndOffsetBothEyes
   1.124 +{
   1.125 +    ViewportScaleAndOffset  Left;
   1.126 +    ViewportScaleAndOffset  Right;
   1.127 +};
   1.128 +
   1.129 +class StereoConfig
   1.130 +{
   1.131 +public:
   1.132 +
   1.133 +    // StereoMode describes rendering modes that can be used by StereoConfig.
   1.134 +    // These modes control whether stereo rendering is used or not (Stereo_None),
   1.135 +    // and how it is implemented.
   1.136 +    enum StereoMode
   1.137 +    {
   1.138 +        Stereo_None                     = 0,        // Single eye
   1.139 +        Stereo_LeftRight_Multipass      = 1,        // One frustum per eye
   1.140 +    };
   1.141 +
   1.142 +
   1.143 +    StereoConfig(StereoMode mode = Stereo_LeftRight_Multipass);
   1.144 + 
   1.145 +    //---------------------------------------------------------------------------------------------
   1.146 +    // *** Core functions - every app MUST call these functions at least once.
   1.147 +
   1.148 +    // Sets HMD parameters; also initializes distortion coefficients.
   1.149 +    void        SetHmdRenderInfo(const HmdRenderInfo& hmd);
   1.150 +
   1.151 +    // Set the physical size of the rendertarget surface the app created,
   1.152 +    // and whether one RT is shared by both eyes, or each eye has its own RT:
   1.153 +    // true: both eyes are rendered to the same RT. Left eye starts at top-left, right eye starts at top-middle.
   1.154 +    // false: each eye is rendered to its own RT. Some GPU architectures prefer this arrangement.
   1.155 +    // Typically, the app would call CalculateRecommendedTextureSize() to suggest the choice of RT size.
   1.156 +    // This setting must be exactly the size of the actual RT created, or the UVs produced will be incorrect.
   1.157 +    // If the app wants to render to a subsection of the RT, it should use SetRenderSize()
   1.158 +    void        SetRendertargetSize (Size<int> const rendertargetSize,
   1.159 +                                     bool rendertargetIsSharedByBothEyes );
   1.160 +
   1.161 +    // Returns full set of Stereo rendering parameters for the specified eye.
   1.162 +    const StereoEyeParamsWithOrtho& GetEyeRenderParams(StereoEye eye);
   1.163 +
   1.164 +
   1.165 +
   1.166 +    //---------------------------------------------------------------------------------------------
   1.167 +    // *** Optional functions - an app may call these to override default behaviours.
   1.168 +
   1.169 +    const HmdRenderInfo& GetHmdRenderInfo() const { return Hmd; }
   1.170 +
   1.171 +    // Returns the recommended size of rendertargets.
   1.172 +    // If rendertargetIsSharedByBothEyes is true, this is the size of the combined buffer.
   1.173 +    // If rendertargetIsSharedByBothEyes is false, this is the size of each individual buffer.
   1.174 +    // pixelDensityInCenter may be set to any number - by default it will match the HMD resolution in the center of the image.
   1.175 +    // After creating the rendertargets, the application MUST call SetRendertargetSize() with the actual size created
   1.176 +    // (which can be larger or smaller as the app wishes, but StereoConfig needs to know either way)
   1.177 +    Sizei       CalculateRecommendedTextureSize ( bool rendertargetSharedByBothEyes,
   1.178 +                                                  float pixelDensityInCenter = 1.0f );
   1.179 +
   1.180 +    // Sets a stereo rendering mode and updates internal cached
   1.181 +    // state (matrices, per-eye view) based on it.
   1.182 +    void        SetStereoMode(StereoMode mode)  { Mode = mode; DirtyFlag = true; }
   1.183 +    StereoMode  GetStereoMode() const           { return Mode; }
   1.184 +
   1.185 +    // Sets the fieldOfView that the 2D coordinate area stretches to.
   1.186 +    void        Set2DAreaFov(float fovRadians);
   1.187 +
   1.188 +    // Really only for science experiments - no normal app should ever need to override
   1.189 +    // the HMD's lens descriptors. Passing NULL removes the override.
   1.190 +    // Supply both = set left and right.
   1.191 +    // Supply just left = set both to the same.
   1.192 +    // Supply neither = remove override.
   1.193 +    void        SetLensOverride ( LensConfig const *pLensOverrideLeft  = NULL,
   1.194 +                                  LensConfig const *pLensOverrideRight = NULL );
   1.195 + 
   1.196 +    // Override the rendered FOV in various ways. All angles in tangent units.
   1.197 +    // This is not clamped to the physical FOV of the display - you'll need to do that yourself!
   1.198 +    // Supply both = set left and right.
   1.199 +    // Supply just left = set both to the same.
   1.200 +    // Supply neither = remove override.
   1.201 +    void        SetFov ( FovPort const *pfovLeft  = NULL,
    1.202 +                         FovPort const *pfovRight = NULL );
   1.203 +    
   1.204 +    void        SetFovPortRadians ( float horizontal, float vertical )
   1.205 +    {
   1.206 +        FovPort fov = FovPort::CreateFromRadians(horizontal, vertical);
   1.207 +        SetFov( &fov, &fov );
   1.208 +    }
   1.209 +
   1.210 +
   1.211 +    // This forces a "zero IPD" mode where there is just a single render with an FOV that
   1.212 +    //   is the union of the two calculated FOVs.
   1.213 +    // The calculated render is for the left eye. Any size & FOV overrides for the right
   1.214 +    //   eye will be ignored.
   1.215 +    // If you query the right eye's size, you will get the same render
   1.216 +    //   size & position as the left eye - you should not actually do the render of course!
   1.217 +    //   The distortion values will be different, because it goes to a different place on the framebuffer.
   1.218 +    // Note that if you do this, the rendertarget does not need to be twice the width of
   1.219 +    //   the render size any more.
   1.220 +    void        SetZeroVirtualIpdOverride ( bool enableOverride );
   1.221 +
   1.222 +    // Allows the app to specify near and far clip planes and the right/left-handedness of the projection matrix.
   1.223 +    void        SetZClipPlanesAndHandedness ( float zNear = 0.01f, float zFar = 10000.0f,
   1.224 +                                              bool rightHandedProjection = true );
   1.225 +
   1.226 +    // Allows the app to specify how much extra eye rotation to allow when determining the visible FOV.
   1.227 +    void        SetExtraEyeRotation ( float extraEyeRotationInRadians = 0.0f );
   1.228 +
   1.229 +    // The dirty flag is set by any of the above calls. Just handy for the app to know
   1.230 +    // if e.g. the distortion mesh needs regeneration.
   1.231 +    void        SetDirty() { DirtyFlag = true; }
   1.232 +    bool        IsDirty() { return DirtyFlag; }
   1.233 +
   1.234 +    // An app never needs to call this - GetEyeRenderParams will call it internally if
   1.235 +    // the state is dirty. However apps can call this explicitly to control when and where
   1.236 +    // computation is performed (e.g. not inside critical loops)
   1.237 +    void        UpdateComputedState();
   1.238 +
   1.239 +    // This returns the projection matrix with a "zoom". Does not modify any internal state.
   1.240 +    Matrix4f    GetProjectionWithZoom ( StereoEye eye, float fovZoom ) const;
   1.241 +
   1.242 +
   1.243 +    //---------------------------------------------------------------------------------------------
   1.244 +    // The SetRender* functions are special.
   1.245 +    //
   1.246 +    // They do not require a full recalculation of state, and they do not change anything but the
   1.247 +    // ViewportScaleAndOffset data for the eyes (which they return), and do not set the dirty flag!
   1.248 +    // This means they can be called without regenerating the distortion mesh, and thus 
   1.249 +    // can happily be called every frame without causing performance problems. Dynamic rescaling 
   1.250 +    // of the rendertarget can help keep framerate up in demanding VR applications.
   1.251 +    // See the documentation for more details on their use.
   1.252 +
   1.253 +    // Specify a pixel density - how many rendered pixels per pixel in the physical display.
   1.254 +    ViewportScaleAndOffsetBothEyes SetRenderDensity ( float pixelsPerDisplayPixel );
   1.255 +
   1.256 +    // Supply the size directly. Will be clamped to the physical rendertarget size.
   1.257 +    ViewportScaleAndOffsetBothEyes SetRenderSize ( Sizei const &renderSizeLeft, Sizei const &renderSizeRight );
   1.258 +
   1.259 +    // Supply the viewport directly. This is not clamped to the physical rendertarget - careful now!
   1.260 +    ViewportScaleAndOffsetBothEyes SetRenderViewport ( Recti const &renderViewportLeft, Recti const &renderViewportRight );
   1.261 +
   1.262 +private:
   1.263 +
   1.264 +    // *** Modifiable State
   1.265 +
   1.266 +    StereoMode         Mode;
   1.267 +    HmdRenderInfo      Hmd;
   1.268 +
   1.269 +    float              Area2DFov;           // FOV range mapping to the 2D area.
   1.270 +
   1.271 +    // Only one of these three overrides can be true!
   1.272 +    enum SetViewportModeEnum
   1.273 +    {
   1.274 +        SVPM_Density,
   1.275 +        SVPM_Size,
   1.276 +        SVPM_Viewport,
   1.277 +    }                  SetViewportMode;
    1.278 +    // ...and depending on which it is, one of the following is used.
   1.279 +    float              SetViewportPixelsPerDisplayPixel;
   1.280 +    Sizei              SetViewportSize[2];
    1.281 +    Recti              SetViewport[2];
   1.282 +
   1.283 +    // Other overrides.
   1.284 +    bool               OverrideLens;
   1.285 +    LensConfig         LensOverrideLeft;
   1.286 +    LensConfig         LensOverrideRight;
   1.287 +    Sizei              RendertargetSize;
   1.288 +    bool               OverrideTanHalfFov;
   1.289 +    FovPort            FovOverrideLeft;
   1.290 +    FovPort            FovOverrideRight;
   1.291 +    bool               OverrideZeroIpd;
   1.292 +    float              ZNear;
   1.293 +    float              ZFar;
   1.294 +    float              ExtraEyeRotationInRadians;
   1.295 +    bool               IsRendertargetSharedByBothEyes;
   1.296 +    bool               RightHandedProjection;
   1.297 +
    1.298 +    bool               DirtyFlag;   // Set when any of the modifiable state changes. Does NOT get set by SetRender*()
   1.299 +
   1.300 +    // Utility function.
   1.301 +    ViewportScaleAndOffsetBothEyes setupViewportScaleAndOffsets();
   1.302 +
   1.303 +    // *** Computed State
   1.304 +
   1.305 +public:     // Small hack for the config tool. Normal code should never read EyeRenderParams directly - use GetEyeRenderParams() instead.
   1.306 +    // 0/1 = left/right main views.
   1.307 +    StereoEyeParamsWithOrtho    EyeRenderParams[2];
   1.308 +};
   1.309 +
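// A sketch of the typical StereoConfig sequence described above - one-time
// setup followed by an optional per-frame density change. "hmdInfo" is
// assumed to come from the app's HMD enumeration; all names local to this
// example are illustrative.
inline void Example_StereoConfigUsage ( const HmdRenderInfo &hmdInfo )
{
    StereoConfig stereo;                                    // Stereo_LeftRight_Multipass by default
    stereo.SetHmdRenderInfo ( hmdInfo );

    // One-time setup: ask for a recommended size, create the rendertarget,
    // then report the size actually created back to StereoConfig.
    const bool sharedRT = true;
    Sizei rtSize = stereo.CalculateRecommendedTextureSize ( sharedRT, 1.0f );
    // ... create the actual rendertarget(s) at rtSize here ...
    stereo.SetRendertargetSize ( rtSize, sharedRT );

    // Full per-eye parameters; recomputed lazily whenever the state is dirty.
    const StereoEyeParamsWithOrtho &left  = stereo.GetEyeRenderParams ( StereoEye_Left );
    const StereoEyeParamsWithOrtho &right = stereo.GetEyeRenderParams ( StereoEye_Right );
    (void)left; (void)right;

    // Per-frame (optional): lower the rendered density under load. This does
    // not set the dirty flag and needs no new distortion mesh.
    ViewportScaleAndOffsetBothEyes vso = stereo.SetRenderDensity ( 0.8f );
    (void)vso;
}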
   1.310 +
   1.311 +//-----------------------------------------------------------------------------------
   1.312 +// *****  Distortion Mesh Rendering
   1.313 +//
   1.314 +
    1.315 +// Stores either texture UV coords or tan(angle) values.
   1.316 +// Use whichever set of data the specific distortion algorithm requires.
   1.317 +// This struct *must* be binary compatible with CAPI ovrDistortionVertex.
   1.318 +struct DistortionMeshVertexData
   1.319 +{
   1.320 +    // [-1,+1],[-1,+1] over the entire framebuffer.
   1.321 +    Vector2f    ScreenPosNDC;
   1.322 +    // [0.0-1.0] interpolation value for timewarping - see documentation for details.
   1.323 +    float       TimewarpLerp;
   1.324 +    // [0.0-1.0] fade-to-black at the edges to reduce peripheral vision noise.
   1.325 +    float       Shade;        
   1.326 +    // The red, green, and blue vectors in tan(angle) space.
   1.327 +    // Scale and offset by the values in StereoEyeParams.EyeToSourceUV.Scale
    1.328 +// and StereoEyeParams.EyeToSourceUV.Offset to get to real texture UV coords.
   1.329 +    Vector2f    TanEyeAnglesR;
   1.330 +    Vector2f    TanEyeAnglesG;
   1.331 +    Vector2f    TanEyeAnglesB;    
   1.332 +};
   1.333 +
   1.334 +// If you just want a single point on the screen transformed.
   1.335 +DistortionMeshVertexData DistortionMeshMakeVertex ( Vector2f screenNDC,
   1.336 +                                                    bool rightEye,
   1.337 +                                                    const HmdRenderInfo &hmdRenderInfo, 
   1.338 +                                                    const DistortionRenderDesc &distortion, const ScaleAndOffset2D &eyeToSourceNDC );
   1.339 +
   1.340 +void DistortionMeshCreate ( DistortionMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
   1.341 +                            int *pNumVertices, int *pNumTriangles,
   1.342 +                            const StereoEyeParams &stereoParams, const HmdRenderInfo &hmdRenderInfo );
   1.343 +
    1.344 +// Generate distortion mesh for an eye.
    1.345 +// This version requires less data than the stereoParams overload above, supporting dynamic changes in the render target viewport.
   1.346 +void DistortionMeshCreate( DistortionMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
   1.347 +                           int *pNumVertices, int *pNumTriangles,
   1.348 +                           bool rightEye,
   1.349 +                           const HmdRenderInfo &hmdRenderInfo, 
   1.350 +                           const DistortionRenderDesc &distortion, const ScaleAndOffset2D &eyeToSourceNDC );
   1.351 +
   1.352 +void DistortionMeshDestroy ( DistortionMeshVertexData *pVertices, uint16_t *pTriangleMeshIndices );
   1.353 +
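// A sketch of the distortion-mesh workflow above: build the mesh for one eye,
// convert a vertex's tan-angle coordinates to texture UVs with the
// EyeToSourceUV scale/offset, then free the mesh. "stereoParams" and
// "hmdRenderInfo" are assumed to come from the app's StereoConfig setup;
// the names are illustrative.
inline void Example_BuildDistortionMesh ( const StereoEyeParams &stereoParams,
                                          const HmdRenderInfo &hmdRenderInfo )
{
    DistortionMeshVertexData *pVertices = NULL;
    uint16_t                 *pIndices  = NULL;
    int numVertices = 0, numTriangles = 0;
    DistortionMeshCreate ( &pVertices, &pIndices, &numVertices, &numTriangles,
                           stereoParams, hmdRenderInfo );

    if ( numVertices > 0 )
    {
        // UV = tan(angle) * Scale + Offset, as described in the vertex struct comments.
        Vector2f uvR ( pVertices[0].TanEyeAnglesR.x * stereoParams.EyeToSourceUV.Scale.x
                           + stereoParams.EyeToSourceUV.Offset.x,
                       pVertices[0].TanEyeAnglesR.y * stereoParams.EyeToSourceUV.Scale.y
                           + stereoParams.EyeToSourceUV.Offset.y );
        (void)uvR;
    }

    // ... upload pVertices / pIndices to the GPU here ...

    DistortionMeshDestroy ( pVertices, pIndices );
}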
   1.354 +
   1.355 +//-----------------------------------------------------------------------------------
   1.356 +// *****  Heightmap Mesh Rendering
   1.357 +//
   1.358 +
    1.359 +// Stores either texture UV coords or tan(angle) values.
   1.360 +// This struct *must* be binary compatible with CAPI ovrHeightmapVertex.
   1.361 +struct HeightmapMeshVertexData
   1.362 +{
   1.363 +    // [-1,+1],[-1,+1] over the entire framebuffer.
   1.364 +    Vector2f    ScreenPosNDC;
   1.365 +    // [0.0-1.0] interpolation value for timewarping - see documentation for details.
   1.366 +    float       TimewarpLerp;
   1.367 +    // The vectors in tan(angle) space.
   1.368 +    // Scale and offset by the values in StereoEyeParams.EyeToSourceUV.Scale
    1.369 +// and StereoEyeParams.EyeToSourceUV.Offset to get to real texture UV coords.
   1.370 +    Vector2f    TanEyeAngles;    
   1.371 +};
   1.372 +
   1.373 +
   1.374 +void HeightmapMeshCreate ( HeightmapMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
   1.375 +    int *pNumVertices, int *pNumTriangles,
   1.376 +    const StereoEyeParams &stereoParams, const HmdRenderInfo &hmdRenderInfo );
   1.377 +
    1.378 +// Generate heightmap mesh for an eye. This version requires less data than the stereoParams overload above,
    1.379 +// supporting dynamic changes in the render target viewport.
   1.380 +void HeightmapMeshCreate( HeightmapMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
   1.381 +    int *pNumVertices, int *pNumTriangles, bool rightEye,
   1.382 +    const HmdRenderInfo &hmdRenderInfo, const ScaleAndOffset2D &eyeToSourceNDC );
   1.383 +
   1.384 +void HeightmapMeshDestroy ( HeightmapMeshVertexData *pVertices, uint16_t *pTriangleMeshIndices );
   1.385 +
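// A minimal sketch mirroring the distortion-mesh example: create, use, and
// destroy a heightmap mesh; "stereoParams" and "hmdRenderInfo" are assumed
// to come from the app's StereoConfig setup.
inline void Example_BuildHeightmapMesh ( const StereoEyeParams &stereoParams,
                                         const HmdRenderInfo &hmdRenderInfo )
{
    HeightmapMeshVertexData *pVertices = NULL;
    uint16_t                *pIndices  = NULL;
    int numVertices = 0, numTriangles = 0;
    HeightmapMeshCreate ( &pVertices, &pIndices, &numVertices, &numTriangles,
                          stereoParams, hmdRenderInfo );
    // ... upload pVertices / pIndices to the GPU here ...
    HeightmapMeshDestroy ( pVertices, pIndices );
}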
   1.386 +
   1.387 +
   1.388 +//-----------------------------------------------------------------------------------
   1.389 +// ***** Prediction and timewarp.
   1.390 +//
   1.391 +
   1.392 +struct PredictionValues
   1.393 +{
   1.394 +    // All values in seconds.
   1.395 +    // These are the times in seconds from a present+flush to the relevant display element.
   1.396 +    // The time is measured to the middle of that element's visibility window,
   1.397 +    // e.g. if the device is a full-persistence display, the element will be visible for
   1.398 +    // an entire frame, so the time measures to the middle of that period, i.e. half the frame time.
   1.399 +    float PresentFlushToRenderedScene;        // To the overall rendered 3D scene being visible.
   1.400 +    float PresentFlushToTimewarpStart;        // To when the first timewarped scanline will be visible.
   1.401 +    float PresentFlushToTimewarpEnd;          // To when the last timewarped scanline will be visible.
   1.402 +    float PresentFlushToPresentFlush;         // To the next present+flush, i.e. the ideal framerate.
   1.403 +
   1.404 +    bool  WithTimewarp;
   1.405 +    bool  WithVsync;
   1.406 +};
   1.407 +
   1.408 +// Calculates the values from the HMD info.
   1.409 +PredictionValues PredictionGetDeviceValues ( const HmdRenderInfo &hmdRenderInfo,
   1.410 +                                             bool withTimewarp = true,
   1.411 +                                             bool withVsync = true );
   1.412 +
   1.413 +// Pass in an orientation used to render the scene, and then the predicted orientation
   1.414 +// (which may have been computed later on, and thus is more accurate), and this
   1.415 +// will return the matrix to pass to the timewarp distortion shader.
   1.416 +// TODO: deal with different handedness?
   1.417 +Matrix4f TimewarpComputePoseDelta ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&hmdToEyeViewOffset );
   1.418 +Matrix4f TimewarpComputePoseDeltaPosition ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&hmdToEyeViewOffset );
   1.419 +
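// A sketch of how the prediction values and the pose-delta helper above are
// combined, assuming the app supplies the rendered and (later, more accurate)
// predicted view matrices plus the per-eye offset; names are illustrative.
inline Matrix4f Example_ComputeTimewarpDelta ( const HmdRenderInfo &hmdRenderInfo,
                                               const Matrix4f &renderedViewFromWorld,
                                               const Matrix4f &predictedViewFromWorld,
                                               const Matrix4f &hmdToEyeViewOffset )
{
    // How far ahead of present+flush each display element becomes visible.
    PredictionValues pv = PredictionGetDeviceValues ( hmdRenderInfo, true, true );
    // pv.PresentFlushToTimewarpStart / ...End tell the app which times to
    // predict the "predicted" pose for.
    (void)pv;
    return TimewarpComputePoseDelta ( renderedViewFromWorld,
                                      predictedViewFromWorld,
                                      hmdToEyeViewOffset );
}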
   1.420 +
   1.421 +
   1.422 +// TimewarpMachine helps keep track of rendered frame timing and
   1.423 +// handles predictions for time-warp rendering.
   1.424 +class TimewarpMachine
   1.425 +{
   1.426 +public:
   1.427 +    TimewarpMachine();
   1.428 +   
    1.429 +    // Call this once at startup, and every time something about the setup changes.
   1.430 +    void        Reset ( HmdRenderInfo& renderInfo, bool vsyncEnabled, double timeNow );
   1.431 +
   1.432 +    // The only reliable time in most engines is directly after the frame-present and GPU flush-and-wait.
   1.433 +    // This call should be done right after that to give this system the timing info it needs.
   1.434 +    void        AfterPresentAndFlush(double timeNow);
   1.435 +    // But some engines queue up the frame-present and only later find out when it actually happened.
   1.436 +    // They should call these two at those times.
   1.437 +    void        AfterPresentWithoutFlush();
   1.438 +    void        AfterPresentFinishes(double timeNow);
   1.439 +
   1.440 +    // The "average" time the rendered frame will show up,
   1.441 +    // and the predicted pose of the HMD at that time.
   1.442 +    // You usually only need to call one of these functions.
   1.443 +    double      GetViewRenderPredictionTime();
   1.444 +    bool        GetViewRenderPredictionPose(Tracking::SensorStateReader* reader, Posef& transform);
   1.445 +
   1.446 +
   1.447 +    // Timewarp prediction functions. You usually only need to call one of these three sets of functions.
   1.448 +
   1.449 +    // The predicted times that the first and last pixel will be visible on-screen.
   1.450 +    double      GetVisiblePixelTimeStart();
   1.451 +    double      GetVisiblePixelTimeEnd();
   1.452 +    // Predicted poses of the HMD at those first and last pixels.
    1.453 +    bool        GetPredictedVisiblePixelPoseStart(Tracking::SensorStateReader* reader, Posef& transform);
    1.454 +    bool        GetPredictedVisiblePixelPoseEnd(Tracking::SensorStateReader* reader, Posef& transform);
   1.455 +    // The delta matrices to feed to the timewarp distortion code,
   1.456 +    // given the pose that was used for rendering.
   1.457 +    // (usually the one returned by GetViewRenderPredictionPose() earlier)
    1.458 +    bool        GetTimewarpDeltaStart(Tracking::SensorStateReader* reader, Posef const &renderedPose, Matrix4f& transform);
    1.459 +    bool        GetTimewarpDeltaEnd(Tracking::SensorStateReader* reader, Posef const &renderedPose, Matrix4f& transform);
   1.460 +
   1.461 +    // Just-In-Time distortion aims to delay the second sensor reading & distortion
   1.462 +    // until the very last moment to improve prediction. However, it is a little scary,
   1.463 +    // since the delay might wait too long and miss the vsync completely!
   1.464 +    // Use of the JustInTime_* functions is entirely optional, and we advise allowing
   1.465 +    // users to turn it off in their video options to cope with odd machine configurations.
   1.466 +
   1.467 +    // What time should the app wait until before starting distortion?
   1.468 +    double      JustInTime_GetDistortionWaitUntilTime();
   1.469 +
   1.470 +    // Used to time the distortion rendering
   1.471 +    bool        JustInTime_NeedDistortionTimeMeasurement() const;
   1.472 +    void        JustInTime_BeforeDistortionTimeMeasurement(double timeNow);
   1.473 +    void        JustInTime_AfterDistortionTimeMeasurement(double timeNow);
   1.474 +    double      JustInTime_AverageDistortionTime();     // Just for profiling - use JustInTime_GetDistortionWaitUntilTime() for functionality.
   1.475 +
   1.476 +private:
   1.477 +    bool                VsyncEnabled;
   1.478 +    HmdRenderInfo       RenderInfo;
   1.479 +    PredictionValues    CurrentPredictionValues;
   1.480 +
   1.481 +    enum { NumDistortionTimes = 100 };
   1.482 +    int                 DistortionTimeCount;
   1.483 +    double              DistortionTimeCurrentStart;
   1.484 +    float               DistortionTimes[NumDistortionTimes];
   1.485 +    float               DistortionTimeAverage;
   1.486 +
    1.487 +    // Poses with which the eyes were last rendered.
   1.488 +    Posef               EyeRenderPoses[2];
   1.489 +
   1.490 +    // Absolute time of the last present+flush
   1.491 +    double              LastFramePresentFlushTime;
   1.492 +    // Seconds between present+flushes
   1.493 +    float               PresentFlushToPresentFlushSeconds;
   1.494 +    // Predicted absolute time of the next present+flush
   1.495 +    double              NextFramePresentFlushTime;
   1.496 +
   1.497 +};
   1.498 +
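// A per-frame sketch of the TimewarpMachine flow described above, assuming a
// Tracking::SensorStateReader ("reader"), an already-Reset() machine ("twm"),
// and a time source such as ovr_GetTimeInSeconds(); names are illustrative.
inline void Example_TimewarpFrame ( TimewarpMachine &twm,
                                    Tracking::SensorStateReader *reader,
                                    double timeNow )
{
    // Right after the frame-present and GPU flush-and-wait:
    twm.AfterPresentAndFlush ( timeNow );

    // Predict the pose to render the scene with.
    Posef renderPose;
    twm.GetViewRenderPredictionPose ( reader, renderPose );
    // ... render both eyes using renderPose ...

    // Optionally wait until the last safe moment before starting distortion:
    // double waitUntil = twm.JustInTime_GetDistortionWaitUntilTime();

    // Delta matrices for the timewarp distortion shader.
    Matrix4f deltaStart, deltaEnd;
    twm.GetTimewarpDeltaStart ( reader, renderPose, deltaStart );
    twm.GetTimewarpDeltaEnd   ( reader, renderPose, deltaEnd );
    (void)deltaStart; (void)deltaEnd;
}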
   1.499 +
   1.500 +
   1.501 +}}}  // OVR::Util::Render
   1.502 +
   1.503 +#endif