ovr_sdk
diff LibOVR/Src/Util/Util_Render_Stereo.cpp @ 0:1b39a1b46319
initial 0.4.4
| author | John Tsiombikas <nuclear@member.fsf.org> |
|---|---|
| date | Wed, 14 Jan 2015 06:51:16 +0200 |
| parents | |
| children | |
line diff
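For orientation before the full listing: the file added by this changeset implements the stereo rendering configuration pipeline, and its two high-level entry points are `CalculateRecommendedTextureSize()` and `CalculateStereoEyeParams()`, defined about midway through the diff. The following is only a minimal usage sketch, assuming the LibOVR 0.4.4 headers and an `HmdRenderInfo` already filled in by the device-probing code elsewhere in the SDK; the surrounding application function is hypothetical.

```cpp
// Minimal sketch, not part of the changeset. Assumes LibOVR 0.4.4 headers;
// "hmdInfo" is a placeholder for an HmdRenderInfo obtained from the device.
#include "Util_Render_Stereo.h"

using namespace OVR;
using namespace OVR::Util::Render;

void SetupStereoRendering(const HmdRenderInfo& hmdInfo)
{
    // One render target shared by both eyes, 1.0 pixel density at the center.
    const bool sharedRT = true;
    Sizei rtSize = CalculateRecommendedTextureSize(hmdInfo, sharedRT, 1.0f);

    // Per-eye projection, distortion, viewport and eye-to-texture UV mapping.
    StereoEyeParams leftEye  = CalculateStereoEyeParams(hmdInfo, StereoEye_Left,
                                                        rtSize, sharedRT);
    StereoEyeParams rightEye = CalculateStereoEyeParams(hmdInfo, StereoEye_Right,
                                                        rtSize, sharedRT);

    // leftEye.RenderedViewport says where to render the left eye inside the
    // shared texture; leftEye.RenderedProjection is that eye's projection.
    (void)leftEye;
    (void)rightEye;
}
```

With a shared render target, `CalculateViewportInternal()` in the listing places the right eye's viewport starting at the horizontal midpoint of the texture, rounded up, so the two eyes never overlap by a pixel.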
1.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 1.2 +++ b/LibOVR/Src/Util/Util_Render_Stereo.cpp Wed Jan 14 06:51:16 2015 +0200 1.3 @@ -0,0 +1,1554 @@ 1.4 +/************************************************************************************ 1.5 + 1.6 +Filename : Util_Render_Stereo.cpp 1.7 +Content : Stereo rendering configuration implementation 1.8 +Created : October 22, 2012 1.9 +Authors : Michael Antonov, Andrew Reisse, Tom Forsyth 1.10 + 1.11 +Copyright : Copyright 2014 Oculus VR, LLC All Rights reserved. 1.12 + 1.13 +Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License"); 1.14 +you may not use the Oculus VR Rift SDK except in compliance with the License, 1.15 +which is provided at the time of installation or download, or which 1.16 +otherwise accompanies this software in either electronic or hard copy form. 1.17 + 1.18 +You may obtain a copy of the License at 1.19 + 1.20 +http://www.oculusvr.com/licenses/LICENSE-3.2 1.21 + 1.22 +Unless required by applicable law or agreed to in writing, the Oculus VR SDK 1.23 +distributed under the License is distributed on an "AS IS" BASIS, 1.24 +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 1.25 +See the License for the specific language governing permissions and 1.26 +limitations under the License. 1.27 + 1.28 +*************************************************************************************/ 1.29 + 1.30 +#include "Util_Render_Stereo.h" 1.31 + 1.32 +namespace OVR { namespace Util { namespace Render { 1.33 + 1.34 +using namespace OVR::Tracking; 1.35 + 1.36 + 1.37 +//----------------------------------------------------------------------------------- 1.38 +// **** Useful debug functions. 1.39 + 1.40 +char const* GetDebugNameEyeCupType ( EyeCupType eyeCupType ) 1.41 +{ 1.42 + switch ( eyeCupType ) 1.43 + { 1.44 + case EyeCup_DK1A: return "DK1 A"; 1.45 + case EyeCup_DK1B: return "DK1 B"; 1.46 + case EyeCup_DK1C: return "DK1 C"; 1.47 + case EyeCup_DKHD2A: return "DKHD2 A"; 1.48 + case EyeCup_OrangeA: return "Orange A"; 1.49 + case EyeCup_RedA: return "Red A"; 1.50 + case EyeCup_PinkA: return "Pink A"; 1.51 + case EyeCup_BlueA: return "Blue A"; 1.52 + case EyeCup_Delilah1A: return "Delilah 1 A"; 1.53 + case EyeCup_Delilah2A: return "Delilah 2 A"; 1.54 + case EyeCup_JamesA: return "James A"; 1.55 + case EyeCup_SunMandalaA: return "Sun Mandala A"; 1.56 + case EyeCup_DK2A: return "DK2 A"; 1.57 + case EyeCup_LAST: return "LAST"; 1.58 + default: OVR_ASSERT ( false ); return "Error"; 1.59 + } 1.60 +} 1.61 + 1.62 +char const* GetDebugNameHmdType ( HmdTypeEnum hmdType ) 1.63 +{ 1.64 + switch ( hmdType ) 1.65 + { 1.66 + case HmdType_None: return "None"; 1.67 + case HmdType_DK1: return "DK1"; 1.68 + case HmdType_DKProto: return "DK1 prototype"; 1.69 + case HmdType_DKHDProto: return "DK HD prototype 1"; 1.70 + case HmdType_DKHDProto566Mi: return "DK HD prototype 566 Mi"; 1.71 + case HmdType_DKHD2Proto: return "DK HD prototype 585"; 1.72 + case HmdType_CrystalCoveProto: return "Crystal Cove"; 1.73 + case HmdType_DK2: return "DK2"; 1.74 + case HmdType_Unknown: return "Unknown"; 1.75 + case HmdType_LAST: return "LAST"; 1.76 + default: OVR_ASSERT ( false ); return "Error"; 1.77 + } 1.78 +} 1.79 + 1.80 + 1.81 +//----------------------------------------------------------------------------------- 1.82 +// **** Internal pipeline functions. 
1.83 + 1.84 +struct DistortionAndFov 1.85 +{ 1.86 + DistortionRenderDesc Distortion; 1.87 + FovPort Fov; 1.88 +}; 1.89 + 1.90 +static DistortionAndFov CalculateDistortionAndFovInternal ( StereoEye eyeType, HmdRenderInfo const &hmd, 1.91 + LensConfig const *pLensOverride = NULL, 1.92 + FovPort const *pTanHalfFovOverride = NULL, 1.93 + float extraEyeRotationInRadians = OVR_DEFAULT_EXTRA_EYE_ROTATION ) 1.94 +{ 1.95 + // pLensOverride can be NULL, which means no override. 1.96 + 1.97 + DistortionRenderDesc localDistortion = CalculateDistortionRenderDesc ( eyeType, hmd, pLensOverride ); 1.98 + FovPort fov = CalculateFovFromHmdInfo ( eyeType, localDistortion, hmd, extraEyeRotationInRadians ); 1.99 + // Here the app or the user would optionally clamp this visible fov to a smaller number if 1.100 + // they want more perf or resolution and are willing to give up FOV. 1.101 + // They may also choose to clamp UDLR differently e.g. to get cinemascope-style views. 1.102 + if ( pTanHalfFovOverride != NULL ) 1.103 + { 1.104 + fov = *pTanHalfFovOverride; 1.105 + } 1.106 + 1.107 + // Here we could call ClampToPhysicalScreenFov(), but we do want people 1.108 + // to be able to play with larger-than-screen views. 1.109 + // The calling app can always do the clamping itself. 1.110 + DistortionAndFov result; 1.111 + result.Distortion = localDistortion; 1.112 + result.Fov = fov; 1.113 + 1.114 + return result; 1.115 +} 1.116 + 1.117 + 1.118 +static Recti CalculateViewportInternal ( StereoEye eyeType, 1.119 + Sizei const actualRendertargetSurfaceSize, 1.120 + Sizei const requestedRenderedPixelSize, 1.121 + bool bRendertargetSharedByBothEyes, 1.122 + bool bMonoRenderingMode = false ) 1.123 +{ 1.124 + Recti renderedViewport; 1.125 + if ( bMonoRenderingMode || !bRendertargetSharedByBothEyes || (eyeType == StereoEye_Center) ) 1.126 + { 1.127 + // One eye per RT. 1.128 + renderedViewport.x = 0; 1.129 + renderedViewport.y = 0; 1.130 + renderedViewport.w = Alg::Min ( actualRendertargetSurfaceSize.w, requestedRenderedPixelSize.w ); 1.131 + renderedViewport.h = Alg::Min ( actualRendertargetSurfaceSize.h, requestedRenderedPixelSize.h ); 1.132 + } 1.133 + else 1.134 + { 1.135 + // Both eyes share the RT. 1.136 + renderedViewport.x = 0; 1.137 + renderedViewport.y = 0; 1.138 + renderedViewport.w = Alg::Min ( actualRendertargetSurfaceSize.w/2, requestedRenderedPixelSize.w ); 1.139 + renderedViewport.h = Alg::Min ( actualRendertargetSurfaceSize.h, requestedRenderedPixelSize.h ); 1.140 + if ( eyeType == StereoEye_Right ) 1.141 + { 1.142 + renderedViewport.x = (actualRendertargetSurfaceSize.w+1)/2; // Round up, not down. 1.143 + } 1.144 + } 1.145 + return renderedViewport; 1.146 +} 1.147 + 1.148 +static Recti CalculateViewportDensityInternal ( StereoEye eyeType, 1.149 + DistortionRenderDesc const &distortion, 1.150 + FovPort const &fov, 1.151 + Sizei const &actualRendertargetSurfaceSize, 1.152 + bool bRendertargetSharedByBothEyes, 1.153 + float desiredPixelDensity = 1.0f, 1.154 + bool bMonoRenderingMode = false ) 1.155 +{ 1.156 + OVR_ASSERT ( actualRendertargetSurfaceSize.w > 0 ); 1.157 + OVR_ASSERT ( actualRendertargetSurfaceSize.h > 0 ); 1.158 + 1.159 + // What size RT do we need to get 1:1 mapping? 1.160 + Sizei idealPixelSize = CalculateIdealPixelSize ( eyeType, distortion, fov, desiredPixelDensity ); 1.161 + // ...but we might not actually get that size. 
1.162 + return CalculateViewportInternal ( eyeType, 1.163 + actualRendertargetSurfaceSize, 1.164 + idealPixelSize, 1.165 + bRendertargetSharedByBothEyes, bMonoRenderingMode ); 1.166 +} 1.167 + 1.168 +static ViewportScaleAndOffset CalculateViewportScaleAndOffsetInternal ( 1.169 + ScaleAndOffset2D const &eyeToSourceNDC, 1.170 + Recti const &renderedViewport, 1.171 + Sizei const &actualRendertargetSurfaceSize ) 1.172 +{ 1.173 + ViewportScaleAndOffset result; 1.174 + result.RenderedViewport = renderedViewport; 1.175 + result.EyeToSourceUV = CreateUVScaleAndOffsetfromNDCScaleandOffset( 1.176 + eyeToSourceNDC, renderedViewport, actualRendertargetSurfaceSize ); 1.177 + return result; 1.178 +} 1.179 + 1.180 + 1.181 +static StereoEyeParams CalculateStereoEyeParamsInternal ( StereoEye eyeType, HmdRenderInfo const &hmd, 1.182 + DistortionRenderDesc const &distortion, 1.183 + FovPort const &fov, 1.184 + Sizei const &actualRendertargetSurfaceSize, 1.185 + Recti const &renderedViewport, 1.186 + bool bRightHanded = true, float zNear = 0.01f, float zFar = 10000.0f, 1.187 + bool bMonoRenderingMode = false, 1.188 + float zoomFactor = 1.0f ) 1.189 +{ 1.190 + // Generate the projection matrix for intermediate rendertarget. 1.191 + // Z range can also be inserted later by the app (though not in this particular case) 1.192 + float fovScale = 1.0f / zoomFactor; 1.193 + FovPort zoomedFov = fov; 1.194 + zoomedFov.LeftTan *= fovScale; 1.195 + zoomedFov.RightTan *= fovScale; 1.196 + zoomedFov.UpTan *= fovScale; 1.197 + zoomedFov.DownTan *= fovScale; 1.198 + Matrix4f projection = CreateProjection ( bRightHanded, zoomedFov, zNear, zFar ); 1.199 + 1.200 + // Find the mapping from TanAngle space to target NDC space. 1.201 + // Note this does NOT take the zoom factor into account because 1.202 + // this is the mapping of actual physical eye FOV (and our eyes do not zoom!) 1.203 + // to screen space. 1.204 + ScaleAndOffset2D eyeToSourceNDC = CreateNDCScaleAndOffsetFromFov ( fov ); 1.205 + 1.206 + // The size of the final FB, which is fixed and determined by the physical size of the device display. 
1.207 + Recti distortedViewport = GetFramebufferViewport ( eyeType, hmd ); 1.208 + Vector3f virtualCameraOffset = CalculateEyeVirtualCameraOffset(hmd, eyeType, bMonoRenderingMode); 1.209 + 1.210 + StereoEyeParams result; 1.211 + result.Eye = eyeType; 1.212 + result.HmdToEyeViewOffset = Matrix4f::Translation(virtualCameraOffset); 1.213 + result.Distortion = distortion; 1.214 + result.DistortionViewport = distortedViewport; 1.215 + result.Fov = fov; 1.216 + result.RenderedProjection = projection; 1.217 + result.EyeToSourceNDC = eyeToSourceNDC; 1.218 + ViewportScaleAndOffset vsao = CalculateViewportScaleAndOffsetInternal ( eyeToSourceNDC, renderedViewport, actualRendertargetSurfaceSize ); 1.219 + result.RenderedViewport = vsao.RenderedViewport; 1.220 + result.EyeToSourceUV = vsao.EyeToSourceUV; 1.221 + 1.222 + return result; 1.223 +} 1.224 + 1.225 + 1.226 +Vector3f CalculateEyeVirtualCameraOffset(HmdRenderInfo const &hmd, 1.227 + StereoEye eyeType, bool bmonoRenderingMode) 1.228 +{ 1.229 + Vector3f virtualCameraOffset(0); 1.230 + 1.231 + if (!bmonoRenderingMode) 1.232 + { 1.233 + float eyeCenterRelief = hmd.GetEyeCenter().ReliefInMeters; 1.234 + 1.235 + if (eyeType == StereoEye_Left) 1.236 + { 1.237 + virtualCameraOffset.x = hmd.EyeLeft.NoseToPupilInMeters; 1.238 + virtualCameraOffset.z = eyeCenterRelief - hmd.EyeLeft.ReliefInMeters; 1.239 + } 1.240 + else if (eyeType == StereoEye_Right) 1.241 + { 1.242 + virtualCameraOffset.x = -hmd.EyeRight.NoseToPupilInMeters; 1.243 + virtualCameraOffset.z = eyeCenterRelief - hmd.EyeRight.ReliefInMeters; 1.244 + } 1.245 + } 1.246 + 1.247 + return virtualCameraOffset; 1.248 +} 1.249 + 1.250 + 1.251 +//----------------------------------------------------------------------------------- 1.252 +// **** Higher-level utility functions. 1.253 + 1.254 +Sizei CalculateRecommendedTextureSize ( HmdRenderInfo const &hmd, 1.255 + bool bRendertargetSharedByBothEyes, 1.256 + float pixelDensityInCenter /*= 1.0f*/ ) 1.257 +{ 1.258 + Sizei idealPixelSize[2]; 1.259 + for ( int eyeNum = 0; eyeNum < 2; eyeNum++ ) 1.260 + { 1.261 + StereoEye eyeType = ( eyeNum == 0 ) ? 
StereoEye_Left : StereoEye_Right; 1.262 + 1.263 + DistortionAndFov distortionAndFov = CalculateDistortionAndFovInternal ( eyeType, hmd, NULL, NULL, OVR_DEFAULT_EXTRA_EYE_ROTATION ); 1.264 + 1.265 + idealPixelSize[eyeNum] = CalculateIdealPixelSize ( eyeType, 1.266 + distortionAndFov.Distortion, 1.267 + distortionAndFov.Fov, 1.268 + pixelDensityInCenter ); 1.269 + } 1.270 + 1.271 + Sizei result; 1.272 + result.w = Alg::Max ( idealPixelSize[0].w, idealPixelSize[1].w ); 1.273 + result.h = Alg::Max ( idealPixelSize[0].h, idealPixelSize[1].h ); 1.274 + if ( bRendertargetSharedByBothEyes ) 1.275 + { 1.276 + result.w *= 2; 1.277 + } 1.278 + return result; 1.279 +} 1.280 + 1.281 +StereoEyeParams CalculateStereoEyeParams ( HmdRenderInfo const &hmd, 1.282 + StereoEye eyeType, 1.283 + Sizei const &actualRendertargetSurfaceSize, 1.284 + bool bRendertargetSharedByBothEyes, 1.285 + bool bRightHanded /*= true*/, 1.286 + float zNear /*= 0.01f*/, float zFar /*= 10000.0f*/, 1.287 + Sizei const *pOverrideRenderedPixelSize /* = NULL*/, 1.288 + FovPort const *pOverrideFovport /*= NULL*/, 1.289 + float zoomFactor /*= 1.0f*/ ) 1.290 +{ 1.291 + DistortionAndFov distortionAndFov = CalculateDistortionAndFovInternal ( eyeType, hmd, NULL, NULL, OVR_DEFAULT_EXTRA_EYE_ROTATION ); 1.292 + if ( pOverrideFovport != NULL ) 1.293 + { 1.294 + distortionAndFov.Fov = *pOverrideFovport; 1.295 + } 1.296 + 1.297 + Recti viewport; 1.298 + if ( pOverrideRenderedPixelSize != NULL ) 1.299 + { 1.300 + viewport = CalculateViewportInternal ( eyeType, actualRendertargetSurfaceSize, *pOverrideRenderedPixelSize, bRendertargetSharedByBothEyes, false ); 1.301 + } 1.302 + else 1.303 + { 1.304 + viewport = CalculateViewportDensityInternal ( eyeType, 1.305 + distortionAndFov.Distortion, 1.306 + distortionAndFov.Fov, 1.307 + actualRendertargetSurfaceSize, bRendertargetSharedByBothEyes, 1.0f, false ); 1.308 + } 1.309 + 1.310 + return CalculateStereoEyeParamsInternal ( 1.311 + eyeType, hmd, 1.312 + distortionAndFov.Distortion, 1.313 + distortionAndFov.Fov, 1.314 + actualRendertargetSurfaceSize, viewport, 1.315 + bRightHanded, zNear, zFar, false, zoomFactor ); 1.316 +} 1.317 + 1.318 + 1.319 +FovPort CalculateRecommendedFov ( HmdRenderInfo const &hmd, 1.320 + StereoEye eyeType, 1.321 + bool bMakeFovSymmetrical /* = false */ ) 1.322 +{ 1.323 + DistortionAndFov distortionAndFov = CalculateDistortionAndFovInternal ( eyeType, hmd, NULL, NULL, OVR_DEFAULT_EXTRA_EYE_ROTATION ); 1.324 + FovPort fov = distortionAndFov.Fov; 1.325 + if ( bMakeFovSymmetrical ) 1.326 + { 1.327 + // Deal with engines that cannot support an off-center projection. 1.328 + // Unfortunately this means they will be rendering pixels that the user can't actually see. 
1.329 + float fovTanH = Alg::Max ( fov.LeftTan, fov.RightTan ); 1.330 + float fovTanV = Alg::Max ( fov.UpTan, fov.DownTan ); 1.331 + fov.LeftTan = fovTanH; 1.332 + fov.RightTan = fovTanH; 1.333 + fov.UpTan = fovTanV; 1.334 + fov.DownTan = fovTanV; 1.335 + } 1.336 + return fov; 1.337 +} 1.338 + 1.339 +ViewportScaleAndOffset ModifyRenderViewport ( StereoEyeParams const ¶ms, 1.340 + Sizei const &actualRendertargetSurfaceSize, 1.341 + Recti const &renderViewport ) 1.342 +{ 1.343 + return CalculateViewportScaleAndOffsetInternal ( params.EyeToSourceNDC, renderViewport, actualRendertargetSurfaceSize ); 1.344 +} 1.345 + 1.346 +ViewportScaleAndOffset ModifyRenderSize ( StereoEyeParams const ¶ms, 1.347 + Sizei const &actualRendertargetSurfaceSize, 1.348 + Sizei const &requestedRenderSize, 1.349 + bool bRendertargetSharedByBothEyes /*= false*/ ) 1.350 +{ 1.351 + Recti renderViewport = CalculateViewportInternal ( params.Eye, actualRendertargetSurfaceSize, requestedRenderSize, bRendertargetSharedByBothEyes, false ); 1.352 + return CalculateViewportScaleAndOffsetInternal ( params.EyeToSourceNDC, renderViewport, actualRendertargetSurfaceSize ); 1.353 +} 1.354 + 1.355 +ViewportScaleAndOffset ModifyRenderDensity ( StereoEyeParams const ¶ms, 1.356 + Sizei const &actualRendertargetSurfaceSize, 1.357 + float pixelDensity /*= 1.0f*/, 1.358 + bool bRendertargetSharedByBothEyes /*= false*/ ) 1.359 +{ 1.360 + Recti renderViewport = CalculateViewportDensityInternal ( params.Eye, params.Distortion, params.Fov, actualRendertargetSurfaceSize, bRendertargetSharedByBothEyes, pixelDensity, false ); 1.361 + return CalculateViewportScaleAndOffsetInternal ( params.EyeToSourceNDC, renderViewport, actualRendertargetSurfaceSize ); 1.362 +} 1.363 + 1.364 + 1.365 +//----------------------------------------------------------------------------------- 1.366 +// **** StereoConfig Implementation 1.367 + 1.368 +StereoConfig::StereoConfig(StereoMode mode) 1.369 + : Mode(mode), 1.370 + DirtyFlag(true) 1.371 +{ 1.372 + // Initialize "fake" default HMD values for testing without HMD plugged in. 
1.373 + // These default values match those returned by DK1 1.374 + // (at least they did at time of writing - certainly good enough for debugging) 1.375 + Hmd.HmdType = HmdType_None; 1.376 + Hmd.ResolutionInPixels = Sizei(1280, 800); 1.377 + Hmd.ScreenSizeInMeters = Sizef(0.1498f, 0.0936f); 1.378 + Hmd.ScreenGapSizeInMeters = 0.0f; 1.379 + Hmd.PelOffsetR = Vector2f ( 0.0f, 0.0f ); 1.380 + Hmd.PelOffsetB = Vector2f ( 0.0f, 0.0f ); 1.381 + Hmd.CenterFromTopInMeters = 0.0468f; 1.382 + Hmd.LensSeparationInMeters = 0.0635f; 1.383 + Hmd.LensDiameterInMeters = 0.035f; 1.384 + Hmd.LensSurfaceToMidplateInMeters = 0.025f; 1.385 + Hmd.EyeCups = EyeCup_DK1A; 1.386 + Hmd.Shutter.Type = HmdShutter_RollingTopToBottom; 1.387 + Hmd.Shutter.VsyncToNextVsync = ( 1.0f / 60.0f ); 1.388 + Hmd.Shutter.VsyncToFirstScanline = 0.000052f; 1.389 + Hmd.Shutter.FirstScanlineToLastScanline = 0.016580f; 1.390 + Hmd.Shutter.PixelSettleTime = 0.015f; 1.391 + Hmd.Shutter.PixelPersistence = ( 1.0f / 60.0f ); 1.392 + Hmd.EyeLeft.Distortion.SetToIdentity(); 1.393 + Hmd.EyeLeft.Distortion.MetersPerTanAngleAtCenter = 0.043875f; 1.394 + Hmd.EyeLeft.Distortion.Eqn = Distortion_RecipPoly4; 1.395 + Hmd.EyeLeft.Distortion.K[0] = 1.0f; 1.396 + Hmd.EyeLeft.Distortion.K[1] = -0.3999f; 1.397 + Hmd.EyeLeft.Distortion.K[2] = 0.2408f; 1.398 + Hmd.EyeLeft.Distortion.K[3] = -0.4589f; 1.399 + Hmd.EyeLeft.Distortion.MaxR = 1.0f; 1.400 + Hmd.EyeLeft.Distortion.ChromaticAberration[0] = 0.006f; 1.401 + Hmd.EyeLeft.Distortion.ChromaticAberration[1] = 0.0f; 1.402 + Hmd.EyeLeft.Distortion.ChromaticAberration[2] = -0.014f; 1.403 + Hmd.EyeLeft.Distortion.ChromaticAberration[3] = 0.0f; 1.404 + Hmd.EyeLeft.NoseToPupilInMeters = 0.62f; 1.405 + Hmd.EyeLeft.ReliefInMeters = 0.013f; 1.406 + Hmd.EyeRight = Hmd.EyeLeft; 1.407 + 1.408 + SetViewportMode = SVPM_Density; 1.409 + SetViewportPixelsPerDisplayPixel = 1.0f; 1.410 + // Not used in this mode, but init them anyway. 
1.411 + SetViewportSize[0] = Sizei(0,0); 1.412 + SetViewportSize[1] = Sizei(0,0); 1.413 + SetViewport[0] = Recti(0,0,0,0); 1.414 + SetViewport[1] = Recti(0,0,0,0); 1.415 + 1.416 + OverrideLens = false; 1.417 + OverrideTanHalfFov = false; 1.418 + OverrideZeroIpd = false; 1.419 + ExtraEyeRotationInRadians = OVR_DEFAULT_EXTRA_EYE_ROTATION; 1.420 + IsRendertargetSharedByBothEyes = true; 1.421 + RightHandedProjection = true; 1.422 + 1.423 + // This should cause an assert if the app does not call SetRendertargetSize() 1.424 + RendertargetSize = Sizei ( 0, 0 ); 1.425 + 1.426 + ZNear = 0.01f; 1.427 + ZFar = 10000.0f; 1.428 + 1.429 + Set2DAreaFov(DegreeToRad(85.0f)); 1.430 +} 1.431 + 1.432 +void StereoConfig::SetHmdRenderInfo(const HmdRenderInfo& hmd) 1.433 +{ 1.434 + Hmd = hmd; 1.435 + DirtyFlag = true; 1.436 +} 1.437 + 1.438 +void StereoConfig::Set2DAreaFov(float fovRadians) 1.439 +{ 1.440 + Area2DFov = fovRadians; 1.441 + DirtyFlag = true; 1.442 +} 1.443 + 1.444 +const StereoEyeParamsWithOrtho& StereoConfig::GetEyeRenderParams(StereoEye eye) 1.445 +{ 1.446 + if ( DirtyFlag ) 1.447 + { 1.448 + UpdateComputedState(); 1.449 + } 1.450 + 1.451 + static const uint8_t eyeParamIndices[3] = { 0, 0, 1 }; 1.452 + 1.453 + OVR_ASSERT(eye < sizeof(eyeParamIndices)); 1.454 + return EyeRenderParams[eyeParamIndices[eye]]; 1.455 +} 1.456 + 1.457 +void StereoConfig::SetLensOverride ( LensConfig const *pLensOverrideLeft /*= NULL*/, 1.458 + LensConfig const *pLensOverrideRight /*= NULL*/ ) 1.459 +{ 1.460 + if ( pLensOverrideLeft == NULL ) 1.461 + { 1.462 + OverrideLens = false; 1.463 + } 1.464 + else 1.465 + { 1.466 + OverrideLens = true; 1.467 + LensOverrideLeft = *pLensOverrideLeft; 1.468 + LensOverrideRight = *pLensOverrideLeft; 1.469 + if ( pLensOverrideRight != NULL ) 1.470 + { 1.471 + LensOverrideRight = *pLensOverrideRight; 1.472 + } 1.473 + } 1.474 + DirtyFlag = true; 1.475 +} 1.476 + 1.477 +void StereoConfig::SetRendertargetSize (Size<int> const rendertargetSize, 1.478 + bool rendertargetIsSharedByBothEyes ) 1.479 +{ 1.480 + RendertargetSize = rendertargetSize; 1.481 + IsRendertargetSharedByBothEyes = rendertargetIsSharedByBothEyes; 1.482 + DirtyFlag = true; 1.483 +} 1.484 + 1.485 +void StereoConfig::SetFov ( FovPort const *pfovLeft /*= NULL*/, 1.486 + FovPort const *pfovRight /*= NULL*/ ) 1.487 +{ 1.488 + DirtyFlag = true; 1.489 + if ( pfovLeft == NULL ) 1.490 + { 1.491 + OverrideTanHalfFov = false; 1.492 + } 1.493 + else 1.494 + { 1.495 + OverrideTanHalfFov = true; 1.496 + FovOverrideLeft = *pfovLeft; 1.497 + FovOverrideRight = *pfovLeft; 1.498 + if ( pfovRight != NULL ) 1.499 + { 1.500 + FovOverrideRight = *pfovRight; 1.501 + } 1.502 + } 1.503 +} 1.504 + 1.505 + 1.506 +void StereoConfig::SetZeroVirtualIpdOverride ( bool enableOverride ) 1.507 +{ 1.508 + DirtyFlag = true; 1.509 + OverrideZeroIpd = enableOverride; 1.510 +} 1.511 + 1.512 + 1.513 +void StereoConfig::SetZClipPlanesAndHandedness ( float zNear /*= 0.01f*/, float zFar /*= 10000.0f*/, bool rightHandedProjection /*= true*/ ) 1.514 +{ 1.515 + DirtyFlag = true; 1.516 + ZNear = zNear; 1.517 + ZFar = zFar; 1.518 + RightHandedProjection = rightHandedProjection; 1.519 +} 1.520 + 1.521 +void StereoConfig::SetExtraEyeRotation ( float extraEyeRotationInRadians ) 1.522 +{ 1.523 + DirtyFlag = true; 1.524 + ExtraEyeRotationInRadians = extraEyeRotationInRadians; 1.525 +} 1.526 + 1.527 +Sizei StereoConfig::CalculateRecommendedTextureSize ( bool rendertargetSharedByBothEyes, 1.528 + float pixelDensityInCenter /*= 1.0f*/ ) 1.529 +{ 1.530 + return 
Render::CalculateRecommendedTextureSize ( Hmd, rendertargetSharedByBothEyes, pixelDensityInCenter ); 1.531 +} 1.532 + 1.533 + 1.534 + 1.535 +void StereoConfig::UpdateComputedState() 1.536 +{ 1.537 + int numEyes = 2; 1.538 + StereoEye eyeTypes[2]; 1.539 + 1.540 + switch ( Mode ) 1.541 + { 1.542 + case Stereo_None: 1.543 + numEyes = 1; 1.544 + eyeTypes[0] = StereoEye_Center; 1.545 + break; 1.546 + 1.547 + case Stereo_LeftRight_Multipass: 1.548 + numEyes = 2; 1.549 + eyeTypes[0] = StereoEye_Left; 1.550 + eyeTypes[1] = StereoEye_Right; 1.551 + break; 1.552 + 1.553 + default: 1.554 + numEyes = 0; 1.555 + OVR_ASSERT( false ); 1.556 + break; 1.557 + } 1.558 + 1.559 + // If either of these fire, you've probably forgotten to call SetRendertargetSize() 1.560 + OVR_ASSERT ( RendertargetSize.w > 0 ); 1.561 + OVR_ASSERT ( RendertargetSize.h > 0 ); 1.562 + 1.563 + for ( int eyeNum = 0; eyeNum < numEyes; eyeNum++ ) 1.564 + { 1.565 + StereoEye eyeType = eyeTypes[eyeNum]; 1.566 + LensConfig *pLensOverride = NULL; 1.567 + if ( OverrideLens ) 1.568 + { 1.569 + if ( eyeType == StereoEye_Right ) 1.570 + { 1.571 + pLensOverride = &LensOverrideRight; 1.572 + } 1.573 + else 1.574 + { 1.575 + pLensOverride = &LensOverrideLeft; 1.576 + } 1.577 + } 1.578 + 1.579 + FovPort *pTanHalfFovOverride = NULL; 1.580 + if ( OverrideTanHalfFov ) 1.581 + { 1.582 + if ( eyeType == StereoEye_Right ) 1.583 + { 1.584 + pTanHalfFovOverride = &FovOverrideRight; 1.585 + } 1.586 + else 1.587 + { 1.588 + pTanHalfFovOverride = &FovOverrideLeft; 1.589 + } 1.590 + } 1.591 + 1.592 + DistortionAndFov distortionAndFov = 1.593 + CalculateDistortionAndFovInternal ( eyeType, Hmd, 1.594 + pLensOverride, pTanHalfFovOverride, 1.595 + ExtraEyeRotationInRadians ); 1.596 + 1.597 + EyeRenderParams[eyeNum].StereoEye.Distortion = distortionAndFov.Distortion; 1.598 + EyeRenderParams[eyeNum].StereoEye.Fov = distortionAndFov.Fov; 1.599 + } 1.600 + 1.601 + if ( OverrideZeroIpd ) 1.602 + { 1.603 + // Take the union of the calculated eye FOVs. 1.604 + FovPort fov; 1.605 + fov.UpTan = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.UpTan , EyeRenderParams[1].StereoEye.Fov.UpTan ); 1.606 + fov.DownTan = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.DownTan , EyeRenderParams[1].StereoEye.Fov.DownTan ); 1.607 + fov.LeftTan = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.LeftTan , EyeRenderParams[1].StereoEye.Fov.LeftTan ); 1.608 + fov.RightTan = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.RightTan, EyeRenderParams[1].StereoEye.Fov.RightTan ); 1.609 + EyeRenderParams[0].StereoEye.Fov = fov; 1.610 + EyeRenderParams[1].StereoEye.Fov = fov; 1.611 + } 1.612 + 1.613 + for ( int eyeNum = 0; eyeNum < numEyes; eyeNum++ ) 1.614 + { 1.615 + StereoEye eyeType = eyeTypes[eyeNum]; 1.616 + 1.617 + DistortionRenderDesc localDistortion = EyeRenderParams[eyeNum].StereoEye.Distortion; 1.618 + FovPort fov = EyeRenderParams[eyeNum].StereoEye.Fov; 1.619 + 1.620 + // Use a placeholder - will be overridden later. 1.621 + Recti tempViewport = Recti ( 0, 0, 1, 1 ); 1.622 + 1.623 + EyeRenderParams[eyeNum].StereoEye = CalculateStereoEyeParamsInternal ( 1.624 + eyeType, Hmd, localDistortion, fov, 1.625 + RendertargetSize, tempViewport, 1.626 + RightHandedProjection, ZNear, ZFar, 1.627 + OverrideZeroIpd ); 1.628 + 1.629 + // We want to create a virtual 2D surface we can draw debug text messages to. 1.630 + // We'd like it to be a fixed distance (OrthoDistance) away, 1.631 + // and to cover a specific FOV (Area2DFov). 
We need to find the projection matrix for this, 1.632 + // and also to know how large it is in pixels to achieve a 1:1 mapping at the center of the screen. 1.633 + float orthoDistance = 0.8f; 1.634 + float orthoHalfFov = tanf ( Area2DFov * 0.5f ); 1.635 + Vector2f unityOrthoPixelSize = localDistortion.PixelsPerTanAngleAtCenter * ( orthoHalfFov * 2.0f ); 1.636 + float localInterpupillaryDistance = Hmd.EyeLeft.NoseToPupilInMeters + Hmd.EyeRight.NoseToPupilInMeters; 1.637 + if ( OverrideZeroIpd ) 1.638 + { 1.639 + localInterpupillaryDistance = 0.0f; 1.640 + } 1.641 + Matrix4f ortho = CreateOrthoSubProjection ( true, eyeType, 1.642 + orthoHalfFov, orthoHalfFov, 1.643 + unityOrthoPixelSize.x, unityOrthoPixelSize.y, 1.644 + orthoDistance, localInterpupillaryDistance, 1.645 + EyeRenderParams[eyeNum].StereoEye.RenderedProjection ); 1.646 + EyeRenderParams[eyeNum].OrthoProjection = ortho; 1.647 + } 1.648 + 1.649 + // ...and now set up the viewport, scale & offset the way the app wanted. 1.650 + setupViewportScaleAndOffsets(); 1.651 + 1.652 + if ( OverrideZeroIpd ) 1.653 + { 1.654 + // Monocular rendering has some fragile parts... don't break any by accident. 1.655 + OVR_ASSERT ( EyeRenderParams[0].StereoEye.Fov.UpTan == EyeRenderParams[1].StereoEye.Fov.UpTan ); 1.656 + OVR_ASSERT ( EyeRenderParams[0].StereoEye.Fov.DownTan == EyeRenderParams[1].StereoEye.Fov.DownTan ); 1.657 + OVR_ASSERT ( EyeRenderParams[0].StereoEye.Fov.LeftTan == EyeRenderParams[1].StereoEye.Fov.LeftTan ); 1.658 + OVR_ASSERT ( EyeRenderParams[0].StereoEye.Fov.RightTan == EyeRenderParams[1].StereoEye.Fov.RightTan ); 1.659 + OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[0][0] == EyeRenderParams[1].StereoEye.RenderedProjection.M[0][0] ); 1.660 + OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[1][1] == EyeRenderParams[1].StereoEye.RenderedProjection.M[1][1] ); 1.661 + OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[0][2] == EyeRenderParams[1].StereoEye.RenderedProjection.M[0][2] ); 1.662 + OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[1][2] == EyeRenderParams[1].StereoEye.RenderedProjection.M[1][2] ); 1.663 + OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedViewport == EyeRenderParams[1].StereoEye.RenderedViewport ); 1.664 + OVR_ASSERT ( EyeRenderParams[0].StereoEye.EyeToSourceUV.Offset == EyeRenderParams[1].StereoEye.EyeToSourceUV.Offset ); 1.665 + OVR_ASSERT ( EyeRenderParams[0].StereoEye.EyeToSourceUV.Scale == EyeRenderParams[1].StereoEye.EyeToSourceUV.Scale ); 1.666 + OVR_ASSERT ( EyeRenderParams[0].StereoEye.EyeToSourceNDC.Offset == EyeRenderParams[1].StereoEye.EyeToSourceNDC.Offset ); 1.667 + OVR_ASSERT ( EyeRenderParams[0].StereoEye.EyeToSourceNDC.Scale == EyeRenderParams[1].StereoEye.EyeToSourceNDC.Scale ); 1.668 + OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[0][0] == EyeRenderParams[1].OrthoProjection.M[0][0] ); 1.669 + OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[1][1] == EyeRenderParams[1].OrthoProjection.M[1][1] ); 1.670 + OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[0][2] == EyeRenderParams[1].OrthoProjection.M[0][2] ); 1.671 + OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[1][2] == EyeRenderParams[1].OrthoProjection.M[1][2] ); 1.672 + } 1.673 + 1.674 + DirtyFlag = false; 1.675 +} 1.676 + 1.677 + 1.678 + 1.679 +ViewportScaleAndOffsetBothEyes StereoConfig::setupViewportScaleAndOffsets() 1.680 +{ 1.681 + for ( int eyeNum = 0; eyeNum < 2; eyeNum++ ) 1.682 + { 1.683 + StereoEye eyeType = ( eyeNum == 0 ) ? 
StereoEye_Left : StereoEye_Right; 1.684 + 1.685 + DistortionRenderDesc localDistortion = EyeRenderParams[eyeNum].StereoEye.Distortion; 1.686 + FovPort fov = EyeRenderParams[eyeNum].StereoEye.Fov; 1.687 + 1.688 + Recti renderedViewport; 1.689 + switch ( SetViewportMode ) 1.690 + { 1.691 + case SVPM_Density: 1.692 + renderedViewport = CalculateViewportDensityInternal ( 1.693 + eyeType, localDistortion, fov, 1.694 + RendertargetSize, IsRendertargetSharedByBothEyes, 1.695 + SetViewportPixelsPerDisplayPixel, OverrideZeroIpd ); 1.696 + break; 1.697 + case SVPM_Size: 1.698 + if ( ( eyeType == StereoEye_Right ) && !OverrideZeroIpd ) 1.699 + { 1.700 + renderedViewport = CalculateViewportInternal ( 1.701 + eyeType, RendertargetSize, 1.702 + SetViewportSize[1], 1.703 + IsRendertargetSharedByBothEyes, OverrideZeroIpd ); 1.704 + } 1.705 + else 1.706 + { 1.707 + renderedViewport = CalculateViewportInternal ( 1.708 + eyeType, RendertargetSize, 1.709 + SetViewportSize[0], 1.710 + IsRendertargetSharedByBothEyes, OverrideZeroIpd ); 1.711 + } 1.712 + break; 1.713 + case SVPM_Viewport: 1.714 + if ( ( eyeType == StereoEye_Right ) && !OverrideZeroIpd ) 1.715 + { 1.716 + renderedViewport = SetViewport[1]; 1.717 + } 1.718 + else 1.719 + { 1.720 + renderedViewport = SetViewport[0]; 1.721 + } 1.722 + break; 1.723 + default: OVR_ASSERT ( false ); break; 1.724 + } 1.725 + 1.726 + ViewportScaleAndOffset vpsao = CalculateViewportScaleAndOffsetInternal ( 1.727 + EyeRenderParams[eyeNum].StereoEye.EyeToSourceNDC, 1.728 + renderedViewport, 1.729 + RendertargetSize ); 1.730 + EyeRenderParams[eyeNum].StereoEye.RenderedViewport = vpsao.RenderedViewport; 1.731 + EyeRenderParams[eyeNum].StereoEye.EyeToSourceUV = vpsao.EyeToSourceUV; 1.732 + } 1.733 + 1.734 + ViewportScaleAndOffsetBothEyes result; 1.735 + result.Left.EyeToSourceUV = EyeRenderParams[0].StereoEye.EyeToSourceUV; 1.736 + result.Left.RenderedViewport = EyeRenderParams[0].StereoEye.RenderedViewport; 1.737 + result.Right.EyeToSourceUV = EyeRenderParams[1].StereoEye.EyeToSourceUV; 1.738 + result.Right.RenderedViewport = EyeRenderParams[1].StereoEye.RenderedViewport; 1.739 + return result; 1.740 +} 1.741 + 1.742 +// Specify a pixel density - how many rendered pixels per pixel in the physical display. 1.743 +ViewportScaleAndOffsetBothEyes StereoConfig::SetRenderDensity ( float pixelsPerDisplayPixel ) 1.744 +{ 1.745 + SetViewportMode = SVPM_Density; 1.746 + SetViewportPixelsPerDisplayPixel = pixelsPerDisplayPixel; 1.747 + return setupViewportScaleAndOffsets(); 1.748 +} 1.749 + 1.750 +// Supply the size directly. Will be clamped to the physical rendertarget size. 1.751 +ViewportScaleAndOffsetBothEyes StereoConfig::SetRenderSize ( Sizei const &renderSizeLeft, Sizei const &renderSizeRight ) 1.752 +{ 1.753 + SetViewportMode = SVPM_Size; 1.754 + SetViewportSize[0] = renderSizeLeft; 1.755 + SetViewportSize[1] = renderSizeRight; 1.756 + return setupViewportScaleAndOffsets(); 1.757 +} 1.758 + 1.759 +// Supply the viewport directly. This is not clamped to the physical rendertarget - careful now! 1.760 +ViewportScaleAndOffsetBothEyes StereoConfig::SetRenderViewport ( Recti const &renderViewportLeft, Recti const &renderViewportRight ) 1.761 +{ 1.762 + SetViewportMode = SVPM_Viewport; 1.763 + SetViewport[0] = renderViewportLeft; 1.764 + SetViewport[1] = renderViewportRight; 1.765 + return setupViewportScaleAndOffsets(); 1.766 +} 1.767 + 1.768 +Matrix4f StereoConfig::GetProjectionWithZoom ( StereoEye eye, float fovZoom ) const 1.769 +{ 1.770 + int eyeNum = ( eye == StereoEye_Right ) ? 
1 : 0; 1.771 + float fovScale = 1.0f / fovZoom; 1.772 + FovPort fovPort = EyeRenderParams[eyeNum].StereoEye.Fov; 1.773 + fovPort.LeftTan *= fovScale; 1.774 + fovPort.RightTan *= fovScale; 1.775 + fovPort.UpTan *= fovScale; 1.776 + fovPort.DownTan *= fovScale; 1.777 + return CreateProjection ( RightHandedProjection, fovPort, ZNear, ZFar ); 1.778 +} 1.779 + 1.780 + 1.781 + 1.782 + 1.783 +//----------------------------------------------------------------------------------- 1.784 +// ***** Distortion Mesh Rendering 1.785 + 1.786 + 1.787 +// Pow2 for the Morton order to work! 1.788 +// 4 is too low - it is easy to see the "wobbles" in the HMD. 1.789 +// 5 is realllly close but you can see pixel differences with even/odd frame checking. 1.790 +// 6 is indistinguishable on a monitor on even/odd frames. 1.791 +static const int DMA_GridSizeLog2 = 6; 1.792 +static const int DMA_GridSize = 1<<DMA_GridSizeLog2; 1.793 +static const int DMA_NumVertsPerEye = (DMA_GridSize+1)*(DMA_GridSize+1); 1.794 +static const int DMA_NumTrisPerEye = (DMA_GridSize)*(DMA_GridSize)*2; 1.795 + 1.796 + 1.797 + 1.798 +DistortionMeshVertexData DistortionMeshMakeVertex ( Vector2f screenNDC, 1.799 + bool rightEye, 1.800 + const HmdRenderInfo &hmdRenderInfo, 1.801 + const DistortionRenderDesc &distortion, const ScaleAndOffset2D &eyeToSourceNDC ) 1.802 +{ 1.803 + DistortionMeshVertexData result; 1.804 + 1.805 + float xOffset = 0.0f; 1.806 + if (rightEye) 1.807 + { 1.808 + xOffset = 1.0f; 1.809 + } 1.810 + 1.811 + Vector2f tanEyeAnglesR, tanEyeAnglesG, tanEyeAnglesB; 1.812 + TransformScreenNDCToTanFovSpaceChroma ( &tanEyeAnglesR, &tanEyeAnglesG, &tanEyeAnglesB, 1.813 + distortion, screenNDC ); 1.814 + 1.815 + result.TanEyeAnglesR = tanEyeAnglesR; 1.816 + result.TanEyeAnglesG = tanEyeAnglesG; 1.817 + result.TanEyeAnglesB = tanEyeAnglesB; 1.818 + 1.819 + HmdShutterTypeEnum shutterType = hmdRenderInfo.Shutter.Type; 1.820 + switch ( shutterType ) 1.821 + { 1.822 + case HmdShutter_Global: 1.823 + result.TimewarpLerp = 0.0f; 1.824 + break; 1.825 + case HmdShutter_RollingLeftToRight: 1.826 + // Retrace is left to right - left eye goes 0.0 -> 0.5, then right goes 0.5 -> 1.0 1.827 + result.TimewarpLerp = screenNDC.x * 0.25f + 0.25f; 1.828 + if (rightEye) 1.829 + { 1.830 + result.TimewarpLerp += 0.5f; 1.831 + } 1.832 + break; 1.833 + case HmdShutter_RollingRightToLeft: 1.834 + // Retrace is right to left - right eye goes 0.0 -> 0.5, then left goes 0.5 -> 1.0 1.835 + result.TimewarpLerp = 0.75f - screenNDC.x * 0.25f; 1.836 + if (rightEye) 1.837 + { 1.838 + result.TimewarpLerp -= 0.5f; 1.839 + } 1.840 + break; 1.841 + case HmdShutter_RollingTopToBottom: 1.842 + // Retrace is top to bottom on both eyes at the same time. 1.843 + result.TimewarpLerp = screenNDC.y * 0.5f + 0.5f; 1.844 + break; 1.845 + default: OVR_ASSERT ( false ); break; 1.846 + } 1.847 + 1.848 + // When does the fade-to-black edge start? Chosen heuristically. 1.849 + float fadeOutBorderFractionTexture = 0.1f; 1.850 + float fadeOutBorderFractionTextureInnerEdge = 0.1f; 1.851 + float fadeOutBorderFractionScreen = 0.1f; 1.852 + float fadeOutFloor = 0.6f; // the floor controls how much black is in the fade region 1.853 + 1.854 + if (hmdRenderInfo.HmdType == HmdType_DK1) 1.855 + { 1.856 + fadeOutBorderFractionTexture = 0.3f; 1.857 + fadeOutBorderFractionTextureInnerEdge = 0.075f; 1.858 + fadeOutBorderFractionScreen = 0.075f; 1.859 + fadeOutFloor = 0.25f; 1.860 + } 1.861 + 1.862 + // Fade out at texture edges. 
1.863 + // The furthest out will be the blue channel, because of chromatic aberration (true of any standard lens) 1.864 + Vector2f sourceTexCoordBlueNDC = TransformTanFovSpaceToRendertargetNDC ( eyeToSourceNDC, tanEyeAnglesB ); 1.865 + if (rightEye) 1.866 + { 1.867 + // The inner edge of the eye texture is usually much more magnified, because it's right against the middle of the screen, not the FOV edge. 1.868 + // So we want a different scaling factor for that. This code flips the texture NDC so that +1.0 is the inner edge 1.869 + sourceTexCoordBlueNDC.x = -sourceTexCoordBlueNDC.x; 1.870 + } 1.871 + float edgeFadeIn = ( 1.0f / fadeOutBorderFractionTextureInnerEdge ) * ( 1.0f - sourceTexCoordBlueNDC.x ) ; // Inner 1.872 + edgeFadeIn = Alg::Min ( edgeFadeIn, ( 1.0f / fadeOutBorderFractionTexture ) * ( 1.0f + sourceTexCoordBlueNDC.x ) ); // Outer 1.873 + edgeFadeIn = Alg::Min ( edgeFadeIn, ( 1.0f / fadeOutBorderFractionTexture ) * ( 1.0f - sourceTexCoordBlueNDC.y ) ); // Upper 1.874 + edgeFadeIn = Alg::Min ( edgeFadeIn, ( 1.0f / fadeOutBorderFractionTexture ) * ( 1.0f + sourceTexCoordBlueNDC.y ) ); // Lower 1.875 + 1.876 + // Also fade out at screen edges. Since this is in pixel space, no need to do inner specially. 1.877 + float edgeFadeInScreen = ( 1.0f / fadeOutBorderFractionScreen ) * 1.878 + ( 1.0f - Alg::Max ( Alg::Abs ( screenNDC.x ), Alg::Abs ( screenNDC.y ) ) ); 1.879 + edgeFadeIn = Alg::Min ( edgeFadeInScreen, edgeFadeIn ) + fadeOutFloor; 1.880 + 1.881 + // Note - this is NOT clamped negatively. 1.882 + // For rendering methods that interpolate over a coarse grid, we need the values to go negative for correct intersection with zero. 1.883 + result.Shade = Alg::Min ( edgeFadeIn, 1.0f ); 1.884 + result.ScreenPosNDC.x = 0.5f * screenNDC.x - 0.5f + xOffset; 1.885 + result.ScreenPosNDC.y = -screenNDC.y; 1.886 + 1.887 + return result; 1.888 +} 1.889 + 1.890 + 1.891 +void DistortionMeshDestroy ( DistortionMeshVertexData *pVertices, uint16_t *pTriangleMeshIndices ) 1.892 +{ 1.893 + OVR_FREE ( pVertices ); 1.894 + OVR_FREE ( pTriangleMeshIndices ); 1.895 +} 1.896 + 1.897 +void DistortionMeshCreate ( DistortionMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices, 1.898 + int *pNumVertices, int *pNumTriangles, 1.899 + const StereoEyeParams &stereoParams, const HmdRenderInfo &hmdRenderInfo ) 1.900 +{ 1.901 + bool rightEye = ( stereoParams.Eye == StereoEye_Right ); 1.902 + int vertexCount = 0; 1.903 + int triangleCount = 0; 1.904 + 1.905 + // Generate mesh into allocated data and return result. 1.906 + DistortionMeshCreate(ppVertices, ppTriangleListIndices, &vertexCount, &triangleCount, 1.907 + rightEye, hmdRenderInfo, stereoParams.Distortion, stereoParams.EyeToSourceNDC); 1.908 + 1.909 + *pNumVertices = vertexCount; 1.910 + *pNumTriangles = triangleCount; 1.911 +} 1.912 + 1.913 + 1.914 +// Generate distortion mesh for a eye. 
1.915 +void DistortionMeshCreate( DistortionMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices, 1.916 + int *pNumVertices, int *pNumTriangles, 1.917 + bool rightEye, 1.918 + const HmdRenderInfo &hmdRenderInfo, 1.919 + const DistortionRenderDesc &distortion, const ScaleAndOffset2D &eyeToSourceNDC ) 1.920 +{ 1.921 + *pNumVertices = DMA_NumVertsPerEye; 1.922 + *pNumTriangles = DMA_NumTrisPerEye; 1.923 + 1.924 + *ppVertices = (DistortionMeshVertexData*) 1.925 + OVR_ALLOC( sizeof(DistortionMeshVertexData) * (*pNumVertices) ); 1.926 + *ppTriangleListIndices = (uint16_t*) OVR_ALLOC( sizeof(uint16_t) * (*pNumTriangles) * 3 ); 1.927 + 1.928 + if (!*ppVertices || !*ppTriangleListIndices) 1.929 + { 1.930 + if (*ppVertices) 1.931 + { 1.932 + OVR_FREE(*ppVertices); 1.933 + } 1.934 + if (*ppTriangleListIndices) 1.935 + { 1.936 + OVR_FREE(*ppTriangleListIndices); 1.937 + } 1.938 + *ppVertices = NULL; 1.939 + *ppTriangleListIndices = NULL; 1.940 + *pNumTriangles = 0; 1.941 + *pNumVertices = 0; 1.942 + return; 1.943 + } 1.944 + 1.945 + 1.946 + 1.947 + // Populate vertex buffer info 1.948 + 1.949 + // First pass - build up raw vertex data. 1.950 + DistortionMeshVertexData* pcurVert = *ppVertices; 1.951 + 1.952 + for ( int y = 0; y <= DMA_GridSize; y++ ) 1.953 + { 1.954 + for ( int x = 0; x <= DMA_GridSize; x++ ) 1.955 + { 1.956 + 1.957 + Vector2f sourceCoordNDC; 1.958 + // NDC texture coords [-1,+1] 1.959 + sourceCoordNDC.x = 2.0f * ( (float)x / (float)DMA_GridSize ) - 1.0f; 1.960 + sourceCoordNDC.y = 2.0f * ( (float)y / (float)DMA_GridSize ) - 1.0f; 1.961 + Vector2f tanEyeAngle = TransformRendertargetNDCToTanFovSpace ( eyeToSourceNDC, sourceCoordNDC ); 1.962 + 1.963 + // Find a corresponding screen position. 1.964 + // Note - this function does not have to be precise - we're just trying to match the mesh tessellation 1.965 + // with the shape of the distortion to minimise the number of trianlges needed. 1.966 + Vector2f screenNDC = TransformTanFovSpaceToScreenNDC ( distortion, tanEyeAngle, false ); 1.967 + // ...but don't let verts overlap to the other eye. 1.968 + screenNDC.x = Alg::Max ( -1.0f, Alg::Min ( screenNDC.x, 1.0f ) ); 1.969 + screenNDC.y = Alg::Max ( -1.0f, Alg::Min ( screenNDC.y, 1.0f ) ); 1.970 + 1.971 + // From those screen positions, generate the vertex. 1.972 + *pcurVert = DistortionMeshMakeVertex ( screenNDC, rightEye, hmdRenderInfo, distortion, eyeToSourceNDC ); 1.973 + pcurVert++; 1.974 + } 1.975 + } 1.976 + 1.977 + 1.978 + // Populate index buffer info 1.979 + uint16_t *pcurIndex = *ppTriangleListIndices; 1.980 + 1.981 + for ( int triNum = 0; triNum < DMA_GridSize * DMA_GridSize; triNum++ ) 1.982 + { 1.983 + // Use a Morton order to help locality of FB, texture and vertex cache. 
1.984 + // (0.325ms raster order -> 0.257ms Morton order) 1.985 + OVR_ASSERT ( DMA_GridSize <= 256 ); 1.986 + int x = ( ( triNum & 0x0001 ) >> 0 ) | 1.987 + ( ( triNum & 0x0004 ) >> 1 ) | 1.988 + ( ( triNum & 0x0010 ) >> 2 ) | 1.989 + ( ( triNum & 0x0040 ) >> 3 ) | 1.990 + ( ( triNum & 0x0100 ) >> 4 ) | 1.991 + ( ( triNum & 0x0400 ) >> 5 ) | 1.992 + ( ( triNum & 0x1000 ) >> 6 ) | 1.993 + ( ( triNum & 0x4000 ) >> 7 ); 1.994 + int y = ( ( triNum & 0x0002 ) >> 1 ) | 1.995 + ( ( triNum & 0x0008 ) >> 2 ) | 1.996 + ( ( triNum & 0x0020 ) >> 3 ) | 1.997 + ( ( triNum & 0x0080 ) >> 4 ) | 1.998 + ( ( triNum & 0x0200 ) >> 5 ) | 1.999 + ( ( triNum & 0x0800 ) >> 6 ) | 1.1000 + ( ( triNum & 0x2000 ) >> 7 ) | 1.1001 + ( ( triNum & 0x8000 ) >> 8 ); 1.1002 + int FirstVertex = x * (DMA_GridSize+1) + y; 1.1003 + // Another twist - we want the top-left and bottom-right quadrants to 1.1004 + // have the triangles split one way, the other two split the other. 1.1005 + // +---+---+---+---+ 1.1006 + // | /| /|\ |\ | 1.1007 + // | / | / | \ | \ | 1.1008 + // |/ |/ | \| \| 1.1009 + // +---+---+---+---+ 1.1010 + // | /| /|\ |\ | 1.1011 + // | / | / | \ | \ | 1.1012 + // |/ |/ | \| \| 1.1013 + // +---+---+---+---+ 1.1014 + // |\ |\ | /| /| 1.1015 + // | \ | \ | / | / | 1.1016 + // | \| \|/ |/ | 1.1017 + // +---+---+---+---+ 1.1018 + // |\ |\ | /| /| 1.1019 + // | \ | \ | / | / | 1.1020 + // | \| \|/ |/ | 1.1021 + // +---+---+---+---+ 1.1022 + // This way triangle edges don't span long distances over the distortion function, 1.1023 + // so linear interpolation works better & we can use fewer tris. 1.1024 + if ( ( x < DMA_GridSize/2 ) != ( y < DMA_GridSize/2 ) ) // != is logical XOR 1.1025 + { 1.1026 + *pcurIndex++ = (uint16_t)FirstVertex; 1.1027 + *pcurIndex++ = (uint16_t)FirstVertex+1; 1.1028 + *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1)+1; 1.1029 + 1.1030 + *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1)+1; 1.1031 + *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1); 1.1032 + *pcurIndex++ = (uint16_t)FirstVertex; 1.1033 + } 1.1034 + else 1.1035 + { 1.1036 + *pcurIndex++ = (uint16_t)FirstVertex; 1.1037 + *pcurIndex++ = (uint16_t)FirstVertex+1; 1.1038 + *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1); 1.1039 + 1.1040 + *pcurIndex++ = (uint16_t)FirstVertex+1; 1.1041 + *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1)+1; 1.1042 + *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1); 1.1043 + } 1.1044 + } 1.1045 +} 1.1046 + 1.1047 +//----------------------------------------------------------------------------------- 1.1048 +// ***** Heightmap Mesh Rendering 1.1049 + 1.1050 + 1.1051 +static const int HMA_GridSizeLog2 = 7; 1.1052 +static const int HMA_GridSize = 1<<HMA_GridSizeLog2; 1.1053 +static const int HMA_NumVertsPerEye = (HMA_GridSize+1)*(HMA_GridSize+1); 1.1054 +static const int HMA_NumTrisPerEye = (HMA_GridSize)*(HMA_GridSize)*2; 1.1055 + 1.1056 + 1.1057 +void HeightmapMeshDestroy ( HeightmapMeshVertexData *pVertices, uint16_t *pTriangleMeshIndices ) 1.1058 +{ 1.1059 + OVR_FREE ( pVertices ); 1.1060 + OVR_FREE ( pTriangleMeshIndices ); 1.1061 +} 1.1062 + 1.1063 +void HeightmapMeshCreate ( HeightmapMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices, 1.1064 + int *pNumVertices, int *pNumTriangles, 1.1065 + const StereoEyeParams &stereoParams, const HmdRenderInfo &hmdRenderInfo ) 1.1066 +{ 1.1067 + bool rightEye = ( stereoParams.Eye == StereoEye_Right ); 1.1068 + int vertexCount = 0; 1.1069 + int triangleCount = 0; 1.1070 + 1.1071 + // Generate mesh into allocated data and 
return result. 1.1072 + HeightmapMeshCreate(ppVertices, ppTriangleListIndices, &vertexCount, &triangleCount, 1.1073 + rightEye, hmdRenderInfo, stereoParams.EyeToSourceNDC); 1.1074 + 1.1075 + *pNumVertices = vertexCount; 1.1076 + *pNumTriangles = triangleCount; 1.1077 +} 1.1078 + 1.1079 + 1.1080 +// Generate heightmap mesh for one eye. 1.1081 +void HeightmapMeshCreate( HeightmapMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices, 1.1082 + int *pNumVertices, int *pNumTriangles, bool rightEye, 1.1083 + const HmdRenderInfo &hmdRenderInfo, 1.1084 + const ScaleAndOffset2D &eyeToSourceNDC ) 1.1085 +{ 1.1086 + *pNumVertices = HMA_NumVertsPerEye; 1.1087 + *pNumTriangles = HMA_NumTrisPerEye; 1.1088 + 1.1089 + *ppVertices = (HeightmapMeshVertexData*) OVR_ALLOC( sizeof(HeightmapMeshVertexData) * (*pNumVertices) ); 1.1090 + *ppTriangleListIndices = (uint16_t*) OVR_ALLOC( sizeof(uint16_t) * (*pNumTriangles) * 3 ); 1.1091 + 1.1092 + if (!*ppVertices || !*ppTriangleListIndices) 1.1093 + { 1.1094 + if (*ppVertices) 1.1095 + { 1.1096 + OVR_FREE(*ppVertices); 1.1097 + } 1.1098 + if (*ppTriangleListIndices) 1.1099 + { 1.1100 + OVR_FREE(*ppTriangleListIndices); 1.1101 + } 1.1102 + *ppVertices = NULL; 1.1103 + *ppTriangleListIndices = NULL; 1.1104 + *pNumTriangles = 0; 1.1105 + *pNumVertices = 0; 1.1106 + return; 1.1107 + } 1.1108 + 1.1109 + // Populate vertex buffer info 1.1110 + // float xOffset = (rightEye ? 1.0f : 0.0f); Currently disabled because its usage is disabled below. 1.1111 + 1.1112 + // First pass - build up raw vertex data. 1.1113 + HeightmapMeshVertexData* pcurVert = *ppVertices; 1.1114 + 1.1115 + for ( int y = 0; y <= HMA_GridSize; y++ ) 1.1116 + { 1.1117 + for ( int x = 0; x <= HMA_GridSize; x++ ) 1.1118 + { 1.1119 + Vector2f sourceCoordNDC; 1.1120 + // NDC texture coords [-1,+1] 1.1121 + sourceCoordNDC.x = 2.0f * ( (float)x / (float)HMA_GridSize ) - 1.0f; 1.1122 + sourceCoordNDC.y = 2.0f * ( (float)y / (float)HMA_GridSize ) - 1.0f; 1.1123 + Vector2f tanEyeAngle = TransformRendertargetNDCToTanFovSpace ( eyeToSourceNDC, sourceCoordNDC ); 1.1124 + 1.1125 + pcurVert->TanEyeAngles = tanEyeAngle; 1.1126 + 1.1127 + HmdShutterTypeEnum shutterType = hmdRenderInfo.Shutter.Type; 1.1128 + switch ( shutterType ) 1.1129 + { 1.1130 + case HmdShutter_Global: 1.1131 + pcurVert->TimewarpLerp = 0.0f; 1.1132 + break; 1.1133 + case HmdShutter_RollingLeftToRight: 1.1134 + // Retrace is left to right - left eye goes 0.0 -> 0.5, then right goes 0.5 -> 1.0 1.1135 + pcurVert->TimewarpLerp = sourceCoordNDC.x * 0.25f + 0.25f; 1.1136 + if (rightEye) 1.1137 + { 1.1138 + pcurVert->TimewarpLerp += 0.5f; 1.1139 + } 1.1140 + break; 1.1141 + case HmdShutter_RollingRightToLeft: 1.1142 + // Retrace is right to left - right eye goes 0.0 -> 0.5, then left goes 0.5 -> 1.0 1.1143 + pcurVert->TimewarpLerp = 0.75f - sourceCoordNDC.x * 0.25f; 1.1144 + if (rightEye) 1.1145 + { 1.1146 + pcurVert->TimewarpLerp -= 0.5f; 1.1147 + } 1.1148 + break; 1.1149 + case HmdShutter_RollingTopToBottom: 1.1150 + // Retrace is top to bottom on both eyes at the same time. 1.1151 + pcurVert->TimewarpLerp = sourceCoordNDC.y * 0.5f + 0.5f; 1.1152 + break; 1.1153 + default: OVR_ASSERT ( false ); break; 1.1154 + } 1.1155 + 1.1156 + // Don't let verts overlap to the other eye. 
1.1157 + //sourceCoordNDC.x = Alg::Max ( -1.0f, Alg::Min ( sourceCoordNDC.x, 1.0f ) ); 1.1158 + //sourceCoordNDC.y = Alg::Max ( -1.0f, Alg::Min ( sourceCoordNDC.y, 1.0f ) ); 1.1159 + 1.1160 + //pcurVert->ScreenPosNDC.x = 0.5f * sourceCoordNDC.x - 0.5f + xOffset; 1.1161 + pcurVert->ScreenPosNDC.x = sourceCoordNDC.x; 1.1162 + pcurVert->ScreenPosNDC.y = -sourceCoordNDC.y; 1.1163 + 1.1164 + pcurVert++; 1.1165 + } 1.1166 + } 1.1167 + 1.1168 + 1.1169 + // Populate index buffer info 1.1170 + uint16_t *pcurIndex = *ppTriangleListIndices; 1.1171 + 1.1172 + for ( int triNum = 0; triNum < HMA_GridSize * HMA_GridSize; triNum++ ) 1.1173 + { 1.1174 + // Use a Morton order to help locality of FB, texture and vertex cache. 1.1175 + // (0.325ms raster order -> 0.257ms Morton order) 1.1176 + OVR_ASSERT ( HMA_GridSize < 256 ); 1.1177 + int x = ( ( triNum & 0x0001 ) >> 0 ) | 1.1178 + ( ( triNum & 0x0004 ) >> 1 ) | 1.1179 + ( ( triNum & 0x0010 ) >> 2 ) | 1.1180 + ( ( triNum & 0x0040 ) >> 3 ) | 1.1181 + ( ( triNum & 0x0100 ) >> 4 ) | 1.1182 + ( ( triNum & 0x0400 ) >> 5 ) | 1.1183 + ( ( triNum & 0x1000 ) >> 6 ) | 1.1184 + ( ( triNum & 0x4000 ) >> 7 ); 1.1185 + int y = ( ( triNum & 0x0002 ) >> 1 ) | 1.1186 + ( ( triNum & 0x0008 ) >> 2 ) | 1.1187 + ( ( triNum & 0x0020 ) >> 3 ) | 1.1188 + ( ( triNum & 0x0080 ) >> 4 ) | 1.1189 + ( ( triNum & 0x0200 ) >> 5 ) | 1.1190 + ( ( triNum & 0x0800 ) >> 6 ) | 1.1191 + ( ( triNum & 0x2000 ) >> 7 ) | 1.1192 + ( ( triNum & 0x8000 ) >> 8 ); 1.1193 + int FirstVertex = x * (HMA_GridSize+1) + y; 1.1194 + // Another twist - we want the top-left and bottom-right quadrants to 1.1195 + // have the triangles split one way, the other two split the other. 1.1196 + // +---+---+---+---+ 1.1197 + // | /| /|\ |\ | 1.1198 + // | / | / | \ | \ | 1.1199 + // |/ |/ | \| \| 1.1200 + // +---+---+---+---+ 1.1201 + // | /| /|\ |\ | 1.1202 + // | / | / | \ | \ | 1.1203 + // |/ |/ | \| \| 1.1204 + // +---+---+---+---+ 1.1205 + // |\ |\ | /| /| 1.1206 + // | \ | \ | / | / | 1.1207 + // | \| \|/ |/ | 1.1208 + // +---+---+---+---+ 1.1209 + // |\ |\ | /| /| 1.1210 + // | \ | \ | / | / | 1.1211 + // | \| \|/ |/ | 1.1212 + // +---+---+---+---+ 1.1213 + // This way triangle edges don't span long distances over the distortion function, 1.1214 + // so linear interpolation works better & we can use fewer tris. 1.1215 + if ( ( x < HMA_GridSize/2 ) != ( y < HMA_GridSize/2 ) ) // != is logical XOR 1.1216 + { 1.1217 + *pcurIndex++ = (uint16_t)FirstVertex; 1.1218 + *pcurIndex++ = (uint16_t)FirstVertex+1; 1.1219 + *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1)+1; 1.1220 + 1.1221 + *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1)+1; 1.1222 + *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1); 1.1223 + *pcurIndex++ = (uint16_t)FirstVertex; 1.1224 + } 1.1225 + else 1.1226 + { 1.1227 + *pcurIndex++ = (uint16_t)FirstVertex; 1.1228 + *pcurIndex++ = (uint16_t)FirstVertex+1; 1.1229 + *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1); 1.1230 + 1.1231 + *pcurIndex++ = (uint16_t)FirstVertex+1; 1.1232 + *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1)+1; 1.1233 + *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1); 1.1234 + } 1.1235 + } 1.1236 +} 1.1237 + 1.1238 +//----------------------------------------------------------------------------------- 1.1239 +// ***** Prediction and timewarp. 1.1240 +// 1.1241 + 1.1242 +// Calculates the values from the HMD info. 
1.1243 +PredictionValues PredictionGetDeviceValues ( const HmdRenderInfo &hmdRenderInfo, 1.1244 + bool withTimewarp /*= true*/, 1.1245 + bool withVsync /*= true*/ ) 1.1246 +{ 1.1247 + PredictionValues result; 1.1248 + 1.1249 + result.WithTimewarp = withTimewarp; 1.1250 + result.WithVsync = withVsync; 1.1251 + 1.1252 + // For unclear reasons, most graphics systems add an extra frame of latency 1.1253 + // somewhere along the way. In time we'll debug this and figure it out, but 1.1254 + // for now this gets prediction a little bit better. 1.1255 + const float extraFramesOfBufferingKludge = 1.0f; 1.1256 + 1.1257 + if ( withVsync ) 1.1258 + { 1.1259 + // These are the times from the Present+Flush to when the middle of the scene is "averagely visible" (without timewarp) 1.1260 + // So if you had no timewarp, this, plus the time until the next vsync, is how much to predict by. 1.1261 + result.PresentFlushToRenderedScene = extraFramesOfBufferingKludge * hmdRenderInfo.Shutter.FirstScanlineToLastScanline; 1.1262 + // Predict to the middle of the screen being scanned out. 1.1263 + result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.VsyncToFirstScanline + 0.5f * hmdRenderInfo.Shutter.FirstScanlineToLastScanline; 1.1264 + // Time for pixels to get half-way to settling. 1.1265 + result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.PixelSettleTime * 0.5f; 1.1266 + // Predict to half-way through persistence 1.1267 + result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.PixelPersistence * 0.5f; 1.1268 + 1.1269 + // The time from the Present+Flush to when the first scanline is "averagely visible". 1.1270 + result.PresentFlushToTimewarpStart = extraFramesOfBufferingKludge * hmdRenderInfo.Shutter.FirstScanlineToLastScanline; 1.1271 + // Predict to the first line being scanned out. 1.1272 + result.PresentFlushToTimewarpStart += hmdRenderInfo.Shutter.VsyncToFirstScanline; 1.1273 + // Time for pixels to get half-way to settling. 1.1274 + result.PresentFlushToTimewarpStart += hmdRenderInfo.Shutter.PixelSettleTime * 0.5f; 1.1275 + // Predict to half-way through persistence 1.1276 + result.PresentFlushToTimewarpStart += hmdRenderInfo.Shutter.PixelPersistence * 0.5f; 1.1277 + 1.1278 + // Time to the the last scanline. 1.1279 + result.PresentFlushToTimewarpEnd = result.PresentFlushToTimewarpStart + hmdRenderInfo.Shutter.FirstScanlineToLastScanline; 1.1280 + 1.1281 + // Ideal framerate. 1.1282 + result.PresentFlushToPresentFlush = hmdRenderInfo.Shutter.VsyncToNextVsync; 1.1283 + } 1.1284 + else 1.1285 + { 1.1286 + // Timewarp without vsync is a little odd. 1.1287 + // Currently, we assume that without vsync, we have no idea which scanline 1.1288 + // is currently being sent to the display. So we can't do lerping timewarp, 1.1289 + // we can just do a full-screen late-stage fixup. 1.1290 + 1.1291 + // "PresentFlushToRenderedScene" means the time from the Present+Flush to when the middle of the scene is "averagely visible" (without timewarp) 1.1292 + // So if you had no timewarp, this, plus the time until the next flush (which is usually the time to render the frame), is how much to predict by. 1.1293 + // Time for pixels to get half-way to settling. 1.1294 + result.PresentFlushToRenderedScene = hmdRenderInfo.Shutter.PixelSettleTime * 0.5f; 1.1295 + // Predict to half-way through persistence 1.1296 + result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.PixelPersistence * 0.5f; 1.1297 + 1.1298 + // Without vsync, you don't know timings, and so can't do anything useful with lerped warping. 
1.1299 + result.PresentFlushToTimewarpStart = result.PresentFlushToRenderedScene; 1.1300 + result.PresentFlushToTimewarpEnd = result.PresentFlushToRenderedScene; 1.1301 + 1.1302 + // There's no concept of "ideal" when vsync is off. 1.1303 + result.PresentFlushToPresentFlush = 0.0f; 1.1304 + } 1.1305 + 1.1306 + return result; 1.1307 +} 1.1308 + 1.1309 +Matrix4f TimewarpComputePoseDelta ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&hmdToEyeViewOffset ) 1.1310 +{ 1.1311 + Matrix4f worldFromPredictedView = (hmdToEyeViewOffset * predictedViewFromWorld).InvertedHomogeneousTransform(); 1.1312 + Matrix4f matRenderFromNowStart = (hmdToEyeViewOffset * renderedViewFromWorld) * worldFromPredictedView; 1.1313 + 1.1314 + // The sensor-predicted orientations have: X=right, Y=up, Z=backwards. 1.1315 + // The vectors inside the mesh are in NDC to keep the shader simple: X=right, Y=down, Z=forwards. 1.1316 + // So we need to perform a similarity transform on this delta matrix. 1.1317 + // The verbose code would look like this: 1.1318 + /* 1.1319 + Matrix4f matBasisChange; 1.1320 + matBasisChange.SetIdentity(); 1.1321 + matBasisChange.M[0][0] = 1.0f; 1.1322 + matBasisChange.M[1][1] = -1.0f; 1.1323 + matBasisChange.M[2][2] = -1.0f; 1.1324 + Matrix4f matBasisChangeInv = matBasisChange.Inverted(); 1.1325 + matRenderFromNow = matBasisChangeInv * matRenderFromNow * matBasisChange; 1.1326 + */ 1.1327 + // ...but of course all the above is a constant transform and much more easily done. 1.1328 + // We flip the signs of the Y&Z row, then flip the signs of the Y&Z column, 1.1329 + // and of course most of the flips cancel: 1.1330 + // +++ +-- +-- 1.1331 + // +++ -> flip Y&Z columns -> +-- -> flip Y&Z rows -> -++ 1.1332 + // +++ +-- -++ 1.1333 + matRenderFromNowStart.M[0][1] = -matRenderFromNowStart.M[0][1]; 1.1334 + matRenderFromNowStart.M[0][2] = -matRenderFromNowStart.M[0][2]; 1.1335 + matRenderFromNowStart.M[1][0] = -matRenderFromNowStart.M[1][0]; 1.1336 + matRenderFromNowStart.M[2][0] = -matRenderFromNowStart.M[2][0]; 1.1337 + matRenderFromNowStart.M[1][3] = -matRenderFromNowStart.M[1][3]; 1.1338 + matRenderFromNowStart.M[2][3] = -matRenderFromNowStart.M[2][3]; 1.1339 + 1.1340 + return matRenderFromNowStart; 1.1341 +} 1.1342 + 1.1343 +Matrix4f TimewarpComputePoseDeltaPosition ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&hmdToEyeViewOffset ) 1.1344 +{ 1.1345 + Matrix4f worldFromPredictedView = (hmdToEyeViewOffset * predictedViewFromWorld).InvertedHomogeneousTransform(); 1.1346 + Matrix4f matRenderXform = (hmdToEyeViewOffset * renderedViewFromWorld) * worldFromPredictedView; 1.1347 + 1.1348 + return matRenderXform.Inverted(); 1.1349 +} 1.1350 + 1.1351 +TimewarpMachine::TimewarpMachine() 1.1352 + : VsyncEnabled(false), 1.1353 + RenderInfo(), 1.1354 + CurrentPredictionValues(), 1.1355 + DistortionTimeCount(0), 1.1356 + DistortionTimeCurrentStart(0.0), 1.1357 + //DistortionTimes[], 1.1358 + DistortionTimeAverage(0.f), 1.1359 + //EyeRenderPoses[], 1.1360 + LastFramePresentFlushTime(0.0), 1.1361 + PresentFlushToPresentFlushSeconds(0.f), 1.1362 + NextFramePresentFlushTime(0.0) 1.1363 +{ 1.1364 + #if defined(OVR_BUILD_DEBUG) 1.1365 + memset(DistortionTimes, 0, sizeof(DistortionTimes)); 1.1366 + #endif 1.1367 + 1.1368 + for ( int i = 0; i < 2; i++ ) 1.1369 + { 1.1370 + EyeRenderPoses[i] = Posef(); 1.1371 + } 1.1372 +} 1.1373 + 1.1374 +void TimewarpMachine::Reset(HmdRenderInfo& renderInfo, bool vsyncEnabled, double timeNow) 
1.1351 +TimewarpMachine::TimewarpMachine()
1.1352 +  : VsyncEnabled(false),
1.1353 +    RenderInfo(),
1.1354 +    CurrentPredictionValues(),
1.1355 +    DistortionTimeCount(0),
1.1356 +    DistortionTimeCurrentStart(0.0),
1.1357 +    //DistortionTimes[],
1.1358 +    DistortionTimeAverage(0.f),
1.1359 +    //EyeRenderPoses[],
1.1360 +    LastFramePresentFlushTime(0.0),
1.1361 +    PresentFlushToPresentFlushSeconds(0.f),
1.1362 +    NextFramePresentFlushTime(0.0)
1.1363 +{
1.1364 +    #if defined(OVR_BUILD_DEBUG)
1.1365 +    memset(DistortionTimes, 0, sizeof(DistortionTimes));
1.1366 +    #endif
1.1367 +
1.1368 +    for ( int i = 0; i < 2; i++ )
1.1369 +    {
1.1370 +        EyeRenderPoses[i] = Posef();
1.1371 +    }
1.1372 +}
1.1373 +
1.1374 +void TimewarpMachine::Reset(HmdRenderInfo& renderInfo, bool vsyncEnabled, double timeNow)
1.1375 +{
1.1376 +    RenderInfo = renderInfo;
1.1377 +    VsyncEnabled = vsyncEnabled;
1.1378 +    CurrentPredictionValues = PredictionGetDeviceValues ( renderInfo, true, VsyncEnabled );
1.1379 +    PresentFlushToPresentFlushSeconds = 0.0f;
1.1380 +    DistortionTimeCount = 0;
1.1381 +    DistortionTimeAverage = 0.0f;
1.1382 +    LastFramePresentFlushTime = timeNow;
1.1383 +    AfterPresentAndFlush(timeNow);
1.1384 +}
1.1385 +
1.1386 +void TimewarpMachine::AfterPresentAndFlush(double timeNow)
1.1387 +{
1.1388 +    AfterPresentWithoutFlush();
1.1389 +    AfterPresentFinishes ( timeNow );
1.1390 +}
1.1391 +
1.1392 +void TimewarpMachine::AfterPresentWithoutFlush()
1.1393 +{
1.1394 +    // We've only issued the Present - it hasn't actually finished (i.e. appeared).
1.1395 +    // But we need to estimate when the next Present will appear, so extrapolate from previous data.
1.1396 +    NextFramePresentFlushTime = LastFramePresentFlushTime + 2.0 * (double)PresentFlushToPresentFlushSeconds;
1.1397 +}
1.1398 +
1.1399 +void TimewarpMachine::AfterPresentFinishes(double timeNow)
1.1400 +{
1.1401 +    // The present has now actually happened.
1.1402 +    PresentFlushToPresentFlushSeconds = (float)(timeNow - LastFramePresentFlushTime);
1.1403 +    LastFramePresentFlushTime = timeNow;
1.1404 +    NextFramePresentFlushTime = timeNow + (double)PresentFlushToPresentFlushSeconds;
1.1405 +}
1.1406 +
1.1407 +
1.1408 +
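A small worked sketch of the two estimates above, not part of the original diff, using a made-up ~75Hz frame period; both paths land on the same next-flush estimate once the flush is confirmed (plain C++, only <assert.h> assumed):

    #include <assert.h>

    static void NextFlushEstimateExample()
    {
        double lastConfirmedFlush  = 10.0;     // seconds: time of the previous frame's confirmed flush
        float  flushToFlushSeconds = 0.0133f;  // ~75Hz, as AfterPresentFinishes() would have measured

        // AfterPresentWithoutFlush(): the Present just issued hasn't appeared yet, so it is
        // expected one period after the last confirmed flush, and the frame now being built
        // one period after that - i.e. two periods past the last confirmed flush.
        double estimateBeforeFlush = lastConfirmedFlush + 2.0 * (double)flushToFlushSeconds;

        // AfterPresentFinishes(timeNow): the flush is now confirmed, and the next one is
        // expected one measured period later.
        double timeNow             = lastConfirmedFlush + (double)flushToFlushSeconds;
        double estimateAfterFlush  = timeNow + (double)flushToFlushSeconds;

        // Both estimates agree (to floating-point error).
        double d = estimateBeforeFlush - estimateAfterFlush;
        assert ( d * d < 1.0e-12 );
    }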
1.1409 +double TimewarpMachine::GetViewRenderPredictionTime()
1.1410 +{
1.1411 +    // Note that PredictionGetDeviceValues() did all the vsync-dependent thinking for us.
1.1412 +    return NextFramePresentFlushTime + CurrentPredictionValues.PresentFlushToRenderedScene;
1.1413 +}
1.1414 +
1.1415 +bool TimewarpMachine::GetViewRenderPredictionPose(SensorStateReader* reader, Posef& pose)
1.1416 +{
1.1417 +    return reader->GetPoseAtTime(GetViewRenderPredictionTime(), pose);
1.1418 +}
1.1419 +
1.1420 +double TimewarpMachine::GetVisiblePixelTimeStart()
1.1421 +{
1.1422 +    // Note that PredictionGetDeviceValues() did all the vsync-dependent thinking for us.
1.1423 +    return NextFramePresentFlushTime + CurrentPredictionValues.PresentFlushToTimewarpStart;
1.1424 +}
1.1425 +double TimewarpMachine::GetVisiblePixelTimeEnd()
1.1426 +{
1.1427 +    // Note that PredictionGetDeviceValues() did all the vsync-dependent thinking for us.
1.1428 +    return NextFramePresentFlushTime + CurrentPredictionValues.PresentFlushToTimewarpEnd;
1.1429 +}
1.1430 +bool TimewarpMachine::GetPredictedVisiblePixelPoseStart(SensorStateReader* reader, Posef& pose)
1.1431 +{
1.1432 +    return reader->GetPoseAtTime(GetVisiblePixelTimeStart(), pose);
1.1433 +}
1.1434 +bool TimewarpMachine::GetPredictedVisiblePixelPoseEnd(SensorStateReader* reader, Posef& pose)
1.1435 +{
1.1436 +    return reader->GetPoseAtTime(GetVisiblePixelTimeEnd(), pose);
1.1437 +}
1.1438 +bool TimewarpMachine::GetTimewarpDeltaStart(SensorStateReader* reader, Posef const &renderedPose, Matrix4f& transform)
1.1439 +{
1.1440 +    Posef visiblePose;
1.1441 +    if (!GetPredictedVisiblePixelPoseStart(reader, visiblePose))
1.1442 +    {
1.1443 +        return false;
1.1444 +    }
1.1445 +
1.1446 +    Matrix4f visibleMatrix(visiblePose);
1.1447 +    Matrix4f renderedMatrix(renderedPose);
1.1448 +    Matrix4f identity; // doesn't matter for orientation-only timewarp
1.1449 +    transform = TimewarpComputePoseDelta ( renderedMatrix, visibleMatrix, identity );
1.1450 +
1.1451 +    return true;
1.1452 +}
1.1453 +bool TimewarpMachine::GetTimewarpDeltaEnd(SensorStateReader* reader, Posef const &renderedPose, Matrix4f& transform)
1.1454 +{
1.1455 +    Posef visiblePose;
1.1456 +    if (!GetPredictedVisiblePixelPoseEnd(reader, visiblePose))
1.1457 +    {
1.1458 +        return false;
1.1459 +    }
1.1460 +
1.1461 +    Matrix4f visibleMatrix(visiblePose);
1.1462 +    Matrix4f renderedMatrix(renderedPose);
1.1463 +    Matrix4f identity; // doesn't matter for orientation-only timewarp
1.1464 +    transform = TimewarpComputePoseDelta ( renderedMatrix, visibleMatrix, identity );
1.1465 +
1.1466 +    return true;
1.1467 +}
1.1468 +
1.1469 +
1.1470 +// What time should the app wait until before starting distortion?
1.1471 +double TimewarpMachine::JustInTime_GetDistortionWaitUntilTime()
1.1472 +{
1.1473 +    if ( !VsyncEnabled || ( DistortionTimeCount < NumDistortionTimes ) )
1.1474 +    {
1.1475 +        // Don't wait.
1.1476 +        return LastFramePresentFlushTime;
1.1477 +    }
1.1478 +
1.1479 +    // Note - a 1-2ms fudge factor (because of Windows timer granularity, etc.) is NOT added here,
1.1480 +    // because otherwise you end up adding multiple fudge factors!
1.1481 +    // So it's left to the calling app to add just one fudge factor.
1.1482 +
1.1483 +    float howLongBeforePresent = DistortionTimeAverage;
1.1484 +    // Subtlety here. Technically, the correct time is NextFramePresentFlushTime - howLongBeforePresent.
1.1485 +    // However, if the app drops a frame, this then perpetuates it,
1.1486 +    // i.e. if the display is running at 60fps but the last frame was slow
1.1487 +    // (e.g. because of swapping or whatever), then NextFramePresentFlushTime is
1.1488 +    // 33ms in the future, not 16ms. Since this function supplies the
1.1489 +    // time to wait until, the app will indeed wait ~32ms after the last flush, so the framerate
1.1490 +    // drops to 30fps and never comes back up!
1.1491 +    // So we use the *ideal* frame period, not the *actual* one.
1.1492 +    return LastFramePresentFlushTime + (float)( CurrentPredictionValues.PresentFlushToPresentFlush - howLongBeforePresent );
1.1493 +}
1.1494 +
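A worked sketch of that subtlety, not part of the original diff, using made-up numbers for a 60fps display and a ~2ms distortion pass; it only illustrates why the ideal frame period is used rather than the measured one (plain C++, only <assert.h> assumed):

    #include <assert.h>

    static void WaitUntilTimeExample()
    {
        double lastFlush      = 0.0;      // seconds: LastFramePresentFlushTime
        float  idealPeriod    = 0.0166f;  // CurrentPredictionValues.PresentFlushToPresentFlush (~60fps)
        float  measuredPeriod = 0.0333f;  // the last frame missed vsync, so the measured gap was two periods
        float  distortionTime = 0.0020f;  // DistortionTimeAverage: how long the distortion pass takes

        // Using the measured period (NextFramePresentFlushTime - distortionTime) would tell the
        // app to start distortion ~31ms after the last flush - aiming for the *next* 33ms slot -
        // so a single dropped frame would lock the app at 30fps.
        double waitUsingMeasured = lastFlush + (double)( measuredPeriod - distortionTime );

        // Using the ideal period (what JustInTime_GetDistortionWaitUntilTime() returns) aims for
        // the very next vsync, so the app can recover to 60fps.
        double waitUsingIdeal    = lastFlush + (double)( idealPeriod - distortionTime );

        assert ( waitUsingIdeal < waitUsingMeasured );
    }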
1.1495 +double TimewarpMachine::JustInTime_AverageDistortionTime()
1.1496 +{
1.1497 +    if ( JustInTime_NeedDistortionTimeMeasurement() )
1.1498 +    {
1.1499 +        return 0.0;
1.1500 +    }
1.1501 +    return DistortionTimeAverage;
1.1502 +}
1.1503 +
1.1504 +bool TimewarpMachine::JustInTime_NeedDistortionTimeMeasurement() const
1.1505 +{
1.1506 +    if (!VsyncEnabled)
1.1507 +    {
1.1508 +        return false;
1.1509 +    }
1.1510 +    return ( DistortionTimeCount < NumDistortionTimes );
1.1511 +}
1.1512 +
1.1513 +void TimewarpMachine::JustInTime_BeforeDistortionTimeMeasurement(double timeNow)
1.1514 +{
1.1515 +    DistortionTimeCurrentStart = timeNow;
1.1516 +}
1.1517 +
1.1518 +void TimewarpMachine::JustInTime_AfterDistortionTimeMeasurement(double timeNow)
1.1519 +{
1.1520 +    float timeDelta = (float)( timeNow - DistortionTimeCurrentStart );
1.1521 +    if ( DistortionTimeCount < NumDistortionTimes )
1.1522 +    {
1.1523 +        DistortionTimes[DistortionTimeCount] = timeDelta;
1.1524 +        DistortionTimeCount++;
1.1525 +        if ( DistortionTimeCount == NumDistortionTimes )
1.1526 +        {
1.1527 +            // Median: repeatedly discard the current maximum; after NumDistortionTimes/2
1.1528 +            // passes, the last value discarded is the (upper) median of the samples.
1.1529 +            float distortionTimeMedian = 0.0f;
1.1530 +            for ( int i = 0; i < NumDistortionTimes/2; i++ )
1.1531 +            {
1.1532 +                // Find the maximum time of those remaining.
1.1533 +                float maxTime = DistortionTimes[0];
1.1534 +                int maxIndex = 0;
1.1535 +                for ( int j = 1; j < NumDistortionTimes; j++ )
1.1536 +                {
1.1537 +                    if ( maxTime < DistortionTimes[j] )
1.1538 +                    {
1.1539 +                        maxTime = DistortionTimes[j];
1.1540 +                        maxIndex = j;
1.1541 +                    }
1.1542 +                }
1.1543 +                // Zero that max time, so we'll find the next-highest time.
1.1544 +                DistortionTimes[maxIndex] = 0.0f;
1.1545 +                distortionTimeMedian = maxTime;
1.1546 +            }
1.1547 +            DistortionTimeAverage = distortionTimeMedian;
1.1548 +        }
1.1549 +    }
1.1550 +    else
1.1551 +    {
1.1552 +        OVR_ASSERT ( !"Really didn't need more measurements, thanks" );
1.1553 +    }
1.1554 +}
1.1555 +
1.1556 +
1.1557 +}}} // OVR::Util::Render
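Finally, a minimal per-frame sketch, not part of the SDK, showing one plausible call order for the TimewarpMachine API defined above. RenderSceneForEye(), RunDistortion(), Present() and WaitUntil() are hypothetical placeholders for the app's own rendering, present and timing code; the declarations from Util_Render_Stereo.h are assumed to be in scope.

    #include "Util_Render_Stereo.h"

    using namespace OVR;
    using namespace OVR::Util::Render;
    using namespace OVR::Tracking;

    void RenderOneFrameSketch ( TimewarpMachine &tw, SensorStateReader *reader, double (*timeNow)() )
    {
        // 1. Predict the head pose for the time the rendered scene will be visible, and render with it.
        Posef renderPose;
        if ( !tw.GetViewRenderPredictionPose ( reader, renderPose ) )
        {
            return;
        }
        // RenderSceneForEye ( renderPose );                           // hypothetical app call

        // 2. Optionally wait so distortion + timewarp run just before scanout
        //    (the app is expected to add its own scheduling fudge factor).
        // WaitUntil ( tw.JustInTime_GetDistortionWaitUntilTime() );   // hypothetical app call

        // 3. Time the distortion pass while the machine still needs measurements.
        bool needTiming = tw.JustInTime_NeedDistortionTimeMeasurement();
        if ( needTiming ) { tw.JustInTime_BeforeDistortionTimeMeasurement ( timeNow() ); }

        Matrix4f startDelta, endDelta;
        tw.GetTimewarpDeltaStart ( reader, renderPose, startDelta );
        tw.GetTimewarpDeltaEnd   ( reader, renderPose, endDelta );
        // RunDistortion ( startDelta, endDelta );                     // hypothetical app call

        if ( needTiming ) { tw.JustInTime_AfterDistortionTimeMeasurement ( timeNow() ); }

        // 4. Present, then tell the machine the flush happened so it can update its estimates.
        // Present();                                                  // hypothetical app call
        tw.AfterPresentAndFlush ( timeNow() );
    }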