/************************************************************************************

Filename    :   Util_Render_Stereo.cpp
Content     :   Stereo rendering configuration implementation
Created     :   October 22, 2012
Authors     :   Michael Antonov, Andrew Reisse, Tom Forsyth

Copyright   :   Copyright 2014 Oculus VR, LLC All Rights reserved.

Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.

You may obtain a copy of the License at

http://www.oculusvr.com/licenses/LICENSE-3.2

Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

*************************************************************************************/

#include "Util_Render_Stereo.h"

namespace OVR { namespace Util { namespace Render {

using namespace OVR::Tracking;


//-----------------------------------------------------------------------------------
// **** Useful debug functions.

char const* GetDebugNameEyeCupType ( EyeCupType eyeCupType )
{
    switch ( eyeCupType )
    {
    case EyeCup_DK1A:        return "DK1 A";
    case EyeCup_DK1B:        return "DK1 B";
    case EyeCup_DK1C:        return "DK1 C";
    case EyeCup_DKHD2A:      return "DKHD2 A";
    case EyeCup_OrangeA:     return "Orange A";
    case EyeCup_RedA:        return "Red A";
    case EyeCup_PinkA:       return "Pink A";
    case EyeCup_BlueA:       return "Blue A";
    case EyeCup_Delilah1A:   return "Delilah 1 A";
    case EyeCup_Delilah2A:   return "Delilah 2 A";
    case EyeCup_JamesA:      return "James A";
    case EyeCup_SunMandalaA: return "Sun Mandala A";
    case EyeCup_DK2A:        return "DK2 A";
    case EyeCup_LAST:        return "LAST";
    default: OVR_ASSERT ( false ); return "Error";
    }
}

char const* GetDebugNameHmdType ( HmdTypeEnum hmdType )
{
    switch ( hmdType )
    {
    case HmdType_None:             return "None";
    case HmdType_DK1:              return "DK1";
    case HmdType_DKProto:          return "DK1 prototype";
    case HmdType_DKHDProto:        return "DK HD prototype 1";
    case HmdType_DKHDProto566Mi:   return "DK HD prototype 566 Mi";
    case HmdType_DKHD2Proto:       return "DK HD prototype 585";
    case HmdType_CrystalCoveProto: return "Crystal Cove";
    case HmdType_DK2:              return "DK2";
    case HmdType_Unknown:          return "Unknown";
    case HmdType_LAST:             return "LAST";
    default: OVR_ASSERT ( false ); return "Error";
    }
}

//-----------------------------------------------------------------------------------
// **** Internal pipeline functions.

struct DistortionAndFov
{
    DistortionRenderDesc Distortion;
    FovPort              Fov;
};

static DistortionAndFov CalculateDistortionAndFovInternal ( StereoEye eyeType, HmdRenderInfo const &hmd,
                                                            LensConfig const *pLensOverride = NULL,
                                                            FovPort const *pTanHalfFovOverride = NULL,
                                                            float extraEyeRotationInRadians = OVR_DEFAULT_EXTRA_EYE_ROTATION )
{
    // pLensOverride can be NULL, which means no override.

    DistortionRenderDesc localDistortion = CalculateDistortionRenderDesc ( eyeType, hmd, pLensOverride );
    FovPort fov = CalculateFovFromHmdInfo ( eyeType, localDistortion, hmd, extraEyeRotationInRadians );
    // Here the app or the user would optionally clamp this visible fov to a smaller number if
    // they want more perf or resolution and are willing to give up FOV.
    // They may also choose to clamp UDLR differently e.g. to get cinemascope-style views.
    if ( pTanHalfFovOverride != NULL )
    {
        fov = *pTanHalfFovOverride;
    }

    // Here we could call ClampToPhysicalScreenFov(), but we do want people
    // to be able to play with larger-than-screen views.
    // The calling app can always do the clamping itself.
    DistortionAndFov result;
    result.Distortion = localDistortion;
    result.Fov        = fov;

    return result;
}

static Recti CalculateViewportInternal ( StereoEye eyeType,
                                         Sizei const actualRendertargetSurfaceSize,
                                         Sizei const requestedRenderedPixelSize,
                                         bool bRendertargetSharedByBothEyes,
                                         bool bMonoRenderingMode = false )
{
    Recti renderedViewport;
    if ( bMonoRenderingMode || !bRendertargetSharedByBothEyes || (eyeType == StereoEye_Center) )
    {
        // One eye per RT.
        renderedViewport.x = 0;
        renderedViewport.y = 0;
        renderedViewport.w = Alg::Min ( actualRendertargetSurfaceSize.w, requestedRenderedPixelSize.w );
        renderedViewport.h = Alg::Min ( actualRendertargetSurfaceSize.h, requestedRenderedPixelSize.h );
    }
    else
    {
        // Both eyes share the RT.
        renderedViewport.x = 0;
        renderedViewport.y = 0;
        renderedViewport.w = Alg::Min ( actualRendertargetSurfaceSize.w/2, requestedRenderedPixelSize.w );
        renderedViewport.h = Alg::Min ( actualRendertargetSurfaceSize.h,   requestedRenderedPixelSize.h );
        if ( eyeType == StereoEye_Right )
        {
            renderedViewport.x = (actualRendertargetSurfaceSize.w+1)/2;     // Round up, not down.
        }
    }
    return renderedViewport;
}

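// Worked example (illustrative only, not part of the SDK): with a 1920x1080 render
// target shared by both eyes and a requested per-eye size of 1000x1100, each eye's
// viewport is clamped to min(1920/2, 1000) x min(1080, 1100) = 960x1080, and the
// right eye's viewport starts at x = (1920+1)/2 = 960.
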
static Recti CalculateViewportDensityInternal ( StereoEye eyeType,
                                                DistortionRenderDesc const &distortion,
                                                FovPort const &fov,
                                                Sizei const &actualRendertargetSurfaceSize,
                                                bool bRendertargetSharedByBothEyes,
                                                float desiredPixelDensity = 1.0f,
                                                bool bMonoRenderingMode = false )
{
    OVR_ASSERT ( actualRendertargetSurfaceSize.w > 0 );
    OVR_ASSERT ( actualRendertargetSurfaceSize.h > 0 );

    // What size RT do we need to get 1:1 mapping?
    Sizei idealPixelSize = CalculateIdealPixelSize ( eyeType, distortion, fov, desiredPixelDensity );
    // ...but we might not actually get that size.
    return CalculateViewportInternal ( eyeType,
                                       actualRendertargetSurfaceSize,
                                       idealPixelSize,
                                       bRendertargetSharedByBothEyes, bMonoRenderingMode );
}

static ViewportScaleAndOffset CalculateViewportScaleAndOffsetInternal (
                                    ScaleAndOffset2D const &eyeToSourceNDC,
                                    Recti const &renderedViewport,
                                    Sizei const &actualRendertargetSurfaceSize )
{
    ViewportScaleAndOffset result;
    result.RenderedViewport = renderedViewport;
    result.EyeToSourceUV = CreateUVScaleAndOffsetfromNDCScaleandOffset(
                                eyeToSourceNDC, renderedViewport, actualRendertargetSurfaceSize );
    return result;
}

static StereoEyeParams CalculateStereoEyeParamsInternal ( StereoEye eyeType, HmdRenderInfo const &hmd,
                                                          DistortionRenderDesc const &distortion,
                                                          FovPort const &fov,
                                                          Sizei const &actualRendertargetSurfaceSize,
                                                          Recti const &renderedViewport,
                                                          bool bRightHanded = true, float zNear = 0.01f, float zFar = 10000.0f,
                                                          bool bMonoRenderingMode = false,
                                                          float zoomFactor = 1.0f )
{
    // Generate the projection matrix for intermediate rendertarget.
    // Z range can also be inserted later by the app (though not in this particular case)
    float fovScale = 1.0f / zoomFactor;
    FovPort zoomedFov = fov;
    zoomedFov.LeftTan  *= fovScale;
    zoomedFov.RightTan *= fovScale;
    zoomedFov.UpTan    *= fovScale;
    zoomedFov.DownTan  *= fovScale;
    Matrix4f projection = CreateProjection ( bRightHanded, zoomedFov, zNear, zFar );

    // Find the mapping from TanAngle space to target NDC space.
    // Note this does NOT take the zoom factor into account because
    // this is the mapping of actual physical eye FOV (and our eyes do not zoom!)
    // to screen space.
    ScaleAndOffset2D eyeToSourceNDC = CreateNDCScaleAndOffsetFromFov ( fov );

    // The size of the final FB, which is fixed and determined by the physical size of the device display.
    Recti distortedViewport = GetFramebufferViewport ( eyeType, hmd );
    Vector3f virtualCameraOffset = CalculateEyeVirtualCameraOffset(hmd, eyeType, bMonoRenderingMode);

    StereoEyeParams result;
    result.Eye                = eyeType;
    result.HmdToEyeViewOffset = Matrix4f::Translation(virtualCameraOffset);
    result.Distortion         = distortion;
    result.DistortionViewport = distortedViewport;
    result.Fov                = fov;
    result.RenderedProjection = projection;
    result.EyeToSourceNDC     = eyeToSourceNDC;
    ViewportScaleAndOffset vsao = CalculateViewportScaleAndOffsetInternal ( eyeToSourceNDC, renderedViewport, actualRendertargetSurfaceSize );
    result.RenderedViewport   = vsao.RenderedViewport;
    result.EyeToSourceUV      = vsao.EyeToSourceUV;

    return result;
}

Vector3f CalculateEyeVirtualCameraOffset(HmdRenderInfo const &hmd,
                                         StereoEye eyeType, bool bmonoRenderingMode)
{
    Vector3f virtualCameraOffset(0);

    if (!bmonoRenderingMode)
    {
        float eyeCenterRelief = hmd.GetEyeCenter().ReliefInMeters;

        if (eyeType == StereoEye_Left)
        {
            virtualCameraOffset.x = hmd.EyeLeft.NoseToPupilInMeters;
            virtualCameraOffset.z = eyeCenterRelief - hmd.EyeLeft.ReliefInMeters;
        }
        else if (eyeType == StereoEye_Right)
        {
            virtualCameraOffset.x = -hmd.EyeRight.NoseToPupilInMeters;
            virtualCameraOffset.z = eyeCenterRelief - hmd.EyeRight.ReliefInMeters;
        }
    }

    return virtualCameraOffset;
}

//-----------------------------------------------------------------------------------
// **** Higher-level utility functions.

Sizei CalculateRecommendedTextureSize ( HmdRenderInfo const &hmd,
                                        bool bRendertargetSharedByBothEyes,
                                        float pixelDensityInCenter /*= 1.0f*/ )
{
    Sizei idealPixelSize[2];
    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        StereoEye eyeType = ( eyeNum == 0 ) ? StereoEye_Left : StereoEye_Right;

        DistortionAndFov distortionAndFov = CalculateDistortionAndFovInternal ( eyeType, hmd, NULL, NULL, OVR_DEFAULT_EXTRA_EYE_ROTATION );

        idealPixelSize[eyeNum] = CalculateIdealPixelSize ( eyeType,
                                                           distortionAndFov.Distortion,
                                                           distortionAndFov.Fov,
                                                           pixelDensityInCenter );
    }

    Sizei result;
    result.w = Alg::Max ( idealPixelSize[0].w, idealPixelSize[1].w );
    result.h = Alg::Max ( idealPixelSize[0].h, idealPixelSize[1].h );
    if ( bRendertargetSharedByBothEyes )
    {
        result.w *= 2;
    }
    return result;
}

StereoEyeParams CalculateStereoEyeParams ( HmdRenderInfo const &hmd,
                                           StereoEye eyeType,
                                           Sizei const &actualRendertargetSurfaceSize,
                                           bool bRendertargetSharedByBothEyes,
                                           bool bRightHanded /*= true*/,
                                           float zNear /*= 0.01f*/, float zFar /*= 10000.0f*/,
                                           Sizei const *pOverrideRenderedPixelSize /* = NULL*/,
                                           FovPort const *pOverrideFovport /*= NULL*/,
                                           float zoomFactor /*= 1.0f*/ )
{
    DistortionAndFov distortionAndFov = CalculateDistortionAndFovInternal ( eyeType, hmd, NULL, NULL, OVR_DEFAULT_EXTRA_EYE_ROTATION );
    if ( pOverrideFovport != NULL )
    {
        distortionAndFov.Fov = *pOverrideFovport;
    }

    Recti viewport;
    if ( pOverrideRenderedPixelSize != NULL )
    {
        viewport = CalculateViewportInternal ( eyeType, actualRendertargetSurfaceSize, *pOverrideRenderedPixelSize, bRendertargetSharedByBothEyes, false );
    }
    else
    {
        viewport = CalculateViewportDensityInternal ( eyeType,
                                                      distortionAndFov.Distortion,
                                                      distortionAndFov.Fov,
                                                      actualRendertargetSurfaceSize, bRendertargetSharedByBothEyes, 1.0f, false );
    }

    return CalculateStereoEyeParamsInternal (
                        eyeType, hmd,
                        distortionAndFov.Distortion,
                        distortionAndFov.Fov,
                        actualRendertargetSurfaceSize, viewport,
                        bRightHanded, zNear, zFar, false, zoomFactor );
}

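// A minimal usage sketch (illustrative only; assumes the app has filled in an
// HmdRenderInfo, here called "hmdInfo", and that the header supplies the default
// arguments shown in the comments above):
//
//     Sizei rtSize = CalculateRecommendedTextureSize ( hmdInfo, true, 1.0f );
//     // ...allocate an rtSize render target with the graphics API of choice...
//     StereoEyeParams leftEye  = CalculateStereoEyeParams ( hmdInfo, StereoEye_Left,  rtSize, true );
//     StereoEyeParams rightEye = CalculateStereoEyeParams ( hmdInfo, StereoEye_Right, rtSize, true );
//     // leftEye.RenderedViewport / RenderedProjection then drive scene rendering,
//     // and leftEye.EyeToSourceUV feeds the distortion pass.
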
FovPort CalculateRecommendedFov ( HmdRenderInfo const &hmd,
                                  StereoEye eyeType,
                                  bool bMakeFovSymmetrical /* = false */ )
{
    DistortionAndFov distortionAndFov = CalculateDistortionAndFovInternal ( eyeType, hmd, NULL, NULL, OVR_DEFAULT_EXTRA_EYE_ROTATION );
    FovPort fov = distortionAndFov.Fov;
    if ( bMakeFovSymmetrical )
    {
        // Deal with engines that cannot support an off-center projection.
        // Unfortunately this means they will be rendering pixels that the user can't actually see.
        float fovTanH = Alg::Max ( fov.LeftTan, fov.RightTan );
        float fovTanV = Alg::Max ( fov.UpTan, fov.DownTan );
        fov.LeftTan  = fovTanH;
        fov.RightTan = fovTanH;
        fov.UpTan    = fovTanV;
        fov.DownTan  = fovTanV;
    }
    return fov;
}

ViewportScaleAndOffset ModifyRenderViewport ( StereoEyeParams const &params,
                                              Sizei const &actualRendertargetSurfaceSize,
                                              Recti const &renderViewport )
{
    return CalculateViewportScaleAndOffsetInternal ( params.EyeToSourceNDC, renderViewport, actualRendertargetSurfaceSize );
}

ViewportScaleAndOffset ModifyRenderSize ( StereoEyeParams const &params,
                                          Sizei const &actualRendertargetSurfaceSize,
                                          Sizei const &requestedRenderSize,
                                          bool bRendertargetSharedByBothEyes /*= false*/ )
{
    Recti renderViewport = CalculateViewportInternal ( params.Eye, actualRendertargetSurfaceSize, requestedRenderSize, bRendertargetSharedByBothEyes, false );
    return CalculateViewportScaleAndOffsetInternal ( params.EyeToSourceNDC, renderViewport, actualRendertargetSurfaceSize );
}

ViewportScaleAndOffset ModifyRenderDensity ( StereoEyeParams const &params,
                                             Sizei const &actualRendertargetSurfaceSize,
                                             float pixelDensity /*= 1.0f*/,
                                             bool bRendertargetSharedByBothEyes /*= false*/ )
{
    Recti renderViewport = CalculateViewportDensityInternal ( params.Eye, params.Distortion, params.Fov, actualRendertargetSurfaceSize, bRendertargetSharedByBothEyes, pixelDensity, false );
    return CalculateViewportScaleAndOffsetInternal ( params.EyeToSourceNDC, renderViewport, actualRendertargetSurfaceSize );
}

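// A dynamic-resolution sketch, continuing the illustrative example above: a
// GPU-bound app can call ModifyRenderDensity each frame with a density below 1.0
// and keep the same render target; only the viewport and the UV scale/offset fed
// to the distortion pass change. "gpuLoadTooHigh" is a placeholder heuristic.
//
//     float density = gpuLoadTooHigh ? 0.8f : 1.0f;
//     ViewportScaleAndOffset vso = ModifyRenderDensity ( leftEye, rtSize, density, true );
//     leftEye.RenderedViewport = vso.RenderedViewport;
//     leftEye.EyeToSourceUV    = vso.EyeToSourceUV;
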
//-----------------------------------------------------------------------------------
// **** StereoConfig Implementation

StereoConfig::StereoConfig(StereoMode mode)
    : Mode(mode),
      DirtyFlag(true)
{
    // Initialize "fake" default HMD values for testing without HMD plugged in.
    // These default values match those returned by DK1
    // (at least they did at time of writing - certainly good enough for debugging)
    Hmd.HmdType = HmdType_None;
    Hmd.ResolutionInPixels = Sizei(1280, 800);
    Hmd.ScreenSizeInMeters = Sizef(0.1498f, 0.0936f);
    Hmd.ScreenGapSizeInMeters = 0.0f;
    Hmd.PelOffsetR = Vector2f ( 0.0f, 0.0f );
    Hmd.PelOffsetB = Vector2f ( 0.0f, 0.0f );
    Hmd.CenterFromTopInMeters = 0.0468f;
    Hmd.LensSeparationInMeters = 0.0635f;
    Hmd.LensDiameterInMeters = 0.035f;
    Hmd.LensSurfaceToMidplateInMeters = 0.025f;
    Hmd.EyeCups = EyeCup_DK1A;
    Hmd.Shutter.Type = HmdShutter_RollingTopToBottom;
    Hmd.Shutter.VsyncToNextVsync = ( 1.0f / 60.0f );
    Hmd.Shutter.VsyncToFirstScanline = 0.000052f;
    Hmd.Shutter.FirstScanlineToLastScanline = 0.016580f;
    Hmd.Shutter.PixelSettleTime = 0.015f;
    Hmd.Shutter.PixelPersistence = ( 1.0f / 60.0f );
    Hmd.EyeLeft.Distortion.SetToIdentity();
    Hmd.EyeLeft.Distortion.MetersPerTanAngleAtCenter = 0.043875f;
    Hmd.EyeLeft.Distortion.Eqn = Distortion_RecipPoly4;
    Hmd.EyeLeft.Distortion.K[0] = 1.0f;
    Hmd.EyeLeft.Distortion.K[1] = -0.3999f;
    Hmd.EyeLeft.Distortion.K[2] = 0.2408f;
    Hmd.EyeLeft.Distortion.K[3] = -0.4589f;
    Hmd.EyeLeft.Distortion.MaxR = 1.0f;
    Hmd.EyeLeft.Distortion.ChromaticAberration[0] = 0.006f;
    Hmd.EyeLeft.Distortion.ChromaticAberration[1] = 0.0f;
    Hmd.EyeLeft.Distortion.ChromaticAberration[2] = -0.014f;
    Hmd.EyeLeft.Distortion.ChromaticAberration[3] = 0.0f;
    Hmd.EyeLeft.NoseToPupilInMeters = 0.032f;   // half of a nominal 64mm IPD
    Hmd.EyeLeft.ReliefInMeters = 0.013f;
    Hmd.EyeRight = Hmd.EyeLeft;

    SetViewportMode = SVPM_Density;
    SetViewportPixelsPerDisplayPixel = 1.0f;
    // Not used in this mode, but init them anyway.
    SetViewportSize[0] = Sizei(0,0);
    SetViewportSize[1] = Sizei(0,0);
    SetViewport[0] = Recti(0,0,0,0);
    SetViewport[1] = Recti(0,0,0,0);

    OverrideLens = false;
    OverrideTanHalfFov = false;
    OverrideZeroIpd = false;
    ExtraEyeRotationInRadians = OVR_DEFAULT_EXTRA_EYE_ROTATION;
    IsRendertargetSharedByBothEyes = true;
    RightHandedProjection = true;

    // This should cause an assert if the app does not call SetRendertargetSize()
    RendertargetSize = Sizei ( 0, 0 );

    ZNear = 0.01f;
    ZFar = 10000.0f;

    Set2DAreaFov(DegreeToRad(85.0f));
}

void StereoConfig::SetHmdRenderInfo(const HmdRenderInfo& hmd)
{
    Hmd = hmd;
    DirtyFlag = true;
}

void StereoConfig::Set2DAreaFov(float fovRadians)
{
    Area2DFov = fovRadians;
    DirtyFlag = true;
}

const StereoEyeParamsWithOrtho& StereoConfig::GetEyeRenderParams(StereoEye eye)
{
    if ( DirtyFlag )
    {
        UpdateComputedState();
    }

    static const uint8_t eyeParamIndices[3] = { 0, 0, 1 };

    OVR_ASSERT(eye < sizeof(eyeParamIndices));
    return EyeRenderParams[eyeParamIndices[eye]];
}

void StereoConfig::SetLensOverride ( LensConfig const *pLensOverrideLeft  /*= NULL*/,
                                     LensConfig const *pLensOverrideRight /*= NULL*/ )
{
    if ( pLensOverrideLeft == NULL )
    {
        OverrideLens = false;
    }
    else
    {
        OverrideLens = true;
        LensOverrideLeft  = *pLensOverrideLeft;
        LensOverrideRight = *pLensOverrideLeft;
        if ( pLensOverrideRight != NULL )
        {
            LensOverrideRight = *pLensOverrideRight;
        }
    }
    DirtyFlag = true;
}

void StereoConfig::SetRendertargetSize (Size<int> const rendertargetSize,
                                        bool rendertargetIsSharedByBothEyes )
{
    RendertargetSize = rendertargetSize;
    IsRendertargetSharedByBothEyes = rendertargetIsSharedByBothEyes;
    DirtyFlag = true;
}

void StereoConfig::SetFov ( FovPort const *pfovLeft  /*= NULL*/,
                            FovPort const *pfovRight /*= NULL*/ )
{
    DirtyFlag = true;
    if ( pfovLeft == NULL )
    {
        OverrideTanHalfFov = false;
    }
    else
    {
        OverrideTanHalfFov = true;
        FovOverrideLeft  = *pfovLeft;
        FovOverrideRight = *pfovLeft;
        if ( pfovRight != NULL )
        {
            FovOverrideRight = *pfovRight;
        }
    }
}

void StereoConfig::SetZeroVirtualIpdOverride ( bool enableOverride )
{
    DirtyFlag = true;
    OverrideZeroIpd = enableOverride;
}

void StereoConfig::SetZClipPlanesAndHandedness ( float zNear /*= 0.01f*/, float zFar /*= 10000.0f*/, bool rightHandedProjection /*= true*/ )
{
    DirtyFlag = true;
    ZNear = zNear;
    ZFar = zFar;
    RightHandedProjection = rightHandedProjection;
}

void StereoConfig::SetExtraEyeRotation ( float extraEyeRotationInRadians )
{
    DirtyFlag = true;
    ExtraEyeRotationInRadians = extraEyeRotationInRadians;
}

Sizei StereoConfig::CalculateRecommendedTextureSize ( bool rendertargetSharedByBothEyes,
                                                      float pixelDensityInCenter /*= 1.0f*/ )
{
    return Render::CalculateRecommendedTextureSize ( Hmd, rendertargetSharedByBothEyes, pixelDensityInCenter );
}

void StereoConfig::UpdateComputedState()
{
    int numEyes = 2;
    StereoEye eyeTypes[2];

    switch ( Mode )
    {
    case Stereo_None:
        numEyes     = 1;
        eyeTypes[0] = StereoEye_Center;
        break;

    case Stereo_LeftRight_Multipass:
        numEyes     = 2;
        eyeTypes[0] = StereoEye_Left;
        eyeTypes[1] = StereoEye_Right;
        break;

    default:
        numEyes = 0;
        OVR_ASSERT( false );
        break;
    }

    // If either of these fire, you've probably forgotten to call SetRendertargetSize()
    OVR_ASSERT ( RendertargetSize.w > 0 );
    OVR_ASSERT ( RendertargetSize.h > 0 );

    for ( int eyeNum = 0; eyeNum < numEyes; eyeNum++ )
    {
        StereoEye eyeType = eyeTypes[eyeNum];
        LensConfig *pLensOverride = NULL;
        if ( OverrideLens )
        {
            if ( eyeType == StereoEye_Right )
            {
                pLensOverride = &LensOverrideRight;
            }
            else
            {
                pLensOverride = &LensOverrideLeft;
            }
        }

        FovPort *pTanHalfFovOverride = NULL;
        if ( OverrideTanHalfFov )
        {
            if ( eyeType == StereoEye_Right )
            {
                pTanHalfFovOverride = &FovOverrideRight;
            }
            else
            {
                pTanHalfFovOverride = &FovOverrideLeft;
            }
        }

        DistortionAndFov distortionAndFov =
            CalculateDistortionAndFovInternal ( eyeType, Hmd,
                                                pLensOverride, pTanHalfFovOverride,
                                                ExtraEyeRotationInRadians );

        EyeRenderParams[eyeNum].StereoEye.Distortion = distortionAndFov.Distortion;
        EyeRenderParams[eyeNum].StereoEye.Fov        = distortionAndFov.Fov;
    }

    if ( OverrideZeroIpd )
    {
        // Take the union of the calculated eye FOVs.
        FovPort fov;
        fov.UpTan    = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.UpTan,    EyeRenderParams[1].StereoEye.Fov.UpTan    );
        fov.DownTan  = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.DownTan,  EyeRenderParams[1].StereoEye.Fov.DownTan  );
        fov.LeftTan  = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.LeftTan,  EyeRenderParams[1].StereoEye.Fov.LeftTan  );
        fov.RightTan = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.RightTan, EyeRenderParams[1].StereoEye.Fov.RightTan );
        EyeRenderParams[0].StereoEye.Fov = fov;
        EyeRenderParams[1].StereoEye.Fov = fov;
    }

    for ( int eyeNum = 0; eyeNum < numEyes; eyeNum++ )
    {
        StereoEye eyeType = eyeTypes[eyeNum];

        DistortionRenderDesc localDistortion = EyeRenderParams[eyeNum].StereoEye.Distortion;
        FovPort fov = EyeRenderParams[eyeNum].StereoEye.Fov;

        // Use a placeholder - will be overridden later.
        Recti tempViewport = Recti ( 0, 0, 1, 1 );

        EyeRenderParams[eyeNum].StereoEye = CalculateStereoEyeParamsInternal (
                                                eyeType, Hmd, localDistortion, fov,
                                                RendertargetSize, tempViewport,
                                                RightHandedProjection, ZNear, ZFar,
                                                OverrideZeroIpd );

        // We want to create a virtual 2D surface we can draw debug text messages to.
        // We'd like it to be a fixed distance (OrthoDistance) away,
        // and to cover a specific FOV (Area2DFov). We need to find the projection matrix for this,
        // and also to know how large it is in pixels to achieve a 1:1 mapping at the center of the screen.
        float orthoDistance = 0.8f;
        float orthoHalfFov = tanf ( Area2DFov * 0.5f );
        Vector2f unityOrthoPixelSize = localDistortion.PixelsPerTanAngleAtCenter * ( orthoHalfFov * 2.0f );
        float localInterpupillaryDistance = Hmd.EyeLeft.NoseToPupilInMeters + Hmd.EyeRight.NoseToPupilInMeters;
        if ( OverrideZeroIpd )
        {
            localInterpupillaryDistance = 0.0f;
        }
        Matrix4f ortho = CreateOrthoSubProjection ( true, eyeType,
                                                    orthoHalfFov, orthoHalfFov,
                                                    unityOrthoPixelSize.x, unityOrthoPixelSize.y,
                                                    orthoDistance, localInterpupillaryDistance,
                                                    EyeRenderParams[eyeNum].StereoEye.RenderedProjection );
        EyeRenderParams[eyeNum].OrthoProjection = ortho;
    }

    // ...and now set up the viewport, scale & offset the way the app wanted.
    setupViewportScaleAndOffsets();

    if ( OverrideZeroIpd )
    {
        // Monocular rendering has some fragile parts... don't break any by accident.
        OVR_ASSERT ( EyeRenderParams[0].StereoEye.Fov.UpTan    == EyeRenderParams[1].StereoEye.Fov.UpTan    );
        OVR_ASSERT ( EyeRenderParams[0].StereoEye.Fov.DownTan  == EyeRenderParams[1].StereoEye.Fov.DownTan  );
        OVR_ASSERT ( EyeRenderParams[0].StereoEye.Fov.LeftTan  == EyeRenderParams[1].StereoEye.Fov.LeftTan  );
        OVR_ASSERT ( EyeRenderParams[0].StereoEye.Fov.RightTan == EyeRenderParams[1].StereoEye.Fov.RightTan );
        OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[0][0] == EyeRenderParams[1].StereoEye.RenderedProjection.M[0][0] );
        OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[1][1] == EyeRenderParams[1].StereoEye.RenderedProjection.M[1][1] );
        OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[0][2] == EyeRenderParams[1].StereoEye.RenderedProjection.M[0][2] );
        OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[1][2] == EyeRenderParams[1].StereoEye.RenderedProjection.M[1][2] );
        OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedViewport == EyeRenderParams[1].StereoEye.RenderedViewport );
        OVR_ASSERT ( EyeRenderParams[0].StereoEye.EyeToSourceUV.Offset == EyeRenderParams[1].StereoEye.EyeToSourceUV.Offset );
        OVR_ASSERT ( EyeRenderParams[0].StereoEye.EyeToSourceUV.Scale  == EyeRenderParams[1].StereoEye.EyeToSourceUV.Scale  );
        OVR_ASSERT ( EyeRenderParams[0].StereoEye.EyeToSourceNDC.Offset == EyeRenderParams[1].StereoEye.EyeToSourceNDC.Offset );
        OVR_ASSERT ( EyeRenderParams[0].StereoEye.EyeToSourceNDC.Scale  == EyeRenderParams[1].StereoEye.EyeToSourceNDC.Scale  );
        OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[0][0] == EyeRenderParams[1].OrthoProjection.M[0][0] );
        OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[1][1] == EyeRenderParams[1].OrthoProjection.M[1][1] );
        OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[0][2] == EyeRenderParams[1].OrthoProjection.M[0][2] );
        OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[1][2] == EyeRenderParams[1].OrthoProjection.M[1][2] );
    }

    DirtyFlag = false;
}

ViewportScaleAndOffsetBothEyes StereoConfig::setupViewportScaleAndOffsets()
{
    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        StereoEye eyeType = ( eyeNum == 0 ) ? StereoEye_Left : StereoEye_Right;

        DistortionRenderDesc localDistortion = EyeRenderParams[eyeNum].StereoEye.Distortion;
        FovPort fov = EyeRenderParams[eyeNum].StereoEye.Fov;

        Recti renderedViewport;
        switch ( SetViewportMode )
        {
        case SVPM_Density:
            renderedViewport = CalculateViewportDensityInternal (
                                    eyeType, localDistortion, fov,
                                    RendertargetSize, IsRendertargetSharedByBothEyes,
                                    SetViewportPixelsPerDisplayPixel, OverrideZeroIpd );
            break;
        case SVPM_Size:
            if ( ( eyeType == StereoEye_Right ) && !OverrideZeroIpd )
            {
                renderedViewport = CalculateViewportInternal (
                                        eyeType, RendertargetSize,
                                        SetViewportSize[1],
                                        IsRendertargetSharedByBothEyes, OverrideZeroIpd );
            }
            else
            {
                renderedViewport = CalculateViewportInternal (
                                        eyeType, RendertargetSize,
                                        SetViewportSize[0],
                                        IsRendertargetSharedByBothEyes, OverrideZeroIpd );
            }
            break;
        case SVPM_Viewport:
            if ( ( eyeType == StereoEye_Right ) && !OverrideZeroIpd )
            {
                renderedViewport = SetViewport[1];
            }
            else
            {
                renderedViewport = SetViewport[0];
            }
            break;
        default: OVR_ASSERT ( false ); break;
        }

        ViewportScaleAndOffset vpsao = CalculateViewportScaleAndOffsetInternal (
                                            EyeRenderParams[eyeNum].StereoEye.EyeToSourceNDC,
                                            renderedViewport,
                                            RendertargetSize );
        EyeRenderParams[eyeNum].StereoEye.RenderedViewport = vpsao.RenderedViewport;
        EyeRenderParams[eyeNum].StereoEye.EyeToSourceUV    = vpsao.EyeToSourceUV;
    }

    ViewportScaleAndOffsetBothEyes result;
    result.Left.EyeToSourceUV     = EyeRenderParams[0].StereoEye.EyeToSourceUV;
    result.Left.RenderedViewport  = EyeRenderParams[0].StereoEye.RenderedViewport;
    result.Right.EyeToSourceUV    = EyeRenderParams[1].StereoEye.EyeToSourceUV;
    result.Right.RenderedViewport = EyeRenderParams[1].StereoEye.RenderedViewport;
    return result;
}

// Specify a pixel density - how many rendered pixels per pixel in the physical display.
ViewportScaleAndOffsetBothEyes StereoConfig::SetRenderDensity ( float pixelsPerDisplayPixel )
{
    SetViewportMode = SVPM_Density;
    SetViewportPixelsPerDisplayPixel = pixelsPerDisplayPixel;
    return setupViewportScaleAndOffsets();
}

// Supply the size directly. Will be clamped to the physical rendertarget size.
ViewportScaleAndOffsetBothEyes StereoConfig::SetRenderSize ( Sizei const &renderSizeLeft, Sizei const &renderSizeRight )
{
    SetViewportMode = SVPM_Size;
    SetViewportSize[0] = renderSizeLeft;
    SetViewportSize[1] = renderSizeRight;
    return setupViewportScaleAndOffsets();
}

// Supply the viewport directly. This is not clamped to the physical rendertarget - careful now!
ViewportScaleAndOffsetBothEyes StereoConfig::SetRenderViewport ( Recti const &renderViewportLeft, Recti const &renderViewportRight )
{
    SetViewportMode = SVPM_Viewport;
    SetViewport[0] = renderViewportLeft;
    SetViewport[1] = renderViewportRight;
    return setupViewportScaleAndOffsets();
}

Matrix4f StereoConfig::GetProjectionWithZoom ( StereoEye eye, float fovZoom ) const
{
    int eyeNum = ( eye == StereoEye_Right ) ? 1 : 0;
    float fovScale = 1.0f / fovZoom;
    FovPort fovPort = EyeRenderParams[eyeNum].StereoEye.Fov;
    fovPort.LeftTan  *= fovScale;
    fovPort.RightTan *= fovScale;
    fovPort.UpTan    *= fovScale;
    fovPort.DownTan  *= fovScale;
    return CreateProjection ( RightHandedProjection, fovPort, ZNear, ZFar );
}

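// A minimal StereoConfig usage sketch (illustrative only; "hmdInfo" is a
// placeholder the app would fill from the device):
//
//     StereoConfig config ( Stereo_LeftRight_Multipass );
//     config.SetHmdRenderInfo ( hmdInfo );
//     Sizei rtSize = config.CalculateRecommendedTextureSize ( true );
//     config.SetRendertargetSize ( rtSize, true );
//     const StereoEyeParamsWithOrtho& left = config.GetEyeRenderParams ( StereoEye_Left );
//     // GetEyeRenderParams() lazily recomputes via DirtyFlag, so setters can be
//     // called in any order before the first query.
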
//-----------------------------------------------------------------------------------
// ***** Distortion Mesh Rendering

// Pow2 for the Morton order to work!
// 4 is too low - it is easy to see the "wobbles" in the HMD.
// 5 is realllly close but you can see pixel differences with even/odd frame checking.
// 6 is indistinguishable on a monitor on even/odd frames.
static const int DMA_GridSizeLog2   = 6;
static const int DMA_GridSize       = 1<<DMA_GridSizeLog2;
static const int DMA_NumVertsPerEye = (DMA_GridSize+1)*(DMA_GridSize+1);
static const int DMA_NumTrisPerEye  = (DMA_GridSize)*(DMA_GridSize)*2;

DistortionMeshVertexData DistortionMeshMakeVertex ( Vector2f screenNDC,
                                                    bool rightEye,
                                                    const HmdRenderInfo &hmdRenderInfo,
                                                    const DistortionRenderDesc &distortion, const ScaleAndOffset2D &eyeToSourceNDC )
{
    DistortionMeshVertexData result;

    float xOffset = 0.0f;
    if (rightEye)
    {
        xOffset = 1.0f;
    }

    Vector2f tanEyeAnglesR, tanEyeAnglesG, tanEyeAnglesB;
    TransformScreenNDCToTanFovSpaceChroma ( &tanEyeAnglesR, &tanEyeAnglesG, &tanEyeAnglesB,
                                            distortion, screenNDC );

    result.TanEyeAnglesR = tanEyeAnglesR;
    result.TanEyeAnglesG = tanEyeAnglesG;
    result.TanEyeAnglesB = tanEyeAnglesB;

    HmdShutterTypeEnum shutterType = hmdRenderInfo.Shutter.Type;
    switch ( shutterType )
    {
    case HmdShutter_Global:
        result.TimewarpLerp = 0.0f;
        break;
    case HmdShutter_RollingLeftToRight:
        // Retrace is left to right - left eye goes 0.0 -> 0.5, then right goes 0.5 -> 1.0
        result.TimewarpLerp = screenNDC.x * 0.25f + 0.25f;
        if (rightEye)
        {
            result.TimewarpLerp += 0.5f;
        }
        break;
    case HmdShutter_RollingRightToLeft:
        // Retrace is right to left - right eye goes 0.0 -> 0.5, then left goes 0.5 -> 1.0
        result.TimewarpLerp = 0.75f - screenNDC.x * 0.25f;
        if (rightEye)
        {
            result.TimewarpLerp -= 0.5f;
        }
        break;
    case HmdShutter_RollingTopToBottom:
        // Retrace is top to bottom on both eyes at the same time.
        result.TimewarpLerp = screenNDC.y * 0.5f + 0.5f;
        break;
    default: OVR_ASSERT ( false ); break;
    }

    // When does the fade-to-black edge start? Chosen heuristically.
    float fadeOutBorderFractionTexture          = 0.1f;
    float fadeOutBorderFractionTextureInnerEdge = 0.1f;
    float fadeOutBorderFractionScreen           = 0.1f;
    float fadeOutFloor                          = 0.6f;   // the floor controls how much black is in the fade region

    if (hmdRenderInfo.HmdType == HmdType_DK1)
    {
        fadeOutBorderFractionTexture          = 0.3f;
        fadeOutBorderFractionTextureInnerEdge = 0.075f;
        fadeOutBorderFractionScreen           = 0.075f;
        fadeOutFloor                          = 0.25f;
    }

    // Fade out at texture edges.
    // The furthest out will be the blue channel, because of chromatic aberration (true of any standard lens)
    Vector2f sourceTexCoordBlueNDC = TransformTanFovSpaceToRendertargetNDC ( eyeToSourceNDC, tanEyeAnglesB );
    if (rightEye)
    {
        // The inner edge of the eye texture is usually much more magnified, because it's right against the middle of the screen, not the FOV edge.
        // So we want a different scaling factor for that. This code flips the texture NDC so that +1.0 is the inner edge.
        sourceTexCoordBlueNDC.x = -sourceTexCoordBlueNDC.x;
    }
    float edgeFadeIn = ( 1.0f / fadeOutBorderFractionTextureInnerEdge ) * ( 1.0f - sourceTexCoordBlueNDC.x );                        // Inner
    edgeFadeIn = Alg::Min ( edgeFadeIn, ( 1.0f / fadeOutBorderFractionTexture ) * ( 1.0f + sourceTexCoordBlueNDC.x ) );              // Outer
    edgeFadeIn = Alg::Min ( edgeFadeIn, ( 1.0f / fadeOutBorderFractionTexture ) * ( 1.0f - sourceTexCoordBlueNDC.y ) );              // Upper
    edgeFadeIn = Alg::Min ( edgeFadeIn, ( 1.0f / fadeOutBorderFractionTexture ) * ( 1.0f + sourceTexCoordBlueNDC.y ) );              // Lower

    // Also fade out at screen edges. Since this is in pixel space, no need to do inner specially.
    float edgeFadeInScreen = ( 1.0f / fadeOutBorderFractionScreen ) *
                             ( 1.0f - Alg::Max ( Alg::Abs ( screenNDC.x ), Alg::Abs ( screenNDC.y ) ) );
    edgeFadeIn = Alg::Min ( edgeFadeInScreen, edgeFadeIn ) + fadeOutFloor;

    // Note - this is NOT clamped negatively.
    // For rendering methods that interpolate over a coarse grid, we need the values to go negative for correct intersection with zero.
    result.Shade = Alg::Min ( edgeFadeIn, 1.0f );
    result.ScreenPosNDC.x = 0.5f * screenNDC.x - 0.5f + xOffset;
    result.ScreenPosNDC.y = -screenNDC.y;

    return result;
}

void DistortionMeshDestroy ( DistortionMeshVertexData *pVertices, uint16_t *pTriangleMeshIndices )
{
    OVR_FREE ( pVertices );
    OVR_FREE ( pTriangleMeshIndices );
}

void DistortionMeshCreate ( DistortionMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
                            int *pNumVertices, int *pNumTriangles,
                            const StereoEyeParams &stereoParams, const HmdRenderInfo &hmdRenderInfo )
{
    bool rightEye = ( stereoParams.Eye == StereoEye_Right );
    int vertexCount = 0;
    int triangleCount = 0;

    // Generate mesh into allocated data and return result.
    DistortionMeshCreate(ppVertices, ppTriangleListIndices, &vertexCount, &triangleCount,
                         rightEye, hmdRenderInfo, stereoParams.Distortion, stereoParams.EyeToSourceNDC);

    *pNumVertices  = vertexCount;
    *pNumTriangles = triangleCount;
}

// Generate distortion mesh for one eye.
void DistortionMeshCreate( DistortionMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
                           int *pNumVertices, int *pNumTriangles,
                           bool rightEye,
                           const HmdRenderInfo &hmdRenderInfo,
                           const DistortionRenderDesc &distortion, const ScaleAndOffset2D &eyeToSourceNDC )
{
    *pNumVertices  = DMA_NumVertsPerEye;
    *pNumTriangles = DMA_NumTrisPerEye;

    *ppVertices = (DistortionMeshVertexData*)
                      OVR_ALLOC( sizeof(DistortionMeshVertexData) * (*pNumVertices) );
    *ppTriangleListIndices = (uint16_t*) OVR_ALLOC( sizeof(uint16_t) * (*pNumTriangles) * 3 );

    if (!*ppVertices || !*ppTriangleListIndices)
    {
        if (*ppVertices)
        {
            OVR_FREE(*ppVertices);
        }
        if (*ppTriangleListIndices)
        {
            OVR_FREE(*ppTriangleListIndices);
        }
        *ppVertices = NULL;
        *ppTriangleListIndices = NULL;
        *pNumTriangles = 0;
        *pNumVertices = 0;
        return;
    }

    // Populate vertex buffer info

    // First pass - build up raw vertex data.
    DistortionMeshVertexData* pcurVert = *ppVertices;

    for ( int y = 0; y <= DMA_GridSize; y++ )
    {
        for ( int x = 0; x <= DMA_GridSize; x++ )
        {
            Vector2f sourceCoordNDC;
            // NDC texture coords [-1,+1]
            sourceCoordNDC.x = 2.0f * ( (float)x / (float)DMA_GridSize ) - 1.0f;
            sourceCoordNDC.y = 2.0f * ( (float)y / (float)DMA_GridSize ) - 1.0f;
            Vector2f tanEyeAngle = TransformRendertargetNDCToTanFovSpace ( eyeToSourceNDC, sourceCoordNDC );

            // Find a corresponding screen position.
            // Note - this function does not have to be precise - we're just trying to match the mesh tessellation
            // with the shape of the distortion to minimise the number of triangles needed.
            Vector2f screenNDC = TransformTanFovSpaceToScreenNDC ( distortion, tanEyeAngle, false );
            // ...but don't let verts overlap to the other eye.
            screenNDC.x = Alg::Max ( -1.0f, Alg::Min ( screenNDC.x, 1.0f ) );
            screenNDC.y = Alg::Max ( -1.0f, Alg::Min ( screenNDC.y, 1.0f ) );

            // From those screen positions, generate the vertex.
            *pcurVert = DistortionMeshMakeVertex ( screenNDC, rightEye, hmdRenderInfo, distortion, eyeToSourceNDC );
            pcurVert++;
        }
    }

    // Populate index buffer info
    uint16_t *pcurIndex = *ppTriangleListIndices;

    for ( int triNum = 0; triNum < DMA_GridSize * DMA_GridSize; triNum++ )
    {
        // Use a Morton order to help locality of FB, texture and vertex cache.
        // (0.325ms raster order -> 0.257ms Morton order)
        OVR_ASSERT ( DMA_GridSize < 256 );   // so the (DMA_GridSize+1)^2 vertex indices fit in a uint16_t
        int x = ( ( triNum & 0x0001 ) >> 0 ) |
                ( ( triNum & 0x0004 ) >> 1 ) |
                ( ( triNum & 0x0010 ) >> 2 ) |
                ( ( triNum & 0x0040 ) >> 3 ) |
                ( ( triNum & 0x0100 ) >> 4 ) |
                ( ( triNum & 0x0400 ) >> 5 ) |
                ( ( triNum & 0x1000 ) >> 6 ) |
                ( ( triNum & 0x4000 ) >> 7 );
        int y = ( ( triNum & 0x0002 ) >> 1 ) |
                ( ( triNum & 0x0008 ) >> 2 ) |
                ( ( triNum & 0x0020 ) >> 3 ) |
                ( ( triNum & 0x0080 ) >> 4 ) |
                ( ( triNum & 0x0200 ) >> 5 ) |
                ( ( triNum & 0x0800 ) >> 6 ) |
                ( ( triNum & 0x2000 ) >> 7 ) |
                ( ( triNum & 0x8000 ) >> 8 );
        int FirstVertex = x * (DMA_GridSize+1) + y;
        // Another twist - we want the top-left and bottom-right quadrants to
        // have the triangles split one way, the other two split the other.
        // +---+---+---+---+
        // |  /|  /|\  |\  |
        // | / | / | \ | \ |
        // |/  |/  |  \|  \|
        // +---+---+---+---+
        // |  /|  /|\  |\  |
        // | / | / | \ | \ |
        // |/  |/  |  \|  \|
        // +---+---+---+---+
        // |\  |\  |  /|  /|
        // | \ | \ | / | / |
        // |  \|  \|/  |/  |
        // +---+---+---+---+
        // |\  |\  |  /|  /|
        // | \ | \ | / | / |
        // |  \|  \|/  |/  |
        // +---+---+---+---+
        // This way triangle edges don't span long distances over the distortion function,
        // so linear interpolation works better & we can use fewer tris.
        if ( ( x < DMA_GridSize/2 ) != ( y < DMA_GridSize/2 ) )       // != is logical XOR
        {
            *pcurIndex++ = (uint16_t)FirstVertex;
            *pcurIndex++ = (uint16_t)FirstVertex+1;
            *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1)+1;

            *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1)+1;
            *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1);
            *pcurIndex++ = (uint16_t)FirstVertex;
        }
        else
        {
            *pcurIndex++ = (uint16_t)FirstVertex;
            *pcurIndex++ = (uint16_t)FirstVertex+1;
            *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1);

            *pcurIndex++ = (uint16_t)FirstVertex+1;
            *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1)+1;
            *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1);
        }
    }
}

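// A minimal create/use/destroy sketch (illustrative only; uploading the mesh to
// the GPU is app- and API-specific, and "leftEye"/"hmdInfo" are placeholders):
//
//     DistortionMeshVertexData* pVerts = NULL;
//     uint16_t* pIndices = NULL;
//     int numVerts = 0, numTris = 0;
//     DistortionMeshCreate ( &pVerts, &pIndices, &numVerts, &numTris, leftEye, hmdInfo );
//     if ( pVerts != NULL )
//     {
//         // ...copy numVerts vertices and numTris*3 indices into GPU buffers...
//         DistortionMeshDestroy ( pVerts, pIndices );
//     }
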
//-----------------------------------------------------------------------------------
// ***** Heightmap Mesh Rendering

static const int HMA_GridSizeLog2   = 7;
static const int HMA_GridSize       = 1<<HMA_GridSizeLog2;
static const int HMA_NumVertsPerEye = (HMA_GridSize+1)*(HMA_GridSize+1);
static const int HMA_NumTrisPerEye  = (HMA_GridSize)*(HMA_GridSize)*2;

void HeightmapMeshDestroy ( HeightmapMeshVertexData *pVertices, uint16_t *pTriangleMeshIndices )
{
    OVR_FREE ( pVertices );
    OVR_FREE ( pTriangleMeshIndices );
}

void HeightmapMeshCreate ( HeightmapMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
                           int *pNumVertices, int *pNumTriangles,
                           const StereoEyeParams &stereoParams, const HmdRenderInfo &hmdRenderInfo )
{
    bool rightEye = ( stereoParams.Eye == StereoEye_Right );
    int vertexCount = 0;
    int triangleCount = 0;

    // Generate mesh into allocated data and return result.
    HeightmapMeshCreate(ppVertices, ppTriangleListIndices, &vertexCount, &triangleCount,
                        rightEye, hmdRenderInfo, stereoParams.EyeToSourceNDC);

    *pNumVertices  = vertexCount;
    *pNumTriangles = triangleCount;
}

// Generate heightmap mesh for one eye.
void HeightmapMeshCreate( HeightmapMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
                          int *pNumVertices, int *pNumTriangles, bool rightEye,
                          const HmdRenderInfo &hmdRenderInfo,
                          const ScaleAndOffset2D &eyeToSourceNDC )
{
    *pNumVertices  = HMA_NumVertsPerEye;
    *pNumTriangles = HMA_NumTrisPerEye;

    *ppVertices = (HeightmapMeshVertexData*) OVR_ALLOC( sizeof(HeightmapMeshVertexData) * (*pNumVertices) );
    *ppTriangleListIndices = (uint16_t*) OVR_ALLOC( sizeof(uint16_t) * (*pNumTriangles) * 3 );

    if (!*ppVertices || !*ppTriangleListIndices)
    {
        if (*ppVertices)
        {
            OVR_FREE(*ppVertices);
        }
        if (*ppTriangleListIndices)
        {
            OVR_FREE(*ppTriangleListIndices);
        }
        *ppVertices = NULL;
        *ppTriangleListIndices = NULL;
        *pNumTriangles = 0;
        *pNumVertices = 0;
        return;
    }

    // Populate vertex buffer info
    // float xOffset = (rightEye ? 1.0f : 0.0f);  Currently disabled because its usage is disabled below.

    // First pass - build up raw vertex data.
    HeightmapMeshVertexData* pcurVert = *ppVertices;

    for ( int y = 0; y <= HMA_GridSize; y++ )
    {
        for ( int x = 0; x <= HMA_GridSize; x++ )
        {
            Vector2f sourceCoordNDC;
            // NDC texture coords [-1,+1]
            sourceCoordNDC.x = 2.0f * ( (float)x / (float)HMA_GridSize ) - 1.0f;
            sourceCoordNDC.y = 2.0f * ( (float)y / (float)HMA_GridSize ) - 1.0f;
            Vector2f tanEyeAngle = TransformRendertargetNDCToTanFovSpace ( eyeToSourceNDC, sourceCoordNDC );

            pcurVert->TanEyeAngles = tanEyeAngle;

            HmdShutterTypeEnum shutterType = hmdRenderInfo.Shutter.Type;
            switch ( shutterType )
            {
            case HmdShutter_Global:
                pcurVert->TimewarpLerp = 0.0f;
                break;
            case HmdShutter_RollingLeftToRight:
                // Retrace is left to right - left eye goes 0.0 -> 0.5, then right goes 0.5 -> 1.0
                pcurVert->TimewarpLerp = sourceCoordNDC.x * 0.25f + 0.25f;
                if (rightEye)
                {
                    pcurVert->TimewarpLerp += 0.5f;
                }
                break;
            case HmdShutter_RollingRightToLeft:
                // Retrace is right to left - right eye goes 0.0 -> 0.5, then left goes 0.5 -> 1.0
                pcurVert->TimewarpLerp = 0.75f - sourceCoordNDC.x * 0.25f;
                if (rightEye)
                {
                    pcurVert->TimewarpLerp -= 0.5f;
                }
                break;
            case HmdShutter_RollingTopToBottom:
                // Retrace is top to bottom on both eyes at the same time.
                pcurVert->TimewarpLerp = sourceCoordNDC.y * 0.5f + 0.5f;
                break;
            default: OVR_ASSERT ( false ); break;
            }

            // Don't let verts overlap to the other eye.
            //sourceCoordNDC.x = Alg::Max ( -1.0f, Alg::Min ( sourceCoordNDC.x, 1.0f ) );
            //sourceCoordNDC.y = Alg::Max ( -1.0f, Alg::Min ( sourceCoordNDC.y, 1.0f ) );

            //pcurVert->ScreenPosNDC.x = 0.5f * sourceCoordNDC.x - 0.5f + xOffset;
            pcurVert->ScreenPosNDC.x = sourceCoordNDC.x;
            pcurVert->ScreenPosNDC.y = -sourceCoordNDC.y;

            pcurVert++;
        }
    }

    // Populate index buffer info
    uint16_t *pcurIndex = *ppTriangleListIndices;

    for ( int triNum = 0; triNum < HMA_GridSize * HMA_GridSize; triNum++ )
    {
        // Use a Morton order to help locality of FB, texture and vertex cache.
        // (0.325ms raster order -> 0.257ms Morton order)
        OVR_ASSERT ( HMA_GridSize < 256 );
        int x = ( ( triNum & 0x0001 ) >> 0 ) |
                ( ( triNum & 0x0004 ) >> 1 ) |
                ( ( triNum & 0x0010 ) >> 2 ) |
                ( ( triNum & 0x0040 ) >> 3 ) |
                ( ( triNum & 0x0100 ) >> 4 ) |
                ( ( triNum & 0x0400 ) >> 5 ) |
                ( ( triNum & 0x1000 ) >> 6 ) |
                ( ( triNum & 0x4000 ) >> 7 );
        int y = ( ( triNum & 0x0002 ) >> 1 ) |
                ( ( triNum & 0x0008 ) >> 2 ) |
                ( ( triNum & 0x0020 ) >> 3 ) |
                ( ( triNum & 0x0080 ) >> 4 ) |
                ( ( triNum & 0x0200 ) >> 5 ) |
                ( ( triNum & 0x0800 ) >> 6 ) |
                ( ( triNum & 0x2000 ) >> 7 ) |
                ( ( triNum & 0x8000 ) >> 8 );
        int FirstVertex = x * (HMA_GridSize+1) + y;
        // Another twist - we want the top-left and bottom-right quadrants to
        // have the triangles split one way, the other two split the other.
        // +---+---+---+---+
        // |  /|  /|\  |\  |
        // | / | / | \ | \ |
        // |/  |/  |  \|  \|
        // +---+---+---+---+
        // |  /|  /|\  |\  |
        // | / | / | \ | \ |
        // |/  |/  |  \|  \|
        // +---+---+---+---+
        // |\  |\  |  /|  /|
        // | \ | \ | / | / |
        // |  \|  \|/  |/  |
        // +---+---+---+---+
        // |\  |\  |  /|  /|
        // | \ | \ | / | / |
        // |  \|  \|/  |/  |
        // +---+---+---+---+
        // This way triangle edges don't span long distances over the distortion function,
        // so linear interpolation works better & we can use fewer tris.
        if ( ( x < HMA_GridSize/2 ) != ( y < HMA_GridSize/2 ) )       // != is logical XOR
        {
            *pcurIndex++ = (uint16_t)FirstVertex;
            *pcurIndex++ = (uint16_t)FirstVertex+1;
            *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1)+1;

            *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1)+1;
            *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1);
            *pcurIndex++ = (uint16_t)FirstVertex;
        }
        else
        {
            *pcurIndex++ = (uint16_t)FirstVertex;
            *pcurIndex++ = (uint16_t)FirstVertex+1;
            *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1);

            *pcurIndex++ = (uint16_t)FirstVertex+1;
            *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1)+1;
            *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1);
        }
    }
}

//-----------------------------------------------------------------------------------
// ***** Prediction and timewarp.
//

// Calculates the values from the HMD info.
PredictionValues PredictionGetDeviceValues ( const HmdRenderInfo &hmdRenderInfo,
                                             bool withTimewarp /*= true*/,
                                             bool withVsync /*= true*/ )
{
    PredictionValues result;

    result.WithTimewarp = withTimewarp;
    result.WithVsync    = withVsync;

    // For unclear reasons, most graphics systems add an extra frame of latency
    // somewhere along the way. In time we'll debug this and figure it out, but
    // for now this gets prediction a little bit better.
    const float extraFramesOfBufferingKludge = 1.0f;

    if ( withVsync )
    {
        // These are the times from the Present+Flush to when the middle of the scene is "averagely visible" (without timewarp).
        // So if you had no timewarp, this, plus the time until the next vsync, is how much to predict by.
        result.PresentFlushToRenderedScene  = extraFramesOfBufferingKludge * hmdRenderInfo.Shutter.FirstScanlineToLastScanline;
        // Predict to the middle of the screen being scanned out.
        result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.VsyncToFirstScanline + 0.5f * hmdRenderInfo.Shutter.FirstScanlineToLastScanline;
        // Time for pixels to get half-way to settling.
        result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.PixelSettleTime * 0.5f;
        // Predict to half-way through persistence.
        result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.PixelPersistence * 0.5f;

        // The time from the Present+Flush to when the first scanline is "averagely visible".
        result.PresentFlushToTimewarpStart  = extraFramesOfBufferingKludge * hmdRenderInfo.Shutter.FirstScanlineToLastScanline;
        // Predict to the first line being scanned out.
        result.PresentFlushToTimewarpStart += hmdRenderInfo.Shutter.VsyncToFirstScanline;
        // Time for pixels to get half-way to settling.
        result.PresentFlushToTimewarpStart += hmdRenderInfo.Shutter.PixelSettleTime * 0.5f;
        // Predict to half-way through persistence.
        result.PresentFlushToTimewarpStart += hmdRenderInfo.Shutter.PixelPersistence * 0.5f;

        // Time to the last scanline.
        result.PresentFlushToTimewarpEnd    = result.PresentFlushToTimewarpStart + hmdRenderInfo.Shutter.FirstScanlineToLastScanline;

        // Ideal framerate.
        result.PresentFlushToPresentFlush   = hmdRenderInfo.Shutter.VsyncToNextVsync;
    }
    else
    {
        // Timewarp without vsync is a little odd.
        // Currently, we assume that without vsync, we have no idea which scanline
        // is currently being sent to the display. So we can't do lerping timewarp,
        // we can just do a full-screen late-stage fixup.

        // "PresentFlushToRenderedScene" means the time from the Present+Flush to when the middle of the scene is "averagely visible" (without timewarp).
        // So if you had no timewarp, this, plus the time until the next flush (which is usually the time to render the frame), is how much to predict by.
        // Time for pixels to get half-way to settling.
        result.PresentFlushToRenderedScene  = hmdRenderInfo.Shutter.PixelSettleTime * 0.5f;
        // Predict to half-way through persistence.
        result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.PixelPersistence * 0.5f;

        // Without vsync, you don't know timings, and so can't do anything useful with lerped warping.
        result.PresentFlushToTimewarpStart  = result.PresentFlushToRenderedScene;
        result.PresentFlushToTimewarpEnd    = result.PresentFlushToRenderedScene;

        // There's no concept of "ideal" when vsync is off.
        result.PresentFlushToPresentFlush   = 0.0f;
    }

    return result;
}

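// Worked example using the DK1-like defaults from the StereoConfig constructor
// (FirstScanlineToLastScanline = 0.016580, VsyncToFirstScanline = 0.000052,
// PixelSettleTime = 0.015, PixelPersistence = 1/60), with vsync on:
//
//     PresentFlushToRenderedScene = 0.016580                       // buffering kludge
//                                 + 0.000052 + 0.5 * 0.016580      // middle of scanout
//                                 + 0.5 * 0.015                    // pixel settle
//                                 + 0.5 * (1.0/60.0)               // persistence
//                                ~= 0.0408 seconds,
//
// i.e. the scene pose should be predicted roughly 41ms past the Present+Flush.
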
Matrix4f TimewarpComputePoseDelta ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const &hmdToEyeViewOffset )
{
    Matrix4f worldFromPredictedView = (hmdToEyeViewOffset * predictedViewFromWorld).InvertedHomogeneousTransform();
    Matrix4f matRenderFromNowStart  = (hmdToEyeViewOffset * renderedViewFromWorld) * worldFromPredictedView;

    // The sensor-predicted orientations have: X=right, Y=up, Z=backwards.
    // The vectors inside the mesh are in NDC to keep the shader simple: X=right, Y=down, Z=forwards.
    // So we need to perform a similarity transform on this delta matrix.
    // The verbose code would look like this:
    /*
    Matrix4f matBasisChange;
    matBasisChange.SetIdentity();
    matBasisChange.M[0][0] =  1.0f;
    matBasisChange.M[1][1] = -1.0f;
    matBasisChange.M[2][2] = -1.0f;
    Matrix4f matBasisChangeInv = matBasisChange.Inverted();
    matRenderFromNow = matBasisChangeInv * matRenderFromNow * matBasisChange;
    */
    // ...but of course all the above is a constant transform and much more easily done.
    // We flip the signs of the Y&Z row, then flip the signs of the Y&Z column,
    // and of course most of the flips cancel:
    // +++                       +--                      +--
    // +++ -> flip Y&Z columns -> +-- -> flip Y&Z rows -> -++
    // +++                       +--                      -++
    matRenderFromNowStart.M[0][1] = -matRenderFromNowStart.M[0][1];
    matRenderFromNowStart.M[0][2] = -matRenderFromNowStart.M[0][2];
    matRenderFromNowStart.M[1][0] = -matRenderFromNowStart.M[1][0];
    matRenderFromNowStart.M[2][0] = -matRenderFromNowStart.M[2][0];
    matRenderFromNowStart.M[1][3] = -matRenderFromNowStart.M[1][3];
    matRenderFromNowStart.M[2][3] = -matRenderFromNowStart.M[2][3];

    return matRenderFromNowStart;
}

Matrix4f TimewarpComputePoseDeltaPosition ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const &hmdToEyeViewOffset )
{
    Matrix4f worldFromPredictedView = (hmdToEyeViewOffset * predictedViewFromWorld).InvertedHomogeneousTransform();
    Matrix4f matRenderXform         = (hmdToEyeViewOffset * renderedViewFromWorld) * worldFromPredictedView;

    return matRenderXform.Inverted();
}

TimewarpMachine::TimewarpMachine()
  : VsyncEnabled(false),
    RenderInfo(),
    CurrentPredictionValues(),
    DistortionTimeCount(0),
    DistortionTimeCurrentStart(0.0),
    //DistortionTimes[],
    DistortionTimeAverage(0.f),
    //EyeRenderPoses[],
    LastFramePresentFlushTime(0.0),
    PresentFlushToPresentFlushSeconds(0.f),
    NextFramePresentFlushTime(0.0)
{
#if defined(OVR_BUILD_DEBUG)
    memset(DistortionTimes, 0, sizeof(DistortionTimes));
#endif

    for ( int i = 0; i < 2; i++ )
    {
        EyeRenderPoses[i] = Posef();
    }
}

void TimewarpMachine::Reset(HmdRenderInfo& renderInfo, bool vsyncEnabled, double timeNow)
{
    RenderInfo = renderInfo;
    VsyncEnabled = vsyncEnabled;
    CurrentPredictionValues = PredictionGetDeviceValues ( renderInfo, true, VsyncEnabled );
    PresentFlushToPresentFlushSeconds = 0.0f;
    DistortionTimeCount = 0;
    DistortionTimeAverage = 0.0f;
    LastFramePresentFlushTime = timeNow;
    AfterPresentAndFlush(timeNow);
}

void TimewarpMachine::AfterPresentAndFlush(double timeNow)
{
    AfterPresentWithoutFlush();
    AfterPresentFinishes ( timeNow );
}

void TimewarpMachine::AfterPresentWithoutFlush()
{
    // We've only issued the Present - it hasn't actually finished (i.e. appeared).
    // But we need to estimate when the next Present will appear, so extrapolate from previous data.
    NextFramePresentFlushTime = LastFramePresentFlushTime + 2.0 * (double)PresentFlushToPresentFlushSeconds;
}

void TimewarpMachine::AfterPresentFinishes(double timeNow)
{
    // The present has now actually happened.
    PresentFlushToPresentFlushSeconds = (float)(timeNow - LastFramePresentFlushTime);
    LastFramePresentFlushTime = timeNow;
    NextFramePresentFlushTime = timeNow + (double)PresentFlushToPresentFlushSeconds;
}

double TimewarpMachine::GetViewRenderPredictionTime()
{
    // Note that PredictionGetDeviceValues() did all the vsync-dependent thinking for us.
    return NextFramePresentFlushTime + CurrentPredictionValues.PresentFlushToRenderedScene;
}

bool TimewarpMachine::GetViewRenderPredictionPose(SensorStateReader* reader, Posef& pose)
{
    return reader->GetPoseAtTime(GetViewRenderPredictionTime(), pose);
}

double TimewarpMachine::GetVisiblePixelTimeStart()
{
    // Note that PredictionGetDeviceValues() did all the vsync-dependent thinking for us.
    return NextFramePresentFlushTime + CurrentPredictionValues.PresentFlushToTimewarpStart;
}

double TimewarpMachine::GetVisiblePixelTimeEnd()
{
    // Note that PredictionGetDeviceValues() did all the vsync-dependent thinking for us.
    return NextFramePresentFlushTime + CurrentPredictionValues.PresentFlushToTimewarpEnd;
}

bool TimewarpMachine::GetPredictedVisiblePixelPoseStart(SensorStateReader* reader, Posef& pose)
{
    return reader->GetPoseAtTime(GetVisiblePixelTimeStart(), pose);
}

bool TimewarpMachine::GetPredictedVisiblePixelPoseEnd(SensorStateReader* reader, Posef& pose)
{
    return reader->GetPoseAtTime(GetVisiblePixelTimeEnd(), pose);
}

bool TimewarpMachine::GetTimewarpDeltaStart(SensorStateReader* reader, Posef const &renderedPose, Matrix4f& transform)
{
    Posef visiblePose;
    if (!GetPredictedVisiblePixelPoseStart(reader, visiblePose))
    {
        return false;
    }

    Matrix4f visibleMatrix(visiblePose);
    Matrix4f renderedMatrix(renderedPose);
    Matrix4f identity;   // doesn't matter for orientation-only timewarp
    transform = TimewarpComputePoseDelta ( renderedMatrix, visibleMatrix, identity );

    return true;
}

bool TimewarpMachine::GetTimewarpDeltaEnd(SensorStateReader* reader, Posef const &renderedPose, Matrix4f& transform)
{
    Posef visiblePose;
    if (!GetPredictedVisiblePixelPoseEnd(reader, visiblePose))
    {
        return false;
    }

    Matrix4f visibleMatrix(visiblePose);
    Matrix4f renderedMatrix(renderedPose);
    Matrix4f identity;   // doesn't matter for orientation-only timewarp
    transform = TimewarpComputePoseDelta ( renderedMatrix, visibleMatrix, identity );

    return true;
}

// What time should the app wait until before starting distortion?
double TimewarpMachine::JustInTime_GetDistortionWaitUntilTime()
{
    if ( !VsyncEnabled || ( DistortionTimeCount < NumDistortionTimes ) )
    {
        // Don't wait.
        return LastFramePresentFlushTime;
    }

    // Note - a 1-2ms fudge factor (because of Windows timer granularity etc.) is NOT added here,
    // because otherwise you end up adding multiple fudge factors!
    // So it's left for the calling app to add just one fudge factor.

    float howLongBeforePresent = DistortionTimeAverage;
    // Subtlety here. Technically, the correct time is NextFramePresentFlushTime - howLongBeforePresent.
    // However, if the app drops a frame, this then perpetuates it,
    // i.e. if the display is running at 60fps, but the last frame was slow
    // (e.g. because of swapping or whatever), then NextFramePresentFlushTime is
    // 33ms in the future, not 16ms. Since this function supplies the
    // time to wait until, the app would indeed wait the full 33ms, so the framerate
    // drops to 30fps and never comes back up!
    // So we use the *ideal* frame interval, not the *actual* one.
    return LastFramePresentFlushTime + (float)( CurrentPredictionValues.PresentFlushToPresentFlush - howLongBeforePresent );
}

double TimewarpMachine::JustInTime_AverageDistortionTime()
{
    if ( JustInTime_NeedDistortionTimeMeasurement() )
    {
        return 0.0;
    }
    return DistortionTimeAverage;
}

bool TimewarpMachine::JustInTime_NeedDistortionTimeMeasurement() const
{
    if (!VsyncEnabled)
    {
        return false;
    }
    return ( DistortionTimeCount < NumDistortionTimes );
}

void TimewarpMachine::JustInTime_BeforeDistortionTimeMeasurement(double timeNow)
{
    DistortionTimeCurrentStart = timeNow;
}

void TimewarpMachine::JustInTime_AfterDistortionTimeMeasurement(double timeNow)
{
    float timeDelta = (float)( timeNow - DistortionTimeCurrentStart );
    if ( DistortionTimeCount < NumDistortionTimes )
    {
        DistortionTimes[DistortionTimeCount] = timeDelta;
        DistortionTimeCount++;
        if ( DistortionTimeCount == NumDistortionTimes )
        {
            // Median: repeatedly pull out the largest remaining time;
            // after NumDistortionTimes/2 extractions the last one pulled is the median.
            float distortionTimeMedian = 0.0f;
            for ( int i = 0; i < NumDistortionTimes/2; i++ )
            {
                // Find the maximum time of those remaining.
                float maxTime = DistortionTimes[0];
                int maxIndex = 0;
                for ( int j = 1; j < NumDistortionTimes; j++ )
                {
                    if ( maxTime < DistortionTimes[j] )
                    {
                        maxTime = DistortionTimes[j];
                        maxIndex = j;
                    }
                }
                // Zero that max time, so we'll find the next-highest time.
                DistortionTimes[maxIndex] = 0.0f;
                distortionTimeMedian = maxTime;
            }
            DistortionTimeAverage = distortionTimeMedian;
        }
    }
    else
    {
        OVR_ASSERT ( !"Really didn't need more measurements, thanks" );
    }
}

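// A per-frame just-in-time distortion sketch (illustrative only): "twm" is a
// TimewarpMachine that has had Reset() called, "WaitUntil", "RenderDistortionMeshes"
// and "Present" stand in for app/platform functions, and "GetSecondsNow" stands in
// for the app's double-seconds clock. As noted above, the app should add its own
// single timer-granularity fudge factor to the wait.
//
//     if ( twm.JustInTime_NeedDistortionTimeMeasurement() )
//     {
//         // Still gathering timing samples - measure this distortion pass.
//         twm.JustInTime_BeforeDistortionTimeMeasurement ( GetSecondsNow() );
//         RenderDistortionMeshes();
//         twm.JustInTime_AfterDistortionTimeMeasurement ( GetSecondsNow() );
//     }
//     else
//     {
//         // Timings known - start distortion as late as safely possible.
//         WaitUntil ( twm.JustInTime_GetDistortionWaitUntilTime() );
//         RenderDistortionMeshes();
//     }
//     Present();
//     twm.AfterPresentAndFlush ( GetSecondsNow() );
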
}}}  // OVR::Util::Render