rev |
line source |
nuclear@0
|
1 /************************************************************************************
|
nuclear@0
|
2
|
nuclear@0
|
3 Filename : Util_Render_Stereo.cpp
|
nuclear@0
|
4 Content : Stereo rendering configuration implementation
|
nuclear@0
|
5 Created : October 22, 2012
|
nuclear@0
|
6 Authors : Michael Antonov, Andrew Reisse, Tom Forsyth
|
nuclear@0
|
7
|
nuclear@0
|
8 Copyright : Copyright 2014 Oculus VR, LLC All Rights reserved.
|
nuclear@0
|
9
|
nuclear@0
|
10 Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License");
|
nuclear@0
|
11 you may not use the Oculus VR Rift SDK except in compliance with the License,
|
nuclear@0
|
12 which is provided at the time of installation or download, or which
|
nuclear@0
|
13 otherwise accompanies this software in either electronic or hard copy form.
|
nuclear@0
|
14
|
nuclear@0
|
15 You may obtain a copy of the License at
|
nuclear@0
|
16
|
nuclear@0
|
17 http://www.oculusvr.com/licenses/LICENSE-3.2
|
nuclear@0
|
18
|
nuclear@0
|
19 Unless required by applicable law or agreed to in writing, the Oculus VR SDK
|
nuclear@0
|
20 distributed under the License is distributed on an "AS IS" BASIS,
|
nuclear@0
|
21 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
nuclear@0
|
22 See the License for the specific language governing permissions and
|
nuclear@0
|
23 limitations under the License.
|
nuclear@0
|
24
|
nuclear@0
|
25 *************************************************************************************/
|
nuclear@0
|
26
|
nuclear@0
|
27 #include "Util_Render_Stereo.h"
|
nuclear@0
|
28
|
nuclear@0
|
29 namespace OVR { namespace Util { namespace Render {
|
nuclear@0
|
30
|
nuclear@0
|
31 using namespace OVR::Tracking;
|
nuclear@0
|
32
|
nuclear@0
|
33
|
nuclear@0
|
34 //-----------------------------------------------------------------------------------
|
nuclear@0
|
35 // **** Useful debug functions.
|
nuclear@0
|
36
|
nuclear@0
|
37 char const* GetDebugNameEyeCupType ( EyeCupType eyeCupType )
|
nuclear@0
|
38 {
|
nuclear@0
|
39 switch ( eyeCupType )
|
nuclear@0
|
40 {
|
nuclear@0
|
41 case EyeCup_DK1A: return "DK1 A";
|
nuclear@0
|
42 case EyeCup_DK1B: return "DK1 B";
|
nuclear@0
|
43 case EyeCup_DK1C: return "DK1 C";
|
nuclear@0
|
44 case EyeCup_DKHD2A: return "DKHD2 A";
|
nuclear@0
|
45 case EyeCup_OrangeA: return "Orange A";
|
nuclear@0
|
46 case EyeCup_RedA: return "Red A";
|
nuclear@0
|
47 case EyeCup_PinkA: return "Pink A";
|
nuclear@0
|
48 case EyeCup_BlueA: return "Blue A";
|
nuclear@0
|
49 case EyeCup_Delilah1A: return "Delilah 1 A";
|
nuclear@0
|
50 case EyeCup_Delilah2A: return "Delilah 2 A";
|
nuclear@0
|
51 case EyeCup_JamesA: return "James A";
|
nuclear@0
|
52 case EyeCup_SunMandalaA: return "Sun Mandala A";
|
nuclear@0
|
53 case EyeCup_DK2A: return "DK2 A";
|
nuclear@0
|
54 case EyeCup_LAST: return "LAST";
|
nuclear@0
|
55 default: OVR_ASSERT ( false ); return "Error";
|
nuclear@0
|
56 }
|
nuclear@0
|
57 }
|
nuclear@0
|
58
|
nuclear@0
|
59 char const* GetDebugNameHmdType ( HmdTypeEnum hmdType )
|
nuclear@0
|
60 {
|
nuclear@0
|
61 switch ( hmdType )
|
nuclear@0
|
62 {
|
nuclear@0
|
63 case HmdType_None: return "None";
|
nuclear@0
|
64 case HmdType_DK1: return "DK1";
|
nuclear@0
|
65 case HmdType_DKProto: return "DK1 prototype";
|
nuclear@0
|
66 case HmdType_DKHDProto: return "DK HD prototype 1";
|
nuclear@0
|
67 case HmdType_DKHDProto566Mi: return "DK HD prototype 566 Mi";
|
nuclear@0
|
68 case HmdType_DKHD2Proto: return "DK HD prototype 585";
|
nuclear@0
|
69 case HmdType_CrystalCoveProto: return "Crystal Cove";
|
nuclear@0
|
70 case HmdType_DK2: return "DK2";
|
nuclear@0
|
71 case HmdType_Unknown: return "Unknown";
|
nuclear@0
|
72 case HmdType_LAST: return "LAST";
|
nuclear@0
|
73 default: OVR_ASSERT ( false ); return "Error";
|
nuclear@0
|
74 }
|
nuclear@0
|
75 }
|
nuclear@0
|
76
|
nuclear@0
|
77
|
nuclear@0
|
78 //-----------------------------------------------------------------------------------
|
nuclear@0
|
79 // **** Internal pipeline functions.
|
nuclear@0
|
80
|
nuclear@0
|
// Pairs a per-eye distortion description with the FOV it was computed for.
// Used only to pass intermediate results between the pipeline helpers below.
struct DistortionAndFov
{
    DistortionRenderDesc    Distortion;   // Lens distortion parameters for one eye.
    FovPort                 Fov;          // Tan-half-angle FOV matching that distortion.
};
|
nuclear@0
|
86
|
nuclear@0
|
87 static DistortionAndFov CalculateDistortionAndFovInternal ( StereoEye eyeType, HmdRenderInfo const &hmd,
|
nuclear@0
|
88 LensConfig const *pLensOverride = NULL,
|
nuclear@0
|
89 FovPort const *pTanHalfFovOverride = NULL,
|
nuclear@0
|
90 float extraEyeRotationInRadians = OVR_DEFAULT_EXTRA_EYE_ROTATION )
|
nuclear@0
|
91 {
|
nuclear@0
|
92 // pLensOverride can be NULL, which means no override.
|
nuclear@0
|
93
|
nuclear@0
|
94 DistortionRenderDesc localDistortion = CalculateDistortionRenderDesc ( eyeType, hmd, pLensOverride );
|
nuclear@0
|
95 FovPort fov = CalculateFovFromHmdInfo ( eyeType, localDistortion, hmd, extraEyeRotationInRadians );
|
nuclear@0
|
96 // Here the app or the user would optionally clamp this visible fov to a smaller number if
|
nuclear@0
|
97 // they want more perf or resolution and are willing to give up FOV.
|
nuclear@0
|
98 // They may also choose to clamp UDLR differently e.g. to get cinemascope-style views.
|
nuclear@0
|
99 if ( pTanHalfFovOverride != NULL )
|
nuclear@0
|
100 {
|
nuclear@0
|
101 fov = *pTanHalfFovOverride;
|
nuclear@0
|
102 }
|
nuclear@0
|
103
|
nuclear@0
|
104 // Here we could call ClampToPhysicalScreenFov(), but we do want people
|
nuclear@0
|
105 // to be able to play with larger-than-screen views.
|
nuclear@0
|
106 // The calling app can always do the clamping itself.
|
nuclear@0
|
107 DistortionAndFov result;
|
nuclear@0
|
108 result.Distortion = localDistortion;
|
nuclear@0
|
109 result.Fov = fov;
|
nuclear@0
|
110
|
nuclear@0
|
111 return result;
|
nuclear@0
|
112 }
|
nuclear@0
|
113
|
nuclear@0
|
114
|
nuclear@0
|
115 static Recti CalculateViewportInternal ( StereoEye eyeType,
|
nuclear@0
|
116 Sizei const actualRendertargetSurfaceSize,
|
nuclear@0
|
117 Sizei const requestedRenderedPixelSize,
|
nuclear@0
|
118 bool bRendertargetSharedByBothEyes,
|
nuclear@0
|
119 bool bMonoRenderingMode = false )
|
nuclear@0
|
120 {
|
nuclear@0
|
121 Recti renderedViewport;
|
nuclear@0
|
122 if ( bMonoRenderingMode || !bRendertargetSharedByBothEyes || (eyeType == StereoEye_Center) )
|
nuclear@0
|
123 {
|
nuclear@0
|
124 // One eye per RT.
|
nuclear@0
|
125 renderedViewport.x = 0;
|
nuclear@0
|
126 renderedViewport.y = 0;
|
nuclear@0
|
127 renderedViewport.w = Alg::Min ( actualRendertargetSurfaceSize.w, requestedRenderedPixelSize.w );
|
nuclear@0
|
128 renderedViewport.h = Alg::Min ( actualRendertargetSurfaceSize.h, requestedRenderedPixelSize.h );
|
nuclear@0
|
129 }
|
nuclear@0
|
130 else
|
nuclear@0
|
131 {
|
nuclear@0
|
132 // Both eyes share the RT.
|
nuclear@0
|
133 renderedViewport.x = 0;
|
nuclear@0
|
134 renderedViewport.y = 0;
|
nuclear@0
|
135 renderedViewport.w = Alg::Min ( actualRendertargetSurfaceSize.w/2, requestedRenderedPixelSize.w );
|
nuclear@0
|
136 renderedViewport.h = Alg::Min ( actualRendertargetSurfaceSize.h, requestedRenderedPixelSize.h );
|
nuclear@0
|
137 if ( eyeType == StereoEye_Right )
|
nuclear@0
|
138 {
|
nuclear@0
|
139 renderedViewport.x = (actualRendertargetSurfaceSize.w+1)/2; // Round up, not down.
|
nuclear@0
|
140 }
|
nuclear@0
|
141 }
|
nuclear@0
|
142 return renderedViewport;
|
nuclear@0
|
143 }
|
nuclear@0
|
144
|
nuclear@0
|
145 static Recti CalculateViewportDensityInternal ( StereoEye eyeType,
|
nuclear@0
|
146 DistortionRenderDesc const &distortion,
|
nuclear@0
|
147 FovPort const &fov,
|
nuclear@0
|
148 Sizei const &actualRendertargetSurfaceSize,
|
nuclear@0
|
149 bool bRendertargetSharedByBothEyes,
|
nuclear@0
|
150 float desiredPixelDensity = 1.0f,
|
nuclear@0
|
151 bool bMonoRenderingMode = false )
|
nuclear@0
|
152 {
|
nuclear@0
|
153 OVR_ASSERT ( actualRendertargetSurfaceSize.w > 0 );
|
nuclear@0
|
154 OVR_ASSERT ( actualRendertargetSurfaceSize.h > 0 );
|
nuclear@0
|
155
|
nuclear@0
|
156 // What size RT do we need to get 1:1 mapping?
|
nuclear@0
|
157 Sizei idealPixelSize = CalculateIdealPixelSize ( eyeType, distortion, fov, desiredPixelDensity );
|
nuclear@0
|
158 // ...but we might not actually get that size.
|
nuclear@0
|
159 return CalculateViewportInternal ( eyeType,
|
nuclear@0
|
160 actualRendertargetSurfaceSize,
|
nuclear@0
|
161 idealPixelSize,
|
nuclear@0
|
162 bRendertargetSharedByBothEyes, bMonoRenderingMode );
|
nuclear@0
|
163 }
|
nuclear@0
|
164
|
nuclear@0
|
165 static ViewportScaleAndOffset CalculateViewportScaleAndOffsetInternal (
|
nuclear@0
|
166 ScaleAndOffset2D const &eyeToSourceNDC,
|
nuclear@0
|
167 Recti const &renderedViewport,
|
nuclear@0
|
168 Sizei const &actualRendertargetSurfaceSize )
|
nuclear@0
|
169 {
|
nuclear@0
|
170 ViewportScaleAndOffset result;
|
nuclear@0
|
171 result.RenderedViewport = renderedViewport;
|
nuclear@0
|
172 result.EyeToSourceUV = CreateUVScaleAndOffsetfromNDCScaleandOffset(
|
nuclear@0
|
173 eyeToSourceNDC, renderedViewport, actualRendertargetSurfaceSize );
|
nuclear@0
|
174 return result;
|
nuclear@0
|
175 }
|
nuclear@0
|
176
|
nuclear@0
|
177
|
nuclear@0
|
178 static StereoEyeParams CalculateStereoEyeParamsInternal ( StereoEye eyeType, HmdRenderInfo const &hmd,
|
nuclear@0
|
179 DistortionRenderDesc const &distortion,
|
nuclear@0
|
180 FovPort const &fov,
|
nuclear@0
|
181 Sizei const &actualRendertargetSurfaceSize,
|
nuclear@0
|
182 Recti const &renderedViewport,
|
nuclear@0
|
183 bool bRightHanded = true, float zNear = 0.01f, float zFar = 10000.0f,
|
nuclear@0
|
184 bool bMonoRenderingMode = false,
|
nuclear@0
|
185 float zoomFactor = 1.0f )
|
nuclear@0
|
186 {
|
nuclear@0
|
187 // Generate the projection matrix for intermediate rendertarget.
|
nuclear@0
|
188 // Z range can also be inserted later by the app (though not in this particular case)
|
nuclear@0
|
189 float fovScale = 1.0f / zoomFactor;
|
nuclear@0
|
190 FovPort zoomedFov = fov;
|
nuclear@0
|
191 zoomedFov.LeftTan *= fovScale;
|
nuclear@0
|
192 zoomedFov.RightTan *= fovScale;
|
nuclear@0
|
193 zoomedFov.UpTan *= fovScale;
|
nuclear@0
|
194 zoomedFov.DownTan *= fovScale;
|
nuclear@0
|
195 Matrix4f projection = CreateProjection ( bRightHanded, zoomedFov, zNear, zFar );
|
nuclear@0
|
196
|
nuclear@0
|
197 // Find the mapping from TanAngle space to target NDC space.
|
nuclear@0
|
198 // Note this does NOT take the zoom factor into account because
|
nuclear@0
|
199 // this is the mapping of actual physical eye FOV (and our eyes do not zoom!)
|
nuclear@0
|
200 // to screen space.
|
nuclear@0
|
201 ScaleAndOffset2D eyeToSourceNDC = CreateNDCScaleAndOffsetFromFov ( fov );
|
nuclear@0
|
202
|
nuclear@0
|
203 // The size of the final FB, which is fixed and determined by the physical size of the device display.
|
nuclear@0
|
204 Recti distortedViewport = GetFramebufferViewport ( eyeType, hmd );
|
nuclear@0
|
205 Vector3f virtualCameraOffset = CalculateEyeVirtualCameraOffset(hmd, eyeType, bMonoRenderingMode);
|
nuclear@0
|
206
|
nuclear@0
|
207 StereoEyeParams result;
|
nuclear@0
|
208 result.Eye = eyeType;
|
nuclear@0
|
209 result.HmdToEyeViewOffset = Matrix4f::Translation(virtualCameraOffset);
|
nuclear@0
|
210 result.Distortion = distortion;
|
nuclear@0
|
211 result.DistortionViewport = distortedViewport;
|
nuclear@0
|
212 result.Fov = fov;
|
nuclear@0
|
213 result.RenderedProjection = projection;
|
nuclear@0
|
214 result.EyeToSourceNDC = eyeToSourceNDC;
|
nuclear@0
|
215 ViewportScaleAndOffset vsao = CalculateViewportScaleAndOffsetInternal ( eyeToSourceNDC, renderedViewport, actualRendertargetSurfaceSize );
|
nuclear@0
|
216 result.RenderedViewport = vsao.RenderedViewport;
|
nuclear@0
|
217 result.EyeToSourceUV = vsao.EyeToSourceUV;
|
nuclear@0
|
218
|
nuclear@0
|
219 return result;
|
nuclear@0
|
220 }
|
nuclear@0
|
221
|
nuclear@0
|
222
|
nuclear@0
|
223 Vector3f CalculateEyeVirtualCameraOffset(HmdRenderInfo const &hmd,
|
nuclear@0
|
224 StereoEye eyeType, bool bmonoRenderingMode)
|
nuclear@0
|
225 {
|
nuclear@0
|
226 Vector3f virtualCameraOffset(0);
|
nuclear@0
|
227
|
nuclear@0
|
228 if (!bmonoRenderingMode)
|
nuclear@0
|
229 {
|
nuclear@0
|
230 float eyeCenterRelief = hmd.GetEyeCenter().ReliefInMeters;
|
nuclear@0
|
231
|
nuclear@0
|
232 if (eyeType == StereoEye_Left)
|
nuclear@0
|
233 {
|
nuclear@0
|
234 virtualCameraOffset.x = hmd.EyeLeft.NoseToPupilInMeters;
|
nuclear@0
|
235 virtualCameraOffset.z = eyeCenterRelief - hmd.EyeLeft.ReliefInMeters;
|
nuclear@0
|
236 }
|
nuclear@0
|
237 else if (eyeType == StereoEye_Right)
|
nuclear@0
|
238 {
|
nuclear@0
|
239 virtualCameraOffset.x = -hmd.EyeRight.NoseToPupilInMeters;
|
nuclear@0
|
240 virtualCameraOffset.z = eyeCenterRelief - hmd.EyeRight.ReliefInMeters;
|
nuclear@0
|
241 }
|
nuclear@0
|
242 }
|
nuclear@0
|
243
|
nuclear@0
|
244 return virtualCameraOffset;
|
nuclear@0
|
245 }
|
nuclear@0
|
246
|
nuclear@0
|
247
|
nuclear@0
|
248 //-----------------------------------------------------------------------------------
|
nuclear@0
|
249 // **** Higher-level utility functions.
|
nuclear@0
|
250
|
nuclear@0
|
251 Sizei CalculateRecommendedTextureSize ( HmdRenderInfo const &hmd,
|
nuclear@0
|
252 bool bRendertargetSharedByBothEyes,
|
nuclear@0
|
253 float pixelDensityInCenter /*= 1.0f*/ )
|
nuclear@0
|
254 {
|
nuclear@0
|
255 Sizei idealPixelSize[2];
|
nuclear@0
|
256 for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
|
nuclear@0
|
257 {
|
nuclear@0
|
258 StereoEye eyeType = ( eyeNum == 0 ) ? StereoEye_Left : StereoEye_Right;
|
nuclear@0
|
259
|
nuclear@0
|
260 DistortionAndFov distortionAndFov = CalculateDistortionAndFovInternal ( eyeType, hmd, NULL, NULL, OVR_DEFAULT_EXTRA_EYE_ROTATION );
|
nuclear@0
|
261
|
nuclear@0
|
262 idealPixelSize[eyeNum] = CalculateIdealPixelSize ( eyeType,
|
nuclear@0
|
263 distortionAndFov.Distortion,
|
nuclear@0
|
264 distortionAndFov.Fov,
|
nuclear@0
|
265 pixelDensityInCenter );
|
nuclear@0
|
266 }
|
nuclear@0
|
267
|
nuclear@0
|
268 Sizei result;
|
nuclear@0
|
269 result.w = Alg::Max ( idealPixelSize[0].w, idealPixelSize[1].w );
|
nuclear@0
|
270 result.h = Alg::Max ( idealPixelSize[0].h, idealPixelSize[1].h );
|
nuclear@0
|
271 if ( bRendertargetSharedByBothEyes )
|
nuclear@0
|
272 {
|
nuclear@0
|
273 result.w *= 2;
|
nuclear@0
|
274 }
|
nuclear@0
|
275 return result;
|
nuclear@0
|
276 }
|
nuclear@0
|
277
|
nuclear@0
|
278 StereoEyeParams CalculateStereoEyeParams ( HmdRenderInfo const &hmd,
|
nuclear@0
|
279 StereoEye eyeType,
|
nuclear@0
|
280 Sizei const &actualRendertargetSurfaceSize,
|
nuclear@0
|
281 bool bRendertargetSharedByBothEyes,
|
nuclear@0
|
282 bool bRightHanded /*= true*/,
|
nuclear@0
|
283 float zNear /*= 0.01f*/, float zFar /*= 10000.0f*/,
|
nuclear@0
|
284 Sizei const *pOverrideRenderedPixelSize /* = NULL*/,
|
nuclear@0
|
285 FovPort const *pOverrideFovport /*= NULL*/,
|
nuclear@0
|
286 float zoomFactor /*= 1.0f*/ )
|
nuclear@0
|
287 {
|
nuclear@0
|
288 DistortionAndFov distortionAndFov = CalculateDistortionAndFovInternal ( eyeType, hmd, NULL, NULL, OVR_DEFAULT_EXTRA_EYE_ROTATION );
|
nuclear@0
|
289 if ( pOverrideFovport != NULL )
|
nuclear@0
|
290 {
|
nuclear@0
|
291 distortionAndFov.Fov = *pOverrideFovport;
|
nuclear@0
|
292 }
|
nuclear@0
|
293
|
nuclear@0
|
294 Recti viewport;
|
nuclear@0
|
295 if ( pOverrideRenderedPixelSize != NULL )
|
nuclear@0
|
296 {
|
nuclear@0
|
297 viewport = CalculateViewportInternal ( eyeType, actualRendertargetSurfaceSize, *pOverrideRenderedPixelSize, bRendertargetSharedByBothEyes, false );
|
nuclear@0
|
298 }
|
nuclear@0
|
299 else
|
nuclear@0
|
300 {
|
nuclear@0
|
301 viewport = CalculateViewportDensityInternal ( eyeType,
|
nuclear@0
|
302 distortionAndFov.Distortion,
|
nuclear@0
|
303 distortionAndFov.Fov,
|
nuclear@0
|
304 actualRendertargetSurfaceSize, bRendertargetSharedByBothEyes, 1.0f, false );
|
nuclear@0
|
305 }
|
nuclear@0
|
306
|
nuclear@0
|
307 return CalculateStereoEyeParamsInternal (
|
nuclear@0
|
308 eyeType, hmd,
|
nuclear@0
|
309 distortionAndFov.Distortion,
|
nuclear@0
|
310 distortionAndFov.Fov,
|
nuclear@0
|
311 actualRendertargetSurfaceSize, viewport,
|
nuclear@0
|
312 bRightHanded, zNear, zFar, false, zoomFactor );
|
nuclear@0
|
313 }
|
nuclear@0
|
314
|
nuclear@0
|
315
|
nuclear@0
|
316 FovPort CalculateRecommendedFov ( HmdRenderInfo const &hmd,
|
nuclear@0
|
317 StereoEye eyeType,
|
nuclear@0
|
318 bool bMakeFovSymmetrical /* = false */ )
|
nuclear@0
|
319 {
|
nuclear@0
|
320 DistortionAndFov distortionAndFov = CalculateDistortionAndFovInternal ( eyeType, hmd, NULL, NULL, OVR_DEFAULT_EXTRA_EYE_ROTATION );
|
nuclear@0
|
321 FovPort fov = distortionAndFov.Fov;
|
nuclear@0
|
322 if ( bMakeFovSymmetrical )
|
nuclear@0
|
323 {
|
nuclear@0
|
324 // Deal with engines that cannot support an off-center projection.
|
nuclear@0
|
325 // Unfortunately this means they will be rendering pixels that the user can't actually see.
|
nuclear@0
|
326 float fovTanH = Alg::Max ( fov.LeftTan, fov.RightTan );
|
nuclear@0
|
327 float fovTanV = Alg::Max ( fov.UpTan, fov.DownTan );
|
nuclear@0
|
328 fov.LeftTan = fovTanH;
|
nuclear@0
|
329 fov.RightTan = fovTanH;
|
nuclear@0
|
330 fov.UpTan = fovTanV;
|
nuclear@0
|
331 fov.DownTan = fovTanV;
|
nuclear@0
|
332 }
|
nuclear@0
|
333 return fov;
|
nuclear@0
|
334 }
|
nuclear@0
|
335
|
nuclear@0
|
// Recomputes the UV scale/offset for a caller-chosen viewport on the same
// rendertarget, reusing the eye's existing TanAngle->NDC mapping.
ViewportScaleAndOffset ModifyRenderViewport ( StereoEyeParams const &params,
                                              Sizei const &actualRendertargetSurfaceSize,
                                              Recti const &renderViewport )
{
    return CalculateViewportScaleAndOffsetInternal ( params.EyeToSourceNDC, renderViewport, actualRendertargetSurfaceSize );
}
|
nuclear@0
|
342
|
nuclear@0
|
343 ViewportScaleAndOffset ModifyRenderSize ( StereoEyeParams const ¶ms,
|
nuclear@0
|
344 Sizei const &actualRendertargetSurfaceSize,
|
nuclear@0
|
345 Sizei const &requestedRenderSize,
|
nuclear@0
|
346 bool bRendertargetSharedByBothEyes /*= false*/ )
|
nuclear@0
|
347 {
|
nuclear@0
|
348 Recti renderViewport = CalculateViewportInternal ( params.Eye, actualRendertargetSurfaceSize, requestedRenderSize, bRendertargetSharedByBothEyes, false );
|
nuclear@0
|
349 return CalculateViewportScaleAndOffsetInternal ( params.EyeToSourceNDC, renderViewport, actualRendertargetSurfaceSize );
|
nuclear@0
|
350 }
|
nuclear@0
|
351
|
nuclear@0
|
352 ViewportScaleAndOffset ModifyRenderDensity ( StereoEyeParams const ¶ms,
|
nuclear@0
|
353 Sizei const &actualRendertargetSurfaceSize,
|
nuclear@0
|
354 float pixelDensity /*= 1.0f*/,
|
nuclear@0
|
355 bool bRendertargetSharedByBothEyes /*= false*/ )
|
nuclear@0
|
356 {
|
nuclear@0
|
357 Recti renderViewport = CalculateViewportDensityInternal ( params.Eye, params.Distortion, params.Fov, actualRendertargetSurfaceSize, bRendertargetSharedByBothEyes, pixelDensity, false );
|
nuclear@0
|
358 return CalculateViewportScaleAndOffsetInternal ( params.EyeToSourceNDC, renderViewport, actualRendertargetSurfaceSize );
|
nuclear@0
|
359 }
|
nuclear@0
|
360
|
nuclear@0
|
361
|
nuclear@0
|
362 //-----------------------------------------------------------------------------------
|
nuclear@0
|
363 // **** StereoConfig Implementation
|
nuclear@0
|
364
|
nuclear@0
|
365 StereoConfig::StereoConfig(StereoMode mode)
|
nuclear@0
|
366 : Mode(mode),
|
nuclear@0
|
367 DirtyFlag(true)
|
nuclear@0
|
368 {
|
nuclear@0
|
369 // Initialize "fake" default HMD values for testing without HMD plugged in.
|
nuclear@0
|
370 // These default values match those returned by DK1
|
nuclear@0
|
371 // (at least they did at time of writing - certainly good enough for debugging)
|
nuclear@0
|
372 Hmd.HmdType = HmdType_None;
|
nuclear@0
|
373 Hmd.ResolutionInPixels = Sizei(1280, 800);
|
nuclear@0
|
374 Hmd.ScreenSizeInMeters = Sizef(0.1498f, 0.0936f);
|
nuclear@0
|
375 Hmd.ScreenGapSizeInMeters = 0.0f;
|
nuclear@0
|
376 Hmd.PelOffsetR = Vector2f ( 0.0f, 0.0f );
|
nuclear@0
|
377 Hmd.PelOffsetB = Vector2f ( 0.0f, 0.0f );
|
nuclear@0
|
378 Hmd.CenterFromTopInMeters = 0.0468f;
|
nuclear@0
|
379 Hmd.LensSeparationInMeters = 0.0635f;
|
nuclear@0
|
380 Hmd.LensDiameterInMeters = 0.035f;
|
nuclear@0
|
381 Hmd.LensSurfaceToMidplateInMeters = 0.025f;
|
nuclear@0
|
382 Hmd.EyeCups = EyeCup_DK1A;
|
nuclear@0
|
383 Hmd.Shutter.Type = HmdShutter_RollingTopToBottom;
|
nuclear@0
|
384 Hmd.Shutter.VsyncToNextVsync = ( 1.0f / 60.0f );
|
nuclear@0
|
385 Hmd.Shutter.VsyncToFirstScanline = 0.000052f;
|
nuclear@0
|
386 Hmd.Shutter.FirstScanlineToLastScanline = 0.016580f;
|
nuclear@0
|
387 Hmd.Shutter.PixelSettleTime = 0.015f;
|
nuclear@0
|
388 Hmd.Shutter.PixelPersistence = ( 1.0f / 60.0f );
|
nuclear@0
|
389 Hmd.EyeLeft.Distortion.SetToIdentity();
|
nuclear@0
|
390 Hmd.EyeLeft.Distortion.MetersPerTanAngleAtCenter = 0.043875f;
|
nuclear@0
|
391 Hmd.EyeLeft.Distortion.Eqn = Distortion_RecipPoly4;
|
nuclear@0
|
392 Hmd.EyeLeft.Distortion.K[0] = 1.0f;
|
nuclear@0
|
393 Hmd.EyeLeft.Distortion.K[1] = -0.3999f;
|
nuclear@0
|
394 Hmd.EyeLeft.Distortion.K[2] = 0.2408f;
|
nuclear@0
|
395 Hmd.EyeLeft.Distortion.K[3] = -0.4589f;
|
nuclear@0
|
396 Hmd.EyeLeft.Distortion.MaxR = 1.0f;
|
nuclear@0
|
397 Hmd.EyeLeft.Distortion.ChromaticAberration[0] = 0.006f;
|
nuclear@0
|
398 Hmd.EyeLeft.Distortion.ChromaticAberration[1] = 0.0f;
|
nuclear@0
|
399 Hmd.EyeLeft.Distortion.ChromaticAberration[2] = -0.014f;
|
nuclear@0
|
400 Hmd.EyeLeft.Distortion.ChromaticAberration[3] = 0.0f;
|
nuclear@0
|
401 Hmd.EyeLeft.NoseToPupilInMeters = 0.62f;
|
nuclear@0
|
402 Hmd.EyeLeft.ReliefInMeters = 0.013f;
|
nuclear@0
|
403 Hmd.EyeRight = Hmd.EyeLeft;
|
nuclear@0
|
404
|
nuclear@0
|
405 SetViewportMode = SVPM_Density;
|
nuclear@0
|
406 SetViewportPixelsPerDisplayPixel = 1.0f;
|
nuclear@0
|
407 // Not used in this mode, but init them anyway.
|
nuclear@0
|
408 SetViewportSize[0] = Sizei(0,0);
|
nuclear@0
|
409 SetViewportSize[1] = Sizei(0,0);
|
nuclear@0
|
410 SetViewport[0] = Recti(0,0,0,0);
|
nuclear@0
|
411 SetViewport[1] = Recti(0,0,0,0);
|
nuclear@0
|
412
|
nuclear@0
|
413 OverrideLens = false;
|
nuclear@0
|
414 OverrideTanHalfFov = false;
|
nuclear@0
|
415 OverrideZeroIpd = false;
|
nuclear@0
|
416 ExtraEyeRotationInRadians = OVR_DEFAULT_EXTRA_EYE_ROTATION;
|
nuclear@0
|
417 IsRendertargetSharedByBothEyes = true;
|
nuclear@0
|
418 RightHandedProjection = true;
|
nuclear@0
|
419
|
nuclear@0
|
420 // This should cause an assert if the app does not call SetRendertargetSize()
|
nuclear@0
|
421 RendertargetSize = Sizei ( 0, 0 );
|
nuclear@0
|
422
|
nuclear@0
|
423 ZNear = 0.01f;
|
nuclear@0
|
424 ZFar = 10000.0f;
|
nuclear@0
|
425
|
nuclear@0
|
426 Set2DAreaFov(DegreeToRad(85.0f));
|
nuclear@0
|
427 }
|
nuclear@0
|
428
|
nuclear@0
|
429 void StereoConfig::SetHmdRenderInfo(const HmdRenderInfo& hmd)
|
nuclear@0
|
430 {
|
nuclear@0
|
431 Hmd = hmd;
|
nuclear@0
|
432 DirtyFlag = true;
|
nuclear@0
|
433 }
|
nuclear@0
|
434
|
nuclear@0
|
435 void StereoConfig::Set2DAreaFov(float fovRadians)
|
nuclear@0
|
436 {
|
nuclear@0
|
437 Area2DFov = fovRadians;
|
nuclear@0
|
438 DirtyFlag = true;
|
nuclear@0
|
439 }
|
nuclear@0
|
440
|
nuclear@0
|
// Returns the cached render parameters for the given eye, lazily recomputing
// them first if any setter has run since the last query.
const StereoEyeParamsWithOrtho& StereoConfig::GetEyeRenderParams(StereoEye eye)
{
    if ( DirtyFlag )
    {
        UpdateComputedState();
    }

    // Maps the three StereoEye enum values onto the two stored parameter slots.
    static const uint8_t eyeParamIndices[3] = { 0, 0, 1 };

    // sizeof() works as the element count here only because the elements are one byte each.
    OVR_ASSERT(eye < sizeof(eyeParamIndices));
    return EyeRenderParams[eyeParamIndices[eye]];
}
|
nuclear@0
|
453
|
nuclear@0
|
454 void StereoConfig::SetLensOverride ( LensConfig const *pLensOverrideLeft /*= NULL*/,
|
nuclear@0
|
455 LensConfig const *pLensOverrideRight /*= NULL*/ )
|
nuclear@0
|
456 {
|
nuclear@0
|
457 if ( pLensOverrideLeft == NULL )
|
nuclear@0
|
458 {
|
nuclear@0
|
459 OverrideLens = false;
|
nuclear@0
|
460 }
|
nuclear@0
|
461 else
|
nuclear@0
|
462 {
|
nuclear@0
|
463 OverrideLens = true;
|
nuclear@0
|
464 LensOverrideLeft = *pLensOverrideLeft;
|
nuclear@0
|
465 LensOverrideRight = *pLensOverrideLeft;
|
nuclear@0
|
466 if ( pLensOverrideRight != NULL )
|
nuclear@0
|
467 {
|
nuclear@0
|
468 LensOverrideRight = *pLensOverrideRight;
|
nuclear@0
|
469 }
|
nuclear@0
|
470 }
|
nuclear@0
|
471 DirtyFlag = true;
|
nuclear@0
|
472 }
|
nuclear@0
|
473
|
nuclear@0
|
474 void StereoConfig::SetRendertargetSize (Size<int> const rendertargetSize,
|
nuclear@0
|
475 bool rendertargetIsSharedByBothEyes )
|
nuclear@0
|
476 {
|
nuclear@0
|
477 RendertargetSize = rendertargetSize;
|
nuclear@0
|
478 IsRendertargetSharedByBothEyes = rendertargetIsSharedByBothEyes;
|
nuclear@0
|
479 DirtyFlag = true;
|
nuclear@0
|
480 }
|
nuclear@0
|
481
|
nuclear@0
|
482 void StereoConfig::SetFov ( FovPort const *pfovLeft /*= NULL*/,
|
nuclear@0
|
483 FovPort const *pfovRight /*= NULL*/ )
|
nuclear@0
|
484 {
|
nuclear@0
|
485 DirtyFlag = true;
|
nuclear@0
|
486 if ( pfovLeft == NULL )
|
nuclear@0
|
487 {
|
nuclear@0
|
488 OverrideTanHalfFov = false;
|
nuclear@0
|
489 }
|
nuclear@0
|
490 else
|
nuclear@0
|
491 {
|
nuclear@0
|
492 OverrideTanHalfFov = true;
|
nuclear@0
|
493 FovOverrideLeft = *pfovLeft;
|
nuclear@0
|
494 FovOverrideRight = *pfovLeft;
|
nuclear@0
|
495 if ( pfovRight != NULL )
|
nuclear@0
|
496 {
|
nuclear@0
|
497 FovOverrideRight = *pfovRight;
|
nuclear@0
|
498 }
|
nuclear@0
|
499 }
|
nuclear@0
|
500 }
|
nuclear@0
|
501
|
nuclear@0
|
502
|
nuclear@0
|
503 void StereoConfig::SetZeroVirtualIpdOverride ( bool enableOverride )
|
nuclear@0
|
504 {
|
nuclear@0
|
505 DirtyFlag = true;
|
nuclear@0
|
506 OverrideZeroIpd = enableOverride;
|
nuclear@0
|
507 }
|
nuclear@0
|
508
|
nuclear@0
|
509
|
nuclear@0
|
510 void StereoConfig::SetZClipPlanesAndHandedness ( float zNear /*= 0.01f*/, float zFar /*= 10000.0f*/, bool rightHandedProjection /*= true*/ )
|
nuclear@0
|
511 {
|
nuclear@0
|
512 DirtyFlag = true;
|
nuclear@0
|
513 ZNear = zNear;
|
nuclear@0
|
514 ZFar = zFar;
|
nuclear@0
|
515 RightHandedProjection = rightHandedProjection;
|
nuclear@0
|
516 }
|
nuclear@0
|
517
|
nuclear@0
|
518 void StereoConfig::SetExtraEyeRotation ( float extraEyeRotationInRadians )
|
nuclear@0
|
519 {
|
nuclear@0
|
520 DirtyFlag = true;
|
nuclear@0
|
521 ExtraEyeRotationInRadians = extraEyeRotationInRadians;
|
nuclear@0
|
522 }
|
nuclear@0
|
523
|
nuclear@0
|
// Member convenience wrapper: forwards to the free function of the same name
// using this config's stored HMD description.
Sizei StereoConfig::CalculateRecommendedTextureSize ( bool rendertargetSharedByBothEyes,
                                                      float pixelDensityInCenter /*= 1.0f*/ )
{
    return Render::CalculateRecommendedTextureSize ( Hmd, rendertargetSharedByBothEyes, pixelDensityInCenter );
}
|
nuclear@0
|
529
|
nuclear@0
|
530
|
nuclear@0
|
531
|
nuclear@0
|
532 void StereoConfig::UpdateComputedState()
|
nuclear@0
|
533 {
|
nuclear@0
|
534 int numEyes = 2;
|
nuclear@0
|
535 StereoEye eyeTypes[2];
|
nuclear@0
|
536
|
nuclear@0
|
537 switch ( Mode )
|
nuclear@0
|
538 {
|
nuclear@0
|
539 case Stereo_None:
|
nuclear@0
|
540 numEyes = 1;
|
nuclear@0
|
541 eyeTypes[0] = StereoEye_Center;
|
nuclear@0
|
542 break;
|
nuclear@0
|
543
|
nuclear@0
|
544 case Stereo_LeftRight_Multipass:
|
nuclear@0
|
545 numEyes = 2;
|
nuclear@0
|
546 eyeTypes[0] = StereoEye_Left;
|
nuclear@0
|
547 eyeTypes[1] = StereoEye_Right;
|
nuclear@0
|
548 break;
|
nuclear@0
|
549
|
nuclear@0
|
550 default:
|
nuclear@0
|
551 numEyes = 0;
|
nuclear@0
|
552 OVR_ASSERT( false );
|
nuclear@0
|
553 break;
|
nuclear@0
|
554 }
|
nuclear@0
|
555
|
nuclear@0
|
556 // If either of these fire, you've probably forgotten to call SetRendertargetSize()
|
nuclear@0
|
557 OVR_ASSERT ( RendertargetSize.w > 0 );
|
nuclear@0
|
558 OVR_ASSERT ( RendertargetSize.h > 0 );
|
nuclear@0
|
559
|
nuclear@0
|
560 for ( int eyeNum = 0; eyeNum < numEyes; eyeNum++ )
|
nuclear@0
|
561 {
|
nuclear@0
|
562 StereoEye eyeType = eyeTypes[eyeNum];
|
nuclear@0
|
563 LensConfig *pLensOverride = NULL;
|
nuclear@0
|
564 if ( OverrideLens )
|
nuclear@0
|
565 {
|
nuclear@0
|
566 if ( eyeType == StereoEye_Right )
|
nuclear@0
|
567 {
|
nuclear@0
|
568 pLensOverride = &LensOverrideRight;
|
nuclear@0
|
569 }
|
nuclear@0
|
570 else
|
nuclear@0
|
571 {
|
nuclear@0
|
572 pLensOverride = &LensOverrideLeft;
|
nuclear@0
|
573 }
|
nuclear@0
|
574 }
|
nuclear@0
|
575
|
nuclear@0
|
576 FovPort *pTanHalfFovOverride = NULL;
|
nuclear@0
|
577 if ( OverrideTanHalfFov )
|
nuclear@0
|
578 {
|
nuclear@0
|
579 if ( eyeType == StereoEye_Right )
|
nuclear@0
|
580 {
|
nuclear@0
|
581 pTanHalfFovOverride = &FovOverrideRight;
|
nuclear@0
|
582 }
|
nuclear@0
|
583 else
|
nuclear@0
|
584 {
|
nuclear@0
|
585 pTanHalfFovOverride = &FovOverrideLeft;
|
nuclear@0
|
586 }
|
nuclear@0
|
587 }
|
nuclear@0
|
588
|
nuclear@0
|
589 DistortionAndFov distortionAndFov =
|
nuclear@0
|
590 CalculateDistortionAndFovInternal ( eyeType, Hmd,
|
nuclear@0
|
591 pLensOverride, pTanHalfFovOverride,
|
nuclear@0
|
592 ExtraEyeRotationInRadians );
|
nuclear@0
|
593
|
nuclear@0
|
594 EyeRenderParams[eyeNum].StereoEye.Distortion = distortionAndFov.Distortion;
|
nuclear@0
|
595 EyeRenderParams[eyeNum].StereoEye.Fov = distortionAndFov.Fov;
|
nuclear@0
|
596 }
|
nuclear@0
|
597
|
nuclear@0
|
598 if ( OverrideZeroIpd )
|
nuclear@0
|
599 {
|
nuclear@0
|
600 // Take the union of the calculated eye FOVs.
|
nuclear@0
|
601 FovPort fov;
|
nuclear@0
|
602 fov.UpTan = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.UpTan , EyeRenderParams[1].StereoEye.Fov.UpTan );
|
nuclear@0
|
603 fov.DownTan = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.DownTan , EyeRenderParams[1].StereoEye.Fov.DownTan );
|
nuclear@0
|
604 fov.LeftTan = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.LeftTan , EyeRenderParams[1].StereoEye.Fov.LeftTan );
|
nuclear@0
|
605 fov.RightTan = Alg::Max ( EyeRenderParams[0].StereoEye.Fov.RightTan, EyeRenderParams[1].StereoEye.Fov.RightTan );
|
nuclear@0
|
606 EyeRenderParams[0].StereoEye.Fov = fov;
|
nuclear@0
|
607 EyeRenderParams[1].StereoEye.Fov = fov;
|
nuclear@0
|
608 }
|
nuclear@0
|
609
|
nuclear@0
|
610 for ( int eyeNum = 0; eyeNum < numEyes; eyeNum++ )
|
nuclear@0
|
611 {
|
nuclear@0
|
612 StereoEye eyeType = eyeTypes[eyeNum];
|
nuclear@0
|
613
|
nuclear@0
|
614 DistortionRenderDesc localDistortion = EyeRenderParams[eyeNum].StereoEye.Distortion;
|
nuclear@0
|
615 FovPort fov = EyeRenderParams[eyeNum].StereoEye.Fov;
|
nuclear@0
|
616
|
nuclear@0
|
617 // Use a placeholder - will be overridden later.
|
nuclear@0
|
618 Recti tempViewport = Recti ( 0, 0, 1, 1 );
|
nuclear@0
|
619
|
nuclear@0
|
620 EyeRenderParams[eyeNum].StereoEye = CalculateStereoEyeParamsInternal (
|
nuclear@0
|
621 eyeType, Hmd, localDistortion, fov,
|
nuclear@0
|
622 RendertargetSize, tempViewport,
|
nuclear@0
|
623 RightHandedProjection, ZNear, ZFar,
|
nuclear@0
|
624 OverrideZeroIpd );
|
nuclear@0
|
625
|
nuclear@0
|
626 // We want to create a virtual 2D surface we can draw debug text messages to.
|
nuclear@0
|
627 // We'd like it to be a fixed distance (OrthoDistance) away,
|
nuclear@0
|
628 // and to cover a specific FOV (Area2DFov). We need to find the projection matrix for this,
|
nuclear@0
|
629 // and also to know how large it is in pixels to achieve a 1:1 mapping at the center of the screen.
|
nuclear@0
|
630 float orthoDistance = 0.8f;
|
nuclear@0
|
631 float orthoHalfFov = tanf ( Area2DFov * 0.5f );
|
nuclear@0
|
632 Vector2f unityOrthoPixelSize = localDistortion.PixelsPerTanAngleAtCenter * ( orthoHalfFov * 2.0f );
|
nuclear@0
|
633 float localInterpupillaryDistance = Hmd.EyeLeft.NoseToPupilInMeters + Hmd.EyeRight.NoseToPupilInMeters;
|
nuclear@0
|
634 if ( OverrideZeroIpd )
|
nuclear@0
|
635 {
|
nuclear@0
|
636 localInterpupillaryDistance = 0.0f;
|
nuclear@0
|
637 }
|
nuclear@0
|
638 Matrix4f ortho = CreateOrthoSubProjection ( true, eyeType,
|
nuclear@0
|
639 orthoHalfFov, orthoHalfFov,
|
nuclear@0
|
640 unityOrthoPixelSize.x, unityOrthoPixelSize.y,
|
nuclear@0
|
641 orthoDistance, localInterpupillaryDistance,
|
nuclear@0
|
642 EyeRenderParams[eyeNum].StereoEye.RenderedProjection );
|
nuclear@0
|
643 EyeRenderParams[eyeNum].OrthoProjection = ortho;
|
nuclear@0
|
644 }
|
nuclear@0
|
645
|
nuclear@0
|
646 // ...and now set up the viewport, scale & offset the way the app wanted.
|
nuclear@0
|
647 setupViewportScaleAndOffsets();
|
nuclear@0
|
648
|
nuclear@0
|
649 if ( OverrideZeroIpd )
|
nuclear@0
|
650 {
|
nuclear@0
|
651 // Monocular rendering has some fragile parts... don't break any by accident.
|
nuclear@0
|
652 OVR_ASSERT ( EyeRenderParams[0].StereoEye.Fov.UpTan == EyeRenderParams[1].StereoEye.Fov.UpTan );
|
nuclear@0
|
653 OVR_ASSERT ( EyeRenderParams[0].StereoEye.Fov.DownTan == EyeRenderParams[1].StereoEye.Fov.DownTan );
|
nuclear@0
|
654 OVR_ASSERT ( EyeRenderParams[0].StereoEye.Fov.LeftTan == EyeRenderParams[1].StereoEye.Fov.LeftTan );
|
nuclear@0
|
655 OVR_ASSERT ( EyeRenderParams[0].StereoEye.Fov.RightTan == EyeRenderParams[1].StereoEye.Fov.RightTan );
|
nuclear@0
|
656 OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[0][0] == EyeRenderParams[1].StereoEye.RenderedProjection.M[0][0] );
|
nuclear@0
|
657 OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[1][1] == EyeRenderParams[1].StereoEye.RenderedProjection.M[1][1] );
|
nuclear@0
|
658 OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[0][2] == EyeRenderParams[1].StereoEye.RenderedProjection.M[0][2] );
|
nuclear@0
|
659 OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedProjection.M[1][2] == EyeRenderParams[1].StereoEye.RenderedProjection.M[1][2] );
|
nuclear@0
|
660 OVR_ASSERT ( EyeRenderParams[0].StereoEye.RenderedViewport == EyeRenderParams[1].StereoEye.RenderedViewport );
|
nuclear@0
|
661 OVR_ASSERT ( EyeRenderParams[0].StereoEye.EyeToSourceUV.Offset == EyeRenderParams[1].StereoEye.EyeToSourceUV.Offset );
|
nuclear@0
|
662 OVR_ASSERT ( EyeRenderParams[0].StereoEye.EyeToSourceUV.Scale == EyeRenderParams[1].StereoEye.EyeToSourceUV.Scale );
|
nuclear@0
|
663 OVR_ASSERT ( EyeRenderParams[0].StereoEye.EyeToSourceNDC.Offset == EyeRenderParams[1].StereoEye.EyeToSourceNDC.Offset );
|
nuclear@0
|
664 OVR_ASSERT ( EyeRenderParams[0].StereoEye.EyeToSourceNDC.Scale == EyeRenderParams[1].StereoEye.EyeToSourceNDC.Scale );
|
nuclear@0
|
665 OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[0][0] == EyeRenderParams[1].OrthoProjection.M[0][0] );
|
nuclear@0
|
666 OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[1][1] == EyeRenderParams[1].OrthoProjection.M[1][1] );
|
nuclear@0
|
667 OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[0][2] == EyeRenderParams[1].OrthoProjection.M[0][2] );
|
nuclear@0
|
668 OVR_ASSERT ( EyeRenderParams[0].OrthoProjection.M[1][2] == EyeRenderParams[1].OrthoProjection.M[1][2] );
|
nuclear@0
|
669 }
|
nuclear@0
|
670
|
nuclear@0
|
671 DirtyFlag = false;
|
nuclear@0
|
672 }
|
nuclear@0
|
673
|
nuclear@0
|
674
|
nuclear@0
|
675
|
nuclear@0
|
676 ViewportScaleAndOffsetBothEyes StereoConfig::setupViewportScaleAndOffsets()
|
nuclear@0
|
677 {
|
nuclear@0
|
678 for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
|
nuclear@0
|
679 {
|
nuclear@0
|
680 StereoEye eyeType = ( eyeNum == 0 ) ? StereoEye_Left : StereoEye_Right;
|
nuclear@0
|
681
|
nuclear@0
|
682 DistortionRenderDesc localDistortion = EyeRenderParams[eyeNum].StereoEye.Distortion;
|
nuclear@0
|
683 FovPort fov = EyeRenderParams[eyeNum].StereoEye.Fov;
|
nuclear@0
|
684
|
nuclear@0
|
685 Recti renderedViewport;
|
nuclear@0
|
686 switch ( SetViewportMode )
|
nuclear@0
|
687 {
|
nuclear@0
|
688 case SVPM_Density:
|
nuclear@0
|
689 renderedViewport = CalculateViewportDensityInternal (
|
nuclear@0
|
690 eyeType, localDistortion, fov,
|
nuclear@0
|
691 RendertargetSize, IsRendertargetSharedByBothEyes,
|
nuclear@0
|
692 SetViewportPixelsPerDisplayPixel, OverrideZeroIpd );
|
nuclear@0
|
693 break;
|
nuclear@0
|
694 case SVPM_Size:
|
nuclear@0
|
695 if ( ( eyeType == StereoEye_Right ) && !OverrideZeroIpd )
|
nuclear@0
|
696 {
|
nuclear@0
|
697 renderedViewport = CalculateViewportInternal (
|
nuclear@0
|
698 eyeType, RendertargetSize,
|
nuclear@0
|
699 SetViewportSize[1],
|
nuclear@0
|
700 IsRendertargetSharedByBothEyes, OverrideZeroIpd );
|
nuclear@0
|
701 }
|
nuclear@0
|
702 else
|
nuclear@0
|
703 {
|
nuclear@0
|
704 renderedViewport = CalculateViewportInternal (
|
nuclear@0
|
705 eyeType, RendertargetSize,
|
nuclear@0
|
706 SetViewportSize[0],
|
nuclear@0
|
707 IsRendertargetSharedByBothEyes, OverrideZeroIpd );
|
nuclear@0
|
708 }
|
nuclear@0
|
709 break;
|
nuclear@0
|
710 case SVPM_Viewport:
|
nuclear@0
|
711 if ( ( eyeType == StereoEye_Right ) && !OverrideZeroIpd )
|
nuclear@0
|
712 {
|
nuclear@0
|
713 renderedViewport = SetViewport[1];
|
nuclear@0
|
714 }
|
nuclear@0
|
715 else
|
nuclear@0
|
716 {
|
nuclear@0
|
717 renderedViewport = SetViewport[0];
|
nuclear@0
|
718 }
|
nuclear@0
|
719 break;
|
nuclear@0
|
720 default: OVR_ASSERT ( false ); break;
|
nuclear@0
|
721 }
|
nuclear@0
|
722
|
nuclear@0
|
723 ViewportScaleAndOffset vpsao = CalculateViewportScaleAndOffsetInternal (
|
nuclear@0
|
724 EyeRenderParams[eyeNum].StereoEye.EyeToSourceNDC,
|
nuclear@0
|
725 renderedViewport,
|
nuclear@0
|
726 RendertargetSize );
|
nuclear@0
|
727 EyeRenderParams[eyeNum].StereoEye.RenderedViewport = vpsao.RenderedViewport;
|
nuclear@0
|
728 EyeRenderParams[eyeNum].StereoEye.EyeToSourceUV = vpsao.EyeToSourceUV;
|
nuclear@0
|
729 }
|
nuclear@0
|
730
|
nuclear@0
|
731 ViewportScaleAndOffsetBothEyes result;
|
nuclear@0
|
732 result.Left.EyeToSourceUV = EyeRenderParams[0].StereoEye.EyeToSourceUV;
|
nuclear@0
|
733 result.Left.RenderedViewport = EyeRenderParams[0].StereoEye.RenderedViewport;
|
nuclear@0
|
734 result.Right.EyeToSourceUV = EyeRenderParams[1].StereoEye.EyeToSourceUV;
|
nuclear@0
|
735 result.Right.RenderedViewport = EyeRenderParams[1].StereoEye.RenderedViewport;
|
nuclear@0
|
736 return result;
|
nuclear@0
|
737 }
|
nuclear@0
|
738
|
nuclear@0
|
739 // Specify a pixel density - how many rendered pixels per pixel in the physical display.
|
nuclear@0
|
740 ViewportScaleAndOffsetBothEyes StereoConfig::SetRenderDensity ( float pixelsPerDisplayPixel )
|
nuclear@0
|
741 {
|
nuclear@0
|
742 SetViewportMode = SVPM_Density;
|
nuclear@0
|
743 SetViewportPixelsPerDisplayPixel = pixelsPerDisplayPixel;
|
nuclear@0
|
744 return setupViewportScaleAndOffsets();
|
nuclear@0
|
745 }
|
nuclear@0
|
746
|
nuclear@0
|
747 // Supply the size directly. Will be clamped to the physical rendertarget size.
|
nuclear@0
|
748 ViewportScaleAndOffsetBothEyes StereoConfig::SetRenderSize ( Sizei const &renderSizeLeft, Sizei const &renderSizeRight )
|
nuclear@0
|
749 {
|
nuclear@0
|
750 SetViewportMode = SVPM_Size;
|
nuclear@0
|
751 SetViewportSize[0] = renderSizeLeft;
|
nuclear@0
|
752 SetViewportSize[1] = renderSizeRight;
|
nuclear@0
|
753 return setupViewportScaleAndOffsets();
|
nuclear@0
|
754 }
|
nuclear@0
|
755
|
nuclear@0
|
756 // Supply the viewport directly. This is not clamped to the physical rendertarget - careful now!
|
nuclear@0
|
757 ViewportScaleAndOffsetBothEyes StereoConfig::SetRenderViewport ( Recti const &renderViewportLeft, Recti const &renderViewportRight )
|
nuclear@0
|
758 {
|
nuclear@0
|
759 SetViewportMode = SVPM_Viewport;
|
nuclear@0
|
760 SetViewport[0] = renderViewportLeft;
|
nuclear@0
|
761 SetViewport[1] = renderViewportRight;
|
nuclear@0
|
762 return setupViewportScaleAndOffsets();
|
nuclear@0
|
763 }
|
nuclear@0
|
764
|
nuclear@0
|
765 Matrix4f StereoConfig::GetProjectionWithZoom ( StereoEye eye, float fovZoom ) const
|
nuclear@0
|
766 {
|
nuclear@0
|
767 int eyeNum = ( eye == StereoEye_Right ) ? 1 : 0;
|
nuclear@0
|
768 float fovScale = 1.0f / fovZoom;
|
nuclear@0
|
769 FovPort fovPort = EyeRenderParams[eyeNum].StereoEye.Fov;
|
nuclear@0
|
770 fovPort.LeftTan *= fovScale;
|
nuclear@0
|
771 fovPort.RightTan *= fovScale;
|
nuclear@0
|
772 fovPort.UpTan *= fovScale;
|
nuclear@0
|
773 fovPort.DownTan *= fovScale;
|
nuclear@0
|
774 return CreateProjection ( RightHandedProjection, fovPort, ZNear, ZFar );
|
nuclear@0
|
775 }
|
nuclear@0
|
776
|
nuclear@0
|
777
|
nuclear@0
|
778
|
nuclear@0
|
779
|
nuclear@0
|
780 //-----------------------------------------------------------------------------------
|
nuclear@0
|
781 // ***** Distortion Mesh Rendering
|
nuclear@0
|
782
|
nuclear@0
|
783
|
nuclear@0
|
// Pow2 for the Morton order to work!
// 4 is too low - it is easy to see the "wobbles" in the HMD.
// 5 is realllly close but you can see pixel differences with even/odd frame checking.
// 6 is indistinguishable on a monitor on even/odd frames.
static const int DMA_GridSizeLog2 = 6;
static const int DMA_GridSize = 1<<DMA_GridSizeLog2;                      // 64 quads along each edge of the distortion mesh.
static const int DMA_NumVertsPerEye = (DMA_GridSize+1)*(DMA_GridSize+1);  // 65*65 = 4225 vertices, well within uint16_t index range.
static const int DMA_NumTrisPerEye = (DMA_GridSize)*(DMA_GridSize)*2;     // Two triangles per grid quad.
|
nuclear@0
|
792
|
nuclear@0
|
793
|
nuclear@0
|
794
|
nuclear@0
|
795 DistortionMeshVertexData DistortionMeshMakeVertex ( Vector2f screenNDC,
|
nuclear@0
|
796 bool rightEye,
|
nuclear@0
|
797 const HmdRenderInfo &hmdRenderInfo,
|
nuclear@0
|
798 const DistortionRenderDesc &distortion, const ScaleAndOffset2D &eyeToSourceNDC )
|
nuclear@0
|
799 {
|
nuclear@0
|
800 DistortionMeshVertexData result;
|
nuclear@0
|
801
|
nuclear@0
|
802 float xOffset = 0.0f;
|
nuclear@0
|
803 if (rightEye)
|
nuclear@0
|
804 {
|
nuclear@0
|
805 xOffset = 1.0f;
|
nuclear@0
|
806 }
|
nuclear@0
|
807
|
nuclear@0
|
808 Vector2f tanEyeAnglesR, tanEyeAnglesG, tanEyeAnglesB;
|
nuclear@0
|
809 TransformScreenNDCToTanFovSpaceChroma ( &tanEyeAnglesR, &tanEyeAnglesG, &tanEyeAnglesB,
|
nuclear@0
|
810 distortion, screenNDC );
|
nuclear@0
|
811
|
nuclear@0
|
812 result.TanEyeAnglesR = tanEyeAnglesR;
|
nuclear@0
|
813 result.TanEyeAnglesG = tanEyeAnglesG;
|
nuclear@0
|
814 result.TanEyeAnglesB = tanEyeAnglesB;
|
nuclear@0
|
815
|
nuclear@0
|
816 HmdShutterTypeEnum shutterType = hmdRenderInfo.Shutter.Type;
|
nuclear@0
|
817 switch ( shutterType )
|
nuclear@0
|
818 {
|
nuclear@0
|
819 case HmdShutter_Global:
|
nuclear@0
|
820 result.TimewarpLerp = 0.0f;
|
nuclear@0
|
821 break;
|
nuclear@0
|
822 case HmdShutter_RollingLeftToRight:
|
nuclear@0
|
823 // Retrace is left to right - left eye goes 0.0 -> 0.5, then right goes 0.5 -> 1.0
|
nuclear@0
|
824 result.TimewarpLerp = screenNDC.x * 0.25f + 0.25f;
|
nuclear@0
|
825 if (rightEye)
|
nuclear@0
|
826 {
|
nuclear@0
|
827 result.TimewarpLerp += 0.5f;
|
nuclear@0
|
828 }
|
nuclear@0
|
829 break;
|
nuclear@0
|
830 case HmdShutter_RollingRightToLeft:
|
nuclear@0
|
831 // Retrace is right to left - right eye goes 0.0 -> 0.5, then left goes 0.5 -> 1.0
|
nuclear@0
|
832 result.TimewarpLerp = 0.75f - screenNDC.x * 0.25f;
|
nuclear@0
|
833 if (rightEye)
|
nuclear@0
|
834 {
|
nuclear@0
|
835 result.TimewarpLerp -= 0.5f;
|
nuclear@0
|
836 }
|
nuclear@0
|
837 break;
|
nuclear@0
|
838 case HmdShutter_RollingTopToBottom:
|
nuclear@0
|
839 // Retrace is top to bottom on both eyes at the same time.
|
nuclear@0
|
840 result.TimewarpLerp = screenNDC.y * 0.5f + 0.5f;
|
nuclear@0
|
841 break;
|
nuclear@0
|
842 default: OVR_ASSERT ( false ); break;
|
nuclear@0
|
843 }
|
nuclear@0
|
844
|
nuclear@0
|
845 // When does the fade-to-black edge start? Chosen heuristically.
|
nuclear@0
|
846 float fadeOutBorderFractionTexture = 0.1f;
|
nuclear@0
|
847 float fadeOutBorderFractionTextureInnerEdge = 0.1f;
|
nuclear@0
|
848 float fadeOutBorderFractionScreen = 0.1f;
|
nuclear@0
|
849 float fadeOutFloor = 0.6f; // the floor controls how much black is in the fade region
|
nuclear@0
|
850
|
nuclear@0
|
851 if (hmdRenderInfo.HmdType == HmdType_DK1)
|
nuclear@0
|
852 {
|
nuclear@0
|
853 fadeOutBorderFractionTexture = 0.3f;
|
nuclear@0
|
854 fadeOutBorderFractionTextureInnerEdge = 0.075f;
|
nuclear@0
|
855 fadeOutBorderFractionScreen = 0.075f;
|
nuclear@0
|
856 fadeOutFloor = 0.25f;
|
nuclear@0
|
857 }
|
nuclear@0
|
858
|
nuclear@0
|
859 // Fade out at texture edges.
|
nuclear@0
|
860 // The furthest out will be the blue channel, because of chromatic aberration (true of any standard lens)
|
nuclear@0
|
861 Vector2f sourceTexCoordBlueNDC = TransformTanFovSpaceToRendertargetNDC ( eyeToSourceNDC, tanEyeAnglesB );
|
nuclear@0
|
862 if (rightEye)
|
nuclear@0
|
863 {
|
nuclear@0
|
864 // The inner edge of the eye texture is usually much more magnified, because it's right against the middle of the screen, not the FOV edge.
|
nuclear@0
|
865 // So we want a different scaling factor for that. This code flips the texture NDC so that +1.0 is the inner edge
|
nuclear@0
|
866 sourceTexCoordBlueNDC.x = -sourceTexCoordBlueNDC.x;
|
nuclear@0
|
867 }
|
nuclear@0
|
868 float edgeFadeIn = ( 1.0f / fadeOutBorderFractionTextureInnerEdge ) * ( 1.0f - sourceTexCoordBlueNDC.x ) ; // Inner
|
nuclear@0
|
869 edgeFadeIn = Alg::Min ( edgeFadeIn, ( 1.0f / fadeOutBorderFractionTexture ) * ( 1.0f + sourceTexCoordBlueNDC.x ) ); // Outer
|
nuclear@0
|
870 edgeFadeIn = Alg::Min ( edgeFadeIn, ( 1.0f / fadeOutBorderFractionTexture ) * ( 1.0f - sourceTexCoordBlueNDC.y ) ); // Upper
|
nuclear@0
|
871 edgeFadeIn = Alg::Min ( edgeFadeIn, ( 1.0f / fadeOutBorderFractionTexture ) * ( 1.0f + sourceTexCoordBlueNDC.y ) ); // Lower
|
nuclear@0
|
872
|
nuclear@0
|
873 // Also fade out at screen edges. Since this is in pixel space, no need to do inner specially.
|
nuclear@0
|
874 float edgeFadeInScreen = ( 1.0f / fadeOutBorderFractionScreen ) *
|
nuclear@0
|
875 ( 1.0f - Alg::Max ( Alg::Abs ( screenNDC.x ), Alg::Abs ( screenNDC.y ) ) );
|
nuclear@0
|
876 edgeFadeIn = Alg::Min ( edgeFadeInScreen, edgeFadeIn ) + fadeOutFloor;
|
nuclear@0
|
877
|
nuclear@0
|
878 // Note - this is NOT clamped negatively.
|
nuclear@0
|
879 // For rendering methods that interpolate over a coarse grid, we need the values to go negative for correct intersection with zero.
|
nuclear@0
|
880 result.Shade = Alg::Min ( edgeFadeIn, 1.0f );
|
nuclear@0
|
881 result.ScreenPosNDC.x = 0.5f * screenNDC.x - 0.5f + xOffset;
|
nuclear@0
|
882 result.ScreenPosNDC.y = -screenNDC.y;
|
nuclear@0
|
883
|
nuclear@0
|
884 return result;
|
nuclear@0
|
885 }
|
nuclear@0
|
886
|
nuclear@0
|
887
|
nuclear@0
|
888 void DistortionMeshDestroy ( DistortionMeshVertexData *pVertices, uint16_t *pTriangleMeshIndices )
|
nuclear@0
|
889 {
|
nuclear@0
|
890 OVR_FREE ( pVertices );
|
nuclear@0
|
891 OVR_FREE ( pTriangleMeshIndices );
|
nuclear@0
|
892 }
|
nuclear@0
|
893
|
nuclear@0
|
894 void DistortionMeshCreate ( DistortionMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
|
nuclear@0
|
895 int *pNumVertices, int *pNumTriangles,
|
nuclear@0
|
896 const StereoEyeParams &stereoParams, const HmdRenderInfo &hmdRenderInfo )
|
nuclear@0
|
897 {
|
nuclear@0
|
898 bool rightEye = ( stereoParams.Eye == StereoEye_Right );
|
nuclear@0
|
899 int vertexCount = 0;
|
nuclear@0
|
900 int triangleCount = 0;
|
nuclear@0
|
901
|
nuclear@0
|
902 // Generate mesh into allocated data and return result.
|
nuclear@0
|
903 DistortionMeshCreate(ppVertices, ppTriangleListIndices, &vertexCount, &triangleCount,
|
nuclear@0
|
904 rightEye, hmdRenderInfo, stereoParams.Distortion, stereoParams.EyeToSourceNDC);
|
nuclear@0
|
905
|
nuclear@0
|
906 *pNumVertices = vertexCount;
|
nuclear@0
|
907 *pNumTriangles = triangleCount;
|
nuclear@0
|
908 }
|
nuclear@0
|
909
|
nuclear@0
|
910
|
nuclear@0
|
911 // Generate distortion mesh for a eye.
|
nuclear@0
|
912 void DistortionMeshCreate( DistortionMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
|
nuclear@0
|
913 int *pNumVertices, int *pNumTriangles,
|
nuclear@0
|
914 bool rightEye,
|
nuclear@0
|
915 const HmdRenderInfo &hmdRenderInfo,
|
nuclear@0
|
916 const DistortionRenderDesc &distortion, const ScaleAndOffset2D &eyeToSourceNDC )
|
nuclear@0
|
917 {
|
nuclear@0
|
918 *pNumVertices = DMA_NumVertsPerEye;
|
nuclear@0
|
919 *pNumTriangles = DMA_NumTrisPerEye;
|
nuclear@0
|
920
|
nuclear@0
|
921 *ppVertices = (DistortionMeshVertexData*)
|
nuclear@0
|
922 OVR_ALLOC( sizeof(DistortionMeshVertexData) * (*pNumVertices) );
|
nuclear@0
|
923 *ppTriangleListIndices = (uint16_t*) OVR_ALLOC( sizeof(uint16_t) * (*pNumTriangles) * 3 );
|
nuclear@0
|
924
|
nuclear@0
|
925 if (!*ppVertices || !*ppTriangleListIndices)
|
nuclear@0
|
926 {
|
nuclear@0
|
927 if (*ppVertices)
|
nuclear@0
|
928 {
|
nuclear@0
|
929 OVR_FREE(*ppVertices);
|
nuclear@0
|
930 }
|
nuclear@0
|
931 if (*ppTriangleListIndices)
|
nuclear@0
|
932 {
|
nuclear@0
|
933 OVR_FREE(*ppTriangleListIndices);
|
nuclear@0
|
934 }
|
nuclear@0
|
935 *ppVertices = NULL;
|
nuclear@0
|
936 *ppTriangleListIndices = NULL;
|
nuclear@0
|
937 *pNumTriangles = 0;
|
nuclear@0
|
938 *pNumVertices = 0;
|
nuclear@0
|
939 return;
|
nuclear@0
|
940 }
|
nuclear@0
|
941
|
nuclear@0
|
942
|
nuclear@0
|
943
|
nuclear@0
|
944 // Populate vertex buffer info
|
nuclear@0
|
945
|
nuclear@0
|
946 // First pass - build up raw vertex data.
|
nuclear@0
|
947 DistortionMeshVertexData* pcurVert = *ppVertices;
|
nuclear@0
|
948
|
nuclear@0
|
949 for ( int y = 0; y <= DMA_GridSize; y++ )
|
nuclear@0
|
950 {
|
nuclear@0
|
951 for ( int x = 0; x <= DMA_GridSize; x++ )
|
nuclear@0
|
952 {
|
nuclear@0
|
953
|
nuclear@0
|
954 Vector2f sourceCoordNDC;
|
nuclear@0
|
955 // NDC texture coords [-1,+1]
|
nuclear@0
|
956 sourceCoordNDC.x = 2.0f * ( (float)x / (float)DMA_GridSize ) - 1.0f;
|
nuclear@0
|
957 sourceCoordNDC.y = 2.0f * ( (float)y / (float)DMA_GridSize ) - 1.0f;
|
nuclear@0
|
958 Vector2f tanEyeAngle = TransformRendertargetNDCToTanFovSpace ( eyeToSourceNDC, sourceCoordNDC );
|
nuclear@0
|
959
|
nuclear@0
|
960 // Find a corresponding screen position.
|
nuclear@0
|
961 // Note - this function does not have to be precise - we're just trying to match the mesh tessellation
|
nuclear@0
|
962 // with the shape of the distortion to minimise the number of trianlges needed.
|
nuclear@0
|
963 Vector2f screenNDC = TransformTanFovSpaceToScreenNDC ( distortion, tanEyeAngle, false );
|
nuclear@0
|
964 // ...but don't let verts overlap to the other eye.
|
nuclear@0
|
965 screenNDC.x = Alg::Max ( -1.0f, Alg::Min ( screenNDC.x, 1.0f ) );
|
nuclear@0
|
966 screenNDC.y = Alg::Max ( -1.0f, Alg::Min ( screenNDC.y, 1.0f ) );
|
nuclear@0
|
967
|
nuclear@0
|
968 // From those screen positions, generate the vertex.
|
nuclear@0
|
969 *pcurVert = DistortionMeshMakeVertex ( screenNDC, rightEye, hmdRenderInfo, distortion, eyeToSourceNDC );
|
nuclear@0
|
970 pcurVert++;
|
nuclear@0
|
971 }
|
nuclear@0
|
972 }
|
nuclear@0
|
973
|
nuclear@0
|
974
|
nuclear@0
|
975 // Populate index buffer info
|
nuclear@0
|
976 uint16_t *pcurIndex = *ppTriangleListIndices;
|
nuclear@0
|
977
|
nuclear@0
|
978 for ( int triNum = 0; triNum < DMA_GridSize * DMA_GridSize; triNum++ )
|
nuclear@0
|
979 {
|
nuclear@0
|
980 // Use a Morton order to help locality of FB, texture and vertex cache.
|
nuclear@0
|
981 // (0.325ms raster order -> 0.257ms Morton order)
|
nuclear@0
|
982 OVR_ASSERT ( DMA_GridSize <= 256 );
|
nuclear@0
|
983 int x = ( ( triNum & 0x0001 ) >> 0 ) |
|
nuclear@0
|
984 ( ( triNum & 0x0004 ) >> 1 ) |
|
nuclear@0
|
985 ( ( triNum & 0x0010 ) >> 2 ) |
|
nuclear@0
|
986 ( ( triNum & 0x0040 ) >> 3 ) |
|
nuclear@0
|
987 ( ( triNum & 0x0100 ) >> 4 ) |
|
nuclear@0
|
988 ( ( triNum & 0x0400 ) >> 5 ) |
|
nuclear@0
|
989 ( ( triNum & 0x1000 ) >> 6 ) |
|
nuclear@0
|
990 ( ( triNum & 0x4000 ) >> 7 );
|
nuclear@0
|
991 int y = ( ( triNum & 0x0002 ) >> 1 ) |
|
nuclear@0
|
992 ( ( triNum & 0x0008 ) >> 2 ) |
|
nuclear@0
|
993 ( ( triNum & 0x0020 ) >> 3 ) |
|
nuclear@0
|
994 ( ( triNum & 0x0080 ) >> 4 ) |
|
nuclear@0
|
995 ( ( triNum & 0x0200 ) >> 5 ) |
|
nuclear@0
|
996 ( ( triNum & 0x0800 ) >> 6 ) |
|
nuclear@0
|
997 ( ( triNum & 0x2000 ) >> 7 ) |
|
nuclear@0
|
998 ( ( triNum & 0x8000 ) >> 8 );
|
nuclear@0
|
999 int FirstVertex = x * (DMA_GridSize+1) + y;
|
nuclear@0
|
1000 // Another twist - we want the top-left and bottom-right quadrants to
|
nuclear@0
|
1001 // have the triangles split one way, the other two split the other.
|
nuclear@0
|
1002 // +---+---+---+---+
|
nuclear@0
|
1003 // | /| /|\ |\ |
|
nuclear@0
|
1004 // | / | / | \ | \ |
|
nuclear@0
|
1005 // |/ |/ | \| \|
|
nuclear@0
|
1006 // +---+---+---+---+
|
nuclear@0
|
1007 // | /| /|\ |\ |
|
nuclear@0
|
1008 // | / | / | \ | \ |
|
nuclear@0
|
1009 // |/ |/ | \| \|
|
nuclear@0
|
1010 // +---+---+---+---+
|
nuclear@0
|
1011 // |\ |\ | /| /|
|
nuclear@0
|
1012 // | \ | \ | / | / |
|
nuclear@0
|
1013 // | \| \|/ |/ |
|
nuclear@0
|
1014 // +---+---+---+---+
|
nuclear@0
|
1015 // |\ |\ | /| /|
|
nuclear@0
|
1016 // | \ | \ | / | / |
|
nuclear@0
|
1017 // | \| \|/ |/ |
|
nuclear@0
|
1018 // +---+---+---+---+
|
nuclear@0
|
1019 // This way triangle edges don't span long distances over the distortion function,
|
nuclear@0
|
1020 // so linear interpolation works better & we can use fewer tris.
|
nuclear@0
|
1021 if ( ( x < DMA_GridSize/2 ) != ( y < DMA_GridSize/2 ) ) // != is logical XOR
|
nuclear@0
|
1022 {
|
nuclear@0
|
1023 *pcurIndex++ = (uint16_t)FirstVertex;
|
nuclear@0
|
1024 *pcurIndex++ = (uint16_t)FirstVertex+1;
|
nuclear@0
|
1025 *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1)+1;
|
nuclear@0
|
1026
|
nuclear@0
|
1027 *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1)+1;
|
nuclear@0
|
1028 *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1);
|
nuclear@0
|
1029 *pcurIndex++ = (uint16_t)FirstVertex;
|
nuclear@0
|
1030 }
|
nuclear@0
|
1031 else
|
nuclear@0
|
1032 {
|
nuclear@0
|
1033 *pcurIndex++ = (uint16_t)FirstVertex;
|
nuclear@0
|
1034 *pcurIndex++ = (uint16_t)FirstVertex+1;
|
nuclear@0
|
1035 *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1);
|
nuclear@0
|
1036
|
nuclear@0
|
1037 *pcurIndex++ = (uint16_t)FirstVertex+1;
|
nuclear@0
|
1038 *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1)+1;
|
nuclear@0
|
1039 *pcurIndex++ = (uint16_t)FirstVertex+(DMA_GridSize+1);
|
nuclear@0
|
1040 }
|
nuclear@0
|
1041 }
|
nuclear@0
|
1042 }
|
nuclear@0
|
1043
|
nuclear@0
|
1044 //-----------------------------------------------------------------------------------
|
nuclear@0
|
1045 // ***** Heightmap Mesh Rendering
|
nuclear@0
|
1046
|
nuclear@0
|
1047
|
nuclear@0
|
// Heightmap mesh resolution. Pow2 so the Morton-order index walk below works.
static const int HMA_GridSizeLog2 = 7;
static const int HMA_GridSize = 1<<HMA_GridSizeLog2;                      // 128 quads along each edge.
static const int HMA_NumVertsPerEye = (HMA_GridSize+1)*(HMA_GridSize+1);  // 129*129 = 16641 vertices, within uint16_t index range.
static const int HMA_NumTrisPerEye = (HMA_GridSize)*(HMA_GridSize)*2;     // Two triangles per grid quad.
|
nuclear@0
|
1052
|
nuclear@0
|
1053
|
nuclear@0
|
1054 void HeightmapMeshDestroy ( HeightmapMeshVertexData *pVertices, uint16_t *pTriangleMeshIndices )
|
nuclear@0
|
1055 {
|
nuclear@0
|
1056 OVR_FREE ( pVertices );
|
nuclear@0
|
1057 OVR_FREE ( pTriangleMeshIndices );
|
nuclear@0
|
1058 }
|
nuclear@0
|
1059
|
nuclear@0
|
1060 void HeightmapMeshCreate ( HeightmapMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
|
nuclear@0
|
1061 int *pNumVertices, int *pNumTriangles,
|
nuclear@0
|
1062 const StereoEyeParams &stereoParams, const HmdRenderInfo &hmdRenderInfo )
|
nuclear@0
|
1063 {
|
nuclear@0
|
1064 bool rightEye = ( stereoParams.Eye == StereoEye_Right );
|
nuclear@0
|
1065 int vertexCount = 0;
|
nuclear@0
|
1066 int triangleCount = 0;
|
nuclear@0
|
1067
|
nuclear@0
|
1068 // Generate mesh into allocated data and return result.
|
nuclear@0
|
1069 HeightmapMeshCreate(ppVertices, ppTriangleListIndices, &vertexCount, &triangleCount,
|
nuclear@0
|
1070 rightEye, hmdRenderInfo, stereoParams.EyeToSourceNDC);
|
nuclear@0
|
1071
|
nuclear@0
|
1072 *pNumVertices = vertexCount;
|
nuclear@0
|
1073 *pNumTriangles = triangleCount;
|
nuclear@0
|
1074 }
|
nuclear@0
|
1075
|
nuclear@0
|
1076
|
nuclear@0
|
1077 // Generate heightmap mesh for one eye.
|
nuclear@0
|
1078 void HeightmapMeshCreate( HeightmapMeshVertexData **ppVertices, uint16_t **ppTriangleListIndices,
|
nuclear@0
|
1079 int *pNumVertices, int *pNumTriangles, bool rightEye,
|
nuclear@0
|
1080 const HmdRenderInfo &hmdRenderInfo,
|
nuclear@0
|
1081 const ScaleAndOffset2D &eyeToSourceNDC )
|
nuclear@0
|
1082 {
|
nuclear@0
|
1083 *pNumVertices = HMA_NumVertsPerEye;
|
nuclear@0
|
1084 *pNumTriangles = HMA_NumTrisPerEye;
|
nuclear@0
|
1085
|
nuclear@0
|
1086 *ppVertices = (HeightmapMeshVertexData*) OVR_ALLOC( sizeof(HeightmapMeshVertexData) * (*pNumVertices) );
|
nuclear@0
|
1087 *ppTriangleListIndices = (uint16_t*) OVR_ALLOC( sizeof(uint16_t) * (*pNumTriangles) * 3 );
|
nuclear@0
|
1088
|
nuclear@0
|
1089 if (!*ppVertices || !*ppTriangleListIndices)
|
nuclear@0
|
1090 {
|
nuclear@0
|
1091 if (*ppVertices)
|
nuclear@0
|
1092 {
|
nuclear@0
|
1093 OVR_FREE(*ppVertices);
|
nuclear@0
|
1094 }
|
nuclear@0
|
1095 if (*ppTriangleListIndices)
|
nuclear@0
|
1096 {
|
nuclear@0
|
1097 OVR_FREE(*ppTriangleListIndices);
|
nuclear@0
|
1098 }
|
nuclear@0
|
1099 *ppVertices = NULL;
|
nuclear@0
|
1100 *ppTriangleListIndices = NULL;
|
nuclear@0
|
1101 *pNumTriangles = 0;
|
nuclear@0
|
1102 *pNumVertices = 0;
|
nuclear@0
|
1103 return;
|
nuclear@0
|
1104 }
|
nuclear@0
|
1105
|
nuclear@0
|
1106 // Populate vertex buffer info
|
nuclear@0
|
1107 // float xOffset = (rightEye ? 1.0f : 0.0f); Currently disabled because its usage is disabled below.
|
nuclear@0
|
1108
|
nuclear@0
|
1109 // First pass - build up raw vertex data.
|
nuclear@0
|
1110 HeightmapMeshVertexData* pcurVert = *ppVertices;
|
nuclear@0
|
1111
|
nuclear@0
|
1112 for ( int y = 0; y <= HMA_GridSize; y++ )
|
nuclear@0
|
1113 {
|
nuclear@0
|
1114 for ( int x = 0; x <= HMA_GridSize; x++ )
|
nuclear@0
|
1115 {
|
nuclear@0
|
1116 Vector2f sourceCoordNDC;
|
nuclear@0
|
1117 // NDC texture coords [-1,+1]
|
nuclear@0
|
1118 sourceCoordNDC.x = 2.0f * ( (float)x / (float)HMA_GridSize ) - 1.0f;
|
nuclear@0
|
1119 sourceCoordNDC.y = 2.0f * ( (float)y / (float)HMA_GridSize ) - 1.0f;
|
nuclear@0
|
1120 Vector2f tanEyeAngle = TransformRendertargetNDCToTanFovSpace ( eyeToSourceNDC, sourceCoordNDC );
|
nuclear@0
|
1121
|
nuclear@0
|
1122 pcurVert->TanEyeAngles = tanEyeAngle;
|
nuclear@0
|
1123
|
nuclear@0
|
1124 HmdShutterTypeEnum shutterType = hmdRenderInfo.Shutter.Type;
|
nuclear@0
|
1125 switch ( shutterType )
|
nuclear@0
|
1126 {
|
nuclear@0
|
1127 case HmdShutter_Global:
|
nuclear@0
|
1128 pcurVert->TimewarpLerp = 0.0f;
|
nuclear@0
|
1129 break;
|
nuclear@0
|
1130 case HmdShutter_RollingLeftToRight:
|
nuclear@0
|
1131 // Retrace is left to right - left eye goes 0.0 -> 0.5, then right goes 0.5 -> 1.0
|
nuclear@0
|
1132 pcurVert->TimewarpLerp = sourceCoordNDC.x * 0.25f + 0.25f;
|
nuclear@0
|
1133 if (rightEye)
|
nuclear@0
|
1134 {
|
nuclear@0
|
1135 pcurVert->TimewarpLerp += 0.5f;
|
nuclear@0
|
1136 }
|
nuclear@0
|
1137 break;
|
nuclear@0
|
1138 case HmdShutter_RollingRightToLeft:
|
nuclear@0
|
1139 // Retrace is right to left - right eye goes 0.0 -> 0.5, then left goes 0.5 -> 1.0
|
nuclear@0
|
1140 pcurVert->TimewarpLerp = 0.75f - sourceCoordNDC.x * 0.25f;
|
nuclear@0
|
1141 if (rightEye)
|
nuclear@0
|
1142 {
|
nuclear@0
|
1143 pcurVert->TimewarpLerp -= 0.5f;
|
nuclear@0
|
1144 }
|
nuclear@0
|
1145 break;
|
nuclear@0
|
1146 case HmdShutter_RollingTopToBottom:
|
nuclear@0
|
1147 // Retrace is top to bottom on both eyes at the same time.
|
nuclear@0
|
1148 pcurVert->TimewarpLerp = sourceCoordNDC.y * 0.5f + 0.5f;
|
nuclear@0
|
1149 break;
|
nuclear@0
|
1150 default: OVR_ASSERT ( false ); break;
|
nuclear@0
|
1151 }
|
nuclear@0
|
1152
|
nuclear@0
|
1153 // Don't let verts overlap to the other eye.
|
nuclear@0
|
1154 //sourceCoordNDC.x = Alg::Max ( -1.0f, Alg::Min ( sourceCoordNDC.x, 1.0f ) );
|
nuclear@0
|
1155 //sourceCoordNDC.y = Alg::Max ( -1.0f, Alg::Min ( sourceCoordNDC.y, 1.0f ) );
|
nuclear@0
|
1156
|
nuclear@0
|
1157 //pcurVert->ScreenPosNDC.x = 0.5f * sourceCoordNDC.x - 0.5f + xOffset;
|
nuclear@0
|
1158 pcurVert->ScreenPosNDC.x = sourceCoordNDC.x;
|
nuclear@0
|
1159 pcurVert->ScreenPosNDC.y = -sourceCoordNDC.y;
|
nuclear@0
|
1160
|
nuclear@0
|
1161 pcurVert++;
|
nuclear@0
|
1162 }
|
nuclear@0
|
1163 }
|
nuclear@0
|
1164
|
nuclear@0
|
1165
|
nuclear@0
|
1166 // Populate index buffer info
|
nuclear@0
|
1167 uint16_t *pcurIndex = *ppTriangleListIndices;
|
nuclear@0
|
1168
|
nuclear@0
|
1169 for ( int triNum = 0; triNum < HMA_GridSize * HMA_GridSize; triNum++ )
|
nuclear@0
|
1170 {
|
nuclear@0
|
1171 // Use a Morton order to help locality of FB, texture and vertex cache.
|
nuclear@0
|
1172 // (0.325ms raster order -> 0.257ms Morton order)
|
nuclear@0
|
1173 OVR_ASSERT ( HMA_GridSize < 256 );
|
nuclear@0
|
1174 int x = ( ( triNum & 0x0001 ) >> 0 ) |
|
nuclear@0
|
1175 ( ( triNum & 0x0004 ) >> 1 ) |
|
nuclear@0
|
1176 ( ( triNum & 0x0010 ) >> 2 ) |
|
nuclear@0
|
1177 ( ( triNum & 0x0040 ) >> 3 ) |
|
nuclear@0
|
1178 ( ( triNum & 0x0100 ) >> 4 ) |
|
nuclear@0
|
1179 ( ( triNum & 0x0400 ) >> 5 ) |
|
nuclear@0
|
1180 ( ( triNum & 0x1000 ) >> 6 ) |
|
nuclear@0
|
1181 ( ( triNum & 0x4000 ) >> 7 );
|
nuclear@0
|
1182 int y = ( ( triNum & 0x0002 ) >> 1 ) |
|
nuclear@0
|
1183 ( ( triNum & 0x0008 ) >> 2 ) |
|
nuclear@0
|
1184 ( ( triNum & 0x0020 ) >> 3 ) |
|
nuclear@0
|
1185 ( ( triNum & 0x0080 ) >> 4 ) |
|
nuclear@0
|
1186 ( ( triNum & 0x0200 ) >> 5 ) |
|
nuclear@0
|
1187 ( ( triNum & 0x0800 ) >> 6 ) |
|
nuclear@0
|
1188 ( ( triNum & 0x2000 ) >> 7 ) |
|
nuclear@0
|
1189 ( ( triNum & 0x8000 ) >> 8 );
|
nuclear@0
|
1190 int FirstVertex = x * (HMA_GridSize+1) + y;
|
nuclear@0
|
1191 // Another twist - we want the top-left and bottom-right quadrants to
|
nuclear@0
|
1192 // have the triangles split one way, the other two split the other.
|
nuclear@0
|
1193 // +---+---+---+---+
|
nuclear@0
|
1194 // | /| /|\ |\ |
|
nuclear@0
|
1195 // | / | / | \ | \ |
|
nuclear@0
|
1196 // |/ |/ | \| \|
|
nuclear@0
|
1197 // +---+---+---+---+
|
nuclear@0
|
1198 // | /| /|\ |\ |
|
nuclear@0
|
1199 // | / | / | \ | \ |
|
nuclear@0
|
1200 // |/ |/ | \| \|
|
nuclear@0
|
1201 // +---+---+---+---+
|
nuclear@0
|
1202 // |\ |\ | /| /|
|
nuclear@0
|
1203 // | \ | \ | / | / |
|
nuclear@0
|
1204 // | \| \|/ |/ |
|
nuclear@0
|
1205 // +---+---+---+---+
|
nuclear@0
|
1206 // |\ |\ | /| /|
|
nuclear@0
|
1207 // | \ | \ | / | / |
|
nuclear@0
|
1208 // | \| \|/ |/ |
|
nuclear@0
|
1209 // +---+---+---+---+
|
nuclear@0
|
1210 // This way triangle edges don't span long distances over the distortion function,
|
nuclear@0
|
1211 // so linear interpolation works better & we can use fewer tris.
|
nuclear@0
|
1212 if ( ( x < HMA_GridSize/2 ) != ( y < HMA_GridSize/2 ) ) // != is logical XOR
|
nuclear@0
|
1213 {
|
nuclear@0
|
1214 *pcurIndex++ = (uint16_t)FirstVertex;
|
nuclear@0
|
1215 *pcurIndex++ = (uint16_t)FirstVertex+1;
|
nuclear@0
|
1216 *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1)+1;
|
nuclear@0
|
1217
|
nuclear@0
|
1218 *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1)+1;
|
nuclear@0
|
1219 *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1);
|
nuclear@0
|
1220 *pcurIndex++ = (uint16_t)FirstVertex;
|
nuclear@0
|
1221 }
|
nuclear@0
|
1222 else
|
nuclear@0
|
1223 {
|
nuclear@0
|
1224 *pcurIndex++ = (uint16_t)FirstVertex;
|
nuclear@0
|
1225 *pcurIndex++ = (uint16_t)FirstVertex+1;
|
nuclear@0
|
1226 *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1);
|
nuclear@0
|
1227
|
nuclear@0
|
1228 *pcurIndex++ = (uint16_t)FirstVertex+1;
|
nuclear@0
|
1229 *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1)+1;
|
nuclear@0
|
1230 *pcurIndex++ = (uint16_t)FirstVertex+(HMA_GridSize+1);
|
nuclear@0
|
1231 }
|
nuclear@0
|
1232 }
|
nuclear@0
|
1233 }
|
nuclear@0
|
1234
|
nuclear@0
|
1235 //-----------------------------------------------------------------------------------
|
nuclear@0
|
1236 // ***** Prediction and timewarp.
|
nuclear@0
|
1237 //
|
nuclear@0
|
1238
|
nuclear@0
|
1239 // Calculates the values from the HMD info.
|
nuclear@0
|
1240 PredictionValues PredictionGetDeviceValues ( const HmdRenderInfo &hmdRenderInfo,
|
nuclear@0
|
1241 bool withTimewarp /*= true*/,
|
nuclear@0
|
1242 bool withVsync /*= true*/ )
|
nuclear@0
|
1243 {
|
nuclear@0
|
1244 PredictionValues result;
|
nuclear@0
|
1245
|
nuclear@0
|
1246 result.WithTimewarp = withTimewarp;
|
nuclear@0
|
1247 result.WithVsync = withVsync;
|
nuclear@0
|
1248
|
nuclear@0
|
1249 // For unclear reasons, most graphics systems add an extra frame of latency
|
nuclear@0
|
1250 // somewhere along the way. In time we'll debug this and figure it out, but
|
nuclear@0
|
1251 // for now this gets prediction a little bit better.
|
nuclear@0
|
1252 const float extraFramesOfBufferingKludge = 1.0f;
|
nuclear@0
|
1253
|
nuclear@0
|
1254 if ( withVsync )
|
nuclear@0
|
1255 {
|
nuclear@0
|
1256 // These are the times from the Present+Flush to when the middle of the scene is "averagely visible" (without timewarp)
|
nuclear@0
|
1257 // So if you had no timewarp, this, plus the time until the next vsync, is how much to predict by.
|
nuclear@0
|
1258 result.PresentFlushToRenderedScene = extraFramesOfBufferingKludge * hmdRenderInfo.Shutter.FirstScanlineToLastScanline;
|
nuclear@0
|
1259 // Predict to the middle of the screen being scanned out.
|
nuclear@0
|
1260 result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.VsyncToFirstScanline + 0.5f * hmdRenderInfo.Shutter.FirstScanlineToLastScanline;
|
nuclear@0
|
1261 // Time for pixels to get half-way to settling.
|
nuclear@0
|
1262 result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.PixelSettleTime * 0.5f;
|
nuclear@0
|
1263 // Predict to half-way through persistence
|
nuclear@0
|
1264 result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.PixelPersistence * 0.5f;
|
nuclear@0
|
1265
|
nuclear@0
|
1266 // The time from the Present+Flush to when the first scanline is "averagely visible".
|
nuclear@0
|
1267 result.PresentFlushToTimewarpStart = extraFramesOfBufferingKludge * hmdRenderInfo.Shutter.FirstScanlineToLastScanline;
|
nuclear@0
|
1268 // Predict to the first line being scanned out.
|
nuclear@0
|
1269 result.PresentFlushToTimewarpStart += hmdRenderInfo.Shutter.VsyncToFirstScanline;
|
nuclear@0
|
1270 // Time for pixels to get half-way to settling.
|
nuclear@0
|
1271 result.PresentFlushToTimewarpStart += hmdRenderInfo.Shutter.PixelSettleTime * 0.5f;
|
nuclear@0
|
1272 // Predict to half-way through persistence
|
nuclear@0
|
1273 result.PresentFlushToTimewarpStart += hmdRenderInfo.Shutter.PixelPersistence * 0.5f;
|
nuclear@0
|
1274
|
nuclear@0
|
1275 // Time to the the last scanline.
|
nuclear@0
|
1276 result.PresentFlushToTimewarpEnd = result.PresentFlushToTimewarpStart + hmdRenderInfo.Shutter.FirstScanlineToLastScanline;
|
nuclear@0
|
1277
|
nuclear@0
|
1278 // Ideal framerate.
|
nuclear@0
|
1279 result.PresentFlushToPresentFlush = hmdRenderInfo.Shutter.VsyncToNextVsync;
|
nuclear@0
|
1280 }
|
nuclear@0
|
1281 else
|
nuclear@0
|
1282 {
|
nuclear@0
|
1283 // Timewarp without vsync is a little odd.
|
nuclear@0
|
1284 // Currently, we assume that without vsync, we have no idea which scanline
|
nuclear@0
|
1285 // is currently being sent to the display. So we can't do lerping timewarp,
|
nuclear@0
|
1286 // we can just do a full-screen late-stage fixup.
|
nuclear@0
|
1287
|
nuclear@0
|
1288 // "PresentFlushToRenderedScene" means the time from the Present+Flush to when the middle of the scene is "averagely visible" (without timewarp)
|
nuclear@0
|
1289 // So if you had no timewarp, this, plus the time until the next flush (which is usually the time to render the frame), is how much to predict by.
|
nuclear@0
|
1290 // Time for pixels to get half-way to settling.
|
nuclear@0
|
1291 result.PresentFlushToRenderedScene = hmdRenderInfo.Shutter.PixelSettleTime * 0.5f;
|
nuclear@0
|
1292 // Predict to half-way through persistence
|
nuclear@0
|
1293 result.PresentFlushToRenderedScene += hmdRenderInfo.Shutter.PixelPersistence * 0.5f;
|
nuclear@0
|
1294
|
nuclear@0
|
1295 // Without vsync, you don't know timings, and so can't do anything useful with lerped warping.
|
nuclear@0
|
1296 result.PresentFlushToTimewarpStart = result.PresentFlushToRenderedScene;
|
nuclear@0
|
1297 result.PresentFlushToTimewarpEnd = result.PresentFlushToRenderedScene;
|
nuclear@0
|
1298
|
nuclear@0
|
1299 // There's no concept of "ideal" when vsync is off.
|
nuclear@0
|
1300 result.PresentFlushToPresentFlush = 0.0f;
|
nuclear@0
|
1301 }
|
nuclear@0
|
1302
|
nuclear@0
|
1303 return result;
|
nuclear@0
|
1304 }
|
nuclear@0
|
1305
|
nuclear@0
|
1306 Matrix4f TimewarpComputePoseDelta ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&hmdToEyeViewOffset )
|
nuclear@0
|
1307 {
|
nuclear@0
|
1308 Matrix4f worldFromPredictedView = (hmdToEyeViewOffset * predictedViewFromWorld).InvertedHomogeneousTransform();
|
nuclear@0
|
1309 Matrix4f matRenderFromNowStart = (hmdToEyeViewOffset * renderedViewFromWorld) * worldFromPredictedView;
|
nuclear@0
|
1310
|
nuclear@0
|
1311 // The sensor-predicted orientations have: X=right, Y=up, Z=backwards.
|
nuclear@0
|
1312 // The vectors inside the mesh are in NDC to keep the shader simple: X=right, Y=down, Z=forwards.
|
nuclear@0
|
1313 // So we need to perform a similarity transform on this delta matrix.
|
nuclear@0
|
1314 // The verbose code would look like this:
|
nuclear@0
|
1315 /*
|
nuclear@0
|
1316 Matrix4f matBasisChange;
|
nuclear@0
|
1317 matBasisChange.SetIdentity();
|
nuclear@0
|
1318 matBasisChange.M[0][0] = 1.0f;
|
nuclear@0
|
1319 matBasisChange.M[1][1] = -1.0f;
|
nuclear@0
|
1320 matBasisChange.M[2][2] = -1.0f;
|
nuclear@0
|
1321 Matrix4f matBasisChangeInv = matBasisChange.Inverted();
|
nuclear@0
|
1322 matRenderFromNow = matBasisChangeInv * matRenderFromNow * matBasisChange;
|
nuclear@0
|
1323 */
|
nuclear@0
|
1324 // ...but of course all the above is a constant transform and much more easily done.
|
nuclear@0
|
1325 // We flip the signs of the Y&Z row, then flip the signs of the Y&Z column,
|
nuclear@0
|
1326 // and of course most of the flips cancel:
|
nuclear@0
|
1327 // +++ +-- +--
|
nuclear@0
|
1328 // +++ -> flip Y&Z columns -> +-- -> flip Y&Z rows -> -++
|
nuclear@0
|
1329 // +++ +-- -++
|
nuclear@0
|
1330 matRenderFromNowStart.M[0][1] = -matRenderFromNowStart.M[0][1];
|
nuclear@0
|
1331 matRenderFromNowStart.M[0][2] = -matRenderFromNowStart.M[0][2];
|
nuclear@0
|
1332 matRenderFromNowStart.M[1][0] = -matRenderFromNowStart.M[1][0];
|
nuclear@0
|
1333 matRenderFromNowStart.M[2][0] = -matRenderFromNowStart.M[2][0];
|
nuclear@0
|
1334 matRenderFromNowStart.M[1][3] = -matRenderFromNowStart.M[1][3];
|
nuclear@0
|
1335 matRenderFromNowStart.M[2][3] = -matRenderFromNowStart.M[2][3];
|
nuclear@0
|
1336
|
nuclear@0
|
1337 return matRenderFromNowStart;
|
nuclear@0
|
1338 }
|
nuclear@0
|
1339
|
nuclear@0
|
1340 Matrix4f TimewarpComputePoseDeltaPosition ( Matrix4f const &renderedViewFromWorld, Matrix4f const &predictedViewFromWorld, Matrix4f const&hmdToEyeViewOffset )
|
nuclear@0
|
1341 {
|
nuclear@0
|
1342 Matrix4f worldFromPredictedView = (hmdToEyeViewOffset * predictedViewFromWorld).InvertedHomogeneousTransform();
|
nuclear@0
|
1343 Matrix4f matRenderXform = (hmdToEyeViewOffset * renderedViewFromWorld) * worldFromPredictedView;
|
nuclear@0
|
1344
|
nuclear@0
|
1345 return matRenderXform.Inverted();
|
nuclear@0
|
1346 }
|
nuclear@0
|
1347
|
nuclear@0
|
1348 TimewarpMachine::TimewarpMachine()
|
nuclear@0
|
1349 : VsyncEnabled(false),
|
nuclear@0
|
1350 RenderInfo(),
|
nuclear@0
|
1351 CurrentPredictionValues(),
|
nuclear@0
|
1352 DistortionTimeCount(0),
|
nuclear@0
|
1353 DistortionTimeCurrentStart(0.0),
|
nuclear@0
|
1354 //DistortionTimes[],
|
nuclear@0
|
1355 DistortionTimeAverage(0.f),
|
nuclear@0
|
1356 //EyeRenderPoses[],
|
nuclear@0
|
1357 LastFramePresentFlushTime(0.0),
|
nuclear@0
|
1358 PresentFlushToPresentFlushSeconds(0.f),
|
nuclear@0
|
1359 NextFramePresentFlushTime(0.0)
|
nuclear@0
|
1360 {
|
nuclear@0
|
1361 #if defined(OVR_BUILD_DEBUG)
|
nuclear@0
|
1362 memset(DistortionTimes, 0, sizeof(DistortionTimes));
|
nuclear@0
|
1363 #endif
|
nuclear@0
|
1364
|
nuclear@0
|
1365 for ( int i = 0; i < 2; i++ )
|
nuclear@0
|
1366 {
|
nuclear@0
|
1367 EyeRenderPoses[i] = Posef();
|
nuclear@0
|
1368 }
|
nuclear@0
|
1369 }
|
nuclear@0
|
1370
|
nuclear@0
|
1371 void TimewarpMachine::Reset(HmdRenderInfo& renderInfo, bool vsyncEnabled, double timeNow)
|
nuclear@0
|
1372 {
|
nuclear@0
|
1373 RenderInfo = renderInfo;
|
nuclear@0
|
1374 VsyncEnabled = vsyncEnabled;
|
nuclear@0
|
1375 CurrentPredictionValues = PredictionGetDeviceValues ( renderInfo, true, VsyncEnabled );
|
nuclear@0
|
1376 PresentFlushToPresentFlushSeconds = 0.0f;
|
nuclear@0
|
1377 DistortionTimeCount = 0;
|
nuclear@0
|
1378 DistortionTimeAverage = 0.0f;
|
nuclear@0
|
1379 LastFramePresentFlushTime = timeNow;
|
nuclear@0
|
1380 AfterPresentAndFlush(timeNow);
|
nuclear@0
|
1381 }
|
nuclear@0
|
1382
|
nuclear@0
|
void TimewarpMachine::AfterPresentAndFlush(double timeNow)
{
    // Convenience wrapper for the common case where the Present and its flush
    // complete back-to-back: first extrapolate the next flush time, then record
    // that the present actually finished at timeNow.
    AfterPresentWithoutFlush();
    AfterPresentFinishes ( timeNow );
}
|
nuclear@0
|
1388
|
nuclear@0
|
void TimewarpMachine::AfterPresentWithoutFlush()
{
    // We've only issued the Present - it hasn't actually finished (i.e. appeared)
    // But we need to estimate when the next Present will appear, so extrapolate from previous data.
    // Two intervals because LastFramePresentFlushTime still refers to the
    // *previous* frame's flush (this frame's hasn't been recorded yet), so the
    // upcoming flush is roughly two frame periods after it.
    NextFramePresentFlushTime = LastFramePresentFlushTime + 2.0 * (double)PresentFlushToPresentFlushSeconds;
}
|
nuclear@0
|
1395
|
nuclear@0
|
void TimewarpMachine::AfterPresentFinishes(double timeNow)
{
    // The present has now actually happened.
    // Measure the achieved flush-to-flush interval, roll the "last flush"
    // timestamp forward, and predict the next flush one interval from now.
    // Order matters: the interval must be computed before overwriting
    // LastFramePresentFlushTime.
    PresentFlushToPresentFlushSeconds = (float)(timeNow - LastFramePresentFlushTime);
    LastFramePresentFlushTime = timeNow;
    NextFramePresentFlushTime = timeNow + (double)PresentFlushToPresentFlushSeconds;
}
|
nuclear@0
|
1403
|
nuclear@0
|
1404
|
nuclear@0
|
1405
|
nuclear@0
|
double TimewarpMachine::GetViewRenderPredictionTime()
{
    // Absolute time the app should predict head poses to when rendering the
    // next frame's eye views (middle of the scene "averagely visible").
    // Note that PredictionGetDeviceValues() did all the vsync-dependent thinking for us.
    return NextFramePresentFlushTime + CurrentPredictionValues.PresentFlushToRenderedScene;
}
|
nuclear@0
|
1411
|
nuclear@0
|
bool TimewarpMachine::GetViewRenderPredictionPose(SensorStateReader* reader, Posef& pose)
{
    // Fetch the predicted head pose for scene rendering. Returns false if the
    // reader cannot supply a pose for that time (pose left untouched by us).
    return reader->GetPoseAtTime(GetViewRenderPredictionTime(), pose);
}
|
nuclear@0
|
1416
|
nuclear@0
|
double TimewarpMachine::GetVisiblePixelTimeStart()
{
    // Absolute time at which the first scanline of the next frame is
    // "averagely visible" - the start of the timewarp interpolation range.
    // Note that PredictionGetDeviceValues() did all the vsync-dependent thinking for us.
    return NextFramePresentFlushTime + CurrentPredictionValues.PresentFlushToTimewarpStart;
}
|
nuclear@0
|
double TimewarpMachine::GetVisiblePixelTimeEnd()
{
    // Absolute time at which the last scanline of the next frame is
    // "averagely visible" - the end of the timewarp interpolation range.
    // Note that PredictionGetDeviceValues() did all the vsync-dependent thinking for us.
    return NextFramePresentFlushTime + CurrentPredictionValues.PresentFlushToTimewarpEnd;
}
|
nuclear@0
|
bool TimewarpMachine::GetPredictedVisiblePixelPoseStart(SensorStateReader* reader, Posef& pose)
{
    // Predicted pose at the moment the first scanline becomes visible.
    // Returns false if the reader cannot supply a pose for that time.
    return reader->GetPoseAtTime(GetVisiblePixelTimeStart(), pose);
}
|
nuclear@0
|
bool TimewarpMachine::GetPredictedVisiblePixelPoseEnd(SensorStateReader* reader, Posef& pose)
{
    // Predicted pose at the moment the last scanline becomes visible.
    // Returns false if the reader cannot supply a pose for that time.
    return reader->GetPoseAtTime(GetVisiblePixelTimeEnd(), pose);
}
|
nuclear@0
|
1435 bool TimewarpMachine::GetTimewarpDeltaStart(SensorStateReader* reader, Posef const &renderedPose, Matrix4f& transform)
|
nuclear@0
|
1436 {
|
nuclear@0
|
1437 Posef visiblePose;
|
nuclear@0
|
1438 if (!GetPredictedVisiblePixelPoseStart(reader, visiblePose))
|
nuclear@0
|
1439 {
|
nuclear@0
|
1440 return false;
|
nuclear@0
|
1441 }
|
nuclear@0
|
1442
|
nuclear@0
|
1443 Matrix4f visibleMatrix(visiblePose);
|
nuclear@0
|
1444 Matrix4f renderedMatrix(renderedPose);
|
nuclear@0
|
1445 Matrix4f identity; // doesn't matter for orientation-only timewarp
|
nuclear@0
|
1446 transform = TimewarpComputePoseDelta ( renderedMatrix, visibleMatrix, identity );
|
nuclear@0
|
1447
|
nuclear@0
|
1448 return true;
|
nuclear@0
|
1449 }
|
nuclear@0
|
1450 bool TimewarpMachine::GetTimewarpDeltaEnd(SensorStateReader* reader, Posef const &renderedPose, Matrix4f& transform)
|
nuclear@0
|
1451 {
|
nuclear@0
|
1452 Posef visiblePose;
|
nuclear@0
|
1453 if (!GetPredictedVisiblePixelPoseEnd(reader, visiblePose))
|
nuclear@0
|
1454 {
|
nuclear@0
|
1455 return false;
|
nuclear@0
|
1456 }
|
nuclear@0
|
1457
|
nuclear@0
|
1458 Matrix4f visibleMatrix(visiblePose);
|
nuclear@0
|
1459 Matrix4f renderedMatrix(renderedPose);
|
nuclear@0
|
1460 Matrix4f identity; // doesn't matter for orientation-only timewarp
|
nuclear@0
|
1461 transform = TimewarpComputePoseDelta ( renderedMatrix, visibleMatrix, identity );
|
nuclear@0
|
1462
|
nuclear@0
|
1463 return true;
|
nuclear@0
|
1464 }
|
nuclear@0
|
1465
|
nuclear@0
|
1466
|
nuclear@0
|
1467 // What time should the app wait until before starting distortion?
|
nuclear@0
|
1468 double TimewarpMachine::JustInTime_GetDistortionWaitUntilTime()
|
nuclear@0
|
1469 {
|
nuclear@0
|
1470 if ( !VsyncEnabled || ( DistortionTimeCount < NumDistortionTimes ) )
|
nuclear@0
|
1471 {
|
nuclear@0
|
1472 // Don't wait.
|
nuclear@0
|
1473 return LastFramePresentFlushTime;
|
nuclear@0
|
1474 }
|
nuclear@0
|
1475
|
nuclear@0
|
1476 // Note - 1-2ms fudge factor (because Windows timer granularity etc) is NOT added here,
|
nuclear@0
|
1477 // because otherwise you end up adding multiple fudge factors!
|
nuclear@0
|
1478 // So it's left for the calling app to add just one fudge factor.
|
nuclear@0
|
1479
|
nuclear@0
|
1480 float howLongBeforePresent = DistortionTimeAverage;
|
nuclear@0
|
1481 // Subtlety here. Technically, the correct time is NextFramePresentFlushTime - howLongBeforePresent.
|
nuclear@0
|
1482 // However, if the app drops a frame, this then perpetuates it,
|
nuclear@0
|
1483 // i.e. if the display is running at 60fps, but the last frame was slow,
|
nuclear@0
|
1484 // (e.g. because of swapping or whatever), then NextFramePresentFlushTime is
|
nuclear@0
|
1485 // 33ms in the future, not 16ms. Since this function supplies the
|
nuclear@0
|
1486 // time to wait until, the app will indeed wait until 32ms, so the framerate
|
nuclear@0
|
1487 // drops to 30fps and never comes back up!
|
nuclear@0
|
1488 // So we return the *ideal* framerate, not the *actual* framerate.
|
nuclear@0
|
1489 return LastFramePresentFlushTime + (float)( CurrentPredictionValues.PresentFlushToPresentFlush - howLongBeforePresent );
|
nuclear@0
|
1490 }
|
nuclear@0
|
1491
|
nuclear@0
|
1492 double TimewarpMachine::JustInTime_AverageDistortionTime()
|
nuclear@0
|
1493 {
|
nuclear@0
|
1494 if ( JustInTime_NeedDistortionTimeMeasurement() )
|
nuclear@0
|
1495 {
|
nuclear@0
|
1496 return 0.0;
|
nuclear@0
|
1497 }
|
nuclear@0
|
1498 return DistortionTimeAverage;
|
nuclear@0
|
1499 }
|
nuclear@0
|
1500
|
nuclear@0
|
1501 bool TimewarpMachine::JustInTime_NeedDistortionTimeMeasurement() const
|
nuclear@0
|
1502 {
|
nuclear@0
|
1503 if (!VsyncEnabled)
|
nuclear@0
|
1504 {
|
nuclear@0
|
1505 return false;
|
nuclear@0
|
1506 }
|
nuclear@0
|
1507 return ( DistortionTimeCount < NumDistortionTimes );
|
nuclear@0
|
1508 }
|
nuclear@0
|
1509
|
nuclear@0
|
void TimewarpMachine::JustInTime_BeforeDistortionTimeMeasurement(double timeNow)
{
    // Mark the start of a distortion-cost measurement; paired with
    // JustInTime_AfterDistortionTimeMeasurement() which records the elapsed time.
    DistortionTimeCurrentStart = timeNow;
}
|
nuclear@0
|
1514
|
nuclear@0
|
void TimewarpMachine::JustInTime_AfterDistortionTimeMeasurement(double timeNow)
{
    // Record one distortion-cost sample (seconds since the matching
    // JustInTime_BeforeDistortionTimeMeasurement call). Once the buffer is
    // full, collapse the samples to a single representative value.
    float timeDelta = (float)( timeNow - DistortionTimeCurrentStart );
    if ( DistortionTimeCount < NumDistortionTimes )
    {
        DistortionTimes[DistortionTimeCount] = timeDelta;
        DistortionTimeCount++;
        if ( DistortionTimeCount == NumDistortionTimes )
        {
            // Median (selection by repeated max-removal): each pass finds the
            // largest remaining sample and zeroes it, so after
            // NumDistortionTimes/2 passes the last value removed is the median
            // (the upper median for an even sample count).
            // NOTE: this destroys the contents of DistortionTimes, which is
            // fine - only the resulting median is kept, in
            // DistortionTimeAverage (despite the "Average" name).
            float distortionTimeMedian = 0.0f;
            for ( int i = 0; i < NumDistortionTimes/2; i++ )
            {
                // Find the maximum time of those remaining.
                float maxTime = DistortionTimes[0];
                int maxIndex = 0;
                for ( int j = 1; j < NumDistortionTimes; j++ )
                {
                    if ( maxTime < DistortionTimes[j] )
                    {
                        maxTime = DistortionTimes[j];
                        maxIndex = j;
                    }
                }
                // Zero that max time, so we'll find the next-highest time.
                DistortionTimes[maxIndex] = 0.0f;
                distortionTimeMedian = maxTime;
            }
            DistortionTimeAverage = distortionTimeMedian;
        }
    }
    else
    {
        // Callers should consult JustInTime_NeedDistortionTimeMeasurement()
        // first; landing here means they measured more than was asked for.
        OVR_ASSERT ( !"Really didn't need more measurements, thanks" );
    }
}
|
nuclear@0
|
1551
|
nuclear@0
|
1552
|
nuclear@0
|
1553 }}} // OVR::Util::Render
|
nuclear@0
|
1554
|