rev |
line source |
nuclear@0
|
1 /************************************************************************************
|
nuclear@0
|
2
|
nuclear@0
|
3 Filename : CAPI_FrameTimeManager.cpp
|
nuclear@0
|
4 Content : Manage frame timing and pose prediction for rendering
|
nuclear@0
|
5 Created : November 30, 2013
|
nuclear@0
|
6 Authors : Volga Aksoy, Michael Antonov
|
nuclear@0
|
7
|
nuclear@0
|
8 Copyright : Copyright 2014 Oculus VR, LLC All Rights reserved.
|
nuclear@0
|
9
|
nuclear@0
|
10 Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License");
|
nuclear@0
|
11 you may not use the Oculus VR Rift SDK except in compliance with the License,
|
nuclear@0
|
12 which is provided at the time of installation or download, or which
|
nuclear@0
|
13 otherwise accompanies this software in either electronic or hard copy form.
|
nuclear@0
|
14
|
nuclear@0
|
15 You may obtain a copy of the License at
|
nuclear@0
|
16
|
nuclear@0
|
17 http://www.oculusvr.com/licenses/LICENSE-3.2
|
nuclear@0
|
18
|
nuclear@0
|
19 Unless required by applicable law or agreed to in writing, the Oculus VR SDK
|
nuclear@0
|
20 distributed under the License is distributed on an "AS IS" BASIS,
|
nuclear@0
|
21 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
nuclear@0
|
22 See the License for the specific language governing permissions and
|
nuclear@0
|
23 limitations under the License.
|
nuclear@0
|
24
|
nuclear@0
|
25 ************************************************************************************/
|
nuclear@0
|
26
|
nuclear@0
|
27 #include "CAPI_FrameTimeManager.h"
|
nuclear@0
|
28
|
nuclear@0
|
29 #include "../Kernel/OVR_Log.h"
|
nuclear@0
|
30
|
nuclear@0
|
31 namespace OVR { namespace CAPI {
|
nuclear@0
|
32
|
nuclear@0
|
33
|
nuclear@0
|
34 //-------------------------------------------------------------------------------------
|
nuclear@0
|
35 // ***** FrameLatencyTracker
|
nuclear@0
|
36
|
nuclear@0
|
37
|
nuclear@0
|
38 FrameLatencyTracker::FrameLatencyTracker()
|
nuclear@0
|
39 {
|
nuclear@0
|
40 Reset();
|
nuclear@0
|
41 }
|
nuclear@0
|
42
|
nuclear@0
|
43
|
nuclear@0
|
44 void FrameLatencyTracker::Reset()
|
nuclear@0
|
45 {
|
nuclear@0
|
46 TrackerEnabled = true;
|
nuclear@0
|
47 WaitMode = SampleWait_Zeroes;
|
nuclear@0
|
48 MatchCount = 0;
|
nuclear@0
|
49 memset(FrameEndTimes, 0, sizeof(FrameEndTimes));
|
nuclear@0
|
50 FrameIndex = 0;
|
nuclear@0
|
51 //FrameDeltas
|
nuclear@0
|
52 RenderLatencySeconds = 0.0;
|
nuclear@0
|
53 TimewarpLatencySeconds = 0.0;
|
nuclear@0
|
54 LatencyRecordTime = 0.0;
|
nuclear@0
|
55
|
nuclear@0
|
56 FrameDeltas.Clear();
|
nuclear@0
|
57 }
|
nuclear@0
|
58
|
nuclear@0
|
59
|
nuclear@0
|
60 unsigned char FrameLatencyTracker::GetNextDrawColor()
|
nuclear@0
|
61 {
|
nuclear@0
|
62 if (!TrackerEnabled || (WaitMode == SampleWait_Zeroes) ||
|
nuclear@0
|
63 (FrameIndex >= FramesTracked))
|
nuclear@0
|
64 {
|
nuclear@0
|
65 return (unsigned char)Util::FrameTimeRecord::ReadbackIndexToColor(0);
|
nuclear@0
|
66 }
|
nuclear@0
|
67
|
nuclear@0
|
68 OVR_ASSERT(FrameIndex < FramesTracked);
|
nuclear@0
|
69 return (unsigned char)Util::FrameTimeRecord::ReadbackIndexToColor(FrameIndex+1);
|
nuclear@0
|
70 }
|
nuclear@0
|
71
|
nuclear@0
|
72
|
nuclear@0
|
73 void FrameLatencyTracker::SaveDrawColor(unsigned char drawColor, double endFrameTime,
|
nuclear@0
|
74 double renderIMUTime, double timewarpIMUTime )
|
nuclear@0
|
75 {
|
nuclear@0
|
76 if (!TrackerEnabled || (WaitMode == SampleWait_Zeroes))
|
nuclear@0
|
77 return;
|
nuclear@0
|
78
|
nuclear@0
|
79 if (FrameIndex < FramesTracked)
|
nuclear@0
|
80 {
|
nuclear@0
|
81 OVR_ASSERT(Util::FrameTimeRecord::ReadbackIndexToColor(FrameIndex+1) == drawColor);
|
nuclear@0
|
82 OVR_UNUSED(drawColor);
|
nuclear@0
|
83
|
nuclear@0
|
84 // saves {color, endFrame time}
|
nuclear@0
|
85 FrameEndTimes[FrameIndex].ReadbackIndex = FrameIndex + 1;
|
nuclear@0
|
86 FrameEndTimes[FrameIndex].TimeSeconds = endFrameTime;
|
nuclear@0
|
87 FrameEndTimes[FrameIndex].RenderIMUTimeSeconds = renderIMUTime;
|
nuclear@0
|
88 FrameEndTimes[FrameIndex].TimewarpIMUTimeSeconds= timewarpIMUTime;
|
nuclear@0
|
89 FrameEndTimes[FrameIndex].MatchedRecord = false;
|
nuclear@0
|
90 FrameIndex++;
|
nuclear@0
|
91 }
|
nuclear@0
|
92 else
|
nuclear@0
|
93 {
|
nuclear@0
|
94 // If the request was outstanding for too long, switch to zero mode to restart.
|
nuclear@0
|
95 if (endFrameTime > (FrameEndTimes[FrameIndex-1].TimeSeconds + 0.15))
|
nuclear@0
|
96 {
|
nuclear@0
|
97 if (MatchCount == 0)
|
nuclear@0
|
98 {
|
nuclear@0
|
99 // If nothing was matched, we have no latency reading.
|
nuclear@0
|
100 RenderLatencySeconds = 0.0;
|
nuclear@0
|
101 TimewarpLatencySeconds = 0.0;
|
nuclear@0
|
102 }
|
nuclear@0
|
103
|
nuclear@0
|
104 WaitMode = SampleWait_Zeroes;
|
nuclear@0
|
105 MatchCount = 0;
|
nuclear@0
|
106 FrameIndex = 0;
|
nuclear@0
|
107 }
|
nuclear@0
|
108 }
|
nuclear@0
|
109 }
|
nuclear@0
|
110
|
nuclear@0
|
111
|
nuclear@0
|
112 void FrameLatencyTracker::MatchRecord(const Util::FrameTimeRecordSet &r)
|
nuclear@0
|
113 {
|
nuclear@0
|
114 if (!TrackerEnabled)
|
nuclear@0
|
115 return;
|
nuclear@0
|
116
|
nuclear@0
|
117 if (WaitMode == SampleWait_Zeroes)
|
nuclear@0
|
118 {
|
nuclear@0
|
119 // Do we have all zeros?
|
nuclear@0
|
120 if (r.IsAllZeroes())
|
nuclear@0
|
121 {
|
nuclear@0
|
122 OVR_ASSERT(FrameIndex == 0);
|
nuclear@0
|
123 WaitMode = SampleWait_Match;
|
nuclear@0
|
124 MatchCount = 0;
|
nuclear@0
|
125 }
|
nuclear@0
|
126 return;
|
nuclear@0
|
127 }
|
nuclear@0
|
128
|
nuclear@0
|
129 // We are in Match Mode. Wait until all colors are matched or timeout,
|
nuclear@0
|
130 // at which point we go back to zeros.
|
nuclear@0
|
131
|
nuclear@0
|
132 for (int i = 0; i < FrameIndex; i++)
|
nuclear@0
|
133 {
|
nuclear@0
|
134 int recordIndex = 0;
|
nuclear@0
|
135 int consecutiveMatch = 0;
|
nuclear@0
|
136
|
nuclear@0
|
137 OVR_ASSERT(FrameEndTimes[i].ReadbackIndex != 0);
|
nuclear@0
|
138
|
nuclear@0
|
139 if (r.FindReadbackIndex(&recordIndex, FrameEndTimes[i].ReadbackIndex))
|
nuclear@0
|
140 {
|
nuclear@0
|
141 // Advance forward to see that we have several more matches.
|
nuclear@0
|
142 int ri = recordIndex + 1;
|
nuclear@0
|
143 int j = i + 1;
|
nuclear@0
|
144
|
nuclear@0
|
145 consecutiveMatch++;
|
nuclear@0
|
146
|
nuclear@0
|
147 for (; (j < FrameIndex) && (ri < Util::FrameTimeRecordSet::RecordCount); j++, ri++)
|
nuclear@0
|
148 {
|
nuclear@0
|
149 if (r[ri].ReadbackIndex != FrameEndTimes[j].ReadbackIndex)
|
nuclear@0
|
150 break;
|
nuclear@0
|
151 consecutiveMatch++;
|
nuclear@0
|
152 }
|
nuclear@0
|
153
|
nuclear@0
|
154 // Match at least 2 items in the row, to avoid accidentally matching color.
|
nuclear@0
|
155 if (consecutiveMatch > 1)
|
nuclear@0
|
156 {
|
nuclear@0
|
157 // Record latency values for all but last samples. Keep last 2 samples
|
nuclear@0
|
158 // for the future to simplify matching.
|
nuclear@0
|
159 for (int q = 0; q < consecutiveMatch; q++)
|
nuclear@0
|
160 {
|
nuclear@0
|
161 const Util::FrameTimeRecord &scanoutFrame = r[recordIndex+q];
|
nuclear@0
|
162 FrameTimeRecordEx &renderFrame = FrameEndTimes[i+q];
|
nuclear@0
|
163
|
nuclear@0
|
164 if (!renderFrame.MatchedRecord)
|
nuclear@0
|
165 {
|
nuclear@0
|
166 double deltaSeconds = scanoutFrame.TimeSeconds - renderFrame.TimeSeconds;
|
nuclear@0
|
167 if (deltaSeconds > 0.0)
|
nuclear@0
|
168 {
|
nuclear@0
|
169 FrameDeltas.AddTimeDelta(deltaSeconds);
|
nuclear@0
|
170
|
nuclear@0
|
171 // FIRMWARE HACK: don't take new readings if they're 10ms higher than previous reading
|
nuclear@0
|
172 // but only do that for 1 second, after that accept it regardless of the timing difference
|
nuclear@0
|
173 double newRenderLatency = scanoutFrame.TimeSeconds - renderFrame.RenderIMUTimeSeconds;
|
nuclear@0
|
174 if( newRenderLatency < RenderLatencySeconds + 0.01 ||
|
nuclear@0
|
175 scanoutFrame.TimeSeconds > LatencyRecordTime + 1.0)
|
nuclear@0
|
176 {
|
nuclear@0
|
177 LatencyRecordTime = scanoutFrame.TimeSeconds;
|
nuclear@0
|
178 RenderLatencySeconds = scanoutFrame.TimeSeconds - renderFrame.RenderIMUTimeSeconds;
|
nuclear@0
|
179 TimewarpLatencySeconds = (renderFrame.TimewarpIMUTimeSeconds == 0.0) ? 0.0 :
|
nuclear@0
|
180 (scanoutFrame.TimeSeconds - renderFrame.TimewarpIMUTimeSeconds);
|
nuclear@0
|
181 }
|
nuclear@0
|
182 }
|
nuclear@0
|
183
|
nuclear@0
|
184 renderFrame.MatchedRecord = true;
|
nuclear@0
|
185 MatchCount++;
|
nuclear@0
|
186 }
|
nuclear@0
|
187 }
|
nuclear@0
|
188
|
nuclear@0
|
189 // Exit for.
|
nuclear@0
|
190 break;
|
nuclear@0
|
191 }
|
nuclear@0
|
192 }
|
nuclear@0
|
193 } // for ( i => FrameIndex )
|
nuclear@0
|
194
|
nuclear@0
|
195
|
nuclear@0
|
196 // If we matched all frames, start over.
|
nuclear@0
|
197 if (MatchCount == FramesTracked)
|
nuclear@0
|
198 {
|
nuclear@0
|
199 WaitMode = SampleWait_Zeroes;
|
nuclear@0
|
200 MatchCount = 0;
|
nuclear@0
|
201 FrameIndex = 0;
|
nuclear@0
|
202 }
|
nuclear@0
|
203 }
|
nuclear@0
|
204
|
nuclear@0
|
205 bool FrameLatencyTracker::IsLatencyTimingAvailable()
|
nuclear@0
|
206 {
|
nuclear@0
|
207 return ovr_GetTimeInSeconds() < (LatencyRecordTime + 2.0);
|
nuclear@0
|
208 }
|
nuclear@0
|
209
|
nuclear@0
|
210 void FrameLatencyTracker::GetLatencyTimings(float& latencyRender, float& latencyTimewarp, float& latencyPostPresent)
|
nuclear@0
|
211 {
|
nuclear@0
|
212 if (!IsLatencyTimingAvailable())
|
nuclear@0
|
213 {
|
nuclear@0
|
214 latencyRender = 0.0f;
|
nuclear@0
|
215 latencyTimewarp = 0.0f;
|
nuclear@0
|
216 latencyPostPresent = 0.0f;
|
nuclear@0
|
217 }
|
nuclear@0
|
218 else
|
nuclear@0
|
219 {
|
nuclear@0
|
220 latencyRender = (float)RenderLatencySeconds;
|
nuclear@0
|
221 latencyTimewarp = (float)TimewarpLatencySeconds;
|
nuclear@0
|
222 latencyPostPresent = (float)FrameDeltas.GetMedianTimeDelta();
|
nuclear@0
|
223 }
|
nuclear@0
|
224 }
|
nuclear@0
|
225
|
nuclear@0
|
226
|
nuclear@0
|
227 //-------------------------------------------------------------------------------------
|
nuclear@0
|
228 // ***** FrameTimeManager
|
nuclear@0
|
229
|
nuclear@0
|
230 FrameTimeManager::FrameTimeManager(bool vsyncEnabled) :
|
nuclear@0
|
231 RenderInfo(),
|
nuclear@0
|
232 FrameTimeDeltas(),
|
nuclear@0
|
233 DistortionRenderTimes(),
|
nuclear@0
|
234 ScreenLatencyTracker(),
|
nuclear@0
|
235 VsyncEnabled(vsyncEnabled),
|
nuclear@0
|
236 DynamicPrediction(true),
|
nuclear@0
|
237 SdkRender(false),
|
nuclear@0
|
238 //DirectToRift(false), Initialized below.
|
nuclear@0
|
239 //VSyncToScanoutDelay(0.0), Initialized below.
|
nuclear@0
|
240 //NoVSyncToScanoutDelay(0.0), Initialized below.
|
nuclear@0
|
241 ScreenSwitchingDelay(0.0),
|
nuclear@0
|
242 FrameTiming(),
|
nuclear@0
|
243 LocklessTiming(),
|
nuclear@0
|
244 RenderIMUTimeSeconds(0.0),
|
nuclear@0
|
245 TimewarpIMUTimeSeconds(0.0)
|
nuclear@0
|
246 {
|
nuclear@0
|
247 // If driver is in use,
|
nuclear@0
|
248 DirectToRift = !Display::InCompatibilityMode(false);
|
nuclear@0
|
249 if (DirectToRift)
|
nuclear@0
|
250 {
|
nuclear@0
|
251 // The latest driver provides a post-present vsync-to-scan-out delay
|
nuclear@0
|
252 // that is roughly zero. The latency tester will provide real numbers
|
nuclear@0
|
253 // but when it is unavailable for some reason, we should default to
|
nuclear@0
|
254 // an expected value.
|
nuclear@0
|
255 VSyncToScanoutDelay = 0.0001f;
|
nuclear@0
|
256 }
|
nuclear@0
|
257 else
|
nuclear@0
|
258 {
|
nuclear@0
|
259 // HACK: SyncToScanoutDelay observed close to 1 frame in video cards.
|
nuclear@0
|
260 // Overwritten by dynamic latency measurement on DK2.
|
nuclear@0
|
261 VSyncToScanoutDelay = 0.013f;
|
nuclear@0
|
262 }
|
nuclear@0
|
263 NoVSyncToScanoutDelay = 0.004f;
|
nuclear@0
|
264 }
|
nuclear@0
|
265
|
nuclear@0
|
266 void FrameTimeManager::Init(HmdRenderInfo& renderInfo)
|
nuclear@0
|
267 {
|
nuclear@0
|
268 // Set up prediction distances.
|
nuclear@0
|
269 // With-Vsync timings.
|
nuclear@0
|
270 RenderInfo = renderInfo;
|
nuclear@0
|
271
|
nuclear@0
|
272 ScreenSwitchingDelay = RenderInfo.Shutter.PixelSettleTime * 0.5f +
|
nuclear@0
|
273 RenderInfo.Shutter.PixelPersistence * 0.5f;
|
nuclear@0
|
274 }
|
nuclear@0
|
275
|
nuclear@0
|
276 void FrameTimeManager::ResetFrameTiming(unsigned frameIndex,
|
nuclear@0
|
277 bool dynamicPrediction,
|
nuclear@0
|
278 bool sdkRender)
|
nuclear@0
|
279 {
|
nuclear@0
|
280 DynamicPrediction = dynamicPrediction;
|
nuclear@0
|
281 SdkRender = sdkRender;
|
nuclear@0
|
282
|
nuclear@0
|
283 FrameTimeDeltas.Clear();
|
nuclear@0
|
284 DistortionRenderTimes.Clear();
|
nuclear@0
|
285 ScreenLatencyTracker.Reset();
|
nuclear@0
|
286 //Revisit dynamic pre-Timewarp delay adjustment logic
|
nuclear@0
|
287 //TimewarpAdjuster.Reset();
|
nuclear@0
|
288
|
nuclear@0
|
289 FrameTiming.FrameIndex = frameIndex;
|
nuclear@0
|
290 FrameTiming.NextFrameTime = 0.0;
|
nuclear@0
|
291 FrameTiming.ThisFrameTime = 0.0;
|
nuclear@0
|
292 FrameTiming.Inputs.FrameDelta = calcFrameDelta();
|
nuclear@0
|
293 // This one is particularly critical, and has been missed in the past because
|
nuclear@0
|
294 // this init function wasn't called for app-rendered.
|
nuclear@0
|
295 FrameTiming.Inputs.ScreenDelay = calcScreenDelay();
|
nuclear@0
|
296 FrameTiming.Inputs.TimewarpWaitDelta = 0.0f;
|
nuclear@0
|
297
|
nuclear@0
|
298 LocklessTiming.SetState(FrameTiming);
|
nuclear@0
|
299 }
|
nuclear@0
|
300
|
nuclear@0
|
301
|
nuclear@0
|
302 double FrameTimeManager::calcFrameDelta() const
|
nuclear@0
|
303 {
|
nuclear@0
|
304 // Timing difference between frame is tracked by FrameTimeDeltas, or
|
nuclear@0
|
305 // is a hard-coded value of 1/FrameRate.
|
nuclear@0
|
306 double frameDelta;
|
nuclear@0
|
307
|
nuclear@0
|
308 if (!VsyncEnabled)
|
nuclear@0
|
309 {
|
nuclear@0
|
310 frameDelta = 0.0;
|
nuclear@0
|
311 }
|
nuclear@0
|
312 else if (FrameTimeDeltas.GetCount() > 3)
|
nuclear@0
|
313 {
|
nuclear@0
|
314 frameDelta = FrameTimeDeltas.GetMedianTimeDelta();
|
nuclear@0
|
315 if (frameDelta > (RenderInfo.Shutter.VsyncToNextVsync + 0.001))
|
nuclear@0
|
316 frameDelta = RenderInfo.Shutter.VsyncToNextVsync;
|
nuclear@0
|
317 }
|
nuclear@0
|
318 else
|
nuclear@0
|
319 {
|
nuclear@0
|
320 frameDelta = RenderInfo.Shutter.VsyncToNextVsync;
|
nuclear@0
|
321 }
|
nuclear@0
|
322
|
nuclear@0
|
323 return frameDelta;
|
nuclear@0
|
324 }
|
nuclear@0
|
325
|
nuclear@0
|
326
|
nuclear@0
|
327 double FrameTimeManager::calcScreenDelay() const
|
nuclear@0
|
328 {
|
nuclear@0
|
329 double screenDelay = ScreenSwitchingDelay;
|
nuclear@0
|
330 double measuredVSyncToScanout;
|
nuclear@0
|
331
|
nuclear@0
|
332 // Use real-time DK2 latency tester HW for prediction if its is working.
|
nuclear@0
|
333 // Do sanity check under 60 ms
|
nuclear@0
|
334 if (!VsyncEnabled)
|
nuclear@0
|
335 {
|
nuclear@0
|
336 screenDelay += NoVSyncToScanoutDelay;
|
nuclear@0
|
337 }
|
nuclear@0
|
338 else if ( DynamicPrediction &&
|
nuclear@0
|
339 (ScreenLatencyTracker.FrameDeltas.GetCount() > 3) &&
|
nuclear@0
|
340 (measuredVSyncToScanout = ScreenLatencyTracker.FrameDeltas.GetMedianTimeDelta(),
|
nuclear@0
|
341 (measuredVSyncToScanout > -0.0001) && (measuredVSyncToScanout < 0.06)) )
|
nuclear@0
|
342 {
|
nuclear@0
|
343 screenDelay += measuredVSyncToScanout;
|
nuclear@0
|
344 }
|
nuclear@0
|
345 else
|
nuclear@0
|
346 {
|
nuclear@0
|
347 screenDelay += VSyncToScanoutDelay;
|
nuclear@0
|
348 }
|
nuclear@0
|
349
|
nuclear@0
|
350 return screenDelay;
|
nuclear@0
|
351 }
|
nuclear@0
|
352
|
nuclear@0
|
353 double FrameTimeManager::calcTimewarpWaitDelta() const
|
nuclear@0
|
354 {
|
nuclear@0
|
355 // If timewarp timing hasn't been calculated, we should wait.
|
nuclear@0
|
356 if (!VsyncEnabled)
|
nuclear@0
|
357 return 0.0;
|
nuclear@0
|
358
|
nuclear@0
|
359 if (SdkRender)
|
nuclear@0
|
360 {
|
nuclear@0
|
361 if (NeedDistortionTimeMeasurement())
|
nuclear@0
|
362 return 0.0;
|
nuclear@0
|
363 return -(DistortionRenderTimes.GetMedianTimeDelta() + 0.0035);
|
nuclear@0
|
364
|
nuclear@0
|
365 //Revisit dynamic pre-Timewarp delay adjustment logic
|
nuclear@0
|
366 /*return -(DistortionRenderTimes.GetMedianTimeDelta() + 0.002 +
|
nuclear@0
|
367 TimewarpAdjuster.GetDelayReduction());*/
|
nuclear@0
|
368 }
|
nuclear@0
|
369
|
nuclear@0
|
370 // Just a hard-coded "high" value for game-drawn code.
|
nuclear@0
|
371 // TBD: Just return 0 and let users calculate this themselves?
|
nuclear@0
|
372 return -0.004;
|
nuclear@0
|
373
|
nuclear@0
|
374 //Revisit dynamic pre-Timewarp delay adjustment logic
|
nuclear@0
|
375 //return -(0.003 + TimewarpAdjuster.GetDelayReduction());
|
nuclear@0
|
376 }
|
nuclear@0
|
377
|
nuclear@0
|
378 //Revisit dynamic pre-Timewarp delay adjustment logic
|
nuclear@0
|
379 /*
|
nuclear@0
|
380 void FrameTimeManager::updateTimewarpTiming()
|
nuclear@0
|
381 {
|
nuclear@0
|
382 // If timewarp timing changes based on this sample, update it.
|
nuclear@0
|
383 double newTimewarpWaitDelta = calcTimewarpWaitDelta();
|
nuclear@0
|
384 if (newTimewarpWaitDelta != FrameTiming.Inputs.TimewarpWaitDelta)
|
nuclear@0
|
385 {
|
nuclear@0
|
386 FrameTiming.Inputs.TimewarpWaitDelta = newTimewarpWaitDelta;
|
nuclear@0
|
387 LocklessTiming.SetState(FrameTiming);
|
nuclear@0
|
388 }
|
nuclear@0
|
389 }
|
nuclear@0
|
390 */
|
nuclear@0
|
391
|
nuclear@0
|
392 void FrameTimeManager::Timing::InitTimingFromInputs(const FrameTimeManager::TimingInputs& inputs,
|
nuclear@0
|
393 HmdShutterTypeEnum shutterType,
|
nuclear@0
|
394 double thisFrameTime, unsigned int frameIndex)
|
nuclear@0
|
395 {
|
nuclear@0
|
396 // ThisFrameTime comes from the end of last frame, unless it it changed.
|
nuclear@0
|
397 double nextFrameBase;
|
nuclear@0
|
398 double frameDelta = inputs.FrameDelta;
|
nuclear@0
|
399
|
nuclear@0
|
400 FrameIndex = frameIndex;
|
nuclear@0
|
401
|
nuclear@0
|
402 ThisFrameTime = thisFrameTime;
|
nuclear@0
|
403 NextFrameTime = ThisFrameTime + frameDelta;
|
nuclear@0
|
404 nextFrameBase = NextFrameTime + inputs.ScreenDelay;
|
nuclear@0
|
405 MidpointTime = nextFrameBase + frameDelta * 0.5;
|
nuclear@0
|
406 TimewarpPointTime = (inputs.TimewarpWaitDelta == 0.0) ?
|
nuclear@0
|
407 0.0 : (NextFrameTime + inputs.TimewarpWaitDelta);
|
nuclear@0
|
408
|
nuclear@0
|
409 // Calculate absolute points in time when eye rendering or corresponding time-warp
|
nuclear@0
|
410 // screen edges will become visible.
|
nuclear@0
|
411 // This only matters with VSync.
|
nuclear@0
|
412 switch(shutterType)
|
nuclear@0
|
413 {
|
nuclear@0
|
414 case HmdShutter_RollingTopToBottom:
|
nuclear@0
|
415 EyeRenderTimes[0] = MidpointTime;
|
nuclear@0
|
416 EyeRenderTimes[1] = MidpointTime;
|
nuclear@0
|
417 TimeWarpStartEndTimes[0][0] = nextFrameBase;
|
nuclear@0
|
418 TimeWarpStartEndTimes[0][1] = nextFrameBase + frameDelta;
|
nuclear@0
|
419 TimeWarpStartEndTimes[1][0] = nextFrameBase;
|
nuclear@0
|
420 TimeWarpStartEndTimes[1][1] = nextFrameBase + frameDelta;
|
nuclear@0
|
421 break;
|
nuclear@0
|
422 case HmdShutter_RollingLeftToRight:
|
nuclear@0
|
423 EyeRenderTimes[0] = nextFrameBase + frameDelta * 0.25;
|
nuclear@0
|
424 EyeRenderTimes[1] = nextFrameBase + frameDelta * 0.75;
|
nuclear@0
|
425
|
nuclear@0
|
426 /*
|
nuclear@0
|
427 // TBD: MA: It is probably better if mesh sets it up per-eye.
|
nuclear@0
|
428 // Would apply if screen is 0 -> 1 for each eye mesh
|
nuclear@0
|
429 TimeWarpStartEndTimes[0][0] = nextFrameBase;
|
nuclear@0
|
430 TimeWarpStartEndTimes[0][1] = MidpointTime;
|
nuclear@0
|
431 TimeWarpStartEndTimes[1][0] = MidpointTime;
|
nuclear@0
|
432 TimeWarpStartEndTimes[1][1] = nextFrameBase + frameDelta;
|
nuclear@0
|
433 */
|
nuclear@0
|
434
|
nuclear@0
|
435 // Mesh is set up to vary from Edge of scree 0 -> 1 across both eyes
|
nuclear@0
|
436 TimeWarpStartEndTimes[0][0] = nextFrameBase;
|
nuclear@0
|
437 TimeWarpStartEndTimes[0][1] = nextFrameBase + frameDelta;
|
nuclear@0
|
438 TimeWarpStartEndTimes[1][0] = nextFrameBase;
|
nuclear@0
|
439 TimeWarpStartEndTimes[1][1] = nextFrameBase + frameDelta;
|
nuclear@0
|
440
|
nuclear@0
|
441 break;
|
nuclear@0
|
442 case HmdShutter_RollingRightToLeft:
|
nuclear@0
|
443
|
nuclear@0
|
444 EyeRenderTimes[0] = nextFrameBase + frameDelta * 0.75;
|
nuclear@0
|
445 EyeRenderTimes[1] = nextFrameBase + frameDelta * 0.25;
|
nuclear@0
|
446
|
nuclear@0
|
447 // This is *Correct* with Tom's distortion mesh organization.
|
nuclear@0
|
448 TimeWarpStartEndTimes[0][0] = nextFrameBase ;
|
nuclear@0
|
449 TimeWarpStartEndTimes[0][1] = nextFrameBase + frameDelta;
|
nuclear@0
|
450 TimeWarpStartEndTimes[1][0] = nextFrameBase ;
|
nuclear@0
|
451 TimeWarpStartEndTimes[1][1] = nextFrameBase + frameDelta;
|
nuclear@0
|
452 break;
|
nuclear@0
|
453 case HmdShutter_Global:
|
nuclear@0
|
454 // TBD
|
nuclear@0
|
455 EyeRenderTimes[0] = MidpointTime;
|
nuclear@0
|
456 EyeRenderTimes[1] = MidpointTime;
|
nuclear@0
|
457 TimeWarpStartEndTimes[0][0] = MidpointTime;
|
nuclear@0
|
458 TimeWarpStartEndTimes[0][1] = MidpointTime;
|
nuclear@0
|
459 TimeWarpStartEndTimes[1][0] = MidpointTime;
|
nuclear@0
|
460 TimeWarpStartEndTimes[1][1] = MidpointTime;
|
nuclear@0
|
461 break;
|
nuclear@0
|
462 default:
|
nuclear@0
|
463 break;
|
nuclear@0
|
464 }
|
nuclear@0
|
465 }
|
nuclear@0
|
466
|
nuclear@0
|
467
|
nuclear@0
|
468 double FrameTimeManager::BeginFrame(unsigned frameIndex)
|
nuclear@0
|
469 {
|
nuclear@0
|
470 RenderIMUTimeSeconds = 0.0;
|
nuclear@0
|
471 TimewarpIMUTimeSeconds = 0.0;
|
nuclear@0
|
472
|
nuclear@0
|
473 // TPH - putting an assert so this doesn't remain a hidden problem.
|
nuclear@0
|
474 OVR_ASSERT(FrameTiming.Inputs.ScreenDelay != 0);
|
nuclear@0
|
475
|
nuclear@0
|
476 // ThisFrameTime comes from the end of last frame, unless it it changed.
|
nuclear@0
|
477 double thisFrameTime = (FrameTiming.NextFrameTime != 0.0) ?
|
nuclear@0
|
478 FrameTiming.NextFrameTime : ovr_GetTimeInSeconds();
|
nuclear@0
|
479
|
nuclear@0
|
480 // We are starting to process a new frame...
|
nuclear@0
|
481 FrameTiming.InitTimingFromInputs(FrameTiming.Inputs, RenderInfo.Shutter.Type,
|
nuclear@0
|
482 thisFrameTime, frameIndex);
|
nuclear@0
|
483
|
nuclear@0
|
484 return FrameTiming.ThisFrameTime;
|
nuclear@0
|
485 }
|
nuclear@0
|
486
|
nuclear@0
|
487
|
nuclear@0
|
488 void FrameTimeManager::EndFrame()
|
nuclear@0
|
489 {
|
nuclear@0
|
490 // Record timing since last frame; must be called after Present & sync.
|
nuclear@0
|
491 FrameTiming.NextFrameTime = ovr_GetTimeInSeconds();
|
nuclear@0
|
492 if (FrameTiming.ThisFrameTime > 0.0)
|
nuclear@0
|
493 {
|
nuclear@0
|
494 //Revisit dynamic pre-Timewarp delay adjustment logic
|
nuclear@0
|
495 /*
|
nuclear@0
|
496 double actualFrameDelta = FrameTiming.NextFrameTime - FrameTiming.ThisFrameTime;
|
nuclear@0
|
497
|
nuclear@0
|
498 if (VsyncEnabled)
|
nuclear@0
|
499 TimewarpAdjuster.UpdateTimewarpWaitIfSkippedFrames(this, actualFrameDelta,
|
nuclear@0
|
500 FrameTiming.NextFrameTime);
|
nuclear@0
|
501
|
nuclear@0
|
502 FrameTimeDeltas.AddTimeDelta(actualFrameDelta);
|
nuclear@0
|
503 */
|
nuclear@0
|
504 FrameTimeDeltas.AddTimeDelta(FrameTiming.NextFrameTime - FrameTiming.ThisFrameTime);
|
nuclear@0
|
505 FrameTiming.Inputs.FrameDelta = calcFrameDelta();
|
nuclear@0
|
506 }
|
nuclear@0
|
507
|
nuclear@0
|
508 // Write to Lock-less
|
nuclear@0
|
509 LocklessTiming.SetState(FrameTiming);
|
nuclear@0
|
510 }
|
nuclear@0
|
511
|
nuclear@0
|
512 // Thread-safe function to query timing for a future frame
|
nuclear@0
|
513
|
nuclear@0
|
514 FrameTimeManager::Timing FrameTimeManager::GetFrameTiming(unsigned frameIndex)
|
nuclear@0
|
515 {
|
nuclear@0
|
516 Timing frameTiming = LocklessTiming.GetState();
|
nuclear@0
|
517
|
nuclear@0
|
518 if (frameTiming.ThisFrameTime == 0.0)
|
nuclear@0
|
519 {
|
nuclear@0
|
520 // If timing hasn't been initialized, starting based on "now" is the best guess.
|
nuclear@0
|
521 frameTiming.InitTimingFromInputs(frameTiming.Inputs, RenderInfo.Shutter.Type,
|
nuclear@0
|
522 ovr_GetTimeInSeconds(), frameIndex);
|
nuclear@0
|
523 }
|
nuclear@0
|
524
|
nuclear@0
|
525 else if (frameIndex > frameTiming.FrameIndex)
|
nuclear@0
|
526 {
|
nuclear@0
|
527 unsigned frameDelta = frameIndex - frameTiming.FrameIndex;
|
nuclear@0
|
528 double thisFrameTime = frameTiming.NextFrameTime +
|
nuclear@0
|
529 double(frameDelta-1) * frameTiming.Inputs.FrameDelta;
|
nuclear@0
|
530 // Don't run away too far into the future beyond rendering.
|
nuclear@0
|
531 OVR_DEBUG_LOG_COND(frameDelta >= 6, ("GetFrameTiming is 6 or more frames in future beyond rendering!"));
|
nuclear@0
|
532
|
nuclear@0
|
533 frameTiming.InitTimingFromInputs(frameTiming.Inputs, RenderInfo.Shutter.Type,
|
nuclear@0
|
534 thisFrameTime, frameIndex);
|
nuclear@0
|
535 }
|
nuclear@0
|
536
|
nuclear@0
|
537 return frameTiming;
|
nuclear@0
|
538 }
|
nuclear@0
|
539
|
nuclear@0
|
540
|
nuclear@0
|
541 double FrameTimeManager::GetEyePredictionTime(ovrEyeType eye, unsigned int frameIndex)
|
nuclear@0
|
542 {
|
nuclear@0
|
543 if (VsyncEnabled)
|
nuclear@0
|
544 {
|
nuclear@0
|
545 FrameTimeManager::Timing frameTiming = GetFrameTiming(frameIndex);
|
nuclear@0
|
546
|
nuclear@0
|
547 // Special case: ovrEye_Count predicts to midpoint
|
nuclear@0
|
548 return (eye == ovrEye_Count) ? frameTiming.MidpointTime : frameTiming.EyeRenderTimes[eye];
|
nuclear@0
|
549 }
|
nuclear@0
|
550
|
nuclear@0
|
551 // No VSync: Best guess for the near future
|
nuclear@0
|
552 return ovr_GetTimeInSeconds() + ScreenSwitchingDelay + NoVSyncToScanoutDelay;
|
nuclear@0
|
553 }
|
nuclear@0
|
554
|
nuclear@0
|
555 ovrTrackingState FrameTimeManager::GetEyePredictionTracking(ovrHmd hmd, ovrEyeType eye, unsigned int frameIndex)
|
nuclear@0
|
556 {
|
nuclear@0
|
557 double eyeRenderTime = GetEyePredictionTime(eye, frameIndex);
|
nuclear@0
|
558 ovrTrackingState eyeState = ovrHmd_GetTrackingState(hmd, eyeRenderTime);
|
nuclear@0
|
559
|
nuclear@0
|
560 // Record view pose sampling time for Latency reporting.
|
nuclear@0
|
561 if (RenderIMUTimeSeconds == 0.0)
|
nuclear@0
|
562 {
|
nuclear@0
|
563 // TODO: Figure out why this are not as accurate as ovr_GetTimeInSeconds()
|
nuclear@0
|
564 //RenderIMUTimeSeconds = eyeState.RawSensorData.TimeInSeconds;
|
nuclear@0
|
565 RenderIMUTimeSeconds = ovr_GetTimeInSeconds();
|
nuclear@0
|
566 }
|
nuclear@0
|
567
|
nuclear@0
|
568 return eyeState;
|
nuclear@0
|
569 }
|
nuclear@0
|
570
|
nuclear@0
|
571 Posef FrameTimeManager::GetEyePredictionPose(ovrHmd hmd, ovrEyeType eye)
|
nuclear@0
|
572 {
|
nuclear@0
|
573 double eyeRenderTime = GetEyePredictionTime(eye, 0);
|
nuclear@0
|
574 ovrTrackingState eyeState = ovrHmd_GetTrackingState(hmd, eyeRenderTime);
|
nuclear@0
|
575
|
nuclear@0
|
576 // Record view pose sampling time for Latency reporting.
|
nuclear@0
|
577 if (RenderIMUTimeSeconds == 0.0)
|
nuclear@0
|
578 {
|
nuclear@0
|
579 // TODO: Figure out why this are not as accurate as ovr_GetTimeInSeconds()
|
nuclear@0
|
580 //RenderIMUTimeSeconds = eyeState.RawSensorData.TimeInSeconds;
|
nuclear@0
|
581 RenderIMUTimeSeconds = ovr_GetTimeInSeconds();
|
nuclear@0
|
582 }
|
nuclear@0
|
583
|
nuclear@0
|
584 return eyeState.HeadPose.ThePose;
|
nuclear@0
|
585 }
|
nuclear@0
|
586
|
nuclear@0
|
587 void FrameTimeManager::GetTimewarpPredictions(ovrEyeType eye, double timewarpStartEnd[2])
|
nuclear@0
|
588 {
|
nuclear@0
|
589 if (VsyncEnabled)
|
nuclear@0
|
590 {
|
nuclear@0
|
591 timewarpStartEnd[0] = FrameTiming.TimeWarpStartEndTimes[eye][0];
|
nuclear@0
|
592 timewarpStartEnd[1] = FrameTiming.TimeWarpStartEndTimes[eye][1];
|
nuclear@0
|
593 return;
|
nuclear@0
|
594 }
|
nuclear@0
|
595
|
nuclear@0
|
596 // Free-running, so this will be displayed immediately.
|
nuclear@0
|
597 // Unfortunately we have no idea which bit of the screen is actually going to be displayed.
|
nuclear@0
|
598 // TODO: guess which bit of the screen is being displayed!
|
nuclear@0
|
599 // (e.g. use DONOTWAIT on present and see when the return isn't WASSTILLWAITING?)
|
nuclear@0
|
600
|
nuclear@0
|
601 // We have no idea where scan-out is currently, so we can't usefully warp the screen spatially.
|
nuclear@0
|
602 timewarpStartEnd[0] = ovr_GetTimeInSeconds() + ScreenSwitchingDelay + NoVSyncToScanoutDelay;
|
nuclear@0
|
603 timewarpStartEnd[1] = timewarpStartEnd[0];
|
nuclear@0
|
604 }
|
nuclear@0
|
605
|
nuclear@0
|
606
|
nuclear@0
|
607 void FrameTimeManager::GetTimewarpMatrices(ovrHmd hmd, ovrEyeType eyeId,
|
nuclear@0
|
608 ovrPosef renderPose, ovrMatrix4f twmOut[2],
|
nuclear@0
|
609 double debugTimingOffsetInSeconds)
|
nuclear@0
|
610 {
|
nuclear@0
|
611 if (!hmd)
|
nuclear@0
|
612 {
|
nuclear@0
|
613 return;
|
nuclear@0
|
614 }
|
nuclear@0
|
615
|
nuclear@0
|
616 double timewarpStartEnd[2] = { 0.0, 0.0 };
|
nuclear@0
|
617 GetTimewarpPredictions(eyeId, timewarpStartEnd);
|
nuclear@0
|
618
|
nuclear@0
|
619 //TPH, to vary timing, to allow developers to debug, to shunt the predicted time forward
|
nuclear@0
|
620 //and back, and see if the SDK is truly delivering the correct time. Also to allow
|
nuclear@0
|
621 //illustration of the detrimental effects when this is not done right.
|
nuclear@0
|
622 timewarpStartEnd[0] += debugTimingOffsetInSeconds;
|
nuclear@0
|
623 timewarpStartEnd[1] += debugTimingOffsetInSeconds;
|
nuclear@0
|
624
|
nuclear@0
|
625
|
nuclear@0
|
626 //HMDState* p = (HMDState*)hmd;
|
nuclear@0
|
627 ovrTrackingState startState = ovrHmd_GetTrackingState(hmd, timewarpStartEnd[0]);
|
nuclear@0
|
628 ovrTrackingState endState = ovrHmd_GetTrackingState(hmd, timewarpStartEnd[1]);
|
nuclear@0
|
629
|
nuclear@0
|
630 if (TimewarpIMUTimeSeconds == 0.0)
|
nuclear@0
|
631 {
|
nuclear@0
|
632 // TODO: Figure out why this are not as accurate as ovr_GetTimeInSeconds()
|
nuclear@0
|
633 //TimewarpIMUTimeSeconds = startState.RawSensorData.TimeInSeconds;
|
nuclear@0
|
634 TimewarpIMUTimeSeconds = ovr_GetTimeInSeconds();
|
nuclear@0
|
635 }
|
nuclear@0
|
636
|
nuclear@0
|
637 Quatf quatFromStart = startState.HeadPose.ThePose.Orientation;
|
nuclear@0
|
638 Quatf quatFromEnd = endState.HeadPose.ThePose.Orientation;
|
nuclear@0
|
639 Quatf quatFromEye = renderPose.Orientation; //EyeRenderPoses[eyeId].Orientation;
|
nuclear@0
|
640 quatFromEye.Invert(); // because we need the view matrix, not the camera matrix
|
nuclear@0
|
641
|
nuclear@0
|
642 Quatf timewarpStartQuat = quatFromEye * quatFromStart;
|
nuclear@0
|
643 Quatf timewarpEndQuat = quatFromEye * quatFromEnd;
|
nuclear@0
|
644
|
nuclear@0
|
645 Matrix4f timewarpStart(timewarpStartQuat);
|
nuclear@0
|
646 Matrix4f timewarpEnd(timewarpEndQuat);
|
nuclear@0
|
647
|
nuclear@0
|
648
|
nuclear@0
|
649 // The real-world orientations have: X=right, Y=up, Z=backwards.
|
nuclear@0
|
650 // The vectors inside the mesh are in NDC to keep the shader simple: X=right, Y=down, Z=forwards.
|
nuclear@0
|
651 // So we need to perform a similarity transform on this delta matrix.
|
nuclear@0
|
652 // The verbose code would look like this:
|
nuclear@0
|
653 /*
|
nuclear@0
|
654 Matrix4f matBasisChange;
|
nuclear@0
|
655 matBasisChange.SetIdentity();
|
nuclear@0
|
656 matBasisChange.M[0][0] = 1.0f;
|
nuclear@0
|
657 matBasisChange.M[1][1] = -1.0f;
|
nuclear@0
|
658 matBasisChange.M[2][2] = -1.0f;
|
nuclear@0
|
659 Matrix4f matBasisChangeInv = matBasisChange.Inverted();
|
nuclear@0
|
660 matRenderFromNow = matBasisChangeInv * matRenderFromNow * matBasisChange;
|
nuclear@0
|
661 */
|
nuclear@0
|
662 // ...but of course all the above is a constant transform and much more easily done.
|
nuclear@0
|
663 // We flip the signs of the Y&Z row, then flip the signs of the Y&Z column,
|
nuclear@0
|
664 // and of course most of the flips cancel:
|
nuclear@0
|
665 // +++ +-- +--
|
nuclear@0
|
666 // +++ -> flip Y&Z columns -> +-- -> flip Y&Z rows -> -++
|
nuclear@0
|
667 // +++ +-- -++
|
nuclear@0
|
668 timewarpStart.M[0][1] = -timewarpStart.M[0][1];
|
nuclear@0
|
669 timewarpStart.M[0][2] = -timewarpStart.M[0][2];
|
nuclear@0
|
670 timewarpStart.M[1][0] = -timewarpStart.M[1][0];
|
nuclear@0
|
671 timewarpStart.M[2][0] = -timewarpStart.M[2][0];
|
nuclear@0
|
672
|
nuclear@0
|
673 timewarpEnd .M[0][1] = -timewarpEnd .M[0][1];
|
nuclear@0
|
674 timewarpEnd .M[0][2] = -timewarpEnd .M[0][2];
|
nuclear@0
|
675 timewarpEnd .M[1][0] = -timewarpEnd .M[1][0];
|
nuclear@0
|
676 timewarpEnd .M[2][0] = -timewarpEnd .M[2][0];
|
nuclear@0
|
677
|
nuclear@0
|
678 twmOut[0] = timewarpStart;
|
nuclear@0
|
679 twmOut[1] = timewarpEnd;
|
nuclear@0
|
680 }
|
nuclear@0
|
681
|
nuclear@0
|
682
|
nuclear@0
|
683 // Used by renderer to determine if it should time distortion rendering.
|
nuclear@0
|
684 bool FrameTimeManager::NeedDistortionTimeMeasurement() const
|
nuclear@0
|
685 {
|
nuclear@0
|
686 if (!VsyncEnabled)
|
nuclear@0
|
687 return false;
|
nuclear@0
|
688 return DistortionRenderTimes.GetCount() < DistortionRenderTimes.Capacity;
|
nuclear@0
|
689 }
|
nuclear@0
|
690
|
nuclear@0
|
691
|
nuclear@0
|
692 void FrameTimeManager::AddDistortionTimeMeasurement(double distortionTimeSeconds)
|
nuclear@0
|
693 {
|
nuclear@0
|
694 DistortionRenderTimes.AddTimeDelta(distortionTimeSeconds);
|
nuclear@0
|
695
|
nuclear@0
|
696 //Revisit dynamic pre-Timewarp delay adjustment logic
|
nuclear@0
|
697 //updateTimewarpTiming();
|
nuclear@0
|
698
|
nuclear@0
|
699 // If timewarp timing changes based on this sample, update it.
|
nuclear@0
|
700 double newTimewarpWaitDelta = calcTimewarpWaitDelta();
|
nuclear@0
|
701 if (newTimewarpWaitDelta != FrameTiming.Inputs.TimewarpWaitDelta)
|
nuclear@0
|
702 {
|
nuclear@0
|
703 FrameTiming.Inputs.TimewarpWaitDelta = newTimewarpWaitDelta;
|
nuclear@0
|
704 LocklessTiming.SetState(FrameTiming);
|
nuclear@0
|
705 }
|
nuclear@0
|
706 }
|
nuclear@0
|
707
|
nuclear@0
|
708
|
nuclear@0
|
709 void FrameTimeManager::UpdateFrameLatencyTrackingAfterEndFrame(
|
nuclear@0
|
710 unsigned char frameLatencyTestColor[3],
|
nuclear@0
|
711 const Util::FrameTimeRecordSet& rs)
|
nuclear@0
|
712 {
|
nuclear@0
|
713 // FrameTiming.NextFrameTime in this context (after EndFrame) is the end frame time.
|
nuclear@0
|
714 ScreenLatencyTracker.SaveDrawColor(frameLatencyTestColor[0],
|
nuclear@0
|
715 FrameTiming.NextFrameTime,
|
nuclear@0
|
716 RenderIMUTimeSeconds,
|
nuclear@0
|
717 TimewarpIMUTimeSeconds);
|
nuclear@0
|
718
|
nuclear@0
|
719 ScreenLatencyTracker.MatchRecord(rs);
|
nuclear@0
|
720
|
nuclear@0
|
721 // If screen delay changed, update timing.
|
nuclear@0
|
722 double newScreenDelay = calcScreenDelay();
|
nuclear@0
|
723 if (newScreenDelay != FrameTiming.Inputs.ScreenDelay)
|
nuclear@0
|
724 {
|
nuclear@0
|
725 FrameTiming.Inputs.ScreenDelay = newScreenDelay;
|
nuclear@0
|
726 LocklessTiming.SetState(FrameTiming);
|
nuclear@0
|
727 }
|
nuclear@0
|
728 }
|
nuclear@0
|
729
|
nuclear@0
|
730
|
nuclear@0
|
731 //-----------------------------------------------------------------------------------
|
nuclear@0
|
732 //Revisit dynamic pre-Timewarp delay adjustment logic
|
nuclear@0
|
733 /*
|
nuclear@0
|
734 void FrameTimeManager::TimewarpDelayAdjuster::Reset()
|
nuclear@0
|
735 {
|
nuclear@0
|
736 State = State_WaitingToReduceLevel;
|
nuclear@0
|
737 DelayLevel = 0;
|
nuclear@0
|
738 InitialFrameCounter = 0;
|
nuclear@0
|
739 TimewarpDelayReductionSeconds = 0.0;
|
nuclear@0
|
740 DelayLevelFinishTime = 0.0;
|
nuclear@0
|
741
|
nuclear@0
|
742 memset(WaitTimeIndexForLevel, 0, sizeof(WaitTimeIndexForLevel));
|
nuclear@0
|
743 // If we are at level 0, waits are infinite.
|
nuclear@0
|
744 WaitTimeIndexForLevel[0] = MaxTimeIndex;
|
nuclear@0
|
745 }
|
nuclear@0
|
746
|
nuclear@0
|
747
|
nuclear@0
|
748 void FrameTimeManager::TimewarpDelayAdjuster::
|
nuclear@0
|
749 UpdateTimewarpWaitIfSkippedFrames(FrameTimeManager* manager,
|
nuclear@0
|
750 double measuredFrameDelta, double nextFrameTime)
|
nuclear@0
|
751 {
|
nuclear@0
|
752 // Times in seconds
|
nuclear@0
|
753 const static double delayTimingTiers[7] = { 1.0, 5.0, 15.0, 30.0, 60.0, 120.0, 1000000.0 };
|
nuclear@0
|
754
|
nuclear@0
|
755 const double currentFrameDelta = manager->FrameTiming.Inputs.FrameDelta;
|
nuclear@0
|
756
|
nuclear@0
|
757
|
nuclear@0
|
758 // Once we detect a frame spike, we skip several frames before testing again.
|
nuclear@0
|
759 if (InitialFrameCounter > 0)
|
nuclear@0
|
760 {
|
nuclear@0
|
761 InitialFrameCounter --;
|
nuclear@0
|
762 return;
|
nuclear@0
|
763 }
|
nuclear@0
|
764
|
nuclear@0
|
765 // A skipped frame would usually take 2x longer than a regular frame
|
nuclear@0
|
766 if (measuredFrameDelta > currentFrameDelta * 1.8)
|
nuclear@0
|
767 {
|
nuclear@0
|
768 if (State == State_WaitingToReduceLevel)
|
nuclear@0
|
769 {
|
nuclear@0
|
770 // If we got here, escalate the level again.
|
nuclear@0
|
771 if (DelayLevel < MaxDelayLevel)
|
nuclear@0
|
772 {
|
nuclear@0
|
773 DelayLevel++;
|
nuclear@0
|
774 InitialFrameCounter = 3;
|
nuclear@0
|
775 }
|
nuclear@0
|
776 }
|
nuclear@0
|
777
|
nuclear@0
|
778 else if (State == State_VerifyingAfterReduce)
|
nuclear@0
|
779 {
|
nuclear@0
|
780 // So we went down to this level and tried to wait to see if there was
|
nuclear@0
|
781 // a skipped frame and there is -> go back up a level and increment its timing tier
|
nuclear@0
|
782 if (DelayLevel < MaxDelayLevel)
|
nuclear@0
|
783 {
|
nuclear@0
|
784 DelayLevel++;
|
nuclear@0
|
785 State = State_WaitingToReduceLevel;
|
nuclear@0
|
786
|
nuclear@0
|
787 // For higher-level delay reductions, i.e. more than half a frame,
|
nuclear@0
|
788 // we don't go into the infinite wait tier.
|
nuclear@0
|
789 int maxTimingTier = MaxTimeIndex;
|
nuclear@0
|
790 if (DelayLevel > MaxInfiniteTimingLevel)
|
nuclear@0
|
791 maxTimingTier--;
|
nuclear@0
|
792
|
nuclear@0
|
793 if (WaitTimeIndexForLevel[DelayLevel] < maxTimingTier )
|
nuclear@0
|
794 WaitTimeIndexForLevel[DelayLevel]++;
|
nuclear@0
|
795 }
|
nuclear@0
|
796 }
|
nuclear@0
|
797
|
nuclear@0
|
798 DelayLevelFinishTime = nextFrameTime +
|
nuclear@0
|
799 delayTimingTiers[WaitTimeIndexForLevel[DelayLevel]];
|
nuclear@0
|
800 TimewarpDelayReductionSeconds = currentFrameDelta * 0.125 * DelayLevel;
|
nuclear@0
|
801 manager->updateTimewarpTiming();
|
nuclear@0
|
802
|
nuclear@0
|
803 }
|
nuclear@0
|
804
|
nuclear@0
|
805 else if (nextFrameTime > DelayLevelFinishTime)
|
nuclear@0
|
806 {
|
nuclear@0
|
807 if (State == State_WaitingToReduceLevel)
|
nuclear@0
|
808 {
|
nuclear@0
|
809 if (DelayLevel > 0)
|
nuclear@0
|
810 {
|
nuclear@0
|
811 DelayLevel--;
|
nuclear@0
|
812 State = State_VerifyingAfterReduce;
|
nuclear@0
|
813 // Always use 1 sec to see if "down sampling mode" caused problems
|
nuclear@0
|
814 DelayLevelFinishTime = nextFrameTime + 1.0f;
|
nuclear@0
|
815 }
|
nuclear@0
|
816 }
|
nuclear@0
|
817 else if (State == State_VerifyingAfterReduce)
|
nuclear@0
|
818 {
|
nuclear@0
|
819 // Prior display level successfully reduced,
|
nuclear@0
|
820 // try to see if we could go down further after the wait.
|
nuclear@0
|
821 WaitTimeIndexForLevel[DelayLevel+1] = 0;
|
nuclear@0
|
822 State = State_WaitingToReduceLevel;
|
nuclear@0
|
823 DelayLevelFinishTime = nextFrameTime +
|
nuclear@0
|
824 delayTimingTiers[WaitTimeIndexForLevel[DelayLevel]];
|
nuclear@0
|
825 }
|
nuclear@0
|
826
|
nuclear@0
|
827 // TBD: Update TimeWarpTiming
|
nuclear@0
|
828 TimewarpDelayReductionSeconds = currentFrameDelta * 0.125 * DelayLevel;
|
nuclear@0
|
829 manager->updateTimewarpTiming();
|
nuclear@0
|
830 }
|
nuclear@0
|
831
|
nuclear@0
|
832
|
nuclear@0
|
833 //static int oldDelayLevel = 0;
|
nuclear@0
|
834
|
nuclear@0
|
835 //if (oldDelayLevel != DelayLevel)
|
nuclear@0
|
836 //{
|
nuclear@0
|
837 //OVR_DEBUG_LOG(("DelayLevel:%d tReduction = %0.5f ", DelayLevel, TimewarpDelayReductionSeconds));
|
nuclear@0
|
838 //oldDelayLevel = DelayLevel;
|
nuclear@0
|
839 //}
|
nuclear@0
|
840 }
|
nuclear@0
|
841 */
|
nuclear@0
|
842
|
nuclear@0
|
843 //-----------------------------------------------------------------------------------
|
nuclear@0
|
844 // ***** TimeDeltaCollector
|
nuclear@0
|
845
|
nuclear@0
|
846 void TimeDeltaCollector::AddTimeDelta(double timeSeconds)
|
nuclear@0
|
847 {
|
nuclear@0
|
848 // avoid adding invalid timing values
|
nuclear@0
|
849 if(timeSeconds < 0.0f)
|
nuclear@0
|
850 return;
|
nuclear@0
|
851
|
nuclear@0
|
852 if (Count == Capacity)
|
nuclear@0
|
853 {
|
nuclear@0
|
854 for(int i=0; i< Count-1; i++)
|
nuclear@0
|
855 TimeBufferSeconds[i] = TimeBufferSeconds[i+1];
|
nuclear@0
|
856 Count--;
|
nuclear@0
|
857 }
|
nuclear@0
|
858 TimeBufferSeconds[Count++] = timeSeconds;
|
nuclear@0
|
859
|
nuclear@0
|
860 ReCalcMedian = true;
|
nuclear@0
|
861 }
|
nuclear@0
|
862
|
nuclear@0
|
// KevinJ: Better median function
// Recursive partition-based median: uses the first element as a pivot, splits the
// remaining elements into below-pivot and above-pivot lists, and recurses into the
// side that must still contain the median. lessThanSum/greaterThanSum carry the
// element counts already discarded on each side at earlier recursion levels.
// NOTE(review): assumes inputListLength >= 1 — inputList[0] is read unconditionally;
// callers must not pass an empty list.
double CalculateListMedianRecursive(const double inputList[TimeDeltaCollector::Capacity], int inputListLength, int lessThanSum, int greaterThanSum)
{
    double lessThanMedian[TimeDeltaCollector::Capacity], greaterThanMedian[TimeDeltaCollector::Capacity];
    int lessThanMedianListLength = 0, greaterThanMedianListLength = 0;
    // Pivot: first element of the current sublist.
    double median = inputList[0];
    int i;
    for (i = 1; i < inputListLength; i++)
    {
        // If same value, spread among lists evenly
        // (the (i & 1) parity test alternates which side receives duplicates of the pivot).
        if (inputList[i] < median || ((i & 1) == 0 && inputList[i] == median))
            lessThanMedian[lessThanMedianListLength++] = inputList[i];
        else
            greaterThanMedian[greaterThanMedianListLength++] = inputList[i];
    }
    // The pivot is the answer when the cumulative side counts differ by exactly one.
    if (lessThanMedianListLength + lessThanSum == greaterThanMedianListLength + greaterThanSum + 1 ||
        lessThanMedianListLength + lessThanSum == greaterThanMedianListLength + greaterThanSum - 1)
        return median;

    // Otherwise recurse into the heavier side, folding the pivot into the lighter one.
    if (lessThanMedianListLength + lessThanSum < greaterThanMedianListLength + greaterThanSum)
    {
        lessThanMedian[lessThanMedianListLength++] = median;
        return CalculateListMedianRecursive(greaterThanMedian, greaterThanMedianListLength, lessThanMedianListLength + lessThanSum, greaterThanSum);
    }
    else
    {
        greaterThanMedian[greaterThanMedianListLength++] = median;
        return CalculateListMedianRecursive(lessThanMedian, lessThanMedianListLength, lessThanSum, greaterThanMedianListLength + greaterThanSum);
    }
}
|
nuclear@0
|
893 // KevinJ: Excludes Firmware hack
|
nuclear@0
|
894 double TimeDeltaCollector::GetMedianTimeDeltaNoFirmwareHack() const
|
nuclear@0
|
895 {
|
nuclear@0
|
896 if (ReCalcMedian)
|
nuclear@0
|
897 {
|
nuclear@0
|
898 ReCalcMedian = false;
|
nuclear@0
|
899 Median = CalculateListMedianRecursive(TimeBufferSeconds, Count, 0, 0);
|
nuclear@0
|
900 }
|
nuclear@0
|
901 return Median;
|
nuclear@0
|
902 }
|
nuclear@0
|
903 double TimeDeltaCollector::GetMedianTimeDelta() const
|
nuclear@0
|
904 {
|
nuclear@0
|
905 if(ReCalcMedian)
|
nuclear@0
|
906 {
|
nuclear@0
|
907 double SortedList[Capacity];
|
nuclear@0
|
908 bool used[Capacity];
|
nuclear@0
|
909
|
nuclear@0
|
910 memset(used, 0, sizeof(used));
|
nuclear@0
|
911 SortedList[0] = 0.0; // In case Count was 0...
|
nuclear@0
|
912
|
nuclear@0
|
913 // Probably the slowest way to find median...
|
nuclear@0
|
914 for (int i=0; i<Count; i++)
|
nuclear@0
|
915 {
|
nuclear@0
|
916 double smallestDelta = 1000000.0;
|
nuclear@0
|
917 int index = 0;
|
nuclear@0
|
918
|
nuclear@0
|
919 for (int j = 0; j < Count; j++)
|
nuclear@0
|
920 {
|
nuclear@0
|
921 if (!used[j])
|
nuclear@0
|
922 {
|
nuclear@0
|
923 if (TimeBufferSeconds[j] < smallestDelta)
|
nuclear@0
|
924 {
|
nuclear@0
|
925 smallestDelta = TimeBufferSeconds[j];
|
nuclear@0
|
926 index = j;
|
nuclear@0
|
927 }
|
nuclear@0
|
928 }
|
nuclear@0
|
929 }
|
nuclear@0
|
930
|
nuclear@0
|
931 // Mark as used
|
nuclear@0
|
932 used[index] = true;
|
nuclear@0
|
933 SortedList[i] = smallestDelta;
|
nuclear@0
|
934 }
|
nuclear@0
|
935
|
nuclear@0
|
936 // FIRMWARE HACK: Don't take the actual median, but err on the low time side
|
nuclear@0
|
937 Median = SortedList[Count/4];
|
nuclear@0
|
938 ReCalcMedian = false;
|
nuclear@0
|
939 }
|
nuclear@0
|
940
|
nuclear@0
|
941 return Median;
|
nuclear@0
|
942 }
|
nuclear@0
|
943
|
nuclear@0
|
944
|
nuclear@0
|
945 }} // namespace OVR::CAPI
|
nuclear@0
|
946
|