Mercurial > hg > Game > Games
comparison Orchestland/Assets/OVR/Scripts/OVRDisplay.cs @ 3:0030a1b971fb default tip
merge
author: Yuta ANSE <e135745@ie.u-ryukyu.ac.jp>
date: Fri, 17 Jul 2015 23:23:43 +0900
parents: f7675884f2a1
children: (none)
comparison
equal
deleted
inserted
replaced
2:fdab88fc2cb9 | 3:0030a1b971fb |
---|---|
1 /************************************************************************************ | |
2 | |
3 Copyright : Copyright 2014 Oculus VR, LLC. All Rights reserved. | |
4 | |
5 Licensed under the Oculus VR Rift SDK License Version 3.2 (the "License"); | |
6 you may not use the Oculus VR Rift SDK except in compliance with the License, | |
7 which is provided at the time of installation or download, or which | |
8 otherwise accompanies this software in either electronic or hard copy form. | |
9 | |
10 You may obtain a copy of the License at | |
11 | |
12 http://www.oculusvr.com/licenses/LICENSE-3.2 | |
13 | |
14 Unless required by applicable law or agreed to in writing, the Oculus VR SDK | |
15 distributed under the License is distributed on an "AS IS" BASIS, | |
16 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
17 See the License for the specific language governing permissions and | |
18 limitations under the License. | |
19 | |
20 ************************************************************************************/ | |
21 | |
22 using System; | |
23 using System.Runtime.InteropServices; | |
24 using UnityEngine; | |
25 using Ovr; | |
26 | |
27 /// <summary> | |
28 /// Manages an Oculus Rift head-mounted display (HMD). | |
29 /// </summary> | |
30 public class OVRDisplay | |
31 { | |
	/// <summary>
	/// Specifies the size and field-of-view for one eye texture.
	/// Filled in by ConfigureEyeDesc and returned by GetEyeRenderDesc.
	/// </summary>
	public struct EyeRenderDesc
	{
		/// <summary>
		/// The horizontal and vertical size of the texture, in texels.
		/// </summary>
		public Vector2 resolution;

		/// <summary>
		/// The angle of the horizontal and vertical field of view in degrees.
		/// </summary>
		public Vector2 fov;
	}
47 | |
	/// <summary>
	/// Contains latency measurements for a single frame of rendering.
	/// Populated from the native "DK2Latency" query in the latency property.
	/// </summary>
	public struct LatencyData
	{
		/// <summary>
		/// The time it took to render both eyes in seconds.
		/// </summary>
		public float render;

		/// <summary>
		/// The time it took to perform TimeWarp in seconds.
		/// </summary>
		public float timeWarp;

		/// <summary>
		/// The time between the end of TimeWarp and scan-out in seconds.
		/// </summary>
		public float postPresent;
	}
68 | |
	/// <summary>
	/// If true, a physical HMD is attached to the system.
	/// </summary>
	/// <value><c>true</c> if is present; otherwise, <c>false</c>.</value>
	public bool isPresent
	{
		get {
#if !UNITY_ANDROID || UNITY_EDITOR
			// Desktop/Editor: read the "HMD connected" bit from the tracking status flags.
			return (OVRManager.capiHmd.GetTrackingState().StatusFlags & (uint)StatusBits.HmdConnected) != 0;
#else
			// Android: query the native plugin directly.
			return OVR_IsHMDPresent();
#endif
		}
	}
83 | |
	// Cached Screen.width from the last viewport update; used to detect resizes.
	private int prevScreenWidth;
	// Cached Screen.height from the last viewport update; used to detect resizes.
	private int prevScreenHeight;
	// When true, UpdateTextures() re-sends the eye texture pointers to the native plugin.
	private bool needsSetTexture;
	// Last-seen OVRManager.instance.virtualTextureScale; used to detect scale changes.
	private float prevVirtualTextureScale;
	// Last-seen Screen.fullScreen; used to detect windowed/full-screen transitions.
	private bool prevFullScreen;
	// Per-eye poses cached by GetEyePose().
	private OVRPose[] eyePoses = new OVRPose[(int)OVREye.Count];
	// Per-eye resolution/FOV descriptors filled by ConfigureEyeDesc().
	private EyeRenderDesc[] eyeDescs = new EyeRenderDesc[(int)OVREye.Count];
	// Render targets, stored as interleaved left/right pairs per buffer set (see eyeTextureCount).
	private RenderTexture[] eyeTextures = new RenderTexture[eyeTextureCount];
	// Native texture IDs matching eyeTextures, fetched via GetNativeTextureID().
	private int[] eyeTextureIds = new int[eyeTextureCount];
	// Index of the left-eye texture of the buffer set currently in use.
	private int currEyeTextureIdx = 0;
	// Frame counter shared with the native plugin for render-pose lookup; advanced in BeginFrame().
	private static int frameCount = 0;

#if !UNITY_ANDROID && !UNITY_EDITOR
	// When true, UpdateViewport() re-sends the viewport rectangle to the native plugin.
	private bool needsSetViewport;
#endif

#if UNITY_ANDROID && !UNITY_EDITOR
	private const int eyeTextureCount = 3 * (int)OVREye.Count; // triple buffer
#else
	private const int eyeTextureCount = 1 * (int)OVREye.Count;
#endif

#if UNITY_ANDROID && !UNITY_EDITOR
	// Buffer-set index that becomes current on the next UpdateTextures() (Android triple buffering).
	private int nextEyeTextureIdx = 0;
#endif
109 | |
110 /// <summary> | |
111 /// Creates an instance of OVRDisplay. Called by OVRManager. | |
112 /// </summary> | |
113 public OVRDisplay() | |
114 { | |
115 #if !UNITY_ANDROID || UNITY_EDITOR | |
116 needsSetTexture = true; | |
117 prevFullScreen = Screen.fullScreen; | |
118 prevVirtualTextureScale = OVRManager.instance.virtualTextureScale; | |
119 #elif !UNITY_ANDROID && !UNITY_EDITOR | |
120 needsSetViewport = true; | |
121 #endif | |
122 | |
123 ConfigureEyeDesc(OVREye.Left); | |
124 ConfigureEyeDesc(OVREye.Right); | |
125 | |
126 for (int i = 0; i < eyeTextureCount; i += 2) | |
127 { | |
128 ConfigureEyeTexture(i, OVREye.Left, OVRManager.instance.nativeTextureScale); | |
129 ConfigureEyeTexture(i, OVREye.Right, OVRManager.instance.nativeTextureScale); | |
130 } | |
131 } | |
132 | |
	/// <summary>
	/// Updates the internal state of the OVRDisplay. Called by OVRManager.
	/// </summary>
	public void Update()
	{
		// HACK - needed to force DX11 into low persistence mode, remove after Unity patch release
		// Runs while frameCount < 2; toggling the LowPersistence bit an even number of
		// times leaves the caps value unchanged, so presumably it is the SetEnabledCaps
		// round-trip itself that nudges the driver — NOTE(review): confirm before removing.
		if (frameCount < 2)
		{
			uint caps = OVRManager.capiHmd.GetEnabledCaps();
			caps ^= (uint)HmdCaps.LowPersistence;
			OVRManager.capiHmd.SetEnabledCaps(caps);
		}

		UpdateViewport();
		UpdateTextures();
	}
149 | |
150 /// <summary> | |
151 /// Marks the beginning of all rendering. | |
152 /// </summary> | |
153 public void BeginFrame() | |
154 { | |
155 bool updateFrameCount = !(OVRManager.instance.timeWarp && OVRManager.instance.freezeTimeWarp); | |
156 if (updateFrameCount) | |
157 { | |
158 frameCount++; | |
159 } | |
160 | |
161 OVRPluginEvent.IssueWithData(RenderEventType.BeginFrame, frameCount); | |
162 } | |
163 | |
	/// <summary>
	/// Marks the end of all rendering. Forwards the event to the native plugin.
	/// </summary>
	public void EndFrame()
	{
		OVRPluginEvent.Issue(RenderEventType.EndFrame);
	}
171 | |
	/// <summary>
	/// Gets the head pose at the current time or predicted at the given time.
	/// </summary>
	/// <param name="predictionTime">Seconds into the future to predict; 0 returns the current pose.</param>
	public OVRPose GetHeadPose(double predictionTime = 0d)
	{
#if !UNITY_ANDROID || UNITY_EDITOR
		// The C API expects prediction as an absolute timestamp.
		double abs_time_plus_pred = Hmd.GetTimeInSeconds() + predictionTime;

		TrackingState state = OVRManager.capiHmd.GetTrackingState(abs_time_plus_pred);

		return state.HeadPose.ThePose.ToPose();
#else
		float px = 0, py = 0, pz = 0, ow = 0, ox = 0, oy = 0, oz = 0;

		double atTime = Time.time + predictionTime;
		OVR_GetCameraPositionOrientation(ref px, ref py, ref pz,
		                                 ref ox, ref oy, ref oz, ref ow, atTime);

		// Sign flips on pz/ox/oy convert between the plugin's convention and
		// Unity's left-handed frame — NOTE(review): confirm handedness.
		return new OVRPose
		{
			position = new Vector3(px, py, -pz),
			orientation = new Quaternion(-ox, -oy, oz, ow),
		};
#endif
	}
197 | |
#if UNITY_ANDROID && !UNITY_EDITOR
	// Latest orientation quaternion components (w,x,y,z) and FOV sampled by
	// GetEyePose()'s once-per-frame OVR_GetSensorState call; shared by both eyes.
	private float w = 0, x = 0, y = 0, z = 0, fov = 90f;
#endif
201 | |
	/// <summary>
	/// Gets the pose of the given eye, predicted for the time when the current frame will scan out.
	/// </summary>
	public OVRPose GetEyePose(OVREye eye)
	{
#if !UNITY_ANDROID || UNITY_EDITOR
		// While TimeWarp freezing is active, return the cached pose so rendering
		// stays pinned to the frozen frame.
		bool updateEyePose = !(OVRManager.instance.timeWarp && OVRManager.instance.freezeTimeWarp);
		if (updateEyePose)
		{
			eyePoses[(int)eye] = OVR_GetRenderPose(frameCount, (int)eye).ToPose();
		}

		return eyePoses[(int)eye];
#else
		// Sample the sensor once per frame (triggered by the left eye) into the
		// class-level w/x/y/z/fov fields; the right eye reuses the same sample.
		if (eye == OVREye.Left)
			OVR_GetSensorState(
				false,
				ref w,
				ref x,
				ref y,
				ref z,
				ref fov,
				ref OVRManager.timeWarpViewNumber);

		// Sign flips on x/y convert to Unity's convention — NOTE(review): confirm.
		Quaternion rot = new Quaternion(-x, -y, z, w);

		// Offset each eye horizontally by half the user's IPD, rotated into head space.
		float eyeOffsetX = 0.5f * OVRManager.profile.ipd;
		eyeOffsetX = (eye == OVREye.Left) ? -eyeOffsetX : eyeOffsetX;
		Vector3 pos = rot * new Vector3(eyeOffsetX, 0.0f, 0.0f);

		return new OVRPose
		{
			position = pos,
			orientation = rot,
		};
#endif
	}
239 | |
	/// <summary>
	/// Gets the given eye's projection matrix.
	/// </summary>
	/// <param name="eyeId">Specifies the eye.</param>
	/// <param name="nearClip">The distance to the near clipping plane.</param>
	/// <param name="farClip">The distance to the far clipping plane.</param>
	public Matrix4x4 GetProjection(int eyeId, float nearClip, float farClip)
	{
#if !UNITY_ANDROID || UNITY_EDITOR
		// Use the HMD's default per-eye FOV; final argument selects right-handed projection.
		FovPort fov = OVRManager.capiHmd.GetDesc().DefaultEyeFov[eyeId];

		return Hmd.GetProjection(fov, nearClip, farClip, true).ToMatrix4x4();
#else
		// NOTE(review): this returns an all-zero matrix (not identity) on Android;
		// presumably callers on this path ignore the result — confirm.
		return new Matrix4x4();
#endif
	}
256 | |
	/// <summary>
	/// Occurs when the head pose is reset. Raised by RecenterPose() after the
	/// native recenter call completes.
	/// </summary>
	public event System.Action RecenteredPose;
261 | |
262 /// <summary> | |
263 /// Recenters the head pose. | |
264 /// </summary> | |
265 public void RecenterPose() | |
266 { | |
267 #if !UNITY_ANDROID || UNITY_EDITOR | |
268 OVRManager.capiHmd.RecenterPose(); | |
269 #else | |
270 OVR_ResetSensorOrientation(); | |
271 #endif | |
272 | |
273 if (RecenteredPose != null) | |
274 { | |
275 RecenteredPose(); | |
276 } | |
277 } | |
278 | |
	/// <summary>
	/// Gets the current acceleration of the head.
	/// </summary>
	public Vector3 acceleration
	{
		get {
#if !UNITY_ANDROID || UNITY_EDITOR
			// Desktop/Editor: read linear acceleration from the tracking state.
			return OVRManager.capiHmd.GetTrackingState().HeadPose.LinearAcceleration.ToVector3();
#else
			// Android: fetch the three components from the native plugin.
			float x = 0.0f, y = 0.0f, z = 0.0f;
			OVR_GetAcceleration(ref x, ref y, ref z);
			return new Vector3(x, y, z);
#endif
		}
	}
294 | |
	/// <summary>
	/// Gets the current angular velocity of the head.
	/// </summary>
	public Vector3 angularVelocity
	{
		get {
#if !UNITY_ANDROID || UNITY_EDITOR
			// Desktop/Editor: read angular velocity from the tracking state.
			return OVRManager.capiHmd.GetTrackingState().HeadPose.AngularVelocity.ToVector3();
#else
			// Android: fetch the three components from the native plugin.
			float x = 0.0f, y = 0.0f, z = 0.0f;
			OVR_GetAngularVelocity(ref x, ref y, ref z);
			return new Vector3(x, y, z);
#endif
		}
	}
310 | |
311 /// <summary> | |
312 /// Gets the resolution and field of view for the given eye. | |
313 /// </summary> | |
314 public EyeRenderDesc GetEyeRenderDesc(OVREye eye) | |
315 { | |
316 return eyeDescs[(int)eye]; | |
317 } | |
318 | |
319 /// <summary> | |
320 /// Gets the currently active render texture for the given eye. | |
321 /// </summary> | |
322 public RenderTexture GetEyeTexture(OVREye eye) | |
323 { | |
324 return eyeTextures[currEyeTextureIdx + (int)eye]; | |
325 } | |
326 | |
327 /// <summary> | |
328 /// Gets the currently active render texture's native ID for the given eye. | |
329 /// </summary> | |
330 public int GetEyeTextureId(OVREye eye) | |
331 { | |
332 return eyeTextureIds[currEyeTextureIdx + (int)eye]; | |
333 } | |
334 | |
335 /// <summary> | |
336 /// True if the direct mode display driver is active. | |
337 /// </summary> | |
338 public bool isDirectMode | |
339 { | |
340 get | |
341 { | |
342 #if !UNITY_ANDROID || UNITY_EDITOR | |
343 uint caps = OVRManager.capiHmd.GetDesc().HmdCaps; | |
344 uint mask = caps & (uint)HmdCaps.ExtendDesktop; | |
345 return mask == 0; | |
346 #else | |
347 return false; | |
348 #endif | |
349 } | |
350 } | |
351 | |
	/// <summary>
	/// If true, direct mode rendering will also show output in the main window.
	/// </summary>
	public bool mirrorMode
	{
		get
		{
#if !UNITY_ANDROID || UNITY_EDITOR
			// Mirroring is on when the NoMirrorToWindow cap bit is clear.
			uint caps = OVRManager.capiHmd.GetEnabledCaps();
			return (caps & (uint)HmdCaps.NoMirrorToWindow) == 0;
#else
			return false;
#endif
		}

		set
		{
#if !UNITY_ANDROID || UNITY_EDITOR
			uint caps = OVRManager.capiHmd.GetEnabledCaps();

			// Already in the requested state; skip the redundant native call.
			if (((caps & (uint)HmdCaps.NoMirrorToWindow) == 0) == value)
				return;

			// Clear the bit to enable mirroring, set it to disable.
			if (value)
				caps &= ~(uint)HmdCaps.NoMirrorToWindow;
			else
				caps |= (uint)HmdCaps.NoMirrorToWindow;

			OVRManager.capiHmd.SetEnabledCaps(caps);
#endif
		}
	}
384 | |
385 /// <summary> | |
386 /// If true, TimeWarp will be used to correct the output of each OVRCameraRig for rotational latency. | |
387 /// </summary> | |
388 internal bool timeWarp | |
389 { | |
390 get { return (distortionCaps & (int)DistortionCaps.TimeWarp) != 0; } | |
391 set | |
392 { | |
393 if (value != timeWarp) | |
394 distortionCaps ^= (int)DistortionCaps.TimeWarp; | |
395 } | |
396 } | |
397 | |
398 /// <summary> | |
399 /// If true, VR output will be rendered upside-down. | |
400 /// </summary> | |
401 internal bool flipInput | |
402 { | |
403 get { return (distortionCaps & (int)DistortionCaps.FlipInput) != 0; } | |
404 set | |
405 { | |
406 if (value != flipInput) | |
407 distortionCaps ^= (int)DistortionCaps.FlipInput; | |
408 } | |
409 } | |
410 | |
	/// <summary>
	/// Enables and disables distortion rendering capabilities from the Ovr.DistortionCaps enum.
	/// </summary>
	public uint distortionCaps
	{
		get
		{
			return _distortionCaps;
		}

		set
		{
			// Skip the native call when nothing changed.
			if (value == _distortionCaps)
				return;

			_distortionCaps = value;
#if !UNITY_ANDROID || UNITY_EDITOR
			OVR_SetDistortionCaps(value);
#endif
		}
	}
	// Default caps: chromatic aberration correction, vignette, sRGB, overdrive
	// (plus a no-spin-wait timewarp profile on OSX builds).
	private uint _distortionCaps =
#if (UNITY_STANDALONE_OSX || UNITY_EDITOR_OSX)
		(uint)DistortionCaps.ProfileNoTimewarpSpinWaits |
#endif
		(uint)DistortionCaps.Chromatic |
		(uint)DistortionCaps.Vignette |
		(uint)DistortionCaps.SRGB |
		(uint)DistortionCaps.Overdrive;
440 | |
441 /// <summary> | |
442 /// Gets the current measured latency values. | |
443 /// </summary> | |
444 public LatencyData latency | |
445 { | |
446 get { | |
447 #if !UNITY_ANDROID || UNITY_EDITOR | |
448 float[] values = { 0.0f, 0.0f, 0.0f }; | |
449 float[] latencies = OVRManager.capiHmd.GetFloatArray("DK2Latency", values); | |
450 | |
451 return new LatencyData | |
452 { | |
453 render = latencies[0], | |
454 timeWarp = latencies[1], | |
455 postPresent = latencies[2] | |
456 }; | |
457 #else | |
458 return new LatencyData | |
459 { | |
460 render = 0.0f, | |
461 timeWarp = 0.0f, | |
462 postPresent = 0.0f | |
463 }; | |
464 #endif | |
465 } | |
466 } | |
467 | |
	// Re-sends the render viewport to the native plugin when the screen size
	// changes. Compiles to an empty method everywhere except standalone
	// (non-Editor, non-Android) builds.
	private void UpdateViewport()
	{
#if !UNITY_ANDROID && !UNITY_EDITOR
		needsSetViewport = needsSetViewport
			|| Screen.width != prevScreenWidth
			|| Screen.height != prevScreenHeight;

		if (needsSetViewport)
		{
			SetViewport(0, 0, Screen.width, Screen.height);

			prevScreenWidth = Screen.width;
			prevScreenHeight = Screen.height;

			needsSetViewport = false;
		}
#endif
	}
486 | |
	// Recreates lost render textures and keeps the native plugin's texture
	// pointers in sync; on Android, advances the triple-buffer index instead.
	private void UpdateTextures()
	{
		for (int i = 0; i < eyeTextureCount; i++)
		{
			// Render textures can be lost (IsCreated() returns false); recreate
			// them and re-fetch the native ID.
			if (!eyeTextures[i].IsCreated())
			{
				eyeTextures[i].Create();
				eyeTextureIds[i] = eyeTextures[i].GetNativeTextureID();

#if !UNITY_ANDROID || UNITY_EDITOR
				needsSetTexture = true;
#endif
			}
		}

#if !UNITY_ANDROID || UNITY_EDITOR
		// Also re-send textures when the virtual texture scale, full-screen
		// state, or the plugin-reported display mode has changed.
		needsSetTexture = needsSetTexture
			|| OVRManager.instance.virtualTextureScale != prevVirtualTextureScale
			|| Screen.fullScreen != prevFullScreen
			|| OVR_UnityGetModeChange();

		if (needsSetTexture)
		{
			for (int i = 0; i < eyeTextureCount; i++)
			{
				// Native pointer not available yet: bail out and retry on a
				// later frame (needsSetTexture remains true).
				if (eyeTextures[i].GetNativeTexturePtr() == System.IntPtr.Zero)
					return;

				OVR_SetTexture(i, eyeTextures[i].GetNativeTexturePtr(), OVRManager.instance.virtualTextureScale);
			}

			prevVirtualTextureScale = OVRManager.instance.virtualTextureScale;
			prevFullScreen = Screen.fullScreen;
			OVR_UnitySetModeChange(false);

			needsSetTexture = false;
		}
#else
		// Rotate the triple buffer: the set queued last frame becomes current.
		currEyeTextureIdx = nextEyeTextureIdx;
		nextEyeTextureIdx = (nextEyeTextureIdx + 2) % eyeTextureCount;
#endif
	}
529 | |
	// Fills eyeDescs[eye] with the texture resolution and field of view to use
	// for one eye.
	private void ConfigureEyeDesc(OVREye eye)
	{
#if !UNITY_ANDROID || UNITY_EDITOR
		HmdDesc desc = OVRManager.capiHmd.GetDesc();
		FovPort fov = desc.DefaultEyeFov[(int)eye];
		// Symmetrize the FOV: use the larger tangent on each axis for both sides.
		fov.LeftTan = fov.RightTan = Mathf.Max(fov.LeftTan, fov.RightTan);
		fov.UpTan = fov.DownTan = Mathf.Max(fov.UpTan, fov.DownTan);

		// Configure Stereo settings. Default pixel density is one texel per pixel.
		float desiredPixelDensity = 1f;
		Sizei texSize = OVRManager.capiHmd.GetFovTextureSize((Ovr.Eye)eye, fov, desiredPixelDensity);

		// Full FOV angle in degrees: 2 * atan(half-angle tangent).
		float fovH = 2f * Mathf.Rad2Deg * Mathf.Atan(fov.LeftTan);
		float fovV = 2f * Mathf.Rad2Deg * Mathf.Atan(fov.UpTan);

		eyeDescs[(int)eye] = new EyeRenderDesc()
		{
			resolution = texSize.ToVector2(),
			fov = new Vector2(fovH, fovV)
		};
#else
		// Fixed fallback on Android: 1024x1024 per eye, 90-degree FOV.
		eyeDescs[(int)eye] = new EyeRenderDesc()
		{
			resolution = new Vector2(1024, 1024),
			fov = new Vector2(90, 90)
		};
#endif
	}
558 | |
559 private void ConfigureEyeTexture(int eyeBufferIndex, OVREye eye, float scale) | |
560 { | |
561 int eyeIndex = eyeBufferIndex + (int)eye; | |
562 | |
563 EyeRenderDesc eyeDesc = eyeDescs[(int)eye]; | |
564 | |
565 int w = (int)(eyeDesc.resolution.x * scale); | |
566 int h = (int)(eyeDesc.resolution.y * scale); | |
567 | |
568 eyeTextures[eyeIndex] = new RenderTexture(w, h, OVRManager.instance.eyeTextureDepth, OVRManager.instance.eyeTextureFormat); | |
569 eyeTextures[eyeIndex].antiAliasing = (QualitySettings.antiAliasing == 0) ? 1 : QualitySettings.antiAliasing; | |
570 eyeTextures[eyeIndex].Create(); | |
571 eyeTextureIds[eyeIndex] = eyeTextures[eyeIndex].GetNativeTextureID(); | |
572 } | |
573 | |
	/// <summary>
	/// Forwards the symmetric-projection flag to the native plugin. No-op on Android.
	/// </summary>
	public void ForceSymmetricProj(bool enabled)
	{
#if !UNITY_ANDROID || UNITY_EDITOR
		OVR_ForceSymmetricProj(enabled);
#endif
	}
580 | |
	/// <summary>
	/// Forwards the viewport rectangle to the native plugin. No-op on Android.
	/// </summary>
	public void SetViewport(int x, int y, int w, int h)
	{
#if !UNITY_ANDROID || UNITY_EDITOR
		OVR_SetViewport(x, y, w, h);
#endif
	}
587 | |
	// Name of the native plugin library that backs the P/Invoke declarations below.
	private const string LibOVR = "OculusPlugin";

#if UNITY_ANDROID && !UNITY_EDITOR
	//TODO: Get rid of these functions and implement OVR.CAPI.Hmd on Android.

	// Android-only native entry points (sensor/pose access without the full C API).
	[DllImport(LibOVR)]
	private static extern bool OVR_ResetSensorOrientation();
	[DllImport(LibOVR)]
	private static extern bool OVR_GetAcceleration(ref float x, ref float y, ref float z);
	[DllImport(LibOVR)]
	private static extern bool OVR_GetAngularVelocity(ref float x, ref float y, ref float z);
	[DllImport(LibOVR)]
	private static extern bool OVR_IsHMDPresent();
	[DllImport(LibOVR)]
	private static extern bool OVR_GetCameraPositionOrientation(
		ref float px,
		ref float py,
		ref float pz,
		ref float ox,
		ref float oy,
		ref float oz,
		ref float ow,
		double atTime);
	[DllImport(LibOVR)]
	private static extern void OVR_GetDistortionMeshInfo(
		ref int resH,
		ref int resV,
		ref float fovH,
		ref float fovV);
	[DllImport(LibOVR)]
	private static extern void OVR_SetLowPersistenceMode(bool on);
	[DllImport(LibOVR)]
	private static extern bool OVR_GetSensorState(
		bool monoscopic,
		ref float w,
		ref float x,
		ref float y,
		ref float z,
		ref float fov,
		ref int viewNumber);
#else
	// Desktop/Editor native entry points (cdecl calling convention).
	[DllImport(LibOVR, CallingConvention = CallingConvention.Cdecl)]
	private static extern void OVR_SetDistortionCaps(uint distortionCaps);
	[DllImport(LibOVR, CallingConvention = CallingConvention.Cdecl)]
	private static extern bool OVR_SetViewport(int x, int y, int w, int h);
	[DllImport(LibOVR, CallingConvention = CallingConvention.Cdecl)]
	private static extern Posef OVR_GetRenderPose(int frameIndex, int eyeId);
	[DllImport(LibOVR, CallingConvention = CallingConvention.Cdecl)]
	private static extern bool OVR_SetTexture(int id, System.IntPtr texture, float scale = 1);
	[DllImport(LibOVR, CallingConvention = CallingConvention.Cdecl)]
	private static extern bool OVR_UnityGetModeChange();
	[DllImport(LibOVR, CallingConvention = CallingConvention.Cdecl)]
	private static extern bool OVR_UnitySetModeChange(bool isChanged);
	[DllImport(LibOVR, CallingConvention = CallingConvention.Cdecl)]
	private static extern void OVR_ForceSymmetricProj(bool isEnabled);
#endif
}
644 } |