forked from cgvr/DeltaVR

Initial Commit

Toomas Tamm
2020-11-28 16:54:41 +02:00
parent 97292ee26e
commit ea967135f2
4217 changed files with 2945663 additions and 0 deletions

@@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 1f92828e69374384b8cb197653871a6e
folderAsset: yes
timeCreated: 1502989983
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,305 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
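// Base class for mixed reality compositions that blend a physical camera feed with the rendered scene:
// manages the camera device lifecycle, the camera-frame quad, and the virtual green screen boundary mesh.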
public abstract class OVRCameraComposition : OVRComposition {
protected GameObject cameraFramePlaneObject = null;
protected float cameraFramePlaneDistance;
protected readonly bool hasCameraDeviceOpened = false;
protected readonly bool useDynamicLighting = false;
internal readonly OVRPlugin.CameraDevice cameraDevice = OVRPlugin.CameraDevice.WebCamera0;
private Mesh boundaryMesh = null;
private float boundaryMeshTopY = 0.0f;
private float boundaryMeshBottomY = 0.0f;
private OVRManager.VirtualGreenScreenType boundaryMeshType = OVRManager.VirtualGreenScreenType.Off;
protected OVRCameraComposition(GameObject parentObject, Camera mainCamera, OVRManager.CameraDevice inCameraDevice, bool inUseDynamicLighting, OVRManager.DepthQuality depthQuality)
: base(parentObject, mainCamera)
{
cameraDevice = OVRCompositionUtil.ConvertCameraDevice(inCameraDevice);
Debug.Assert(!hasCameraDeviceOpened);
Debug.Assert(!OVRPlugin.IsCameraDeviceAvailable(cameraDevice) || !OVRPlugin.HasCameraDeviceOpened(cameraDevice));
hasCameraDeviceOpened = false;
useDynamicLighting = inUseDynamicLighting;
bool cameraSupportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);
if (useDynamicLighting && !cameraSupportsDepth)
{
Debug.LogWarning("The camera device doesn't support depth. The result of dynamic lighting might not be correct");
}
if (OVRPlugin.IsCameraDeviceAvailable(cameraDevice))
{
OVRPlugin.CameraExtrinsics extrinsics;
OVRPlugin.CameraIntrinsics intrinsics;
if (OVRPlugin.GetExternalCameraCount() > 0 && OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
{
OVRPlugin.SetCameraDevicePreferredColorFrameSize(cameraDevice, intrinsics.ImageSensorPixelResolution.w, intrinsics.ImageSensorPixelResolution.h);
}
if (useDynamicLighting)
{
OVRPlugin.SetCameraDeviceDepthSensingMode(cameraDevice, OVRPlugin.CameraDeviceDepthSensingMode.Fill);
OVRPlugin.CameraDeviceDepthQuality quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
if (depthQuality == OVRManager.DepthQuality.Low)
{
quality = OVRPlugin.CameraDeviceDepthQuality.Low;
}
else if (depthQuality == OVRManager.DepthQuality.Medium)
{
quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
}
else if (depthQuality == OVRManager.DepthQuality.High)
{
quality = OVRPlugin.CameraDeviceDepthQuality.High;
}
else
{
Debug.LogWarning("Unknown depth quality");
}
OVRPlugin.SetCameraDevicePreferredDepthQuality(cameraDevice, quality);
}
Debug.LogFormat("Opening camera device {0}", cameraDevice);
OVRPlugin.OpenCameraDevice(cameraDevice);
if (OVRPlugin.HasCameraDeviceOpened(cameraDevice))
{
Debug.LogFormat("Opened camera device {0}", cameraDevice);
hasCameraDeviceOpened = true;
}
}
}
public override void Cleanup()
{
OVRCompositionUtil.SafeDestroy(ref cameraFramePlaneObject);
if (hasCameraDeviceOpened)
{
Debug.LogFormat("Close camera device {0}", cameraDevice);
OVRPlugin.CloseCameraDevice(cameraDevice);
}
}
public override void RecenterPose()
{
boundaryMesh = null;
}
protected void RefreshCameraFramePlaneObject(GameObject parentObject, Camera mixedRealityCamera, bool useDynamicLighting)
{
OVRCompositionUtil.SafeDestroy(ref cameraFramePlaneObject);
Debug.Assert(cameraFramePlaneObject == null);
cameraFramePlaneObject = GameObject.CreatePrimitive(PrimitiveType.Quad);
cameraFramePlaneObject.name = "OculusMRC_CameraFrame";
cameraFramePlaneObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
cameraFramePlaneObject.GetComponent<Collider>().enabled = false;
cameraFramePlaneObject.GetComponent<MeshRenderer>().shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
Material cameraFrameMaterial = new Material(Shader.Find(useDynamicLighting ? "Oculus/OVRMRCameraFrameLit" : "Oculus/OVRMRCameraFrame"));
cameraFramePlaneObject.GetComponent<MeshRenderer>().material = cameraFrameMaterial;
cameraFrameMaterial.SetColor("_Color", Color.white);
cameraFrameMaterial.SetFloat("_Visible", 0.0f);
cameraFramePlaneObject.transform.localScale = new Vector3(4, 4, 4);
cameraFramePlaneObject.SetActive(true);
OVRCameraFrameCompositionManager cameraFrameCompositionManager = mixedRealityCamera.gameObject.AddComponent<OVRCameraFrameCompositionManager>();
cameraFrameCompositionManager.cameraFrameGameObj = cameraFramePlaneObject;
cameraFrameCompositionManager.composition = this;
}
private bool nullcameraRigWarningDisplayed = false;
protected void UpdateCameraFramePlaneObject(Camera mainCamera, Camera mixedRealityCamera, RenderTexture boundaryMeshMaskTexture)
{
bool hasError = false;
Material cameraFrameMaterial = cameraFramePlaneObject.GetComponent<MeshRenderer>().material;
Texture2D colorTexture = Texture2D.blackTexture;
Texture2D depthTexture = Texture2D.whiteTexture;
if (OVRPlugin.IsCameraDeviceColorFrameAvailable(cameraDevice))
{
colorTexture = OVRPlugin.GetCameraDeviceColorFrameTexture(cameraDevice);
}
else
{
Debug.LogWarning("Camera: color frame not ready");
hasError = true;
}
bool cameraSupportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);
if (useDynamicLighting && cameraSupportsDepth)
{
if (OVRPlugin.IsCameraDeviceDepthFrameAvailable(cameraDevice))
{
depthTexture = OVRPlugin.GetCameraDeviceDepthFrameTexture(cameraDevice);
}
else
{
Debug.LogWarning("Camera: depth frame not ready");
hasError = true;
}
}
if (!hasError)
{
Vector3 offset = mainCamera.transform.position - mixedRealityCamera.transform.position;
float distance = Vector3.Dot(mixedRealityCamera.transform.forward, offset);
cameraFramePlaneDistance = distance;
cameraFramePlaneObject.transform.position = mixedRealityCamera.transform.position + mixedRealityCamera.transform.forward * distance;
cameraFramePlaneObject.transform.rotation = mixedRealityCamera.transform.rotation;
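// Size the frame quad to exactly fill the mixed reality camera's frustum at 'distance':
// height = 2 * distance * tan(fov / 2), width = height * aspect.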
float tanFov = Mathf.Tan(mixedRealityCamera.fieldOfView * Mathf.Deg2Rad * 0.5f);
cameraFramePlaneObject.transform.localScale = new Vector3(distance * mixedRealityCamera.aspect * tanFov * 2.0f, distance * tanFov * 2.0f, 1.0f);
float worldHeight = distance * tanFov * 2.0f;
float worldWidth = worldHeight * mixedRealityCamera.aspect;
float cullingDistance = float.MaxValue;
if (OVRManager.instance.virtualGreenScreenType != OVRManager.VirtualGreenScreenType.Off)
{
RefreshBoundaryMesh(mixedRealityCamera, out cullingDistance);
}
cameraFrameMaterial.mainTexture = colorTexture;
cameraFrameMaterial.SetTexture("_DepthTex", depthTexture);
cameraFrameMaterial.SetVector("_FlipParams", new Vector4((OVRManager.instance.flipCameraFrameHorizontally ? 1.0f : 0.0f), (OVRManager.instance.flipCameraFrameVertically ? 1.0f : 0.0f), 0.0f, 0.0f));
cameraFrameMaterial.SetColor("_ChromaKeyColor", OVRManager.instance.chromaKeyColor);
cameraFrameMaterial.SetFloat("_ChromaKeySimilarity", OVRManager.instance.chromaKeySimilarity);
cameraFrameMaterial.SetFloat("_ChromaKeySmoothRange", OVRManager.instance.chromaKeySmoothRange);
cameraFrameMaterial.SetFloat("_ChromaKeySpillRange", OVRManager.instance.chromaKeySpillRange);
cameraFrameMaterial.SetVector("_TextureDimension", new Vector4(colorTexture.width, colorTexture.height, 1.0f / colorTexture.width, 1.0f / colorTexture.height));
cameraFrameMaterial.SetVector("_TextureWorldSize", new Vector4(worldWidth, worldHeight, 0, 0));
cameraFrameMaterial.SetFloat("_SmoothFactor", OVRManager.instance.dynamicLightingSmoothFactor);
cameraFrameMaterial.SetFloat("_DepthVariationClamp", OVRManager.instance.dynamicLightingDepthVariationClampingValue);
cameraFrameMaterial.SetFloat("_CullingDistance", cullingDistance);
if (OVRManager.instance.virtualGreenScreenType == OVRManager.VirtualGreenScreenType.Off || boundaryMesh == null || boundaryMeshMaskTexture == null)
{
cameraFrameMaterial.SetTexture("_MaskTex", Texture2D.whiteTexture);
}
else
{
if (cameraRig == null)
{
if (!nullcameraRigWarningDisplayed)
{
Debug.LogWarning("Could not find the OVRCameraRig/CenterEyeAnchor object. Please check if the OVRCameraRig has been setup properly. The virtual green screen has been temporarily disabled");
nullcameraRigWarningDisplayed = true;
}
cameraFrameMaterial.SetTexture("_MaskTex", Texture2D.whiteTexture);
}
else
{
if (nullcameraRigWarningDisplayed)
{
Debug.Log("OVRCameraRig/CenterEyeAnchor object found. Virtual green screen is activated");
nullcameraRigWarningDisplayed = false;
}
cameraFrameMaterial.SetTexture("_MaskTex", boundaryMeshMaskTexture);
}
}
}
}
protected void RefreshBoundaryMesh(Camera camera, out float cullingDistance)
{
float depthTolerance = OVRManager.instance.virtualGreenScreenApplyDepthCulling ? OVRManager.instance.virtualGreenScreenDepthTolerance : float.PositiveInfinity;
cullingDistance = OVRCompositionUtil.GetMaximumBoundaryDistance(camera, OVRCompositionUtil.ToBoundaryType(OVRManager.instance.virtualGreenScreenType)) + depthTolerance;
if (boundaryMesh == null || boundaryMeshType != OVRManager.instance.virtualGreenScreenType || boundaryMeshTopY != OVRManager.instance.virtualGreenScreenTopY || boundaryMeshBottomY != OVRManager.instance.virtualGreenScreenBottomY)
{
boundaryMeshTopY = OVRManager.instance.virtualGreenScreenTopY;
boundaryMeshBottomY = OVRManager.instance.virtualGreenScreenBottomY;
boundaryMesh = OVRCompositionUtil.BuildBoundaryMesh(OVRCompositionUtil.ToBoundaryType(OVRManager.instance.virtualGreenScreenType), boundaryMeshTopY, boundaryMeshBottomY);
boundaryMeshType = OVRManager.instance.virtualGreenScreenType;
// Creating a GameObject for testing purposes only
//GameObject boundaryMeshObject = new GameObject("BoundaryMeshObject");
//boundaryMeshObject.AddComponent<MeshFilter>().mesh = boundaryMesh;
//boundaryMeshObject.AddComponent<MeshRenderer>();
}
}
public class OVRCameraFrameCompositionManager : MonoBehaviour
{
public GameObject cameraFrameGameObj;
public OVRCameraComposition composition;
public RenderTexture boundaryMeshMaskTexture;
private Material cameraFrameMaterial;
private Material whiteMaterial;
void Start()
{
Shader shader = Shader.Find("Oculus/Unlit");
if (!shader)
{
Debug.LogError("Oculus/Unlit shader does not exist");
return;
}
whiteMaterial = new Material(shader);
whiteMaterial.color = Color.white;
}
void OnPreRender()
{
if (OVRManager.instance.virtualGreenScreenType != OVRManager.VirtualGreenScreenType.Off && boundaryMeshMaskTexture != null && composition.boundaryMesh != null)
{
RenderTexture oldRT = RenderTexture.active;
RenderTexture.active = boundaryMeshMaskTexture;
// The camera matrices haven't been set up when OnPreRender() is executed. Load the projection matrix manually
GL.PushMatrix();
GL.LoadProjectionMatrix(GetComponent<Camera>().projectionMatrix);
GL.Clear(false, true, Color.black);
for (int i = 0; i < whiteMaterial.passCount; ++i)
{
if (whiteMaterial.SetPass(i))
{
Graphics.DrawMeshNow(composition.boundaryMesh, composition.cameraRig.ComputeTrackReferenceMatrix());
}
}
GL.PopMatrix();
RenderTexture.active = oldRT;
}
if (cameraFrameGameObj)
{
if (cameraFrameMaterial == null)
cameraFrameMaterial = cameraFrameGameObj.GetComponent<MeshRenderer>().material;
cameraFrameMaterial.SetFloat("_Visible", 1.0f);
}
}
void OnPostRender()
{
if (cameraFrameGameObj)
{
Debug.Assert(cameraFrameMaterial);
cameraFrameMaterial.SetFloat("_Visible", 0.0f);
}
}
}
}
#endif

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 70818bad1fe6859439b190a61dfb6eb8
timeCreated: 1503089686
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,105 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if UNITY_ANDROID && !UNITY_EDITOR
#define OVR_ANDROID_MRC
#endif
using UnityEngine;
using System.Collections;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
public abstract class OVRComposition {
public bool cameraInTrackingSpace = false;
public OVRCameraRig cameraRig = null;
protected OVRComposition(GameObject parentObject, Camera mainCamera)
{
RefreshCameraRig(parentObject, mainCamera);
}
public abstract OVRManager.CompositionMethod CompositionMethod();
public abstract void Update(GameObject gameObject, Camera mainCamera);
public abstract void Cleanup();
public virtual void RecenterPose() { }
protected bool usingLastAttachedNodePose = false;
protected OVRPose lastAttachedNodePose = new OVRPose(); // Sometimes the attached node's pose is not readable (lost tracking, low battery, etc.). Use the last known pose when that happens
public void RefreshCameraRig(GameObject parentObject, Camera mainCamera)
{
OVRCameraRig cameraRig = mainCamera.GetComponentInParent<OVRCameraRig>();
if (cameraRig == null)
{
cameraRig = parentObject.GetComponent<OVRCameraRig>();
}
cameraInTrackingSpace = (cameraRig != null && cameraRig.trackingSpace != null);
this.cameraRig = cameraRig;
Debug.Log(cameraRig == null ? "[OVRComposition] CameraRig not found" : "[OVRComposition] CameraRig found");
}
public OVRPose ComputeCameraWorldSpacePose(OVRPlugin.CameraExtrinsics extrinsics)
{
OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
OVRPose worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
return worldSpacePose;
}
public OVRPose ComputeCameraTrackingSpacePose(OVRPlugin.CameraExtrinsics extrinsics)
{
OVRPose trackingSpacePose = new OVRPose();
OVRPose cameraTrackingSpacePose = extrinsics.RelativePose.ToOVRPose();
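// On Quest (OVR_ANDROID_MRC) the extrinsics are reported relative to the stage origin; re-express the pose in the current tracking space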
#if OVR_ANDROID_MRC
OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
cameraTrackingSpacePose = stageToLocalPose * cameraTrackingSpacePose;
#endif
trackingSpacePose = cameraTrackingSpacePose;
if (extrinsics.AttachedToNode != OVRPlugin.Node.None && OVRPlugin.GetNodePresent(extrinsics.AttachedToNode))
{
if (usingLastAttachedNodePose)
{
Debug.Log("The camera attached node get tracked");
usingLastAttachedNodePose = false;
}
OVRPose attachedNodePose = OVRPlugin.GetNodePose(extrinsics.AttachedToNode, OVRPlugin.Step.Render).ToOVRPose();
lastAttachedNodePose = attachedNodePose;
trackingSpacePose = attachedNodePose * trackingSpacePose;
}
else
{
if (extrinsics.AttachedToNode != OVRPlugin.Node.None)
{
if (!usingLastAttachedNodePose)
{
Debug.LogWarning("The camera attached node could not be tracked, using the last pose");
usingLastAttachedNodePose = true;
}
trackingSpacePose = lastAttachedNodePose * trackingSpacePose;
}
}
return trackingSpacePose;
}
}
#endif

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 829a382f3380d4b46ad9670463232a0b
timeCreated: 1502990005
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,168 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections.Generic;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
internal class OVRCompositionUtil {
public static void SafeDestroy(GameObject obj)
{
if (Application.isPlaying)
{
GameObject.Destroy(obj);
}
else
{
GameObject.DestroyImmediate(obj);
}
}
public static void SafeDestroy(ref GameObject obj)
{
if (obj != null)
{
SafeDestroy(obj);
obj = null;
}
}
public static OVRPlugin.CameraDevice ConvertCameraDevice(OVRManager.CameraDevice cameraDevice)
{
if (cameraDevice == OVRManager.CameraDevice.WebCamera0)
{
return OVRPlugin.CameraDevice.WebCamera0;
}
else if (cameraDevice == OVRManager.CameraDevice.WebCamera1)
{
return OVRPlugin.CameraDevice.WebCamera1;
}
else if (cameraDevice == OVRManager.CameraDevice.ZEDCamera)
{
return OVRPlugin.CameraDevice.ZEDCamera;
}
else
{
return OVRPlugin.CameraDevice.None;
}
}
public static OVRBoundary.BoundaryType ToBoundaryType(OVRManager.VirtualGreenScreenType type)
{
if (type == OVRManager.VirtualGreenScreenType.OuterBoundary)
{
return OVRBoundary.BoundaryType.OuterBoundary;
}
else if (type == OVRManager.VirtualGreenScreenType.PlayArea)
{
return OVRBoundary.BoundaryType.PlayArea;
}
else
{
Debug.LogWarning("Unmatched VirtualGreenScreenType");
return OVRBoundary.BoundaryType.OuterBoundary;
}
}
public static Vector3 GetWorldPosition(Vector3 trackingSpacePosition)
{
OVRPose tsPose;
tsPose.position = trackingSpacePosition;
tsPose.orientation = Quaternion.identity;
OVRPose wsPose = OVRExtensions.ToWorldSpacePose(tsPose);
Vector3 pos = wsPose.position;
return pos;
}
public static float GetMaximumBoundaryDistance(Camera camera, OVRBoundary.BoundaryType boundaryType)
{
if (!OVRManager.boundary.GetConfigured())
{
return float.MaxValue;
}
Vector3[] geometry = OVRManager.boundary.GetGeometry(boundaryType);
if (geometry.Length == 0)
{
return float.MaxValue;
}
float maxDistance = -float.MaxValue;
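// Project each boundary point onto the camera's forward axis; the largest projection is the farthest boundary plane in front of the camera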
foreach (Vector3 v in geometry)
{
Vector3 pos = GetWorldPosition(v);
float distance = Vector3.Dot(camera.transform.forward, pos);
if (maxDistance < distance)
{
maxDistance = distance;
}
}
return maxDistance;
}
public static Mesh BuildBoundaryMesh(OVRBoundary.BoundaryType boundaryType, float topY, float bottomY)
{
if (!OVRManager.boundary.GetConfigured())
{
return null;
}
List<Vector3> geometry = new List<Vector3>(OVRManager.boundary.GetGeometry(boundaryType));
if (geometry.Count == 0)
{
return null;
}
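// Repeat the first point so the wall of quads closes into a loop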
geometry.Add(geometry[0]);
int numPoints = geometry.Count;
Vector3[] vertices = new Vector3[numPoints * 2];
Vector2[] uvs = new Vector2[numPoints * 2];
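// The first numPoints vertices form the bottom edge, the next numPoints the top edge;
// UV.x runs along the perimeter, UV.y from bottom (0) to top (1)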
for (int i = 0; i < numPoints; ++i)
{
Vector3 v = geometry[i];
vertices[i] = new Vector3(v.x, bottomY, v.z);
vertices[i + numPoints] = new Vector3(v.x, topY, v.z);
uvs[i] = new Vector2((float)i / (numPoints - 1), 0.0f);
uvs[i + numPoints] = new Vector2(uvs[i].x, 1.0f);
}
int[] triangles = new int[(numPoints - 1) * 2 * 3];
for (int i = 0; i < numPoints - 1; ++i)
{
// The geometry is built clockwise; only the back faces should be rendered into the camera frame mask
triangles[i * 6 + 0] = i;
triangles[i * 6 + 1] = i + numPoints;
triangles[i * 6 + 2] = i + 1 + numPoints;
triangles[i * 6 + 3] = i;
triangles[i * 6 + 4] = i + 1 + numPoints;
triangles[i * 6 + 5] = i + 1;
}
Mesh mesh = new Mesh();
mesh.vertices = vertices;
mesh.uv = uvs;
mesh.triangles = triangles;
return mesh;
}
}
#endif

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 43bf91d46b2eb874a842be95aee2cc9a
timeCreated: 1502992822
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,171 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
public class OVRDirectComposition : OVRCameraComposition
{
private GameObject previousMainCameraObject = null;
public GameObject directCompositionCameraGameObject = null;
public Camera directCompositionCamera = null;
public RenderTexture boundaryMeshMaskTexture = null;
public override OVRManager.CompositionMethod CompositionMethod() { return OVRManager.CompositionMethod.Direct; }
public OVRDirectComposition(GameObject parentObject, Camera mainCamera, OVRManager.CameraDevice cameraDevice, bool useDynamicLighting, OVRManager.DepthQuality depthQuality)
: base(parentObject, mainCamera, cameraDevice, useDynamicLighting, depthQuality)
{
RefreshCameraObjects(parentObject, mainCamera);
}
private void RefreshCameraObjects(GameObject parentObject, Camera mainCamera)
{
if (!hasCameraDeviceOpened)
{
Debug.LogWarning("[OVRDirectComposition] RefreshCameraObjects(): Unable to open camera device " + cameraDevice);
return;
}
if (mainCamera.gameObject != previousMainCameraObject)
{
Debug.LogFormat("[OVRDirectComposition] Camera refreshed. Rebind camera to {0}", mainCamera.gameObject.name);
OVRCompositionUtil.SafeDestroy(ref directCompositionCameraGameObject);
directCompositionCamera = null;
RefreshCameraRig(parentObject, mainCamera);
Debug.Assert(directCompositionCameraGameObject == null);
directCompositionCameraGameObject = Object.Instantiate(mainCamera.gameObject);
directCompositionCameraGameObject.name = "OculusMRC_DirectCompositionCamera";
directCompositionCameraGameObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
if (directCompositionCameraGameObject.GetComponent<AudioListener>())
{
Object.Destroy(directCompositionCameraGameObject.GetComponent<AudioListener>());
}
if (directCompositionCameraGameObject.GetComponent<OVRManager>())
{
Object.Destroy(directCompositionCameraGameObject.GetComponent<OVRManager>());
}
directCompositionCamera = directCompositionCameraGameObject.GetComponent<Camera>();
directCompositionCamera.stereoTargetEye = StereoTargetEyeMask.None;
directCompositionCamera.depth = float.MaxValue;
directCompositionCamera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
directCompositionCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
Debug.Log("DirectComposition activated : useDynamicLighting " + (useDynamicLighting ? "ON" : "OFF"));
RefreshCameraFramePlaneObject(parentObject, directCompositionCamera, useDynamicLighting);
previousMainCameraObject = mainCamera.gameObject;
}
}
public override void Update(GameObject gameObject, Camera mainCamera)
{
if (!hasCameraDeviceOpened)
{
return;
}
RefreshCameraObjects(gameObject, mainCamera);
if (!OVRPlugin.SetHandNodePoseStateLatency(OVRManager.instance.handPoseStateLatency))
{
Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
}
directCompositionCamera.clearFlags = mainCamera.clearFlags;
directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
directCompositionCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
directCompositionCamera.nearClipPlane = mainCamera.nearClipPlane;
directCompositionCamera.farClipPlane = mainCamera.farClipPlane;
if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
{
OVRPose trackingSpacePose = new OVRPose();
trackingSpacePose.position = OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel ?
OVRMixedReality.fakeCameraEyeLevelPosition :
OVRMixedReality.fakeCameraFloorLevelPosition;
trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
directCompositionCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
directCompositionCamera.aspect = OVRMixedReality.fakeCameraAspect;
if (cameraInTrackingSpace)
{
directCompositionCamera.transform.FromOVRPose(trackingSpacePose, true);
}
else
{
OVRPose worldSpacePose = new OVRPose();
worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
directCompositionCamera.transform.FromOVRPose(worldSpacePose);
}
}
else
{
OVRPlugin.CameraExtrinsics extrinsics;
OVRPlugin.CameraIntrinsics intrinsics;
// So far, only one camera is supported for MR; always use camera index 0
if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
{
float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
directCompositionCamera.fieldOfView = fovY;
directCompositionCamera.aspect = aspect;
if (cameraInTrackingSpace)
{
OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
directCompositionCamera.transform.FromOVRPose(trackingSpacePose, true);
}
else
{
OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
directCompositionCamera.transform.FromOVRPose(worldSpacePose);
}
}
else
{
Debug.LogWarning("Failed to get external camera information");
}
}
if (hasCameraDeviceOpened)
{
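// (Re)create the single-channel mask texture whenever the screen size changes;
// the boundary mesh is rendered into it to mask out the virtual green screen region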
if (boundaryMeshMaskTexture == null || boundaryMeshMaskTexture.width != Screen.width || boundaryMeshMaskTexture.height != Screen.height)
{
boundaryMeshMaskTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
boundaryMeshMaskTexture.Create();
}
UpdateCameraFramePlaneObject(mainCamera, directCompositionCamera, boundaryMeshMaskTexture);
directCompositionCamera.GetComponent<OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = boundaryMeshMaskTexture;
}
}
public override void Cleanup()
{
base.Cleanup();
OVRCompositionUtil.SafeDestroy(ref directCompositionCameraGameObject);
directCompositionCamera = null;
Debug.Log("DirectComposition deactivated");
}
}
#endif

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 8e9d1c62d6c68c7429ce265558cfd2b2
timeCreated: 1502990248
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,505 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if UNITY_ANDROID && !UNITY_EDITOR
#define OVR_ANDROID_MRC
#endif
using UnityEngine;
using System.Collections.Generic;
using System.Threading;
using UnityEngine.Rendering;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
public class OVRExternalComposition : OVRComposition
{
private GameObject previousMainCameraObject = null;
public GameObject foregroundCameraGameObject = null;
public Camera foregroundCamera = null;
public GameObject backgroundCameraGameObject = null;
public Camera backgroundCamera = null;
#if OVR_ANDROID_MRC
public bool renderCombinedFrame = true;
public AudioListener audioListener;
public OVRMRAudioFilter audioFilter;
public RenderTexture[] mrcRenderTextureArray = new RenderTexture[2];
public int frameIndex;
public int lastMrcEncodeFrameSyncId;
// when renderCombinedFrame is false, mrcRenderTextureArray holds only the background frame, so the foreground is rendered into these separate textures
public RenderTexture[] mrcForegroundRenderTextureArray = new RenderTexture[2];
// Timestamps used for a moving MRC camera, so the game's camera pose can be synchronized with the pose used on the client for composition
public double[] cameraPoseTimeArray = new double[2];
#endif
public override OVRManager.CompositionMethod CompositionMethod() { return OVRManager.CompositionMethod.External; }
public OVRExternalComposition(GameObject parentObject, Camera mainCamera)
: base(parentObject, mainCamera)
{
#if OVR_ANDROID_MRC
renderCombinedFrame = true;
if (GraphicsSettings.renderPipelineAsset != null)
{
Debug.Log("[OVRExternalComposition] scriptable rendering pipeline detected, Camera.rect is not supported");
renderCombinedFrame = false;
}
int frameWidth;
int frameHeight;
OVRPlugin.Media.GetMrcFrameSize(out frameWidth, out frameHeight);
Debug.LogFormat("[OVRExternalComposition] Create render texture {0}, {1}", renderCombinedFrame ? frameWidth : frameWidth/2, frameHeight);
for (int i=0; i<2; ++i)
{
mrcRenderTextureArray[i] = new RenderTexture(renderCombinedFrame ? frameWidth : frameWidth/2, frameHeight, 24, RenderTextureFormat.ARGB32);
mrcRenderTextureArray[i].Create();
cameraPoseTimeArray[i] = 0.0;
}
frameIndex = 0;
lastMrcEncodeFrameSyncId = -1;
if (!renderCombinedFrame)
{
Debug.LogFormat("[OVRExternalComposition] Create extra render textures for foreground");
for (int i = 0; i < 2; ++i)
{
mrcForegroundRenderTextureArray[i] = new RenderTexture(frameWidth / 2, frameHeight, 24, RenderTextureFormat.ARGB32);
mrcForegroundRenderTextureArray[i].Create();
}
}
#endif
RefreshCameraObjects(parentObject, mainCamera);
}
private void RefreshCameraObjects(GameObject parentObject, Camera mainCamera)
{
if (mainCamera.gameObject != previousMainCameraObject)
{
Debug.LogFormat("[OVRExternalComposition] Camera refreshed. Rebind camera to {0}", mainCamera.gameObject.name);
OVRCompositionUtil.SafeDestroy(ref backgroundCameraGameObject);
backgroundCamera = null;
OVRCompositionUtil.SafeDestroy(ref foregroundCameraGameObject);
foregroundCamera = null;
RefreshCameraRig(parentObject, mainCamera);
Debug.Assert(backgroundCameraGameObject == null);
backgroundCameraGameObject = Object.Instantiate(mainCamera.gameObject);
backgroundCameraGameObject.name = "OculusMRC_BackgroundCamera";
backgroundCameraGameObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
if (backgroundCameraGameObject.GetComponent<AudioListener>())
{
Object.Destroy(backgroundCameraGameObject.GetComponent<AudioListener>());
}
if (backgroundCameraGameObject.GetComponent<OVRManager>())
{
Object.Destroy(backgroundCameraGameObject.GetComponent<OVRManager>());
}
backgroundCamera = backgroundCameraGameObject.GetComponent<Camera>();
backgroundCamera.tag = "Untagged";
backgroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
backgroundCamera.depth = 99990.0f;
backgroundCamera.rect = new Rect(0.0f, 0.0f, 0.5f, 1.0f);
backgroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
#if OVR_ANDROID_MRC
backgroundCamera.targetTexture = mrcRenderTextureArray[0];
if (!renderCombinedFrame)
{
backgroundCamera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
}
#endif
Debug.Assert(foregroundCameraGameObject == null);
foregroundCameraGameObject = Object.Instantiate(mainCamera.gameObject);
foregroundCameraGameObject.name = "OculusMRC_ForegroundCamera";
foregroundCameraGameObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
if (foregroundCameraGameObject.GetComponent<AudioListener>())
{
Object.Destroy(foregroundCameraGameObject.GetComponent<AudioListener>());
}
if (foregroundCameraGameObject.GetComponent<OVRManager>())
{
Object.Destroy(foregroundCameraGameObject.GetComponent<OVRManager>());
}
foregroundCamera = foregroundCameraGameObject.GetComponent<Camera>();
foregroundCamera.tag = "Untagged";
foregroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
foregroundCamera.depth = backgroundCamera.depth + 1.0f; // ensure the foreground is rendered after the background
foregroundCamera.rect = new Rect(0.5f, 0.0f, 0.5f, 1.0f);
foregroundCamera.clearFlags = CameraClearFlags.Color;
#if OVR_ANDROID_MRC
foregroundCamera.backgroundColor = OVRManager.instance.externalCompositionBackdropColorQuest;
#else
foregroundCamera.backgroundColor = OVRManager.instance.externalCompositionBackdropColorRift;
#endif
foregroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
#if OVR_ANDROID_MRC
if (renderCombinedFrame)
{
foregroundCamera.targetTexture = mrcRenderTextureArray[0];
}
else
{
foregroundCamera.targetTexture = mrcForegroundRenderTextureArray[0];
foregroundCamera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
}
#endif
previousMainCameraObject = mainCamera.gameObject;
}
}
#if OVR_ANDROID_MRC
private void RefreshAudioFilter()
{
if (cameraRig != null && (audioListener == null || !audioListener.enabled || !audioListener.gameObject.activeInHierarchy))
{
CleanupAudioFilter();
AudioListener tmpAudioListener = cameraRig.centerEyeAnchor.gameObject.activeInHierarchy ? cameraRig.centerEyeAnchor.GetComponent<AudioListener>() : null;
if (tmpAudioListener != null && !tmpAudioListener.enabled) tmpAudioListener = null;
if (tmpAudioListener == null)
{
if (Camera.main != null && Camera.main.gameObject.activeInHierarchy)
{
tmpAudioListener = Camera.main.GetComponent<AudioListener>();
if (tmpAudioListener != null && !tmpAudioListener.enabled) tmpAudioListener = null;
}
}
if (tmpAudioListener == null)
{
AudioListener[] allListeners = Object.FindObjectsOfType<AudioListener>();
foreach (AudioListener al in allListeners)
{
if (al != null && al.enabled && al.gameObject.activeInHierarchy)
{
tmpAudioListener = al;
break;
}
}
}
if (tmpAudioListener == null)
{
Debug.LogWarning("[OVRExternalComposition] No AudioListener in scene");
}
else
{
Debug.LogFormat("[OVRExternalComposition] AudioListener found, obj {0}", tmpAudioListener.gameObject.name);
}
audioListener = tmpAudioListener;
if(audioListener != null)
{
audioFilter = audioListener.gameObject.AddComponent<OVRMRAudioFilter>();
audioFilter.composition = this;
Debug.LogFormat("OVRMRAudioFilter added");
}
}
}
private float[] cachedAudioDataArray = null;
private int CastMrcFrame(int castTextureIndex)
{
int audioFrames;
int audioChannels;
GetAndResetAudioData(ref cachedAudioDataArray, out audioFrames, out audioChannels);
int syncId = -1;
//Debug.Log("EncodeFrameThreadObject EncodeMrcFrame");
bool ret = false;
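// Depending on the plugin's input video buffer type, pass native GPU texture handles directly
// or hand over the RenderTexture and let the plugin perform the readback itself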
if (OVRPlugin.Media.GetMrcInputVideoBufferType() == OVRPlugin.Media.InputVideoBufferType.TextureHandle)
{
ret = OVRPlugin.Media.EncodeMrcFrame(mrcRenderTextureArray[castTextureIndex].GetNativeTexturePtr(),
renderCombinedFrame ? System.IntPtr.Zero : mrcForegroundRenderTextureArray[castTextureIndex].GetNativeTexturePtr(),
cachedAudioDataArray, audioFrames, audioChannels, AudioSettings.dspTime, cameraPoseTimeArray[castTextureIndex], ref syncId);
}
else
{
ret = OVRPlugin.Media.EncodeMrcFrame(mrcRenderTextureArray[castTextureIndex], cachedAudioDataArray, audioFrames, audioChannels, AudioSettings.dspTime, cameraPoseTimeArray[castTextureIndex], ref syncId);
}
if (!ret)
{
Debug.LogWarning("EncodeMrcFrame failed. Likely caused by OBS plugin disconnection");
return -1;
}
return syncId;
}
private void SetCameraTargetTexture(int drawTextureIndex)
{
if (renderCombinedFrame)
{
RenderTexture texture = mrcRenderTextureArray[drawTextureIndex];
if (backgroundCamera.targetTexture != texture)
{
backgroundCamera.targetTexture = texture;
}
if (foregroundCamera.targetTexture != texture)
{
foregroundCamera.targetTexture = texture;
}
}
else
{
RenderTexture bgTexture = mrcRenderTextureArray[drawTextureIndex];
RenderTexture fgTexture = mrcForegroundRenderTextureArray[drawTextureIndex];
if (backgroundCamera.targetTexture != bgTexture)
{
backgroundCamera.targetTexture = bgTexture;
}
if (foregroundCamera.targetTexture != fgTexture)
{
foregroundCamera.targetTexture = fgTexture;
}
}
}
#endif
public override void Update(GameObject gameObject, Camera mainCamera)
{
RefreshCameraObjects(gameObject, mainCamera);
OVRPlugin.SetHandNodePoseStateLatency(0.0); // HandNodePoseStateLatency doesn't apply to external composition; always force it to 0.0
// Compute headset and controller poses in stage space so the third-person camera can calculate its position against different anchors
OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
OVRPose localToStagePose = stageToLocalPose.Inverse();
OVRPose head = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.Head, OVRPlugin.Step.Render).ToOVRPose();
OVRPose leftC = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render).ToOVRPose();
OVRPose rightC = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandRight, OVRPlugin.Step.Render).ToOVRPose();
OVRPlugin.Media.SetMrcHeadsetControllerPose(head.ToPosef(), leftC.ToPosef(), rightC.ToPosef());
#if OVR_ANDROID_MRC
RefreshAudioFilter();
int drawTextureIndex = (frameIndex / 2) % 2;
int castTextureIndex = 1 - drawTextureIndex;
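// Each MRC frame spans two Unity frames: even frames render the background and kick off
// encoding of the previously completed texture set, odd frames render the foreground.
// drawTextureIndex/castTextureIndex ping-pong between the two buffers so encoding overlaps rendering.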
backgroundCamera.enabled = (frameIndex % 2) == 0;
foregroundCamera.enabled = (frameIndex % 2) == 1;
if (frameIndex % 2 == 0)
{
if (lastMrcEncodeFrameSyncId != -1)
{
OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
lastMrcEncodeFrameSyncId = -1;
}
lastMrcEncodeFrameSyncId = CastMrcFrame(castTextureIndex);
SetCameraTargetTexture(drawTextureIndex);
}
++frameIndex;
#endif
backgroundCamera.clearFlags = mainCamera.clearFlags;
backgroundCamera.backgroundColor = mainCamera.backgroundColor;
backgroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
backgroundCamera.nearClipPlane = mainCamera.nearClipPlane;
backgroundCamera.farClipPlane = mainCamera.farClipPlane;
foregroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
foregroundCamera.farClipPlane = mainCamera.farClipPlane;
if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
{
OVRPose worldSpacePose = new OVRPose();
OVRPose trackingSpacePose = new OVRPose();
trackingSpacePose.position = OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel ?
OVRMixedReality.fakeCameraEyeLevelPosition :
OVRMixedReality.fakeCameraFloorLevelPosition;
trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
backgroundCamera.aspect = OVRMixedReality.fakeCameraAspect;
foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
foregroundCamera.aspect = OVRMixedReality.fakeCameraAspect;
if (cameraInTrackingSpace)
{
backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
}
else
{
backgroundCamera.transform.FromOVRPose(worldSpacePose);
foregroundCamera.transform.FromOVRPose(worldSpacePose);
}
}
else
{
OVRPlugin.CameraExtrinsics extrinsics;
OVRPlugin.CameraIntrinsics intrinsics;
// So far, only one camera is supported for MR; always use camera index 0
if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
{
float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
backgroundCamera.fieldOfView = fovY;
backgroundCamera.aspect = aspect;
foregroundCamera.fieldOfView = fovY;
foregroundCamera.aspect = aspect;
if (cameraInTrackingSpace)
{
OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
}
else
{
OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
backgroundCamera.transform.FromOVRPose(worldSpacePose);
foregroundCamera.transform.FromOVRPose(worldSpacePose);
}
#if OVR_ANDROID_MRC
cameraPoseTimeArray[drawTextureIndex] = extrinsics.LastChangedTimeSeconds;
#endif
}
else
{
Debug.LogError("Failed to get external camera information");
return;
}
}
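// Limit the foreground camera's far plane to the player's distance from the external camera,
// so only geometry between the camera and the player is drawn into the foreground layer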
Vector3 headToExternalCameraVec = mainCamera.transform.position - foregroundCamera.transform.position;
float clipDistance = Vector3.Dot(headToExternalCameraVec, foregroundCamera.transform.forward);
foregroundCamera.farClipPlane = Mathf.Max(foregroundCamera.nearClipPlane + 0.001f, clipDistance);
}
#if OVR_ANDROID_MRC
private void CleanupAudioFilter()
{
if (audioFilter)
{
audioFilter.composition = null;
Object.Destroy(audioFilter);
Debug.LogFormat("OVRMRAudioFilter destroyed");
audioFilter = null;
}
}
#endif
public override void Cleanup()
{
OVRCompositionUtil.SafeDestroy(ref backgroundCameraGameObject);
backgroundCamera = null;
OVRCompositionUtil.SafeDestroy(ref foregroundCameraGameObject);
foregroundCamera = null;
Debug.Log("ExternalComposition deactivated");
#if OVR_ANDROID_MRC
if (lastMrcEncodeFrameSyncId != -1)
{
OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
lastMrcEncodeFrameSyncId = -1;
}
CleanupAudioFilter();
for (int i=0; i<2; ++i)
{
mrcRenderTextureArray[i].Release();
mrcRenderTextureArray[i] = null;
if (!renderCombinedFrame)
{
mrcForegroundRenderTextureArray[i].Release();
mrcForegroundRenderTextureArray[i] = null;
}
}
frameIndex = 0;
#endif
}
private readonly object audioDataLock = new object();
private List<float> cachedAudioData = new List<float>(16384);
private int cachedChannels = 0;
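// CacheAudioData() is called from OnAudioFilterRead() on Unity's audio thread, while
// GetAndResetAudioData() runs on the main thread during encoding; audioDataLock serializes access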
public void CacheAudioData(float[] data, int channels)
{
lock(audioDataLock)
{
if (channels != cachedChannels)
{
cachedAudioData.Clear();
}
cachedChannels = channels;
cachedAudioData.AddRange(data);
//Debug.LogFormat("[CacheAudioData] dspTime {0} indata {1} channels {2} accu_len {3}", AudioSettings.dspTime, data.Length, channels, cachedAudioData.Count);
}
}
public void GetAndResetAudioData(ref float[] audioData, out int audioFrames, out int channels)
{
lock(audioDataLock)
{
//Debug.LogFormat("[GetAndResetAudioData] dspTime {0} accu_len {1}", AudioSettings.dspTime, cachedAudioData.Count);
if (audioData == null || audioData.Length < cachedAudioData.Count)
{
audioData = new float[cachedAudioData.Capacity];
}
cachedAudioData.CopyTo(audioData);
audioFrames = cachedAudioData.Count;
channels = cachedChannels;
cachedAudioData.Clear();
}
}
}
#if OVR_ANDROID_MRC
public class OVRMRAudioFilter : MonoBehaviour
{
private bool running = false;
public OVRExternalComposition composition;
void Start()
{
running = true;
}
void OnAudioFilterRead(float[] data, int channels)
{
if (!running)
return;
if (composition != null)
{
composition.CacheAudioData(data, channels);
}
}
}
#endif
#endif

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 2c109ff55176f71418ec2c06d1b5d28e
timeCreated: 1502990231
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,4 @@
public class OVRSandwichComposition
{
// deprecated since SDK 1.41
}

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 3c02efcdd3fb2aa4e9c641b0c2a54b9a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: e87d4bbdfc8d17445b4a41760b401026
folderAsset: yes
timeCreated: 1510282190
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,80 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEditor;
using UnityEditor.SceneManagement;
using BoneId = OVRSkeleton.BoneId;
[CustomEditor(typeof(OVRCustomSkeleton))]
public class OVRCustomSkeletonEditor : Editor
{
public override void OnInspectorGUI()
{
DrawPropertiesExcluding(serializedObject, new string[] { "_customBones" });
serializedObject.ApplyModifiedProperties();
OVRCustomSkeleton skeleton = (OVRCustomSkeleton)target;
OVRSkeleton.SkeletonType skeletonType = skeleton.GetSkeletonType();
if (skeletonType == OVRSkeleton.SkeletonType.None)
{
EditorGUILayout.HelpBox("Please select a SkeletonType.", MessageType.Warning);
}
else
{
if (GUILayout.Button("Auto Map Bones"))
{
skeleton.TryAutoMapBonesByName();
EditorUtility.SetDirty(skeleton);
EditorSceneManager.MarkSceneDirty(skeleton.gameObject.scene);
}
EditorGUILayout.LabelField("Bones", EditorStyles.boldLabel);
BoneId start = skeleton.GetCurrentStartBoneId();
BoneId end = skeleton.GetCurrentEndBoneId();
if (start != BoneId.Invalid && end != BoneId.Invalid)
{
for (int i = (int)start; i < (int)end; ++i)
{
string boneName = BoneLabelFromBoneId((BoneId)i);
skeleton.CustomBones[i] = (Transform)EditorGUILayout.ObjectField(boneName, skeleton.CustomBones[i], typeof(Transform), true);
}
}
}
}
// Map aliased enum values to their more descriptive bone labels
private static string BoneLabelFromBoneId(BoneId boneId)
{
if (boneId == BoneId.Hand_Start)
{
return "Hand_WristRoot";
}
else if (boneId == BoneId.Hand_MaxSkinnable)
{
return "Hand_ThumbTip";
}
else
{
return boneId.ToString();
}
}
}

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 48b4582957a398741abd6d10bcb62042
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,159 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEditor;
using System.Diagnostics;
public static class OVREditorUtil {
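// Each Setup*Field helper below wraps an IMGUI control in BeginChangeCheck/EndChangeCheck,
// records an Undo step for the edited object, writes the new value back through the ref parameter,
// and raises 'modified' so callers can persist the change. The [Conditional] attributes strip
// calls to these helpers on platforms where none of the listed symbols are defined.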
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupBoolField(Object target, string name, ref bool member, ref bool modified)
{
SetupBoolField(target, new GUIContent(name), ref member, ref modified);
}
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupBoolField(Object target, GUIContent name, ref bool member, ref bool modified)
{
EditorGUI.BeginChangeCheck();
bool value = EditorGUILayout.Toggle(name, member);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
member = value;
modified = true;
}
}
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupIntField(Object target, string name, ref int member, ref bool modified)
{
SetupIntField(target, new GUIContent(name), ref member, ref modified);
}
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupIntField(Object target, GUIContent name, ref int member, ref bool modified)
{
EditorGUI.BeginChangeCheck();
int value = EditorGUILayout.IntField(name, member);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
member = value;
modified = true;
}
}
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupFloatField(Object target, string name, ref float member, ref bool modified)
{
SetupFloatField(target, new GUIContent(name), ref member, ref modified);
}
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupFloatField(Object target, GUIContent name, ref float member, ref bool modified)
{
EditorGUI.BeginChangeCheck();
float value = EditorGUILayout.FloatField(name, member);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
member = value;
modified = true;
}
}
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupDoubleField(Object target, string name, ref double member, ref bool modified)
{
SetupDoubleField(target, new GUIContent(name), ref member, ref modified);
}
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupDoubleField(Object target, GUIContent name, ref double member, ref bool modified)
{
EditorGUI.BeginChangeCheck();
double value = EditorGUILayout.DoubleField(name, member);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
member = value;
modified = true;
}
}
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupColorField(Object target, string name, ref Color member, ref bool modified)
{
SetupColorField(target, new GUIContent(name), ref member, ref modified);
}
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupColorField(Object target, GUIContent name, ref Color member, ref bool modified)
{
EditorGUI.BeginChangeCheck();
Color value = EditorGUILayout.ColorField(name, member);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
member = value;
modified = true;
}
}
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupLayerMaskField(Object target, string name, ref LayerMask layerMask, string[] layerMaskOptions, ref bool modified)
{
SetupLayerMaskField(target, new GUIContent(name), ref layerMask, layerMaskOptions, ref modified);
}
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupLayerMaskField(Object target, GUIContent name, ref LayerMask layerMask, string[] layerMaskOptions, ref bool modified)
{
EditorGUI.BeginChangeCheck();
int value = EditorGUILayout.MaskField(name, layerMask, layerMaskOptions);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
layerMask = value;
modified = true;
}
}
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupEnumField<T>(Object target, string name, ref T member, ref bool modified) where T : struct
{
SetupEnumField(target, new GUIContent(name), ref member, ref modified);
}
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupEnumField<T>(Object target, GUIContent name, ref T member, ref bool modified) where T : struct
{
EditorGUI.BeginChangeCheck();
T value = (T)(object)EditorGUILayout.EnumPopup(name, member as System.Enum);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
member = value;
modified = true;
}
}
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupInputField(Object target, string name, ref string member, ref bool modified)
{
SetupInputField(target, new GUIContent(name), ref member, ref modified);
}
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
public static void SetupInputField(Object target, GUIContent name, ref string member, ref bool modified)
{
EditorGUI.BeginChangeCheck();
string value = EditorGUILayout.TextField(name, member);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
member = value;
modified = true;
}
}
}

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 00e66be22bd6053489650de094c5efa8
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,146 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using UnityEditor;
using System.Collections.Generic;
using System.Reflection;
[CustomEditor(typeof(OVRManager))]
public class OVRManagerEditor : Editor
{
override public void OnInspectorGUI()
{
#if UNITY_ANDROID
OVRProjectConfig projectConfig = OVRProjectConfig.GetProjectConfig();
OVRProjectConfigEditor.DrawTargetDeviceInspector(projectConfig);
EditorGUILayout.Space();
#endif
DrawDefaultInspector();
bool modified = false;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
OVRManager manager = (OVRManager)target;
EditorGUILayout.Space();
EditorGUILayout.LabelField("Display", EditorStyles.boldLabel);
OVREditorUtil.SetupBoolField(target, new GUIContent("Enable Specific Color Gamut",
"If checked, the target HMD will perform a color space transformation"), ref manager.enableColorGamut, ref modified);
if (manager.enableColorGamut)
{
OVREditorUtil.SetupEnumField(target, new GUIContent("Color Gamut",
"The target color gamut when displayed on the HMD"), ref manager.colorGamut, ref modified);
}
#endif
#if UNITY_ANDROID
EditorGUILayout.Space();
OVRProjectConfigEditor.DrawProjectConfigInspector(projectConfig);
EditorGUILayout.Space();
EditorGUILayout.LabelField("Mixed Reality Capture for Quest (experimental)", EditorStyles.boldLabel);
EditorGUI.indentLevel++;
OVREditorUtil.SetupEnumField(target, "ActivationMode", ref manager.mrcActivationMode, ref modified);
EditorGUI.indentLevel--;
#endif
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
EditorGUILayout.Space();
EditorGUILayout.LabelField("Mixed Reality Capture", EditorStyles.boldLabel);
OVREditorUtil.SetupBoolField(target, "Show Properties", ref manager.expandMixedRealityCapturePropertySheet, ref modified);
if (manager.expandMixedRealityCapturePropertySheet)
{
string[] layerMaskOptions = new string[32];
for (int i=0; i<32; ++i)
{
layerMaskOptions[i] = LayerMask.LayerToName(i);
if (layerMaskOptions[i].Length == 0)
{
layerMaskOptions[i] = "<Layer " + i.ToString() + ">";
}
}
EditorGUI.indentLevel++;
EditorGUILayout.Space();
OVREditorUtil.SetupBoolField(target, "enableMixedReality", ref manager.enableMixedReality, ref modified);
OVREditorUtil.SetupEnumField(target, "compositionMethod", ref manager.compositionMethod, ref modified);
OVREditorUtil.SetupLayerMaskField(target, "extraHiddenLayers", ref manager.extraHiddenLayers, layerMaskOptions, ref modified);
if (manager.compositionMethod == OVRManager.CompositionMethod.External)
{
EditorGUILayout.Space();
EditorGUILayout.LabelField("External Composition", EditorStyles.boldLabel);
EditorGUI.indentLevel++;
OVREditorUtil.SetupColorField(target, "backdropColor (target, Rift)", ref manager.externalCompositionBackdropColorRift, ref modified);
OVREditorUtil.SetupColorField(target, "backdropColor (target, Quest)", ref manager.externalCompositionBackdropColorQuest, ref modified);
}
if (manager.compositionMethod == OVRManager.CompositionMethod.Direct)
{
EditorGUILayout.Space();
EditorGUILayout.LabelField("Direct Composition", EditorStyles.boldLabel);
EditorGUI.indentLevel++;
EditorGUILayout.Space();
EditorGUILayout.LabelField("Camera", EditorStyles.boldLabel);
OVREditorUtil.SetupEnumField(target, "capturingCameraDevice", ref manager.capturingCameraDevice, ref modified);
OVREditorUtil.SetupBoolField(target, "flipCameraFrameHorizontally", ref manager.flipCameraFrameHorizontally, ref modified);
OVREditorUtil.SetupBoolField(target, "flipCameraFrameVertically", ref manager.flipCameraFrameVertically, ref modified);
EditorGUILayout.Space();
EditorGUILayout.LabelField("Chroma Key", EditorStyles.boldLabel);
OVREditorUtil.SetupColorField(target, "chromaKeyColor", ref manager.chromaKeyColor, ref modified);
OVREditorUtil.SetupFloatField(target, "chromaKeySimilarity", ref manager.chromaKeySimilarity, ref modified);
OVREditorUtil.SetupFloatField(target, "chromaKeySmoothRange", ref manager.chromaKeySmoothRange, ref modified);
OVREditorUtil.SetupFloatField(target, "chromaKeySpillRange", ref manager.chromaKeySpillRange, ref modified);
EditorGUILayout.Space();
EditorGUILayout.LabelField("Dynamic Lighting", EditorStyles.boldLabel);
OVREditorUtil.SetupBoolField(target, "useDynamicLighting", ref manager.useDynamicLighting, ref modified);
OVREditorUtil.SetupEnumField(target, "depthQuality", ref manager.depthQuality, ref modified);
OVREditorUtil.SetupFloatField(target, "dynamicLightingSmoothFactor", ref manager.dynamicLightingSmoothFactor, ref modified);
OVREditorUtil.SetupFloatField(target, "dynamicLightingDepthVariationClampingValue", ref manager.dynamicLightingDepthVariationClampingValue, ref modified);
EditorGUILayout.Space();
EditorGUILayout.LabelField("Virtual Green Screen", EditorStyles.boldLabel);
OVREditorUtil.SetupEnumField(target, "virtualGreenScreenType", ref manager.virtualGreenScreenType, ref modified);
OVREditorUtil.SetupFloatField(target, "virtualGreenScreenTopY", ref manager.virtualGreenScreenTopY, ref modified);
OVREditorUtil.SetupFloatField(target, "virtualGreenScreenBottomY", ref manager.virtualGreenScreenBottomY, ref modified);
OVREditorUtil.SetupBoolField(target, "virtualGreenScreenApplyDepthCulling", ref manager.virtualGreenScreenApplyDepthCulling, ref modified);
OVREditorUtil.SetupFloatField(target, "virtualGreenScreenDepthTolerance", ref manager.virtualGreenScreenDepthTolerance, ref modified);
EditorGUILayout.Space();
EditorGUILayout.LabelField("Latency Control", EditorStyles.boldLabel);
OVREditorUtil.SetupFloatField(target, "handPoseStateLatency", ref manager.handPoseStateLatency, ref modified);
EditorGUI.indentLevel--;
}
EditorGUI.indentLevel--;
}
#endif
if (modified)
{
EditorUtility.SetDirty(target);
}
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 9b07d18088099f94fa00fc15e64b2b17
timeCreated: 1502747851
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,162 @@
Shader "Unlit/OVROverlayDestRectEditor"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_SrcRectLeft("SrcRectLeft", Vector) = (0,0,1,1)
_SrcRectRight("SrcRectRight", Vector) = (0,0,1,1)
_DestRectLeft ("DestRectLeft", Vector) = (0,0,1,1)
_DestRectRight("DestRectRight", Vector) = (0,0,1,1)
_BackgroundColor("Background Color", Color) = (0.225, 0.225, 0.225, 1)
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 100
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
float4 leftDragX : TEXCOORD1;
float4 leftDragY : TEXCOORD2;
float4 rightDragX : TEXCOORD3;
float4 rightDragY : TEXCOORD4;
};
sampler2D _MainTex;
float4 _MainTex_ST;
float4 _SrcRectLeft;
float4 _SrcRectRight;
float4 _DestRectLeft;
float4 _DestRectRight;
fixed4 _BackgroundColor;
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
// Add padding
o.uv = (o.uv - 0.5) * (256.0 + 8.0) / (256.0) + 0.5;
// left
o.leftDragX.x = _DestRectLeft.x;
o.leftDragY.x = _DestRectLeft.y + _DestRectLeft.w * 0.5;
// right
o.leftDragX.y = _DestRectLeft.x + _DestRectLeft.z;
o.leftDragY.y = _DestRectLeft.y + _DestRectLeft.w * 0.5;
// top
o.leftDragX.z = _DestRectLeft.x + _DestRectLeft.z * 0.5;
o.leftDragY.z = _DestRectLeft.y;
// bottom
o.leftDragX.w = _DestRectLeft.x + _DestRectLeft.z * 0.5;
o.leftDragY.w = _DestRectLeft.y + _DestRectLeft.w;
// left
o.rightDragX.x = _DestRectRight.x;
o.rightDragY.x = _DestRectRight.y + _DestRectRight.w * 0.5;
// right
o.rightDragX.y = _DestRectRight.x + _DestRectRight.z;
o.rightDragY.y = _DestRectRight.y + _DestRectRight.w * 0.5;
// top
o.rightDragX.z = _DestRectRight.x + _DestRectRight.z * 0.5;
o.rightDragY.z = _DestRectRight.y;
// bottom
o.rightDragX.w = _DestRectRight.x + _DestRectRight.z * 0.5;
o.rightDragY.w = _DestRectRight.y + _DestRectRight.w;
return o;
}
float onDrag(float2 uv, float x, float y)
{
const float pixelSize = 6;
return abs(uv.x - x) < ((pixelSize / 2) / 128.0) && abs(uv.y - y) < ((pixelSize / 2) / 128.0);
}
float onLine(float2 uv, float4 rect)
{
return
(abs(uv.x - rect.x) < (1 / 128.0) && uv.y >= rect.y && uv.y <= rect.y + rect.w) ||
(abs(uv.x - rect.x - rect.z) < (1 / 128.0) && uv.y >= rect.y && uv.y <= rect.y + rect.w) ||
(abs(uv.y - rect.y) < (1 / 128.0) && uv.x >= rect.x && uv.x <= rect.x + rect.z) ||
(abs(uv.y - rect.y - rect.w) < (1 / 128.0) && uv.x >= rect.x && uv.x <= rect.x + rect.z);
}
float checkerboard(float2 uv)
{
float x = floor(uv.x * (16 + 2));
float y = floor(uv.y * 8);
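// equivalent to (x + y) mod 2: alternates between 0 and 1 on adjacent cells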
return 2 * ((x + y) / 2.0 - floor((x + y) / 2.0));
}
fixed4 frag (v2f i) : SV_Target
{
float isLeftEye = i.uv.x < 0.5;
float2 leftUV = float2(i.uv.x * (256.0 + 32.0) / 128.0, i.uv.y);
float2 rightUV = float2(1 - ((1 - i.uv.x) * (256.0 + 32.0) / 128.0), i.uv.y);
float2 uv = i.uv;
float2 textureUV = i.uv;
if (isLeftEye)
{
uv = (leftUV - _DestRectLeft.xy) / _DestRectLeft.zw;
textureUV = uv * _SrcRectLeft.zw + _SrcRectLeft.xy;
}
else
{
uv = (rightUV - _DestRectRight.xy) / _DestRectRight.zw;
textureUV = uv * _SrcRectRight.zw + _SrcRectRight.xy;
}
// sample the texture
fixed4 col = tex2D(_MainTex, float2(textureUV.x, 1 - textureUV.y));
if (uv.x < 0 || uv.x > 1 || uv.y < 0 || uv.y > 1)
{
col.a = 0;
}
col.rgb = lerp(0.41 - 0.13 * checkerboard(i.uv), col.rgb, col.a);
if (i.uv.x < 0 || i.uv.x > 1 || i.uv.y < 0 || i.uv.y > 1 || abs(i.uv.x - 0.5) < (14 / 256.0))
{
col = _BackgroundColor;
}
// now draw clipping objects
float left = isLeftEye && (onLine(leftUV, _DestRectLeft) ||
onDrag(leftUV, i.leftDragX.x, i.leftDragY.x) ||
onDrag(leftUV, i.leftDragX.y, i.leftDragY.y) ||
onDrag(leftUV, i.leftDragX.z, i.leftDragY.z) ||
onDrag(leftUV, i.leftDragX.w, i.leftDragY.w));
float right = (!isLeftEye) && (onLine(rightUV, _DestRectRight) ||
onDrag(rightUV, i.rightDragX.x, i.rightDragY.x) ||
onDrag(rightUV, i.rightDragX.y, i.rightDragY.y) ||
onDrag(rightUV, i.rightDragX.z, i.rightDragY.z) ||
onDrag(rightUV, i.rightDragX.w, i.rightDragY.w));
return lerp(col, fixed4(left, right, 0, 1), left || right);
}
ENDCG
}
}
}

View File

@@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 7c52c9bacdbb59f4a973dd1849d03106
ShaderImporter:
externalObjects: {}
defaultTextures: []
nonModifiableTextures: []
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,581 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEditor;
[CustomEditor(typeof(OVROverlay))]
public class OVROverlayEditor : Editor
{
/// <summary>
/// Common Video Types, to ease source and dest rect creation
/// </summary>
public enum StereoType
{
Custom = 0,
Mono = 1,
Stereo = 2,
StereoLeftRight = 3,
StereoTopBottom = 4,
}
public enum DisplayType
{
Custom = 0,
Full = 1,
Half = 2
}
private bool sourceRectsVisible = false;
private bool destRectsVisible = false;
private Material _SrcRectMaterial;
protected Material SrcRectMaterial
{
get
{
if (_SrcRectMaterial == null)
{
string[] shaders = AssetDatabase.FindAssets("OVROverlaySrcRectEditor");
if (shaders.Length > 0)
{
Shader shader = (Shader)AssetDatabase.LoadAssetAtPath(AssetDatabase.GUIDToAssetPath(shaders[0]), typeof(Shader));
if (shader != null)
{
_SrcRectMaterial = new Material(shader);
}
}
}
return _SrcRectMaterial;
}
}
private Material _DestRectMaterial;
protected Material DestRectMaterial
{
get
{
if (_DestRectMaterial == null)
{
string[] shaders = AssetDatabase.FindAssets("OVROverlayDestRectEditor");
if (shaders.Length > 0)
{
Shader shader = (Shader)AssetDatabase.LoadAssetAtPath(AssetDatabase.GUIDToAssetPath(shaders[0]), typeof(Shader));
if (shader != null)
{
_DestRectMaterial = new Material(shader);
}
}
}
return _DestRectMaterial;
}
}
private TextureRect _DraggingRect;
private Side _DraggingSide;
enum TextureRect
{
None,
SrcLeft,
SrcRight,
DestLeft,
DestRight
}
enum Side
{
Left,
Right,
Top,
Bottom
}
public override void OnInspectorGUI()
{
OVROverlay overlay = (OVROverlay)target;
if (overlay == null)
{
return;
}
EditorGUILayout.LabelField("Display Order", EditorStyles.boldLabel);
overlay.currentOverlayType = (OVROverlay.OverlayType)EditorGUILayout.EnumPopup(new GUIContent("Current Overlay Type", "Whether this overlay should layer behind the scene or in front of it"), overlay.currentOverlayType);
overlay.compositionDepth = EditorGUILayout.IntField(new GUIContent("Composition Depth", "Depth value used to sort OVROverlays in the scene, smaller value appears in front"), overlay.compositionDepth);
overlay.noDepthBufferTesting = EditorGUILayout.Toggle(new GUIContent("No Depth Buffer Testing", "If checked, depth buffer compositing is disabled for this layer even if the engine has \"Shared Depth Buffer\" enabled"), overlay.noDepthBufferTesting);
EditorGUILayout.Space();
EditorGUILayout.LabelField(new GUIContent("Overlay Shape", "The shape of this overlay"), EditorStyles.boldLabel);
overlay.currentOverlayShape = (OVROverlay.OverlayShape)EditorGUILayout.EnumPopup(new GUIContent("Overlay Shape", "The shape of this overlay"), overlay.currentOverlayShape);
EditorGUILayout.Space();
EditorGUILayout.Separator();
EditorGUILayout.LabelField("Textures", EditorStyles.boldLabel);
#if UNITY_ANDROID
bool lastIsExternalSurface = overlay.isExternalSurface;
overlay.isExternalSurface = EditorGUILayout.Toggle(new GUIContent("Is External Surface", "On Android, retrieve an Android Surface object to render to (e.g., video playback)"), overlay.isExternalSurface);
if (lastIsExternalSurface)
{
overlay.externalSurfaceWidth = EditorGUILayout.IntField("External Surface Width", overlay.externalSurfaceWidth);
overlay.externalSurfaceHeight = EditorGUILayout.IntField("External Surface Height", overlay.externalSurfaceHeight);
overlay.isProtectedContent = EditorGUILayout.Toggle(new GUIContent("Is Protected Content", "The external surface has L1 Widevine protection."), overlay.isProtectedContent);
}
else
#endif
{
if (overlay.textures == null)
{
overlay.textures = new Texture[2];
}
if (overlay.textures.Length < 2)
{
Texture[] tmp = new Texture[2];
for (int i = 0; i < overlay.textures.Length; i++)
{
tmp[i] = overlay.textures[i];
}
overlay.textures = tmp;
}
var labelControlRect = EditorGUILayout.GetControlRect();
EditorGUI.LabelField(new Rect(labelControlRect.x, labelControlRect.y, labelControlRect.width / 2, labelControlRect.height), new GUIContent("Left Texture", "Texture used for the left eye"));
EditorGUI.LabelField(new Rect(labelControlRect.x + labelControlRect.width / 2, labelControlRect.y, labelControlRect.width / 2, labelControlRect.height), new GUIContent("Right Texture", "Texture used for the right eye"));
var textureControlRect = EditorGUILayout.GetControlRect(GUILayout.Height(64));
overlay.textures[0] = (Texture)EditorGUI.ObjectField(new Rect(textureControlRect.x, textureControlRect.y, 64, textureControlRect.height), overlay.textures[0], typeof(Texture), true);
Texture right = (Texture)EditorGUI.ObjectField(new Rect(textureControlRect.x + textureControlRect.width / 2, textureControlRect.y, 64, textureControlRect.height), overlay.textures[1] != null ? overlay.textures[1] : overlay.textures[0], typeof(Texture), true);
if (right == overlay.textures[0])
{
overlay.textures[1] = null;
}
else
{
overlay.textures[1] = right;
}
overlay.isDynamic = EditorGUILayout.Toggle(new GUIContent("Dynamic Texture", "This texture will be updated dynamically at runtime (e.g., Video)"), overlay.isDynamic);
#if !UNITY_ANDROID
overlay.isProtectedContent = EditorGUILayout.Toggle(new GUIContent("Is Protected Content", "The texture has copy protection, e.g., HDCP"), overlay.isProtectedContent);
#endif
}
if (overlay.currentOverlayShape == OVROverlay.OverlayShape.Cylinder || overlay.currentOverlayShape == OVROverlay.OverlayShape.Equirect || overlay.currentOverlayShape == OVROverlay.OverlayShape.Quad)
{
EditorGUILayout.Separator();
EditorGUILayout.Space();
EditorGUILayout.LabelField("Texture Rects", EditorStyles.boldLabel);
bool lastOverrideTextureRectMatrix = overlay.overrideTextureRectMatrix;
overlay.overrideTextureRectMatrix = !EditorGUILayout.Toggle(new GUIContent("Use Default Rects", overlay.textures[1] == null ? "If you need to use a single texture as a stereo image, uncheck this box" : "Uncheck this box if you need to clip your textures or layers"), !overlay.overrideTextureRectMatrix);
if (lastOverrideTextureRectMatrix)
{
sourceRectsVisible = EditorGUILayout.Foldout(sourceRectsVisible, new GUIContent("Source Rects", "What portion of the source texture will ultimately be shown in each eye."));
if (sourceRectsVisible)
{
var mat = SrcRectMaterial;
if (mat != null)
{
Rect drawRect = EditorGUILayout.GetControlRect(GUILayout.Height(128 + 8));
Vector4 srcLeft = new Vector4(Mathf.Max(0.0f, overlay.srcRectLeft.x), Mathf.Max(0.0f, overlay.srcRectLeft.y), Mathf.Min(1.0f - overlay.srcRectLeft.x, overlay.srcRectLeft.width), Mathf.Min(1.0f - overlay.srcRectLeft.y, overlay.srcRectLeft.height));
Vector4 srcRight = new Vector4(Mathf.Max(0.0f, overlay.srcRectRight.x), Mathf.Max(0.0f, overlay.srcRectRight.y), Mathf.Min(1.0f - overlay.srcRectRight.x, overlay.srcRectRight.width), Mathf.Min(1.0f - overlay.srcRectRight.y, overlay.srcRectRight.height));
if (overlay.invertTextureRects)
{
srcLeft.y = 1 - srcLeft.y - srcLeft.w;
srcRight.y = 1 - srcRight.y - srcRight.w;
}
mat.SetVector("_SrcRectLeft", srcLeft);
mat.SetVector("_SrcRectRight", srcRight);
// center our draw rect
var drawRectCentered = new Rect(drawRect.x + drawRect.width / 2 - 128 - 4, drawRect.y, 256 + 8, drawRect.height);
EditorGUI.DrawPreviewTexture(drawRectCentered, overlay.textures[0] ?? Texture2D.blackTexture, mat);
var drawRectInset = new Rect(drawRectCentered.x + 4, drawRectCentered.y + 4, drawRectCentered.width - 8, drawRectCentered.height - 8);
UpdateRectDragging(drawRectInset, drawRectInset, TextureRect.SrcLeft, TextureRect.SrcRight, overlay.invertTextureRects, ref overlay.srcRectLeft, ref overlay.srcRectRight);
CreateCursorRects(drawRectInset, overlay.srcRectLeft, overlay.invertTextureRects);
CreateCursorRects(drawRectInset, overlay.srcRectRight, overlay.invertTextureRects);
}
var labelControlRect = EditorGUILayout.GetControlRect();
EditorGUI.LabelField(new Rect(labelControlRect.x, labelControlRect.y, labelControlRect.width / 2, labelControlRect.height), new GUIContent("Left Source Rect", "The rect in the source image that will be displayed on the left eye layer"));
EditorGUI.LabelField(new Rect(labelControlRect.x + labelControlRect.width / 2, labelControlRect.y, labelControlRect.width / 2, labelControlRect.height), new GUIContent("Right Source Rect", "The rect in the source image that will be displayed on the right eye layer"));
var rectControlRect = EditorGUILayout.GetControlRect(GUILayout.Height(34));
overlay.srcRectLeft = Clamp01(EditorGUI.RectField(new Rect(rectControlRect.x, rectControlRect.y, rectControlRect.width / 2 - 20, rectControlRect.height), overlay.srcRectLeft));
overlay.srcRectRight = Clamp01(EditorGUI.RectField(new Rect(rectControlRect.x + rectControlRect.width / 2, rectControlRect.y, rectControlRect.width / 2 - 20, rectControlRect.height), overlay.srcRectRight));
EditorGUILayout.BeginHorizontal();
if (overlay.textures[1] != null)
{
if (GUILayout.Button(new GUIContent("Reset To Default", "Reset Source Rects to default")))
{
SetRectsByVideoType(overlay, StereoType.Stereo, DisplayType.Custom);
}
}
else
{
if (GUILayout.Button(new GUIContent("Monoscopic", "Display the full Texture in both eyes")))
{
SetRectsByVideoType(overlay, StereoType.Mono, DisplayType.Custom);
}
if (GUILayout.Button(new GUIContent("Stereo Left/Right", "The left half of the texture is displayed in the left eye, and the right half in the right eye")))
{
SetRectsByVideoType(overlay, StereoType.StereoLeftRight, DisplayType.Custom);
}
if (GUILayout.Button(new GUIContent("Stereo Top/Bottom", "The top half of the texture is displayed in the left eye, and the bottom half in the right eye")))
{
SetRectsByVideoType(overlay, StereoType.StereoTopBottom, DisplayType.Custom);
}
}
EditorGUILayout.EndHorizontal();
}
destRectsVisible = EditorGUILayout.Foldout(destRectsVisible, new GUIContent("Destination Rects", "What portion of the destination layer the source will be rendered into."));
if (destRectsVisible)
{
var mat = DestRectMaterial;
if (mat != null)
{
Rect drawRect = EditorGUILayout.GetControlRect(GUILayout.Height(128 + 8));
Vector4 srcLeft = new Vector4(Mathf.Max(0.0f, overlay.srcRectLeft.x), Mathf.Max(0.0f, overlay.srcRectLeft.y), Mathf.Min(1.0f - overlay.srcRectLeft.x, overlay.srcRectLeft.width), Mathf.Min(1.0f - overlay.srcRectLeft.y, overlay.srcRectLeft.height));
Vector4 srcRight = new Vector4(Mathf.Max(0.0f, overlay.srcRectRight.x), Mathf.Max(0.0f, overlay.srcRectRight.y), Mathf.Min(1.0f - overlay.srcRectRight.x, overlay.srcRectRight.width), Mathf.Min(1.0f - overlay.srcRectRight.y, overlay.srcRectRight.height));
Vector4 destLeft = new Vector4(Mathf.Max(0.0f, overlay.destRectLeft.x), Mathf.Max(0.0f, overlay.destRectLeft.y), Mathf.Min(1.0f - overlay.destRectLeft.x, overlay.destRectLeft.width), Mathf.Min(1.0f - overlay.destRectLeft.y, overlay.destRectLeft.height));
Vector4 destRight = new Vector4(Mathf.Max(0.0f, overlay.destRectRight.x), Mathf.Max(0.0f, overlay.destRectRight.y), Mathf.Min(1.0f - overlay.destRectRight.x, overlay.destRectRight.width), Mathf.Min(1.0f - overlay.destRectRight.y, overlay.destRectRight.height));
if (overlay.invertTextureRects)
{
srcLeft.y = 1 - srcLeft.y - srcLeft.w;
srcRight.y = 1 - srcRight.y - srcRight.w;
destLeft.y = 1 - destLeft.y - destLeft.w;
destRight.y = 1 - destRight.y - destRight.w;
}
mat.SetVector("_SrcRectLeft", srcLeft);
mat.SetVector("_SrcRectRight", srcRight);
mat.SetVector("_DestRectLeft", destLeft);
mat.SetVector("_DestRectRight", destRight);
mat.SetColor("_BackgroundColor", EditorGUIUtility.isProSkin ? (Color)new Color32(56, 56, 56, 255) : (Color)new Color32(194, 194, 194, 255));
var drawRectCentered = new Rect(drawRect.x + drawRect.width / 2 - 128 - 16 - 4, drawRect.y, 256 + 32 + 8, drawRect.height);
// center our draw rect
EditorGUI.DrawPreviewTexture(drawRectCentered, overlay.textures[0] ?? Texture2D.blackTexture, mat);
var drawRectInsetLeft = new Rect(drawRectCentered.x + 4, drawRectCentered.y + 4, drawRectCentered.width / 2 - 20, drawRectCentered.height - 8);
var drawRectInsetRight = new Rect(drawRectCentered.x + drawRectCentered.width / 2 + 16, drawRectCentered.y + 4, drawRectCentered.width / 2 - 20, drawRectCentered.height - 8);
UpdateRectDragging(drawRectInsetLeft, drawRectInsetRight, TextureRect.DestLeft, TextureRect.DestRight, overlay.invertTextureRects, ref overlay.destRectLeft, ref overlay.destRectRight);
CreateCursorRects(drawRectInsetLeft, overlay.destRectLeft, overlay.invertTextureRects);
CreateCursorRects(drawRectInsetRight, overlay.destRectRight, overlay.invertTextureRects);
}
var labelControlRect = EditorGUILayout.GetControlRect();
EditorGUI.LabelField(new Rect(labelControlRect.x, labelControlRect.y, labelControlRect.width / 2, labelControlRect.height), new GUIContent("Left Destination Rect", "The rect in the destination layer the left eye will display to"));
EditorGUI.LabelField(new Rect(labelControlRect.x + labelControlRect.width / 2, labelControlRect.y, labelControlRect.width / 2, labelControlRect.height), new GUIContent("Right Destination Rect", "The rect in the destination layer the right eye will display to"));
var rectControlRect = EditorGUILayout.GetControlRect(GUILayout.Height(34));
overlay.destRectLeft = Clamp01(EditorGUI.RectField(new Rect(rectControlRect.x, rectControlRect.y, rectControlRect.width / 2 - 20, rectControlRect.height), overlay.destRectLeft));
overlay.destRectRight = Clamp01(EditorGUI.RectField(new Rect(rectControlRect.x + rectControlRect.width / 2, rectControlRect.y, rectControlRect.width / 2 - 20, rectControlRect.height), overlay.destRectRight));
if (overlay.currentOverlayShape == OVROverlay.OverlayShape.Equirect)
{
EditorGUILayout.BeginHorizontal();
if (GUILayout.Button(new GUIContent("360 Video", "Display the full 360 layer")))
{
SetRectsByVideoType(overlay, StereoType.Custom, DisplayType.Full);
}
if (GUILayout.Button(new GUIContent("180 Video", "Display the front 180 layer")))
{
SetRectsByVideoType(overlay, StereoType.Custom, DisplayType.Half);
}
EditorGUILayout.EndHorizontal();
}
else
{
if (GUILayout.Button(new GUIContent("Reset To Default", "Reset Source Rects to default")))
{
SetRectsByVideoType(overlay, StereoType.Custom, DisplayType.Full);
}
}
}
overlay.invertTextureRects = EditorGUILayout.Toggle(new GUIContent("Invert Rect Coordinates", "Check this box to use the top left corner of the texture as the origin"), overlay.invertTextureRects);
}
}
EditorGUILayout.Separator();
EditorGUILayout.LabelField("Color Scale", EditorStyles.boldLabel);
EditorGUILayout.Space();
overlay.overridePerLayerColorScaleAndOffset = EditorGUILayout.Toggle(new GUIContent("Override Color Scale", "Manually set color scale and offset of this layer, regardless of what the global values are from OVRManager.SetColorScaleAndOffset()."), overlay.overridePerLayerColorScaleAndOffset);
if (overlay.overridePerLayerColorScaleAndOffset)
{
Vector4 colorScale = EditorGUILayout.Vector4Field(new GUIContent("Color Scale", "Scale that the color values for this overlay will be multiplied by."), overlay.colorScale);
Vector4 colorOffset = EditorGUILayout.Vector4Field(new GUIContent("Color Offset", "Offset that will be added to the color values for this overlay."), overlay.colorOffset);
overlay.SetPerLayerColorScaleAndOffset(colorScale, colorOffset);
}
EditorGUILayout.Separator();
EditorGUILayout.LabelField("Preview", EditorStyles.boldLabel);
overlay.previewInEditor = EditorGUILayout.Toggle(new GUIContent("Preview in Editor (Experimental)", "Preview the overlay in the editor using a mesh renderer."), overlay.previewInEditor);
EditorUtility.SetDirty(overlay);
}
private Rect Clamp01(Rect rect)
{
rect.x = Mathf.Clamp01(rect.x);
rect.y = Mathf.Clamp01(rect.y);
rect.width = Mathf.Clamp01(rect.width);
rect.height = Mathf.Clamp01(rect.height);
return rect;
}
private bool IsUnitRect(Rect rect)
{
return IsRect(rect, 0, 0, 1, 1);
}
private bool IsRect(Rect rect, float x, float y, float w, float h)
{
return rect.x == x && rect.y == y && rect.width == w && rect.height == h;
}
private StereoType GetStereoType(OVROverlay overlay)
{
if (overlay.textures[0] != null && overlay.textures[1] != null)
{
if (IsUnitRect(overlay.srcRectLeft) && IsUnitRect(overlay.srcRectRight))
{
return StereoType.Stereo;
}
else
{
return StereoType.Custom;
}
}
else if (overlay.textures[0] != null)
{
if (IsUnitRect(overlay.srcRectLeft) && IsUnitRect(overlay.srcRectRight))
{
return StereoType.Mono;
}
else if (IsRect(overlay.srcRectLeft, 0, 0, 0.5f, 1f) && IsRect(overlay.srcRectRight, 0.5f, 0, 0.5f, 1f))
{
return StereoType.StereoLeftRight;
}
else if (overlay.invertTextureRects && IsRect(overlay.srcRectLeft, 0, 0.0f, 1f, 0.5f) && IsRect(overlay.srcRectRight, 0f, 0.5f, 1f, 0.5f))
{
return StereoType.StereoTopBottom;
}
else if (!overlay.invertTextureRects && IsRect(overlay.srcRectLeft, 0, 0.5f, 1f, 0.5f) && IsRect(overlay.srcRectRight, 0f, 0f, 1f, 0.5f))
{
return StereoType.StereoTopBottom;
}
else
{
return StereoType.Custom;
}
}
else
{
return StereoType.Mono;
}
}
private void SetRectsByVideoType(OVROverlay overlay, StereoType stereoType, DisplayType displayType)
{
Rect srcRectLeft, srcRectRight, destRectLeft, destRectRight;
switch (displayType)
{
case DisplayType.Full:
destRectLeft = destRectRight = new Rect(0, 0, 1, 1);
break;
case DisplayType.Half:
destRectLeft = destRectRight = new Rect(0.25f, 0, 0.5f, 1);
break;
default:
destRectLeft = overlay.destRectLeft;
destRectRight = overlay.destRectRight;
break;
}
switch (stereoType)
{
case StereoType.Mono:
case StereoType.Stereo:
srcRectLeft = srcRectRight = new Rect(0, 0, 1, 1);
break;
case StereoType.StereoTopBottom:
if (overlay.invertTextureRects)
{
srcRectLeft = new Rect(0, 0.0f, 1, 0.5f);
srcRectRight = new Rect(0, 0.5f, 1, 0.5f);
}
else
{
srcRectLeft = new Rect(0, 0.5f, 1, 0.5f);
srcRectRight = new Rect(0, 0.0f, 1, 0.5f);
}
break;
case StereoType.StereoLeftRight:
srcRectLeft = new Rect(0, 0, 0.5f, 1);
srcRectRight = new Rect(0.5f, 0, 0.5f, 1);
break;
default:
srcRectLeft = overlay.srcRectLeft;
srcRectRight = overlay.srcRectRight;
break;
}
overlay.SetSrcDestRects(srcRectLeft, srcRectRight, destRectLeft, destRectRight);
}
private void GetCursorPoints(Rect drawRect, Rect selectRect, bool invertY, out Vector2 leftPos, out Vector2 rightPos, out Vector2 topPos, out Vector2 bottomPos)
{
if (invertY)
{
selectRect.y = 1 - selectRect.y - selectRect.height;
}
leftPos = new Vector2(drawRect.x + selectRect.x * drawRect.width, drawRect.y + (1 - selectRect.y - selectRect.height / 2) * drawRect.height);
rightPos = new Vector2(drawRect.x + (selectRect.x + selectRect.width) * drawRect.width, drawRect.y + (1 - selectRect.y - selectRect.height / 2) * drawRect.height);
topPos = new Vector2(drawRect.x + (selectRect.x + selectRect.width / 2) * drawRect.width, drawRect.y + (1 - selectRect.y - selectRect.height) * drawRect.height);
bottomPos = new Vector2(drawRect.x + (selectRect.x + selectRect.width / 2) * drawRect.width, drawRect.y + (1 - selectRect.y) * drawRect.height);
if (invertY)
{
// swap top and bottom
var tmp = topPos;
topPos = bottomPos;
bottomPos = tmp;
}
}
private void CreateCursorRects(Rect drawRect, Rect selectRect, bool invertY)
{
Vector2 leftPos, rightPos, topPos, bottomPos;
GetCursorPoints(drawRect, selectRect, invertY, out leftPos, out rightPos, out topPos, out bottomPos);
EditorGUIUtility.AddCursorRect(new Rect(leftPos - 5 * Vector2.one, 10 * Vector2.one), MouseCursor.ResizeHorizontal);
EditorGUIUtility.AddCursorRect(new Rect(rightPos - 5 * Vector2.one, 10 * Vector2.one), MouseCursor.ResizeHorizontal);
EditorGUIUtility.AddCursorRect(new Rect(topPos - 5 * Vector2.one, 10 * Vector2.one), MouseCursor.ResizeVertical);
EditorGUIUtility.AddCursorRect(new Rect(bottomPos - 5 * Vector2.one, 10 * Vector2.one), MouseCursor.ResizeVertical);
}
private bool IsOverRectControls(Rect drawRect, Vector2 mousePos, Rect selectRect, bool invertY, ref Side side)
{
Vector2 leftPos, rightPos, topPos, bottomPos;
GetCursorPoints(drawRect, selectRect, invertY, out leftPos, out rightPos, out topPos, out bottomPos);
if ((leftPos - mousePos).sqrMagnitude <= 25)
{
side = Side.Left;
return true;
}
if ((rightPos - mousePos).sqrMagnitude <= 25)
{
side = Side.Right;
return true;
}
if ((topPos - mousePos).sqrMagnitude <= 25)
{
side = Side.Top;
return true;
}
if ((bottomPos - mousePos).sqrMagnitude <= 25)
{
side = Side.Bottom;
return true;
}
return false;
}
private void UpdateRectDragging(Rect drawingRectLeft, Rect drawingRectRight, TextureRect rectLeftType, TextureRect rectRightType, bool invertY, ref Rect rectLeft, ref Rect rectRight)
{
if (!Event.current.isMouse || Event.current.button != 0)
{
return;
}
if (Event.current.type == EventType.MouseUp)
{
_DraggingRect = TextureRect.None;
return;
}
Vector2 mousePos = Event.current.mousePosition;
if (_DraggingRect == TextureRect.None && Event.current.type == EventType.MouseDown)
{
if (IsOverRectControls(drawingRectLeft, mousePos, rectLeft, invertY, ref _DraggingSide))
{
_DraggingRect = rectLeftType;
}
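// Holding Shift lets the right rect's handles take priority when they overlap the left rect's.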
if (_DraggingRect == TextureRect.None || Event.current.shift)
{
if (IsOverRectControls(drawingRectRight, mousePos, rectRight, invertY, ref _DraggingSide))
{
_DraggingRect = rectRightType;
}
}
}
if (_DraggingRect == rectLeftType)
{
SetRectSideValue(drawingRectLeft, mousePos, _DraggingSide, invertY, ref rectLeft);
}
if (_DraggingRect == rectRightType)
{
SetRectSideValue(drawingRectRight, mousePos, _DraggingSide, invertY, ref rectRight);
}
}
private void SetRectSideValue(Rect drawingRect, Vector2 mousePos, Side side, bool invertY, ref Rect rect)
{
// quantize to 1/32
float x = Mathf.Clamp01(Mathf.Round(((mousePos.x - drawingRect.x) / drawingRect.width) * 32) / 32.0f);
float y = Mathf.Clamp01(Mathf.Round(((mousePos.y - drawingRect.y) / drawingRect.height) * 32) / 32.0f);
if (!invertY)
{
y = 1 - y;
}
switch (side)
{
case Side.Left:
float xMax = rect.xMax;
rect.x = Mathf.Min(x, xMax);
rect.width = xMax - rect.x;
break;
case Side.Right:
rect.width = Mathf.Max(0, x - rect.x);
break;
case Side.Bottom:
float yMax = rect.yMax;
rect.y = Mathf.Min(y, yMax);
rect.height = yMax - rect.y;
break;
case Side.Top:
rect.height = Mathf.Max(0, y - rect.y);
break;
}
}
}
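
The rect conventions this inspector edits can also be driven from script. A minimal runtime sketch, assuming a single side-by-side stereo texture (the same layout SetRectsByVideoType produces for StereoType.StereoLeftRight); SideBySideOverlaySetup is a hypothetical name:

using UnityEngine;

// Hypothetical runtime setup mirroring StereoType.StereoLeftRight:
// the left eye samples the left half of the texture, the right eye the
// right half, and both eyes render to the full destination layer.
public class SideBySideOverlaySetup : MonoBehaviour
{
    void Start()
    {
        var overlay = GetComponent<OVROverlay>();
        overlay.overrideTextureRectMatrix = true; // opt out of the default full-texture rects
        overlay.SetSrcDestRects(
            new Rect(0.0f, 0f, 0.5f, 1f),  // srcRectLeft
            new Rect(0.5f, 0f, 0.5f, 1f),  // srcRectRight
            new Rect(0f, 0f, 1f, 1f),      // destRectLeft
            new Rect(0f, 0f, 1f, 1f));     // destRectRight
    }
}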

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: fde3aeb28643f6c48a48f926ac7207e0
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,138 @@
Shader "Unlit/OVROverlaySrcRectEditor"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_SrcRectLeft ("SrcRectLeft", Vector) = (0,0,1,1)
_SrcRectRight("SrcRectRight", Vector) = (0,0,1,1)
_BackgroundColor("Background Color", Color) = (0.225, 0.225, 0.225, 1)
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 100
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
float4 leftDragX : TEXCOORD1;
float4 leftDragY : TEXCOORD2;
float4 rightDragX : TEXCOORD3;
float4 rightDragY : TEXCOORD4;
};
sampler2D _MainTex;
float4 _MainTex_ST;
float4 _SrcRectLeft;
float4 _SrcRectRight;
fixed4 _BackgroundColor;
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
// Add padding
o.uv = (o.uv - 0.5) * (256.0 + 8.0) / (256.0) + 0.5;
// left
o.leftDragX.x = _SrcRectLeft.x;
o.leftDragY.x = _SrcRectLeft.y + _SrcRectLeft.w * 0.5;
// right
o.leftDragX.y = _SrcRectLeft.x + _SrcRectLeft.z;
o.leftDragY.y = _SrcRectLeft.y + _SrcRectLeft.w * 0.5;
// top
o.leftDragX.z = _SrcRectLeft.x + _SrcRectLeft.z * 0.5;
o.leftDragY.z = _SrcRectLeft.y;
// bottom
o.leftDragX.w = _SrcRectLeft.x + _SrcRectLeft.z * 0.5;
o.leftDragY.w = _SrcRectLeft.y + _SrcRectLeft.w;
// left
o.rightDragX.x = _SrcRectRight.x;
o.rightDragY.x = _SrcRectRight.y + _SrcRectRight.w * 0.5;
// right
o.rightDragX.y = _SrcRectRight.x + _SrcRectRight.z;
o.rightDragY.y = _SrcRectRight.y + _SrcRectRight.w * 0.5;
// top
o.rightDragX.z = _SrcRectRight.x + _SrcRectRight.z * 0.5;
o.rightDragY.z = _SrcRectRight.y;
// bottom
o.rightDragX.w = _SrcRectRight.x + _SrcRectRight.z * 0.5;
o.rightDragY.w = _SrcRectRight.y + _SrcRectRight.w;
return o;
}
float onDrag(float2 uv, float x, float y)
{
const float pixelSize = 6;
return abs(uv.x - x) < ((pixelSize / 2) / 256.0) && abs(uv.y - y) < ((pixelSize / 2) / 128.0);
}
float onLine(float2 uv, float4 rect)
{
return
(abs(uv.x - rect.x) < (1 / 256.0) && uv.y >= rect.y && uv.y <= rect.y + rect.w) ||
(abs(uv.x - rect.x - rect.z) < (1 / 256.0) && uv.y >= rect.y && uv.y <= rect.y + rect.w) ||
(abs(uv.y - rect.y) < (1 / 128.0) && uv.x >= rect.x && uv.x <= rect.x + rect.z) ||
(abs(uv.y - rect.y - rect.w) < (1 / 128.0) && uv.x >= rect.x && uv.x <= rect.x + rect.z);
}
float checkerboard(float2 uv)
{
float x = floor(uv.x * (16));
float y = floor(uv.y * 8);
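// equivalent to (x + y) mod 2: alternates between 0 and 1 on adjacent cells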
return 2 * ((x + y) / 2.0 - floor((x + y) / 2.0));
}
fixed4 frag (v2f i) : SV_Target
{
// sample the texture
fixed4 col = tex2D(_MainTex, i.uv);
col.rgb = lerp(0.41 - 0.13 * checkerboard(i.uv), col.rgb, col.a);
if (i.uv.x < 0 || i.uv.x > 1 || i.uv.y < 0 || i.uv.y > 1)
{
col = _BackgroundColor;
}
float2 uv = i.uv.xy;
// now draw clipping objects
float left = onLine(uv, _SrcRectLeft) ||
onDrag(uv, i.leftDragX.x, i.leftDragY.x) ||
onDrag(uv, i.leftDragX.y, i.leftDragY.y) ||
onDrag(uv, i.leftDragX.z, i.leftDragY.z) ||
onDrag(uv, i.leftDragX.w, i.leftDragY.w);
float right = onLine(uv, _SrcRectRight) ||
onDrag(uv, i.rightDragX.x, i.rightDragY.x) ||
onDrag(uv, i.rightDragX.y, i.rightDragY.y) ||
onDrag(uv, i.rightDragX.z, i.rightDragY.z) ||
onDrag(uv, i.rightDragX.w, i.rightDragY.w);
return lerp(col, fixed4(left, right, 0, 1), left || right);
}
ENDCG
}
}
}

View File

@@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 589b36d0aa66c7349bcff8750b670434
ShaderImporter:
externalObjects: {}
defaultTextures: []
nonModifiableTextures: []
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,102 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEditor;
[CustomEditor(typeof(OVRProjectConfig))]
public class OVRProjectConfigEditor : Editor
{
override public void OnInspectorGUI()
{
OVRProjectConfig projectConfig = (OVRProjectConfig)target;
DrawTargetDeviceInspector(projectConfig);
EditorGUILayout.Space();
DrawProjectConfigInspector(projectConfig);
}
public static void DrawTargetDeviceInspector(OVRProjectConfig projectConfig)
{
bool hasModified = false;
// Target Devices
EditorGUILayout.LabelField("Target Devices", EditorStyles.boldLabel);
foreach (OVRProjectConfig.DeviceType deviceType in System.Enum.GetValues(typeof(OVRProjectConfig.DeviceType)))
{
bool oldSupportsDevice = projectConfig.targetDeviceTypes.Contains(deviceType);
bool newSupportsDevice = oldSupportsDevice;
OVREditorUtil.SetupBoolField(projectConfig, ObjectNames.NicifyVariableName(deviceType.ToString()), ref newSupportsDevice, ref hasModified);
if (newSupportsDevice && !oldSupportsDevice)
{
projectConfig.targetDeviceTypes.Add(deviceType);
}
else if (oldSupportsDevice && !newSupportsDevice)
{
projectConfig.targetDeviceTypes.Remove(deviceType);
}
}
if (hasModified)
{
OVRProjectConfig.CommitProjectConfig(projectConfig);
}
}
public static void DrawProjectConfigInspector(OVRProjectConfig projectConfig)
{
bool hasModified = false;
EditorGUI.BeginDisabledGroup(!projectConfig.targetDeviceTypes.Contains(OVRProjectConfig.DeviceType.Quest));
EditorGUILayout.LabelField("Quest Features", EditorStyles.boldLabel);
// Show overlay support option
OVREditorUtil.SetupBoolField(projectConfig, new GUIContent("Focus Aware",
"If checked, the new overlay will be displayed when the user presses the home button. The game will not be paused, but will now receive InputFocusLost and InputFocusAcquired events."),
ref projectConfig.focusAware, ref hasModified);
if (!projectConfig.focusAware && projectConfig.requiresSystemKeyboard)
{
projectConfig.requiresSystemKeyboard = false;
hasModified = true;
}
// Hand Tracking Support
OVREditorUtil.SetupEnumField(projectConfig, "Hand Tracking Support", ref projectConfig.handTrackingSupport, ref hasModified);
// System Keyboard Support
OVREditorUtil.SetupBoolField(projectConfig, new GUIContent("Requires System Keyboard",
"*Requires Focus Awareness* If checked, the Oculus System keyboard will be enabled for Unity input fields and any calls to open/close the Unity TouchScreenKeyboard."),
ref projectConfig.requiresSystemKeyboard, ref hasModified);
if (projectConfig.requiresSystemKeyboard && !projectConfig.focusAware)
{
projectConfig.focusAware = true;
hasModified = true;
}
EditorGUI.EndDisabledGroup();
EditorGUILayout.Space();
EditorGUI.BeginDisabledGroup(false);
EditorGUILayout.LabelField("Android Build Settings", EditorStyles.boldLabel);
// Show overlay support option
OVREditorUtil.SetupBoolField(projectConfig, new GUIContent("Skip Unneeded Shaders",
"If checked, prevent building shaders that are not used by default to reduce time spent when building."),
ref projectConfig.skipUnneededShaders, ref hasModified);
EditorGUI.EndDisabledGroup();
EditorGUILayout.Space();
EditorGUILayout.LabelField("Security", EditorStyles.boldLabel);
OVREditorUtil.SetupInputField(projectConfig, "Custom Security XML Path", ref projectConfig.securityXmlPath, ref hasModified);
OVREditorUtil.SetupBoolField(projectConfig, "Disable Backups", ref projectConfig.disableBackups, ref hasModified);
OVREditorUtil.SetupBoolField(projectConfig, "Enable NSC Configuration", ref projectConfig.enableNSCConfig, ref hasModified);
// apply any pending changes to project config
if (hasModified)
{
OVRProjectConfig.CommitProjectConfig(projectConfig);
}
}
}
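
The read-modify-commit flow above can be reused outside the inspector, for example from build tooling. A minimal sketch that relies only on the GetProjectConfig/CommitProjectConfig calls shown above; QuestBuildSetup and the menu path are hypothetical:

using UnityEditor;

// Hypothetical build-tooling script applying the same read-modify-commit
// flow the inspector uses: fetch the config asset, mutate it, then commit
// so the change is written to disk.
public static class QuestBuildSetup
{
    [MenuItem("Tools/Enable Quest Target")] // hypothetical menu path
    public static void EnableQuestTarget()
    {
        OVRProjectConfig projectConfig = OVRProjectConfig.GetProjectConfig();
        if (!projectConfig.targetDeviceTypes.Contains(OVRProjectConfig.DeviceType.Quest))
        {
            projectConfig.targetDeviceTypes.Add(OVRProjectConfig.DeviceType.Quest);
            OVRProjectConfig.CommitProjectConfig(projectConfig);
        }
    }
}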

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 950d95332920b814ea41df294856f96a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,17 @@
{
"name": "Oculus.VR.Scripts.Editor",
"references": [
"Oculus.VR",
"Oculus.VR.Editor"
],
"optionalUnityReferences": [],
"includePlatforms": [
"Editor"
],
"excludePlatforms": [],
"allowUnsafeCode": false,
"overrideReferences": false,
"precompiledReferences": [],
"autoReferenced": true,
"defineConstraints": []
}

View File

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 7305c54a43f3814439df347c7519653e
AssemblyDefinitionImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,235 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if USING_XR_MANAGEMENT && USING_XR_SDK_OCULUS
#define USING_XR_SDK
#endif
#if UNITY_2020_1_OR_NEWER
#define REQUIRES_XR_SDK
#endif
using System;
using System.Collections.Generic;
using UnityEngine;
using System.Runtime.InteropServices;
#if !USING_XR_SDK && !REQUIRES_XR_SDK
using Boundary = UnityEngine.Experimental.XR.Boundary;
#endif
/// <summary>
/// Provides access to the Oculus boundary system.
/// </summary>
public class OVRBoundary
{
/// <summary>
/// Specifies a tracked node that can be queried through the boundary system.
/// </summary>
public enum Node
{
HandLeft = OVRPlugin.Node.HandLeft, ///< Tracks the left hand node.
HandRight = OVRPlugin.Node.HandRight, ///< Tracks the right hand node.
Head = OVRPlugin.Node.Head, ///< Tracks the head node.
}
/// <summary>
/// Specifies a boundary type surface.
/// </summary>
public enum BoundaryType
{
OuterBoundary = OVRPlugin.BoundaryType.OuterBoundary, ///< Outer boundary that closely matches the user's configured walls.
PlayArea = OVRPlugin.BoundaryType.PlayArea, ///< Smaller convex area inset within the outer boundary.
}
/// <summary>
/// Provides test results of boundary system queries.
/// </summary>
public struct BoundaryTestResult
{
public bool IsTriggering; ///< Returns true if the queried test would violate and/or trigger the tested boundary types.
public float ClosestDistance; ///< Returns the distance between the queried test object and the closest tested boundary type.
public Vector3 ClosestPoint; ///< Returns the closest point to the queried test object.
public Vector3 ClosestPointNormal; ///< Returns the normal of the closest point to the queried test object.
}
/// <summary>
/// Returns true if the boundary system is currently configured with valid boundary data.
/// </summary>
public bool GetConfigured()
{
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
return OVRPlugin.GetBoundaryConfigured();
else
{
#if !USING_XR_SDK && !REQUIRES_XR_SDK
return Boundary.configured;
#else
return false;
#endif
}
}
/// <summary>
/// Returns the results of testing a tracked node against the specified boundary type.
/// All points are returned in local tracking space shared by tracked nodes and accessible through OVRCameraRig's trackingSpace anchor.
/// </summary>
public OVRBoundary.BoundaryTestResult TestNode(OVRBoundary.Node node, OVRBoundary.BoundaryType boundaryType)
{
OVRPlugin.BoundaryTestResult ovrpRes = OVRPlugin.TestBoundaryNode((OVRPlugin.Node)node, (OVRPlugin.BoundaryType)boundaryType);
OVRBoundary.BoundaryTestResult res = new OVRBoundary.BoundaryTestResult()
{
IsTriggering = (ovrpRes.IsTriggering == OVRPlugin.Bool.True),
ClosestDistance = ovrpRes.ClosestDistance,
ClosestPoint = ovrpRes.ClosestPoint.FromFlippedZVector3f(),
ClosestPointNormal = ovrpRes.ClosestPointNormal.FromFlippedZVector3f(),
};
return res;
}
/// <summary>
/// Returns the results of testing a 3d point against the specified boundary type.
/// The test point is expected in local tracking space.
/// All points are returned in local tracking space shared by tracked nodes and accessible through OVRCameraRig's trackingSpace anchor.
/// </summary>
public OVRBoundary.BoundaryTestResult TestPoint(Vector3 point, OVRBoundary.BoundaryType boundaryType)
{
OVRPlugin.BoundaryTestResult ovrpRes = OVRPlugin.TestBoundaryPoint(point.ToFlippedZVector3f(), (OVRPlugin.BoundaryType)boundaryType);
OVRBoundary.BoundaryTestResult res = new OVRBoundary.BoundaryTestResult()
{
IsTriggering = (ovrpRes.IsTriggering == OVRPlugin.Bool.True),
ClosestDistance = ovrpRes.ClosestDistance,
ClosestPoint = ovrpRes.ClosestPoint.FromFlippedZVector3f(),
ClosestPointNormal = ovrpRes.ClosestPointNormal.FromFlippedZVector3f(),
};
return res;
}
private static int cachedVector3fSize = Marshal.SizeOf(typeof(OVRPlugin.Vector3f));
private static OVRNativeBuffer cachedGeometryNativeBuffer = new OVRNativeBuffer(0);
private static float[] cachedGeometryManagedBuffer = new float[0];
private List<Vector3> cachedGeometryList = new List<Vector3>();
/// <summary>
/// Returns an array of 3d points (in clockwise order) that define the specified boundary type.
/// All points are returned in local tracking space shared by tracked nodes and accessible through OVRCameraRig's trackingSpace anchor.
/// </summary>
public Vector3[] GetGeometry(OVRBoundary.BoundaryType boundaryType)
{
if (OVRManager.loadedXRDevice != OVRManager.XRDevice.Oculus)
{
#if !USING_XR_SDK && !REQUIRES_XR_SDK
if (Boundary.TryGetGeometry(cachedGeometryList, (boundaryType == BoundaryType.PlayArea) ? Boundary.Type.PlayArea : Boundary.Type.TrackedArea))
{
Vector3[] arr = cachedGeometryList.ToArray();
return arr;
}
#endif
Debug.LogError("This functionality is not supported in your current version of Unity.");
return null;
}
int pointsCount = 0;
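// First call with IntPtr.Zero queries only the point count; the second call below fills the native buffer, which is then marshaled into a managed float array.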
if (OVRPlugin.GetBoundaryGeometry2((OVRPlugin.BoundaryType)boundaryType, IntPtr.Zero, ref pointsCount))
{
if (pointsCount > 0)
{
int requiredNativeBufferCapacity = pointsCount * cachedVector3fSize;
if (cachedGeometryNativeBuffer.GetCapacity() < requiredNativeBufferCapacity)
cachedGeometryNativeBuffer.Reset(requiredNativeBufferCapacity);
int requiredManagedBufferCapacity = pointsCount * 3;
if (cachedGeometryManagedBuffer.Length < requiredManagedBufferCapacity)
cachedGeometryManagedBuffer = new float[requiredManagedBufferCapacity];
if (OVRPlugin.GetBoundaryGeometry2((OVRPlugin.BoundaryType)boundaryType, cachedGeometryNativeBuffer.GetPointer(), ref pointsCount))
{
Marshal.Copy(cachedGeometryNativeBuffer.GetPointer(), cachedGeometryManagedBuffer, 0, requiredManagedBufferCapacity);
Vector3[] points = new Vector3[pointsCount];
for (int i = 0; i < pointsCount; i++)
{
points[i] = new OVRPlugin.Vector3f()
{
x = cachedGeometryManagedBuffer[3 * i + 0],
y = cachedGeometryManagedBuffer[3 * i + 1],
z = cachedGeometryManagedBuffer[3 * i + 2],
}.FromFlippedZVector3f();
}
return points;
}
}
}
return new Vector3[0];
}
/// <summary>
/// Returns a vector that indicates the spatial dimensions of the specified boundary type. (x = width, y = height, z = depth)
/// </summary>
public Vector3 GetDimensions(OVRBoundary.BoundaryType boundaryType)
{
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
return OVRPlugin.GetBoundaryDimensions((OVRPlugin.BoundaryType)boundaryType).FromVector3f();
else
{
#if !USING_XR_SDK && !REQUIRES_XR_SDK
Vector3 dimensions;
if (Boundary.TryGetDimensions(out dimensions, (boundaryType == BoundaryType.PlayArea) ? Boundary.Type.PlayArea : Boundary.Type.TrackedArea))
return dimensions;
#endif
return Vector3.zero;
}
}
/// <summary>
/// Returns true if the boundary system is currently visible.
/// </summary>
public bool GetVisible()
{
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
return OVRPlugin.GetBoundaryVisible();
else
{
#if !USING_XR_SDK && !REQUIRES_XR_SDK
return Boundary.visible;
#else
return false;
#endif
}
}
/// <summary>
/// Requests that the boundary system visibility be set to the specified value.
/// The actual visibility can be overridden by the system (e.g., a proximity trigger) or by the user (boundary system disabled).
/// </summary>
public void SetVisible(bool value)
{
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
OVRPlugin.SetBoundaryVisible(value);
else
{
#if !USING_XR_SDK && !REQUIRES_XR_SDK
Boundary.visible = value;
#endif
}
}
}
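
A minimal consumption sketch for the boundary API above, instantiating OVRBoundary directly (the SDK's OVRManager is assumed to also expose a shared instance); PlayAreaDebugDraw is a hypothetical name:

using UnityEngine;

// Hypothetical consumer of the boundary API above: logs the play-area size
// and draws its outline in the Scene view. Points come back in local
// tracking space, so parent this under the rig's TrackingSpace anchor for
// world alignment.
public class PlayAreaDebugDraw : MonoBehaviour
{
    private readonly OVRBoundary boundary = new OVRBoundary();

    void Update()
    {
        if (!boundary.GetConfigured())
            return;

        Vector3 size = boundary.GetDimensions(OVRBoundary.BoundaryType.PlayArea);
        Debug.Log("Play area: " + size.x + " x " + size.z + " m");

        Vector3[] points = boundary.GetGeometry(OVRBoundary.BoundaryType.PlayArea);
        if (points == null || points.Length < 2)
            return;
        for (int i = 0; i < points.Length; i++)
        {
            // connect consecutive points, closing the loop back to the start
            Vector3 a = transform.TransformPoint(points[i]);
            Vector3 b = transform.TransformPoint(points[(i + 1) % points.Length]);
            Debug.DrawLine(a, b, Color.green);
        }
    }
}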

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 332b8e08854932543ba356eec601c0ef
timeCreated: 1470352252
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,446 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if USING_XR_MANAGEMENT && USING_XR_SDK_OCULUS
#define USING_XR_SDK
#endif
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using Node = UnityEngine.XR.XRNode;
/// <summary>
/// A head-tracked stereoscopic virtual reality camera rig.
/// </summary>
[ExecuteInEditMode]
public class OVRCameraRig : MonoBehaviour
{
/// <summary>
/// The left eye camera.
/// </summary>
public Camera leftEyeCamera { get { return (usePerEyeCameras) ? _leftEyeCamera : _centerEyeCamera; } }
/// <summary>
/// The right eye camera.
/// </summary>
public Camera rightEyeCamera { get { return (usePerEyeCameras) ? _rightEyeCamera : _centerEyeCamera; } }
/// <summary>
/// Provides a root transform for all anchors in tracking space.
/// </summary>
public Transform trackingSpace { get; private set; }
/// <summary>
/// Always coincides with the pose of the left eye.
/// </summary>
public Transform leftEyeAnchor { get; private set; }
/// <summary>
/// Always coincides with the average of the left and right eye poses.
/// </summary>
public Transform centerEyeAnchor { get; private set; }
/// <summary>
/// Always coincides with the pose of the right eye.
/// </summary>
public Transform rightEyeAnchor { get; private set; }
/// <summary>
/// Always coincides with the pose of the left hand.
/// </summary>
public Transform leftHandAnchor { get; private set; }
/// <summary>
/// Always coincides with the pose of the right hand.
/// </summary>
public Transform rightHandAnchor { get; private set; }
/// <summary>
/// Anchors controller pose to fix offset issues for the left hand.
/// </summary>
public Transform leftControllerAnchor { get; private set; }
/// <summary>
/// Anchors controller pose to fix offset issues for the right hand.
/// </summary>
public Transform rightControllerAnchor { get; private set; }
/// <summary>
/// Always coincides with the pose of the sensor.
/// </summary>
public Transform trackerAnchor { get; private set; }
/// <summary>
/// Occurs when the eye pose anchors have been set.
/// </summary>
public event System.Action<OVRCameraRig> UpdatedAnchors;
/// <summary>
/// If true, separate cameras will be used for the left and right eyes.
/// </summary>
public bool usePerEyeCameras = false;
/// <summary>
/// If true, all tracked anchors are updated in FixedUpdate instead of Update to favor physics fidelity.
/// \note: This will cause visible judder unless you tick exactly once per frame using a custom physics
/// update, because you'll be sampling the position at different times into each frame.
/// </summary>
public bool useFixedUpdateForTracking = false;
/// <summary>
/// If true, the cameras on the eyeAnchors will be disabled.
/// \note: The main camera of the game will be used to provide VR rendering, and the tracking space anchors will still be updated to provide reference poses.
/// </summary>
public bool disableEyeAnchorCameras = false;
protected bool _skipUpdate = false;
protected readonly string trackingSpaceName = "TrackingSpace";
protected readonly string trackerAnchorName = "TrackerAnchor";
protected readonly string leftEyeAnchorName = "LeftEyeAnchor";
protected readonly string centerEyeAnchorName = "CenterEyeAnchor";
protected readonly string rightEyeAnchorName = "RightEyeAnchor";
protected readonly string leftHandAnchorName = "LeftHandAnchor";
protected readonly string rightHandAnchorName = "RightHandAnchor";
protected readonly string leftControllerAnchorName = "LeftControllerAnchor";
protected readonly string rightControllerAnchorName = "RightControllerAnchor";
protected Camera _centerEyeCamera;
protected Camera _leftEyeCamera;
protected Camera _rightEyeCamera;
#region Unity Messages
protected virtual void Awake()
{
_skipUpdate = true;
EnsureGameObjectIntegrity();
}
protected virtual void Start()
{
UpdateAnchors(true, true);
Application.onBeforeRender += OnBeforeRenderCallback;
}
protected virtual void FixedUpdate()
{
if (useFixedUpdateForTracking)
UpdateAnchors(true, true);
}
protected virtual void Update()
{
_skipUpdate = false;
if (!useFixedUpdateForTracking)
UpdateAnchors(true, true);
}
protected virtual void OnDestroy()
{
Application.onBeforeRender -= OnBeforeRenderCallback;
}
#endregion
protected virtual void UpdateAnchors(bool updateEyeAnchors, bool updateHandAnchors)
{
if (!OVRManager.OVRManagerinitialized)
return;
EnsureGameObjectIntegrity();
if (!Application.isPlaying)
return;
if (_skipUpdate)
{
centerEyeAnchor.FromOVRPose(OVRPose.identity, true);
leftEyeAnchor.FromOVRPose(OVRPose.identity, true);
rightEyeAnchor.FromOVRPose(OVRPose.identity, true);
return;
}
bool monoscopic = OVRManager.instance.monoscopic;
bool hmdPresent = OVRNodeStateProperties.IsHmdPresent();
OVRPose tracker = OVRManager.tracker.GetPose();
trackerAnchor.localRotation = tracker.orientation;
Quaternion emulatedRotation = Quaternion.Euler(-OVRManager.instance.headPoseRelativeOffsetRotation.x, -OVRManager.instance.headPoseRelativeOffsetRotation.y, OVRManager.instance.headPoseRelativeOffsetRotation.z);
//Note: in the below code, when using UnityEngine's API, we only update anchor transforms if we have a new, fresh value this frame.
//If we don't, it could mean that tracking is lost, etc. so the pose should not change in the virtual world.
//This can be thought of as similar to calling InputTracking GetLocalPosition and Rotation, but only for doing so when the pose is valid.
//If false is returned for any of these calls, then a new pose is not valid and thus should not be updated.
if (updateEyeAnchors)
{
if (hmdPresent)
{
Vector3 centerEyePosition = Vector3.zero;
Quaternion centerEyeRotation = Quaternion.identity;
if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.CenterEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyePosition))
centerEyeAnchor.localPosition = centerEyePosition;
if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.CenterEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyeRotation))
centerEyeAnchor.localRotation = centerEyeRotation;
}
else
{
centerEyeAnchor.localRotation = emulatedRotation;
centerEyeAnchor.localPosition = OVRManager.instance.headPoseRelativeOffsetTranslation;
}
if (!hmdPresent || monoscopic)
{
leftEyeAnchor.localPosition = centerEyeAnchor.localPosition;
rightEyeAnchor.localPosition = centerEyeAnchor.localPosition;
leftEyeAnchor.localRotation = centerEyeAnchor.localRotation;
rightEyeAnchor.localRotation = centerEyeAnchor.localRotation;
}
else
{
Vector3 leftEyePosition = Vector3.zero;
Vector3 rightEyePosition = Vector3.zero;
Quaternion leftEyeRotation = Quaternion.identity;
Quaternion rightEyeRotation = Quaternion.identity;
if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.LeftEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render, out leftEyePosition))
leftEyeAnchor.localPosition = leftEyePosition;
if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.RightEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render, out rightEyePosition))
rightEyeAnchor.localPosition = rightEyePosition;
if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.LeftEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render, out leftEyeRotation))
leftEyeAnchor.localRotation = leftEyeRotation;
if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.RightEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render, out rightEyeRotation))
rightEyeAnchor.localRotation = rightEyeRotation;
}
}
if (updateHandAnchors)
{
//Controller offset handling: on OpenVR we want to set the local poses as specified by Unity, while on other devices the OVRInput local position is the correct anchor.
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
{
Vector3 leftPos = Vector3.zero;
Vector3 rightPos = Vector3.zero;
Quaternion leftQuat = Quaternion.identity;
Quaternion rightQuat = Quaternion.identity;
if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.LeftHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftPos))
leftHandAnchor.localPosition = leftPos;
if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.RightHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightPos))
rightHandAnchor.localPosition = rightPos;
if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.LeftHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftQuat))
leftHandAnchor.localRotation = leftQuat;
if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.RightHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightQuat))
rightHandAnchor.localRotation = rightQuat;
}
else
{
leftHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.LTouch);
rightHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch);
leftHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.LTouch);
rightHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.RTouch);
}
trackerAnchor.localPosition = tracker.position;
OVRPose leftOffsetPose = OVRPose.identity;
OVRPose rightOffsetPose = OVRPose.identity;
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
{
leftOffsetPose = OVRManager.GetOpenVRControllerOffset(Node.LeftHand);
rightOffsetPose = OVRManager.GetOpenVRControllerOffset(Node.RightHand);
//Sets poses of left and right nodes, local to the tracking space.
OVRManager.SetOpenVRLocalPose(trackingSpace.InverseTransformPoint(leftControllerAnchor.position),
trackingSpace.InverseTransformPoint(rightControllerAnchor.position),
Quaternion.Inverse(trackingSpace.rotation) * leftControllerAnchor.rotation,
Quaternion.Inverse(trackingSpace.rotation) * rightControllerAnchor.rotation);
}
rightControllerAnchor.localPosition = rightOffsetPose.position;
rightControllerAnchor.localRotation = rightOffsetPose.orientation;
leftControllerAnchor.localPosition = leftOffsetPose.position;
leftControllerAnchor.localRotation = leftOffsetPose.orientation;
}
RaiseUpdatedAnchorsEvent();
}
protected virtual void OnBeforeRenderCallback()
{
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus) //Restrict late-update to only Oculus devices
{
bool controllersNeedUpdate = OVRManager.instance.LateControllerUpdate;
#if USING_XR_SDK
//For the XR SDK, we need to late-update the head pose, not just the controllers, because that functionality
//is no longer built into the engine. Under legacy VR support, the late camera update happens by default; with the
//XR SDK it requires a Tracked Pose Driver, which we do not use, so we manually late-update the camera poses here.
UpdateAnchors(true, controllersNeedUpdate);
#else
if (controllersNeedUpdate)
UpdateAnchors(false, true);
#endif
}
}
protected virtual void RaiseUpdatedAnchorsEvent()
{
if (UpdatedAnchors != null)
{
UpdatedAnchors(this);
}
}
public virtual void EnsureGameObjectIntegrity()
{
bool monoscopic = OVRManager.instance != null ? OVRManager.instance.monoscopic : false;
if (trackingSpace == null)
trackingSpace = ConfigureAnchor(null, trackingSpaceName);
if (leftEyeAnchor == null)
leftEyeAnchor = ConfigureAnchor(trackingSpace, leftEyeAnchorName);
if (centerEyeAnchor == null)
centerEyeAnchor = ConfigureAnchor(trackingSpace, centerEyeAnchorName);
if (rightEyeAnchor == null)
rightEyeAnchor = ConfigureAnchor(trackingSpace, rightEyeAnchorName);
if (leftHandAnchor == null)
leftHandAnchor = ConfigureAnchor(trackingSpace, leftHandAnchorName);
if (rightHandAnchor == null)
rightHandAnchor = ConfigureAnchor(trackingSpace, rightHandAnchorName);
if (trackerAnchor == null)
trackerAnchor = ConfigureAnchor(trackingSpace, trackerAnchorName);
if (leftControllerAnchor == null)
leftControllerAnchor = ConfigureAnchor(leftHandAnchor, leftControllerAnchorName);
if (rightControllerAnchor == null)
rightControllerAnchor = ConfigureAnchor(rightHandAnchor, rightControllerAnchorName);
if (_centerEyeCamera == null || _leftEyeCamera == null || _rightEyeCamera == null)
{
_centerEyeCamera = centerEyeAnchor.GetComponent<Camera>();
_leftEyeCamera = leftEyeAnchor.GetComponent<Camera>();
_rightEyeCamera = rightEyeAnchor.GetComponent<Camera>();
if (_centerEyeCamera == null)
{
_centerEyeCamera = centerEyeAnchor.gameObject.AddComponent<Camera>();
_centerEyeCamera.tag = "MainCamera";
}
if (_leftEyeCamera == null)
{
_leftEyeCamera = leftEyeAnchor.gameObject.AddComponent<Camera>();
_leftEyeCamera.tag = "MainCamera";
}
if (_rightEyeCamera == null)
{
_rightEyeCamera = rightEyeAnchor.gameObject.AddComponent<Camera>();
_rightEyeCamera.tag = "MainCamera";
}
_centerEyeCamera.stereoTargetEye = StereoTargetEyeMask.Both;
_leftEyeCamera.stereoTargetEye = StereoTargetEyeMask.Left;
_rightEyeCamera.stereoTargetEye = StereoTargetEyeMask.Right;
}
if (monoscopic && !OVRPlugin.EyeTextureArrayEnabled)
{
// Output to left eye only when in monoscopic mode
if (_centerEyeCamera.stereoTargetEye != StereoTargetEyeMask.Left)
{
_centerEyeCamera.stereoTargetEye = StereoTargetEyeMask.Left;
}
}
else
{
if (_centerEyeCamera.stereoTargetEye != StereoTargetEyeMask.Both)
{
_centerEyeCamera.stereoTargetEye = StereoTargetEyeMask.Both;
}
}
if (disableEyeAnchorCameras)
{
_centerEyeCamera.enabled = false;
_leftEyeCamera.enabled = false;
_rightEyeCamera.enabled = false;
}
else
{
// If the camera enable states don't match the requested per-eye configuration, skip this update while they are reconfigured (the right eye camera is disabled in monoscopic mode)
if (_centerEyeCamera.enabled == usePerEyeCameras ||
_leftEyeCamera.enabled == !usePerEyeCameras ||
_rightEyeCamera.enabled == !(usePerEyeCameras && (!monoscopic || OVRPlugin.EyeTextureArrayEnabled)))
{
_skipUpdate = true;
}
_centerEyeCamera.enabled = !usePerEyeCameras;
_leftEyeCamera.enabled = usePerEyeCameras;
_rightEyeCamera.enabled = (usePerEyeCameras && (!monoscopic || OVRPlugin.EyeTextureArrayEnabled));
}
}
protected virtual Transform ConfigureAnchor(Transform root, string name)
{
Transform anchor = (root != null) ? root.Find(name) : null;
if (anchor == null)
{
anchor = transform.Find(name);
}
if (anchor == null)
{
anchor = new GameObject(name).transform;
}
anchor.name = name;
anchor.parent = (root != null) ? root : transform;
anchor.localScale = Vector3.one;
anchor.localPosition = Vector3.zero;
anchor.localRotation = Quaternion.identity;
return anchor;
}
public virtual Matrix4x4 ComputeTrackReferenceMatrix()
{
if (centerEyeAnchor == null)
{
Debug.LogError("centerEyeAnchor is required");
return Matrix4x4.identity;
}
// The ideal approach would be to use UnityEngine.VR.VRNode.TrackingReference; then we would not have to depend on the OVRCameraRig. Unfortunately, it is not available in Unity 5.4.3
OVRPose headPose = OVRPose.identity;
Vector3 pos;
Quaternion rot;
if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Position, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out pos))
headPose.position = pos;
if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.Head, NodeStatePropertyType.Orientation, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out rot))
headPose.orientation = rot;
OVRPose invHeadPose = headPose.Inverse();
Matrix4x4 invHeadMatrix = Matrix4x4.TRS(invHeadPose.position, invHeadPose.orientation, Vector3.one);
Matrix4x4 ret = centerEyeAnchor.localToWorldMatrix * invHeadMatrix;
return ret;
}
}
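
// Illustrative usage sketch (editor's addition, not part of the SDK sources): a
// minimal subscriber for the UpdatedAnchors event raised above, mirroring the
// center eye pose onto another transform each frame. The OVR_EXAMPLE_CODE symbol
// is hypothetical and undefined by default, so this block never affects builds;
// "mirrorTarget" is an assumed field, not an SDK member.
#if OVR_EXAMPLE_CODE
public class OVRCameraRigMirrorExample : MonoBehaviour
{
	public OVRCameraRig rig;       // assign in the Inspector
	public Transform mirrorTarget; // transform to drive from the head pose

	void OnEnable() { rig.UpdatedAnchors += OnAnchorsUpdated; }
	void OnDisable() { rig.UpdatedAnchors -= OnAnchorsUpdated; }

	// Runs after UpdateAnchors() has written fresh eye/hand poses for the frame.
	void OnAnchorsUpdated(OVRCameraRig updatedRig)
	{
		mirrorTarget.SetPositionAndRotation(
			updatedRig.centerEyeAnchor.position,
			updatedRig.centerEyeAnchor.rotation);
	}
}
#endif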

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: df9f338034892c44ebb62d97894772f1
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,665 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if USING_XR_MANAGEMENT && USING_XR_SDK_OCULUS
#define USING_XR_SDK
#endif
#if UNITY_2020_1_OR_NEWER
#define REQUIRES_XR_SDK
#endif
using UnityEngine;
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
#if USING_XR_SDK
using UnityEngine.XR;
using UnityEngine.Experimental.XR;
#endif
using InputTracking = UnityEngine.XR.InputTracking;
using Node = UnityEngine.XR.XRNode;
using NodeState = UnityEngine.XR.XRNodeState;
using Device = UnityEngine.XR.XRDevice;
/// <summary>
/// Miscellaneous extension methods that any script can use.
/// </summary>
public static class OVRExtensions
{
/// <summary>
/// Converts the given world-space transform to an OVRPose in tracking space.
/// </summary>
public static OVRPose ToTrackingSpacePose(this Transform transform, Camera camera)
{
//Initialized to identity, but on all Oculus headsets the calls below overwrite it with the runtime's pose value, so identity is never actually returned.
OVRPose headPose = OVRPose.identity;
Vector3 pos;
Quaternion rot;
if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Position, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out pos))
headPose.position = pos;
if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.Head, NodeStatePropertyType.Orientation, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out rot))
headPose.orientation = rot;
var ret = headPose * transform.ToHeadSpacePose(camera);
return ret;
}
/// <summary>
/// Converts the given pose from tracking-space to world-space.
/// </summary>
public static OVRPose ToWorldSpacePose(OVRPose trackingSpacePose)
{
OVRPose headPose = OVRPose.identity;
Vector3 pos;
Quaternion rot;
if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Position, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out pos))
headPose.position = pos;
if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.Head, NodeStatePropertyType.Orientation, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out rot))
headPose.orientation = rot;
// Transform from tracking-Space to head-Space
OVRPose poseInHeadSpace = headPose.Inverse() * trackingSpacePose;
// Transform from head space to world space
OVRPose ret = Camera.main.transform.ToOVRPose() * poseInHeadSpace;
return ret;
}
/// <summary>
/// Converts the given world-space transform to an OVRPose in head space.
/// </summary>
public static OVRPose ToHeadSpacePose(this Transform transform, Camera camera)
{
return camera.transform.ToOVRPose().Inverse() * transform.ToOVRPose();
}
public static OVRPose ToOVRPose(this Transform t, bool isLocal = false)
{
OVRPose pose;
pose.orientation = (isLocal) ? t.localRotation : t.rotation;
pose.position = (isLocal) ? t.localPosition : t.position;
return pose;
}
public static void FromOVRPose(this Transform t, OVRPose pose, bool isLocal = false)
{
if (isLocal)
{
t.localRotation = pose.orientation;
t.localPosition = pose.position;
}
else
{
t.rotation = pose.orientation;
t.position = pose.position;
}
}
public static OVRPose ToOVRPose(this OVRPlugin.Posef p)
{
return new OVRPose()
{
position = new Vector3(p.Position.x, p.Position.y, -p.Position.z),
orientation = new Quaternion(-p.Orientation.x, -p.Orientation.y, p.Orientation.z, p.Orientation.w)
};
}
public static OVRTracker.Frustum ToFrustum(this OVRPlugin.Frustumf f)
{
return new OVRTracker.Frustum()
{
nearZ = f.zNear,
farZ = f.zFar,
fov = new Vector2()
{
x = Mathf.Rad2Deg * f.fovX,
y = Mathf.Rad2Deg * f.fovY
}
};
}
public static Color FromColorf(this OVRPlugin.Colorf c)
{
return new Color() { r = c.r, g = c.g, b = c.b, a = c.a };
}
public static OVRPlugin.Colorf ToColorf(this Color c)
{
return new OVRPlugin.Colorf() { r = c.r, g = c.g, b = c.b, a = c.a };
}
public static Vector3 FromVector3f(this OVRPlugin.Vector3f v)
{
return new Vector3() { x = v.x, y = v.y, z = v.z };
}
public static Vector3 FromFlippedXVector3f(this OVRPlugin.Vector3f v)
{
return new Vector3() { x = -v.x, y = v.y, z = v.z };
}
public static Vector3 FromFlippedZVector3f(this OVRPlugin.Vector3f v)
{
return new Vector3() { x = v.x, y = v.y, z = -v.z };
}
public static OVRPlugin.Vector3f ToVector3f(this Vector3 v)
{
return new OVRPlugin.Vector3f() { x = v.x, y = v.y, z = v.z };
}
public static OVRPlugin.Vector3f ToFlippedXVector3f(this Vector3 v)
{
return new OVRPlugin.Vector3f() { x = -v.x, y = v.y, z = v.z };
}
public static OVRPlugin.Vector3f ToFlippedZVector3f(this Vector3 v)
{
return new OVRPlugin.Vector3f() { x = v.x, y = v.y, z = -v.z };
}
public static Quaternion FromQuatf(this OVRPlugin.Quatf q)
{
return new Quaternion() { x = q.x, y = q.y, z = q.z, w = q.w };
}
public static Quaternion FromFlippedXQuatf(this OVRPlugin.Quatf q)
{
return new Quaternion() { x = q.x, y = -q.y, z = -q.z, w = q.w };
}
public static Quaternion FromFlippedZQuatf(this OVRPlugin.Quatf q)
{
return new Quaternion() { x = -q.x, y = -q.y, z = q.z, w = q.w };
}
public static OVRPlugin.Quatf ToQuatf(this Quaternion q)
{
return new OVRPlugin.Quatf() { x = q.x, y = q.y, z = q.z, w = q.w };
}
public static OVRPlugin.Quatf ToFlippedXQuatf(this Quaternion q)
{
return new OVRPlugin.Quatf() { x = q.x, y = -q.y, z = -q.z, w = q.w };
}
public static OVRPlugin.Quatf ToFlippedZQuatf(this Quaternion q)
{
return new OVRPlugin.Quatf() { x = -q.x, y = -q.y, z = q.z, w = q.w };
}
public static OVR.OpenVR.HmdMatrix34_t ConvertToHMDMatrix34(this Matrix4x4 m)
{
OVR.OpenVR.HmdMatrix34_t pose = new OVR.OpenVR.HmdMatrix34_t();
pose.m0 = m[0, 0];
pose.m1 = m[0, 1];
pose.m2 = -m[0, 2];
pose.m3 = m[0, 3];
pose.m4 = m[1, 0];
pose.m5 = m[1, 1];
pose.m6 = -m[1, 2];
pose.m7 = m[1, 3];
pose.m8 = -m[2, 0];
pose.m9 = -m[2, 1];
pose.m10 = m[2, 2];
pose.m11 = -m[2, 3];
return pose;
}
public static Transform FindChildRecursive(this Transform parent, string name)
{
foreach (Transform child in parent)
{
if (child.name.Contains(name))
return child;
var result = child.FindChildRecursive(name);
if (result != null)
return result;
}
return null;
}
}
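
// Illustrative sketch (editor's addition, not part of the SDK sources): the
// FlippedZ helpers above convert between the plugin's right-handed coordinates and
// Unity's left-handed ones by negating z (and the quaternion's x/y), so a round
// trip returns the original value. OVR_EXAMPLE_CODE is hypothetical and undefined
// by default.
#if OVR_EXAMPLE_CODE
public static class OVRExtensionsExample
{
	public static void RoundTrip()
	{
		Vector3 unityPos = new Vector3(1.0f, 2.0f, 3.0f);
		OVRPlugin.Vector3f pluginPos = unityPos.ToFlippedZVector3f(); // z negated
		Vector3 back = pluginPos.FromFlippedZVector3f();              // z negated again
		Debug.Log(unityPos == back); // True
	}
}
#endif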
//Types of node state properties that can be queried through UnityEngine.XR
public enum NodeStatePropertyType
{
Acceleration,
AngularAcceleration,
Velocity,
AngularVelocity,
Position,
Orientation
}
public static class OVRNodeStateProperties
{
private static List<NodeState> nodeStateList = new List<NodeState>();
public static bool IsHmdPresent()
{
if (OVRManager.OVRManagerinitialized && OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
return OVRPlugin.hmdPresent;
#if USING_XR_SDK
XRDisplaySubsystem currentDisplaySubsystem = OVRManager.GetCurrentDisplaySubsystem();
if (currentDisplaySubsystem != null)
return currentDisplaySubsystem.running; //In 2019.3, this should be changed to currentDisplaySubsystem.isConnected, but this is a fine placeholder for now.
return false;
#elif REQUIRES_XR_SDK
return false;
#else
return Device.isPresent;
#endif
}
public static bool GetNodeStatePropertyVector3(Node nodeType, NodeStatePropertyType propertyType, OVRPlugin.Node ovrpNodeType, OVRPlugin.Step stepType, out Vector3 retVec)
{
retVec = Vector3.zero;
switch (propertyType)
{
case NodeStatePropertyType.Acceleration:
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
{
retVec = OVRPlugin.GetNodeAcceleration(ovrpNodeType, stepType).FromFlippedZVector3f();
return true;
}
if (GetUnityXRNodeStateVector3(nodeType, NodeStatePropertyType.Acceleration, out retVec))
return true;
break;
case NodeStatePropertyType.AngularAcceleration:
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
{
retVec = OVRPlugin.GetNodeAngularAcceleration(ovrpNodeType, stepType).FromFlippedZVector3f();
return true;
}
if (GetUnityXRNodeStateVector3(nodeType, NodeStatePropertyType.AngularAcceleration, out retVec))
return true;
break;
case NodeStatePropertyType.Velocity:
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
{
retVec = OVRPlugin.GetNodeVelocity(ovrpNodeType, stepType).FromFlippedZVector3f();
return true;
}
if (GetUnityXRNodeStateVector3(nodeType, NodeStatePropertyType.Velocity, out retVec))
return true;
break;
case NodeStatePropertyType.AngularVelocity:
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
{
retVec = OVRPlugin.GetNodeAngularVelocity(ovrpNodeType, stepType).FromFlippedZVector3f();
return true;
}
if (GetUnityXRNodeStateVector3(nodeType, NodeStatePropertyType.AngularVelocity, out retVec))
return true;
break;
case NodeStatePropertyType.Position:
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
{
retVec = OVRPlugin.GetNodePose(ovrpNodeType, stepType).ToOVRPose().position;
return true;
}
if (GetUnityXRNodeStateVector3(nodeType, NodeStatePropertyType.Position, out retVec))
return true;
break;
}
return false;
}
public static bool GetNodeStatePropertyQuaternion(Node nodeType, NodeStatePropertyType propertyType, OVRPlugin.Node ovrpNodeType, OVRPlugin.Step stepType, out Quaternion retQuat)
{
retQuat = Quaternion.identity;
switch (propertyType)
{
case NodeStatePropertyType.Orientation:
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
{
retQuat = OVRPlugin.GetNodePose(ovrpNodeType, stepType).ToOVRPose().orientation;
return true;
}
if (GetUnityXRNodeStateQuaternion(nodeType, NodeStatePropertyType.Orientation, out retQuat))
return true;
break;
}
return false;
}
private static bool ValidateProperty(Node nodeType, ref NodeState requestedNodeState)
{
InputTracking.GetNodeStates(nodeStateList);
if (nodeStateList.Count == 0)
return false;
bool nodeStateFound = false;
requestedNodeState = nodeStateList[0];
for (int i = 0; i < nodeStateList.Count; i++)
{
if (nodeStateList[i].nodeType == nodeType)
{
requestedNodeState = nodeStateList[i];
nodeStateFound = true;
break;
}
}
return nodeStateFound;
}
private static bool GetUnityXRNodeStateVector3(Node nodeType, NodeStatePropertyType propertyType, out Vector3 retVec)
{
retVec = Vector3.zero;
NodeState requestedNodeState = default(NodeState);
if (!ValidateProperty(nodeType, ref requestedNodeState))
return false;
if (propertyType == NodeStatePropertyType.Acceleration)
{
if (requestedNodeState.TryGetAcceleration(out retVec))
{
return true;
}
}
else if (propertyType == NodeStatePropertyType.AngularAcceleration)
{
if (requestedNodeState.TryGetAngularAcceleration(out retVec))
{
return true;
}
}
else if (propertyType == NodeStatePropertyType.Velocity)
{
if (requestedNodeState.TryGetVelocity(out retVec))
{
return true;
}
}
else if (propertyType == NodeStatePropertyType.AngularVelocity)
{
if (requestedNodeState.TryGetAngularVelocity(out retVec))
{
return true;
}
}
else if (propertyType == NodeStatePropertyType.Position)
{
if (requestedNodeState.TryGetPosition(out retVec))
{
return true;
}
}
return false;
}
private static bool GetUnityXRNodeStateQuaternion(Node nodeType, NodeStatePropertyType propertyType, out Quaternion retQuat)
{
retQuat = Quaternion.identity;
NodeState requestedNodeState = default(NodeState);
if (!ValidateProperty(nodeType, ref requestedNodeState))
return false;
if (propertyType == NodeStatePropertyType.Orientation)
{
if (requestedNodeState.TryGetRotation(out retQuat))
{
return true;
}
}
return false;
}
}
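
// Illustrative sketch (editor's addition, not part of the SDK sources): querying a
// node property through the helpers above, which read from OVRPlugin on Oculus
// devices and fall back to the UnityEngine.XR node states elsewhere.
// OVR_EXAMPLE_CODE is hypothetical and undefined by default.
#if OVR_EXAMPLE_CODE
public static class OVRNodeStateExample
{
	public static Vector3 GetHeadVelocity()
	{
		Vector3 velocity;
		if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Velocity, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out velocity))
			return velocity; // fresh, valid value for this frame
		return Vector3.zero; // tracking unavailable; caller should keep the last pose
	}
}
#endif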
/// <summary>
/// An affine transformation built from a Unity position and orientation.
/// </summary>
[System.Serializable]
public struct OVRPose
{
/// <summary>
/// A pose with no translation or rotation.
/// </summary>
public static OVRPose identity
{
get {
return new OVRPose()
{
position = Vector3.zero,
orientation = Quaternion.identity
};
}
}
public override bool Equals(System.Object obj)
{
return obj is OVRPose && this == (OVRPose)obj;
}
public override int GetHashCode()
{
return position.GetHashCode() ^ orientation.GetHashCode();
}
public static bool operator ==(OVRPose x, OVRPose y)
{
return x.position == y.position && x.orientation == y.orientation;
}
public static bool operator !=(OVRPose x, OVRPose y)
{
return !(x == y);
}
/// <summary>
/// The position.
/// </summary>
public Vector3 position;
/// <summary>
/// The orientation.
/// </summary>
public Quaternion orientation;
/// <summary>
/// Multiplies two poses.
/// </summary>
public static OVRPose operator*(OVRPose lhs, OVRPose rhs)
{
var ret = new OVRPose();
ret.position = lhs.position + lhs.orientation * rhs.position;
ret.orientation = lhs.orientation * rhs.orientation;
return ret;
}
/// <summary>
/// Computes the inverse of the given pose.
/// </summary>
public OVRPose Inverse()
{
OVRPose ret;
ret.orientation = Quaternion.Inverse(orientation);
ret.position = ret.orientation * -position;
return ret;
}
/// <summary>
/// Converts the pose from left- to right-handed or vice-versa.
/// </summary>
public OVRPose flipZ()
{
var ret = this;
ret.position.z = -ret.position.z;
ret.orientation.z = -ret.orientation.z;
ret.orientation.w = -ret.orientation.w;
return ret;
}
// Warning: this function is not a strict reverse of OVRPlugin.Posef.ToOVRPose(), even after flipZ()
public OVRPlugin.Posef ToPosef_Legacy()
{
return new OVRPlugin.Posef()
{
Position = position.ToVector3f(),
Orientation = orientation.ToQuatf()
};
}
public OVRPlugin.Posef ToPosef()
{
OVRPlugin.Posef result = new OVRPlugin.Posef();
result.Position.x = position.x;
result.Position.y = position.y;
result.Position.z = -position.z;
result.Orientation.x = -orientation.x;
result.Orientation.y = -orientation.y;
result.Orientation.z = orientation.z;
result.Orientation.w = orientation.w;
return result;
}
}
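
// Illustrative sketch (editor's addition, not part of the SDK sources): OVRPose
// composes like a rigid transform, so a world pose factors into parent * local and
// Inverse() recovers the local part. OVR_EXAMPLE_CODE is hypothetical and
// undefined by default.
#if OVR_EXAMPLE_CODE
public static class OVRPoseExample
{
	public static void Compose()
	{
		OVRPose parent = new OVRPose { position = new Vector3(0, 1, 0), orientation = Quaternion.Euler(0, 90, 0) };
		OVRPose local = new OVRPose { position = new Vector3(0, 0, 1), orientation = Quaternion.identity };
		OVRPose world = parent * local;               // rotate local.position by parent, then translate
		OVRPose recovered = parent.Inverse() * world; // equals local up to floating-point error
		Debug.Log(recovered.position);                // approximately (1, 1, 0)
	}
}
#endif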
/// <summary>
/// Encapsulates an 8-byte-aligned buffer of unmanaged memory.
/// </summary>
public class OVRNativeBuffer : IDisposable
{
private bool disposed = false;
private int m_numBytes = 0;
private IntPtr m_ptr = IntPtr.Zero;
/// <summary>
/// Creates a buffer of the specified size.
/// </summary>
public OVRNativeBuffer(int numBytes)
{
Reallocate(numBytes);
}
/// <summary>
/// Releases unmanaged resources and performs other cleanup operations before the <see cref="OVRNativeBuffer"/> is
/// reclaimed by garbage collection.
/// </summary>
~OVRNativeBuffer()
{
Dispose(false);
}
/// <summary>
/// Reallocates the buffer with the specified new size.
/// </summary>
public void Reset(int numBytes)
{
Reallocate(numBytes);
}
/// <summary>
/// The current number of bytes in the buffer.
/// </summary>
public int GetCapacity()
{
return m_numBytes;
}
/// <summary>
/// A pointer to the unmanaged memory in the buffer, starting at the given offset in bytes.
/// </summary>
public IntPtr GetPointer(int byteOffset = 0)
{
if (byteOffset < 0 || byteOffset >= m_numBytes)
return IntPtr.Zero;
return (byteOffset == 0) ? m_ptr : new IntPtr(m_ptr.ToInt64() + byteOffset);
}
/// <summary>
/// Releases all resource used by the <see cref="OVRNativeBuffer"/> object.
/// </summary>
/// <remarks>Call <see cref="Dispose"/> when you are finished using the <see cref="OVRNativeBuffer"/>. The <see cref="Dispose"/>
/// method leaves the <see cref="OVRNativeBuffer"/> in an unusable state. After calling <see cref="Dispose"/>, you must
/// release all references to the <see cref="OVRNativeBuffer"/> so the garbage collector can reclaim the memory that
/// the <see cref="OVRNativeBuffer"/> was occupying.</remarks>
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
private void Dispose(bool disposing)
{
if (disposed)
return;
if (disposing)
{
// dispose managed resources
}
// dispose unmanaged resources
Release();
disposed = true;
}
private void Reallocate(int numBytes)
{
Release();
if (numBytes > 0)
{
m_ptr = Marshal.AllocHGlobal(numBytes);
m_numBytes = numBytes;
}
else
{
m_ptr = IntPtr.Zero;
m_numBytes = 0;
}
}
private void Release()
{
if (m_ptr != IntPtr.Zero)
{
Marshal.FreeHGlobal(m_ptr);
m_ptr = IntPtr.Zero;
m_numBytes = 0;
}
}
}
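
// Illustrative sketch (editor's addition, not part of the SDK sources): typical
// OVRNativeBuffer lifetime — allocate, copy managed bytes into the unmanaged
// block, then Dispose to free it (the finalizer is only a safety net). Assumes a
// non-empty input array. OVR_EXAMPLE_CODE is hypothetical and undefined by default.
#if OVR_EXAMPLE_CODE
public static class OVRNativeBufferExample
{
	public static void CopyBytes(byte[] managed)
	{
		using (OVRNativeBuffer buffer = new OVRNativeBuffer(managed.Length))
		{
			System.Runtime.InteropServices.Marshal.Copy(managed, 0, buffer.GetPointer(), managed.Length);
			Debug.Log("capacity: " + buffer.GetCapacity());
		} // Dispose() releases the unmanaged memory here
	}
}
#endif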

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 176f8d665b1d78048b1e87956698df6b
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,139 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if USING_XR_MANAGEMENT && USING_XR_SDK_OCULUS
#define USING_XR_SDK
#endif
#if UNITY_2020_1_OR_NEWER
#define REQUIRES_XR_SDK
#endif
using UnityEngine;
using System.Collections;
#if USING_XR_SDK
using UnityEngine.XR;
using UnityEngine.Experimental.XR;
#endif
/// <summary>
/// This is a simple behavior that can be attached to a parent of the CameraRig in order
/// to provide movement via the gamepad. This is useful when testing an application in
/// the Unity editor without the HMD.
/// To use it, create a game object in your scene and drag your CameraRig to be a child
/// of the game object. Then, add the OVRDebugHeadController behavior to the game object.
/// Alternatively, this behavior can be placed directly on the OVRCameraRig object, but
/// that is not guaranteed to work if OVRCameraRig functionality changes in the future.
/// In the parent case, the object with OVRDebugHeadController can be thought of as a
/// platform that your camera is attached to. When the platform moves or rotates, the
/// camera moves or rotates, but the camera can still move independently while "on" the
/// platform.
/// In general, this behavior should be disabled when not debugging.
/// </summary>
public class OVRDebugHeadController : MonoBehaviour
{
[SerializeField]
public bool AllowPitchLook = false;
[SerializeField]
public bool AllowYawLook = true;
[SerializeField]
public bool InvertPitch = false;
[SerializeField]
public float GamePad_PitchDegreesPerSec = 90.0f;
[SerializeField]
public float GamePad_YawDegreesPerSec = 90.0f;
[SerializeField]
public bool AllowMovement = false;
[SerializeField]
public float ForwardSpeed = 2.0f;
[SerializeField]
public float StrafeSpeed = 2.0f;
protected OVRCameraRig CameraRig = null;
void Awake()
{
// locate the camera rig so we can use it to get the current camera transform each frame
OVRCameraRig[] CameraRigs = gameObject.GetComponentsInChildren<OVRCameraRig>();
if( CameraRigs.Length == 0 )
Debug.LogWarning("OVRCamParent: No OVRCameraRig attached.");
else if (CameraRigs.Length > 1)
Debug.LogWarning("OVRCamParent: More then 1 OVRCameraRig attached.");
else
CameraRig = CameraRigs[0];
}
// Use this for initialization
void Start ()
{
}
// Update is called once per frame
void Update ()
{
if ( AllowMovement )
{
float gamePad_FwdAxis = OVRInput.Get(OVRInput.RawAxis2D.LThumbstick).y;
float gamePad_StrafeAxis = OVRInput.Get(OVRInput.RawAxis2D.LThumbstick).x;
Vector3 fwdMove = ( CameraRig.centerEyeAnchor.rotation * Vector3.forward ) * gamePad_FwdAxis * Time.deltaTime * ForwardSpeed;
Vector3 strafeMove = ( CameraRig.centerEyeAnchor.rotation * Vector3.right ) * gamePad_StrafeAxis * Time.deltaTime * StrafeSpeed;
transform.position += fwdMove + strafeMove;
}
bool hasDevice = false;
#if USING_XR_SDK
XRDisplaySubsystem currentDisplaySubsystem = OVRManager.GetCurrentDisplaySubsystem();
if (currentDisplaySubsystem != null)
hasDevice = currentDisplaySubsystem.running;
#elif REQUIRES_XR_SDK
hasDevice = false;
#else
hasDevice = UnityEngine.XR.XRDevice.isPresent;
#endif
if ( !hasDevice && ( AllowYawLook || AllowPitchLook ) )
{
Quaternion r = transform.rotation;
if ( AllowYawLook )
{
float gamePadYaw = OVRInput.Get(OVRInput.RawAxis2D.RThumbstick).x;
float yawAmount = gamePadYaw * Time.deltaTime * GamePad_YawDegreesPerSec;
Quaternion yawRot = Quaternion.AngleAxis( yawAmount, Vector3.up );
r = yawRot * r;
}
if ( AllowPitchLook )
{
float gamePadPitch = OVRInput.Get(OVRInput.RawAxis2D.RThumbstick).y;
if ( Mathf.Abs( gamePadPitch ) > 0.0001f )
{
if ( InvertPitch )
{
gamePadPitch *= -1.0f;
}
float pitchAmount = gamePadPitch * Time.deltaTime * GamePad_PitchDegreesPerSec;
Quaternion pitchRot = Quaternion.AngleAxis( pitchAmount, Vector3.left );
r = r * pitchRot;
}
}
transform.rotation = r;
}
}
}
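
// Illustrative sketch (editor's addition, not part of the SDK sources): building
// the hierarchy the class comment above describes — a platform object carrying the
// camera rig, with OVRDebugHeadController driving the platform. "rigPrefab" is an
// assumed field; OVR_EXAMPLE_CODE is hypothetical and undefined by default.
#if OVR_EXAMPLE_CODE
public class DebugRigSetupExample : MonoBehaviour
{
	public OVRCameraRig rigPrefab; // assign in the Inspector

	void Start()
	{
		GameObject platform = new GameObject("DebugPlatform");
		platform.AddComponent<OVRDebugHeadController>().AllowMovement = true;
		OVRCameraRig rig = Instantiate(rigPrefab);
		rig.transform.SetParent(platform.transform, false); // rig now rides the platform
	}
}
#endif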

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 563681618daa71b4c89f979b1fd7170b
timeCreated: 1433450365
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,387 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if USING_XR_MANAGEMENT && USING_XR_SDK_OCULUS
#define USING_XR_SDK
#endif
#if UNITY_2020_1_OR_NEWER
#define REQUIRES_XR_SDK
#endif
using System;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using UnityEngine;
using System.Collections.Generic;
#if USING_XR_SDK
using UnityEngine.XR;
using UnityEngine.Experimental.XR;
#endif
using InputTracking = UnityEngine.XR.InputTracking;
using Node = UnityEngine.XR.XRNode;
using Settings = UnityEngine.XR.XRSettings;
/// <summary>
/// Manages an Oculus Rift head-mounted display (HMD).
/// </summary>
public class OVRDisplay
{
/// <summary>
/// Contains full fov information per eye
/// Under Symmetric Fov mode, UpFov == DownFov and LeftFov == RightFov.
/// </summary>
public struct EyeFov
{
public float UpFov;
public float DownFov;
public float LeftFov;
public float RightFov;
}
/// <summary>
/// Specifies the size and field-of-view for one eye texture.
/// </summary>
public struct EyeRenderDesc
{
/// <summary>
/// The horizontal and vertical size of the texture.
/// </summary>
public Vector2 resolution;
/// <summary>
/// The angle of the horizontal and vertical field of view in degrees.
/// Kept for Symmetric FOV interface compatibility.
/// Note this covers the full fov angle from both sides.
/// </summary>
public Vector2 fov;
/// <summary>
/// The full field-of-view information in degrees.
/// When Asymmetric FOV isn't enabled, this returns the maximum fov angle
/// </summary>
public EyeFov fullFov;
}
/// <summary>
/// Contains latency measurements for a single frame of rendering.
/// </summary>
public struct LatencyData
{
/// <summary>
/// The time it took to render both eyes, in milliseconds.
/// </summary>
public float render;
/// <summary>
/// The time it took to perform TimeWarp, in milliseconds.
/// </summary>
public float timeWarp;
/// <summary>
/// The time between the end of TimeWarp and scan-out, in milliseconds.
/// </summary>
public float postPresent;
public float renderError;
public float timeWarpError;
}
private bool needsConfigureTexture;
private EyeRenderDesc[] eyeDescs = new EyeRenderDesc[2];
private bool recenterRequested = false;
private int recenterRequestedFrameCount = int.MaxValue;
private int localTrackingSpaceRecenterCount = 0;
/// <summary>
/// Creates an instance of OVRDisplay. Called by OVRManager.
/// </summary>
public OVRDisplay()
{
UpdateTextures();
}
/// <summary>
/// Updates the internal state of the OVRDisplay. Called by OVRManager.
/// </summary>
public void Update()
{
UpdateTextures();
if (recenterRequested && Time.frameCount > recenterRequestedFrameCount)
{
Debug.Log("Recenter event detected");
if (RecenteredPose != null)
{
RecenteredPose();
}
recenterRequested = false;
recenterRequestedFrameCount = int.MaxValue;
}
if (OVRPlugin.GetSystemHeadsetType() >= OVRPlugin.SystemHeadset.Oculus_Quest &&
OVRPlugin.GetSystemHeadsetType() < OVRPlugin.SystemHeadset.Rift_DK1) // all Oculus Standalone headsets
{
int recenterCount = OVRPlugin.GetLocalTrackingSpaceRecenterCount();
if (localTrackingSpaceRecenterCount != recenterCount)
{
Debug.Log("Recenter event detected");
if (RecenteredPose != null)
{
RecenteredPose();
}
localTrackingSpaceRecenterCount = recenterCount;
}
}
}
/// <summary>
/// Occurs when the head pose is reset.
/// </summary>
public event System.Action RecenteredPose;
/// <summary>
/// Recenters the head pose.
/// </summary>
public void RecenterPose()
{
#if USING_XR_SDK
XRInputSubsystem currentInputSubsystem = OVRManager.GetCurrentInputSubsystem();
if (currentInputSubsystem != null)
{
currentInputSubsystem.TryRecenter();
}
#elif !REQUIRES_XR_SDK
InputTracking.Recenter();
#endif
// The current poses are cached for the current frame and won't be updated immediately
// after UnityEngine.VR.InputTracking.Recenter(). So we need to wait until the next frame
// to trigger the RecenteredPose delegate, so that the application can expect the correct
// pose when the delegate gets called.
recenterRequested = true;
recenterRequestedFrameCount = Time.frameCount;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
OVRMixedReality.RecenterPose();
#endif
}
/// <summary>
/// Gets the current linear acceleration of the head.
/// </summary>
public Vector3 acceleration
{
get {
if (!OVRManager.isHmdPresent)
return Vector3.zero;
Vector3 retVec = Vector3.zero;
if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Acceleration, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
return retVec;
return Vector3.zero;
}
}
/// <summary>
/// Gets the current angular acceleration of the head in radians per second per second about each axis.
/// </summary>
public Vector3 angularAcceleration
{
get
{
if (!OVRManager.isHmdPresent)
return Vector3.zero;
Vector3 retVec = Vector3.zero;
if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.AngularAcceleration, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
return retVec;
return Vector3.zero;
}
}
/// <summary>
/// Gets the current linear velocity of the head in meters per second.
/// </summary>
public Vector3 velocity
{
get
{
if (!OVRManager.isHmdPresent)
return Vector3.zero;
Vector3 retVec = Vector3.zero;
if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Velocity, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
return retVec;
return Vector3.zero;
}
}
/// <summary>
/// Gets the current angular velocity of the head in radians per second about each axis.
/// </summary>
public Vector3 angularVelocity
{
get {
if (!OVRManager.isHmdPresent)
return Vector3.zero;
Vector3 retVec = Vector3.zero;
if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.AngularVelocity, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
return retVec;
return Vector3.zero;
}
}
/// <summary>
/// Gets the resolution and field of view for the given eye.
/// </summary>
public EyeRenderDesc GetEyeRenderDesc(UnityEngine.XR.XRNode eye)
{
return eyeDescs[(int)eye];
}
/// <summary>
/// Gets the current measured latency values.
/// </summary>
public LatencyData latency
{
get {
if (!OVRManager.isHmdPresent)
return new LatencyData();
string latency = OVRPlugin.latency;
var r = new Regex("Render: ([0-9]+[.][0-9]+)ms, TimeWarp: ([0-9]+[.][0-9]+)ms, PostPresent: ([0-9]+[.][0-9]+)ms", RegexOptions.None);
var ret = new LatencyData();
Match match = r.Match(latency);
if (match.Success)
{
ret.render = float.Parse(match.Groups[1].Value);
ret.timeWarp = float.Parse(match.Groups[2].Value);
ret.postPresent = float.Parse(match.Groups[3].Value);
}
return ret;
}
}
/// <summary>
/// Gets the application's frame rate as reported by the Oculus plugin.
/// </summary>
public float appFramerate
{
get
{
if (!OVRManager.isHmdPresent)
return 0;
return OVRPlugin.GetAppFramerate();
}
}
/// <summary>
/// Gets the recommended MSAA level for optimal quality/performance on the current device.
/// </summary>
public int recommendedMSAALevel
{
get
{
int result = OVRPlugin.recommendedMSAALevel;
if (result == 1)
result = 0;
return result;
}
}
/// <summary>
/// Gets the list of available display frequencies supported by this hardware.
/// </summary>
public float[] displayFrequenciesAvailable
{
get { return OVRPlugin.systemDisplayFrequenciesAvailable; }
}
/// <summary>
/// Gets and sets the current display frequency.
/// </summary>
public float displayFrequency
{
get
{
return OVRPlugin.systemDisplayFrequency;
}
set
{
OVRPlugin.systemDisplayFrequency = value;
}
}
private void UpdateTextures()
{
ConfigureEyeDesc(Node.LeftEye);
ConfigureEyeDesc(Node.RightEye);
}
private void ConfigureEyeDesc(Node eye)
{
if (!OVRManager.isHmdPresent)
return;
int eyeTextureWidth = Settings.eyeTextureWidth;
int eyeTextureHeight = Settings.eyeTextureHeight;
eyeDescs[(int)eye] = new EyeRenderDesc();
eyeDescs[(int)eye].resolution = new Vector2(eyeTextureWidth, eyeTextureHeight);
OVRPlugin.Frustumf2 frust;
if (OVRPlugin.GetNodeFrustum2((OVRPlugin.Node)eye, out frust))
{
eyeDescs[(int)eye].fullFov.LeftFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.LeftTan);
eyeDescs[(int)eye].fullFov.RightFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.RightTan);
eyeDescs[(int)eye].fullFov.UpFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.UpTan);
eyeDescs[(int)eye].fullFov.DownFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.DownTan);
}
else
{
OVRPlugin.Frustumf frustOld = OVRPlugin.GetEyeFrustum((OVRPlugin.Eye)eye);
eyeDescs[(int)eye].fullFov.LeftFov = Mathf.Rad2Deg * frustOld.fovX * 0.5f;
eyeDescs[(int)eye].fullFov.RightFov = Mathf.Rad2Deg * frustOld.fovX * 0.5f;
eyeDescs[(int)eye].fullFov.UpFov = Mathf.Rad2Deg * frustOld.fovY * 0.5f;
eyeDescs[(int)eye].fullFov.DownFov = Mathf.Rad2Deg * frustOld.fovY * 0.5f;
}
// Symmetric Fov uses the maximum fov angle
float maxFovX = Mathf.Max(eyeDescs[(int)eye].fullFov.LeftFov, eyeDescs[(int)eye].fullFov.RightFov);
float maxFovY = Mathf.Max(eyeDescs[(int)eye].fullFov.UpFov, eyeDescs[(int)eye].fullFov.DownFov);
eyeDescs[(int)eye].fov.x = maxFovX * 2.0f;
eyeDescs[(int)eye].fov.y = maxFovY * 2.0f;
if (!OVRPlugin.AsymmetricFovEnabled)
{
eyeDescs[(int)eye].fullFov.LeftFov = maxFovX;
eyeDescs[(int)eye].fullFov.RightFov = maxFovX;
eyeDescs[(int)eye].fullFov.UpFov = maxFovY;
eyeDescs[(int)eye].fullFov.DownFov = maxFovY;
}
}
}
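
// Illustrative sketch (editor's addition, not part of the SDK sources): reading
// per-eye render information and latency from the OVRDisplay instance owned by
// OVRManager (exposed as OVRManager.display). OVR_EXAMPLE_CODE is hypothetical and
// undefined by default.
#if OVR_EXAMPLE_CODE
public static class OVRDisplayExample
{
	public static void LogDisplayInfo()
	{
		OVRDisplay.EyeRenderDesc desc = OVRManager.display.GetEyeRenderDesc(Node.LeftEye);
		Debug.Log("left eye resolution: " + desc.resolution + ", symmetric fov: " + desc.fov);
		OVRDisplay.LatencyData latency = OVRManager.display.latency;
		Debug.Log("render latency (ms): " + latency.render); // parsed from the plugin's latency string
	}
}
#endif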

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: bb365ebe8e821fc4e81e9dca9d704357
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,388 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System;
using System.IO;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
/// <summary>
/// Plays tactile effects on a tracked VR controller.
/// </summary>
public static class OVRHaptics
{
public readonly static OVRHapticsChannel[] Channels;
public readonly static OVRHapticsChannel LeftChannel;
public readonly static OVRHapticsChannel RightChannel;
private readonly static OVRHapticsOutput[] m_outputs;
static OVRHaptics()
{
Config.Load();
m_outputs = new OVRHapticsOutput[]
{
new OVRHapticsOutput((uint)OVRPlugin.Controller.LTouch),
new OVRHapticsOutput((uint)OVRPlugin.Controller.RTouch),
};
Channels = new OVRHapticsChannel[]
{
LeftChannel = new OVRHapticsChannel(0),
RightChannel = new OVRHapticsChannel(1),
};
}
/// <summary>
/// Determines the target format for haptics data on a specific device.
/// </summary>
public static class Config
{
public static int SampleRateHz { get; private set; }
public static int SampleSizeInBytes { get; private set; }
public static int MinimumSafeSamplesQueued { get; private set; }
public static int MinimumBufferSamplesCount { get; private set; }
public static int OptimalBufferSamplesCount { get; private set; }
public static int MaximumBufferSamplesCount { get; private set; }
static Config()
{
Load();
}
public static void Load()
{
OVRPlugin.HapticsDesc desc = OVRPlugin.GetControllerHapticsDesc((uint)OVRPlugin.Controller.RTouch);
SampleRateHz = desc.SampleRateHz;
SampleSizeInBytes = desc.SampleSizeInBytes;
MinimumSafeSamplesQueued = desc.MinimumSafeSamplesQueued;
MinimumBufferSamplesCount = desc.MinimumBufferSamplesCount;
OptimalBufferSamplesCount = desc.OptimalBufferSamplesCount;
MaximumBufferSamplesCount = desc.MaximumBufferSamplesCount;
}
}
/// <summary>
/// A track of haptics data that can be mixed or sequenced with another track.
/// </summary>
public class OVRHapticsChannel
{
private OVRHapticsOutput m_output;
/// <summary>
/// Constructs a channel targeting the specified output.
/// </summary>
public OVRHapticsChannel(uint outputIndex)
{
m_output = m_outputs[outputIndex];
}
/// <summary>
/// Cancels any currently-playing clips and immediately plays the specified clip instead.
/// </summary>
public void Preempt(OVRHapticsClip clip)
{
m_output.Preempt(clip);
}
/// <summary>
/// Enqueues the specified clip to play after any currently-playing clips finish.
/// </summary>
public void Queue(OVRHapticsClip clip)
{
m_output.Queue(clip);
}
/// <summary>
/// Adds the specified clip to play simultaneously to the currently-playing clip(s).
/// </summary>
public void Mix(OVRHapticsClip clip)
{
m_output.Mix(clip);
}
/// <summary>
/// Cancels any currently-playing clips.
/// </summary>
public void Clear()
{
m_output.Clear();
}
}
private class OVRHapticsOutput
{
private class ClipPlaybackTracker
{
public int ReadCount { get; set; }
public OVRHapticsClip Clip { get; set; }
public ClipPlaybackTracker(OVRHapticsClip clip)
{
Clip = clip;
}
}
private bool m_lowLatencyMode = true;
private bool m_paddingEnabled = true;
private int m_prevSamplesQueued = 0;
private float m_prevSamplesQueuedTime = 0;
private int m_numPredictionHits = 0;
private int m_numPredictionMisses = 0;
private int m_numUnderruns = 0;
private List<ClipPlaybackTracker> m_pendingClips = new List<ClipPlaybackTracker>();
private uint m_controller = 0;
private OVRNativeBuffer m_nativeBuffer = new OVRNativeBuffer(OVRHaptics.Config.MaximumBufferSamplesCount * OVRHaptics.Config.SampleSizeInBytes);
private OVRHapticsClip m_paddingClip = new OVRHapticsClip();
public OVRHapticsOutput(uint controller)
{
#if UNITY_ANDROID
m_paddingEnabled = false;
#endif
m_controller = controller;
}
/// <summary>
/// The system calls this each frame to update haptics playback.
/// </summary>
public void Process()
{
var hapticsState = OVRPlugin.GetControllerHapticsState(m_controller);
float elapsedTime = Time.realtimeSinceStartup - m_prevSamplesQueuedTime;
if (m_prevSamplesQueued > 0)
{
int expectedSamples = m_prevSamplesQueued - (int)(elapsedTime * OVRHaptics.Config.SampleRateHz + 0.5f);
if (expectedSamples < 0)
expectedSamples = 0;
if ((hapticsState.SamplesQueued - expectedSamples) == 0)
m_numPredictionHits++;
else
m_numPredictionMisses++;
//Debug.Log(hapticsState.SamplesAvailable + "a " + hapticsState.SamplesQueued + "q " + expectedSamples + "e "
//+ "Prediction Accuracy: " + m_numPredictionHits / (float)(m_numPredictionMisses + m_numPredictionHits));
if ((expectedSamples > 0) && (hapticsState.SamplesQueued == 0))
{
m_numUnderruns++;
//Debug.LogError("Samples Underrun (" + m_controller + " #" + m_numUnderruns + ") -"
// + " Expected: " + expectedSamples
// + " Actual: " + hapticsState.SamplesQueued);
}
m_prevSamplesQueued = hapticsState.SamplesQueued;
m_prevSamplesQueuedTime = Time.realtimeSinceStartup;
}
int desiredSamplesCount = OVRHaptics.Config.OptimalBufferSamplesCount;
if (m_lowLatencyMode)
{
float sampleRateMs = 1000.0f / (float)OVRHaptics.Config.SampleRateHz;
float elapsedMs = elapsedTime * 1000.0f;
int samplesNeededPerFrame = (int)Mathf.Ceil(elapsedMs / sampleRateMs);
int lowLatencySamplesCount = OVRHaptics.Config.MinimumSafeSamplesQueued + samplesNeededPerFrame;
if (lowLatencySamplesCount < desiredSamplesCount)
desiredSamplesCount = lowLatencySamplesCount;
}
if (hapticsState.SamplesQueued > desiredSamplesCount)
return;
if (desiredSamplesCount > OVRHaptics.Config.MaximumBufferSamplesCount)
desiredSamplesCount = OVRHaptics.Config.MaximumBufferSamplesCount;
if (desiredSamplesCount > hapticsState.SamplesAvailable)
desiredSamplesCount = hapticsState.SamplesAvailable;
int acquiredSamplesCount = 0;
int clipIndex = 0;
while(acquiredSamplesCount < desiredSamplesCount && clipIndex < m_pendingClips.Count)
{
int numSamplesToCopy = desiredSamplesCount - acquiredSamplesCount;
int remainingSamplesInClip = m_pendingClips[clipIndex].Clip.Count - m_pendingClips[clipIndex].ReadCount;
if (numSamplesToCopy > remainingSamplesInClip)
numSamplesToCopy = remainingSamplesInClip;
if (numSamplesToCopy > 0)
{
int numBytes = numSamplesToCopy * OVRHaptics.Config.SampleSizeInBytes;
int dstOffset = acquiredSamplesCount * OVRHaptics.Config.SampleSizeInBytes;
int srcOffset = m_pendingClips[clipIndex].ReadCount * OVRHaptics.Config.SampleSizeInBytes;
Marshal.Copy(m_pendingClips[clipIndex].Clip.Samples, srcOffset, m_nativeBuffer.GetPointer(dstOffset), numBytes);
m_pendingClips[clipIndex].ReadCount += numSamplesToCopy;
acquiredSamplesCount += numSamplesToCopy;
}
clipIndex++;
}
for (int i = m_pendingClips.Count - 1; i >= 0 && m_pendingClips.Count > 0; i--)
{
if (m_pendingClips[i].ReadCount >= m_pendingClips[i].Clip.Count)
m_pendingClips.RemoveAt(i);
}
if (m_paddingEnabled)
{
int desiredPadding = desiredSamplesCount - (hapticsState.SamplesQueued + acquiredSamplesCount);
if (desiredPadding < (OVRHaptics.Config.MinimumBufferSamplesCount - acquiredSamplesCount))
desiredPadding = (OVRHaptics.Config.MinimumBufferSamplesCount - acquiredSamplesCount);
if (desiredPadding > hapticsState.SamplesAvailable)
desiredPadding = hapticsState.SamplesAvailable;
if (desiredPadding > 0)
{
int numBytes = desiredPadding * OVRHaptics.Config.SampleSizeInBytes;
int dstOffset = acquiredSamplesCount * OVRHaptics.Config.SampleSizeInBytes;
int srcOffset = 0;
Marshal.Copy(m_paddingClip.Samples, srcOffset, m_nativeBuffer.GetPointer(dstOffset), numBytes);
acquiredSamplesCount += desiredPadding;
}
}
if (acquiredSamplesCount > 0)
{
OVRPlugin.HapticsBuffer hapticsBuffer;
hapticsBuffer.Samples = m_nativeBuffer.GetPointer();
hapticsBuffer.SamplesCount = acquiredSamplesCount;
OVRPlugin.SetControllerHaptics(m_controller, hapticsBuffer);
hapticsState = OVRPlugin.GetControllerHapticsState(m_controller);
m_prevSamplesQueued = hapticsState.SamplesQueued;
m_prevSamplesQueuedTime = Time.realtimeSinceStartup;
}
}
/// <summary>
/// Immediately plays the specified clip without waiting for any currently-playing clip to finish.
/// </summary>
public void Preempt(OVRHapticsClip clip)
{
m_pendingClips.Clear();
m_pendingClips.Add(new ClipPlaybackTracker(clip));
}
/// <summary>
/// Enqueues the specified clip to play after any currently-playing clip finishes.
/// </summary>
public void Queue(OVRHapticsClip clip)
{
m_pendingClips.Add(new ClipPlaybackTracker(clip));
}
/// <summary>
/// Adds the samples from the specified clip to the ones in the currently-playing clip(s).
/// </summary>
public void Mix(OVRHapticsClip clip)
{
int numClipsToMix = 0;
int numSamplesToMix = 0;
int numSamplesRemaining = clip.Count;
while (numSamplesRemaining > 0 && numClipsToMix < m_pendingClips.Count)
{
int numSamplesRemainingInClip = m_pendingClips[numClipsToMix].Clip.Count - m_pendingClips[numClipsToMix].ReadCount;
numSamplesRemaining -= numSamplesRemainingInClip;
numSamplesToMix += numSamplesRemainingInClip;
numClipsToMix++;
}
if (numSamplesRemaining > 0)
{
numSamplesToMix += numSamplesRemaining;
numSamplesRemaining = 0;
}
if (numClipsToMix > 0)
{
OVRHapticsClip mixClip = new OVRHapticsClip(numSamplesToMix);
OVRHapticsClip a = clip;
int aReadCount = 0;
for (int i = 0; i < numClipsToMix; i++)
{
OVRHapticsClip b = m_pendingClips[i].Clip;
for(int bReadCount = m_pendingClips[i].ReadCount; bReadCount < b.Count; bReadCount++)
{
if (OVRHaptics.Config.SampleSizeInBytes == 1)
{
byte sample = 0; // TODO support multi-byte samples
if ((aReadCount < a.Count) && (bReadCount < b.Count))
{
sample = (byte)(Mathf.Clamp(a.Samples[aReadCount] + b.Samples[bReadCount], 0, System.Byte.MaxValue)); // TODO support multi-byte samples
aReadCount++;
}
else if (bReadCount < b.Count)
{
sample = b.Samples[bReadCount]; // TODO support multi-byte samples
}
mixClip.WriteSample(sample); // TODO support multi-byte samples
}
}
}
while (aReadCount < a.Count)
{
if (OVRHaptics.Config.SampleSizeInBytes == 1)
{
mixClip.WriteSample(a.Samples[aReadCount]); // TODO support multi-byte samples
}
aReadCount++;
}
m_pendingClips[0] = new ClipPlaybackTracker(mixClip);
for (int i = 1; i < numClipsToMix; i++)
{
m_pendingClips.RemoveAt(1);
}
}
else
{
m_pendingClips.Add(new ClipPlaybackTracker(clip));
}
}
public void Clear()
{
m_pendingClips.Clear();
}
}
/// <summary>
/// The system calls this each frame to update haptics playback.
/// </summary>
public static void Process()
{
Config.Load();
for (int i = 0; i < m_outputs.Length; i++)
{
m_outputs[i].Process();
}
}
}
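
// Illustrative sketch (editor's addition, not part of the SDK sources): the
// typical call pattern for the classes above — build a clip, hand it to a channel,
// and let Process() be pumped once per frame (OVRManager normally does this in its
// update loop). OVR_EXAMPLE_CODE is hypothetical and undefined by default.
#if OVR_EXAMPLE_CODE
public class HapticsBuzzExample : MonoBehaviour
{
	private OVRHapticsClip clip;

	void Start()
	{
		clip = new OVRHapticsClip(OVRHaptics.Config.SampleRateHz / 10); // ~100 ms of samples
		for (int i = 0; i < clip.Capacity; i++)
			clip.WriteSample(128); // constant mid-strength buzz
	}

	void Update()
	{
		if (OVRInput.GetDown(OVRInput.Button.One))
			OVRHaptics.RightChannel.Preempt(clip); // cut off anything playing and start the buzz
	}
}
#endif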

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: d3b22b858e27329498781f145fa42610
timeCreated: 1463018541
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,164 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
/// <summary>
/// A PCM buffer of data for a haptics effect.
/// </summary>
public class OVRHapticsClip
{
/// <summary>
/// The current number of samples in the clip.
/// </summary>
public int Count { get; private set; }
/// <summary>
/// The maximum number of samples the clip can store.
/// </summary>
public int Capacity { get; private set; }
/// <summary>
/// The raw haptics data.
/// </summary>
public byte[] Samples { get; private set; }
public OVRHapticsClip()
{
Capacity = OVRHaptics.Config.MaximumBufferSamplesCount;
Samples = new byte[Capacity * OVRHaptics.Config.SampleSizeInBytes];
}
/// <summary>
/// Creates a clip with the specified capacity.
/// </summary>
public OVRHapticsClip(int capacity)
{
Capacity = (capacity >= 0) ? capacity : 0;
Samples = new byte[Capacity * OVRHaptics.Config.SampleSizeInBytes];
}
/// <summary>
/// Creates a clip with the specified data.
/// </summary>
public OVRHapticsClip(byte[] samples, int samplesCount)
{
Samples = samples;
Capacity = Samples.Length / OVRHaptics.Config.SampleSizeInBytes;
Count = (samplesCount >= 0) ? samplesCount : 0;
}
/// <summary>
/// Creates a clip by mixing the specified clips.
/// </summary>
public OVRHapticsClip(OVRHapticsClip a, OVRHapticsClip b)
{
int maxCount = a.Count;
if (b.Count > maxCount)
maxCount = b.Count;
Capacity = maxCount;
Samples = new byte[Capacity * OVRHaptics.Config.SampleSizeInBytes];
for (int i = 0; i < a.Count || i < b.Count; i++)
{
if (OVRHaptics.Config.SampleSizeInBytes == 1)
{
byte sample = 0; // TODO support multi-byte samples
if ((i < a.Count) && (i < b.Count))
sample = (byte)(Mathf.Clamp(a.Samples[i] + b.Samples[i], 0, System.Byte.MaxValue)); // TODO support multi-byte samples
else if (i < a.Count)
sample = a.Samples[i]; // TODO support multi-byte samples
else if (i < b.Count)
sample = b.Samples[i]; // TODO support multi-byte samples
WriteSample(sample); // TODO support multi-byte samples
}
}
}
/// <summary>
/// Creates a haptics clip from the specified audio clip.
/// </summary>
public OVRHapticsClip(AudioClip audioClip, int channel = 0)
{
float[] audioData = new float[audioClip.samples * audioClip.channels];
audioClip.GetData(audioData, 0);
InitializeFromAudioFloatTrack(audioData, audioClip.frequency, audioClip.channels, channel);
}
/// <summary>
/// Adds the specified sample to the end of the clip.
/// </summary>
public void WriteSample(byte sample) // TODO support multi-byte samples
{
if (Count >= Capacity)
{
//Debug.LogError("Attempted to write OVRHapticsClip sample out of range - Count:" + Count + " Capacity:" + Capacity);
return;
}
if (OVRHaptics.Config.SampleSizeInBytes == 1)
{
Samples[Count * OVRHaptics.Config.SampleSizeInBytes] = sample; // TODO support multi-byte samples
}
Count++;
}
/// <summary>
/// Clears the clip and resets its size to 0.
/// </summary>
public void Reset()
{
Count = 0;
}
private void InitializeFromAudioFloatTrack(float[] sourceData, double sourceFrequency, int sourceChannelCount, int sourceChannel)
{
double stepSizePrecise = (sourceFrequency + 1e-6) / OVRHaptics.Config.SampleRateHz;
if (stepSizePrecise < 1.0)
return;
int stepSize = (int)stepSizePrecise;
double stepSizeError = stepSizePrecise - stepSize;
double accumulatedStepSizeError = 0.0;
int length = sourceData.Length;
Count = 0;
Capacity = length / sourceChannelCount / stepSize + 1;
Samples = new byte[Capacity * OVRHaptics.Config.SampleSizeInBytes];
int i = sourceChannel % sourceChannelCount;
while (i < length)
{
if (OVRHaptics.Config.SampleSizeInBytes == 1)
{
WriteSample((byte)(Mathf.Clamp01(Mathf.Abs(sourceData[i])) * System.Byte.MaxValue)); // TODO support multi-byte samples
}
i += stepSize * sourceChannelCount;
accumulatedStepSizeError += stepSizeError;
if ((int)accumulatedStepSizeError > 0)
{
i += (int)accumulatedStepSizeError * sourceChannelCount;
accumulatedStepSizeError -= (int)accumulatedStepSizeError;
}
}
}
}
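
// Illustrative sketch (editor's addition, not part of the SDK sources): feeding an
// AudioClip through the constructor above, which resamples one audio channel down
// to Config.SampleRateHz. "buzzAudio" is an assumed field; OVR_EXAMPLE_CODE is
// hypothetical and undefined by default.
#if OVR_EXAMPLE_CODE
public class AudioToHapticsExample : MonoBehaviour
{
	public AudioClip buzzAudio; // assign in the Inspector

	void Start()
	{
		OVRHapticsClip haptics = new OVRHapticsClip(buzzAudio, 0); // channel 0 of the audio data
		OVRHaptics.LeftChannel.Queue(haptics);                     // play after anything already queued
	}
}
#endif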

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: c123270a848515b458069b5242866451
timeCreated: 1467575852
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,181 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class OVRHeadsetEmulator : MonoBehaviour {
public enum OpMode
{
Off,
EditorOnly,
AlwaysOn
}
public OpMode opMode = OpMode.EditorOnly;
public bool resetHmdPoseOnRelease = true;
public bool resetHmdPoseByMiddleMouseButton = true;
public KeyCode[] activateKeys = new KeyCode[] { KeyCode.LeftControl, KeyCode.RightControl };
public KeyCode[] pitchKeys = new KeyCode[] { KeyCode.LeftAlt, KeyCode.RightAlt };
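// Emulation controls (as implemented in Update below): hold any activateKeys key to take
// over the HMD pose. Mouse X/Y then drive the yaw/roll components (as named below),
// holding a pitchKeys key makes mouse X drive pitch instead, the scroll wheel raises or
// lowers the head, and middle mouse resets the pose when resetHmdPoseByMiddleMouseButton is on.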
OVRManager manager;
const float MOUSE_SCALE_X = -2.0f;
const float MOUSE_SCALE_X_PITCH = -2.0f;
const float MOUSE_SCALE_Y = 2.0f;
const float MOUSE_SCALE_HEIGHT = 1.0f;
const float MAX_ROLL = 85.0f;
private bool lastFrameEmulationActivated = false;
private Vector3 recordedHeadPoseRelativeOffsetTranslation;
private Vector3 recordedHeadPoseRelativeOffsetRotation;
private bool hasSentEvent = false;
private bool emulatorHasInitialized = false;
private CursorLockMode previousCursorLockMode = CursorLockMode.None;
// Use this for initialization
void Start () {
}
// Update is called once per frame
void Update () {
if (!emulatorHasInitialized)
{
if (OVRManager.OVRManagerinitialized)
{
previousCursorLockMode = Cursor.lockState;
manager = OVRManager.instance;
recordedHeadPoseRelativeOffsetTranslation = manager.headPoseRelativeOffsetTranslation;
recordedHeadPoseRelativeOffsetRotation = manager.headPoseRelativeOffsetRotation;
emulatorHasInitialized = true;
lastFrameEmulationActivated = false;
}
else
return;
}
bool emulationActivated = IsEmulationActivated();
if (emulationActivated)
{
if (!lastFrameEmulationActivated)
{
previousCursorLockMode = Cursor.lockState;
Cursor.lockState = CursorLockMode.Locked;
}
if (!lastFrameEmulationActivated && resetHmdPoseOnRelease)
{
manager.headPoseRelativeOffsetTranslation = recordedHeadPoseRelativeOffsetTranslation;
manager.headPoseRelativeOffsetRotation = recordedHeadPoseRelativeOffsetRotation;
}
if (resetHmdPoseByMiddleMouseButton && Input.GetMouseButton(2))
{
manager.headPoseRelativeOffsetTranslation = Vector3.zero;
manager.headPoseRelativeOffsetRotation = Vector3.zero;
}
else
{
Vector3 emulatedTranslation = manager.headPoseRelativeOffsetTranslation;
float deltaMouseScrollWheel = Input.GetAxis("Mouse ScrollWheel");
float emulatedHeight = deltaMouseScrollWheel * MOUSE_SCALE_HEIGHT;
emulatedTranslation.y += emulatedHeight;
manager.headPoseRelativeOffsetTranslation = emulatedTranslation;
float deltaX = Input.GetAxis("Mouse X");
float deltaY = Input.GetAxis("Mouse Y");
Vector3 emulatedAngles = manager.headPoseRelativeOffsetRotation;
float emulatedRoll = emulatedAngles.x;
float emulatedYaw = emulatedAngles.y;
float emulatedPitch = emulatedAngles.z;
if (IsTweakingPitch())
{
emulatedPitch += deltaX * MOUSE_SCALE_X_PITCH;
}
else
{
emulatedRoll += deltaY * MOUSE_SCALE_Y;
emulatedYaw += deltaX * MOUSE_SCALE_X;
}
manager.headPoseRelativeOffsetRotation = new Vector3(emulatedRoll, emulatedYaw, emulatedPitch);
}
if (!hasSentEvent)
{
OVRPlugin.SendEvent("headset_emulator", "activated");
hasSentEvent = true;
}
}
else
{
if (lastFrameEmulationActivated)
{
Cursor.lockState = previousCursorLockMode;
recordedHeadPoseRelativeOffsetTranslation = manager.headPoseRelativeOffsetTranslation;
recordedHeadPoseRelativeOffsetRotation = manager.headPoseRelativeOffsetRotation;
if (resetHmdPoseOnRelease)
{
manager.headPoseRelativeOffsetTranslation = Vector3.zero;
manager.headPoseRelativeOffsetRotation = Vector3.zero;
}
}
}
lastFrameEmulationActivated = emulationActivated;
}
bool IsEmulationActivated()
{
if (opMode == OpMode.Off)
{
return false;
}
else if (opMode == OpMode.EditorOnly && !Application.isEditor)
{
return false;
}
foreach (KeyCode key in activateKeys)
{
if (Input.GetKey(key))
return true;
}
return false;
}
bool IsTweakingPitch()
{
if (!IsEmulationActivated())
return false;
foreach (KeyCode key in pitchKeys)
{
if (Input.GetKey(key))
return true;
}
return false;
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 5178bc8574ce2bf4388e787a2e2af326
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

File diff suppressed because it is too large

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: d210caf8a50e1954c80690fa858572ad
timeCreated: 1438295094
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,23 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Dummy subtype of PropertyAttribute for custom inspector to use.
/// </summary>
public class OVRLayerAttribute : PropertyAttribute {
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 701bfebb60063334f994e36546c103d6
timeCreated: 1499749327
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

File diff suppressed because it is too large

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 7e933e81d3c20c74ea6fdc708a67e3a5
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: -100
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,142 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if UNITY_ANDROID && !UNITY_EDITOR
#define OVR_ANDROID_MRC
#endif
using System;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using System.Collections.Generic;
using UnityEngine;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
/// <summary>
/// Manages mixed reality elements
/// </summary>
internal static class OVRMixedReality
{
/// <summary>
/// For debugging purposes, preset parameters can be used to fake a camera when an external camera is not available
/// </summary>
public static bool useFakeExternalCamera = false;
public static Vector3 fakeCameraFloorLevelPosition = new Vector3(0.0f, 2.0f, -0.5f);
public static Vector3 fakeCameraEyeLevelPosition = fakeCameraFloorLevelPosition - new Vector3(0.0f, 1.8f, 0.0f);
public static Quaternion fakeCameraRotation = Quaternion.LookRotation((new Vector3(0.0f, fakeCameraFloorLevelPosition.y, 0.0f) - fakeCameraFloorLevelPosition).normalized, Vector3.up);
public static float fakeCameraFov = 60.0f;
public static float fakeCameraAspect = 16.0f / 9.0f;
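// The eye-level position above is derived by subtracting an assumed standing eye height
// (1.8 m) from the floor-level position, and the fake rotation aims the camera at the
// point above the tracking origin at the camera's own height, so it looks horizontally
// into the play area.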
/// <summary>
/// Composition object
/// </summary>
public static OVRComposition currentComposition = null;
/// <summary>
/// Updates the internal state of the Mixed Reality Camera. Called by OVRManager.
/// </summary>
public static void Update(GameObject parentObject, Camera mainCamera, OVRManager.CompositionMethod compositionMethod, bool useDynamicLighting, OVRManager.CameraDevice cameraDevice, OVRManager.DepthQuality depthQuality)
{
if (!OVRPlugin.initialized)
{
Debug.LogError("OVRPlugin not initialized");
return;
}
if (!OVRPlugin.IsMixedRealityInitialized())
{
OVRPlugin.InitializeMixedReality();
if (OVRPlugin.IsMixedRealityInitialized())
{
Debug.Log("OVRPlugin_MixedReality initialized");
}
else
{
Debug.LogError("Unable to initialize OVRPlugin_MixedReality");
return;
}
}
if (!OVRPlugin.IsMixedRealityInitialized())
{
return;
}
OVRPlugin.UpdateExternalCamera();
#if !OVR_ANDROID_MRC
OVRPlugin.UpdateCameraDevices();
#endif
#if OVR_ANDROID_MRC
useFakeExternalCamera = OVRPlugin.Media.UseMrcDebugCamera();
#endif
if (currentComposition != null && currentComposition.CompositionMethod() != compositionMethod)
{
currentComposition.Cleanup();
currentComposition = null;
}
if (compositionMethod == OVRManager.CompositionMethod.External)
{
if (currentComposition == null)
{
currentComposition = new OVRExternalComposition(parentObject, mainCamera);
}
}
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
else if (compositionMethod == OVRManager.CompositionMethod.Direct)
{
if (currentComposition == null)
{
currentComposition = new OVRDirectComposition(parentObject, mainCamera, cameraDevice, useDynamicLighting, depthQuality);
}
}
#endif
else
{
Debug.LogError("Unknown CompositionMethod : " + compositionMethod);
return;
}
currentComposition.Update(parentObject, mainCamera);
}
public static void Cleanup()
{
if (currentComposition != null)
{
currentComposition.Cleanup();
currentComposition = null;
}
if (OVRPlugin.IsMixedRealityInitialized())
{
OVRPlugin.ShutdownMixedReality();
}
}
public static void RecenterPose()
{
if (currentComposition != null)
{
currentComposition.RecenterPose();
}
}
}
#endif

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 5daf6258e951ab84bb8b3e3b03386396
timeCreated: 1497574405
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,33 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
#if UNITY_ANDROID && !UNITY_EDITOR
public abstract class OVROnCompleteListener : AndroidJavaProxy
{
public OVROnCompleteListener() : base("com.oculus.svclib.OnCompleteListener")
{
}
public abstract void onSuccess();
public abstract void onFailure();
}
#endif

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 8730118d7f00f9b47b09be73f7e91d2b
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

File diff suppressed because it is too large

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 4444ce35d262aa648ad0c425a559b931
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,439 @@
/************************************************************************************
Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// When attached to a GameObject with an OVROverlay component, OVROverlayMeshGenerator will use a mesh renderer
/// to preview the appearance of the OVROverlay as it would appear as a TimeWarp overlay on a headset.
/// </summary>
[RequireComponent(typeof(MeshFilter))]
[RequireComponent(typeof(MeshRenderer))]
[ExecuteInEditMode]
public class OVROverlayMeshGenerator : MonoBehaviour {
private Mesh _Mesh;
private List<Vector3> _Verts = new List<Vector3>();
private List<Vector2> _UV = new List<Vector2>();
private List<int> _Tris = new List<int>();
private OVROverlay _Overlay;
private MeshFilter _MeshFilter;
private MeshCollider _MeshCollider;
private MeshRenderer _MeshRenderer;
private Transform _CameraRoot;
private Transform _Transform;
private OVROverlay.OverlayShape _LastShape;
private Vector3 _LastPosition;
private Quaternion _LastRotation;
private Vector3 _LastScale;
private Rect _LastDestRectLeft;
private Rect _LastDestRectRight;
private Rect _LastSrcRectLeft;
private Texture _LastTexture;
private bool _Awake = false;
protected void Awake()
{
_MeshFilter = GetComponent<MeshFilter>();
_MeshCollider = GetComponent<MeshCollider>();
_MeshRenderer = GetComponent<MeshRenderer>();
_Transform = transform;
if (Camera.main && Camera.main.transform.parent)
{
_CameraRoot = Camera.main.transform.parent;
}
_Awake = true;
}
public void SetOverlay(OVROverlay overlay) {
_Overlay = overlay;
}
private Rect GetBoundingRect(Rect a, Rect b)
{
float xMin = Mathf.Min(a.x, b.x);
float xMax = Mathf.Max(a.x + a.width, b.x + b.width);
float yMin = Mathf.Min(a.y, b.y);
float yMax = Mathf.Max(a.y + a.height, b.y + b.height);
return new Rect(xMin, yMin, xMax - xMin, yMax - yMin);
}
protected void OnEnable() {
#if UNITY_EDITOR
UnityEditor.EditorApplication.update += Update;
#endif
}
protected void OnDisable() {
#if UNITY_EDITOR
UnityEditor.EditorApplication.update -= Update;
#endif
}
private void Update()
{
if (!Application.isEditor)
{
return;
}
if (!_Awake)
{
Awake();
}
if (_Overlay)
{
OVROverlay.OverlayShape shape = _Overlay.currentOverlayShape;
Vector3 position = _CameraRoot ? (_Transform.position - _CameraRoot.position) : _Transform.position;
Quaternion rotation = _Transform.rotation;
Vector3 scale = _Transform.lossyScale;
Rect destRectLeft = _Overlay.overrideTextureRectMatrix ? _Overlay.destRectLeft : new Rect(0, 0, 1, 1);
Rect destRectRight = _Overlay.overrideTextureRectMatrix ? _Overlay.destRectRight : new Rect(0, 0, 1, 1);
Rect srcRectLeft = _Overlay.overrideTextureRectMatrix ? _Overlay.srcRectLeft : new Rect(0, 0, 1, 1);
Texture texture = _Overlay.textures[0];
// Re-generate the mesh if necessary
if (_Mesh == null ||
_LastShape != shape ||
_LastPosition != position ||
_LastRotation != rotation ||
_LastScale != scale ||
_LastDestRectLeft != destRectLeft ||
_LastDestRectRight != destRectRight)
{
UpdateMesh(shape, position, rotation, scale, GetBoundingRect(destRectLeft, destRectRight));
_LastShape = shape;
_LastPosition = position;
_LastRotation = rotation;
_LastScale = scale;
_LastDestRectLeft = destRectLeft;
_LastDestRectRight = destRectRight;
}
// Generate the material and update textures if necessary
if (_MeshRenderer.sharedMaterial == null)
{
Material previewMat = new Material(Shader.Find("Unlit/Transparent"));
_MeshRenderer.sharedMaterial = previewMat;
}
if (_MeshRenderer.sharedMaterial.mainTexture != texture && !_Overlay.isExternalSurface)
{
_MeshRenderer.sharedMaterial.mainTexture = texture;
}
if (_LastSrcRectLeft != srcRectLeft)
{
_MeshRenderer.sharedMaterial.mainTextureOffset = srcRectLeft.position;
_MeshRenderer.sharedMaterial.mainTextureScale = srcRectLeft.size;
_LastSrcRectLeft = srcRectLeft;
}
}
}
private void UpdateMesh(OVROverlay.OverlayShape shape, Vector3 position, Quaternion rotation, Vector3 scale, Rect rect)
{
if (_MeshFilter)
{
if (_Mesh == null)
{
_Mesh = new Mesh() { name = "Overlay" };
_Mesh.hideFlags = HideFlags.DontSaveInBuild | HideFlags.DontSaveInEditor;
}
_Mesh.Clear();
_Verts.Clear();
_UV.Clear();
_Tris.Clear();
GenerateMesh(_Verts, _UV, _Tris, shape, position, rotation, scale, rect);
_Mesh.SetVertices(_Verts);
_Mesh.SetUVs(0, _UV);
_Mesh.SetTriangles(_Tris, 0);
_Mesh.UploadMeshData(false);
_MeshFilter.sharedMesh = _Mesh;
if (_MeshCollider)
{
_MeshCollider.sharedMesh = _Mesh;
}
}
}
public static void GenerateMesh(List<Vector3> verts, List<Vector2> uvs, List<int> tris, OVROverlay.OverlayShape shape, Vector3 position, Quaternion rotation, Vector3 scale, Rect rect)
{
switch (shape)
{
case OVROverlay.OverlayShape.Equirect:
BuildSphere(verts, uvs, tris, position, rotation, scale, rect);
break;
case OVROverlay.OverlayShape.Cubemap:
case OVROverlay.OverlayShape.OffcenterCubemap:
BuildCube(verts, uvs, tris, position, rotation, scale);
break;
case OVROverlay.OverlayShape.Quad:
BuildQuad(verts, uvs, tris, rect);
break;
case OVROverlay.OverlayShape.Cylinder:
BuildHemicylinder(verts, uvs, tris, scale, rect);
break;
}
}
private static Vector2 GetSphereUV(float theta, float phi, float expand_coef)
{
float thetaU = ((theta / (2 * Mathf.PI) - 0.5f) / expand_coef) + 0.5f;
float phiV = ((phi / Mathf.PI) / expand_coef) + 0.5f;
return new Vector2(thetaU, phiV);
}
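// GetSphereUV above maps equirect angles to texture UVs: theta (longitude, 0..2*pi)
// spans U and phi (latitude, -pi/2..pi/2) spans V; an expand_coef > 1 shrinks the UVs
// toward the texture center, which hides seam sampling at the edges.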
private static Vector3 GetSphereVert(float theta, float phi)
{
return new Vector3(-Mathf.Sin(theta) * Mathf.Cos(phi), Mathf.Sin(phi), -Mathf.Cos(theta) * Mathf.Cos(phi));
}
public static void BuildSphere(List<Vector3> verts, List<Vector2> uv, List<int> triangles, Vector3 position, Quaternion rotation, Vector3 scale, Rect rect, float worldScale = 800, int latitudes = 128, int longitudes = 128, float expand_coef = 1.0f)
{
position = Quaternion.Inverse(rotation) * position;
latitudes = Mathf.CeilToInt(latitudes * rect.height);
longitudes = Mathf.CeilToInt(longitudes * rect.width);
float minTheta = Mathf.PI * 2 * ( rect.x);
float minPhi = Mathf.PI * (0.5f - rect.y - rect.height);
float thetaScale = Mathf.PI * 2 * rect.width / longitudes;
float phiScale = Mathf.PI * rect.height / latitudes;
for (int j = 0; j < latitudes + 1; j += 1)
{
for (int k = 0; k < longitudes + 1; k++)
{
float theta = minTheta + k * thetaScale;
float phi = minPhi + j * phiScale;
Vector2 suv = GetSphereUV(theta, phi, expand_coef);
uv.Add(new Vector2((suv.x - rect.x) / rect.width, (suv.y - rect.y) / rect.height));
Vector3 vert = GetSphereVert(theta, phi);
vert.x = (worldScale * vert.x - position.x) / scale.x;
vert.y = (worldScale * vert.y - position.y) / scale.y;
vert.z = (worldScale * vert.z - position.z) / scale.z;
verts.Add(vert);
}
}
for (int j = 0; j < latitudes; j++)
{
for (int k = 0; k < longitudes; k++)
{
triangles.Add((j * (longitudes + 1)) + k);
triangles.Add(((j + 1) * (longitudes + 1)) + k);
triangles.Add(((j + 1) * (longitudes + 1)) + k + 1);
triangles.Add(((j + 1) * (longitudes + 1)) + k + 1);
triangles.Add((j * (longitudes + 1)) + k + 1);
triangles.Add((j * (longitudes + 1)) + k);
}
}
}
private enum CubeFace
{
Right,
Left,
Top,
Bottom,
Front,
Back,
COUNT
}
private static readonly Vector3[] BottomLeft = new Vector3[]
{
new Vector3(-0.5f, -0.5f, -0.5f),
new Vector3(0.5f, -0.5f, 0.5f),
new Vector3(0.5f, 0.5f, -0.5f),
new Vector3(0.5f, -0.5f, 0.5f),
new Vector3(0.5f, -0.5f, -0.5f),
new Vector3(-0.5f, -0.5f, 0.5f)
};
private static readonly Vector3[] RightVector = new Vector3[]
{
Vector3.forward,
Vector3.back,
Vector3.left,
Vector3.left,
Vector3.left,
Vector3.right
};
private static readonly Vector3[] UpVector = new Vector3[]
{
Vector3.up,
Vector3.up,
Vector3.forward,
Vector3.back,
Vector3.up,
Vector3.up
};
private static Vector2 GetCubeUV(CubeFace face, Vector2 sideUV, float expand_coef)
{
sideUV = (sideUV - 0.5f * Vector2.one) / expand_coef + 0.5f * Vector2.one;
switch (face)
{
case CubeFace.Bottom:
return new Vector2(sideUV.x / 3, sideUV.y / 2);
case CubeFace.Front:
return new Vector2((1 + sideUV.x) / 3, sideUV.y / 2);
case CubeFace.Back:
return new Vector2((2 + sideUV.x) / 3, sideUV.y / 2);
case CubeFace.Right:
return new Vector2(sideUV.x / 3, (1 + sideUV.y) / 2);
case CubeFace.Left:
return new Vector2((1 + sideUV.x) / 3, (1 + sideUV.y) / 2);
case CubeFace.Top:
return new Vector2((2 + sideUV.x) / 3, (1 + sideUV.y) / 2);
default:
return Vector2.zero;
}
}
private static Vector3 GetCubeVert(CubeFace face, Vector2 sideUV, float expand_coef)
{
return BottomLeft[(int)face] + sideUV.x * RightVector[(int)face] + sideUV.y * UpVector[(int)face];
}
public static void BuildCube(List<Vector3> verts, List<Vector2> uv, List<int> triangles, Vector3 position, Quaternion rotation, Vector3 scale, float worldScale = 800, int subQuads = 1, float expand_coef = 1.01f)
{
position = Quaternion.Inverse(rotation) * position;
int vertsPerSide = (subQuads + 1) * (subQuads + 1);
for (int i = 0; i < (int)CubeFace.COUNT; i++)
{
for(int j = 0; j < subQuads + 1; j++)
{
for(int k = 0; k < subQuads + 1; k++)
{
float u = j / (float)subQuads;
float v = k / (float)subQuads;
uv.Add(GetCubeUV((CubeFace)i, new Vector2(u, v), expand_coef));
Vector3 vert = GetCubeVert((CubeFace)i, new Vector2(u, v), expand_coef);
vert.x = (worldScale * vert.x - position.x) / scale.x;
vert.y = (worldScale * vert.y - position.y) / scale.y;
vert.z = (worldScale * vert.z - position.z) / scale.z;
verts.Add(vert);
}
}
for(int j = 0; j < subQuads; j++)
{
for(int k = 0; k < subQuads; k++)
{
triangles.Add(vertsPerSide * i + ((j + 1) * (subQuads + 1)) + k);
triangles.Add(vertsPerSide * i + (j * (subQuads + 1)) + k);
triangles.Add(vertsPerSide * i + ((j + 1) * (subQuads + 1)) + k + 1);
triangles.Add(vertsPerSide * i + ((j + 1) * (subQuads + 1)) + k + 1);
triangles.Add(vertsPerSide * i + (j * (subQuads + 1)) + k);
triangles.Add(vertsPerSide * i + (j * (subQuads + 1)) + k + 1);
}
}
}
}
public static void BuildQuad(List<Vector3> verts, List<Vector2> uv, List<int> triangles, Rect rect)
{
verts.Add(new Vector3(rect.x - 0.5f, (1 - rect.y - rect.height) - 0.5f, 0));
verts.Add(new Vector3(rect.x - 0.5f, (1 - rect.y) - 0.5f, 0));
verts.Add(new Vector3(rect.x + rect.width - 0.5f, (1 - rect.y) - 0.5f, 0));
verts.Add(new Vector3(rect.x + rect.width - 0.5f, (1 - rect.y - rect.height) - 0.5f, 0));
uv.Add(new Vector2(0, 0));
uv.Add(new Vector2(0, 1));
uv.Add(new Vector2(1, 1));
uv.Add(new Vector2(1, 0));
triangles.Add(0);
triangles.Add(1);
triangles.Add(2);
triangles.Add(2);
triangles.Add(3);
triangles.Add(0);
}
public static void BuildHemicylinder(List<Vector3> verts, List<Vector2> uv, List<int> triangles, Vector3 scale, Rect rect, int longitudes = 128)
{
float height = Mathf.Abs(scale.y) * rect.height;
float radius = scale.z;
float arcLength = scale.x * rect.width;
float arcAngle = arcLength / radius;
float minAngle = scale.x * (-0.5f + rect.x) / radius;
int columns = Mathf.CeilToInt(longitudes * arcAngle / (2 * Mathf.PI));
// We don't want very tall, skinny triangles, because they can lead to artifacts;
// keep triangles no more than twice as tall as they are wide.
float triangleWidth = arcLength / columns;
float ratio = height / triangleWidth;
int rows = Mathf.CeilToInt(ratio / 2);
for (int j = 0; j < rows + 1; j += 1)
{
for (int k = 0; k < columns + 1; k++)
{
uv.Add(new Vector2((k / (float)columns), 1 - (j / (float)rows)));
Vector3 vert = Vector3.zero;
// Because the transform's scale is used to control the cylinder parameters, divide
// by scale here so the mesh appears correctly once that scale is applied.
vert.x = (Mathf.Sin(minAngle + (k * arcAngle / columns)) * radius) / scale.x;
vert.y = (0.5f - rect.y - rect.height + rect.height * (1 - j / (float)rows));
vert.z = (Mathf.Cos(minAngle + (k * arcAngle / columns)) * radius) / scale.z;
verts.Add(vert);
}
}
for (int j = 0; j < rows; j++)
{
for (int k = 0; k < columns; k++)
{
triangles.Add((j * (columns + 1)) + k);
triangles.Add(((j + 1) * (columns + 1)) + k + 1);
triangles.Add(((j + 1) * (columns + 1)) + k);
triangles.Add(((j + 1) * (columns + 1)) + k + 1);
triangles.Add((j * (columns + 1)) + k);
triangles.Add((j * (columns + 1)) + k + 1);
}
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 8a68b94cb9095964d9a403b7b40ce6f4
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,123 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
/// <summary>
/// Shows the Oculus platform UI.
/// </summary>
public class OVRPlatformMenu : MonoBehaviour
{
/// <summary>
/// The key code.
/// </summary>
private OVRInput.RawButton inputCode = OVRInput.RawButton.Back;
public enum eHandler
{
ShowConfirmQuit,
RetreatOneLevel,
};
public eHandler shortPressHandler = eHandler.ShowConfirmQuit;
/// <summary>
/// Callback to handle short press. Returns true if ConfirmQuit menu should be shown.
/// </summary>
public System.Func<bool> OnShortPress;
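// Illustrative override (a sketch): OnShortPress returns true when the confirm-quit menu
// should be shown. For example, only allow quitting from a hypothetical "MainMenu" scene:
//     GetComponent<OVRPlatformMenu>().OnShortPress = () =>
//         UnityEngine.SceneManagement.SceneManager.GetActiveScene().name == "MainMenu";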
private static Stack<string> sceneStack = new Stack<string>();
enum eBackButtonAction
{
NONE,
SHORT_PRESS
};
eBackButtonAction HandleBackButtonState()
{
eBackButtonAction action = eBackButtonAction.NONE;
if (OVRInput.GetDown(inputCode))
{
action = eBackButtonAction.SHORT_PRESS;
}
return action;
}
/// <summary>
/// Initialize the short-press handler and the scene stack
/// </summary>
void Awake()
{
if (shortPressHandler == eHandler.RetreatOneLevel && OnShortPress == null)
OnShortPress = RetreatOneLevel;
if (!OVRManager.isHmdPresent)
{
enabled = false;
return;
}
sceneStack.Push(UnityEngine.SceneManagement.SceneManager.GetActiveScene().name);
}
/// <summary>
/// Show the confirm quit menu
/// </summary>
void ShowConfirmQuitMenu()
{
#if UNITY_ANDROID && !UNITY_EDITOR
Debug.Log("[PlatformUI-ConfirmQuit] Showing @ " + Time.time);
OVRManager.PlatformUIConfirmQuit();
#endif
}
/// <summary>
/// Sample handler for short press which retreats to the previous scene that used OVRPlatformMenu.
/// </summary>
private static bool RetreatOneLevel()
{
if (sceneStack.Count > 1)
{
string parentScene = sceneStack.Pop();
UnityEngine.SceneManagement.SceneManager.LoadSceneAsync (parentScene);
return false;
}
return true;
}
/// <summary>
/// Tests for a short press and shows the global platform menu when detected.
/// As per the Unity integration doc, the back button responds to "mouse 1" button down/up/etc.
/// </summary>
void Update()
{
#if UNITY_ANDROID
eBackButtonAction action = HandleBackButtonState();
if (action == eBackButtonAction.SHORT_PRESS)
{
if (OnShortPress == null || OnShortPress())
{
ShowConfirmQuitMenu();
}
}
#endif
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 2afcf575f4a68de4db434c7b7233c451
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

File diff suppressed because it is too large

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 61abd23f3aff5394ba8027ee380760b8
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,49 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
using System.Threading;
/// <summary>
/// (Deprecated) Contains information about the user's preferences and body dimensions.
/// </summary>
public class OVRProfile : Object
{
[System.Obsolete]
public enum State
{
NOT_TRIGGERED,
LOADING,
READY,
ERROR
};
[System.Obsolete]
public string id { get { return "000abc123def"; } }
[System.Obsolete]
public string userName { get { return "Oculus User"; } }
[System.Obsolete]
public string locale { get { return "en_US"; } }
public float ipd { get { return Vector3.Distance (OVRPlugin.GetNodePose (OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render).ToOVRPose ().position, OVRPlugin.GetNodePose (OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render).ToOVRPose ().position); } }
public float eyeHeight { get { return OVRPlugin.eyeHeight; } }
public float eyeDepth { get { return OVRPlugin.eyeDepth; } }
public float neckHeight { get { return eyeHeight - 0.075f; } }
[System.Obsolete]
public State state { get { return State.READY; } }
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 08539141953f28e439731aaf7cd5362f
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,46 @@
using System.Collections.Generic;
using UnityEngine;
public class OVRResources : MonoBehaviour
{
private static AssetBundle resourceBundle;
private static List<string> assetNames;
public static UnityEngine.Object Load(string path)
{
if (Debug.isDebugBuild)
{
if(resourceBundle == null)
{
Debug.Log("[OVRResources] Resource bundle was not loaded successfully");
return null;
}
var result = assetNames.Find(s => s.Contains(path.ToLower()));
return resourceBundle.LoadAsset(result);
}
return Resources.Load(path);
}
public static T Load<T>(string path) where T : UnityEngine.Object
{
if (Debug.isDebugBuild)
{
if (resourceBundle == null)
{
Debug.Log("[OVRResources] Resource bundle was not loaded successfully");
return null;
}
var result = assetNames.Find(s => s.Contains(path.ToLower()));
return resourceBundle.LoadAsset<T>(result);
}
return Resources.Load<T>(path);
}
public static void SetResourceBundle(AssetBundle bundle)
{
resourceBundle = bundle;
assetNames = new List<string>();
assetNames.AddRange(resourceBundle.GetAllAssetNames());
}
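// Illustrative setup (a sketch): register a bundle once at startup so debug builds resolve
// Load() against it instead of the built-in Resources folder. Paths and names are hypothetical.
//     AssetBundle bundle = AssetBundle.LoadFromFile("/sdcard/mybundle"); // hypothetical path
//     OVRResources.SetResourceBundle(bundle);
//     GameObject prefab = OVRResources.Load<GameObject>("MyPrefab");    // hypothetical asset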
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 17584d04fbb571344a3aa2b6593287c1
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,265 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
public class OVRSceneLoader : MonoBehaviour
{
public const string externalStoragePath = "/sdcard/Android/data";
public const string sceneLoadDataName = "SceneLoadData.txt";
public const string resourceBundleName = "asset_resources";
public float sceneCheckIntervalSeconds = 1f;
public float logCloseTime = 5.0f;
public Canvas mainCanvas;
public Text logTextBox;
private AsyncOperation loadSceneOperation;
private string formattedLogText;
private float closeLogTimer;
private bool closeLogDialogue;
private bool canvasPosUpdated;
private struct SceneInfo
{
public List<string> scenes;
public long version;
public SceneInfo(List<string> sceneList, long currentSceneEpochVersion)
{
scenes = sceneList;
version = currentSceneEpochVersion;
}
}
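// SceneLoadData.txt layout, as parsed by GetSceneInfo below: the first line is an epoch
// timestamp used as the version, and each subsequent line names one scene; the first
// scene listed is treated as the main scene to load.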
private string scenePath = "";
private string sceneLoadDataPath = "";
private List<AssetBundle> loadedAssetBundles = new List<AssetBundle>();
private SceneInfo currentSceneInfo;
private void Awake()
{
// Make it persist across scenes to continue checking for changes
DontDestroyOnLoad(this.gameObject);
}
void Start()
{
string applicationPath = Path.Combine(externalStoragePath, Application.identifier);
scenePath = Path.Combine(applicationPath, "cache/scenes");
sceneLoadDataPath = Path.Combine(scenePath, sceneLoadDataName);
closeLogDialogue = false;
StartCoroutine(DelayCanvasPosUpdate());
currentSceneInfo = GetSceneInfo();
// Check valid scene info has been fetched, and load the scenes
if (currentSceneInfo.version != 0 && !string.IsNullOrEmpty(currentSceneInfo.scenes[0]))
{
LoadScene(currentSceneInfo);
}
}
private void LoadScene(SceneInfo sceneInfo)
{
AssetBundle mainSceneBundle = null;
Debug.Log("[OVRSceneLoader] Loading main scene: " + sceneInfo.scenes[0] + " with version " + sceneInfo.version.ToString());
logTextBox.text += "Target Scene: " + sceneInfo.scenes[0] + "\n";
logTextBox.text += "Version: " + sceneInfo.version.ToString() + "\n";
// Load main scene and dependent additive scenes (if any)
Debug.Log("[OVRSceneLoader] Loading scene bundle files.");
// Fetch all files under the scene cache path, excluding unnecessary files such as the scene metadata file
string[] bundles = Directory.GetFiles(scenePath, "*_*");
logTextBox.text += "Loading " + bundles.Length + " bundle(s) . . . ";
string mainSceneBundleFileName = "scene_" + sceneInfo.scenes[0].ToLower();
try
{
foreach (string b in bundles)
{
var assetBundle = AssetBundle.LoadFromFile(b);
if (assetBundle != null)
{
Debug.Log("[OVRSceneLoader] Loading file bundle: " + assetBundle.name == null ? "null" : assetBundle.name);
loadedAssetBundles.Add(assetBundle);
}
else
{
Debug.LogError("[OVRSceneLoader] Loading file bundle failed");
continue; // assetBundle is null here, so skip the name checks below
}
if (assetBundle.name == mainSceneBundleFileName)
{
mainSceneBundle = assetBundle;
}
if (assetBundle.name == resourceBundleName)
{
OVRResources.SetResourceBundle(assetBundle);
}
}
}
catch(Exception e)
{
logTextBox.text += "<color=red>" + e.Message + "</color>";
return;
}
logTextBox.text += "<color=green>DONE\n</color>";
if (mainSceneBundle != null)
{
logTextBox.text += "Loading Scene: {0:P0}\n";
formattedLogText = logTextBox.text;
string[] scenePaths = mainSceneBundle.GetAllScenePaths();
string sceneName = Path.GetFileNameWithoutExtension(scenePaths[0]);
loadSceneOperation = SceneManager.LoadSceneAsync(sceneName);
loadSceneOperation.completed += LoadSceneOperation_completed;
}
else
{
logTextBox.text += "<color=red>Failed to get main scene bundle.\n</color>";
}
}
private void LoadSceneOperation_completed(AsyncOperation obj)
{
StartCoroutine(onCheckSceneCoroutine());
StartCoroutine(DelayCanvasPosUpdate());
closeLogTimer = 0;
closeLogDialogue = true;
logTextBox.text += "Log closing in {0} seconds.\n";
formattedLogText = logTextBox.text;
}
public void Update()
{
// Display scene load percentage
if (loadSceneOperation != null)
{
if (!loadSceneOperation.isDone)
{
logTextBox.text = string.Format(formattedLogText, loadSceneOperation.progress + 0.1f);
if (loadSceneOperation.progress >= 0.9f)
{
logTextBox.text = formattedLogText.Replace("{0:P0}", "<color=green>DONE</color>");
logTextBox.text += "Transitioning to new scene.\nLoad times will vary depending on scene complexity.\n";
}
}
}
UpdateCanvasPosition();
// Wait a certain time before closing the log dialogue after the scene has transitioned
if (closeLogDialogue)
{
if (closeLogTimer < logCloseTime)
{
closeLogTimer += Time.deltaTime;
logTextBox.text = string.Format(formattedLogText, (int)(logCloseTime - closeLogTimer));
}
else
{
mainCanvas.gameObject.SetActive(false);
closeLogDialogue = false;
}
}
}
private void UpdateCanvasPosition()
{
// Update canvas camera reference and position if the main camera has changed
if (mainCanvas.worldCamera != Camera.main)
{
mainCanvas.worldCamera = Camera.main;
if (Camera.main != null)
{
Vector3 newPosition = Camera.main.transform.position + Camera.main.transform.forward * 0.3f;
gameObject.transform.position = newPosition;
gameObject.transform.rotation = Camera.main.transform.rotation;
}
}
}
private SceneInfo GetSceneInfo()
{
SceneInfo sceneInfo = new SceneInfo();
try
{
StreamReader reader = new StreamReader(sceneLoadDataPath);
sceneInfo.version = System.Convert.ToInt64(reader.ReadLine());
List<string> sceneList = new List<string>();
while (!reader.EndOfStream)
{
sceneList.Add(reader.ReadLine());
}
sceneInfo.scenes = sceneList;
}
catch
{
logTextBox.text += "<color=red>Failed to get scene info data.\n</color>";
}
return sceneInfo;
}
// Update canvas position after a slight delay to get accurate headset position after scene transitions
IEnumerator DelayCanvasPosUpdate()
{
yield return new WaitForSeconds(0.1f);
UpdateCanvasPosition();
}
IEnumerator onCheckSceneCoroutine()
{
SceneInfo newSceneInfo;
while (true)
{
newSceneInfo = GetSceneInfo();
if (newSceneInfo.version != currentSceneInfo.version)
{
Debug.Log("[OVRSceneLoader] Scene change detected.");
// Unload all asset bundles
foreach (var b in loadedAssetBundles)
{
if (b != null)
{
b.Unload(true);
}
}
loadedAssetBundles.Clear();
// Unload all scenes in the hierarchy including main scene and
// its dependent additive scenes.
int activeScenes = SceneManager.sceneCount;
for (int i = 0; i < activeScenes; i++)
{
SceneManager.UnloadSceneAsync(SceneManager.GetSceneAt(i));
}
DestroyAllGameObjects();
SceneManager.LoadSceneAsync("OVRTransitionScene");
break;
}
yield return new WaitForSeconds(sceneCheckIntervalSeconds);
}
}
void DestroyAllGameObjects()
{
foreach (GameObject go in Resources.FindObjectsOfTypeAll(typeof(GameObject)) as GameObject[])
{
Destroy(go);
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a6d444f79f5ee4646b26c6d746385e80
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,193 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System;
using System.Runtime.InteropServices;
using UnityEngine;
/// <summary>
/// An infrared camera that tracks the position of a head-mounted display.
/// </summary>
public class OVRTracker
{
/// <summary>
/// The (symmetric) visible area in front of the sensor.
/// </summary>
public struct Frustum
{
/// <summary>
/// The sensor's minimum supported distance to the HMD.
/// </summary>
public float nearZ;
/// <summary>
/// The sensor's maximum supported distance to the HMD.
/// </summary>
public float farZ;
/// <summary>
/// The sensor's horizontal and vertical fields of view in degrees.
/// </summary>
public Vector2 fov;
}
/// <summary>
/// If true, a sensor is attached to the system.
/// </summary>
public bool isPresent
{
get {
if (!OVRManager.isHmdPresent)
return false;
return OVRPlugin.positionSupported;
}
}
/// <summary>
/// If true, the sensor is actively tracking the HMD's position. Otherwise the HMD may be temporarily occluded, the system may not support position tracking, etc.
/// </summary>
public bool isPositionTracked
{
get {
return OVRPlugin.positionTracked;
}
}
/// <summary>
/// If this is true and a sensor is available, the system will use position tracking when isPositionTracked is also true.
/// </summary>
public bool isEnabled
{
get {
if (!OVRManager.isHmdPresent)
return false;
return OVRPlugin.position;
}
set {
if (!OVRManager.isHmdPresent)
return;
OVRPlugin.position = value;
}
}
/// <summary>
/// Returns the number of sensors currently connected to the system.
/// </summary>
public int count
{
get {
int count = 0;
for (int i = 0; i < (int)OVRPlugin.Tracker.Count; ++i)
{
if (GetPresent(i))
count++;
}
return count;
}
}
/// <summary>
/// Gets the sensor's viewing frustum.
/// </summary>
public Frustum GetFrustum(int tracker = 0)
{
if (!OVRManager.isHmdPresent)
return new Frustum();
return OVRPlugin.GetTrackerFrustum((OVRPlugin.Tracker)tracker).ToFrustum();
}
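// Illustrative query (a sketch, assuming the usual OVRManager.tracker instance):
//     OVRTracker.Frustum f = OVRManager.tracker.GetFrustum(0);
//     Debug.Log("Sensor FOV: " + f.fov + " near: " + f.nearZ + " far: " + f.farZ);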
/// <summary>
/// Gets the sensor's pose, relative to the head's pose at the time of the last pose recentering.
/// </summary>
public OVRPose GetPose(int tracker = 0)
{
if (!OVRManager.isHmdPresent)
return OVRPose.identity;
OVRPose p;
switch (tracker)
{
case 0:
p = OVRPlugin.GetNodePose(OVRPlugin.Node.TrackerZero, OVRPlugin.Step.Render).ToOVRPose();
break;
case 1:
p = OVRPlugin.GetNodePose(OVRPlugin.Node.TrackerOne, OVRPlugin.Step.Render).ToOVRPose();
break;
case 2:
p = OVRPlugin.GetNodePose(OVRPlugin.Node.TrackerTwo, OVRPlugin.Step.Render).ToOVRPose();
break;
case 3:
p = OVRPlugin.GetNodePose(OVRPlugin.Node.TrackerThree, OVRPlugin.Step.Render).ToOVRPose();
break;
default:
return OVRPose.identity;
}
return new OVRPose()
{
position = p.position,
orientation = p.orientation * Quaternion.Euler(0, 180, 0)
};
}
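// Note: the orientation is rotated 180 degrees about Y before being returned, which
// appears intended to align the pose's forward axis with the sensor's viewing direction
// rather than the raw node pose.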
/// <summary>
/// If true, the pose of the sensor is valid and is ready to be queried.
/// </summary>
public bool GetPoseValid(int tracker = 0)
{
if (!OVRManager.isHmdPresent)
return false;
switch (tracker)
{
case 0:
return OVRPlugin.GetNodePositionTracked(OVRPlugin.Node.TrackerZero);
case 1:
return OVRPlugin.GetNodePositionTracked(OVRPlugin.Node.TrackerOne);
case 2:
return OVRPlugin.GetNodePositionTracked(OVRPlugin.Node.TrackerTwo);
case 3:
return OVRPlugin.GetNodePositionTracked(OVRPlugin.Node.TrackerThree);
default:
return false;
}
}
public bool GetPresent(int tracker = 0)
{
if (!OVRManager.isHmdPresent)
return false;
switch (tracker)
{
case 0:
return OVRPlugin.GetNodePresent(OVRPlugin.Node.TrackerZero);
case 1:
return OVRPlugin.GetNodePresent(OVRPlugin.Node.TrackerOne);
case 2:
return OVRPlugin.GetNodePresent(OVRPlugin.Node.TrackerTwo);
case 3:
return OVRPlugin.GetNodePresent(OVRPlugin.Node.TrackerThree);
default:
return false;
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 7cb3c9d4cb0970e448c655096649e814
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,34 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if USING_XR_MANAGEMENT && USING_XR_SDK_OCULUS
#define USING_XR_SDK
#endif
using System.Runtime.InteropServices;
// C# wrapper for Unity XR SDK Native APIs.
#if USING_XR_SDK
public static class OculusXRPlugin
{
[DllImport("OculusXRPlugin")]
public static extern void SetColorScale(float x, float y, float z, float w);
[DllImport("OculusXRPlugin")]
public static extern void SetColorOffset(float x, float y, float z, float w);
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 903a593623dfcbf4a81205c0f7386ea9
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,5 @@
fileFormatVersion: 2
guid: c0c7a593695f68e4bbe0cabb0f4f93f2
folderAsset: yes
DefaultImporter:
userData:

View File

@@ -0,0 +1,49 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class OVRAudioSourceTest : MonoBehaviour
{
public float period = 2.0f;
private float nextActionTime;
// Start is called before the first frame update
void Start()
{
Material templateMaterial = GetComponent<Renderer>().material;
Material newMaterial = Instantiate<Material>(templateMaterial);
newMaterial.color = Color.green;
GetComponent<Renderer>().material = newMaterial;
nextActionTime = Time.time + period;
}
// Update is called once per frame
void Update()
{
if (Time.time > nextActionTime)
{
nextActionTime = Time.time + period;
Material mat = GetComponent<Renderer>().material;
if (mat.color == Color.green)
{
mat.color = Color.red;
}
else
{
mat.color = Color.green;
}
AudioSource audioSource = GetComponent<AudioSource>();
if (audioSource == null)
{
Debug.LogError("Unable to find AudioSource");
}
else
{
audioSource.Play();
}
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 358b12a21a8aa9540b435051f334fe9b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,34 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// If a game object under the main camera should not be cloned for Mixed Reality Capture,
// attaching this component auto-destroys it after the MRC camera is cloned
public class OVRAutoDestroyInMRC : MonoBehaviour {
// Use this for initialization
void Start () {
bool underMrcCamera = false;
Transform p = transform.parent;
while (p != null)
{
if (p.gameObject.name.StartsWith("OculusMRC_"))
{
underMrcCamera = true;
break;
}
p = p.parent;
}
if (underMrcCamera)
{
Destroy(gameObject);
}
}
// Update is called once per frame
void Update () {
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 42a68265e2d624d49ae7fced6a7e4d91
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,51 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Allows you to toggle chromatic aberration correction with a gamepad button press.
/// </summary>
public class OVRChromaticAberration : MonoBehaviour
{
/// <summary>
/// The button that will toggle chromatic aberration correction.
/// </summary>
public OVRInput.RawButton toggleButton = OVRInput.RawButton.X;
private bool chromatic = false;
void Start ()
{
// Enable/Disable Chromatic Aberration Correction.
// NOTE: Enabling Chromatic Aberration for mobile has a large performance cost.
OVRManager.instance.chromatic = chromatic;
}
void Update()
{
// NOTE: some of the buttons defined in OVRInput.RawButton are not available on the Android game pad controller
if (OVRInput.GetDown(toggleButton))
{
//*************************
// toggle chromatic aberration correction
//*************************
chromatic = !chromatic;
OVRManager.instance.chromatic = chromatic;
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 3b56515a831f2fb44bc7ae02679aeebc
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,137 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
/// <summary>
/// Simple helper script that conditionally enables rendering of a controller if it is connected.
/// </summary>
public class OVRControllerHelper : MonoBehaviour
{
/// <summary>
/// The root GameObject that represents the Oculus Touch for Quest And RiftS Controller model (Left).
/// </summary>
public GameObject m_modelOculusTouchQuestAndRiftSLeftController;
/// <summary>
/// The root GameObject that represents the Oculus Touch for Quest And RiftS Controller model (Right).
/// </summary>
public GameObject m_modelOculusTouchQuestAndRiftSRightController;
/// <summary>
/// The root GameObject that represents the Oculus Touch for Rift Controller model (Left).
/// </summary>
public GameObject m_modelOculusTouchRiftLeftController;
/// <summary>
/// The root GameObject that represents the Oculus Touch for Rift Controller model (Right).
/// </summary>
public GameObject m_modelOculusTouchRiftRightController;
/// <summary>
/// The root GameObject that represents the Oculus Touch for Quest 2 Controller model (Left).
/// </summary>
public GameObject m_modelOculusTouchQuest2LeftController;
/// <summary>
/// The root GameObject that represents the Oculus Touch for Quest 2 Controller model (Right).
/// </summary>
public GameObject m_modelOculusTouchQuest2RightController;
/// <summary>
/// The controller that determines whether or not to enable rendering of the controller model.
/// </summary>
public OVRInput.Controller m_controller;
private enum ControllerType
{
QuestAndRiftS = 1,
Rift = 2,
Quest2 = 3,
}
private ControllerType activeControllerType = ControllerType.Rift;
private bool m_prevControllerConnected = false;
private bool m_prevControllerConnectedCached = false;
void Start()
{
OVRPlugin.SystemHeadset headset = OVRPlugin.GetSystemHeadsetType();
switch (headset)
{
case OVRPlugin.SystemHeadset.Rift_CV1:
activeControllerType = ControllerType.Rift;
break;
case OVRPlugin.SystemHeadset.Oculus_Quest_2:
activeControllerType = ControllerType.Quest2;
break;
default:
activeControllerType = ControllerType.QuestAndRiftS;
break;
}
Debug.LogFormat("OVRControllerHelp: Active controller type: {0} for product {1}", activeControllerType, OVRPlugin.productName);
// Hide all controller models until a controller is connected
m_modelOculusTouchQuestAndRiftSLeftController.SetActive(false);
m_modelOculusTouchQuestAndRiftSRightController.SetActive(false);
m_modelOculusTouchRiftLeftController.SetActive(false);
m_modelOculusTouchRiftRightController.SetActive(false);
m_modelOculusTouchQuest2LeftController.SetActive(false);
m_modelOculusTouchQuest2RightController.SetActive(false);
}
void Update()
{
bool controllerConnected = OVRInput.IsControllerConnected(m_controller);
if ((controllerConnected != m_prevControllerConnected) || !m_prevControllerConnectedCached)
{
if (activeControllerType == ControllerType.Rift)
{
m_modelOculusTouchQuestAndRiftSLeftController.SetActive(false);
m_modelOculusTouchQuestAndRiftSRightController.SetActive(false);
m_modelOculusTouchRiftLeftController.SetActive(controllerConnected && (m_controller == OVRInput.Controller.LTouch));
m_modelOculusTouchRiftRightController.SetActive(controllerConnected && (m_controller == OVRInput.Controller.RTouch));
m_modelOculusTouchQuest2LeftController.SetActive(false);
m_modelOculusTouchQuest2RightController.SetActive(false);
}
else if (activeControllerType == ControllerType.Quest2)
{
m_modelOculusTouchQuestAndRiftSLeftController.SetActive(false);
m_modelOculusTouchQuestAndRiftSRightController.SetActive(false);
m_modelOculusTouchRiftLeftController.SetActive(false);
m_modelOculusTouchRiftRightController.SetActive(false);
m_modelOculusTouchQuest2LeftController.SetActive(controllerConnected && (m_controller == OVRInput.Controller.LTouch));
m_modelOculusTouchQuest2RightController.SetActive(controllerConnected && (m_controller == OVRInput.Controller.RTouch));
}
else /*if (activeControllerType == ControllerType.QuestAndRiftS)*/
{
m_modelOculusTouchQuestAndRiftSLeftController.SetActive(controllerConnected && (m_controller == OVRInput.Controller.LTouch));
m_modelOculusTouchQuestAndRiftSRightController.SetActive(controllerConnected && (m_controller == OVRInput.Controller.RTouch));
m_modelOculusTouchRiftLeftController.SetActive(false);
m_modelOculusTouchRiftRightController.SetActive(false);
m_modelOculusTouchQuest2LeftController.SetActive(false);
m_modelOculusTouchQuest2RightController.SetActive(false);
}
m_prevControllerConnected = controllerConnected;
m_prevControllerConnectedCached = true;
}
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: aed62bf3ae2456c408f247f96808ce96
timeCreated: 1486166271
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,195 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
using System.Collections.Generic;
using System.Text;
public class OVRControllerTest : MonoBehaviour
{
public class BoolMonitor
{
public delegate bool BoolGenerator();
private string m_name = "";
private BoolGenerator m_generator;
private bool m_prevValue = false;
private bool m_currentValue = false;
private bool m_currentValueRecentlyChanged = false;
private float m_displayTimeout = 0.0f;
private float m_displayTimer = 0.0f;
public BoolMonitor(string name, BoolGenerator generator, float displayTimeout = 0.5f)
{
m_name = name;
m_generator = generator;
m_displayTimeout = displayTimeout;
}
public void Update()
{
m_prevValue = m_currentValue;
m_currentValue = m_generator();
if (m_currentValue != m_prevValue)
{
m_currentValueRecentlyChanged = true;
m_displayTimer = m_displayTimeout;
}
if (m_displayTimer > 0.0f)
{
m_displayTimer -= Time.deltaTime;
if (m_displayTimer <= 0.0f)
{
m_currentValueRecentlyChanged = false;
m_displayTimer = 0.0f;
}
}
}
public void AppendToStringBuilder(ref StringBuilder sb)
{
sb.Append(m_name);
if (m_currentValue && m_currentValueRecentlyChanged)
sb.Append(": *True*\n");
else if (m_currentValue)
sb.Append(": True \n");
else if (!m_currentValue && m_currentValueRecentlyChanged)
sb.Append(": *False*\n");
else if (!m_currentValue)
sb.Append(": False \n");
}
}
public Text uiText;
private List<BoolMonitor> monitors;
private StringBuilder data;
void Start()
{
if (uiText != null)
{
uiText.supportRichText = false;
}
data = new StringBuilder(2048);
monitors = new List<BoolMonitor>()
{
// virtual
new BoolMonitor("One", () => OVRInput.Get(OVRInput.Button.One)),
new BoolMonitor("OneDown", () => OVRInput.GetDown(OVRInput.Button.One)),
new BoolMonitor("OneUp", () => OVRInput.GetUp(OVRInput.Button.One)),
new BoolMonitor("One (Touch)", () => OVRInput.Get(OVRInput.Touch.One)),
new BoolMonitor("OneDown (Touch)", () => OVRInput.GetDown(OVRInput.Touch.One)),
new BoolMonitor("OneUp (Touch)", () => OVRInput.GetUp(OVRInput.Touch.One)),
new BoolMonitor("Two", () => OVRInput.Get(OVRInput.Button.Two)),
new BoolMonitor("TwoDown", () => OVRInput.GetDown(OVRInput.Button.Two)),
new BoolMonitor("TwoUp", () => OVRInput.GetUp(OVRInput.Button.Two)),
new BoolMonitor("PrimaryIndexTrigger", () => OVRInput.Get(OVRInput.Button.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryIndexTriggerDown", () => OVRInput.GetDown(OVRInput.Button.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryIndexTriggerUp", () => OVRInput.GetUp(OVRInput.Button.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryIndexTrigger (Touch)", () => OVRInput.Get(OVRInput.Touch.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryIndexTriggerDown (Touch)", () => OVRInput.GetDown(OVRInput.Touch.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryIndexTriggerUp (Touch)", () => OVRInput.GetUp(OVRInput.Touch.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryHandTrigger", () => OVRInput.Get(OVRInput.Button.PrimaryHandTrigger)),
new BoolMonitor("PrimaryHandTriggerDown", () => OVRInput.GetDown(OVRInput.Button.PrimaryHandTrigger)),
new BoolMonitor("PrimaryHandTriggerUp", () => OVRInput.GetUp(OVRInput.Button.PrimaryHandTrigger)),
new BoolMonitor("Up", () => OVRInput.Get(OVRInput.Button.Up)),
new BoolMonitor("Down", () => OVRInput.Get(OVRInput.Button.Down)),
new BoolMonitor("Left", () => OVRInput.Get(OVRInput.Button.Left)),
new BoolMonitor("Right", () => OVRInput.Get(OVRInput.Button.Right)),
// raw
new BoolMonitor("Start", () => OVRInput.Get(OVRInput.RawButton.Start)),
new BoolMonitor("StartDown", () => OVRInput.GetDown(OVRInput.RawButton.Start)),
new BoolMonitor("StartUp", () => OVRInput.GetUp(OVRInput.RawButton.Start)),
new BoolMonitor("Back", () => OVRInput.Get(OVRInput.RawButton.Back)),
new BoolMonitor("BackDown", () => OVRInput.GetDown(OVRInput.RawButton.Back)),
new BoolMonitor("BackUp", () => OVRInput.GetUp(OVRInput.RawButton.Back)),
new BoolMonitor("A", () => OVRInput.Get(OVRInput.RawButton.A)),
new BoolMonitor("ADown", () => OVRInput.GetDown(OVRInput.RawButton.A)),
new BoolMonitor("AUp", () => OVRInput.GetUp(OVRInput.RawButton.A)),
};
}
static string prevConnected = "";
static BoolMonitor controllers = new BoolMonitor("Controllers Changed", () => { return OVRInput.GetConnectedControllers().ToString() != prevConnected; });
void Update()
{
OVRInput.Controller activeController = OVRInput.GetActiveController();
data.Length = 0;
byte battery = OVRInput.GetControllerBatteryPercentRemaining();
data.AppendFormat("Battery: {0}\n", battery);
float framerate = OVRPlugin.GetAppFramerate();
data.AppendFormat("Framerate: {0:F2}\n", framerate);
string activeControllerName = activeController.ToString();
data.AppendFormat("Active: {0}\n", activeControllerName);
string connectedControllerNames = OVRInput.GetConnectedControllers().ToString();
data.AppendFormat("Connected: {0}\n", connectedControllerNames);
data.AppendFormat("PrevConnected: {0}\n", prevConnected);
controllers.Update();
controllers.AppendToStringBuilder(ref data);
prevConnected = connectedControllerNames;
Quaternion rot = OVRInput.GetLocalControllerRotation(activeController);
data.AppendFormat("Orientation: ({0:F2}, {1:F2}, {2:F2}, {3:F2})\n", rot.x, rot.y, rot.z, rot.w);
Vector3 angVel = OVRInput.GetLocalControllerAngularVelocity(activeController);
data.AppendFormat("AngVel: ({0:F2}, {1:F2}, {2:F2})\n", angVel.x, angVel.y, angVel.z);
Vector3 angAcc = OVRInput.GetLocalControllerAngularAcceleration(activeController);
data.AppendFormat("AngAcc: ({0:F2}, {1:F2}, {2:F2})\n", angAcc.x, angAcc.y, angAcc.z);
Vector3 pos = OVRInput.GetLocalControllerPosition(activeController);
data.AppendFormat("Position: ({0:F2}, {1:F2}, {2:F2})\n", pos.x, pos.y, pos.z);
Vector3 vel = OVRInput.GetLocalControllerVelocity(activeController);
data.AppendFormat("Vel: ({0:F2}, {1:F2}, {2:F2})\n", vel.x, vel.y, vel.z);
Vector3 acc = OVRInput.GetLocalControllerAcceleration(activeController);
data.AppendFormat("Acc: ({0:F2}, {1:F2}, {2:F2})\n", acc.x, acc.y, acc.z);
float indexTrigger = OVRInput.Get(OVRInput.Axis1D.PrimaryIndexTrigger);
data.AppendFormat("PrimaryIndexTriggerAxis1D: ({0:F2})\n", indexTrigger);
float handTrigger = OVRInput.Get(OVRInput.Axis1D.PrimaryHandTrigger);
data.AppendFormat("PrimaryHandTriggerAxis1D: ({0:F2})\n", handTrigger);
for (int i = 0; i < monitors.Count; i++)
{
monitors[i].Update();
monitors[i].AppendToStringBuilder(ref data);
}
if (uiText != null)
{
uiText.text = data.ToString();
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e0a6abd1cb88e9245bd78dac49d7fd6e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,284 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
using System.IO;
/// <summary>
/// Helper script for capturing a cubemap and saving it to a PNG or JPG file
/// </summary>
/// <description>
/// How it works:
/// 1) This script can be attached to an existing game object; you can also use the prefab Assets\OVR\Prefabs\OVRCubemapCaptureProbe
/// There are 2 ways to trigger a capture if you attach this script to a game object.
/// * Automatic capturing: if [autoTriggerAfterLaunch] is true, an automatic capture is triggered after [autoTriggerDelay] seconds.
/// * Keyboard trigger: press the key [triggeredByKey] and a capture will be triggered.
/// 2) If you'd like to trigger the capture from your own code logic, just call the static function [OVRCubemapCapture.TriggerCubemapCapture] with the proper input arguments.
/// </description>
public class OVRCubemapCapture : MonoBehaviour
{
/// <summary>
/// Enable the automatic screenshot trigger, which will capture a cubemap after autoTriggerDelay (seconds)
/// </summary>
public bool autoTriggerAfterLaunch = true;
public float autoTriggerDelay = 1.0f;
private float autoTriggerElapse = 0.0f;
/// <summary>
/// Trigger a cubemap screenshot if the user presses the key triggeredByKey
/// </summary>
public KeyCode triggeredByKey = KeyCode.F8;
/// <summary>
/// The complete file path for saving the cubemap screenshot, including the filename and extension
/// If pathName is blank, screenshots are saved under [Application.persistentDataPath]/OVR_ScreenShot360 (see SaveCubemapCapture below)
/// </summary>
public string pathName;
/// <summary>
/// The cube face resolution
/// </summary>
public int cubemapSize = 2048;
// Update is called once per frame
void Update()
{
// Trigger after autoTriggerDelay
if (autoTriggerAfterLaunch)
{
autoTriggerElapse += Time.deltaTime;
if (autoTriggerElapse >= autoTriggerDelay)
{
autoTriggerAfterLaunch = false;
TriggerCubemapCapture(transform.position, cubemapSize, pathName);
}
}
		// Trigger on triggeredByKey press
if ( Input.GetKeyDown( triggeredByKey ) )
{
TriggerCubemapCapture(transform.position, cubemapSize, pathName);
}
}
/// <summary>
/// Generate a Unity cubemap at a specific location and save it as JPG/PNG
/// </summary>
/// <description>
/// Default save folder: your app's persistentDataPath
/// Default file name: based on the current time, OVR_hh_mm_ss.png
/// Note 1: this will take a few seconds to finish
/// Note 2: if you only want to specify the path, not the filename, end [pathName] with "/"
/// </description>
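/// <example>
/// A minimal usage sketch (the resolution and path below are illustrative, not SDK defaults):
///   OVRCubemapCapture.TriggerCubemapCapture(transform.position, 1024, "Screenshots/room.png");
/// </example>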
public static void TriggerCubemapCapture(Vector3 capturePos, int cubemapSize = 2048, string pathName = null)
{
GameObject ownerObj = new GameObject("CubemapCamera", typeof(Camera));
ownerObj.hideFlags = HideFlags.HideAndDontSave;
ownerObj.transform.position = capturePos;
ownerObj.transform.rotation = Quaternion.identity;
Camera camComponent = ownerObj.GetComponent<Camera>();
camComponent.farClipPlane = 10000.0f;
camComponent.enabled = false;
Cubemap cubemap = new Cubemap(cubemapSize, TextureFormat.RGB24, false);
RenderIntoCubemap(camComponent, cubemap);
SaveCubemapCapture(cubemap, pathName);
DestroyImmediate(cubemap);
DestroyImmediate(ownerObj);
}
public static void RenderIntoCubemap(Camera ownerCamera, Cubemap outCubemap)
{
int width = (int)outCubemap.width;
int height = (int)outCubemap.height;
CubemapFace[] faces = new CubemapFace[] { CubemapFace.PositiveX, CubemapFace.NegativeX, CubemapFace.PositiveY, CubemapFace.NegativeY, CubemapFace.PositiveZ, CubemapFace.NegativeZ };
Vector3[] faceAngles = new Vector3[] { new Vector3(0.0f, 90.0f, 0.0f), new Vector3(0.0f, -90.0f, 0.0f), new Vector3(-90.0f, 0.0f, 0.0f), new Vector3(90.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 180.0f, 0.0f) };
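		// Each Euler rotation aims the camera down the matching cubemap axis: yaw +/-90 for +X/-X, pitch -90/+90 for +Y/-Y, yaw 0/180 for +Z/-Z.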
// Backup states
RenderTexture backupRenderTex = RenderTexture.active;
float backupFieldOfView = ownerCamera.fieldOfView;
float backupAspect = ownerCamera.aspect;
Quaternion backupRot = ownerCamera.transform.rotation;
		RenderTexture backupRT = ownerCamera.targetTexture;
// Enable 8X MSAA
RenderTexture faceTexture = new RenderTexture(width, height, 24);
faceTexture.antiAliasing = 8;
faceTexture.dimension = UnityEngine.Rendering.TextureDimension.Tex2D;
faceTexture.hideFlags = HideFlags.HideAndDontSave;
// For intermediate saving
Texture2D swapTex = new Texture2D(width, height, TextureFormat.RGB24, false);
swapTex.hideFlags = HideFlags.HideAndDontSave;
// Capture 6 Directions
ownerCamera.targetTexture = faceTexture;
ownerCamera.fieldOfView = 90;
ownerCamera.aspect = 1.0f;
Color[] mirroredPixels = new Color[swapTex.height * swapTex.width];
for (int i = 0; i < faces.Length; i++)
{
ownerCamera.transform.eulerAngles = faceAngles[i];
ownerCamera.Render();
RenderTexture.active = faceTexture;
swapTex.ReadPixels(new Rect(0, 0, width, height), 0, 0);
			// Mirror vertically to match Unity's cubemap convention
			Color[] originalPixels = swapTex.GetPixels();
for (int y1 = 0; y1 < height; y1++)
{
for (int x1 = 0; x1 < width; x1++)
{
mirroredPixels[y1 * width + x1] = originalPixels[((height - 1 - y1) * width) + x1];
}
}
outCubemap.SetPixels(mirroredPixels, faces[i]);
}
outCubemap.SmoothEdges();
// Restore states
RenderTexture.active = backupRenderTex;
ownerCamera.fieldOfView = backupFieldOfView;
ownerCamera.aspect = backupAspect;
ownerCamera.transform.rotation = backupRot;
		ownerCamera.targetTexture = backupRT;
DestroyImmediate(swapTex);
DestroyImmediate(faceTexture);
}
/// <summary>
/// Save a Unity cubemap into an NPOT 6x1 texture atlas with faces in the order PX NX PY NY PZ NZ
/// </summary>
/// <description>
/// Supported formats: PNG/JPG
/// Default file name: based on the current time, OVR_hh_mm_ss.png
/// </description>
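/// <example>
/// A hedged usage sketch (the cubemap variable and path are illustrative):
///   bool saved = OVRCubemapCapture.SaveCubemapCapture(myCubemap, "Screenshots/pano.jpg");
/// </example>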
public static bool SaveCubemapCapture(Cubemap cubemap, string pathName = null)
{
string fileName;
string dirName;
int width = cubemap.width;
int height = cubemap.height;
int x = 0;
int y = 0;
bool saveToPNG = true;
if (string.IsNullOrEmpty(pathName))
{
dirName = Application.persistentDataPath + "/OVR_ScreenShot360/";
fileName = null;
}
else
{
dirName = Path.GetDirectoryName(pathName);
fileName = Path.GetFileName(pathName);
			if (dirName[dirName.Length - 1] != '/' && dirName[dirName.Length - 1] != '\\')
dirName += "/";
}
if (string.IsNullOrEmpty(fileName))
fileName = "OVR_" + System.DateTime.Now.ToString("hh_mm_ss") + ".png";
string extName = Path.GetExtension(fileName);
if (extName == ".png")
{
saveToPNG = true;
}
else if (extName == ".jpg")
{
saveToPNG = false;
}
else
{
Debug.LogError("Unsupported file format" + extName);
return false;
}
// Validate path
try
{
System.IO.Directory.CreateDirectory(dirName);
}
catch (System.Exception e)
{
Debug.LogError("Failed to create path " + dirName + " since " + e.ToString());
return false;
}
// Create the new texture
Texture2D tex = new Texture2D(width * 6, height, TextureFormat.RGB24, false);
if (tex == null)
{
Debug.LogError("[OVRScreenshotWizard] Failed creating the texture!");
return false;
}
// Merge all the cubemap faces into the texture
// Reference cubemap format: http://docs.unity3d.com/Manual/class-Cubemap.html
CubemapFace[] faces = new CubemapFace[] { CubemapFace.PositiveX, CubemapFace.NegativeX, CubemapFace.PositiveY, CubemapFace.NegativeY, CubemapFace.PositiveZ, CubemapFace.NegativeZ };
for (int i = 0; i < faces.Length; i++)
{
// get the pixels from the cubemap
Color[] srcPixels = null;
Color[] pixels = cubemap.GetPixels(faces[i]);
			// flip the rows: GetPixels returns them ordered left to right, bottom to top
srcPixels = new Color[pixels.Length];
for (int y1 = 0; y1 < height; y1++)
{
for (int x1 = 0; x1 < width; x1++)
{
srcPixels[y1 * width + x1] = pixels[((height - 1 - y1) * width) + x1];
}
}
// Copy them to the dest texture
tex.SetPixels(x, y, width, height, srcPixels);
x += width;
}
try
{
// Encode the texture and save it to disk
byte[] bytes = saveToPNG ? tex.EncodeToPNG() : tex.EncodeToJPG();
System.IO.File.WriteAllBytes(dirName + fileName, bytes);
Debug.Log("Cubemap file created " + dirName + fileName);
}
catch (System.Exception e)
{
Debug.LogError("Failed to save cubemap file since " + e.ToString());
return false;
}
DestroyImmediate(tex);
return true;
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 7a489178b0acf0147846b3873447beaf
timeCreated: 1464728890
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,28 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
using UnityEngine.EventSystems;
using UnityEngine.UI;
public abstract class OVRCursor : MonoBehaviour
{
public abstract void SetCursorRay(Transform ray);
public abstract void SetCursorStartDest(Vector3 start, Vector3 dest, Vector3 normal);
}
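// A minimal concrete cursor might look like the sketch below (hypothetical example, not part of the SDK);
// OVRGazePointer, later in this commit, is the full-featured implementation.
//
// public class SimpleCursor : OVRCursor
// {
//     // Follow the ray origin directly.
//     public override void SetCursorRay(Transform ray)
//     {
//         transform.SetPositionAndRotation(ray.position, ray.rotation);
//     }
//     // Snap to the reported hit point; ignore start and normal.
//     public override void SetCursorStartDest(Vector3 start, Vector3 dest, Vector3 normal)
//     {
//         transform.position = dest;
//     }
// }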

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f2233ce673fcb9f41bd0753f867b7f70
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,114 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
[DefaultExecutionOrder(-80)]
public class OVRCustomSkeleton : OVRSkeleton
{
[SerializeField]
private List<Transform> _customBones = new List<Transform>(new Transform[(int)BoneId.Max]);
#if UNITY_EDITOR
private static readonly string[] _fbxBoneNames =
{
"wrist",
"forearm_stub",
"thumb0",
"thumb1",
"thumb2",
"thumb3",
"index1",
"index2",
"index3",
"middle1",
"middle2",
"middle3",
"ring1",
"ring2",
"ring3",
"pinky0",
"pinky1",
"pinky2",
"pinky3"
};
private static readonly string[] _fbxFingerNames =
{
"thumb",
"index",
"middle",
"ring",
"pinky"
};
private static readonly string[] _handPrefix = { "l_", "r_" };
#endif
public List<Transform> CustomBones { get { return _customBones; } }
#if UNITY_EDITOR
public void TryAutoMapBonesByName()
{
BoneId start = GetCurrentStartBoneId();
BoneId end = GetCurrentEndBoneId();
SkeletonType skeletonType = GetSkeletonType();
if (start != BoneId.Invalid && end != BoneId.Invalid)
{
for (int bi = (int)start; bi < (int)end; ++bi)
{
string fbxBoneName = FbxBoneNameFromBoneId(skeletonType, (BoneId)bi);
Transform t = transform.FindChildRecursive(fbxBoneName);
if (t != null)
{
_customBones[(int)bi] = t;
}
}
}
}
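	// For example, assuming SkeletonType indexes _handPrefix as left = 0, right = 1: a left-hand Hand_Index1
	// resolves to "b_l_index1", while a left-hand Hand_IndexTip resolves to "l_index_finger_tip_marker".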
private static string FbxBoneNameFromBoneId(SkeletonType skeletonType, BoneId bi)
{
if (bi >= BoneId.Hand_ThumbTip && bi <= BoneId.Hand_PinkyTip)
{
return _handPrefix[(int)skeletonType] + _fbxFingerNames[(int)bi - (int)BoneId.Hand_ThumbTip] + "_finger_tip_marker";
}
else
{
return "b_" + _handPrefix[(int)skeletonType] + _fbxBoneNames[(int)bi];
}
}
#endif
protected override void InitializeBones(OVRPlugin.Skeleton skeleton)
{
_bones = new List<OVRBone>(new OVRBone[skeleton.NumBones]);
Bones = _bones.AsReadOnly();
for (int i = 0; i < skeleton.NumBones; ++i)
{
BoneId id = (BoneId)skeleton.Bones[i].Id;
short parentIdx = skeleton.Bones[i].ParentBoneIndex;
Transform t = _customBones[(int)id];
_bones[i] = new OVRBone(id, parentIdx, t);
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 674a40251fe8ad841b18517ac5209957
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,422 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
using UnityEngine.UI;
//-------------------------------------------------------------------------------------
/// <summary>
/// Shows debug information on a heads-up display.
/// </summary>
public class OVRDebugInfo : MonoBehaviour
{
#region GameObjects for Debug Information UIs
GameObject debugUIManager;
GameObject debugUIObject;
GameObject riftPresent;
GameObject fps;
GameObject ipd;
GameObject fov;
GameObject height;
GameObject depth;
GameObject resolutionEyeTexture;
GameObject latencies;
GameObject texts;
#endregion
#region Debug strings
string strRiftPresent = null; // "VR DISABLED"
string strFPS = null; // "FPS: 0";
string strIPD = null; // "IPD: 0.000";
string strFOV = null; // "FOV: 0.0f";
string strHeight = null; // "Height: 0.0f";
string strDepth = null; // "Depth: 0.0f";
string strResolutionEyeTexture = null; // "Resolution : {0} x {1}"
string strLatencies = null; // "R: {0:F3} TW: {1:F3} PP: {2:F3} RE: {3:F3} TWE: {4:F3}"
#endregion
/// <summary>
/// Variables for FPS
/// </summary>
float updateInterval = 0.5f;
float accum = 0.0f;
int frames = 0;
float timeLeft = 0.0f;
/// <summary>
/// Flags managing UI initialization
/// </summary>
bool initUIComponent = false;
bool isInited = false;
/// <summary>
/// Y offset between UI entries
/// </summary>
float offsetY = 55.0f;
/// <summary>
/// Timeout management for the Rift-detection UI
/// </summary>
float riftPresentTimeout = 0.0f;
/// <summary>
/// Turn on / off VR variables
/// </summary>
bool showVRVars = false;
#region MonoBehaviour handler
/// <summary>
/// Initialization
/// </summary>
void Awake()
{
// Create canvas for using new GUI
debugUIManager = new GameObject();
debugUIManager.name = "DebugUIManager";
debugUIManager.transform.parent = GameObject.Find("LeftEyeAnchor").transform;
RectTransform rectTransform = debugUIManager.AddComponent<RectTransform>();
rectTransform.sizeDelta = new Vector2(100f, 100f);
rectTransform.localScale = new Vector3(0.001f, 0.001f, 0.001f);
rectTransform.localPosition = new Vector3(0.01f, 0.17f, 0.53f);
rectTransform.localEulerAngles = Vector3.zero;
Canvas canvas = debugUIManager.AddComponent<Canvas>();
canvas.renderMode = RenderMode.WorldSpace;
canvas.pixelPerfect = false;
}
/// <summary>
/// Updates VR variables and manages UI presentation
/// </summary>
void Update()
{
if (initUIComponent && !isInited)
{
InitUIComponents();
}
if (Input.GetKeyDown(KeyCode.Space) && riftPresentTimeout < 0.0f)
{
initUIComponent = true;
showVRVars ^= true;
}
UpdateDeviceDetection();
// Presenting VR variables
if (showVRVars)
{
debugUIManager.SetActive(true);
UpdateVariable();
UpdateStrings();
}
else
{
debugUIManager.SetActive(false);
}
}
/// <summary>
/// Initialize isInited value on OnDestroy
/// </summary>
void OnDestroy()
{
isInited = false;
}
#endregion
#region Private Functions
/// <summary>
/// Initialize UI GameObjects
/// </summary>
void InitUIComponents()
{
float posY = 0.0f;
int fontSize = 20;
debugUIObject = new GameObject();
debugUIObject.name = "DebugInfo";
debugUIObject.transform.parent = GameObject.Find("DebugUIManager").transform;
debugUIObject.transform.localPosition = new Vector3(0.0f, 100.0f, 0.0f);
debugUIObject.transform.localEulerAngles = Vector3.zero;
debugUIObject.transform.localScale = new Vector3(1.0f, 1.0f, 1.0f);
// Print out for FPS
if (!string.IsNullOrEmpty(strFPS))
{
fps = VariableObjectManager(fps, "FPS", posY -= offsetY, strFPS, fontSize);
}
// Print out for IPD
if (!string.IsNullOrEmpty(strIPD))
{
ipd = VariableObjectManager(ipd, "IPD", posY -= offsetY, strIPD, fontSize);
}
// Print out for FOV
if (!string.IsNullOrEmpty(strFOV))
{
fov = VariableObjectManager(fov, "FOV", posY -= offsetY, strFOV, fontSize);
}
// Print out for Height
if (!string.IsNullOrEmpty(strHeight))
{
height = VariableObjectManager(height, "Height", posY -= offsetY, strHeight, fontSize);
}
// Print out for Depth
if (!string.IsNullOrEmpty(strDepth))
{
depth = VariableObjectManager(depth, "Depth", posY -= offsetY, strDepth, fontSize);
}
		// Print out for Resolution of Eye Texture
if (!string.IsNullOrEmpty(strResolutionEyeTexture))
{
resolutionEyeTexture = VariableObjectManager(resolutionEyeTexture, "Resolution", posY -= offsetY, strResolutionEyeTexture, fontSize);
}
// Print out for Latency
if (!string.IsNullOrEmpty(strLatencies))
{
latencies = VariableObjectManager(latencies, "Latency", posY -= offsetY, strLatencies, 17);
posY = 0.0f;
}
initUIComponent = false;
isInited = true;
}
/// <summary>
/// Update VR Variables
/// </summary>
void UpdateVariable()
{
UpdateIPD();
UpdateEyeHeightOffset();
UpdateEyeDepthOffset();
UpdateFOV();
UpdateResolutionEyeTexture();
UpdateLatencyValues();
UpdateFPS();
}
/// <summary>
/// Update Strings
/// </summary>
void UpdateStrings()
{
if (debugUIObject == null)
return;
if (!string.IsNullOrEmpty(strFPS))
fps.GetComponentInChildren<Text>().text = strFPS;
if (!string.IsNullOrEmpty(strIPD))
ipd.GetComponentInChildren<Text>().text = strIPD;
if (!string.IsNullOrEmpty(strFOV))
fov.GetComponentInChildren<Text>().text = strFOV;
if (!string.IsNullOrEmpty(strResolutionEyeTexture))
resolutionEyeTexture.GetComponentInChildren<Text>().text = strResolutionEyeTexture;
if (!string.IsNullOrEmpty(strLatencies))
{
latencies.GetComponentInChildren<Text>().text = strLatencies;
latencies.GetComponentInChildren<Text>().fontSize = 14;
}
if (!string.IsNullOrEmpty(strHeight))
height.GetComponentInChildren<Text>().text = strHeight;
if (!string.IsNullOrEmpty(strDepth))
depth.GetComponentInChildren<Text>().text = strDepth;
}
/// <summary>
/// Builds the Rift-present GUI element.
/// </summary>
	void RiftPresentGUI(GameObject guiMainObj)
	{
		riftPresent = ComponentComposition(riftPresent);
		riftPresent.transform.SetParent(guiMainObj.transform);
riftPresent.name = "RiftPresent";
RectTransform rectTransform = riftPresent.GetComponent<RectTransform>();
rectTransform.localPosition = new Vector3(0.0f, 0.0f, 0.0f);
rectTransform.localScale = new Vector3(1.0f, 1.0f, 1.0f);
rectTransform.localEulerAngles = Vector3.zero;
Text text = riftPresent.GetComponentInChildren<Text>();
text.text = strRiftPresent;
text.fontSize = 20;
}
/// <summary>
/// Updates the device detection.
/// </summary>
void UpdateDeviceDetection()
{
if (riftPresentTimeout >= 0.0f)
{
riftPresentTimeout -= Time.deltaTime;
}
}
/// <summary>
/// Object Manager for Variables
/// </summary>
/// <returns> gameobject for each Variable </returns>
GameObject VariableObjectManager(GameObject gameObject, string name, float posY, string str, int fontSize)
{
gameObject = ComponentComposition(gameObject);
gameObject.name = name;
gameObject.transform.SetParent(debugUIObject.transform);
RectTransform rectTransform = gameObject.GetComponent<RectTransform>();
rectTransform.localPosition = new Vector3(0.0f, posY -= offsetY, 0.0f);
Text text = gameObject.GetComponentInChildren<Text>();
text.text = str;
text.fontSize = fontSize;
gameObject.transform.localEulerAngles = Vector3.zero;
rectTransform.localScale = new Vector3(1.0f, 1.0f, 1.0f);
return gameObject;
}
/// <summary>
/// Component composition
/// </summary>
/// <returns> Composed gameobject. </returns>
GameObject ComponentComposition(GameObject GO)
{
GO = new GameObject();
GO.AddComponent<RectTransform>();
GO.AddComponent<CanvasRenderer>();
GO.AddComponent<Image>();
GO.GetComponent<RectTransform>().sizeDelta = new Vector2(350f, 50f);
GO.GetComponent<Image>().color = new Color(7f / 255f, 45f / 255f, 71f / 255f, 200f / 255f);
texts = new GameObject();
texts.AddComponent<RectTransform>();
texts.AddComponent<CanvasRenderer>();
texts.AddComponent<Text>();
texts.GetComponent<RectTransform>().sizeDelta = new Vector2(350f, 50f);
texts.GetComponent<Text>().font = Resources.GetBuiltinResource(typeof(Font), "Arial.ttf") as Font;
texts.GetComponent<Text>().alignment = TextAnchor.MiddleCenter;
texts.transform.SetParent(GO.transform);
texts.name = "TextBox";
return GO;
}
#endregion
#region Debugging variables handler
/// <summary>
/// Updates the IPD.
/// </summary>
void UpdateIPD()
{
strIPD = System.String.Format("IPD (mm): {0:F4}", OVRManager.profile.ipd * 1000.0f);
}
/// <summary>
/// Updates the eye height offset.
/// </summary>
void UpdateEyeHeightOffset()
{
float eyeHeight = OVRManager.profile.eyeHeight;
strHeight = System.String.Format("Eye Height (m): {0:F3}", eyeHeight);
}
/// <summary>
/// Updates the eye depth offset.
/// </summary>
void UpdateEyeDepthOffset()
{
float eyeDepth = OVRManager.profile.eyeDepth;
strDepth = System.String.Format("Eye Depth (m): {0:F3}", eyeDepth);
}
/// <summary>
/// Updates the FOV.
/// </summary>
void UpdateFOV()
{
OVRDisplay.EyeRenderDesc eyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.XR.XRNode.LeftEye);
strFOV = System.String.Format("FOV (deg): {0:F3}", eyeDesc.fov.y);
}
/// <summary>
/// Updates resolution of eye texture
/// </summary>
void UpdateResolutionEyeTexture()
{
OVRDisplay.EyeRenderDesc leftEyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.XR.XRNode.LeftEye);
OVRDisplay.EyeRenderDesc rightEyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.XR.XRNode.RightEye);
		float scale = UnityEngine.XR.XRSettings.renderViewportScale;
		int w = (int)(scale * (leftEyeDesc.resolution.x + rightEyeDesc.resolution.x));
		int h = (int)(scale * Mathf.Max(leftEyeDesc.resolution.y, rightEyeDesc.resolution.y));
strResolutionEyeTexture = System.String.Format("Resolution : {0} x {1}", w, h);
}
/// <summary>
/// Updates latency values
/// </summary>
void UpdateLatencyValues()
{
#if !UNITY_ANDROID || UNITY_EDITOR
OVRDisplay.LatencyData latency = OVRManager.display.latency;
if (latency.render < 0.000001f && latency.timeWarp < 0.000001f && latency.postPresent < 0.000001f)
strLatencies = System.String.Format("Latency values are not available.");
else
strLatencies = System.String.Format("Render: {0:F3} TimeWarp: {1:F3} Post-Present: {2:F3}\nRender Error: {3:F3} TimeWarp Error: {4:F3}",
latency.render,
latency.timeWarp,
latency.postPresent,
latency.renderError,
latency.timeWarpError);
#endif
}
/// <summary>
/// Updates the FPS.
/// </summary>
void UpdateFPS()
{
timeLeft -= Time.unscaledDeltaTime;
accum += Time.unscaledDeltaTime;
++frames;
// Interval ended - update GUI text and start new interval
if (timeLeft <= 0.0)
{
// display two fractional digits (f2 format)
float fps = frames / accum;
strFPS = System.String.Format("FPS: {0:F2}", fps);
timeLeft += updateInterval;
accum = 0.0f;
frames = 0;
}
}
#endregion
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: b71d1996d67004241a3b69960856ffcb
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,277 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
using UnityEngine.EventSystems;
using UnityEngine.UI;
/// <summary>
/// UI pointer driven by gaze input.
/// </summary>
public class OVRGazePointer : OVRCursor {
private Transform gazeIcon; //the transform that rotates according to our movement
[Tooltip("Should the pointer be hidden when not over interactive objects.")]
public bool hideByDefault = true;
[Tooltip("Time after leaving interactive object before pointer fades.")]
public float showTimeoutPeriod = 1;
[Tooltip("Time after mouse pointer becoming inactive before pointer unfades.")]
public float hideTimeoutPeriod = 0.1f;
[Tooltip("Keep a faint version of the pointer visible while using a mouse")]
public bool dimOnHideRequest = true;
[Tooltip("Angular scale of pointer")]
public float depthScaleMultiplier = 0.03f;
public bool matchNormalOnPhysicsColliders;
/// <summary>
/// The gaze ray.
/// </summary>
public Transform rayTransform;
/// <summary>
/// Is the gaze pointer currently visible?
/// </summary>
public bool hidden { get; private set; }
/// <summary>
/// Current scale applied to pointer
/// </summary>
public float currentScale { get; private set; }
/// <summary>
/// Current depth of pointer from camera
/// </summary>
private float depth;
private float hideUntilTime;
/// <summary>
/// How many times position has been set this frame. Used to detect when there are no position sets in a frame.
/// </summary>
private int positionSetsThisFrame = 0;
/// <summary>
/// Last time code requested the pointer be shown. Usually when pointer passes over interactive elements.
/// </summary>
private float lastShowRequestTime;
/// <summary>
/// Last time pointer was requested to be hidden. Usually mouse pointer activity.
/// </summary>
private float lastHideRequestTime;
// Optionally present GUI element displaying progress when using gaze-to-select mechanics
private OVRProgressIndicator progressIndicator;
private static OVRGazePointer _instance;
public static OVRGazePointer instance
{
		// If there's no GazePointer already in the scene, instantiate one now.
get
{
if (_instance == null)
{
				Debug.Log("Instantiating GazePointer");
_instance = (OVRGazePointer)GameObject.Instantiate((OVRGazePointer)Resources.Load("Prefabs/GazePointerRing", typeof(OVRGazePointer)));
}
return _instance;
}
}
/// <summary>
/// Used to determine alpha level of gaze cursor. Could also be used to determine cursor size, for example, as the cursor fades out.
/// </summary>
public float visibilityStrength
{
get
{
			// It's possible there are reasons to show the cursor - such as it hovering over some UI - and reasons to hide
			// the cursor - such as another input method (e.g. mouse) being used. We take both of these into account.
float strengthFromShowRequest;
if (hideByDefault)
{
// fade the cursor out with time
strengthFromShowRequest = Mathf.Clamp01(1 - (Time.time - lastShowRequestTime) / showTimeoutPeriod);
}
else
{
// keep it fully visible
strengthFromShowRequest = 1;
}
// Now consider factors requesting pointer to be hidden
float strengthFromHideRequest;
strengthFromHideRequest = (lastHideRequestTime + hideTimeoutPeriod > Time.time) ? (dimOnHideRequest ? 0.1f : 0) : 1;
// Hide requests take priority
return Mathf.Min(strengthFromShowRequest, strengthFromHideRequest);
}
}
public float SelectionProgress
{
get
{
return progressIndicator ? progressIndicator.currentProgress : 0;
}
set
{
if (progressIndicator)
progressIndicator.currentProgress = value;
}
}
public void Awake()
{
currentScale = 1;
// Only allow one instance at runtime.
if (_instance != null && _instance != this)
{
enabled = false;
DestroyImmediate(this);
return;
}
_instance = this;
gazeIcon = transform.Find("GazeIcon");
progressIndicator = transform.GetComponent<OVRProgressIndicator>();
}
void Update ()
{
if (rayTransform == null && Camera.main != null)
rayTransform = Camera.main.transform;
// Move the gaze cursor to keep it in the middle of the view
transform.position = rayTransform.position + rayTransform.forward * depth;
// Should we show or hide the gaze cursor?
if (visibilityStrength == 0 && !hidden)
{
Hide();
}
else if (visibilityStrength > 0 && hidden)
{
Show();
}
}
/// <summary>
/// Set position and orientation of pointer
/// </summary>
/// <param name="pos"></param>
/// <param name="normal"></param>
public override void SetCursorStartDest(Vector3 _, Vector3 pos, Vector3 normal)
{
transform.position = pos;
if (!matchNormalOnPhysicsColliders) normal = rayTransform.forward;
// Set the rotation to match the normal of the surface it's on.
Quaternion newRot = transform.rotation;
newRot.SetLookRotation(normal, rayTransform.up);
transform.rotation = newRot;
// record depth so that distance doesn't pop when pointer leaves an object
depth = (rayTransform.position - pos).magnitude;
//set scale based on depth
currentScale = depth * depthScaleMultiplier;
transform.localScale = new Vector3(currentScale, currentScale, currentScale);
positionSetsThisFrame++;
RequestShow();
}
public override void SetCursorRay(Transform ray)
{
// We don't do anything here, because we already set this properly by default in Update.
}
void LateUpdate()
{
// This happens after all Updates so we know that if positionSetsThisFrame is zero then nothing set the position this frame
if (positionSetsThisFrame == 0)
{
// No geometry intersections, so gazing into space. Make the cursor face directly at the camera
Quaternion newRot = transform.rotation;
newRot.SetLookRotation(rayTransform.forward, rayTransform.up);
transform.rotation = newRot;
}
Quaternion iconRotation = gazeIcon.rotation;
iconRotation.SetLookRotation(transform.rotation * new Vector3(0, 0, 1));
gazeIcon.rotation = iconRotation;
positionSetsThisFrame = 0;
}
/// <summary>
/// Request the pointer be hidden
/// </summary>
public void RequestHide()
{
if (!dimOnHideRequest)
{
Hide();
}
lastHideRequestTime = Time.time;
}
/// <summary>
/// Request the pointer be shown. Hide requests take priority
/// </summary>
public void RequestShow()
{
Show();
lastShowRequestTime = Time.time;
}
// Disable/Enable child elements when we show/hide the cursor. For performance reasons.
void Hide()
{
foreach (Transform child in transform)
{
child.gameObject.SetActive(false);
}
if (GetComponent<Renderer>())
GetComponent<Renderer>().enabled = false;
hidden = true;
}
void Show()
{
foreach (Transform child in transform)
{
child.gameObject.SetActive(true);
}
if (GetComponent<Renderer>())
GetComponent<Renderer>().enabled = true;
hidden = false;
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 30530ad0e40d0a64ea26d753ee4996ea
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,22 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
public class OVRGearVrControllerTest : MonoBehaviour
{
// Deprecated since SDK 1.51
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 7acc4619d4cb5e64e9ed05e5a7a8099f
timeCreated: 1486173066
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,164 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System;
using UnityEngine;
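// Typical setup (illustrative): put this component on an object that also has a Collider and a Rigidbody;
// an OVRGrabber's trigger volumes then register it as a grab candidate via OnTriggerEnter (see OVRGrabber below).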
/// <summary>
/// An object that can be grabbed and thrown by OVRGrabber.
/// </summary>
public class OVRGrabbable : MonoBehaviour
{
[SerializeField]
protected bool m_allowOffhandGrab = true;
[SerializeField]
protected bool m_snapPosition = false;
[SerializeField]
protected bool m_snapOrientation = false;
[SerializeField]
protected Transform m_snapOffset;
[SerializeField]
protected Collider[] m_grabPoints = null;
protected bool m_grabbedKinematic = false;
protected Collider m_grabbedCollider = null;
protected OVRGrabber m_grabbedBy = null;
/// <summary>
/// If true, the object can currently be grabbed.
/// </summary>
public bool allowOffhandGrab
{
get { return m_allowOffhandGrab; }
}
/// <summary>
/// If true, the object is currently grabbed.
/// </summary>
public bool isGrabbed
{
get { return m_grabbedBy != null; }
}
/// <summary>
/// If true, the object's position will snap to match snapOffset when grabbed.
/// </summary>
public bool snapPosition
{
get { return m_snapPosition; }
}
/// <summary>
/// If true, the object's orientation will snap to match snapOffset when grabbed.
/// </summary>
public bool snapOrientation
{
get { return m_snapOrientation; }
}
/// <summary>
/// An offset relative to the OVRGrabber where this object can snap when grabbed.
/// </summary>
public Transform snapOffset
{
get { return m_snapOffset; }
}
/// <summary>
/// Returns the OVRGrabber currently grabbing this object.
/// </summary>
public OVRGrabber grabbedBy
{
get { return m_grabbedBy; }
}
/// <summary>
/// The transform at which this object was grabbed.
/// </summary>
public Transform grabbedTransform
{
get { return m_grabbedCollider.transform; }
}
/// <summary>
/// The Rigidbody of the collider that was used to grab this object.
/// </summary>
public Rigidbody grabbedRigidbody
{
get { return m_grabbedCollider.attachedRigidbody; }
}
/// <summary>
/// The contact point(s) where the object was grabbed.
/// </summary>
public Collider[] grabPoints
{
get { return m_grabPoints; }
}
/// <summary>
/// Notifies the object that it has been grabbed.
/// </summary>
	public virtual void GrabBegin(OVRGrabber hand, Collider grabPoint)
{
m_grabbedBy = hand;
m_grabbedCollider = grabPoint;
gameObject.GetComponent<Rigidbody>().isKinematic = true;
}
/// <summary>
/// Notifies the object that it has been released.
/// </summary>
	public virtual void GrabEnd(Vector3 linearVelocity, Vector3 angularVelocity)
{
Rigidbody rb = gameObject.GetComponent<Rigidbody>();
rb.isKinematic = m_grabbedKinematic;
rb.velocity = linearVelocity;
rb.angularVelocity = angularVelocity;
m_grabbedBy = null;
m_grabbedCollider = null;
}
void Awake()
{
if (m_grabPoints.Length == 0)
{
// Get the collider from the grabbable
Collider collider = this.GetComponent<Collider>();
if (collider == null)
{
throw new ArgumentException("Grabbables cannot have zero grab points and no collider -- please add a grab point or collider.");
}
// Create a default grab point
m_grabPoints = new Collider[1] { collider };
}
}
protected virtual void Start()
{
m_grabbedKinematic = GetComponent<Rigidbody>().isKinematic;
}
void OnDestroy()
{
if (m_grabbedBy != null)
{
// Notify the hand to release destroyed grabbables
m_grabbedBy.ForceRelease(this);
}
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 02d61468f8b77ae4b92c344bc9a600fb
timeCreated: 1481833527
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,418 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Licensed under the Oculus Master SDK License Version 1.0 (the "License"); you may not use
the Utilities SDK except in compliance with the License, which is provided at the time of installation
or download, or which otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/oculusmastersdk-1.0/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Allows grabbing and throwing of objects with the OVRGrabbable component on them.
/// </summary>
[RequireComponent(typeof(Rigidbody))]
public class OVRGrabber : MonoBehaviour
{
// Grip trigger thresholds for picking up objects, with some hysteresis.
public float grabBegin = 0.55f;
public float grabEnd = 0.35f;
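	// With the defaults above, flex must rise past 0.55 to begin a grab and then fall below 0.35 to release,
	// so jitter around a single threshold cannot rapidly toggle the grab state.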
// Demonstrates parenting the held object to the hand's transform when grabbed.
// When false, the grabbed object is moved every FixedUpdate using MovePosition.
// Note that MovePosition is required for proper physics simulation. If you set this to true, you can
// easily observe broken physics simulation by, for example, moving the bottom cube of a stacked
// tower and noting a complete loss of friction.
[SerializeField]
protected bool m_parentHeldObject = false;
// If true, this script will move the hand to the transform specified by m_parentTransform, using MovePosition in
// Update. This allows correct physics behavior, at the cost of some latency. In this usage scenario, you
// should NOT parent the hand to the hand anchor.
// (If m_moveHandPosition is false, this script will NOT update the game object's position.
// The hand gameObject can simply be attached to the hand anchor, which updates position in LateUpdate,
// gaining us a few ms of reduced latency.)
[SerializeField]
protected bool m_moveHandPosition = false;
// Child/attached transforms of the grabber, indicating where to snap held objects to (if you snap them).
// Also used for ranking grab targets in case of multiple candidates.
[SerializeField]
protected Transform m_gripTransform = null;
// Child/attached Colliders to detect candidate grabbable objects.
[SerializeField]
protected Collider[] m_grabVolumes = null;
// Should be OVRInput.Controller.LTouch or OVRInput.Controller.RTouch.
[SerializeField]
protected OVRInput.Controller m_controller;
// You can set this explicitly in the inspector if you're using m_moveHandPosition.
// Otherwise, you should typically leave this null and simply parent the hand to the hand anchor
// in your scene, using Unity's inspector.
[SerializeField]
protected Transform m_parentTransform;
[SerializeField]
protected GameObject m_player;
protected bool m_grabVolumeEnabled = true;
protected Vector3 m_lastPos;
protected Quaternion m_lastRot;
protected Quaternion m_anchorOffsetRotation;
protected Vector3 m_anchorOffsetPosition;
protected float m_prevFlex;
protected OVRGrabbable m_grabbedObj = null;
protected Vector3 m_grabbedObjectPosOff;
protected Quaternion m_grabbedObjectRotOff;
protected Dictionary<OVRGrabbable, int> m_grabCandidates = new Dictionary<OVRGrabbable, int>();
protected bool m_operatingWithoutOVRCameraRig = true;
/// <summary>
/// The currently grabbed object.
/// </summary>
public OVRGrabbable grabbedObject
{
get { return m_grabbedObj; }
}
public void ForceRelease(OVRGrabbable grabbable)
{
bool canRelease = (
(m_grabbedObj != null) &&
(m_grabbedObj == grabbable)
);
if (canRelease)
{
GrabEnd();
}
}
protected virtual void Awake()
{
m_anchorOffsetPosition = transform.localPosition;
m_anchorOffsetRotation = transform.localRotation;
if(!m_moveHandPosition)
{
// If we are being used with an OVRCameraRig, let it drive input updates, which may come from Update or FixedUpdate.
OVRCameraRig rig = transform.GetComponentInParent<OVRCameraRig>();
if (rig != null)
{
rig.UpdatedAnchors += (r) => {OnUpdatedAnchors();};
m_operatingWithoutOVRCameraRig = false;
}
}
}
protected virtual void Start()
{
m_lastPos = transform.position;
m_lastRot = transform.rotation;
if(m_parentTransform == null)
{
m_parentTransform = gameObject.transform;
}
// We're going to setup the player collision to ignore the hand collision.
SetPlayerIgnoreCollision(gameObject, true);
}
// Using Update instead of FixedUpdate. Doing this in FixedUpdate causes visible judder even with
// somewhat high tick rates, because variable numbers of ticks per frame will give hand poses of
// varying recency. We want a single hand pose sampled at the same time each frame.
// Note that this can lead to its own side effects. For example, if m_parentHeldObject is false, the
// grabbed objects will be moved with MovePosition. If this is called in Update while the physics
// tick rate is dramatically different from the application frame rate, other objects touched by
// the held object will see an incorrect velocity (because the move will occur over the time of the
// physics tick, not the render tick), and will respond to the incorrect velocity with potentially
// visible artifacts.
	public virtual void Update()
{
if (m_operatingWithoutOVRCameraRig)
{
OnUpdatedAnchors();
}
}
// Hands follow the touch anchors by calling MovePosition each frame to reach the anchor.
// This is done instead of parenting to achieve workable physics. If you don't require physics on
// your hands or held objects, you may wish to switch to parenting.
void OnUpdatedAnchors()
{
Vector3 destPos = m_parentTransform.TransformPoint(m_anchorOffsetPosition);
Quaternion destRot = m_parentTransform.rotation * m_anchorOffsetRotation;
if (m_moveHandPosition)
{
GetComponent<Rigidbody>().MovePosition(destPos);
GetComponent<Rigidbody>().MoveRotation(destRot);
}
if (!m_parentHeldObject)
{
MoveGrabbedObject(destPos, destRot);
}
m_lastPos = transform.position;
m_lastRot = transform.rotation;
float prevFlex = m_prevFlex;
// Update values from inputs
m_prevFlex = OVRInput.Get(OVRInput.Axis1D.PrimaryHandTrigger, m_controller);
CheckForGrabOrRelease(prevFlex);
}
void OnDestroy()
{
if (m_grabbedObj != null)
{
GrabEnd();
}
}
void OnTriggerEnter(Collider otherCollider)
{
// Get the grab trigger
OVRGrabbable grabbable = otherCollider.GetComponent<OVRGrabbable>() ?? otherCollider.GetComponentInParent<OVRGrabbable>();
if (grabbable == null) return;
// Add the grabbable
int refCount = 0;
m_grabCandidates.TryGetValue(grabbable, out refCount);
m_grabCandidates[grabbable] = refCount + 1;
}
void OnTriggerExit(Collider otherCollider)
{
OVRGrabbable grabbable = otherCollider.GetComponent<OVRGrabbable>() ?? otherCollider.GetComponentInParent<OVRGrabbable>();
if (grabbable == null) return;
// Remove the grabbable
int refCount = 0;
bool found = m_grabCandidates.TryGetValue(grabbable, out refCount);
if (!found)
{
return;
}
if (refCount > 1)
{
m_grabCandidates[grabbable] = refCount - 1;
}
else
{
m_grabCandidates.Remove(grabbable);
}
}
protected void CheckForGrabOrRelease(float prevFlex)
{
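		// A grab begins on the rising edge through grabBegin; a release fires on the falling edge through grabEnd.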
if ((m_prevFlex >= grabBegin) && (prevFlex < grabBegin))
{
GrabBegin();
}
else if ((m_prevFlex <= grabEnd) && (prevFlex > grabEnd))
{
GrabEnd();
}
}
protected virtual void GrabBegin()
{
float closestMagSq = float.MaxValue;
OVRGrabbable closestGrabbable = null;
Collider closestGrabbableCollider = null;
// Iterate grab candidates and find the closest grabbable candidate
foreach (OVRGrabbable grabbable in m_grabCandidates.Keys)
{
bool canGrab = !(grabbable.isGrabbed && !grabbable.allowOffhandGrab);
if (!canGrab)
{
continue;
}
for (int j = 0; j < grabbable.grabPoints.Length; ++j)
{
Collider grabbableCollider = grabbable.grabPoints[j];
// Store the closest grabbable
Vector3 closestPointOnBounds = grabbableCollider.ClosestPointOnBounds(m_gripTransform.position);
float grabbableMagSq = (m_gripTransform.position - closestPointOnBounds).sqrMagnitude;
if (grabbableMagSq < closestMagSq)
{
closestMagSq = grabbableMagSq;
closestGrabbable = grabbable;
closestGrabbableCollider = grabbableCollider;
}
}
}
// Disable grab volumes to prevent overlaps
GrabVolumeEnable(false);
if (closestGrabbable != null)
{
if (closestGrabbable.isGrabbed)
{
closestGrabbable.grabbedBy.OffhandGrabbed(closestGrabbable);
}
m_grabbedObj = closestGrabbable;
m_grabbedObj.GrabBegin(this, closestGrabbableCollider);
m_lastPos = transform.position;
m_lastRot = transform.rotation;
// Set up offsets for grabbed object desired position relative to hand.
if(m_grabbedObj.snapPosition)
{
m_grabbedObjectPosOff = m_gripTransform.localPosition;
if(m_grabbedObj.snapOffset)
{
Vector3 snapOffset = m_grabbedObj.snapOffset.position;
if (m_controller == OVRInput.Controller.LTouch) snapOffset.x = -snapOffset.x;
m_grabbedObjectPosOff += snapOffset;
}
}
else
{
Vector3 relPos = m_grabbedObj.transform.position - transform.position;
relPos = Quaternion.Inverse(transform.rotation) * relPos;
m_grabbedObjectPosOff = relPos;
}
if (m_grabbedObj.snapOrientation)
{
m_grabbedObjectRotOff = m_gripTransform.localRotation;
if(m_grabbedObj.snapOffset)
{
m_grabbedObjectRotOff = m_grabbedObj.snapOffset.rotation * m_grabbedObjectRotOff;
}
}
else
{
Quaternion relOri = Quaternion.Inverse(transform.rotation) * m_grabbedObj.transform.rotation;
m_grabbedObjectRotOff = relOri;
}
// NOTE: force teleport on grab, to avoid high-speed travel to dest which hits a lot of other objects at high
// speed and sends them flying. The grabbed object may still teleport inside of other objects, but fixing that
// is beyond the scope of this demo.
MoveGrabbedObject(m_lastPos, m_lastRot, true);
// NOTE: This is to get around having to setup collision layers, but in your own project you might
// choose to remove this line in favor of your own collision layer setup.
SetPlayerIgnoreCollision(m_grabbedObj.gameObject, true);
if (m_parentHeldObject)
{
m_grabbedObj.transform.parent = transform;
}
}
}
protected virtual void MoveGrabbedObject(Vector3 pos, Quaternion rot, bool forceTeleport = false)
{
if (m_grabbedObj == null)
{
return;
}
Rigidbody grabbedRigidbody = m_grabbedObj.grabbedRigidbody;
Vector3 grabbablePosition = pos + rot * m_grabbedObjectPosOff;
Quaternion grabbableRotation = rot * m_grabbedObjectRotOff;
if (forceTeleport)
{
grabbedRigidbody.transform.position = grabbablePosition;
grabbedRigidbody.transform.rotation = grabbableRotation;
}
else
{
grabbedRigidbody.MovePosition(grabbablePosition);
grabbedRigidbody.MoveRotation(grabbableRotation);
}
}
protected void GrabEnd()
{
if (m_grabbedObj != null)
{
OVRPose localPose = new OVRPose { position = OVRInput.GetLocalControllerPosition(m_controller), orientation = OVRInput.GetLocalControllerRotation(m_controller) };
OVRPose offsetPose = new OVRPose { position = m_anchorOffsetPosition, orientation = m_anchorOffsetRotation };
localPose = localPose * offsetPose;
OVRPose trackingSpace = transform.ToOVRPose() * localPose.Inverse();
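			// Rotate the controller's tracking-space velocities into world space before handing them to the released object.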
Vector3 linearVelocity = trackingSpace.orientation * OVRInput.GetLocalControllerVelocity(m_controller);
Vector3 angularVelocity = trackingSpace.orientation * OVRInput.GetLocalControllerAngularVelocity(m_controller);
GrabbableRelease(linearVelocity, angularVelocity);
}
// Re-enable grab volumes to allow overlap events
GrabVolumeEnable(true);
}
protected void GrabbableRelease(Vector3 linearVelocity, Vector3 angularVelocity)
{
m_grabbedObj.GrabEnd(linearVelocity, angularVelocity);
if(m_parentHeldObject) m_grabbedObj.transform.parent = null;
m_grabbedObj = null;
}
protected virtual void GrabVolumeEnable(bool enabled)
{
if (m_grabVolumeEnabled == enabled)
{
return;
}
m_grabVolumeEnabled = enabled;
for (int i = 0; i < m_grabVolumes.Length; ++i)
{
Collider grabVolume = m_grabVolumes[i];
grabVolume.enabled = m_grabVolumeEnabled;
}
if (!m_grabVolumeEnabled)
{
m_grabCandidates.Clear();
}
}
protected virtual void OffhandGrabbed(OVRGrabbable grabbable)
{
if (m_grabbedObj == grabbable)
{
GrabbableRelease(Vector3.zero, Vector3.zero);
}
}
protected void SetPlayerIgnoreCollision(GameObject grabbable, bool ignore)
{
if (m_player != null)
{
Collider[] playerColliders = m_player.GetComponentsInChildren<Collider>();
foreach (Collider pc in playerColliders)
{
Collider[] colliders = grabbable.GetComponentsInChildren<Collider>();
foreach (Collider c in colliders)
{
if(!c.isTrigger && !pc.isTrigger)
Physics.IgnoreCollision(c, pc, ignore);
}
}
}
}
}

Some files were not shown because too many files have changed in this diff.