clean project
This commit is contained in:
9
Assets/Oculus/VR/Scripts/Composition.meta
Normal file
9
Assets/Oculus/VR/Scripts/Composition.meta
Normal file
@@ -0,0 +1,9 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 1f92828e69374384b8cb197653871a6e
|
||||
folderAsset: yes
|
||||
timeCreated: 1502989983
|
||||
licenseType: Store
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
330
Assets/Oculus/VR/Scripts/Composition/OVRCameraComposition.cs
Normal file
330
Assets/Oculus/VR/Scripts/Composition/OVRCameraComposition.cs
Normal file
@@ -0,0 +1,330 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using UnityEngine;
|
||||
using UnityEngine.Rendering;
|
||||
using System.Collections;
|
||||
|
||||
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
|
||||
|
||||
/// <summary>
/// Base class for Mixed Reality Capture compositions that consume frames from a
/// physical camera device (web camera or ZED) through OVRPlugin. Owns the
/// camera-frame quad, the cached virtual green screen boundary mesh, and the
/// per-camera composition manager component. Windows-only (see surrounding #if).
/// </summary>
public abstract class OVRCameraComposition : OVRComposition {
	// Quad the physical camera frame is projected onto; (re)built by RefreshCameraFramePlaneObject().
	protected GameObject cameraFramePlaneObject = null;
	// Forward distance from the MR camera to the frame plane; refreshed every update.
	protected float cameraFramePlaneDistance;

	// Set true only after OVRPlugin.OpenCameraDevice() succeeds in the constructor.
	protected readonly bool hasCameraDeviceOpened = false;

	internal readonly OVRPlugin.CameraDevice cameraDevice = OVRPlugin.CameraDevice.WebCamera0;

	// Cached virtual green screen mesh plus the parameters it was built from, so
	// RefreshBoundaryMesh() only rebuilds when the configuration actually changed.
	private Mesh boundaryMesh = null;
	private float boundaryMeshTopY = 0.0f;
	private float boundaryMeshBottomY = 0.0f;
	private OVRManager.VirtualGreenScreenType boundaryMeshType = OVRManager.VirtualGreenScreenType.Off;
	private OVRCameraFrameCompositionManager cameraFrameCompositionManager = null;

	/// <summary>
	/// Resolves the configured camera device and tries to open it, requesting the
	/// calibrated MRC camera's color resolution and the configured depth quality.
	/// </summary>
	protected OVRCameraComposition(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
		: base(parentObject, mainCamera, configuration)
	{
		cameraDevice = OVRCompositionUtil.ConvertCameraDevice(configuration.capturingCameraDevice);

		Debug.Assert(!hasCameraDeviceOpened);
		Debug.Assert(!OVRPlugin.IsCameraDeviceAvailable(cameraDevice) || !OVRPlugin.HasCameraDeviceOpened(cameraDevice));
		hasCameraDeviceOpened = false;

		bool cameraSupportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);
		if (configuration.useDynamicLighting && !cameraSupportsDepth)
		{
			Debug.LogWarning("The camera device doesn't support depth. The result of dynamic lighting might not be correct");
		}

		if (OVRPlugin.IsCameraDeviceAvailable(cameraDevice))
		{
			OVRPlugin.CameraExtrinsics extrinsics;
			OVRPlugin.CameraIntrinsics intrinsics;
			// Match the capture device's color frame size to the calibrated external camera.
			if (OVRPlugin.GetExternalCameraCount() > 0 && OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
			{
				OVRPlugin.SetCameraDevicePreferredColorFrameSize(cameraDevice, intrinsics.ImageSensorPixelResolution.w, intrinsics.ImageSensorPixelResolution.h);
			}

			if (configuration.useDynamicLighting)
			{
				OVRPlugin.SetCameraDeviceDepthSensingMode(cameraDevice, OVRPlugin.CameraDeviceDepthSensingMode.Fill);
				OVRPlugin.CameraDeviceDepthQuality quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
				if (configuration.depthQuality == OVRManager.DepthQuality.Low)
				{
					quality = OVRPlugin.CameraDeviceDepthQuality.Low;
				}
				else if (configuration.depthQuality == OVRManager.DepthQuality.Medium)
				{
					quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
				}
				else if (configuration.depthQuality == OVRManager.DepthQuality.High)
				{
					quality = OVRPlugin.CameraDeviceDepthQuality.High;
				}
				else
				{
					Debug.LogWarning("Unknown depth quality");
				}
				OVRPlugin.SetCameraDevicePreferredDepthQuality(cameraDevice, quality);
			}

			Debug.LogFormat("Opening camera device {0}", cameraDevice);
			OVRPlugin.OpenCameraDevice(cameraDevice);
			if (OVRPlugin.HasCameraDeviceOpened(cameraDevice))
			{
				Debug.LogFormat("Opened camera device {0}", cameraDevice);
				hasCameraDeviceOpened = true;
			}
		}
	}

	/// <summary>Destroys the frame plane and closes the camera device if this instance opened it.</summary>
	public override void Cleanup()
	{
		OVRCompositionUtil.SafeDestroy(ref cameraFramePlaneObject);
		if (hasCameraDeviceOpened)
		{
			Debug.LogFormat("Close camera device {0}", cameraDevice);
			OVRPlugin.CloseCameraDevice(cameraDevice);
		}
	}

	/// <summary>Invalidates the cached boundary mesh so it is rebuilt against the new pose.</summary>
	public override void RecenterPose()
	{
		boundaryMesh = null;
	}

	/// <summary>
	/// (Re)creates the quad used to display the camera frame and attaches an
	/// OVRCameraFrameCompositionManager to the mixed reality camera.
	/// </summary>
	protected void RefreshCameraFramePlaneObject(GameObject parentObject, Camera mixedRealityCamera, OVRMixedRealityCaptureConfiguration configuration)
	{
		OVRCompositionUtil.SafeDestroy(ref cameraFramePlaneObject);

		Debug.Assert(cameraFramePlaneObject == null);
		cameraFramePlaneObject = GameObject.CreatePrimitive(PrimitiveType.Quad);
		cameraFramePlaneObject.name = "OculusMRC_CameraFrame";
		cameraFramePlaneObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
		cameraFramePlaneObject.GetComponent<Collider>().enabled = false;
		cameraFramePlaneObject.GetComponent<MeshRenderer>().shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
		Material cameraFrameMaterial = new Material(Shader.Find(configuration.useDynamicLighting ? "Oculus/OVRMRCameraFrameLit" : "Oculus/OVRMRCameraFrame"));
		cameraFramePlaneObject.GetComponent<MeshRenderer>().material = cameraFrameMaterial;
		cameraFrameMaterial.SetColor("_Color", Color.white);
		// Hidden until the composition manager flips _Visible on for this camera's render.
		cameraFrameMaterial.SetFloat("_Visible", 0.0f);
		cameraFramePlaneObject.transform.localScale = new Vector3(4, 4, 4);
		cameraFramePlaneObject.SetActive(true);
		// NOTE(review): repeated calls add another manager component to the camera
		// without removing the previous one — confirm callers only refresh on camera change.
		cameraFrameCompositionManager = mixedRealityCamera.gameObject.AddComponent<OVRCameraFrameCompositionManager>();
		cameraFrameCompositionManager.configuration = configuration;
		cameraFrameCompositionManager.cameraFrameGameObj = cameraFramePlaneObject;
		cameraFrameCompositionManager.composition = this;
	}

	// Ensures the "camera rig missing" warning is only logged once per state change.
	private bool nullcameraRigWarningDisplayed = false;

	/// <summary>
	/// Per-frame update of the camera frame quad: positions it at the headset's
	/// distance along the MR camera's forward axis, scales it to exactly fill the
	/// MR camera frustum, and pushes the color/depth/chroma-key/green-screen
	/// parameters into its material. Skips the update when a camera frame is not
	/// ready yet (the previous frame's state is kept).
	/// </summary>
	protected void UpdateCameraFramePlaneObject(Camera mainCamera, Camera mixedRealityCamera, OVRMixedRealityCaptureConfiguration configuration, RenderTexture boundaryMeshMaskTexture)
	{
		cameraFrameCompositionManager.configuration = configuration;
		bool hasError = false;
		Material cameraFrameMaterial = cameraFramePlaneObject.GetComponent<MeshRenderer>().material;
		Texture2D colorTexture = Texture2D.blackTexture;
		Texture2D depthTexture = Texture2D.whiteTexture;
		if (OVRPlugin.IsCameraDeviceColorFrameAvailable(cameraDevice))
		{
			colorTexture = OVRPlugin.GetCameraDeviceColorFrameTexture(cameraDevice);
		}
		else
		{
			Debug.LogWarning("Camera: color frame not ready");
			hasError = true;
		}
		bool cameraSupportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);
		if (configuration.useDynamicLighting && cameraSupportsDepth)
		{
			if (OVRPlugin.IsCameraDeviceDepthFrameAvailable(cameraDevice))
			{
				depthTexture = OVRPlugin.GetCameraDeviceDepthFrameTexture(cameraDevice);
			}
			else
			{
				Debug.LogWarning("Camera: depth frame not ready");
				hasError = true;
			}
		}
		if (!hasError)
		{
			// Place the quad at the headset's depth along the MR camera's forward axis.
			Vector3 offset = mainCamera.transform.position - mixedRealityCamera.transform.position;
			float distance = Vector3.Dot(mixedRealityCamera.transform.forward, offset);
			cameraFramePlaneDistance = distance;

			cameraFramePlaneObject.transform.position = mixedRealityCamera.transform.position + mixedRealityCamera.transform.forward * distance;
			cameraFramePlaneObject.transform.rotation = mixedRealityCamera.transform.rotation;

			// Scale the unit quad so it exactly covers the MR camera frustum at that distance.
			float tanFov = Mathf.Tan(mixedRealityCamera.fieldOfView * Mathf.Deg2Rad * 0.5f);
			cameraFramePlaneObject.transform.localScale = new Vector3(distance * mixedRealityCamera.aspect * tanFov * 2.0f, distance * tanFov * 2.0f, 1.0f);

			float worldHeight = distance * tanFov * 2.0f;
			float worldWidth = worldHeight * mixedRealityCamera.aspect;

			float cullingDistance = float.MaxValue;

			// BUGFIX: read the green screen type from the configuration passed in —
			// every other green-screen decision in this method (and in
			// RefreshBoundaryMesh/OnPreRender) uses configuration, and
			// OVRManager.instance may be null or disagree with the configuration.
			if (configuration.virtualGreenScreenType != OVRManager.VirtualGreenScreenType.Off)
			{
				RefreshBoundaryMesh(mixedRealityCamera, configuration, out cullingDistance);
			}

			cameraFrameMaterial.mainTexture = colorTexture;
			cameraFrameMaterial.SetTexture("_DepthTex", depthTexture);
			cameraFrameMaterial.SetVector("_FlipParams", new Vector4((configuration.flipCameraFrameHorizontally ? 1.0f : 0.0f), (configuration.flipCameraFrameVertically ? 1.0f : 0.0f), 0.0f, 0.0f));
			cameraFrameMaterial.SetColor("_ChromaKeyColor", configuration.chromaKeyColor);
			cameraFrameMaterial.SetFloat("_ChromaKeySimilarity", configuration.chromaKeySimilarity);
			cameraFrameMaterial.SetFloat("_ChromaKeySmoothRange", configuration.chromaKeySmoothRange);
			cameraFrameMaterial.SetFloat("_ChromaKeySpillRange", configuration.chromaKeySpillRange);
			cameraFrameMaterial.SetVector("_TextureDimension", new Vector4(colorTexture.width, colorTexture.height, 1.0f / colorTexture.width, 1.0f / colorTexture.height));
			cameraFrameMaterial.SetVector("_TextureWorldSize", new Vector4(worldWidth, worldHeight, 0, 0));
			cameraFrameMaterial.SetFloat("_SmoothFactor", configuration.dynamicLightingSmoothFactor);
			cameraFrameMaterial.SetFloat("_DepthVariationClamp", configuration.dynamicLightingDepthVariationClampingValue);
			cameraFrameMaterial.SetFloat("_CullingDistance", cullingDistance);
			if (configuration.virtualGreenScreenType == OVRManager.VirtualGreenScreenType.Off || boundaryMesh == null || boundaryMeshMaskTexture == null)
			{
				cameraFrameMaterial.SetTexture("_MaskTex", Texture2D.whiteTexture);
			}
			else
			{
				if (cameraRig == null)
				{
					// Warn once; fall back to a fully-white mask (green screen disabled).
					if (!nullcameraRigWarningDisplayed)
					{
						Debug.LogWarning("Could not find the OVRCameraRig/CenterEyeAnchor object. Please check if the OVRCameraRig has been setup properly. The virtual green screen has been temporarily disabled");
						nullcameraRigWarningDisplayed = true;
					}

					cameraFrameMaterial.SetTexture("_MaskTex", Texture2D.whiteTexture);
				}
				else
				{
					if (nullcameraRigWarningDisplayed)
					{
						Debug.Log("OVRCameraRig/CenterEyeAnchor object found. Virtual green screen is activated");
						nullcameraRigWarningDisplayed = false;
					}

					cameraFrameMaterial.SetTexture("_MaskTex", boundaryMeshMaskTexture);
				}
			}
		}
	}

	/// <summary>
	/// Rebuilds the virtual green screen mesh when its type or vertical extents
	/// changed, and reports the culling distance derived from the boundary.
	/// </summary>
	protected void RefreshBoundaryMesh(Camera camera, OVRMixedRealityCaptureConfiguration configuration, out float cullingDistance)
	{
		float depthTolerance = configuration.virtualGreenScreenApplyDepthCulling ? configuration.virtualGreenScreenDepthTolerance : float.PositiveInfinity;
		cullingDistance = OVRCompositionUtil.GetMaximumBoundaryDistance(camera, OVRCompositionUtil.ToBoundaryType(configuration.virtualGreenScreenType)) + depthTolerance;
		if (boundaryMesh == null || boundaryMeshType != configuration.virtualGreenScreenType || boundaryMeshTopY != configuration.virtualGreenScreenTopY || boundaryMeshBottomY != configuration.virtualGreenScreenBottomY)
		{
			boundaryMeshTopY = configuration.virtualGreenScreenTopY;
			boundaryMeshBottomY = configuration.virtualGreenScreenBottomY;
			boundaryMesh = OVRCompositionUtil.BuildBoundaryMesh(OVRCompositionUtil.ToBoundaryType(configuration.virtualGreenScreenType), boundaryMeshTopY, boundaryMeshBottomY);
			boundaryMeshType = configuration.virtualGreenScreenType;

			// Creating GameObject for testing purpose only
			//GameObject boundaryMeshObject = new GameObject("BoundaryMeshObject");
			//boundaryMeshObject.AddComponent<MeshFilter>().mesh = boundaryMesh;
			//boundaryMeshObject.AddComponent<MeshRenderer>();
		}
	}

	/// <summary>
	/// Attached to the mixed reality camera. Renders the boundary mesh into the
	/// mask texture before the camera renders, and toggles the camera frame quad's
	/// _Visible flag so the quad only appears in this camera's output.
	/// On URP (2019.1+) the built-in OnPreRender/OnPostRender messages don't fire,
	/// so the equivalent RenderPipelineManager callbacks are used instead.
	/// </summary>
	public class OVRCameraFrameCompositionManager : MonoBehaviour {

		public OVRMixedRealityCaptureConfiguration configuration;
		public GameObject cameraFrameGameObj;
		public OVRCameraComposition composition;
		public RenderTexture boundaryMeshMaskTexture;
		private Material cameraFrameMaterial;
		private Material whiteMaterial;
#if UNITY_2019_1_OR_NEWER
		private Camera mixedRealityCamera;
#endif

		void Start()
		{
			Shader shader = Shader.Find("Oculus/Unlit");
			if (!shader)
			{
				Debug.LogError("Oculus/Unlit shader does not exist");
				return;
			}
			whiteMaterial = new Material(shader);
			whiteMaterial.color = Color.white;
#if UNITY_2019_1_OR_NEWER
			// Attach to render pipeline callbacks when on URP
			if (GraphicsSettings.renderPipelineAsset != null)
			{
				RenderPipelineManager.beginCameraRendering += OnCameraBeginRendering;
				RenderPipelineManager.endCameraRendering += OnCameraEndRendering;
				mixedRealityCamera = GetComponent<Camera>();
			}
#endif
		}

#if UNITY_2019_1_OR_NEWER
		void OnDestroy()
		{
			// BUGFIX: unsubscribe from the static pipeline events so destroyed
			// components are not kept alive (and called) through the delegates.
			// Removing a handler that was never added is a harmless no-op.
			RenderPipelineManager.beginCameraRendering -= OnCameraBeginRendering;
			RenderPipelineManager.endCameraRendering -= OnCameraEndRendering;
		}
#endif

		void OnPreRender()
		{
			if (configuration != null && configuration.virtualGreenScreenType != OVRManager.VirtualGreenScreenType.Off && boundaryMeshMaskTexture != null && composition.boundaryMesh != null)
			{
				RenderTexture oldRT = RenderTexture.active;
				RenderTexture.active = boundaryMeshMaskTexture;

				// The camera matrices haven't been setup when OnPreRender() is executed. Load the projection manually
				GL.PushMatrix();
				GL.LoadProjectionMatrix(GetComponent<Camera>().projectionMatrix);

				GL.Clear(false, true, Color.black);

				// Render the boundary mesh in solid white into the mask.
				for (int i = 0; i < whiteMaterial.passCount; ++i)
				{
					if (whiteMaterial.SetPass(i))
					{
						Graphics.DrawMeshNow(composition.boundaryMesh, composition.cameraRig.ComputeTrackReferenceMatrix());
					}
				}

				GL.PopMatrix();
				RenderTexture.active = oldRT;
			}

			if (cameraFrameGameObj)
			{
				if (cameraFrameMaterial == null)
					cameraFrameMaterial = cameraFrameGameObj.GetComponent<MeshRenderer>().material;
				cameraFrameMaterial.SetFloat("_Visible", 1.0f);
			}
		}

		void OnPostRender()
		{
			// Hide the frame quad again so other cameras don't see it.
			if (cameraFrameGameObj)
			{
				Debug.Assert(cameraFrameMaterial);
				cameraFrameMaterial.SetFloat("_Visible", 0.0f);
			}
		}

#if UNITY_2019_1_OR_NEWER
		private void OnCameraBeginRendering(ScriptableRenderContext renderContext, Camera camera)
		{
			// URP replacement for OnPreRender(): only react to our own camera.
			if (mixedRealityCamera != null && mixedRealityCamera == camera)
				OnPreRender();
		}

		private void OnCameraEndRendering(ScriptableRenderContext renderContext, Camera camera)
		{
			// URP replacement for OnPostRender(): only react to our own camera.
			if (mixedRealityCamera != null && mixedRealityCamera == camera)
				OnPostRender();
		}
#endif
	}

}
|
||||
|
||||
#endif
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 70818bad1fe6859439b190a61dfb6eb8
|
||||
timeCreated: 1503089686
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
100
Assets/Oculus/VR/Scripts/Composition/OVRComposition.cs
Normal file
100
Assets/Oculus/VR/Scripts/Composition/OVRComposition.cs
Normal file
@@ -0,0 +1,100 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
#if UNITY_ANDROID && !UNITY_EDITOR
|
||||
#define OVR_ANDROID_MRC
|
||||
#endif
|
||||
|
||||
using UnityEngine;
|
||||
using System.Collections;
|
||||
|
||||
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
|
||||
|
||||
/// <summary>
/// Base class for all Mixed Reality Capture composition methods. Tracks the
/// OVRCameraRig used for tracking-space placement and converts external camera
/// extrinsics into tracking-space / world-space poses.
/// </summary>
public abstract class OVRComposition {

	// True when a camera rig with a non-null trackingSpace was found; derived
	// compositions parent their objects under that transform in that case.
	public bool cameraInTrackingSpace = false;
	public OVRCameraRig cameraRig = null;

	// NOTE(review): 'configuration' is accepted but unused here — presumably kept
	// for derived-constructor symmetry; confirm before removing.
	protected OVRComposition(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration) {
		RefreshCameraRig(parentObject, mainCamera);
	}

	// Identifies which OVRManager.CompositionMethod this instance implements.
	public abstract OVRManager.CompositionMethod CompositionMethod();

	// Per-frame update of the composition.
	public abstract void Update(GameObject gameObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration, OVRManager.TrackingOrigin trackingOrigin);
	// Releases every object the composition created.
	public abstract void Cleanup();

	// Called on tracking recenter; default is a no-op (OVRCameraComposition overrides it).
	public virtual void RecenterPose() { }

	protected bool usingLastAttachedNodePose = false;
	protected OVRPose lastAttachedNodePose = new OVRPose(); // Sometimes the attach node pose is not readable (lose tracking, low battery, etc.) Use the last pose instead when it happens

	/// <summary>
	/// Locates the OVRCameraRig above the main camera (falling back to a component
	/// on the parent object) and records whether tracking space is usable.
	/// </summary>
	public void RefreshCameraRig(GameObject parentObject, Camera mainCamera)
	{
		OVRCameraRig cameraRig = mainCamera.GetComponentInParent<OVRCameraRig>();
		if (cameraRig == null)
		{
			cameraRig = parentObject.GetComponent<OVRCameraRig>();
		}
		cameraInTrackingSpace = (cameraRig != null && cameraRig.trackingSpace != null);
		this.cameraRig = cameraRig;
		Debug.Log(cameraRig == null ? "[OVRComposition] CameraRig not found" : "[OVRComposition] CameraRig found");
	}

	/// <summary>Converts external camera extrinsics into a world-space pose.</summary>
	public OVRPose ComputeCameraWorldSpacePose(OVRPlugin.CameraExtrinsics extrinsics)
	{
		OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
		OVRPose worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
		return worldSpacePose;
	}

	/// <summary>
	/// Converts external camera extrinsics into a tracking-space pose. When the
	/// camera is attached to a tracked node, the node's current pose (or the last
	/// known pose while tracking is lost) is composed in front of the relative pose.
	/// </summary>
	public OVRPose ComputeCameraTrackingSpacePose(OVRPlugin.CameraExtrinsics extrinsics)
	{
		OVRPose trackingSpacePose = new OVRPose();

		OVRPose cameraTrackingSpacePose = extrinsics.RelativePose.ToOVRPose();
#if OVR_ANDROID_MRC
		// On Android MRC the extrinsics are stage-relative; convert them into the
		// local tracking space first.
		OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
		cameraTrackingSpacePose = stageToLocalPose * cameraTrackingSpacePose;
#endif
		trackingSpacePose = cameraTrackingSpacePose;

		if (extrinsics.AttachedToNode != OVRPlugin.Node.None && OVRPlugin.GetNodePresent(extrinsics.AttachedToNode))
		{
			if (usingLastAttachedNodePose)
			{
				Debug.Log("The camera attached node get tracked");
				usingLastAttachedNodePose = false;
			}
			// Remember the node pose so it can be reused if tracking drops later.
			OVRPose attachedNodePose = OVRPlugin.GetNodePose(extrinsics.AttachedToNode, OVRPlugin.Step.Render).ToOVRPose();
			lastAttachedNodePose = attachedNodePose;
			trackingSpacePose = attachedNodePose * trackingSpacePose;
		}
		else
		{
			if (extrinsics.AttachedToNode != OVRPlugin.Node.None)
			{
				// Attached node exists but is not currently tracked: warn once and
				// keep composing with the last known node pose.
				if (!usingLastAttachedNodePose)
				{
					Debug.LogWarning("The camera attached node could not be tracked, using the last pose");
					usingLastAttachedNodePose = true;
				}
				trackingSpacePose = lastAttachedNodePose * trackingSpacePose;
			}
		}

		return trackingSpacePose;
	}

}
|
||||
|
||||
#endif
|
||||
12
Assets/Oculus/VR/Scripts/Composition/OVRComposition.cs.meta
Normal file
12
Assets/Oculus/VR/Scripts/Composition/OVRComposition.cs.meta
Normal file
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 829a382f3380d4b46ad9670463232a0b
|
||||
timeCreated: 1502990005
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
164
Assets/Oculus/VR/Scripts/Composition/OVRCompositionUtil.cs
Normal file
164
Assets/Oculus/VR/Scripts/Composition/OVRCompositionUtil.cs
Normal file
@@ -0,0 +1,164 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using UnityEngine;
|
||||
using System.Collections.Generic;
|
||||
|
||||
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
|
||||
|
||||
/// <summary>
/// Helpers shared by the Mixed Reality Capture composition classes: object
/// destruction that works in both play and edit mode, enum conversion, and
/// virtual green screen boundary queries / mesh construction.
/// </summary>
internal class OVRCompositionUtil {

	// Destroys a GameObject with the API appropriate for the current mode:
	// deferred Destroy while playing, immediate destruction in the editor.
	public static void SafeDestroy(GameObject obj)
	{
		if (!Application.isPlaying)
		{
			GameObject.DestroyImmediate(obj);
		}
		else
		{
			GameObject.Destroy(obj);
		}
	}

	// Destroys the referenced GameObject (if any) and clears the reference.
	public static void SafeDestroy(ref GameObject obj)
	{
		if (obj == null)
		{
			return;
		}
		SafeDestroy(obj);
		obj = null;
	}

	// Maps the OVRManager-facing camera device enum onto the OVRPlugin enum;
	// unknown values map to None.
	public static OVRPlugin.CameraDevice ConvertCameraDevice(OVRManager.CameraDevice cameraDevice)
	{
		switch (cameraDevice)
		{
			case OVRManager.CameraDevice.WebCamera0:
				return OVRPlugin.CameraDevice.WebCamera0;
			case OVRManager.CameraDevice.WebCamera1:
				return OVRPlugin.CameraDevice.WebCamera1;
			case OVRManager.CameraDevice.ZEDCamera:
				return OVRPlugin.CameraDevice.ZEDCamera;
			default:
				return OVRPlugin.CameraDevice.None;
		}
	}

	// Converts a virtual green screen type to the boundary type used for guardian
	// geometry queries. Only PlayArea is supported (OuterBoundary was retired);
	// anything else logs a warning and falls back to PlayArea.
	public static OVRBoundary.BoundaryType ToBoundaryType(OVRManager.VirtualGreenScreenType type)
	{
		if (type != OVRManager.VirtualGreenScreenType.PlayArea)
		{
			Debug.LogWarning("Unmatched VirtualGreenScreenType");
		}
		return OVRBoundary.BoundaryType.PlayArea;
	}

	// Transforms a tracking-space position into world space (identity orientation).
	public static Vector3 GetWorldPosition(Vector3 trackingSpacePosition)
	{
		OVRPose trackingPose;
		trackingPose.position = trackingSpacePosition;
		trackingPose.orientation = Quaternion.identity;
		return OVRExtensions.ToWorldSpacePose(trackingPose).position;
	}

	// Returns the largest distance of any boundary point along the camera's
	// forward axis, or float.MaxValue when no usable boundary exists.
	public static float GetMaximumBoundaryDistance(Camera camera, OVRBoundary.BoundaryType boundaryType)
	{
		if (!OVRManager.boundary.GetConfigured())
		{
			return float.MaxValue;
		}

		Vector3[] points = OVRManager.boundary.GetGeometry(boundaryType);
		if (points.Length == 0)
		{
			return float.MaxValue;
		}

		Vector3 forward = camera.transform.forward;
		float farthest = -float.MaxValue;
		for (int i = 0; i < points.Length; ++i)
		{
			float along = Vector3.Dot(forward, GetWorldPosition(points[i]));
			if (along > farthest)
			{
				farthest = along;
			}
		}
		return farthest;
	}

	// Extrudes the boundary outline into a vertical band spanning [bottomY, topY],
	// closed by repeating the first outline point. Returns null when no boundary
	// geometry is available.
	public static Mesh BuildBoundaryMesh(OVRBoundary.BoundaryType boundaryType, float topY, float bottomY)
	{
		if (!OVRManager.boundary.GetConfigured())
		{
			return null;
		}

		List<Vector3> outline = new List<Vector3>(OVRManager.boundary.GetGeometry(boundaryType));
		if (outline.Count == 0)
		{
			return null;
		}

		// Close the loop by duplicating the first point.
		outline.Add(outline[0]);
		int count = outline.Count;

		Vector3[] verts = new Vector3[count * 2];
		Vector2[] texCoords = new Vector2[count * 2];
		for (int i = 0; i < count; ++i)
		{
			Vector3 p = outline[i];
			verts[i] = new Vector3(p.x, bottomY, p.z);
			verts[i + count] = new Vector3(p.x, topY, p.z);
			float u = (float)i / (count - 1);
			texCoords[i] = new Vector2(u, 0.0f);
			texCoords[i + count] = new Vector2(u, 1.0f);
		}

		// the geometry is built clockwised. only the back faces should be rendered in the camera frame mask
		int[] indices = new int[(count - 1) * 2 * 3];
		for (int i = 0; i < count - 1; ++i)
		{
			int b = i * 6;
			indices[b + 0] = i;
			indices[b + 1] = i + count;
			indices[b + 2] = i + 1 + count;

			indices[b + 3] = i;
			indices[b + 4] = i + 1 + count;
			indices[b + 5] = i + 1;
		}

		Mesh band = new Mesh();
		band.vertices = verts;
		band.uv = texCoords;
		band.triangles = indices;
		return band;
	}

}
|
||||
|
||||
#endif
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 43bf91d46b2eb874a842be95aee2cc9a
|
||||
timeCreated: 1502992822
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
195
Assets/Oculus/VR/Scripts/Composition/OVRDirectComposition.cs
Normal file
195
Assets/Oculus/VR/Scripts/Composition/OVRDirectComposition.cs
Normal file
@@ -0,0 +1,195 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System;
|
||||
using UnityEngine;
|
||||
using System.Collections;
|
||||
using Object = UnityEngine.Object;
|
||||
|
||||
#if USING_URP
|
||||
using UnityEngine.Rendering.Universal;
|
||||
#endif
|
||||
|
||||
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
|
||||
|
||||
public class OVRDirectComposition : OVRCameraComposition
|
||||
{
|
||||
private GameObject previousMainCameraObject = null;
|
||||
public GameObject directCompositionCameraGameObject = null;
|
||||
public Camera directCompositionCamera = null;
|
||||
public RenderTexture boundaryMeshMaskTexture = null;
|
||||
|
||||
/// <summary>Identifies this composition as the Direct method.</summary>
public override OVRManager.CompositionMethod CompositionMethod() => OVRManager.CompositionMethod.Direct;
|
||||
|
||||
/// <summary>
/// Creates a direct composition: the base OVRCameraComposition constructor
/// resolves and opens the physical camera device, then the composition camera
/// objects are built from the main camera.
/// </summary>
public OVRDirectComposition(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
	: base(parentObject, mainCamera, configuration)
{
	RefreshCameraObjects(parentObject, mainCamera, configuration);
}
|
||||
|
||||
/// <summary>
/// (Re)builds the direct composition camera when the main camera object changed:
/// clones the main camera (or lets the app-supplied factory create it), strips
/// audio/OVRManager components, keeps it out of XR rendering, applies the
/// layer configuration, and recreates the camera frame plane. No-op while the
/// camera device is not open or the main camera is unchanged.
/// </summary>
private void RefreshCameraObjects(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
{
	if (!hasCameraDeviceOpened)
	{
		Debug.LogWarning("[OVRDirectComposition] RefreshCameraObjects(): Unable to open camera device " + cameraDevice);
		return;
	}

	if (mainCamera.gameObject != previousMainCameraObject)
	{
		Debug.LogFormat("[OVRDirectComposition] Camera refreshed. Rebind camera to {0}", mainCamera.gameObject.name);

		OVRCompositionUtil.SafeDestroy(ref directCompositionCameraGameObject);
		directCompositionCamera = null;

		RefreshCameraRig(parentObject, mainCamera);

		Debug.Assert(directCompositionCameraGameObject == null);
		// Let the application supply its own MRC camera object if it registered a factory.
		if (configuration.instantiateMixedRealityCameraGameObject != null)
		{
			directCompositionCameraGameObject = configuration.instantiateMixedRealityCameraGameObject(mainCamera.gameObject, OVRManager.MrcCameraType.Normal);
		}
		else
		{
			directCompositionCameraGameObject = Object.Instantiate(mainCamera.gameObject);
		}
		directCompositionCameraGameObject.name = "OculusMRC_DirectCompositionCamera";
		directCompositionCameraGameObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
		// The clone must not steal the audio listener or run a second OVRManager.
		if (directCompositionCameraGameObject.GetComponent<AudioListener>())
		{
			Object.Destroy(directCompositionCameraGameObject.GetComponent<AudioListener>());
		}
		if (directCompositionCameraGameObject.GetComponent<OVRManager>())
		{
			Object.Destroy(directCompositionCameraGameObject.GetComponent<OVRManager>());
		}
		directCompositionCamera = directCompositionCameraGameObject.GetComponent<Camera>();
#if USING_MRC_COMPATIBLE_URP_VERSION
		// URP 10+: keep the composition camera out of XR rendering.
		var directCamData = directCompositionCamera.GetUniversalAdditionalCameraData();
		if (directCamData != null)
		{
			directCamData.allowXRRendering = false;
		}
#elif USING_URP
		Debug.LogError("Using URP with MRC is only supported with URP version 10.0.0 or higher. Consider using Unity 2020 or higher.");
#else
		// Built-in pipeline: render to neither eye (desktop output only).
		directCompositionCamera.stereoTargetEye = StereoTargetEyeMask.None;
#endif
		// Render on top of everything, covering the full viewport.
		directCompositionCamera.depth = float.MaxValue;
		directCompositionCamera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
		directCompositionCamera.cullingMask = (directCompositionCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;


		Debug.Log("DirectComposition activated : useDynamicLighting " + (configuration.useDynamicLighting ? "ON" : "OFF"));
		RefreshCameraFramePlaneObject(parentObject, directCompositionCamera, configuration);

		previousMainCameraObject = mainCamera.gameObject;
	}
}
|
||||
|
||||
public override void Update(GameObject gameObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration, OVRManager.TrackingOrigin trackingOrigin)
|
||||
{
|
||||
if (!hasCameraDeviceOpened)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
RefreshCameraObjects(gameObject, mainCamera, configuration);
|
||||
|
||||
if (!OVRPlugin.SetHandNodePoseStateLatency(configuration.handPoseStateLatency))
|
||||
{
|
||||
Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + configuration.handPoseStateLatency);
|
||||
}
|
||||
|
||||
directCompositionCamera.clearFlags = mainCamera.clearFlags;
|
||||
directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
|
||||
if (configuration.dynamicCullingMask)
|
||||
{
|
||||
directCompositionCamera.cullingMask = (mainCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
|
||||
}
|
||||
|
||||
directCompositionCamera.nearClipPlane = mainCamera.nearClipPlane;
|
||||
directCompositionCamera.farClipPlane = mainCamera.farClipPlane;
|
||||
|
||||
if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
|
||||
{
|
||||
OVRPose trackingSpacePose = new OVRPose();
|
||||
trackingSpacePose.position = trackingOrigin == OVRManager.TrackingOrigin.EyeLevel ?
|
||||
OVRMixedReality.fakeCameraEyeLevelPosition :
|
||||
OVRMixedReality.fakeCameraFloorLevelPosition;
|
||||
trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
|
||||
directCompositionCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
|
||||
directCompositionCamera.aspect = OVRMixedReality.fakeCameraAspect;
|
||||
if (cameraInTrackingSpace)
|
||||
{
|
||||
directCompositionCamera.transform.FromOVRPose(trackingSpacePose, true);
|
||||
}
|
||||
else
|
||||
{
|
||||
OVRPose worldSpacePose = new OVRPose();
|
||||
worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
|
||||
directCompositionCamera.transform.FromOVRPose(worldSpacePose);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
OVRPlugin.CameraExtrinsics extrinsics;
|
||||
OVRPlugin.CameraIntrinsics intrinsics;
|
||||
|
||||
// So far, only support 1 camera for MR and always use camera index 0
|
||||
if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
|
||||
{
|
||||
float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
|
||||
float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
|
||||
directCompositionCamera.fieldOfView = fovY;
|
||||
directCompositionCamera.aspect = aspect;
|
||||
if (cameraInTrackingSpace)
|
||||
{
|
||||
OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
|
||||
directCompositionCamera.transform.FromOVRPose(trackingSpacePose, true);
|
||||
}
|
||||
else
|
||||
{
|
||||
OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
|
||||
directCompositionCamera.transform.FromOVRPose(worldSpacePose);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Debug.LogWarning("Failed to get external camera information");
|
||||
}
|
||||
}
|
||||
|
||||
if (hasCameraDeviceOpened)
|
||||
{
|
||||
if (boundaryMeshMaskTexture == null || boundaryMeshMaskTexture.width != Screen.width || boundaryMeshMaskTexture.height != Screen.height)
|
||||
{
|
||||
boundaryMeshMaskTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
|
||||
boundaryMeshMaskTexture.Create();
|
||||
}
|
||||
UpdateCameraFramePlaneObject(mainCamera, directCompositionCamera, configuration, boundaryMeshMaskTexture);
|
||||
directCompositionCamera.GetComponent<OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = boundaryMeshMaskTexture;
|
||||
}
|
||||
}
|
||||
|
||||
public override void Cleanup()
|
||||
{
|
||||
base.Cleanup();
|
||||
|
||||
OVRCompositionUtil.SafeDestroy(ref directCompositionCameraGameObject);
|
||||
directCompositionCamera = null;
|
||||
|
||||
Debug.Log("DirectComposition deactivated");
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 8e9d1c62d6c68c7429ce265558cfd2b2
|
||||
timeCreated: 1502990248
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
567
Assets/Oculus/VR/Scripts/Composition/OVRExternalComposition.cs
Normal file
567
Assets/Oculus/VR/Scripts/Composition/OVRExternalComposition.cs
Normal file
@@ -0,0 +1,567 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
#if UNITY_ANDROID && !UNITY_EDITOR
|
||||
#define OVR_ANDROID_MRC
|
||||
#endif
|
||||
|
||||
using System;
|
||||
using UnityEngine;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using UnityEngine.Rendering;
|
||||
using Object = UnityEngine.Object;
|
||||
|
||||
#if USING_URP
|
||||
using UnityEngine.Rendering.Universal;
|
||||
#endif
|
||||
|
||||
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
|
||||
|
||||
public class OVRExternalComposition : OVRComposition
|
||||
{
|
||||
private GameObject previousMainCameraObject = null;
|
||||
public GameObject foregroundCameraGameObject = null;
|
||||
public Camera foregroundCamera = null;
|
||||
public GameObject backgroundCameraGameObject = null;
|
||||
public Camera backgroundCamera = null;
|
||||
#if OVR_ANDROID_MRC
|
||||
private bool skipFrame = false;
|
||||
private float fpsThreshold = 80.0f;
|
||||
private bool isFrameSkipped = true;
|
||||
public bool renderCombinedFrame = false;
|
||||
public AudioListener audioListener;
|
||||
public OVRMRAudioFilter audioFilter;
|
||||
public RenderTexture[] mrcRenderTextureArray = new RenderTexture[2];
|
||||
public int frameIndex;
|
||||
public int lastMrcEncodeFrameSyncId;
|
||||
|
||||
// when rendererSupportsCameraRect is false, mrcRenderTextureArray would only store the background frame (regular width)
|
||||
public RenderTexture[] mrcForegroundRenderTextureArray = new RenderTexture[2];
|
||||
|
||||
// this is used for moving MRC camera where we would need to be able to synchronize the camera position from the game with that on the client for composition
|
||||
public double[] cameraPoseTimeArray = new double[2];
|
||||
#endif
|
||||
|
||||
public override OVRManager.CompositionMethod CompositionMethod() { return OVRManager.CompositionMethod.External; }
|
||||
|
||||
public OVRExternalComposition(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
|
||||
: base(parentObject, mainCamera, configuration)
|
||||
{
|
||||
|
||||
#if OVR_ANDROID_MRC
|
||||
renderCombinedFrame = false;
|
||||
|
||||
int frameWidth;
|
||||
int frameHeight;
|
||||
OVRPlugin.Media.GetMrcFrameSize(out frameWidth, out frameHeight);
|
||||
Debug.LogFormat("[OVRExternalComposition] Create render texture {0}, {1}", renderCombinedFrame ? frameWidth : frameWidth/2, frameHeight);
|
||||
for (int i=0; i<2; ++i)
|
||||
{
|
||||
mrcRenderTextureArray[i] = new RenderTexture(renderCombinedFrame ? frameWidth : frameWidth/2, frameHeight, 24, RenderTextureFormat.ARGB32);
|
||||
mrcRenderTextureArray[i].Create();
|
||||
cameraPoseTimeArray[i] = 0.0;
|
||||
}
|
||||
|
||||
skipFrame = OVRManager.display.displayFrequency > fpsThreshold;
|
||||
OVRManager.DisplayRefreshRateChanged += DisplayRefreshRateChanged;
|
||||
frameIndex = 0;
|
||||
lastMrcEncodeFrameSyncId = -1;
|
||||
|
||||
if (!renderCombinedFrame)
|
||||
{
|
||||
Debug.LogFormat("[OVRExternalComposition] Create extra render textures for foreground");
|
||||
for (int i = 0; i < 2; ++i)
|
||||
{
|
||||
mrcForegroundRenderTextureArray[i] = new RenderTexture(frameWidth / 2, frameHeight, 24, RenderTextureFormat.ARGB32);
|
||||
mrcForegroundRenderTextureArray[i].Create();
|
||||
}
|
||||
}
|
||||
#endif
|
||||
RefreshCameraObjects(parentObject, mainCamera, configuration);
|
||||
}
|
||||
|
||||
private void RefreshCameraObjects(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
|
||||
{
|
||||
if (mainCamera.gameObject != previousMainCameraObject)
|
||||
{
|
||||
Debug.LogFormat("[OVRExternalComposition] Camera refreshed. Rebind camera to {0}", mainCamera.gameObject.name);
|
||||
|
||||
OVRCompositionUtil.SafeDestroy(ref backgroundCameraGameObject);
|
||||
backgroundCamera = null;
|
||||
OVRCompositionUtil.SafeDestroy(ref foregroundCameraGameObject);
|
||||
foregroundCamera = null;
|
||||
|
||||
RefreshCameraRig(parentObject, mainCamera);
|
||||
|
||||
Debug.Assert(backgroundCameraGameObject == null);
|
||||
if (configuration.instantiateMixedRealityCameraGameObject != null)
|
||||
{
|
||||
backgroundCameraGameObject = configuration.instantiateMixedRealityCameraGameObject(mainCamera.gameObject, OVRManager.MrcCameraType.Background);
|
||||
}
|
||||
else
|
||||
{
|
||||
backgroundCameraGameObject = Object.Instantiate(mainCamera.gameObject);
|
||||
}
|
||||
|
||||
backgroundCameraGameObject.name = "OculusMRC_BackgroundCamera";
|
||||
backgroundCameraGameObject.transform.parent =
|
||||
cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
|
||||
if (backgroundCameraGameObject.GetComponent<AudioListener>()) {
|
||||
Object.Destroy(backgroundCameraGameObject.GetComponent<AudioListener>());
|
||||
}
|
||||
|
||||
if (backgroundCameraGameObject.GetComponent<OVRManager>()) {
|
||||
Object.Destroy(backgroundCameraGameObject.GetComponent<OVRManager>());
|
||||
}
|
||||
backgroundCamera = backgroundCameraGameObject.GetComponent<Camera>();
|
||||
backgroundCamera.tag = "Untagged";
|
||||
#if USING_MRC_COMPATIBLE_URP_VERSION
|
||||
var backgroundCamData = backgroundCamera.GetUniversalAdditionalCameraData();
|
||||
if (backgroundCamData != null)
|
||||
{
|
||||
backgroundCamData.allowXRRendering = false;
|
||||
}
|
||||
#elif USING_URP
|
||||
Debug.LogError("Using URP with MRC is only supported with URP version 10.0.0 or higher. Consider using Unity 2020 or higher.");
|
||||
#else
|
||||
backgroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
|
||||
#endif
|
||||
backgroundCamera.depth = 99990.0f;
|
||||
backgroundCamera.rect = new Rect(0.0f, 0.0f, 0.5f, 1.0f);
|
||||
backgroundCamera.cullingMask = (backgroundCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
|
||||
#if OVR_ANDROID_MRC
|
||||
backgroundCamera.targetTexture = mrcRenderTextureArray[0];
|
||||
if (!renderCombinedFrame)
|
||||
{
|
||||
backgroundCamera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
|
||||
}
|
||||
#endif
|
||||
|
||||
Debug.Assert(foregroundCameraGameObject == null);
|
||||
if (configuration.instantiateMixedRealityCameraGameObject != null)
|
||||
{
|
||||
foregroundCameraGameObject = configuration.instantiateMixedRealityCameraGameObject(mainCamera.gameObject, OVRManager.MrcCameraType.Foreground);
|
||||
}
|
||||
else
|
||||
{
|
||||
foregroundCameraGameObject = Object.Instantiate(mainCamera.gameObject);
|
||||
}
|
||||
|
||||
foregroundCameraGameObject.name = "OculusMRC_ForgroundCamera";
|
||||
foregroundCameraGameObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
|
||||
if (foregroundCameraGameObject.GetComponent<AudioListener>())
|
||||
{
|
||||
Object.Destroy(foregroundCameraGameObject.GetComponent<AudioListener>());
|
||||
}
|
||||
if (foregroundCameraGameObject.GetComponent<OVRManager>())
|
||||
{
|
||||
Object.Destroy(foregroundCameraGameObject.GetComponent<OVRManager>());
|
||||
}
|
||||
foregroundCamera = foregroundCameraGameObject.GetComponent<Camera>();
|
||||
foregroundCamera.tag = "Untagged";
|
||||
#if USING_MRC_COMPATIBLE_URP_VERSION
|
||||
var foregroundCamData = foregroundCamera.GetUniversalAdditionalCameraData();
|
||||
if (foregroundCamData != null)
|
||||
{
|
||||
foregroundCamData.allowXRRendering = false;
|
||||
}
|
||||
#elif USING_URP
|
||||
Debug.LogError("Using URP with MRC is only supported with URP version 10.0.0 or higher. Consider using Unity 2020 or higher.");
|
||||
#else
|
||||
foregroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
|
||||
#endif
|
||||
foregroundCamera.depth = backgroundCamera.depth + 1.0f; // enforce the forground be rendered after the background
|
||||
foregroundCamera.rect = new Rect(0.5f, 0.0f, 0.5f, 1.0f);
|
||||
foregroundCamera.clearFlags = CameraClearFlags.Color;
|
||||
#if OVR_ANDROID_MRC
|
||||
foregroundCamera.backgroundColor = configuration.externalCompositionBackdropColorQuest;
|
||||
#else
|
||||
foregroundCamera.backgroundColor = configuration.externalCompositionBackdropColorRift;
|
||||
#endif
|
||||
foregroundCamera.cullingMask = (foregroundCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
|
||||
|
||||
#if OVR_ANDROID_MRC
|
||||
if (renderCombinedFrame)
|
||||
{
|
||||
foregroundCamera.targetTexture = mrcRenderTextureArray[0];
|
||||
}
|
||||
else
|
||||
{
|
||||
foregroundCamera.targetTexture = mrcForegroundRenderTextureArray[0];
|
||||
foregroundCamera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
|
||||
}
|
||||
#endif
|
||||
|
||||
previousMainCameraObject = mainCamera.gameObject;
|
||||
}
|
||||
}
|
||||
|
||||
#if OVR_ANDROID_MRC
|
||||
private void RefreshAudioFilter()
|
||||
{
|
||||
if (cameraRig != null && (audioListener == null || !audioListener.enabled || !audioListener.gameObject.activeInHierarchy))
|
||||
{
|
||||
CleanupAudioFilter();
|
||||
|
||||
AudioListener tmpAudioListener = cameraRig.centerEyeAnchor.gameObject.activeInHierarchy ? cameraRig.centerEyeAnchor.GetComponent<AudioListener>() : null;
|
||||
if (tmpAudioListener != null && !tmpAudioListener.enabled) tmpAudioListener = null;
|
||||
if (tmpAudioListener == null)
|
||||
{
|
||||
if (Camera.main != null && Camera.main.gameObject.activeInHierarchy)
|
||||
{
|
||||
tmpAudioListener = Camera.main.GetComponent<AudioListener>();
|
||||
if (tmpAudioListener != null && !tmpAudioListener.enabled) tmpAudioListener = null;
|
||||
}
|
||||
}
|
||||
if (tmpAudioListener == null)
|
||||
{
|
||||
Object[] allListeners = Object.FindObjectsOfType<AudioListener>();
|
||||
foreach (var l in allListeners)
|
||||
{
|
||||
AudioListener al = l as AudioListener;
|
||||
if (al != null && al.enabled && al.gameObject.activeInHierarchy)
|
||||
{
|
||||
tmpAudioListener = al;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (tmpAudioListener == null)
|
||||
{
|
||||
Debug.LogWarning("[OVRExternalComposition] No AudioListener in scene");
|
||||
}
|
||||
else
|
||||
{
|
||||
Debug.LogFormat("[OVRExternalComposition] AudioListener found, obj {0}", tmpAudioListener.gameObject.name);
|
||||
}
|
||||
audioListener = tmpAudioListener;
|
||||
|
||||
if(audioListener != null)
|
||||
{
|
||||
audioFilter = audioListener.gameObject.AddComponent<OVRMRAudioFilter>();
|
||||
audioFilter.composition = this;
|
||||
Debug.LogFormat("OVRMRAudioFilter added");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private float[] cachedAudioDataArray = null;
|
||||
|
||||
private int CastMrcFrame(int castTextureIndex)
|
||||
{
|
||||
int audioFrames;
|
||||
int audioChannels;
|
||||
GetAndResetAudioData(ref cachedAudioDataArray, out audioFrames, out audioChannels);
|
||||
|
||||
int syncId = -1;
|
||||
//Debug.Log("EncodeFrameThreadObject EncodeMrcFrame");
|
||||
bool ret = false;
|
||||
if (OVRPlugin.Media.GetMrcInputVideoBufferType() == OVRPlugin.Media.InputVideoBufferType.TextureHandle)
|
||||
{
|
||||
ret = OVRPlugin.Media.EncodeMrcFrame(mrcRenderTextureArray[castTextureIndex].GetNativeTexturePtr(),
|
||||
renderCombinedFrame ? System.IntPtr.Zero : mrcForegroundRenderTextureArray[castTextureIndex].GetNativeTexturePtr(),
|
||||
cachedAudioDataArray, audioFrames, audioChannels, AudioSettings.dspTime, cameraPoseTimeArray[castTextureIndex], ref syncId);
|
||||
}
|
||||
else
|
||||
{
|
||||
ret = OVRPlugin.Media.EncodeMrcFrame(mrcRenderTextureArray[castTextureIndex], cachedAudioDataArray, audioFrames, audioChannels, AudioSettings.dspTime, cameraPoseTimeArray[castTextureIndex], ref syncId);
|
||||
}
|
||||
|
||||
if (!ret)
|
||||
{
|
||||
Debug.LogWarning("EncodeMrcFrame failed. Likely caused by OBS plugin disconnection");
|
||||
return -1;
|
||||
}
|
||||
|
||||
return syncId;
|
||||
}
|
||||
|
||||
private void SetCameraTargetTexture(int drawTextureIndex)
|
||||
{
|
||||
if (renderCombinedFrame)
|
||||
{
|
||||
RenderTexture texture = mrcRenderTextureArray[drawTextureIndex];
|
||||
if (backgroundCamera.targetTexture != texture)
|
||||
{
|
||||
backgroundCamera.targetTexture = texture;
|
||||
}
|
||||
if (foregroundCamera.targetTexture != texture)
|
||||
{
|
||||
foregroundCamera.targetTexture = texture;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
RenderTexture bgTexture = mrcRenderTextureArray[drawTextureIndex];
|
||||
RenderTexture fgTexture = mrcForegroundRenderTextureArray[drawTextureIndex];
|
||||
if (backgroundCamera.targetTexture != bgTexture)
|
||||
{
|
||||
backgroundCamera.targetTexture = bgTexture;
|
||||
}
|
||||
if (foregroundCamera.targetTexture != fgTexture)
|
||||
{
|
||||
foregroundCamera.targetTexture = fgTexture;
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
public override void Update(GameObject gameObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration, OVRManager.TrackingOrigin trackingOrigin)
|
||||
{
|
||||
#if OVR_ANDROID_MRC
|
||||
if (skipFrame && OVRPlugin.Media.IsCastingToRemoteClient()) {
|
||||
isFrameSkipped = !isFrameSkipped;
|
||||
if(isFrameSkipped) { return; }
|
||||
}
|
||||
#endif
|
||||
|
||||
RefreshCameraObjects(gameObject, mainCamera, configuration);
|
||||
|
||||
OVRPlugin.SetHandNodePoseStateLatency(0.0); // the HandNodePoseStateLatency doesn't apply to the external composition. Always enforce it to 0.0
|
||||
|
||||
// For third-person camera to use for calculating camera position with different anchors
|
||||
OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
|
||||
OVRPose localToStagePose = stageToLocalPose.Inverse();
|
||||
OVRPose head = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.Head, OVRPlugin.Step.Render).ToOVRPose();
|
||||
OVRPose leftC = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render).ToOVRPose();
|
||||
OVRPose rightC = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandRight, OVRPlugin.Step.Render).ToOVRPose();
|
||||
OVRPlugin.Media.SetMrcHeadsetControllerPose(head.ToPosef(), leftC.ToPosef(), rightC.ToPosef());
|
||||
|
||||
#if OVR_ANDROID_MRC
|
||||
RefreshAudioFilter();
|
||||
|
||||
int drawTextureIndex = (frameIndex / 2) % 2;
|
||||
int castTextureIndex = 1 - drawTextureIndex;
|
||||
|
||||
backgroundCamera.enabled = (frameIndex % 2) == 0;
|
||||
foregroundCamera.enabled = (frameIndex % 2) == 1;
|
||||
|
||||
if (frameIndex % 2 == 0)
|
||||
{
|
||||
if (lastMrcEncodeFrameSyncId != -1)
|
||||
{
|
||||
OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
|
||||
lastMrcEncodeFrameSyncId = -1;
|
||||
}
|
||||
lastMrcEncodeFrameSyncId = CastMrcFrame(castTextureIndex);
|
||||
SetCameraTargetTexture(drawTextureIndex);
|
||||
}
|
||||
|
||||
++ frameIndex;
|
||||
#endif
|
||||
|
||||
backgroundCamera.clearFlags = mainCamera.clearFlags;
|
||||
backgroundCamera.backgroundColor = mainCamera.backgroundColor;
|
||||
if (configuration.dynamicCullingMask)
|
||||
{
|
||||
backgroundCamera.cullingMask = (mainCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
|
||||
}
|
||||
backgroundCamera.nearClipPlane = mainCamera.nearClipPlane;
|
||||
backgroundCamera.farClipPlane = mainCamera.farClipPlane;
|
||||
|
||||
if (configuration.dynamicCullingMask)
|
||||
{
|
||||
foregroundCamera.cullingMask = (mainCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
|
||||
}
|
||||
foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
|
||||
foregroundCamera.farClipPlane = mainCamera.farClipPlane;
|
||||
|
||||
if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
|
||||
{
|
||||
OVRPose worldSpacePose = new OVRPose();
|
||||
OVRPose trackingSpacePose = new OVRPose();
|
||||
trackingSpacePose.position = trackingOrigin == OVRManager.TrackingOrigin.EyeLevel ?
|
||||
OVRMixedReality.fakeCameraEyeLevelPosition :
|
||||
OVRMixedReality.fakeCameraFloorLevelPosition;
|
||||
trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
|
||||
worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
|
||||
|
||||
backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
|
||||
backgroundCamera.aspect = OVRMixedReality.fakeCameraAspect;
|
||||
foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
|
||||
foregroundCamera.aspect = OVRMixedReality.fakeCameraAspect;
|
||||
|
||||
if (cameraInTrackingSpace)
|
||||
{
|
||||
backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
|
||||
foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
|
||||
}
|
||||
else
|
||||
{
|
||||
backgroundCamera.transform.FromOVRPose(worldSpacePose);
|
||||
foregroundCamera.transform.FromOVRPose(worldSpacePose);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
OVRPlugin.CameraExtrinsics extrinsics;
|
||||
OVRPlugin.CameraIntrinsics intrinsics;
|
||||
|
||||
// So far, only support 1 camera for MR and always use camera index 0
|
||||
if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
|
||||
{
|
||||
float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
|
||||
float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
|
||||
backgroundCamera.fieldOfView = fovY;
|
||||
backgroundCamera.aspect = aspect;
|
||||
foregroundCamera.fieldOfView = fovY;
|
||||
foregroundCamera.aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
|
||||
|
||||
if (cameraInTrackingSpace)
|
||||
{
|
||||
OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
|
||||
backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
|
||||
foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
|
||||
}
|
||||
else
|
||||
{
|
||||
OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
|
||||
backgroundCamera.transform.FromOVRPose(worldSpacePose);
|
||||
foregroundCamera.transform.FromOVRPose(worldSpacePose);
|
||||
}
|
||||
#if OVR_ANDROID_MRC
|
||||
cameraPoseTimeArray[drawTextureIndex] = extrinsics.LastChangedTimeSeconds;
|
||||
#endif
|
||||
}
|
||||
else
|
||||
{
|
||||
Debug.LogError("Failed to get external camera information");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
Vector3 headToExternalCameraVec = mainCamera.transform.position - foregroundCamera.transform.position;
|
||||
float clipDistance = Vector3.Dot(headToExternalCameraVec, foregroundCamera.transform.forward);
|
||||
foregroundCamera.farClipPlane = Mathf.Max(foregroundCamera.nearClipPlane + 0.001f, clipDistance);
|
||||
}
|
||||
|
||||
#if OVR_ANDROID_MRC
|
||||
private void CleanupAudioFilter()
|
||||
{
|
||||
if (audioFilter)
|
||||
{
|
||||
audioFilter.composition = null;
|
||||
Object.Destroy(audioFilter);
|
||||
Debug.LogFormat("OVRMRAudioFilter destroyed");
|
||||
audioFilter = null;
|
||||
}
|
||||
|
||||
}
|
||||
#endif
|
||||
|
||||
public override void Cleanup()
|
||||
{
|
||||
OVRCompositionUtil.SafeDestroy(ref backgroundCameraGameObject);
|
||||
backgroundCamera = null;
|
||||
OVRCompositionUtil.SafeDestroy(ref foregroundCameraGameObject);
|
||||
foregroundCamera = null;
|
||||
Debug.Log("ExternalComposition deactivated");
|
||||
|
||||
#if OVR_ANDROID_MRC
|
||||
if (lastMrcEncodeFrameSyncId != -1)
|
||||
{
|
||||
OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
|
||||
lastMrcEncodeFrameSyncId = -1;
|
||||
}
|
||||
|
||||
CleanupAudioFilter();
|
||||
|
||||
for (int i=0; i<2; ++i)
|
||||
{
|
||||
mrcRenderTextureArray[i].Release();
|
||||
mrcRenderTextureArray[i] = null;
|
||||
|
||||
if (!renderCombinedFrame)
|
||||
{
|
||||
mrcForegroundRenderTextureArray[i].Release();
|
||||
mrcForegroundRenderTextureArray[i] = null;
|
||||
}
|
||||
}
|
||||
|
||||
OVRManager.DisplayRefreshRateChanged -= DisplayRefreshRateChanged;
|
||||
frameIndex = 0;
|
||||
#endif
|
||||
}
|
||||
|
||||
private readonly object audioDataLock = new object();
|
||||
private List<float> cachedAudioData = new List<float>(16384);
|
||||
private int cachedChannels = 0;
|
||||
|
||||
public void CacheAudioData(float[] data, int channels)
|
||||
{
|
||||
lock(audioDataLock)
|
||||
{
|
||||
if (channels != cachedChannels)
|
||||
{
|
||||
cachedAudioData.Clear();
|
||||
}
|
||||
cachedChannels = channels;
|
||||
cachedAudioData.AddRange(data);
|
||||
//Debug.LogFormat("[CacheAudioData] dspTime {0} indata {1} channels {2} accu_len {3}", AudioSettings.dspTime, data.Length, channels, cachedAudioData.Count);
|
||||
}
|
||||
}
|
||||
|
||||
public void GetAndResetAudioData(ref float[] audioData, out int audioFrames, out int channels)
|
||||
{
|
||||
lock(audioDataLock)
|
||||
{
|
||||
//Debug.LogFormat("[GetAndResetAudioData] dspTime {0} accu_len {1}", AudioSettings.dspTime, cachedAudioData.Count);
|
||||
if (audioData == null || audioData.Length < cachedAudioData.Count)
|
||||
{
|
||||
audioData = new float[cachedAudioData.Capacity];
|
||||
}
|
||||
cachedAudioData.CopyTo(audioData);
|
||||
audioFrames = cachedAudioData.Count;
|
||||
channels = cachedChannels;
|
||||
cachedAudioData.Clear();
|
||||
}
|
||||
}
|
||||
|
||||
#if OVR_ANDROID_MRC
|
||||
|
||||
private void DisplayRefreshRateChanged(float fromRefreshRate, float toRefreshRate)
|
||||
{
|
||||
skipFrame = toRefreshRate > fpsThreshold;
|
||||
}
|
||||
#endif
|
||||
|
||||
}
|
||||
|
||||
#if OVR_ANDROID_MRC
|
||||
|
||||
public class OVRMRAudioFilter : MonoBehaviour
|
||||
{
|
||||
private bool running = false;
|
||||
|
||||
public OVRExternalComposition composition;
|
||||
|
||||
void Start()
|
||||
{
|
||||
running = true;
|
||||
}
|
||||
|
||||
void OnAudioFilterRead(float[] data, int channels)
|
||||
{
|
||||
if (!running)
|
||||
return;
|
||||
|
||||
if (composition != null)
|
||||
{
|
||||
composition.CacheAudioData(data, channels);
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
||||
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 2c109ff55176f71418ec2c06d1b5d28e
|
||||
timeCreated: 1502990231
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,16 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
public class OVRSandwichComposition
|
||||
{
|
||||
// deprecated since SDK 1.41
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 3c02efcdd3fb2aa4e9c641b0c2a54b9a
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
9
Assets/Oculus/VR/Scripts/Editor.meta
Normal file
9
Assets/Oculus/VR/Scripts/Editor.meta
Normal file
@@ -0,0 +1,9 @@
|
||||
fileFormatVersion: 2
|
||||
guid: e87d4bbdfc8d17445b4a41760b401026
|
||||
folderAsset: yes
|
||||
timeCreated: 1510282190
|
||||
licenseType: Store
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
58
Assets/Oculus/VR/Scripts/Editor/OVRCustomSkeletonEditor.cs
Normal file
58
Assets/Oculus/VR/Scripts/Editor/OVRCustomSkeletonEditor.cs
Normal file
@@ -0,0 +1,58 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using UnityEditor;
|
||||
using UnityEditor.SceneManagement;
|
||||
using BoneId = OVRSkeleton.BoneId;
|
||||
|
||||
[CustomEditor(typeof(OVRCustomSkeleton))]
|
||||
public class OVRCustomSkeletonEditor : Editor
|
||||
{
|
||||
public override void OnInspectorGUI()
|
||||
{
|
||||
DrawPropertiesExcluding(serializedObject, new string[] { "_customBones" });
|
||||
serializedObject.ApplyModifiedProperties();
|
||||
|
||||
OVRCustomSkeleton skeleton = (OVRCustomSkeleton)target;
|
||||
OVRSkeleton.SkeletonType skeletonType = skeleton.GetSkeletonType();
|
||||
|
||||
if (skeletonType == OVRSkeleton.SkeletonType.None)
|
||||
{
|
||||
EditorGUILayout.HelpBox("Please select a SkeletonType.", MessageType.Warning);
|
||||
}
|
||||
else
|
||||
{
|
||||
if (GUILayout.Button("Auto Map Bones"))
|
||||
{
|
||||
skeleton.TryAutoMapBonesByName();
|
||||
EditorUtility.SetDirty(skeleton);
|
||||
EditorSceneManager.MarkSceneDirty(skeleton.gameObject.scene);
|
||||
}
|
||||
|
||||
EditorGUILayout.LabelField("Bones", EditorStyles.boldLabel);
|
||||
BoneId start = skeleton.GetCurrentStartBoneId();
|
||||
BoneId end = skeleton.GetCurrentEndBoneId();
|
||||
if (start != BoneId.Invalid && end != BoneId.Invalid)
|
||||
{
|
||||
for (int i = (int)start; i < (int)end; ++i)
|
||||
{
|
||||
string boneName = OVRSkeleton.BoneLabelFromBoneId(skeletonType, (BoneId)i);
|
||||
skeleton.CustomBones[i] = (Transform)EditorGUILayout.ObjectField(boneName, skeleton.CustomBones[i], typeof(Transform), true);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 48b4582957a398741abd6d10bcb62042
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
244
Assets/Oculus/VR/Scripts/Editor/OVREditorUtil.cs
Normal file
244
Assets/Oculus/VR/Scripts/Editor/OVREditorUtil.cs
Normal file
@@ -0,0 +1,244 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using UnityEditor;
|
||||
using System.Diagnostics;
|
||||
|
||||
public static class OVREditorUtil {
|
||||
|
||||
private static GUIContent tooltipLink = new GUIContent("[?]");
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupBoolField(Object target, string name, ref bool member, ref bool modified, string docLink = "")
|
||||
{
|
||||
SetupBoolField(target, new GUIContent(name), ref member, ref modified, docLink);
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupBoolField(Object target, GUIContent name, ref bool member, ref bool modified, string docLink = "")
|
||||
{
|
||||
EditorGUILayout.BeginHorizontal();
|
||||
|
||||
EditorGUI.BeginChangeCheck();
|
||||
bool value = EditorGUILayout.Toggle(name, member);
|
||||
if (EditorGUI.EndChangeCheck())
|
||||
{
|
||||
Undo.RecordObject(target, "Changed " + name);
|
||||
member = value;
|
||||
modified = true;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(docLink))
|
||||
{
|
||||
DisplayDocLink(docLink);
|
||||
}
|
||||
|
||||
EditorGUILayout.EndHorizontal();
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupIntField(Object target, string name, ref int member, ref bool modified)
|
||||
{
|
||||
SetupIntField(target, new GUIContent(name), ref member, ref modified);
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupIntField(Object target, GUIContent name, ref int member, ref bool modified)
|
||||
{
|
||||
EditorGUI.BeginChangeCheck();
|
||||
int value = EditorGUILayout.IntField(name, member);
|
||||
if (EditorGUI.EndChangeCheck())
|
||||
{
|
||||
Undo.RecordObject(target, "Changed " + name);
|
||||
member = value;
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupFloatField(Object target, string name, ref float member, ref bool modified)
|
||||
{
|
||||
SetupFloatField(target, new GUIContent(name), ref member, ref modified);
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupFloatField(Object target, GUIContent name, ref float member, ref bool modified)
|
||||
{
|
||||
EditorGUI.BeginChangeCheck();
|
||||
float value = EditorGUILayout.FloatField(name, member);
|
||||
if (EditorGUI.EndChangeCheck())
|
||||
{
|
||||
Undo.RecordObject(target, "Changed " + name);
|
||||
member = value;
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupDoubleField(Object target, string name, ref double member, ref bool modified)
|
||||
{
|
||||
SetupDoubleField(target, new GUIContent(name), ref member, ref modified);
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupDoubleField(Object target, GUIContent name, ref double member, ref bool modified)
|
||||
{
|
||||
EditorGUI.BeginChangeCheck();
|
||||
double value = EditorGUILayout.DoubleField(name, member);
|
||||
if (EditorGUI.EndChangeCheck())
|
||||
{
|
||||
Undo.RecordObject(target, "Changed " + name);
|
||||
member = value;
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupColorField(Object target, string name, ref Color member, ref bool modified)
|
||||
{
|
||||
SetupColorField(target, new GUIContent(name), ref member, ref modified);
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupColorField(Object target, GUIContent name, ref Color member, ref bool modified)
|
||||
{
|
||||
EditorGUI.BeginChangeCheck();
|
||||
Color value = EditorGUILayout.ColorField(name, member);
|
||||
if (EditorGUI.EndChangeCheck())
|
||||
{
|
||||
Undo.RecordObject(target, "Changed " + name);
|
||||
member = value;
|
||||
modified = true;
|
||||
}
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupLayerMaskField(Object target, string name, ref LayerMask layerMask, string[] layerMaskOptions, ref bool modified)
|
||||
{
|
||||
SetupLayerMaskField(target, new GUIContent(name), ref layerMask, layerMaskOptions, ref modified);
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupLayerMaskField(Object target, GUIContent name, ref LayerMask layerMask, string[] layerMaskOptions, ref bool modified)
|
||||
{
|
||||
EditorGUI.BeginChangeCheck();
|
||||
int value = EditorGUILayout.MaskField(name, layerMask, layerMaskOptions);
|
||||
if (EditorGUI.EndChangeCheck())
|
||||
{
|
||||
Undo.RecordObject(target, "Changed " + name);
|
||||
layerMask = value;
|
||||
}
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupEnumField<T>(Object target, string name, ref T member, ref bool modified, string docLink = "") where T : struct
|
||||
{
|
||||
SetupEnumField(target, new GUIContent(name), ref member, ref modified, docLink);
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupEnumField<T>(Object target, GUIContent name, ref T member, ref bool modified, string docLink = "") where T : struct
|
||||
{
|
||||
GUILayout.BeginHorizontal();
|
||||
|
||||
EditorGUI.BeginChangeCheck();
|
||||
T value = (T)(object)EditorGUILayout.EnumPopup(name, member as System.Enum);
|
||||
if (EditorGUI.EndChangeCheck())
|
||||
{
|
||||
Undo.RecordObject(target, "Changed " + name);
|
||||
member = value;
|
||||
modified = true;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(docLink))
|
||||
{
|
||||
DisplayDocLink(docLink);
|
||||
}
|
||||
|
||||
GUILayout.EndHorizontal();
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupInputField(Object target, string name, ref string member, ref bool modified, string docLink = "")
|
||||
{
|
||||
SetupInputField(target, new GUIContent(name), ref member, ref modified, docLink);
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupInputField(Object target, GUIContent name, ref string member, ref bool modified, string docLink = "")
|
||||
{
|
||||
GUILayout.BeginHorizontal();
|
||||
|
||||
EditorGUI.BeginChangeCheck();
|
||||
string value = EditorGUILayout.TextField(name, member);
|
||||
if (EditorGUI.EndChangeCheck())
|
||||
{
|
||||
Undo.RecordObject(target, "Changed " + name);
|
||||
member = value;
|
||||
modified = true;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(docLink))
|
||||
{
|
||||
DisplayDocLink(docLink);
|
||||
}
|
||||
|
||||
GUILayout.EndHorizontal();
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupTexture2DField(Object target, string name, ref Texture2D member, ref bool modified)
|
||||
{
|
||||
SetupTexture2DField(target, new GUIContent(name), ref member, ref modified);
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void SetupTexture2DField(Object target, GUIContent name, ref Texture2D member, ref bool modified, string docLink = "")
|
||||
{
|
||||
EditorGUILayout.BeginHorizontal();
|
||||
|
||||
EditorGUI.BeginChangeCheck();
|
||||
Texture2D value = (Texture2D)EditorGUILayout.ObjectField(name, member, typeof(Texture2D), false);
|
||||
if (EditorGUI.EndChangeCheck())
|
||||
{
|
||||
Undo.RecordObject(target, "Changed " + name);
|
||||
member = value;
|
||||
modified = true;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(docLink))
|
||||
{
|
||||
DisplayDocLink(docLink);
|
||||
}
|
||||
|
||||
EditorGUILayout.EndHorizontal();
|
||||
}
|
||||
|
||||
[Conditional("UNITY_EDITOR_WIN"), Conditional("UNITY_STANDALONE_WIN"), Conditional("UNITY_ANDROID")]
|
||||
public static void DisplayDocLink(string docLink)
|
||||
{
|
||||
#if UNITY_2021_1_OR_NEWER
|
||||
if (EditorGUILayout.LinkButton(tooltipLink))
|
||||
{
|
||||
Application.OpenURL(docLink);
|
||||
}
|
||||
#else
|
||||
if (GUILayout.Button(tooltipLink, GUILayout.ExpandWidth(false)))
|
||||
{
|
||||
Application.OpenURL(docLink);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
}
|
||||
11
Assets/Oculus/VR/Scripts/Editor/OVREditorUtil.cs.meta
Normal file
11
Assets/Oculus/VR/Scripts/Editor/OVREditorUtil.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 00e66be22bd6053489650de094c5efa8
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
175
Assets/Oculus/VR/Scripts/Editor/OVRManagerEditor.cs
Normal file
175
Assets/Oculus/VR/Scripts/Editor/OVRManagerEditor.cs
Normal file
@@ -0,0 +1,175 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using UnityEngine;
|
||||
using UnityEditor;
|
||||
using System.Collections.Generic;
|
||||
using System.Reflection;
|
||||
|
||||
[CustomEditor(typeof(OVRManager))]
|
||||
public class OVRManagerEditor : Editor
|
||||
{
|
||||
override public void OnInspectorGUI()
|
||||
{
|
||||
OVRRuntimeSettings runtimeSettings = OVRRuntimeSettings.GetRuntimeSettings();
|
||||
|
||||
#if UNITY_ANDROID
|
||||
OVRProjectConfig projectConfig = OVRProjectConfig.GetProjectConfig();
|
||||
OVRProjectConfigEditor.DrawTargetDeviceInspector(projectConfig);
|
||||
|
||||
EditorGUILayout.Space();
|
||||
#endif
|
||||
|
||||
DrawDefaultInspector();
|
||||
|
||||
bool modified = false;
|
||||
|
||||
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
|
||||
OVRManager manager = (OVRManager)target;
|
||||
|
||||
EditorGUILayout.Space();
|
||||
EditorGUILayout.LabelField("Display", EditorStyles.boldLabel);
|
||||
|
||||
OVRManager.ColorSpace colorGamut = runtimeSettings.colorSpace;
|
||||
OVREditorUtil.SetupEnumField(target, new GUIContent("Color Gamut",
|
||||
"The target color gamut when displayed on the HMD"), ref colorGamut, ref modified,
|
||||
"https://developer.oculus.com/documentation/unity/unity-color-space/");
|
||||
manager.colorGamut = colorGamut;
|
||||
|
||||
if (modified)
|
||||
{
|
||||
runtimeSettings.colorSpace = colorGamut;
|
||||
OVRRuntimeSettings.CommitRuntimeSettings(runtimeSettings);
|
||||
}
|
||||
#endif
|
||||
|
||||
#if UNITY_ANDROID
|
||||
EditorGUILayout.Space();
|
||||
OVRProjectConfigEditor.DrawProjectConfigInspector(projectConfig);
|
||||
|
||||
EditorGUILayout.Space();
|
||||
EditorGUILayout.LabelField("Mixed Reality Capture for Quest", EditorStyles.boldLabel);
|
||||
EditorGUI.indentLevel++;
|
||||
OVREditorUtil.SetupEnumField(target, "ActivationMode", ref manager.mrcActivationMode, ref modified);
|
||||
EditorGUI.indentLevel--;
|
||||
#endif
|
||||
|
||||
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
|
||||
EditorGUILayout.Space();
|
||||
EditorGUILayout.BeginHorizontal();
|
||||
manager.expandMixedRealityCapturePropertySheet = EditorGUILayout.BeginFoldoutHeaderGroup(manager.expandMixedRealityCapturePropertySheet, "Mixed Reality Capture");
|
||||
OVREditorUtil.DisplayDocLink("https://developer.oculus.com/documentation/unity/unity-mrc/");
|
||||
EditorGUILayout.EndHorizontal();
|
||||
if (manager.expandMixedRealityCapturePropertySheet)
|
||||
{
|
||||
string[] layerMaskOptions = new string[32];
|
||||
for (int i=0; i<32; ++i)
|
||||
{
|
||||
layerMaskOptions[i] = LayerMask.LayerToName(i);
|
||||
if (layerMaskOptions[i].Length == 0)
|
||||
{
|
||||
layerMaskOptions[i] = "<Layer " + i.ToString() + ">";
|
||||
}
|
||||
}
|
||||
|
||||
EditorGUI.indentLevel++;
|
||||
|
||||
OVREditorUtil.SetupBoolField(target, "enableMixedReality", ref manager.enableMixedReality, ref modified);
|
||||
OVREditorUtil.SetupEnumField(target, "compositionMethod", ref manager.compositionMethod, ref modified);
|
||||
OVREditorUtil.SetupLayerMaskField(target, "extraHiddenLayers", ref manager.extraHiddenLayers, layerMaskOptions, ref modified);
|
||||
OVREditorUtil.SetupLayerMaskField(target, "extraVisibleLayers", ref manager.extraVisibleLayers, layerMaskOptions, ref modified);
|
||||
OVREditorUtil.SetupBoolField(target, "dynamicCullingMask", ref manager.dynamicCullingMask, ref modified);
|
||||
|
||||
if (manager.compositionMethod == OVRManager.CompositionMethod.External)
|
||||
{
|
||||
EditorGUILayout.Space();
|
||||
EditorGUILayout.LabelField("External Composition", EditorStyles.boldLabel);
|
||||
EditorGUI.indentLevel++;
|
||||
|
||||
OVREditorUtil.SetupColorField(target, "backdropColor (target, Rift)", ref manager.externalCompositionBackdropColorRift, ref modified);
|
||||
OVREditorUtil.SetupColorField(target, "backdropColor (target, Quest)", ref manager.externalCompositionBackdropColorQuest, ref modified);
|
||||
EditorGUI.indentLevel--;
|
||||
}
|
||||
|
||||
if (manager.compositionMethod == OVRManager.CompositionMethod.Direct)
|
||||
{
|
||||
EditorGUILayout.Space();
|
||||
EditorGUILayout.LabelField("Direct Composition", EditorStyles.boldLabel);
|
||||
EditorGUI.indentLevel++;
|
||||
|
||||
EditorGUILayout.Space();
|
||||
EditorGUILayout.LabelField("Camera", EditorStyles.boldLabel);
|
||||
OVREditorUtil.SetupEnumField(target, "capturingCameraDevice", ref manager.capturingCameraDevice, ref modified);
|
||||
OVREditorUtil.SetupBoolField(target, "flipCameraFrameHorizontally", ref manager.flipCameraFrameHorizontally, ref modified);
|
||||
OVREditorUtil.SetupBoolField(target, "flipCameraFrameVertically", ref manager.flipCameraFrameVertically, ref modified);
|
||||
|
||||
EditorGUILayout.Space();
|
||||
EditorGUILayout.LabelField("Chroma Key", EditorStyles.boldLabel);
|
||||
OVREditorUtil.SetupColorField(target, "chromaKeyColor", ref manager.chromaKeyColor, ref modified);
|
||||
OVREditorUtil.SetupFloatField(target, "chromaKeySimilarity", ref manager.chromaKeySimilarity, ref modified);
|
||||
OVREditorUtil.SetupFloatField(target, "chromaKeySmoothRange", ref manager.chromaKeySmoothRange, ref modified);
|
||||
OVREditorUtil.SetupFloatField(target, "chromaKeySpillRange", ref manager.chromaKeySpillRange, ref modified);
|
||||
|
||||
EditorGUILayout.Space();
|
||||
EditorGUILayout.LabelField("Dynamic Lighting", EditorStyles.boldLabel);
|
||||
OVREditorUtil.SetupBoolField(target, "useDynamicLighting", ref manager.useDynamicLighting, ref modified);
|
||||
OVREditorUtil.SetupEnumField(target, "depthQuality", ref manager.depthQuality, ref modified);
|
||||
OVREditorUtil.SetupFloatField(target, "dynamicLightingSmoothFactor", ref manager.dynamicLightingSmoothFactor, ref modified);
|
||||
OVREditorUtil.SetupFloatField(target, "dynamicLightingDepthVariationClampingValue", ref manager.dynamicLightingDepthVariationClampingValue, ref modified);
|
||||
|
||||
EditorGUILayout.Space();
|
||||
EditorGUILayout.LabelField("Virtual Green Screen", EditorStyles.boldLabel);
|
||||
OVREditorUtil.SetupEnumField(target, "virtualGreenScreenType", ref manager.virtualGreenScreenType, ref modified);
|
||||
OVREditorUtil.SetupFloatField(target, "virtualGreenScreenTopY", ref manager.virtualGreenScreenTopY, ref modified);
|
||||
OVREditorUtil.SetupFloatField(target, "virtualGreenScreenBottomY", ref manager.virtualGreenScreenBottomY, ref modified);
|
||||
OVREditorUtil.SetupBoolField(target, "virtualGreenScreenApplyDepthCulling", ref manager.virtualGreenScreenApplyDepthCulling, ref modified);
|
||||
OVREditorUtil.SetupFloatField(target, "virtualGreenScreenDepthTolerance", ref manager.virtualGreenScreenDepthTolerance, ref modified);
|
||||
|
||||
EditorGUILayout.Space();
|
||||
EditorGUILayout.LabelField("Latency Control", EditorStyles.boldLabel);
|
||||
OVREditorUtil.SetupFloatField(target, "handPoseStateLatency", ref manager.handPoseStateLatency, ref modified);
|
||||
EditorGUI.indentLevel--;
|
||||
}
|
||||
|
||||
EditorGUI.indentLevel--;
|
||||
}
|
||||
#endif
|
||||
|
||||
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
|
||||
// Insight Passthrough section
|
||||
#if UNITY_ANDROID
|
||||
bool passthroughCapabilityEnabled = projectConfig.insightPassthroughEnabled;
|
||||
EditorGUI.BeginDisabledGroup(!passthroughCapabilityEnabled);
|
||||
GUIContent enablePassthroughContent = new GUIContent("Enable Passthrough", "Enables passthrough functionality for the scene. Can be toggled at runtime. Passthrough Capability must be enabled in the project settings.");
|
||||
#else
|
||||
GUIContent enablePassthroughContent = new GUIContent("Enable Passthrough", "Enables passthrough functionality for the scene. Can be toggled at runtime.");
|
||||
#endif
|
||||
EditorGUILayout.Space();
|
||||
EditorGUILayout.LabelField("Insight Passthrough", EditorStyles.boldLabel);
|
||||
#if UNITY_ANDROID
|
||||
if (!passthroughCapabilityEnabled) {
|
||||
EditorGUILayout.LabelField("Requires Passthrough Capability to be enabled in the General section of the Quest features.", EditorStyles.wordWrappedLabel);
|
||||
}
|
||||
#endif
|
||||
OVREditorUtil.SetupBoolField(target, enablePassthroughContent, ref manager.isInsightPassthroughEnabled, ref modified);
|
||||
#if UNITY_ANDROID
|
||||
EditorGUI.EndDisabledGroup();
|
||||
#endif
|
||||
#endif
|
||||
|
||||
if (modified)
|
||||
{
|
||||
EditorUtility.SetDirty(target);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
12
Assets/Oculus/VR/Scripts/Editor/OVRManagerEditor.cs.meta
Normal file
12
Assets/Oculus/VR/Scripts/Editor/OVRManagerEditor.cs.meta
Normal file
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 9b07d18088099f94fa00fc15e64b2b17
|
||||
timeCreated: 1502747851
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
174
Assets/Oculus/VR/Scripts/Editor/OVROverlayDestRectEditor.shader
Normal file
174
Assets/Oculus/VR/Scripts/Editor/OVROverlayDestRectEditor.shader
Normal file
@@ -0,0 +1,174 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
Shader "Unlit/OVROverlayDestRectEditor"
|
||||
{
|
||||
Properties
|
||||
{
|
||||
_MainTex ("Texture", 2D) = "white" {}
|
||||
_SrcRectLeft("SrcRectLeft", Vector) = (0,0,1,1)
|
||||
_SrcRectRight("SrcRectRight", Vector) = (0,0,1,1)
|
||||
_DestRectLeft ("DestRectLeft", Vector) = (0,0,1,1)
|
||||
_DestRectRight("DestRectRight", Vector) = (0,0,1,1)
|
||||
_BackgroundColor("Background Color", Color) = (0.225, 0.225, 0.225, 1)
|
||||
}
|
||||
SubShader
|
||||
{
|
||||
Tags { "RenderType"="Opaque" }
|
||||
LOD 100
|
||||
|
||||
Pass
|
||||
{
|
||||
CGPROGRAM
|
||||
#pragma vertex vert
|
||||
#pragma fragment frag
|
||||
|
||||
#include "UnityCG.cginc"
|
||||
|
||||
struct appdata
|
||||
{
|
||||
float4 vertex : POSITION;
|
||||
float2 uv : TEXCOORD0;
|
||||
};
|
||||
|
||||
struct v2f
|
||||
{
|
||||
float2 uv : TEXCOORD0;
|
||||
float4 vertex : SV_POSITION;
|
||||
float4 leftDragX : TEXCOORD1;
|
||||
float4 leftDragY : TEXCOORD2;
|
||||
float4 rightDragX : TEXCOORD3;
|
||||
float4 rightDragY : TEXCOORD4;
|
||||
};
|
||||
|
||||
sampler2D _MainTex;
|
||||
float4 _MainTex_ST;
|
||||
|
||||
float4 _SrcRectLeft;
|
||||
float4 _SrcRectRight;
|
||||
float4 _DestRectLeft;
|
||||
float4 _DestRectRight;
|
||||
|
||||
fixed4 _BackgroundColor;
|
||||
|
||||
v2f vert (appdata v)
|
||||
{
|
||||
v2f o;
|
||||
o.vertex = UnityObjectToClipPos(v.vertex);
|
||||
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
|
||||
// Add padding
|
||||
o.uv = (o.uv - 0.5) * (256.0 + 8.0) / (256.0) + 0.5;
|
||||
|
||||
// left
|
||||
o.leftDragX.x = _DestRectLeft.x;
|
||||
o.leftDragY.x = _DestRectLeft.y + _DestRectLeft.w * 0.5;
|
||||
// right
|
||||
o.leftDragX.y = _DestRectLeft.x + _DestRectLeft.z;
|
||||
o.leftDragY.y = _DestRectLeft.y + _DestRectLeft.w * 0.5;
|
||||
// top
|
||||
o.leftDragX.z = _DestRectLeft.x + _DestRectLeft.z * 0.5;
|
||||
o.leftDragY.z = _DestRectLeft.y;
|
||||
// bottom
|
||||
o.leftDragX.w = _DestRectLeft.x + _DestRectLeft.z * 0.5;
|
||||
o.leftDragY.w = _DestRectLeft.y + _DestRectLeft.w;
|
||||
// right
|
||||
o.rightDragX.x = _DestRectRight.x;
|
||||
o.rightDragY.x = _DestRectRight.y + _DestRectRight.w * 0.5;
|
||||
// right
|
||||
o.rightDragX.y = _DestRectRight.x + _DestRectRight.z;
|
||||
o.rightDragY.y = _DestRectRight.y + _DestRectRight.w * 0.5;
|
||||
// top
|
||||
o.rightDragX.z = _DestRectRight.x + _DestRectRight.z * 0.5;
|
||||
o.rightDragY.z = _DestRectRight.y;
|
||||
// bottom
|
||||
o.rightDragX.w = _DestRectRight.x + _DestRectRight.z * 0.5;
|
||||
o.rightDragY.w = _DestRectRight.y + _DestRectRight.w;
|
||||
|
||||
return o;
|
||||
}
|
||||
|
||||
float onDrag(float2 uv, float x, float y)
|
||||
{
|
||||
const float pixelSize = 6;
|
||||
return abs(uv.x - x) < ((pixelSize / 2) / 128.0) && abs(uv.y - y) < ((pixelSize / 2) / 128.0);
|
||||
}
|
||||
|
||||
float onLine(float2 uv, float4 rect)
|
||||
{
|
||||
return
|
||||
(abs(uv.x - rect.x) < (1 / 128.0) && uv.y >= rect.y && uv.y <= rect.y + rect.w) ||
|
||||
(abs(uv.x - rect.x - rect.z) < (1 / 128.0) && uv.y >= rect.y && uv.y <= rect.y + rect.w) ||
|
||||
(abs(uv.y - rect.y) < (1 / 128.0) && uv.x >= rect.x && uv.x <= rect.x + rect.z) ||
|
||||
(abs(uv.y - rect.y - rect.w) < (1 / 128.0) && uv.x >= rect.x && uv.x <= rect.x + rect.z);
|
||||
}
|
||||
|
||||
float checkerboard(float2 uv)
|
||||
{
|
||||
float x = floor(uv.x * (16 + 2));
|
||||
float y = floor(uv.y * 8);
|
||||
|
||||
return 2 * ((x + y) / 2.0 - floor((x + y) / 2.0));
|
||||
}
|
||||
|
||||
fixed4 frag (v2f i) : SV_Target
|
||||
{
|
||||
float isLeftEye = i.uv < 0.5;
|
||||
float2 leftUV = float2(i.uv.x * (256.0 + 32.0) / 128.0, i.uv.y);
|
||||
float2 rightUV = float2(1 - ((1 - i.uv.x) * (256.0 + 32.0) / 128.0), i.uv.y);
|
||||
|
||||
float2 uv = i.uv;
|
||||
float2 textureUV = i.uv;
|
||||
if (isLeftEye)
|
||||
{
|
||||
uv = (leftUV - _DestRectLeft.xy) / _DestRectLeft.zw;
|
||||
textureUV = uv * _SrcRectLeft.zw + _SrcRectLeft.xy;
|
||||
}
|
||||
else
|
||||
{
|
||||
uv = (rightUV - _DestRectRight.xy) / _DestRectRight.zw;
|
||||
textureUV = uv * _SrcRectRight.zw + _SrcRectRight.xy;
|
||||
}
|
||||
|
||||
// sample the texture
|
||||
fixed4 col = tex2D(_MainTex, float2(textureUV.x, 1 - textureUV.y));
|
||||
|
||||
if (uv.x < 0 || uv.x > 1 || uv.y < 0 || uv.y > 1)
|
||||
{
|
||||
col.a = 0;
|
||||
}
|
||||
|
||||
col.rgb = lerp(0.41 - 0.13 * checkerboard(i.uv), col.rgb, col.a);
|
||||
|
||||
if (i.uv.x < 0 || i.uv.x > 1 || i.uv.y < 0 || i.uv.y > 1 || abs(i.uv.x - 0.5) < (14 / 256.0))
|
||||
{
|
||||
col = _BackgroundColor;
|
||||
}
|
||||
|
||||
// now draw clipping objects
|
||||
float left = isLeftEye && (onLine(leftUV, _DestRectLeft) ||
|
||||
onDrag(leftUV, i.leftDragX.x, i.leftDragY.x) ||
|
||||
onDrag(leftUV, i.leftDragX.y, i.leftDragY.y) ||
|
||||
onDrag(leftUV, i.leftDragX.z, i.leftDragY.z) ||
|
||||
onDrag(leftUV, i.leftDragX.w, i.leftDragY.w));
|
||||
|
||||
float right = (!isLeftEye) && (onLine(rightUV, _DestRectRight) ||
|
||||
onDrag(rightUV, i.rightDragX.x, i.rightDragY.x) ||
|
||||
onDrag(rightUV, i.rightDragX.y, i.rightDragY.y) ||
|
||||
onDrag(rightUV, i.rightDragX.z, i.rightDragY.z) ||
|
||||
onDrag(rightUV, i.rightDragX.w, i.rightDragY.w));
|
||||
|
||||
return lerp(col, fixed4(left, right, 0, 1), left || right);
|
||||
}
|
||||
ENDCG
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 7c52c9bacdbb59f4a973dd1849d03106
|
||||
ShaderImporter:
|
||||
externalObjects: {}
|
||||
defaultTextures: []
|
||||
nonModifiableTextures: []
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
631
Assets/Oculus/VR/Scripts/Editor/OVROverlayEditor.cs
Normal file
631
Assets/Oculus/VR/Scripts/Editor/OVROverlayEditor.cs
Normal file
@@ -0,0 +1,631 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using UnityEditor;
|
||||
|
||||
[CustomEditor(typeof(OVROverlay))]
|
||||
public class OVROverlayEditor : Editor
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// Common Video Types, to ease source and dest rect creation
|
||||
/// </summary>
|
||||
public enum StereoType
|
||||
{
|
||||
Custom = 0,
|
||||
Mono = 1,
|
||||
Stereo = 2,
|
||||
StereoLeftRight = 3,
|
||||
StereoTopBottom = 4,
|
||||
}
|
||||
|
||||
public enum DisplayType
|
||||
{
|
||||
Custom = 0,
|
||||
Full = 1,
|
||||
Half = 2,
|
||||
}
|
||||
|
||||
private bool sourceRectsVisible = false;
|
||||
private bool destRectsVisible = false;
|
||||
|
||||
private Material _SrcRectMaterial;
|
||||
protected Material SrcRectMaterial
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_SrcRectMaterial == null)
|
||||
{
|
||||
string[] shaders = AssetDatabase.FindAssets("OVROverlaySrcRectEditor");
|
||||
|
||||
if (shaders.Length > 0)
|
||||
{
|
||||
Shader shader = (Shader)AssetDatabase.LoadAssetAtPath(AssetDatabase.GUIDToAssetPath(shaders[0]), typeof(Shader));
|
||||
|
||||
if (shader != null)
|
||||
{
|
||||
_SrcRectMaterial = new Material(shader);
|
||||
}
|
||||
}
|
||||
}
|
||||
return _SrcRectMaterial;
|
||||
}
|
||||
}
|
||||
|
||||
private Material _DestRectMaterial;
|
||||
protected Material DestRectMaterial
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_DestRectMaterial == null)
|
||||
{
|
||||
string[] shaders = AssetDatabase.FindAssets("OVROverlayDestRectEditor");
|
||||
|
||||
if (shaders.Length > 0)
|
||||
{
|
||||
Shader shader = (Shader)AssetDatabase.LoadAssetAtPath(AssetDatabase.GUIDToAssetPath(shaders[0]), typeof(Shader));
|
||||
|
||||
if (shader != null)
|
||||
{
|
||||
_DestRectMaterial = new Material(shader);
|
||||
}
|
||||
}
|
||||
}
|
||||
return _DestRectMaterial;
|
||||
}
|
||||
}
|
||||
|
||||
private TextureRect _DraggingRect;
|
||||
private Side _DraggingSide;
|
||||
|
||||
enum TextureRect
|
||||
{
|
||||
None,
|
||||
SrcLeft,
|
||||
SrcRight,
|
||||
DestLeft,
|
||||
DestRight
|
||||
}
|
||||
|
||||
enum Side
|
||||
{
|
||||
Left,
|
||||
Right,
|
||||
Top,
|
||||
Bottom
|
||||
}
|
||||
|
||||
private GUIContent[] selectableShapeNames;
|
||||
private OVROverlay.OverlayShape[] selectableShapeValues;
|
||||
|
||||
private void Awake()
|
||||
{
|
||||
List<GUIContent> selectableShapeNameList = new List<GUIContent>();
|
||||
List<OVROverlay.OverlayShape> selectableShapesValueList = new List<OVROverlay.OverlayShape>();
|
||||
foreach (OVROverlay.OverlayShape value in Enum.GetValues(typeof(OVROverlay.OverlayShape)))
|
||||
{
|
||||
if (!OVROverlay.IsPassthroughShape(value))
|
||||
{
|
||||
string name = Enum.GetName(typeof(OVROverlay.OverlayShape), value);
|
||||
selectableShapeNameList.Add(new GUIContent(name, name));
|
||||
selectableShapesValueList.Add(value);
|
||||
}
|
||||
}
|
||||
selectableShapeNames = selectableShapeNameList.ToArray();
|
||||
selectableShapeValues = selectableShapesValueList.ToArray();
|
||||
}
|
||||
|
||||
public override void OnInspectorGUI()
|
||||
{
|
||||
OVROverlay overlay = (OVROverlay)target;
|
||||
if (overlay == null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
EditorGUILayout.LabelField("Display Order", EditorStyles.boldLabel);
|
||||
overlay.currentOverlayType = (OVROverlay.OverlayType)EditorGUILayout.EnumPopup(new GUIContent("Current Overlay Type", "Whether this overlay should layer behind the scene or in front of it"), overlay.currentOverlayType);
|
||||
overlay.compositionDepth = EditorGUILayout.IntField(new GUIContent("Composition Depth", "Depth value used to sort OVROverlays in the scene, smaller value appears in front"), overlay.compositionDepth);
|
||||
overlay.noDepthBufferTesting = EditorGUILayout.Toggle(new GUIContent("No Depth Buffer Testing", "The noDepthBufferTesting will stop layer's depth buffer compositing even if the engine has \"Shared Depth Buffer\" enabled"), overlay.noDepthBufferTesting);
|
||||
EditorGUILayout.Space();
|
||||
|
||||
EditorGUILayout.LabelField(new GUIContent("Overlay Shape", "The shape of this overlay"), EditorStyles.boldLabel);
|
||||
int currentShapeIndex = Array.IndexOf(selectableShapeValues, overlay.currentOverlayShape);
|
||||
if (currentShapeIndex == -1) {
|
||||
Debug.LogError("Invalid shape encountered");
|
||||
currentShapeIndex = 0;
|
||||
}
|
||||
currentShapeIndex = EditorGUILayout.Popup(new GUIContent("Overlay Shape", "The shape of this overlay"), currentShapeIndex, selectableShapeNames);
|
||||
overlay.currentOverlayShape = selectableShapeValues[currentShapeIndex];
|
||||
|
||||
EditorGUILayout.Space();
|
||||
|
||||
EditorGUILayout.LabelField("Layer Properties", EditorStyles.boldLabel);
|
||||
overlay.useBicubicFiltering = EditorGUILayout.Toggle(new GUIContent("Bicubic Filtering",
|
||||
"Whether this layer should use bicubic filtering. This can increase quality for small details on text and icons being viewed at farther distances."), overlay.useBicubicFiltering);
|
||||
|
||||
EditorGUILayout.Space();
|
||||
|
||||
EditorGUILayout.Separator();
|
||||
EditorGUILayout.LabelField("Textures", EditorStyles.boldLabel);
|
||||
|
||||
#if UNITY_ANDROID
|
||||
bool lastIsExternalSurface = overlay.isExternalSurface;
|
||||
overlay.isExternalSurface = EditorGUILayout.Toggle(new GUIContent("Is External Surface", "On Android, retrieve an Android Surface object to render to (e.g., video playback)"), overlay.isExternalSurface);
|
||||
|
||||
if (lastIsExternalSurface)
|
||||
{
|
||||
overlay.externalSurfaceWidth = EditorGUILayout.IntField("External Surface Width", overlay.externalSurfaceWidth);
|
||||
overlay.externalSurfaceHeight = EditorGUILayout.IntField("External Surface Height", overlay.externalSurfaceHeight);
|
||||
overlay.isProtectedContent = EditorGUILayout.Toggle(new GUIContent("Is Protected Content", "The external surface has L1 widevine protection."), overlay.isProtectedContent);
|
||||
}
|
||||
else
|
||||
#endif
|
||||
{
|
||||
if (overlay.textures == null)
|
||||
{
|
||||
overlay.textures = new Texture[2];
|
||||
}
|
||||
if (overlay.textures.Length < 2)
|
||||
{
|
||||
Texture[] tmp = new Texture[2];
|
||||
for (int i = 0; i < overlay.textures.Length; i++)
|
||||
{
|
||||
tmp[i] = overlay.textures[i];
|
||||
}
|
||||
overlay.textures = tmp;
|
||||
}
|
||||
|
||||
var labelControlRect = EditorGUILayout.GetControlRect();
|
||||
EditorGUI.LabelField(new Rect(labelControlRect.x, labelControlRect.y, labelControlRect.width / 2, labelControlRect.height), new GUIContent("Left Texture", "Texture used for the left eye"));
|
||||
EditorGUI.LabelField(new Rect(labelControlRect.x + labelControlRect.width / 2, labelControlRect.y, labelControlRect.width / 2, labelControlRect.height), new GUIContent("Right Texture", "Texture used for the right eye"));
|
||||
|
||||
|
||||
var textureControlRect = EditorGUILayout.GetControlRect(GUILayout.Height(64));
|
||||
|
||||
overlay.textures[0] = (Texture)EditorGUI.ObjectField(new Rect(textureControlRect.x, textureControlRect.y, 64, textureControlRect.height), overlay.textures[0], typeof(Texture), true);
|
||||
Texture right = (Texture)EditorGUI.ObjectField(new Rect(textureControlRect.x + textureControlRect.width / 2, textureControlRect.y, 64, textureControlRect.height), overlay.textures[1] != null ? overlay.textures[1] : overlay.textures[0], typeof(Texture), true);
|
||||
if (right == overlay.textures[0])
|
||||
{
|
||||
overlay.textures[1] = null;
|
||||
}
|
||||
else
|
||||
{
|
||||
overlay.textures[1] = right;
|
||||
}
|
||||
|
||||
overlay.isDynamic = EditorGUILayout.Toggle(new GUIContent("Dynamic Texture", "This texture will be updated dynamically at runtime (e.g., Video)"), overlay.isDynamic);
|
||||
#if !UNITY_ANDROID
|
||||
overlay.isProtectedContent = EditorGUILayout.Toggle(new GUIContent("Is Protected Content", "The texture has copy protection, e.g., HDCP"), overlay.isProtectedContent);
|
||||
#endif
|
||||
}
|
||||
if (overlay.currentOverlayShape == OVROverlay.OverlayShape.Cylinder || overlay.currentOverlayShape == OVROverlay.OverlayShape.Equirect || overlay.currentOverlayShape == OVROverlay.OverlayShape.Quad || overlay.currentOverlayShape == OVROverlay.OverlayShape.Fisheye)
|
||||
{
|
||||
|
||||
EditorGUILayout.Separator();
|
||||
EditorGUILayout.Space();
|
||||
EditorGUILayout.LabelField("Texture Rects", EditorStyles.boldLabel);
|
||||
|
||||
bool lastOverrideTextureRectMatrix = overlay.overrideTextureRectMatrix;
|
||||
overlay.overrideTextureRectMatrix = !EditorGUILayout.Toggle(new GUIContent("Use Default Rects", overlay.textures[1] == null ? "If you need to use a single texture as a stereo image, uncheck this box" : "Uncheck this box if you need to clip you textures or layer"), !overlay.overrideTextureRectMatrix);
|
||||
|
||||
if (lastOverrideTextureRectMatrix)
|
||||
{
|
||||
sourceRectsVisible = EditorGUILayout.Foldout(sourceRectsVisible, new GUIContent("Source Rects", "What portion of the source texture will ultimately be shown in each eye."));
|
||||
|
||||
if (sourceRectsVisible)
|
||||
{
|
||||
var mat = SrcRectMaterial;
|
||||
|
||||
if (mat != null)
|
||||
{
|
||||
Rect drawRect = EditorGUILayout.GetControlRect(GUILayout.Height(128 + 8));
|
||||
Vector4 srcLeft = new Vector4(Mathf.Max(0.0f, overlay.srcRectLeft.x), Mathf.Max(0.0f, overlay.srcRectLeft.y), Mathf.Min(1.0f - overlay.srcRectLeft.x, overlay.srcRectLeft.width), Mathf.Min(1.0f - overlay.srcRectLeft.y, overlay.srcRectLeft.height));
|
||||
Vector4 srcRight = new Vector4(Mathf.Max(0.0f, overlay.srcRectRight.x), Mathf.Max(0.0f, overlay.srcRectRight.y), Mathf.Min(1.0f - overlay.srcRectRight.x, overlay.srcRectRight.width), Mathf.Min(1.0f - overlay.srcRectRight.y, overlay.srcRectRight.height));
|
||||
|
||||
if (overlay.invertTextureRects)
|
||||
{
|
||||
srcLeft.y = 1 - srcLeft.y - srcLeft.w;
|
||||
srcRight.y = 1 - srcRight.y - srcRight.w;
|
||||
}
|
||||
mat.SetVector("_SrcRectLeft", srcLeft);
|
||||
mat.SetVector("_SrcRectRight", srcRight);
|
||||
// center our draw rect
|
||||
var drawRectCentered = new Rect(drawRect.x + drawRect.width / 2 - 128 - 4, drawRect.y, 256 + 8, drawRect.height);
|
||||
EditorGUI.DrawPreviewTexture(drawRectCentered, overlay.textures[0] ?? Texture2D.blackTexture, mat);
|
||||
|
||||
var drawRectInset = new Rect(drawRectCentered.x + 4, drawRectCentered.y + 4, drawRectCentered.width - 8, drawRectCentered.height - 8);
|
||||
UpdateRectDragging(drawRectInset, drawRectInset, TextureRect.SrcLeft, TextureRect.SrcRight, overlay.invertTextureRects, ref overlay.srcRectLeft, ref overlay.srcRectRight);
|
||||
CreateCursorRects(drawRectInset, overlay.srcRectLeft, overlay.invertTextureRects);
|
||||
CreateCursorRects(drawRectInset, overlay.srcRectRight, overlay.invertTextureRects);
|
||||
}
|
||||
|
||||
var labelControlRect = EditorGUILayout.GetControlRect();
|
||||
EditorGUI.LabelField(new Rect(labelControlRect.x, labelControlRect.y, labelControlRect.width / 2, labelControlRect.height), new GUIContent("Left Source Rect", "The rect in the source image that will be displayed on the left eye layer"));
|
||||
EditorGUI.LabelField(new Rect(labelControlRect.x + labelControlRect.width / 2, labelControlRect.y, labelControlRect.width / 2, labelControlRect.height), new GUIContent("Right Source Rect", "The rect in the source image that will be displayed on the right eye layer"));
|
||||
|
||||
var rectControlRect = EditorGUILayout.GetControlRect(GUILayout.Height(34));
|
||||
|
||||
overlay.srcRectLeft = Clamp01(EditorGUI.RectField(new Rect(rectControlRect.x, rectControlRect.y, rectControlRect.width / 2 - 20, rectControlRect.height), overlay.srcRectLeft));
|
||||
overlay.srcRectRight = Clamp01(EditorGUI.RectField(new Rect(rectControlRect.x + rectControlRect.width / 2, rectControlRect.y, rectControlRect.width / 2 - 20, rectControlRect.height), overlay.srcRectRight));
|
||||
|
||||
|
||||
EditorGUILayout.BeginHorizontal();
|
||||
if (overlay.textures[1] != null)
|
||||
{
|
||||
if (GUILayout.Button(new GUIContent("Reset To Default", "Reset Source Rects to default")))
|
||||
{
|
||||
SetRectsByVideoType(overlay, StereoType.Stereo, DisplayType.Custom);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if (GUILayout.Button(new GUIContent("Monoscopic", "Display the full Texture in both eyes")))
|
||||
{
|
||||
SetRectsByVideoType(overlay, StereoType.Mono, DisplayType.Custom);
|
||||
}
|
||||
if (GUILayout.Button(new GUIContent("Stereo Left/Right", "The left half of the texture is displayed in the left eye, and the right half in the right eye")))
|
||||
{
|
||||
SetRectsByVideoType(overlay, StereoType.StereoLeftRight, DisplayType.Custom);
|
||||
}
|
||||
if (GUILayout.Button(new GUIContent("Stereo Top/Bottom", "The top half of the texture is displayed in the left eye, and the bottom half in the right eye")))
|
||||
{
|
||||
SetRectsByVideoType(overlay, StereoType.StereoTopBottom, DisplayType.Custom);
|
||||
}
|
||||
}
|
||||
EditorGUILayout.EndHorizontal();
|
||||
|
||||
}
|
||||
destRectsVisible = EditorGUILayout.Foldout(destRectsVisible, new GUIContent("Destination Rects", "What portion of the destination texture that the source will be rendered into."));
|
||||
if (destRectsVisible)
|
||||
{
|
||||
|
||||
var mat = DestRectMaterial;
|
||||
|
||||
if (mat != null)
|
||||
{
|
||||
Rect drawRect = EditorGUILayout.GetControlRect(GUILayout.Height(128 + 8));
|
||||
|
||||
Vector4 srcLeft = new Vector4(Mathf.Max(0.0f, overlay.srcRectLeft.x), Mathf.Max(0.0f, overlay.srcRectLeft.y), Mathf.Min(1.0f - overlay.srcRectLeft.x, overlay.srcRectLeft.width), Mathf.Min(1.0f - overlay.srcRectLeft.y, overlay.srcRectLeft.height));
|
||||
Vector4 srcRight = new Vector4(Mathf.Max(0.0f, overlay.srcRectRight.x), Mathf.Max(0.0f, overlay.srcRectRight.y), Mathf.Min(1.0f - overlay.srcRectRight.x, overlay.srcRectRight.width), Mathf.Min(1.0f - overlay.srcRectRight.y, overlay.srcRectRight.height));
|
||||
Vector4 destLeft = new Vector4(Mathf.Max(0.0f, overlay.destRectLeft.x), Mathf.Max(0.0f, overlay.destRectLeft.y), Mathf.Min(1.0f - overlay.destRectLeft.x, overlay.destRectLeft.width), Mathf.Min(1.0f - overlay.destRectLeft.y, overlay.destRectLeft.height));
|
||||
Vector4 destRight = new Vector4(Mathf.Max(0.0f, overlay.destRectRight.x), Mathf.Max(0.0f, overlay.destRectRight.y), Mathf.Min(1.0f - overlay.destRectRight.x, overlay.destRectRight.width), Mathf.Min(1.0f - overlay.destRectRight.y, overlay.destRectRight.height));
|
||||
|
||||
if (overlay.invertTextureRects)
|
||||
{
|
||||
srcLeft.y = 1 - srcLeft.y - srcLeft.w;
|
||||
srcRight.y = 1 - srcRight.y - srcRight.w;
|
||||
destLeft.y = 1 - destLeft.y - destLeft.w;
|
||||
destRight.y = 1 - destRight.y - destRight.w;
|
||||
}
|
||||
mat.SetVector("_SrcRectLeft", srcLeft);
|
||||
mat.SetVector("_SrcRectRight", srcRight);
|
||||
mat.SetVector("_DestRectLeft", destLeft);
|
||||
mat.SetVector("_DestRectRight", destRight);
|
||||
mat.SetColor("_BackgroundColor", EditorGUIUtility.isProSkin ? (Color)new Color32(56, 56, 56, 255) : (Color)new Color32(194, 194, 194, 255));
|
||||
|
||||
var drawRectCentered = new Rect(drawRect.x + drawRect.width / 2 - 128 - 16 - 4, drawRect.y, 256 + 32 + 8, drawRect.height);
|
||||
// center our draw rect
|
||||
EditorGUI.DrawPreviewTexture(drawRectCentered, overlay.textures[0] ?? Texture2D.blackTexture, mat);
|
||||
|
||||
var drawRectInsetLeft = new Rect(drawRectCentered.x + 4, drawRectCentered.y + 4, drawRectCentered.width / 2 - 20, drawRectCentered.height - 8);
|
||||
var drawRectInsetRight = new Rect(drawRectCentered.x + drawRectCentered.width / 2 + 16, drawRectCentered.y + 4, drawRectCentered.width / 2 - 20, drawRectCentered.height - 8);
|
||||
UpdateRectDragging(drawRectInsetLeft, drawRectInsetRight, TextureRect.DestLeft, TextureRect.DestRight, overlay.invertTextureRects, ref overlay.destRectLeft, ref overlay.destRectRight);
|
||||
|
||||
CreateCursorRects(drawRectInsetLeft, overlay.destRectLeft, overlay.invertTextureRects);
|
||||
CreateCursorRects(drawRectInsetRight, overlay.destRectRight, overlay.invertTextureRects);
|
||||
|
||||
}
|
||||
|
||||
var labelControlRect = EditorGUILayout.GetControlRect();
|
||||
EditorGUI.LabelField(new Rect(labelControlRect.x, labelControlRect.y, labelControlRect.width / 2, labelControlRect.height), new GUIContent("Left Destination Rect", "The rect in the destination layer the left eye will display to"));
|
||||
EditorGUI.LabelField(new Rect(labelControlRect.x + labelControlRect.width / 2, labelControlRect.y, labelControlRect.width / 2, labelControlRect.height), new GUIContent("Right Destination Rect", "The rect in the destination layer the right eye will display to"));
|
||||
|
||||
var rectControlRect = EditorGUILayout.GetControlRect(GUILayout.Height(34));
|
||||
|
||||
overlay.destRectLeft = Clamp01(EditorGUI.RectField(new Rect(rectControlRect.x, rectControlRect.y, rectControlRect.width / 2 - 20, rectControlRect.height), overlay.destRectLeft));
|
||||
overlay.destRectRight = Clamp01(EditorGUI.RectField(new Rect(rectControlRect.x + rectControlRect.width / 2, rectControlRect.y, rectControlRect.width / 2 - 20, rectControlRect.height), overlay.destRectRight));
|
||||
|
||||
|
||||
if (overlay.currentOverlayShape == OVROverlay.OverlayShape.Equirect)
|
||||
{
|
||||
EditorGUILayout.BeginHorizontal();
|
||||
if (GUILayout.Button(new GUIContent("360 Video", "Display the full 360 layer")))
|
||||
{
|
||||
SetRectsByVideoType(overlay, StereoType.Custom, DisplayType.Full);
|
||||
}
|
||||
if (GUILayout.Button(new GUIContent("180 Video", "Display the front 180 layer")))
|
||||
{
|
||||
SetRectsByVideoType(overlay, StereoType.Custom, DisplayType.Half);
|
||||
}
|
||||
EditorGUILayout.EndHorizontal();
|
||||
}
|
||||
else
|
||||
{
|
||||
if (GUILayout.Button(new GUIContent("Reset To Default", "Reset Source Rects to default")))
|
||||
{
|
||||
SetRectsByVideoType(overlay, StereoType.Custom, DisplayType.Full);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
overlay.invertTextureRects = EditorGUILayout.Toggle(new GUIContent("Invert Rect Coordinates", "Check this box to use the top left corner of the texture as the origin"), overlay.invertTextureRects);
|
||||
}
|
||||
}
|
||||
|
||||
EditorGUILayout.Separator();
|
||||
EditorGUILayout.LabelField("Color Scale", EditorStyles.boldLabel);
|
||||
EditorGUILayout.Space();
|
||||
overlay.overridePerLayerColorScaleAndOffset = EditorGUILayout.Toggle(new GUIContent("Override Color Scale", "Manually set color scale and offset of this layer, regardless of what the global values are from OVRManager.SetColorScaleAndOffset()."), overlay.overridePerLayerColorScaleAndOffset);
|
||||
if (overlay.overridePerLayerColorScaleAndOffset)
|
||||
{
|
||||
Vector4 colorScale = EditorGUILayout.Vector4Field(new GUIContent("Color Scale", "Scale that the color values for this overlay will be multiplied by."), overlay.colorScale);
|
||||
Vector4 colorOffset = EditorGUILayout.Vector4Field(new GUIContent("Color Offset", "Offset that the color values for this overlay will be added to."), overlay.colorOffset);
|
||||
overlay.SetPerLayerColorScaleAndOffset(colorScale, colorOffset);
|
||||
}
|
||||
|
||||
EditorGUILayout.Separator();
|
||||
EditorGUILayout.LabelField("Preview", EditorStyles.boldLabel);
|
||||
overlay.previewInEditor = EditorGUILayout.Toggle(new GUIContent("Preview in Editor (Experimental)", "Preview the overlay in the editor using a mesh renderer."), overlay.previewInEditor);
|
||||
|
||||
|
||||
|
||||
EditorUtility.SetDirty(overlay);
|
||||
}
|
||||
|
||||
private Rect Clamp01(Rect rect)
|
||||
{
|
||||
rect.x = Mathf.Clamp01(rect.x);
|
||||
rect.y = Mathf.Clamp01(rect.y);
|
||||
rect.width = Mathf.Clamp01(rect.width);
|
||||
rect.height = Mathf.Clamp01(rect.height);
|
||||
return rect;
|
||||
}
|
||||
|
||||
private bool IsUnitRect(Rect rect)
|
||||
{
|
||||
return IsRect(rect, 0, 0, 1, 1);
|
||||
}
|
||||
|
||||
private bool IsRect(Rect rect, float x, float y, float w, float h)
|
||||
{
|
||||
return rect.x == x && rect.y == y && rect.width == w && rect.height == h;
|
||||
}
|
||||
|
||||
private StereoType GetStereoType(OVROverlay overlay)
|
||||
{
|
||||
if (overlay.textures[0] != null && overlay.textures[1] != null)
|
||||
{
|
||||
if (IsUnitRect(overlay.srcRectLeft) && IsUnitRect(overlay.srcRectRight))
|
||||
{
|
||||
return StereoType.Stereo;
|
||||
}
|
||||
else
|
||||
{
|
||||
return StereoType.Custom;
|
||||
}
|
||||
}
|
||||
else if (overlay.textures[0] != null)
|
||||
{
|
||||
if (IsUnitRect(overlay.srcRectLeft) && IsUnitRect(overlay.srcRectRight))
|
||||
{
|
||||
return StereoType.Mono;
|
||||
}
|
||||
else if (IsRect(overlay.srcRectLeft, 0, 0, 0.5f, 1f) && IsRect(overlay.srcRectRight, 0.5f, 0, 0.5f, 1f))
|
||||
{
|
||||
return StereoType.StereoLeftRight;
|
||||
}
|
||||
else if (overlay.invertTextureRects && IsRect(overlay.srcRectLeft, 0, 0.0f, 1f, 0.5f) && IsRect(overlay.srcRectRight, 0f, 0.5f, 1f, 0.5f))
|
||||
{
|
||||
return StereoType.StereoTopBottom;
|
||||
}
|
||||
else if (!overlay.invertTextureRects && IsRect(overlay.srcRectLeft, 0, 0.5f, 1f, 0.5f) && IsRect(overlay.srcRectRight, 0f, 0f, 1f, 0.5f))
|
||||
{
|
||||
return StereoType.StereoTopBottom;
|
||||
}
|
||||
else
|
||||
{
|
||||
return StereoType.Custom;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
return StereoType.Mono;
|
||||
}
|
||||
}
|
||||
|
||||
private void SetRectsByVideoType(OVROverlay overlay, StereoType stereoType, DisplayType displayType)
|
||||
{
|
||||
Rect srcRectLeft, srcRectRight, destRectLeft, destRectRight;
|
||||
|
||||
switch (displayType)
|
||||
{
|
||||
case DisplayType.Full:
|
||||
destRectLeft = destRectRight = new Rect(0, 0, 1, 1);
|
||||
break;
|
||||
|
||||
case DisplayType.Half:
|
||||
destRectLeft = destRectRight = new Rect(0.25f, 0, 0.5f, 1);
|
||||
break;
|
||||
|
||||
default:
|
||||
destRectLeft = overlay.destRectLeft;
|
||||
destRectRight = overlay.destRectRight;
|
||||
break;
|
||||
}
|
||||
|
||||
switch (stereoType)
|
||||
{
|
||||
case StereoType.Mono:
|
||||
case StereoType.Stereo:
|
||||
srcRectLeft = srcRectRight = new Rect(0, 0, 1, 1);
|
||||
break;
|
||||
|
||||
case StereoType.StereoTopBottom:
|
||||
if (overlay.invertTextureRects)
|
||||
{
|
||||
srcRectLeft = new Rect(0, 0.0f, 1, 0.5f);
|
||||
srcRectRight = new Rect(0, 0.5f, 1, 0.5f);
|
||||
}
|
||||
else
|
||||
{
|
||||
srcRectLeft = new Rect(0, 0.5f, 1, 0.5f);
|
||||
srcRectRight = new Rect(0, 0.0f, 1, 0.5f);
|
||||
}
|
||||
break;
|
||||
|
||||
case StereoType.StereoLeftRight:
|
||||
srcRectLeft = new Rect(0, 0, 0.5f, 1);
|
||||
srcRectRight = new Rect(0.5f, 0, 0.5f, 1);
|
||||
break;
|
||||
|
||||
default:
|
||||
srcRectLeft = overlay.srcRectLeft;
|
||||
srcRectRight = overlay.srcRectRight;
|
||||
break;
|
||||
}
|
||||
overlay.SetSrcDestRects(srcRectLeft, srcRectRight, destRectLeft, destRectRight);
|
||||
}
|
||||
|
||||
private void GetCursorPoints(Rect drawRect, Rect selectRect, bool invertY, out Vector2 leftPos, out Vector2 rightPos, out Vector2 topPos, out Vector2 bottomPos)
|
||||
{
|
||||
if (invertY)
|
||||
{
|
||||
selectRect.y = 1 - selectRect.y - selectRect.height;
|
||||
}
|
||||
leftPos = new Vector2(drawRect.x + selectRect.x * drawRect.width, drawRect.y + (1 - selectRect.y - selectRect.height / 2) * drawRect.height);
|
||||
rightPos = new Vector2(drawRect.x + (selectRect.x + selectRect.width) * drawRect.width, drawRect.y + (1 - selectRect.y - selectRect.height / 2) * drawRect.height);
|
||||
topPos = new Vector2(drawRect.x + (selectRect.x + selectRect.width / 2) * drawRect.width, drawRect.y + (1 - selectRect.y - selectRect.height) * drawRect.height);
|
||||
bottomPos = new Vector2(drawRect.x + (selectRect.x + selectRect.width / 2) * drawRect.width, drawRect.y + (1 - selectRect.y) * drawRect.height);
|
||||
|
||||
if (invertY)
|
||||
{
|
||||
// swap top and bottom
|
||||
var tmp = topPos;
|
||||
topPos = bottomPos;
|
||||
bottomPos = tmp;
|
||||
}
|
||||
}
|
||||
|
||||
private void CreateCursorRects(Rect drawRect, Rect selectRect, bool invertY)
|
||||
{
|
||||
Vector2 leftPos, rightPos, topPos, bottomPos;
|
||||
GetCursorPoints(drawRect, selectRect, invertY, out leftPos, out rightPos, out topPos, out bottomPos);
|
||||
|
||||
EditorGUIUtility.AddCursorRect(new Rect(leftPos - 5 * Vector2.one, 10 * Vector2.one), MouseCursor.ResizeHorizontal);
|
||||
EditorGUIUtility.AddCursorRect(new Rect(rightPos - 5 * Vector2.one, 10 * Vector2.one), MouseCursor.ResizeHorizontal);
|
||||
EditorGUIUtility.AddCursorRect(new Rect(topPos - 5 * Vector2.one, 10 * Vector2.one), MouseCursor.ResizeVertical);
|
||||
EditorGUIUtility.AddCursorRect(new Rect(bottomPos - 5 * Vector2.one, 10 * Vector2.one), MouseCursor.ResizeVertical);
|
||||
}
|
||||
|
||||
private bool IsOverRectControls(Rect drawRect, Vector2 mousePos, Rect selectRect, bool invertY, ref Side side)
|
||||
{
|
||||
Vector2 leftPos, rightPos, topPos, bottomPos;
|
||||
GetCursorPoints(drawRect, selectRect, invertY, out leftPos, out rightPos, out topPos, out bottomPos);
|
||||
|
||||
if ((leftPos - mousePos).sqrMagnitude <= 25)
|
||||
{
|
||||
side = Side.Left;
|
||||
return true;
|
||||
}
|
||||
if ((rightPos - mousePos).sqrMagnitude <= 25)
|
||||
{
|
||||
side = Side.Right;
|
||||
return true;
|
||||
}
|
||||
if ((topPos - mousePos).sqrMagnitude <= 25)
|
||||
{
|
||||
side = Side.Top;
|
||||
return true;
|
||||
}
|
||||
if ((bottomPos - mousePos).sqrMagnitude <= 25)
|
||||
{
|
||||
side = Side.Bottom;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private void UpdateRectDragging(Rect drawingRectLeft, Rect drawingRectRight, TextureRect rectLeftType, TextureRect rectRightType, bool invertY, ref Rect rectLeft, ref Rect rectRight)
|
||||
{
|
||||
if (!Event.current.isMouse || Event.current.button != 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
if (Event.current.type == EventType.MouseUp)
|
||||
{
|
||||
_DraggingRect = TextureRect.None;
|
||||
return;
|
||||
}
|
||||
|
||||
Vector2 mousePos = Event.current.mousePosition;
|
||||
if (_DraggingRect == TextureRect.None && Event.current.type == EventType.MouseDown)
|
||||
{
|
||||
if (IsOverRectControls(drawingRectLeft, mousePos, rectLeft, invertY, ref _DraggingSide))
|
||||
{
|
||||
_DraggingRect = rectLeftType;
|
||||
}
|
||||
if (_DraggingRect == TextureRect.None || Event.current.shift)
|
||||
{
|
||||
if (IsOverRectControls(drawingRectRight, mousePos, rectRight, invertY, ref _DraggingSide))
|
||||
{
|
||||
_DraggingRect = rectRightType;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (_DraggingRect == rectLeftType)
|
||||
{
|
||||
SetRectSideValue(drawingRectLeft, mousePos, _DraggingSide, invertY, ref rectLeft);
|
||||
}
|
||||
if (_DraggingRect == rectRightType)
|
||||
{
|
||||
SetRectSideValue(drawingRectRight, mousePos, _DraggingSide, invertY, ref rectRight);
|
||||
}
|
||||
}
|
||||
|
||||
private void SetRectSideValue(Rect drawingRect, Vector2 mousePos, Side side, bool invertY, ref Rect rect)
|
||||
{
|
||||
// quantize to 1/32
|
||||
float x = Mathf.Clamp01(Mathf.Round(((mousePos.x - drawingRect.x) / drawingRect.width) * 32) / 32.0f);
|
||||
float y = Mathf.Clamp01(Mathf.Round(((mousePos.y - drawingRect.y) / drawingRect.height) * 32) / 32.0f);
|
||||
if (!invertY)
|
||||
{
|
||||
y = 1 - y;
|
||||
}
|
||||
|
||||
switch (side)
|
||||
{
|
||||
case Side.Left:
|
||||
float xMax = rect.xMax;
|
||||
rect.x = Mathf.Min(x, xMax);
|
||||
rect.width = xMax - rect.x;
|
||||
break;
|
||||
case Side.Right:
|
||||
rect.width = Mathf.Max(0, x - rect.x);
|
||||
break;
|
||||
case Side.Bottom:
|
||||
float yMax = rect.yMax;
|
||||
rect.y = Mathf.Min(y, yMax);
|
||||
rect.height = yMax - rect.y;
|
||||
break;
|
||||
case Side.Top:
|
||||
rect.height = Mathf.Max(0, y - rect.y);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
11
Assets/Oculus/VR/Scripts/Editor/OVROverlayEditor.cs.meta
Normal file
11
Assets/Oculus/VR/Scripts/Editor/OVROverlayEditor.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: fde3aeb28643f6c48a48f926ac7207e0
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
150
Assets/Oculus/VR/Scripts/Editor/OVROverlaySrcRectEditor.shader
Normal file
150
Assets/Oculus/VR/Scripts/Editor/OVROverlaySrcRectEditor.shader
Normal file
@@ -0,0 +1,150 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
Shader "Unlit/OVROverlaySrcRectEditor"
|
||||
{
|
||||
Properties
|
||||
{
|
||||
_MainTex ("Texture", 2D) = "white" {}
|
||||
_SrcRectLeft ("SrcRectLeft", Vector) = (0,0,1,1)
|
||||
_SrcRectRight("SrcRectRight", Vector) = (0,0,1,1)
|
||||
_BackgroundColor("Background Color", Color) = (0.225, 0.225, 0.225, 1)
|
||||
}
|
||||
SubShader
|
||||
{
|
||||
Tags { "RenderType"="Opaque" }
|
||||
LOD 100
|
||||
|
||||
Pass
|
||||
{
|
||||
CGPROGRAM
|
||||
#pragma vertex vert
|
||||
#pragma fragment frag
|
||||
|
||||
#include "UnityCG.cginc"
|
||||
|
||||
struct appdata
|
||||
{
|
||||
float4 vertex : POSITION;
|
||||
float2 uv : TEXCOORD0;
|
||||
};
|
||||
|
||||
struct v2f
|
||||
{
|
||||
float2 uv : TEXCOORD0;
|
||||
float4 vertex : SV_POSITION;
|
||||
float4 leftDragX : TEXCOORD1;
|
||||
float4 leftDragY : TEXCOORD2;
|
||||
float4 rightDragX : TEXCOORD3;
|
||||
float4 rightDragY : TEXCOORD4;
|
||||
};
|
||||
|
||||
sampler2D _MainTex;
|
||||
float4 _MainTex_ST;
|
||||
|
||||
float4 _SrcRectLeft;
|
||||
float4 _SrcRectRight;
|
||||
|
||||
fixed4 _BackgroundColor;
|
||||
|
||||
v2f vert (appdata v)
|
||||
{
|
||||
v2f o;
|
||||
o.vertex = UnityObjectToClipPos(v.vertex);
|
||||
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
|
||||
// Add padding
|
||||
o.uv = (o.uv - 0.5) * (256.0 + 8.0) / (256.0) + 0.5;
|
||||
|
||||
// left
|
||||
o.leftDragX.x = _SrcRectLeft.x;
|
||||
o.leftDragY.x = _SrcRectLeft.y + _SrcRectLeft.w * 0.5;
|
||||
// right
|
||||
o.leftDragX.y = _SrcRectLeft.x + _SrcRectLeft.z;
|
||||
o.leftDragY.y = _SrcRectLeft.y + _SrcRectLeft.w * 0.5;
|
||||
// top
|
||||
o.leftDragX.z = _SrcRectLeft.x + _SrcRectLeft.z * 0.5;
|
||||
o.leftDragY.z = _SrcRectLeft.y;
|
||||
// bottom
|
||||
o.leftDragX.w = _SrcRectLeft.x + _SrcRectLeft.z * 0.5;
|
||||
o.leftDragY.w = _SrcRectLeft.y + _SrcRectLeft.w;
|
||||
// right
|
||||
o.rightDragX.x = _SrcRectRight.x;
|
||||
o.rightDragY.x = _SrcRectRight.y + _SrcRectRight.w * 0.5;
|
||||
// right
|
||||
o.rightDragX.y = _SrcRectRight.x + _SrcRectRight.z;
|
||||
o.rightDragY.y = _SrcRectRight.y + _SrcRectRight.w * 0.5;
|
||||
// top
|
||||
o.rightDragX.z = _SrcRectRight.x + _SrcRectRight.z * 0.5;
|
||||
o.rightDragY.z = _SrcRectRight.y;
|
||||
// bottom
|
||||
o.rightDragX.w = _SrcRectRight.x + _SrcRectRight.z * 0.5;
|
||||
o.rightDragY.w = _SrcRectRight.y + _SrcRectRight.w;
|
||||
|
||||
return o;
|
||||
}
|
||||
|
||||
float onDrag(float2 uv, float x, float y)
|
||||
{
|
||||
const float pixelSize = 6;
|
||||
return abs(uv.x - x) < ((pixelSize / 2) / 256.0) && abs(uv.y - y) < ((pixelSize / 2) / 128.0);
|
||||
}
|
||||
|
||||
float onLine(float2 uv, float4 rect)
|
||||
{
|
||||
return
|
||||
(abs(uv.x - rect.x) < (1 / 256.0) && uv.y >= rect.y && uv.y <= rect.y + rect.w) ||
|
||||
(abs(uv.x - rect.x - rect.z) < (1 / 256.0) && uv.y >= rect.y && uv.y <= rect.y + rect.w) ||
|
||||
(abs(uv.y - rect.y) < (1 / 128.0) && uv.x >= rect.x && uv.x <= rect.x + rect.z) ||
|
||||
(abs(uv.y - rect.y - rect.w) < (1 / 128.0) && uv.x >= rect.x && uv.x <= rect.x + rect.z);
|
||||
}
|
||||
|
||||
float checkerboard(float2 uv)
|
||||
{
|
||||
float x = floor(uv.x * (16));
|
||||
float y = floor(uv.y * 8);
|
||||
|
||||
return 2 * ((x + y) / 2.0 - floor((x + y) / 2.0));
|
||||
}
|
||||
|
||||
fixed4 frag (v2f i) : SV_Target
|
||||
{
|
||||
// sample the texture
|
||||
fixed4 col = tex2D(_MainTex, i.uv);
|
||||
|
||||
col.rgb = lerp(0.41 - 0.13 * checkerboard(i.uv), col.rgb, col.a);
|
||||
|
||||
if (i.uv.x < 0 || i.uv.x > 1 || i.uv.y < 0 || i.uv.y > 1)
|
||||
{
|
||||
col = _BackgroundColor;
|
||||
}
|
||||
|
||||
float2 uv = i.uv.xy;
|
||||
|
||||
// now draw clipping objects
|
||||
float left = onLine(uv, _SrcRectLeft) ||
|
||||
onDrag(uv, i.leftDragX.x, i.leftDragY.x) ||
|
||||
onDrag(uv, i.leftDragX.y, i.leftDragY.y) ||
|
||||
onDrag(uv, i.leftDragX.z, i.leftDragY.z) ||
|
||||
onDrag(uv, i.leftDragX.w, i.leftDragY.w);
|
||||
|
||||
float right = onLine(uv, _SrcRectRight) ||
|
||||
onDrag(uv, i.rightDragX.x, i.rightDragY.x) ||
|
||||
onDrag(uv, i.rightDragX.y, i.rightDragY.y) ||
|
||||
onDrag(uv, i.rightDragX.z, i.rightDragY.z) ||
|
||||
onDrag(uv, i.rightDragX.w, i.rightDragY.w);
|
||||
|
||||
return lerp(col, fixed4(left, right, 0, 1), left || right);
|
||||
}
|
||||
ENDCG
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 589b36d0aa66c7349bcff8750b670434
|
||||
ShaderImporter:
|
||||
externalObjects: {}
|
||||
defaultTextures: []
|
||||
nonModifiableTextures: []
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
68
Assets/Oculus/VR/Scripts/Editor/OVRPassthroughLayerEditor.cs
Normal file
68
Assets/Oculus/VR/Scripts/Editor/OVRPassthroughLayerEditor.cs
Normal file
@@ -0,0 +1,68 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using UnityEditor;
|
||||
using UnityEngine;
|
||||
|
||||
using ColorMapEditorType = OVRPassthroughLayer.ColorMapEditorType;
|
||||
|
||||
[CustomEditor(typeof(OVRPassthroughLayer))]
|
||||
public class OVRPassthroughLayerEditor : Editor
|
||||
{
|
||||
public override void OnInspectorGUI()
|
||||
{
|
||||
OVRPassthroughLayer layer = (OVRPassthroughLayer)target;
|
||||
|
||||
layer.projectionSurfaceType = (OVRPassthroughLayer.ProjectionSurfaceType)EditorGUILayout.EnumPopup(
|
||||
new GUIContent("Projection Surface", "The type of projection surface for this Passthrough layer"),
|
||||
layer.projectionSurfaceType);
|
||||
|
||||
EditorGUILayout.Space();
|
||||
EditorGUILayout.LabelField("Compositing", EditorStyles.boldLabel);
|
||||
layer.overlayType = (OVROverlay.OverlayType)EditorGUILayout.EnumPopup(new GUIContent("Placement", "Whether this overlay should layer behind the scene or in front of it"), layer.overlayType);
|
||||
layer.compositionDepth = EditorGUILayout.IntField(new GUIContent("Composition Depth", "Depth value used to sort layers in the scene, smaller value appears in front"), layer.compositionDepth);
|
||||
|
||||
EditorGUILayout.Space();
|
||||
EditorGUILayout.LabelField("Style", EditorStyles.boldLabel);
|
||||
|
||||
layer.textureOpacity = EditorGUILayout.Slider("Opacity", layer.textureOpacity, 0, 1);
|
||||
|
||||
EditorGUILayout.Space();
|
||||
|
||||
layer.edgeRenderingEnabled = EditorGUILayout.Toggle(
|
||||
new GUIContent("Edge Rendering", "Highlight salient edges in the camera images in a specific color"),
|
||||
layer.edgeRenderingEnabled);
|
||||
layer.edgeColor = EditorGUILayout.ColorField("Edge Color", layer.edgeColor);
|
||||
|
||||
EditorGUILayout.Space();
|
||||
|
||||
System.Func<System.Enum, bool> hideCustomColorMapOption = option => (ColorMapEditorType)option != ColorMapEditorType.Custom;
|
||||
layer.colorMapEditorType = (ColorMapEditorType)EditorGUILayout.EnumPopup(
|
||||
new GUIContent("Color Map"),
|
||||
layer.colorMapEditorType,
|
||||
hideCustomColorMapOption,
|
||||
false);
|
||||
|
||||
if (layer.colorMapEditorType == ColorMapEditorType.Controls)
|
||||
{
|
||||
layer.colorMapEditorContrast = EditorGUILayout.Slider("Contrast", layer.colorMapEditorContrast, -1, 1);
|
||||
layer.colorMapEditorBrightness = EditorGUILayout.Slider("Brightness", layer.colorMapEditorBrightness, -1, 1);
|
||||
layer.colorMapEditorPosterize = EditorGUILayout.Slider("Posterize", layer.colorMapEditorPosterize, 0, 1);
|
||||
layer.colorMapEditorGradient = EditorGUILayout.GradientField("Colorize", layer.colorMapEditorGradient);
|
||||
}
|
||||
|
||||
if (GUI.changed)
|
||||
{
|
||||
EditorUtility.SetDirty(layer);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: f49479bb47dde564680a2a5bdf5a6dfe
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
33
Assets/Oculus/VR/Scripts/Editor/OVRProfilerDeprecated.cs
Normal file
33
Assets/Oculus/VR/Scripts/Editor/OVRProfilerDeprecated.cs
Normal file
@@ -0,0 +1,33 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
#if UNITY_EDITOR
|
||||
|
||||
using UnityEngine;
|
||||
using UnityEditor;
|
||||
using System.Collections.Generic;
|
||||
using Assets.OVR.Scripts;
|
||||
|
||||
public class OVRProfilerDeprecated : EditorWindow
|
||||
{
|
||||
[MenuItem("Oculus/Tools/(Deprecated) OVR Profiler", false, 200000)]
|
||||
static void Init()
|
||||
{
|
||||
Debug.LogWarning("OVR Profiler has been replaced by OVR Performance Lint Tool");
|
||||
// Get existing open window or if none, make a new one:
|
||||
EditorWindow.GetWindow(typeof(OVRLint));
|
||||
OVRPlugin.SendEvent("perf_lint", "activated");
|
||||
OVRLint.RunCheck();
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: ad582af7c1f87bf4b99ef951b26ec465
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
188
Assets/Oculus/VR/Scripts/Editor/OVRProjectConfigEditor.cs
Normal file
188
Assets/Oculus/VR/Scripts/Editor/OVRProjectConfigEditor.cs
Normal file
@@ -0,0 +1,188 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using UnityEditor;
|
||||
|
||||
[CustomEditor(typeof(OVRProjectConfig))]
|
||||
public class OVRProjectConfigEditor : Editor
|
||||
{
|
||||
override public void OnInspectorGUI()
|
||||
{
|
||||
OVRProjectConfig projectConfig = (OVRProjectConfig)target;
|
||||
DrawTargetDeviceInspector(projectConfig);
|
||||
EditorGUILayout.Space();
|
||||
DrawProjectConfigInspector(projectConfig);
|
||||
}
|
||||
|
||||
public static void DrawTargetDeviceInspector(OVRProjectConfig projectConfig)
|
||||
{
|
||||
// Target Devices
|
||||
EditorGUILayout.LabelField("Target Devices", EditorStyles.boldLabel);
|
||||
#if PRIORITIZE_OCULUS_XR_SETTINGS
|
||||
EditorGUILayout.BeginHorizontal();
|
||||
EditorGUILayout.LabelField("Configure Target Devices in Oculus XR Plugin Settings.", GUILayout.Width(320));
|
||||
GUILayout.FlexibleSpace();
|
||||
if (GUILayout.Button("Open Settings"))
|
||||
SettingsService.OpenProjectSettings("Project/XR Plug-in Management/Oculus");
|
||||
EditorGUILayout.EndHorizontal();
|
||||
#else
|
||||
bool hasModified = false;
|
||||
|
||||
foreach (OVRProjectConfig.DeviceType deviceType in System.Enum.GetValues(typeof(OVRProjectConfig.DeviceType)))
|
||||
{
|
||||
bool oldSupportsDevice = projectConfig.targetDeviceTypes.Contains(deviceType);
|
||||
bool newSupportsDevice = oldSupportsDevice;
|
||||
OVREditorUtil.SetupBoolField(projectConfig, ObjectNames.NicifyVariableName(deviceType.ToString()), ref newSupportsDevice, ref hasModified);
|
||||
|
||||
if (newSupportsDevice && !oldSupportsDevice)
|
||||
{
|
||||
projectConfig.targetDeviceTypes.Add(deviceType);
|
||||
}
|
||||
else if (oldSupportsDevice && !newSupportsDevice)
|
||||
{
|
||||
projectConfig.targetDeviceTypes.Remove(deviceType);
|
||||
}
|
||||
}
|
||||
|
||||
if (hasModified)
|
||||
{
|
||||
OVRProjectConfig.CommitProjectConfig(projectConfig);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
enum eProjectConfigTab
|
||||
{
|
||||
General = 0,
|
||||
BuildSettings,
|
||||
Security,
|
||||
Experimental,
|
||||
}
|
||||
static eProjectConfigTab selectedTab = 0;
|
||||
static string[] projectConfigTabStrs = null;
|
||||
|
||||
public static void DrawProjectConfigInspector(OVRProjectConfig projectConfig)
|
||||
{
|
||||
EditorGUILayout.BeginVertical(EditorStyles.helpBox);
|
||||
EditorGUILayout.LabelField("Quest Features", EditorStyles.boldLabel);
|
||||
|
||||
if (projectConfigTabStrs == null)
|
||||
{
|
||||
projectConfigTabStrs = Enum.GetNames(typeof(eProjectConfigTab));
|
||||
for (int i = 0; i < projectConfigTabStrs.Length; ++i)
|
||||
projectConfigTabStrs[i] = ObjectNames.NicifyVariableName(projectConfigTabStrs[i]);
|
||||
}
|
||||
|
||||
selectedTab = (eProjectConfigTab)GUILayout.SelectionGrid((int)selectedTab, projectConfigTabStrs, 3, GUI.skin.button);
|
||||
EditorGUILayout.Space(5);
|
||||
bool hasModified = false;
|
||||
switch (selectedTab)
|
||||
{
|
||||
case eProjectConfigTab.General:
|
||||
|
||||
// Show overlay support option
|
||||
EditorGUI.BeginDisabledGroup(true);
|
||||
EditorGUILayout.Toggle(new GUIContent("Focus Aware (Required)",
|
||||
"If checked, the new overlay will be displayed when the user presses the home button. The game will not be paused, but will now receive InputFocusLost and InputFocusAcquired events."), true);
|
||||
EditorGUI.EndDisabledGroup();
|
||||
|
||||
// Hand Tracking Support
|
||||
OVREditorUtil.SetupEnumField(projectConfig, "Hand Tracking Support", ref projectConfig.handTrackingSupport, ref hasModified);
|
||||
|
||||
OVREditorUtil.SetupEnumField(projectConfig, new GUIContent("Hand Tracking Frequency",
|
||||
"Note that a higher tracking frequency will reserve some performance headroom from the application's budget."),
|
||||
ref projectConfig.handTrackingFrequency, ref hasModified, "https://developer.oculus.com/documentation/unity/unity-handtracking/#enable-hand-tracking");
|
||||
|
||||
// Enable Render Model Support
|
||||
OVREditorUtil.SetupEnumField(projectConfig, new GUIContent("Render Model Support",
|
||||
"If enabled, the application will be able to load render models from the runtime."),
|
||||
ref projectConfig.renderModelSupport, ref hasModified);
|
||||
|
||||
// System Keyboard Support
|
||||
OVREditorUtil.SetupBoolField(projectConfig, new GUIContent("Requires System Keyboard",
|
||||
"If checked, the Oculus System keyboard will be enabled for Unity input fields and any calls to open/close the Unity TouchScreenKeyboard."),
|
||||
ref projectConfig.requiresSystemKeyboard, ref hasModified);
|
||||
|
||||
// Tracked Keyboard Support
|
||||
var trackedKeyboardSetting = projectConfig.trackedKeyboardSupport;
|
||||
OVREditorUtil.SetupEnumField(projectConfig, "Tracked Keyboard Support", ref projectConfig.trackedKeyboardSupport, ref hasModified);
|
||||
if (trackedKeyboardSetting != projectConfig.trackedKeyboardSupport && projectConfig.trackedKeyboardSupport > OVRProjectConfig.TrackedKeyboardSupport.None)
|
||||
projectConfig.renderModelSupport = OVRProjectConfig.RenderModelSupport.Enabled;
|
||||
if (projectConfig.trackedKeyboardSupport > OVRProjectConfig.TrackedKeyboardSupport.None && projectConfig.renderModelSupport == OVRProjectConfig.RenderModelSupport.Disabled)
|
||||
EditorGUILayout.LabelField("Render model support is required to load keyboard models from the runtime.");
|
||||
|
||||
// System Splash Screen
|
||||
OVREditorUtil.SetupTexture2DField(projectConfig, new GUIContent("System Splash Screen",
|
||||
"If set, the Splash Screen will be presented by the Operating System as a high quality composition layer at launch time."),
|
||||
ref projectConfig.systemSplashScreen, ref hasModified,
|
||||
"https://developer.oculus.com/documentation/unity/unity-splash-screen/");
|
||||
|
||||
// Allow optional 3-dof head-tracking
|
||||
OVREditorUtil.SetupBoolField(projectConfig, new GUIContent("Allow Optional 3DoF Head Tracking",
|
||||
"If checked, application can work in both 6DoF and 3DoF modes. It's highly recommended to keep it unchecked unless your project strongly needs the 3DoF head tracking."),
|
||||
ref projectConfig.allowOptional3DofHeadTracking, ref hasModified);
|
||||
|
||||
// Enable passthrough capability
|
||||
OVREditorUtil.SetupBoolField(projectConfig, new GUIContent("Passthrough Capability Enabled",
|
||||
"If checked, this application can use passthrough functionality. This option must be enabled at build time, otherwise initializing passthrough and creating passthrough layers in application scenes will fail."),
|
||||
ref projectConfig.insightPassthroughEnabled, ref hasModified);
|
||||
|
||||
break;
|
||||
|
||||
case eProjectConfigTab.BuildSettings:
|
||||
|
||||
OVREditorUtil.SetupBoolField(projectConfig, new GUIContent("Skip Unneeded Shaders",
|
||||
"If checked, prevent building shaders that are not used by default to reduce time spent when building."),
|
||||
ref projectConfig.skipUnneededShaders, ref hasModified,
|
||||
"https://developer.oculus.com/documentation/unity/unity-strip-shaders/");
|
||||
|
||||
break;
|
||||
|
||||
case eProjectConfigTab.Security:
|
||||
|
||||
OVREditorUtil.SetupBoolField(projectConfig, "Disable Backups", ref projectConfig.disableBackups, ref hasModified,
|
||||
"https://developer.android.com/guide/topics/data/autobackup#EnablingAutoBackup");
|
||||
OVREditorUtil.SetupBoolField(projectConfig, "Enable NSC Configuration", ref projectConfig.enableNSCConfig, ref hasModified,
|
||||
"https://developer.android.com/training/articles/security-config");
|
||||
EditorGUI.BeginDisabledGroup(!projectConfig.enableNSCConfig);
|
||||
++EditorGUI.indentLevel;
|
||||
OVREditorUtil.SetupInputField(projectConfig, "Custom Security XML Path", ref projectConfig.securityXmlPath, ref hasModified);
|
||||
--EditorGUI.indentLevel;
|
||||
EditorGUI.EndDisabledGroup();
|
||||
|
||||
break;
|
||||
|
||||
case eProjectConfigTab.Experimental:
|
||||
|
||||
// Experimental Features Enabled
|
||||
OVREditorUtil.SetupBoolField(projectConfig, new GUIContent("Experimental Features Enabled",
|
||||
"If checked, this application can use experimental features. Note that such features are for developer use only. This option must be disabled when submitting to the Oculus Store."),
|
||||
ref projectConfig.experimentalFeaturesEnabled, ref hasModified);
|
||||
|
||||
// Spatial Anchors Support
|
||||
OVREditorUtil.SetupEnumField(projectConfig, "Spatial Anchors Support", ref projectConfig.spatialAnchorsSupport, ref hasModified);
|
||||
|
||||
break;
|
||||
|
||||
}
|
||||
EditorGUILayout.EndVertical();
|
||||
|
||||
// apply any pending changes to project config
|
||||
if (hasModified)
|
||||
{
|
||||
OVRProjectConfig.CommitProjectConfig(projectConfig);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 950d95332920b814ea41df294856f96a
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"name": "Oculus.VR.Scripts.Editor",
|
||||
"references": [
|
||||
"Oculus.VR",
|
||||
"Oculus.VR.Editor"
|
||||
],
|
||||
"includePlatforms": [
|
||||
"Editor"
|
||||
],
|
||||
"excludePlatforms": [],
|
||||
"allowUnsafeCode": false,
|
||||
"overrideReferences": false,
|
||||
"precompiledReferences": [],
|
||||
"autoReferenced": true,
|
||||
"defineConstraints": [],
|
||||
"versionDefines": [
|
||||
{
|
||||
"name": "com.unity.xr.management",
|
||||
"expression": "",
|
||||
"define": "USING_XR_MANAGEMENT"
|
||||
},
|
||||
{
|
||||
"name": "com.unity.xr.oculus",
|
||||
"expression": "",
|
||||
"define": "USING_XR_SDK_OCULUS"
|
||||
},
|
||||
{
|
||||
"name": "com.unity.xr.oculus",
|
||||
"expression": "1.7.0",
|
||||
"define": "PRIORITIZE_OCULUS_XR_SETTINGS"
|
||||
}
|
||||
],
|
||||
"noEngineReferences": false
|
||||
}
|
||||
@@ -0,0 +1,7 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 7305c54a43f3814439df347c7519653e
|
||||
AssemblyDefinitionImporter:
|
||||
externalObjects: {}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
237
Assets/Oculus/VR/Scripts/OVRBoundary.cs
Normal file
237
Assets/Oculus/VR/Scripts/OVRBoundary.cs
Normal file
@@ -0,0 +1,237 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
#if USING_XR_MANAGEMENT && USING_XR_SDK_OCULUS
|
||||
#define USING_XR_SDK
|
||||
#endif
|
||||
|
||||
#if UNITY_2020_1_OR_NEWER
|
||||
#define REQUIRES_XR_SDK
|
||||
#endif
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using System.Runtime.InteropServices;
|
||||
#if !USING_XR_SDK && !REQUIRES_XR_SDK
|
||||
using Boundary = UnityEngine.Experimental.XR.Boundary;
|
||||
#endif
|
||||
|
||||
/// <summary>
|
||||
/// Provides access to the Oculus boundary system.
|
||||
/// </summary>
|
||||
public class OVRBoundary
|
||||
{
|
||||
/// <summary>
|
||||
/// Specifies a tracked node that can be queried through the boundary system.
|
||||
/// </summary>
|
||||
public enum Node
|
||||
{
|
||||
HandLeft = OVRPlugin.Node.HandLeft, ///< Tracks the left hand node.
|
||||
HandRight = OVRPlugin.Node.HandRight, ///< Tracks the right hand node.
|
||||
Head = OVRPlugin.Node.Head, ///< Tracks the head node.
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Specifies a boundary type surface.
|
||||
/// </summary>
|
||||
public enum BoundaryType
|
||||
{
|
||||
[System.Obsolete("Deprecated. This enum value will not be supported in OpenXR", false)]
|
||||
OuterBoundary = OVRPlugin.BoundaryType.OuterBoundary, ///< Outer boundary that closely matches the user's configured walls.
|
||||
PlayArea = OVRPlugin.BoundaryType.PlayArea, ///< Smaller convex area inset within the outer boundary.
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Provides test results of boundary system queries.
|
||||
/// </summary>
|
||||
[System.Obsolete("Deprecated. This struct will not be supported in OpenXR", false)]
|
||||
public struct BoundaryTestResult
|
||||
{
|
||||
public bool IsTriggering; ///< Returns true if the queried test would violate and/or trigger the tested boundary types.
|
||||
public float ClosestDistance; ///< Returns the distance between the queried test object and the closest tested boundary type.
|
||||
public Vector3 ClosestPoint; ///< Returns the closest point to the queried test object.
|
||||
public Vector3 ClosestPointNormal; ///< Returns the normal of the closest point to the queried test object.
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns true if the boundary system is currently configured with valid boundary data.
|
||||
/// </summary>
|
||||
public bool GetConfigured()
|
||||
{
|
||||
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
|
||||
return OVRPlugin.GetBoundaryConfigured();
|
||||
else
|
||||
{
|
||||
#if !USING_XR_SDK && !REQUIRES_XR_SDK
|
||||
return Boundary.configured;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns the results of testing a tracked node against the specified boundary type.
|
||||
/// All points are returned in local tracking space shared by tracked nodes and accessible through OVRCameraRig's trackingSpace anchor.
|
||||
/// </summary>
|
||||
[System.Obsolete("Deprecated. This function will not be supported in OpenXR", false)]
|
||||
public OVRBoundary.BoundaryTestResult TestNode(OVRBoundary.Node node, OVRBoundary.BoundaryType boundaryType)
|
||||
{
|
||||
OVRPlugin.BoundaryTestResult ovrpRes = OVRPlugin.TestBoundaryNode((OVRPlugin.Node)node, (OVRPlugin.BoundaryType)boundaryType);
|
||||
|
||||
OVRBoundary.BoundaryTestResult res = new OVRBoundary.BoundaryTestResult()
|
||||
{
|
||||
IsTriggering = (ovrpRes.IsTriggering == OVRPlugin.Bool.True),
|
||||
ClosestDistance = ovrpRes.ClosestDistance,
|
||||
ClosestPoint = ovrpRes.ClosestPoint.FromFlippedZVector3f(),
|
||||
ClosestPointNormal = ovrpRes.ClosestPointNormal.FromFlippedZVector3f(),
|
||||
};
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns the results of testing a 3d point against the specified boundary type.
|
||||
/// The test point is expected in local tracking space.
|
||||
/// All points are returned in local tracking space shared by tracked nodes and accessible through OVRCameraRig's trackingSpace anchor.
|
||||
/// </summary>
|
||||
[System.Obsolete("Deprecated. This function will not be supported in OpenXR", false)]
|
||||
public OVRBoundary.BoundaryTestResult TestPoint(Vector3 point, OVRBoundary.BoundaryType boundaryType)
|
||||
{
|
||||
OVRPlugin.BoundaryTestResult ovrpRes = OVRPlugin.TestBoundaryPoint(point.ToFlippedZVector3f(), (OVRPlugin.BoundaryType)boundaryType);
|
||||
|
||||
OVRBoundary.BoundaryTestResult res = new OVRBoundary.BoundaryTestResult()
|
||||
{
|
||||
IsTriggering = (ovrpRes.IsTriggering == OVRPlugin.Bool.True),
|
||||
ClosestDistance = ovrpRes.ClosestDistance,
|
||||
ClosestPoint = ovrpRes.ClosestPoint.FromFlippedZVector3f(),
|
||||
ClosestPointNormal = ovrpRes.ClosestPointNormal.FromFlippedZVector3f(),
|
||||
};
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
private static int cachedVector3fSize = Marshal.SizeOf(typeof(OVRPlugin.Vector3f));
|
||||
private static OVRNativeBuffer cachedGeometryNativeBuffer = new OVRNativeBuffer(0);
|
||||
private static float[] cachedGeometryManagedBuffer = new float[0];
|
||||
private List<Vector3> cachedGeometryList = new List<Vector3>();
|
||||
/// <summary>
|
||||
/// Returns an array of 3d points (in clockwise order) that define the specified boundary type.
|
||||
/// All points are returned in local tracking space shared by tracked nodes and accessible through OVRCameraRig's trackingSpace anchor.
|
||||
/// </summary>
|
||||
public Vector3[] GetGeometry(OVRBoundary.BoundaryType boundaryType)
|
||||
{
|
||||
if (OVRManager.loadedXRDevice != OVRManager.XRDevice.Oculus)
|
||||
{
|
||||
#if !USING_XR_SDK && !REQUIRES_XR_SDK
|
||||
if (Boundary.TryGetGeometry(cachedGeometryList, (boundaryType == BoundaryType.PlayArea) ? Boundary.Type.PlayArea : Boundary.Type.TrackedArea))
|
||||
{
|
||||
Vector3[] arr = cachedGeometryList.ToArray();
|
||||
return arr;
|
||||
}
|
||||
#endif
|
||||
Debug.LogError("This functionality is not supported in your current version of Unity.");
|
||||
return null;
|
||||
}
|
||||
|
||||
int pointsCount = 0;
|
||||
if (OVRPlugin.GetBoundaryGeometry2((OVRPlugin.BoundaryType)boundaryType, IntPtr.Zero, ref pointsCount))
|
||||
{
|
||||
if (pointsCount > 0)
|
||||
{
|
||||
int requiredNativeBufferCapacity = pointsCount * cachedVector3fSize;
|
||||
if (cachedGeometryNativeBuffer.GetCapacity() < requiredNativeBufferCapacity)
|
||||
cachedGeometryNativeBuffer.Reset(requiredNativeBufferCapacity);
|
||||
|
||||
int requiredManagedBufferCapacity = pointsCount * 3;
|
||||
if (cachedGeometryManagedBuffer.Length < requiredManagedBufferCapacity)
|
||||
cachedGeometryManagedBuffer = new float[requiredManagedBufferCapacity];
|
||||
|
||||
if (OVRPlugin.GetBoundaryGeometry2((OVRPlugin.BoundaryType)boundaryType, cachedGeometryNativeBuffer.GetPointer(), ref pointsCount))
|
||||
{
|
||||
Marshal.Copy(cachedGeometryNativeBuffer.GetPointer(), cachedGeometryManagedBuffer, 0, requiredManagedBufferCapacity);
|
||||
|
||||
Vector3[] points = new Vector3[pointsCount];
|
||||
|
||||
for (int i = 0; i < pointsCount; i++)
|
||||
{
|
||||
points[i] = new OVRPlugin.Vector3f()
|
||||
{
|
||||
x = cachedGeometryManagedBuffer[3 * i + 0],
|
||||
y = cachedGeometryManagedBuffer[3 * i + 1],
|
||||
z = cachedGeometryManagedBuffer[3 * i + 2],
|
||||
}.FromFlippedZVector3f();
|
||||
}
|
||||
|
||||
return points;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return new Vector3[0];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns a vector that indicates the spatial dimensions of the specified boundary type. (x = width, y = height, z = depth)
|
||||
/// </summary>
|
||||
public Vector3 GetDimensions(OVRBoundary.BoundaryType boundaryType)
|
||||
{
|
||||
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
|
||||
return OVRPlugin.GetBoundaryDimensions((OVRPlugin.BoundaryType)boundaryType).FromVector3f();
|
||||
|
||||
else
|
||||
{
|
||||
#if !USING_XR_SDK && !REQUIRES_XR_SDK
|
||||
Vector3 dimensions;
|
||||
if (Boundary.TryGetDimensions(out dimensions, (boundaryType == BoundaryType.PlayArea) ? Boundary.Type.PlayArea : Boundary.Type.TrackedArea))
|
||||
return dimensions;
|
||||
#endif
|
||||
return Vector3.zero;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns true if the boundary system is currently visible.
|
||||
/// </summary>
|
||||
[System.Obsolete("Deprecated. This function will not be supported in OpenXR", false)]
|
||||
public bool GetVisible()
|
||||
{
|
||||
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
|
||||
return OVRPlugin.GetBoundaryVisible();
|
||||
else
|
||||
{
|
||||
#if !USING_XR_SDK && !REQUIRES_XR_SDK
|
||||
return Boundary.visible;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Requests that the boundary system visibility be set to the specified value.
|
||||
/// The actual visibility can be overridden by the system (i.e., proximity trigger) or by the user (boundary system disabled)
|
||||
/// </summary>
|
||||
[System.Obsolete("Deprecated. This function will not be supported in OpenXR", false)]
|
||||
public void SetVisible(bool value)
|
||||
{
|
||||
if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
|
||||
OVRPlugin.SetBoundaryVisible(value);
|
||||
else
|
||||
{
|
||||
#if !USING_XR_SDK && !REQUIRES_XR_SDK
|
||||
Boundary.visible = value;
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
||||
12
Assets/Oculus/VR/Scripts/OVRBoundary.cs.meta
Normal file
12
Assets/Oculus/VR/Scripts/OVRBoundary.cs.meta
Normal file
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 332b8e08854932543ba356eec601c0ef
|
||||
timeCreated: 1470352252
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
456
Assets/Oculus/VR/Scripts/OVRCameraRig.cs
Normal file
456
Assets/Oculus/VR/Scripts/OVRCameraRig.cs
Normal file
@@ -0,0 +1,456 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
#if USING_XR_MANAGEMENT && USING_XR_SDK_OCULUS
|
||||
#define USING_XR_SDK
|
||||
#endif
|
||||
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using UnityEngine.XR;
|
||||
using Node = UnityEngine.XR.XRNode;
|
||||
|
||||
/// <summary>
|
||||
/// A head-tracked stereoscopic virtual reality camera rig.
|
||||
/// </summary>
|
||||
[ExecuteInEditMode]
|
||||
public class OVRCameraRig : MonoBehaviour
|
||||
{
|
||||
/// <summary>
|
||||
/// The left eye camera.
|
||||
/// </summary>
|
||||
public Camera leftEyeCamera { get { return (usePerEyeCameras) ? _leftEyeCamera : _centerEyeCamera; } }
|
||||
/// <summary>
|
||||
/// The right eye camera.
|
||||
/// </summary>
|
||||
public Camera rightEyeCamera { get { return (usePerEyeCameras) ? _rightEyeCamera : _centerEyeCamera; } }
|
||||
/// <summary>
|
||||
/// Provides a root transform for all anchors in tracking space.
|
||||
/// </summary>
|
||||
public Transform trackingSpace { get; private set; }
|
||||
/// <summary>
|
||||
/// Always coincides with the pose of the left eye.
|
||||
/// </summary>
|
||||
public Transform leftEyeAnchor { get; private set; }
|
||||
/// <summary>
|
||||
/// Always coincides with average of the left and right eye poses.
|
||||
/// </summary>
|
||||
public Transform centerEyeAnchor { get; private set; }
|
||||
/// <summary>
|
||||
/// Always coincides with the pose of the right eye.
|
||||
/// </summary>
|
||||
public Transform rightEyeAnchor { get; private set; }
|
||||
/// <summary>
|
||||
/// Always coincides with the pose of the left hand.
|
||||
/// </summary>
|
||||
public Transform leftHandAnchor { get; private set; }
|
||||
/// <summary>
|
||||
/// Always coincides with the pose of the right hand.
|
||||
/// </summary>
|
||||
public Transform rightHandAnchor { get; private set; }
|
||||
/// <summary>
|
||||
/// Anchors controller pose to fix offset issues for the left hand.
|
||||
/// </summary>
|
||||
public Transform leftControllerAnchor { get; private set; }
|
||||
/// <summary>
|
||||
/// Anchors controller pose to fix offset issues for the right hand.
|
||||
/// </summary>
|
||||
public Transform rightControllerAnchor { get; private set; }
|
||||
/// <summary>
|
||||
/// Always coincides with the pose of the sensor.
|
||||
/// </summary>
|
||||
public Transform trackerAnchor { get; private set; }
|
||||
/// <summary>
|
||||
/// Occurs when the eye pose anchors have been set.
|
||||
/// </summary>
|
||||
public event System.Action<OVRCameraRig> UpdatedAnchors;
|
||||
/// <summary>
|
||||
/// If true, separate cameras will be used for the left and right eyes.
|
||||
/// </summary>
|
||||
public bool usePerEyeCameras = false;
|
||||
/// <summary>
|
||||
/// If true, all tracked anchors are updated in FixedUpdate instead of Update to favor physics fidelity.
|
||||
/// \note: This will cause visible judder unless you tick exactly once per frame using a custom physics
|
||||
/// update, because you'll be sampling the position at different times into each frame.
|
||||
/// </summary>
|
||||
public bool useFixedUpdateForTracking = false;
|
||||
/// <summary>
|
||||
/// If true, the cameras on the eyeAnchors will be disabled.
|
||||
/// \note: The main camera of the game will be used to provide VR rendering. And the tracking space anchors will still be updated to provide reference poses.
|
||||
/// </summary>
|
||||
public bool disableEyeAnchorCameras = false;
|
||||
|
||||
|
||||
protected bool _skipUpdate = false;
|
||||
protected readonly string trackingSpaceName = "TrackingSpace";
|
||||
protected readonly string trackerAnchorName = "TrackerAnchor";
|
||||
protected readonly string leftEyeAnchorName = "LeftEyeAnchor";
|
||||
protected readonly string centerEyeAnchorName = "CenterEyeAnchor";
|
||||
protected readonly string rightEyeAnchorName = "RightEyeAnchor";
|
||||
protected readonly string leftHandAnchorName = "LeftHandAnchor";
|
||||
protected readonly string rightHandAnchorName = "RightHandAnchor";
|
||||
protected readonly string leftControllerAnchorName = "LeftControllerAnchor";
|
||||
protected readonly string rightControllerAnchorName = "RightControllerAnchor";
|
||||
protected Camera _centerEyeCamera;
|
||||
protected Camera _leftEyeCamera;
|
||||
protected Camera _rightEyeCamera;
|
||||
|
||||
#region Unity Messages
|
||||
protected virtual void Awake()
{
	// Skip the very first tracking update: anchors are reset to identity until the
	// rig hierarchy and cameras have been validated for a full frame.
	_skipUpdate = true;
	EnsureGameObjectIntegrity();
}
|
||||
|
||||
protected virtual void Start()
{
	UpdateAnchors(true, true);
	// Hook the pre-render callback so poses can be late-updated just before rendering.
	Application.onBeforeRender += OnBeforeRenderCallback;
}
|
||||
|
||||
protected virtual void FixedUpdate()
{
	// Track in the physics step only when explicitly requested (see useFixedUpdateForTracking).
	if (useFixedUpdateForTracking)
		UpdateAnchors(true, true);
}
|
||||
|
||||
protected virtual void Update()
{
	// The one-frame skip requested in Awake (or after a camera config change) ends here.
	_skipUpdate = false;

	if (!useFixedUpdateForTracking)
		UpdateAnchors(true, true);
}
|
||||
|
||||
protected virtual void OnDestroy()
{
	// Unsubscribe the callback registered in Start to avoid dangling delegate references.
	Application.onBeforeRender -= OnBeforeRenderCallback;
}
|
||||
#endregion
|
||||
|
||||
/// <summary>
/// Refreshes the anchor transforms from the latest tracking poses.
/// </summary>
/// <param name="updateEyeAnchors">If true, the center/left/right eye anchors are updated.</param>
/// <param name="updateHandAnchors">If true, the hand, controller and tracker anchors are updated.</param>
protected virtual void UpdateAnchors(bool updateEyeAnchors, bool updateHandAnchors)
{
	if (!OVRManager.OVRManagerinitialized)
		return;

	EnsureGameObjectIntegrity();

	if (!Application.isPlaying)
		return;

	// One-frame skip (set in Awake or when EnsureGameObjectIntegrity changed camera state):
	// reset eye anchors to identity and wait for the next update.
	if (_skipUpdate)
	{
		centerEyeAnchor.FromOVRPose(OVRPose.identity, true);
		leftEyeAnchor.FromOVRPose(OVRPose.identity, true);
		rightEyeAnchor.FromOVRPose(OVRPose.identity, true);

		return;
	}

	bool monoscopic = OVRManager.instance.monoscopic;
	bool hmdPresent = OVRNodeStateProperties.IsHmdPresent();

	OVRPose tracker = OVRManager.tracker.GetPose();

	trackerAnchor.localRotation = tracker.orientation;

	// Emulated head rotation applied when no HMD is present (e.g. editor preview);
	// note the sign flips on X and Y relative to headPoseRelativeOffsetRotation.
	Quaternion emulatedRotation = Quaternion.Euler(-OVRManager.instance.headPoseRelativeOffsetRotation.x, -OVRManager.instance.headPoseRelativeOffsetRotation.y, OVRManager.instance.headPoseRelativeOffsetRotation.z);

	//Note: in the below code, when using UnityEngine's API, we only update anchor transforms if we have a new, fresh value this frame.
	//If we don't, it could mean that tracking is lost, etc. so the pose should not change in the virtual world.
	//This can be thought of as similar to calling InputTracking GetLocalPosition and Rotation, but only for doing so when the pose is valid.
	//If false is returned for any of these calls, then a new pose is not valid and thus should not be updated.
	if (updateEyeAnchors)
	{
		if (hmdPresent)
		{
			Vector3 centerEyePosition = Vector3.zero;
			Quaternion centerEyeRotation = Quaternion.identity;

			if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.CenterEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyePosition))
				centerEyeAnchor.localPosition = centerEyePosition;
			if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.CenterEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeCenter, OVRPlugin.Step.Render, out centerEyeRotation))
				centerEyeAnchor.localRotation = centerEyeRotation;
		}
		else
		{
			centerEyeAnchor.localRotation = emulatedRotation;
			centerEyeAnchor.localPosition = OVRManager.instance.headPoseRelativeOffsetTranslation;
		}

		// Without an HMD, or in monoscopic mode, both eye anchors simply mirror the center eye.
		if (!hmdPresent || monoscopic)
		{
			leftEyeAnchor.localPosition = centerEyeAnchor.localPosition;
			rightEyeAnchor.localPosition = centerEyeAnchor.localPosition;
			leftEyeAnchor.localRotation = centerEyeAnchor.localRotation;
			rightEyeAnchor.localRotation = centerEyeAnchor.localRotation;
		}
		else
		{
			Vector3 leftEyePosition = Vector3.zero;
			Vector3 rightEyePosition = Vector3.zero;
			Quaternion leftEyeRotation = Quaternion.identity;
			Quaternion rightEyeRotation = Quaternion.identity;

			if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.LeftEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render, out leftEyePosition))
				leftEyeAnchor.localPosition = leftEyePosition;
			if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.RightEye, NodeStatePropertyType.Position, OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render, out rightEyePosition))
				rightEyeAnchor.localPosition = rightEyePosition;
			if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.LeftEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render, out leftEyeRotation))
				leftEyeAnchor.localRotation = leftEyeRotation;
			if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.RightEye, NodeStatePropertyType.Orientation, OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render, out rightEyeRotation))
				rightEyeAnchor.localRotation = rightEyeRotation;
		}
	}

	if (updateHandAnchors)
	{
		//Need this for controller offset because if we're on OpenVR, we want to set the local poses as specified by Unity, but if we're not, OVRInput local position is the right anchor
		if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
		{
			Vector3 leftPos = Vector3.zero;
			Vector3 rightPos = Vector3.zero;
			Quaternion leftQuat = Quaternion.identity;
			Quaternion rightQuat = Quaternion.identity;

			if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.LeftHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftPos))
				leftHandAnchor.localPosition = leftPos;
			if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.RightHand, NodeStatePropertyType.Position, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightPos))
				rightHandAnchor.localPosition = rightPos;
			if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.LeftHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render, out leftQuat))
				leftHandAnchor.localRotation = leftQuat;
			if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.RightHand, NodeStatePropertyType.Orientation, OVRPlugin.Node.HandRight, OVRPlugin.Step.Render, out rightQuat))
				rightHandAnchor.localRotation = rightQuat;

		}
		else
		{
			leftHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.LTouch);
			rightHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch);
			leftHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.LTouch);
			rightHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.RTouch);
		}

		trackerAnchor.localPosition = tracker.position;

		// Controller anchors carry an extra per-device offset on OpenVR; identity elsewhere.
		OVRPose leftOffsetPose = OVRPose.identity;
		OVRPose rightOffsetPose = OVRPose.identity;
		if (OVRManager.loadedXRDevice == OVRManager.XRDevice.OpenVR)
		{
			leftOffsetPose = OVRManager.GetOpenVRControllerOffset(Node.LeftHand);
			rightOffsetPose = OVRManager.GetOpenVRControllerOffset(Node.RightHand);

			//Sets poses of left and right nodes, local to the tracking space.
			OVRManager.SetOpenVRLocalPose(trackingSpace.InverseTransformPoint(leftControllerAnchor.position),
				trackingSpace.InverseTransformPoint(rightControllerAnchor.position),
				Quaternion.Inverse(trackingSpace.rotation) * leftControllerAnchor.rotation,
				Quaternion.Inverse(trackingSpace.rotation) * rightControllerAnchor.rotation);
		}
		rightControllerAnchor.localPosition = rightOffsetPose.position;
		rightControllerAnchor.localRotation = rightOffsetPose.orientation;
		leftControllerAnchor.localPosition = leftOffsetPose.position;
		leftControllerAnchor.localRotation = leftOffsetPose.orientation;
	}

#if USING_XR_SDK
#if UNITY_2020_3_OR_NEWER
	// Late latching lets the runtime sample these transforms as late as possible before rendering.
	if (OVRManager.instance.LateLatching)
	{
		XRDisplaySubsystem displaySubsystem = OVRManager.GetCurrentDisplaySubsystem();
		if (displaySubsystem != null)
		{
			displaySubsystem.MarkTransformLateLatched(centerEyeAnchor.transform, XRDisplaySubsystem.LateLatchNode.Head);
			displaySubsystem.MarkTransformLateLatched(leftHandAnchor, XRDisplaySubsystem.LateLatchNode.LeftHand);
			displaySubsystem.MarkTransformLateLatched(rightHandAnchor, XRDisplaySubsystem.LateLatchNode.RightHand);
		}
	}
#endif
#endif
	RaiseUpdatedAnchorsEvent();
}
|
||||
|
||||
/// <summary>
/// Called via Application.onBeforeRender (registered in Start) to late-update poses
/// with the freshest tracking data right before the frame is rendered.
/// </summary>
protected virtual void OnBeforeRenderCallback()
{
	if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)			//Restrict late-update to only Oculus devices
	{
		bool controllersNeedUpdate = OVRManager.instance.LateControllerUpdate;
#if USING_XR_SDK
		//For the XR SDK, we need to late update head pose, not just the controllers, because the functionality
		//is no longer built-in to the Engine. Under legacy, late camera update is done by default. In the XR SDK, you must use
		//Tracked Pose Driver to get this by default, which we do not use. So, we have to manually late update camera poses.
		UpdateAnchors(true, controllersNeedUpdate);
#else
		if (controllersNeedUpdate)
			UpdateAnchors(false, true);
#endif
	}
}
|
||||
|
||||
/// <summary>
/// Notifies subscribers that the anchor transforms have just been refreshed.
/// </summary>
protected virtual void RaiseUpdatedAnchorsEvent()
{
	// Copy the delegate to a local so the null check and the invocation
	// observe the same subscriber list.
	System.Action<OVRCameraRig> handler = UpdatedAnchors;
	if (handler != null)
	{
		handler(this);
	}
}
|
||||
|
||||
/// <summary>
/// Finds or creates the full anchor hierarchy and the eye cameras, and reconciles
/// camera enable/stereo-target state with the current settings. Safe to call repeatedly.
/// </summary>
public virtual void EnsureGameObjectIntegrity()
{
	bool monoscopic = OVRManager.instance != null ? OVRManager.instance.monoscopic : false;

	// Build (or re-find) each anchor; parents default to trackingSpace except the
	// controller anchors, which hang off their respective hand anchors.
	if (trackingSpace == null)
		trackingSpace = ConfigureAnchor(null, trackingSpaceName);

	if (leftEyeAnchor == null)
		leftEyeAnchor = ConfigureAnchor(trackingSpace, leftEyeAnchorName);

	if (centerEyeAnchor == null)
		centerEyeAnchor = ConfigureAnchor(trackingSpace, centerEyeAnchorName);

	if (rightEyeAnchor == null)
		rightEyeAnchor = ConfigureAnchor(trackingSpace, rightEyeAnchorName);

	if (leftHandAnchor == null)
		leftHandAnchor = ConfigureAnchor(trackingSpace, leftHandAnchorName);

	if (rightHandAnchor == null)
		rightHandAnchor = ConfigureAnchor(trackingSpace, rightHandAnchorName);

	if (trackerAnchor == null)
		trackerAnchor = ConfigureAnchor(trackingSpace, trackerAnchorName);

	if (leftControllerAnchor == null)
		leftControllerAnchor = ConfigureAnchor(leftHandAnchor, leftControllerAnchorName);

	if (rightControllerAnchor == null)
		rightControllerAnchor = ConfigureAnchor(rightHandAnchor, rightControllerAnchorName);

	// Cache (or create) the Camera components on the eye anchors.
	if (_centerEyeCamera == null || _leftEyeCamera == null || _rightEyeCamera == null)
	{
		_centerEyeCamera = centerEyeAnchor.GetComponent<Camera>();
		_leftEyeCamera = leftEyeAnchor.GetComponent<Camera>();
		_rightEyeCamera = rightEyeAnchor.GetComponent<Camera>();

		if (_centerEyeCamera == null)
		{
			_centerEyeCamera = centerEyeAnchor.gameObject.AddComponent<Camera>();
			_centerEyeCamera.tag = "MainCamera";
		}

		if (_leftEyeCamera == null)
		{
			_leftEyeCamera = leftEyeAnchor.gameObject.AddComponent<Camera>();
			_leftEyeCamera.tag = "MainCamera";
		}

		if (_rightEyeCamera == null)
		{
			_rightEyeCamera = rightEyeAnchor.gameObject.AddComponent<Camera>();
			_rightEyeCamera.tag = "MainCamera";
		}

		_centerEyeCamera.stereoTargetEye = StereoTargetEyeMask.Both;
		_leftEyeCamera.stereoTargetEye = StereoTargetEyeMask.Left;
		_rightEyeCamera.stereoTargetEye = StereoTargetEyeMask.Right;
	}

	if (monoscopic && !OVRPlugin.EyeTextureArrayEnabled)
	{
		// Output to left eye only when in monoscopic mode
		if (_centerEyeCamera.stereoTargetEye != StereoTargetEyeMask.Left)
		{
			_centerEyeCamera.stereoTargetEye = StereoTargetEyeMask.Left;
		}
	}
	else
	{
		if (_centerEyeCamera.stereoTargetEye != StereoTargetEyeMask.Both)
		{
			_centerEyeCamera.stereoTargetEye = StereoTargetEyeMask.Both;
		}
	}

	if (disableEyeAnchorCameras)
	{
		_centerEyeCamera.enabled = false;
		_leftEyeCamera.enabled = false;
		_rightEyeCamera.enabled = false;
	}
	else
	{
		// disable the right eye camera when in monoscopic mode
		// If any camera's current enabled flag disagrees with the desired configuration
		// below, the setup is changing this frame: request one skipped tracking update.
		if (_centerEyeCamera.enabled == usePerEyeCameras ||
			_leftEyeCamera.enabled == !usePerEyeCameras ||
			_rightEyeCamera.enabled == !(usePerEyeCameras && (!monoscopic || OVRPlugin.EyeTextureArrayEnabled)))
		{
			_skipUpdate = true;
		}

		_centerEyeCamera.enabled = !usePerEyeCameras;
		_leftEyeCamera.enabled = usePerEyeCameras;
		_rightEyeCamera.enabled = (usePerEyeCameras && (!monoscopic || OVRPlugin.EyeTextureArrayEnabled));

	}
}
|
||||
|
||||
/// <summary>
/// Finds or creates an anchor Transform with the given name, parents it under
/// <paramref name="root"/> (or this rig when root is null) and resets its local pose.
/// </summary>
protected virtual Transform ConfigureAnchor(Transform root, string name)
{
	// Look for an existing anchor: first under the intended root, then under the rig itself.
	Transform result = null;
	if (root != null)
		result = root.Find(name);

	if (result == null)
		result = transform.Find(name);

	// Nothing found anywhere — create a fresh GameObject to serve as the anchor.
	if (result == null)
		result = new GameObject(name).transform;

	// (Re)name, (re)parent and zero out the local pose so the anchor starts clean.
	result.name = name;
	result.parent = (root != null) ? root : transform;
	result.localScale = Vector3.one;
	result.localPosition = Vector3.zero;
	result.localRotation = Quaternion.identity;

	return result;
}
|
||||
|
||||
/// <summary>
/// Computes the world-space matrix of the tracking reference (world-from-tracking),
/// i.e. centerEyeAnchor's matrix with the current head pose factored out.
/// Returns identity (with an error log) if centerEyeAnchor is missing.
/// </summary>
public virtual Matrix4x4 ComputeTrackReferenceMatrix()
{
	if (centerEyeAnchor == null)
	{
		Debug.LogError("centerEyeAnchor is required");
		return Matrix4x4.identity;
	}

	// The ideal approach would be using UnityEngine.VR.VRNode.TrackingReference, then we would not have to depend on the OVRCameraRig. Unfortunately, it is not available in Unity 5.4.3

	OVRPose headPose = OVRPose.identity;

	// Sample the current head pose; components keep their identity defaults if sampling fails.
	Vector3 pos;
	Quaternion rot;
	if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Position, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out pos))
		headPose.position = pos;
	if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.Head, NodeStatePropertyType.Orientation, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out rot))
		headPose.orientation = rot;

	// world-from-tracking = world-from-head * head-from-tracking (inverse head pose).
	OVRPose invHeadPose = headPose.Inverse();
	Matrix4x4 invHeadMatrix = Matrix4x4.TRS(invHeadPose.position, invHeadPose.orientation, Vector3.one);

	Matrix4x4 ret = centerEyeAnchor.localToWorldMatrix * invHeadMatrix;

	return ret;
}
|
||||
}
|
||||
8
Assets/Oculus/VR/Scripts/OVRCameraRig.cs.meta
Normal file
8
Assets/Oculus/VR/Scripts/OVRCameraRig.cs.meta
Normal file
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: df9f338034892c44ebb62d97894772f1
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
715
Assets/Oculus/VR/Scripts/OVRCommon.cs
Normal file
715
Assets/Oculus/VR/Scripts/OVRCommon.cs
Normal file
@@ -0,0 +1,715 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
#if USING_XR_MANAGEMENT && USING_XR_SDK_OCULUS
|
||||
#define USING_XR_SDK
|
||||
#endif
|
||||
|
||||
#if UNITY_2020_1_OR_NEWER
|
||||
#define REQUIRES_XR_SDK
|
||||
#endif
|
||||
|
||||
using UnityEngine;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Runtime.InteropServices;
|
||||
|
||||
#if USING_XR_SDK
|
||||
using UnityEngine.XR;
|
||||
using UnityEngine.Experimental.XR;
|
||||
#endif
|
||||
|
||||
using InputTracking = UnityEngine.XR.InputTracking;
|
||||
using Node = UnityEngine.XR.XRNode;
|
||||
using NodeState = UnityEngine.XR.XRNodeState;
|
||||
using Device = UnityEngine.XR.XRDevice;
|
||||
|
||||
/// <summary>
|
||||
/// Miscellaneous extension methods that any script can use.
|
||||
/// </summary>
|
||||
public static class OVRExtensions
{
	/// <summary>
	/// Converts the given world-space transform to an OVRPose in tracking space.
	/// </summary>
	public static OVRPose ToTrackingSpacePose(this Transform transform, Camera camera)
	{
		//Initializing to identity, but for all Oculus headsets, down below the pose will be initialized to the runtime's pose value, so identity will never be returned.
		OVRPose headPose = OVRPose.identity;

		Vector3 pos;
		Quaternion rot;
		if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Position, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out pos))
			headPose.position = pos;
		if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.Head, NodeStatePropertyType.Orientation, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out rot))
			headPose.orientation = rot;

		// tracking-from-world = tracking-from-head * head-from-world
		var ret = headPose * transform.ToHeadSpacePose(camera);

		return ret;
	}


	/// <summary>
	/// Converts the given pose from tracking-space to world-space.
	/// NOTE(review): uses Camera.main as the head transform — throws if no camera is tagged MainCamera.
	/// </summary>
	public static OVRPose ToWorldSpacePose(OVRPose trackingSpacePose)
	{
		OVRPose headPose = OVRPose.identity;

		Vector3 pos;
		Quaternion rot;
		if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Position, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out pos))
			headPose.position = pos;
		if (OVRNodeStateProperties.GetNodeStatePropertyQuaternion(Node.Head, NodeStatePropertyType.Orientation, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out rot))
			headPose.orientation = rot;

		// Transform from tracking-Space to head-Space
		OVRPose poseInHeadSpace = headPose.Inverse() * trackingSpacePose;

		// Transform from head space to world space
		OVRPose ret = Camera.main.transform.ToOVRPose() * poseInHeadSpace;

		return ret;
	}

	/// <summary>
	/// Converts the given world-space transform to an OVRPose in head space.
	/// </summary>
	public static OVRPose ToHeadSpacePose(this Transform transform, Camera camera)
	{
		return camera.transform.ToOVRPose().Inverse() * transform.ToOVRPose();
	}

	/// <summary>
	/// Snapshots a Transform's rotation and position (local or world) as an OVRPose.
	/// </summary>
	public static OVRPose ToOVRPose(this Transform t, bool isLocal = false)
	{
		OVRPose pose;
		pose.orientation = (isLocal) ? t.localRotation : t.rotation;
		pose.position = (isLocal) ? t.localPosition : t.position;
		return pose;
	}

	/// <summary>
	/// Applies an OVRPose to a Transform's rotation and position (local or world).
	/// </summary>
	public static void FromOVRPose(this Transform t, OVRPose pose, bool isLocal = false)
	{
		if (isLocal)
		{
			t.localRotation = pose.orientation;
			t.localPosition = pose.position;
		}
		else
		{
			t.rotation = pose.orientation;
			t.position = pose.position;
		}
	}

	/// <summary>
	/// Converts an OVRPlugin pose (right-handed) to a Unity OVRPose (left-handed):
	/// Z position is negated and the quaternion's X/Y are negated accordingly.
	/// </summary>
	public static OVRPose ToOVRPose(this OVRPlugin.Posef p)
	{
		return new OVRPose()
		{
			position = new Vector3(p.Position.x, p.Position.y, -p.Position.z),
			orientation = new Quaternion(-p.Orientation.x, -p.Orientation.y, p.Orientation.z, p.Orientation.w)
		};
	}

	/// <summary>
	/// Converts an OVRPlugin frustum (radians) to an OVRTracker.Frustum (degrees).
	/// </summary>
	public static OVRTracker.Frustum ToFrustum(this OVRPlugin.Frustumf f)
	{
		return new OVRTracker.Frustum()
		{
			nearZ = f.zNear,
			farZ = f.zFar,

			fov = new Vector2()
			{
				x = Mathf.Rad2Deg * f.fovX,
				y = Mathf.Rad2Deg * f.fovY
			}
		};
	}

	// Component-wise conversions between Unity types and their OVRPlugin counterparts.
	// "FlippedX"/"FlippedZ" variants negate one axis (and the matching quaternion
	// components) to convert between handedness/axis conventions.

	public static Color FromColorf(this OVRPlugin.Colorf c)
	{
		return new Color() { r = c.r, g = c.g, b = c.b, a = c.a };
	}

	public static OVRPlugin.Colorf ToColorf(this Color c)
	{
		return new OVRPlugin.Colorf() { r = c.r, g = c.g, b = c.b, a = c.a };
	}

	public static Vector3 FromVector3f(this OVRPlugin.Vector3f v)
	{
		return new Vector3() { x = v.x, y = v.y, z = v.z };
	}

	public static Vector3 FromFlippedXVector3f(this OVRPlugin.Vector3f v)
	{
		return new Vector3() { x = -v.x, y = v.y, z = v.z };
	}

	public static Vector3 FromFlippedZVector3f(this OVRPlugin.Vector3f v)
	{
		return new Vector3() { x = v.x, y = v.y, z = -v.z };
	}

	public static OVRPlugin.Vector3f ToVector3f(this Vector3 v)
	{
		return new OVRPlugin.Vector3f() { x = v.x, y = v.y, z = v.z };
	}

	public static OVRPlugin.Vector3f ToFlippedXVector3f(this Vector3 v)
	{
		return new OVRPlugin.Vector3f() { x = -v.x, y = v.y, z = v.z };
	}

	public static OVRPlugin.Vector3f ToFlippedZVector3f(this Vector3 v)
	{
		return new OVRPlugin.Vector3f() { x = v.x, y = v.y, z = -v.z };
	}

	public static Vector4 FromVector4f(this OVRPlugin.Vector4f v)
	{
		return new Vector4() { x = v.x, y = v.y, z = v.z, w = v.w };
	}

	public static OVRPlugin.Vector4f ToVector4f(this Vector4 v)
	{
		return new OVRPlugin.Vector4f() { x = v.x, y = v.y, z = v.z, w = v.w };
	}

	public static Quaternion FromQuatf(this OVRPlugin.Quatf q)
	{
		return new Quaternion() { x = q.x, y = q.y, z = q.z, w = q.w };
	}

	public static Quaternion FromFlippedXQuatf(this OVRPlugin.Quatf q)
	{
		return new Quaternion() { x = q.x, y = -q.y, z = -q.z, w = q.w };
	}

	public static Quaternion FromFlippedZQuatf(this OVRPlugin.Quatf q)
	{
		return new Quaternion() { x = -q.x, y = -q.y, z = q.z, w = q.w };
	}

	public static OVRPlugin.Quatf ToQuatf(this Quaternion q)
	{
		return new OVRPlugin.Quatf() { x = q.x, y = q.y, z = q.z, w = q.w };
	}

	public static OVRPlugin.Quatf ToFlippedXQuatf(this Quaternion q)
	{
		return new OVRPlugin.Quatf() { x = q.x, y = -q.y, z = -q.z, w = q.w };
	}

	public static OVRPlugin.Quatf ToFlippedZQuatf(this Quaternion q)
	{
		return new OVRPlugin.Quatf() { x = -q.x, y = -q.y, z = q.z, w = q.w };
	}

	/// <summary>
	/// Converts a Unity 4x4 matrix to an OpenVR 3x4 pose matrix, negating the
	/// Z-related terms to switch between left- and right-handed conventions.
	/// </summary>
	public static OVR.OpenVR.HmdMatrix34_t ConvertToHMDMatrix34(this Matrix4x4 m)
	{
		OVR.OpenVR.HmdMatrix34_t pose = new OVR.OpenVR.HmdMatrix34_t();

		pose.m0 = m[0, 0];
		pose.m1 = m[0, 1];
		pose.m2 = -m[0, 2];
		pose.m3 = m[0, 3];

		pose.m4 = m[1, 0];
		pose.m5 = m[1, 1];
		pose.m6 = -m[1, 2];
		pose.m7 = m[1, 3];

		pose.m8 = -m[2, 0];
		pose.m9 = -m[2, 1];
		pose.m10 = m[2, 2];
		pose.m11 = -m[2, 3];

		return pose;
	}

	/// <summary>
	/// Depth-first search for a descendant whose name CONTAINS the given string
	/// (substring match, not exact equality). Returns null if none is found.
	/// </summary>
	public static Transform FindChildRecursive(this Transform parent, string name)
	{
		foreach (Transform child in parent)
		{
			if (child.name.Contains(name))
				return child;

			var result = child.FindChildRecursive(name);
			if (result != null)
				return result;
		}
		return null;
	}

	/// <summary>
	/// Compares two gradients key-by-key using exact float equality.
	/// NOTE(review): this extension does not override object.Equals; calls through an
	/// object-typed receiver resolve to reference equality instead — verify call sites.
	/// </summary>
	public static bool Equals(this Gradient gradient, Gradient otherGradient)
	{
		if (gradient.colorKeys.Length != otherGradient.colorKeys.Length || gradient.alphaKeys.Length != otherGradient.alphaKeys.Length)
			return false;

		for (int i = 0; i < gradient.colorKeys.Length; i++)
		{
			GradientColorKey key = gradient.colorKeys[i];
			GradientColorKey otherKey = otherGradient.colorKeys[i];
			if (key.color != otherKey.color || key.time != otherKey.time)
				return false;
		}

		for (int i = 0; i < gradient.alphaKeys.Length; i++)
		{
			GradientAlphaKey key = gradient.alphaKeys[i];
			GradientAlphaKey otherKey = otherGradient.alphaKeys[i];
			if (key.alpha != otherKey.alpha || key.time != otherKey.time)
				return false;
		}

		return true;
	}

	/// <summary>
	/// Deep-copies all color and alpha keys from otherGradient into this gradient.
	/// </summary>
	public static void CopyFrom(this Gradient gradient, Gradient otherGradient)
	{
		GradientColorKey[] colorKeys = new GradientColorKey[otherGradient.colorKeys.Length];
		for (int i = 0; i < colorKeys.Length; i++)
		{
			Color col = otherGradient.colorKeys[i].color;
			colorKeys[i].color = new Color(col.r, col.g, col.b, col.a);
			colorKeys[i].time = otherGradient.colorKeys[i].time;
		}

		GradientAlphaKey[] alphaKeys = new GradientAlphaKey[otherGradient.alphaKeys.Length];
		for (int i = 0; i < alphaKeys.Length; i++)
		{
			alphaKeys[i].alpha = otherGradient.alphaKeys[i].alpha;
			alphaKeys[i].time = otherGradient.alphaKeys[i].time;
		}

		gradient.SetKeys(colorKeys, alphaKeys);
	}
}
|
||||
|
||||
// Node state properties that can be queried with UnityEngine.XR
// (six kinds: four vector-valued, plus position and orientation).
public enum NodeStatePropertyType
{
	Acceleration,
	AngularAcceleration,
	Velocity,
	AngularVelocity,
	Position,
	Orientation
}
|
||||
|
||||
/// <summary>
/// Helpers for querying per-node tracking state. On Oculus devices the queries go
/// straight to OVRPlugin; otherwise they fall back to UnityEngine.XR.InputTracking.
/// </summary>
public static class OVRNodeStateProperties
{
	// Reused scratch buffer for InputTracking.GetNodeStates (avoids per-call allocation;
	// not thread-safe, but these helpers are intended for the main thread).
	private static List<NodeState> nodeStateList = new List<NodeState>();

	/// <summary>
	/// Returns whether an HMD is currently present, using the most specific API available.
	/// </summary>
	public static bool IsHmdPresent()
	{
		if (OVRManager.OVRManagerinitialized && OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
			return OVRPlugin.hmdPresent;
#if USING_XR_SDK
		XRDisplaySubsystem currentDisplaySubsystem = OVRManager.GetCurrentDisplaySubsystem();
		if (currentDisplaySubsystem != null)
			return currentDisplaySubsystem.running;			//In 2019.3, this should be changed to currentDisplaySubsystem.isConnected, but this is a fine placeholder for now.
		return false;
#elif REQUIRES_XR_SDK
		return false;
#else
		return Device.isPresent;
#endif
	}

	/// <summary>
	/// Reads a vector-valued tracking property for a node. Returns false (and leaves
	/// retVec at Vector3.zero) when no fresh, valid value is available this frame.
	/// </summary>
	public static bool GetNodeStatePropertyVector3(Node nodeType, NodeStatePropertyType propertyType, OVRPlugin.Node ovrpNodeType, OVRPlugin.Step stepType, out Vector3 retVec)
	{
		retVec = Vector3.zero;
		switch (propertyType)
		{
			// Each case: prefer the OVRPlugin value on Oculus devices (converted from
			// the plugin's flipped-Z convention), otherwise fall back to UnityEngine.XR.
			case NodeStatePropertyType.Acceleration:
				if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
				{
					retVec = OVRPlugin.GetNodeAcceleration(ovrpNodeType, stepType).FromFlippedZVector3f();
					return true;
				}
				if (GetUnityXRNodeStateVector3(nodeType, NodeStatePropertyType.Acceleration, out retVec))
					return true;
				break;

			case NodeStatePropertyType.AngularAcceleration:
				if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
				{
					retVec = OVRPlugin.GetNodeAngularAcceleration(ovrpNodeType, stepType).FromFlippedZVector3f();
					return true;
				}
				if (GetUnityXRNodeStateVector3(nodeType, NodeStatePropertyType.AngularAcceleration, out retVec))
					return true;
				break;

			case NodeStatePropertyType.Velocity:
				if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
				{
					retVec = OVRPlugin.GetNodeVelocity(ovrpNodeType, stepType).FromFlippedZVector3f();
					return true;
				}
				if (GetUnityXRNodeStateVector3(nodeType, NodeStatePropertyType.Velocity, out retVec))
					return true;
				break;

			case NodeStatePropertyType.AngularVelocity:
				if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
				{
					retVec = OVRPlugin.GetNodeAngularVelocity(ovrpNodeType, stepType).FromFlippedZVector3f();
					return true;
				}
				if (GetUnityXRNodeStateVector3(nodeType, NodeStatePropertyType.AngularVelocity, out retVec))
					return true;
				break;

			case NodeStatePropertyType.Position:
				if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
				{
					retVec = OVRPlugin.GetNodePose(ovrpNodeType, stepType).ToOVRPose().position;
					return true;
				}
				if (GetUnityXRNodeStateVector3(nodeType, NodeStatePropertyType.Position, out retVec))
					return true;
				break;
		}

		return false;
	}

	/// <summary>
	/// Reads a quaternion-valued tracking property (orientation) for a node. Returns
	/// false (and leaves retQuat at identity) when no fresh, valid value is available.
	/// </summary>
	public static bool GetNodeStatePropertyQuaternion(Node nodeType, NodeStatePropertyType propertyType, OVRPlugin.Node ovrpNodeType, OVRPlugin.Step stepType, out Quaternion retQuat)
	{
		retQuat = Quaternion.identity;
		switch (propertyType)
		{
			case NodeStatePropertyType.Orientation:
				if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
				{
					retQuat = OVRPlugin.GetNodePose(ovrpNodeType, stepType).ToOVRPose().orientation;
					return true;
				}
				if (GetUnityXRNodeStateQuaternion(nodeType, NodeStatePropertyType.Orientation, out retQuat))
					return true;
				break;
		}
		return false;
	}

	// Refreshes the node-state list and locates the entry for the requested node.
	// Returns false if the list is empty or the node is not being tracked.
	private static bool ValidateProperty(Node nodeType, ref NodeState requestedNodeState)
	{
		InputTracking.GetNodeStates(nodeStateList);

		if (nodeStateList.Count == 0)
			return false;

		bool nodeStateFound = false;
		requestedNodeState = nodeStateList[0];

		for (int i = 0; i < nodeStateList.Count; i++)
		{
			if (nodeStateList[i].nodeType == nodeType)
			{
				requestedNodeState = nodeStateList[i];
				nodeStateFound = true;
				break;
			}
		}

		return nodeStateFound;
	}

	// UnityEngine.XR fallback: TryGet* only succeeds when the runtime has a valid
	// value for that property this frame.
	private static bool GetUnityXRNodeStateVector3(Node nodeType, NodeStatePropertyType propertyType, out Vector3 retVec)
	{
		retVec = Vector3.zero;

		NodeState requestedNodeState = default(NodeState);

		if (!ValidateProperty(nodeType, ref requestedNodeState))
			return false;

		if (propertyType == NodeStatePropertyType.Acceleration)
		{
			if (requestedNodeState.TryGetAcceleration(out retVec))
			{
				return true;
			}
		}
		else if (propertyType == NodeStatePropertyType.AngularAcceleration)
		{
			if (requestedNodeState.TryGetAngularAcceleration(out retVec))
			{
				return true;
			}
		}
		else if (propertyType == NodeStatePropertyType.Velocity)
		{
			if (requestedNodeState.TryGetVelocity(out retVec))
			{
				return true;
			}
		}
		else if (propertyType == NodeStatePropertyType.AngularVelocity)
		{
			if (requestedNodeState.TryGetAngularVelocity(out retVec))
			{
				return true;
			}
		}
		else if (propertyType == NodeStatePropertyType.Position)
		{
			if (requestedNodeState.TryGetPosition(out retVec))
			{
				return true;
			}
		}

		return false;
	}

	// UnityEngine.XR fallback for orientation; see GetUnityXRNodeStateVector3.
	private static bool GetUnityXRNodeStateQuaternion(Node nodeType, NodeStatePropertyType propertyType, out Quaternion retQuat)
	{
		retQuat = Quaternion.identity;

		NodeState requestedNodeState = default(NodeState);

		if (!ValidateProperty(nodeType, ref requestedNodeState))
			return false;

		if (propertyType == NodeStatePropertyType.Orientation)
		{
			if (requestedNodeState.TryGetRotation(out retQuat))
			{
				return true;
			}
		}

		return false;
	}

}
|
||||
|
||||
/// <summary>
/// An affine transformation built from a Unity position and orientation.
/// </summary>
[System.Serializable]
public struct OVRPose
{
	/// <summary>
	/// A pose with no translation or rotation.
	/// </summary>
	public static OVRPose identity
	{
		get
		{
			OVRPose pose;
			pose.position = Vector3.zero;
			pose.orientation = Quaternion.identity;
			return pose;
		}
	}

	public override bool Equals(System.Object obj)
	{
		if (!(obj is OVRPose))
			return false;
		return this == (OVRPose)obj;
	}

	public override int GetHashCode()
	{
		return position.GetHashCode() ^ orientation.GetHashCode();
	}

	public static bool operator ==(OVRPose x, OVRPose y)
	{
		return (x.position == y.position) && (x.orientation == y.orientation);
	}

	public static bool operator !=(OVRPose x, OVRPose y)
	{
		return !(x == y);
	}

	/// <summary>
	/// The position.
	/// </summary>
	public Vector3 position;

	/// <summary>
	/// The orientation.
	/// </summary>
	public Quaternion orientation;

	/// <summary>
	/// Multiplies two poses: applies rhs in the local frame of lhs.
	/// </summary>
	public static OVRPose operator *(OVRPose lhs, OVRPose rhs)
	{
		OVRPose combined;
		combined.position = lhs.position + lhs.orientation * rhs.position;
		combined.orientation = lhs.orientation * rhs.orientation;
		return combined;
	}

	/// <summary>
	/// Computes the inverse of the given pose.
	/// </summary>
	public OVRPose Inverse()
	{
		OVRPose inv;
		inv.orientation = Quaternion.Inverse(orientation);
		inv.position = inv.orientation * -position;
		return inv;
	}

	/// <summary>
	/// Converts the pose from left- to right-handed or vice-versa.
	/// </summary>
	public OVRPose flipZ()
	{
		OVRPose flipped = this;
		flipped.position.z = -flipped.position.z;
		flipped.orientation.z = -flipped.orientation.z;
		flipped.orientation.w = -flipped.orientation.w;
		return flipped;
	}

	// Warning: this function is not a strict reverse of OVRPlugin.Posef.ToOVRPose(), even after flipZ()
	public OVRPlugin.Posef ToPosef_Legacy()
	{
		var pose = new OVRPlugin.Posef();
		pose.Position = position.ToVector3f();
		pose.Orientation = orientation.ToQuatf();
		return pose;
	}

	/// <summary>
	/// Converts the pose to an OVRPlugin.Posef, flipping handedness
	/// (z position negated; x/y of the quaternion negated).
	/// </summary>
	public OVRPlugin.Posef ToPosef()
	{
		var result = new OVRPlugin.Posef();
		result.Position.x = position.x;
		result.Position.y = position.y;
		result.Position.z = -position.z;
		result.Orientation.x = -orientation.x;
		result.Orientation.y = -orientation.y;
		result.Orientation.z = orientation.z;
		result.Orientation.w = orientation.w;
		return result;
	}
}
|
||||
|
||||
/// <summary>
/// Encapsulates an 8-byte-aligned region of unmanaged memory.
/// </summary>
public class OVRNativeBuffer : IDisposable
{
	private bool disposed = false;
	private int m_numBytes = 0;
	private IntPtr m_ptr = IntPtr.Zero;

	/// <summary>
	/// Creates a buffer of the specified size.
	/// </summary>
	public OVRNativeBuffer(int numBytes)
	{
		Reallocate(numBytes);
	}

	/// <summary>
	/// Releases unmanaged resources and performs other cleanup operations before the <see cref="OVRNativeBuffer"/> is
	/// reclaimed by garbage collection.
	/// </summary>
	~OVRNativeBuffer()
	{
		Dispose(false);
	}

	/// <summary>
	/// Reallocates the buffer with the specified new size.
	/// </summary>
	public void Reset(int numBytes)
	{
		Reallocate(numBytes);
	}

	/// <summary>
	/// The current number of bytes in the buffer.
	/// </summary>
	public int GetCapacity()
	{
		return m_numBytes;
	}

	/// <summary>
	/// A pointer to the unmanaged memory in the buffer, starting at the given offset in bytes.
	/// Returns IntPtr.Zero when the offset is negative or past the end of the buffer.
	/// </summary>
	public IntPtr GetPointer(int byteOffset = 0)
	{
		bool inRange = (byteOffset >= 0) && (byteOffset < m_numBytes);
		if (!inRange)
			return IntPtr.Zero;

		return (byteOffset == 0) ? m_ptr : new IntPtr(m_ptr.ToInt64() + byteOffset);
	}

	/// <summary>
	/// Releases all resource used by the <see cref="OVRNativeBuffer"/> object.
	/// </summary>
	/// <remarks>Call <see cref="Dispose"/> when you are finished using the <see cref="OVRNativeBuffer"/>. The <see cref="Dispose"/>
	/// method leaves the <see cref="OVRNativeBuffer"/> in an unusable state. After calling <see cref="Dispose"/>, you must
	/// release all references to the <see cref="OVRNativeBuffer"/> so the garbage collector can reclaim the memory that
	/// the <see cref="OVRNativeBuffer"/> was occupying.</remarks>
	public void Dispose()
	{
		Dispose(true);
		GC.SuppressFinalize(this);
	}

	// Standard dispose pattern: unmanaged memory is freed whether we are called
	// from Dispose() or from the finalizer; repeated calls are no-ops.
	private void Dispose(bool disposing)
	{
		if (disposed)
			return;

		if (disposing)
		{
			// dispose managed resources
		}

		// dispose unmanaged resources
		Release();

		disposed = true;
	}

	// Frees any current allocation, then allocates numBytes of unmanaged memory
	// (no allocation is made for a non-positive size).
	private void Reallocate(int numBytes)
	{
		Release();

		if (numBytes <= 0)
			return;

		m_ptr = Marshal.AllocHGlobal(numBytes);
		m_numBytes = numBytes;
	}

	// Frees the unmanaged allocation, if any, and resets the buffer to empty.
	private void Release()
	{
		if (m_ptr == IntPtr.Zero)
			return;

		Marshal.FreeHGlobal(m_ptr);
		m_ptr = IntPtr.Zero;
		m_numBytes = 0;
	}
}
|
||||
8
Assets/Oculus/VR/Scripts/OVRCommon.cs.meta
Normal file
8
Assets/Oculus/VR/Scripts/OVRCommon.cs.meta
Normal file
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 176f8d665b1d78048b1e87956698df6b
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
135
Assets/Oculus/VR/Scripts/OVRDebugHeadController.cs
Normal file
135
Assets/Oculus/VR/Scripts/OVRDebugHeadController.cs
Normal file
@@ -0,0 +1,135 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
#if USING_XR_MANAGEMENT && USING_XR_SDK_OCULUS
|
||||
#define USING_XR_SDK
|
||||
#endif
|
||||
|
||||
#if UNITY_2020_1_OR_NEWER
|
||||
#define REQUIRES_XR_SDK
|
||||
#endif
|
||||
|
||||
using UnityEngine;
|
||||
using System.Collections;
|
||||
|
||||
#if USING_XR_SDK
|
||||
using UnityEngine.XR;
|
||||
using UnityEngine.Experimental.XR;
|
||||
#endif
|
||||
|
||||
/// <summary>
/// This is a simple behavior that can be attached to a parent of the CameraRig in order
/// to provide movement via the gamepad. This is useful when testing an application in
/// the Unity editor without the HMD.
/// To use it, create a game object in your scene and drag your CameraRig to be a child
/// of the game object. Then, add the OVRDebugHeadController behavior to the game object.
/// Alternatively, this behavior can be placed directly on the OVRCameraRig object, but
/// that is not guaranteed to work if OVRCameraRig functionality changes in the future.
/// In the parent case, the object with OVRDebugHeadController can be thought of as a
/// platform that your camera is attached to. When the platform moves or rotates, the
/// camera moves or rotates, but the camera can still move independently while "on" the
/// platform.
/// In general, this behavior should be disabled when not debugging.
/// </summary>
public class OVRDebugHeadController : MonoBehaviour
{
	[SerializeField]
	public bool AllowPitchLook = false;
	[SerializeField]
	public bool AllowYawLook = true;
	[SerializeField]
	public bool InvertPitch = false;
	[SerializeField]
	public float GamePad_PitchDegreesPerSec = 90.0f;
	[SerializeField]
	public float GamePad_YawDegreesPerSec = 90.0f;
	[SerializeField]
	public bool AllowMovement = false;
	[SerializeField]
	public float ForwardSpeed = 2.0f;
	[SerializeField]
	public float StrafeSpeed = 2.0f;

	protected OVRCameraRig CameraRig = null;

	void Awake()
	{
		// locate the camera rig so we can use it to get the current camera transform each frame
		OVRCameraRig[] CameraRigs = gameObject.GetComponentsInChildren<OVRCameraRig>();

		if( CameraRigs.Length == 0 )
			Debug.LogWarning("OVRCamParent: No OVRCameraRig attached.");
		else if (CameraRigs.Length > 1)
			// Fixed typo in the warning text ("More then" -> "More than").
			Debug.LogWarning("OVRCamParent: More than 1 OVRCameraRig attached.");
		else
			CameraRig = CameraRigs[0];
	}

	// Use this for initialization
	void Start ()
	{

	}

	// Update is called once per frame
	void Update ()
	{
		// Translate the platform along the current center-eye forward/right vectors
		// using the left thumbstick.
		if ( AllowMovement )
		{
			float gamePad_FwdAxis = OVRInput.Get(OVRInput.RawAxis2D.LThumbstick).y;
			float gamePad_StrafeAxis = OVRInput.Get(OVRInput.RawAxis2D.LThumbstick).x;

			Vector3 fwdMove = ( CameraRig.centerEyeAnchor.rotation * Vector3.forward ) * gamePad_FwdAxis * Time.deltaTime * ForwardSpeed;
			Vector3 strafeMove = ( CameraRig.centerEyeAnchor.rotation * Vector3.right ) * gamePad_StrafeAxis * Time.deltaTime * StrafeSpeed;
			transform.position += fwdMove + strafeMove;
		}

		bool hasDevice = false;
#if USING_XR_SDK
		XRDisplaySubsystem currentDisplaySubsystem = OVRManager.GetCurrentDisplaySubsystem();
		if (currentDisplaySubsystem != null)
			hasDevice = currentDisplaySubsystem.running;
#elif REQUIRES_XR_SDK
		hasDevice = false;
#else
		hasDevice = UnityEngine.XR.XRDevice.isPresent;
#endif

		// Gamepad look is only applied when no HMD is driving the rotation.
		if ( !hasDevice && ( AllowYawLook || AllowPitchLook ) )
		{
			Quaternion r = transform.rotation;
			if ( AllowYawLook )
			{
				float gamePadYaw = OVRInput.Get(OVRInput.RawAxis2D.RThumbstick).x;
				float yawAmount = gamePadYaw * Time.deltaTime * GamePad_YawDegreesPerSec;
				Quaternion yawRot = Quaternion.AngleAxis( yawAmount, Vector3.up );
				// Yaw is pre-multiplied so it is applied in world space.
				r = yawRot * r;
			}
			if ( AllowPitchLook )
			{
				float gamePadPitch = OVRInput.Get(OVRInput.RawAxis2D.RThumbstick).y;
				if ( Mathf.Abs( gamePadPitch ) > 0.0001f )
				{
					if ( InvertPitch )
					{
						gamePadPitch *= -1.0f;
					}
					float pitchAmount = gamePadPitch * Time.deltaTime * GamePad_PitchDegreesPerSec;
					Quaternion pitchRot = Quaternion.AngleAxis( pitchAmount, Vector3.left );
					// Pitch is post-multiplied so it is applied in local space.
					r = r * pitchRot;
				}
			}

			transform.rotation = r;
		}
	}
}
|
||||
12
Assets/Oculus/VR/Scripts/OVRDebugHeadController.cs.meta
Normal file
12
Assets/Oculus/VR/Scripts/OVRDebugHeadController.cs.meta
Normal file
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 563681618daa71b4c89f979b1fd7170b
|
||||
timeCreated: 1433450365
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
386
Assets/Oculus/VR/Scripts/OVRDisplay.cs
Normal file
386
Assets/Oculus/VR/Scripts/OVRDisplay.cs
Normal file
@@ -0,0 +1,386 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
#if USING_XR_MANAGEMENT && USING_XR_SDK_OCULUS
|
||||
#define USING_XR_SDK
|
||||
#endif
|
||||
|
||||
#if UNITY_2020_1_OR_NEWER
|
||||
#define REQUIRES_XR_SDK
|
||||
#endif
|
||||
|
||||
using System;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Text.RegularExpressions;
|
||||
using UnityEngine;
|
||||
using System.Collections.Generic;
|
||||
|
||||
#if USING_XR_SDK
|
||||
using UnityEngine.XR;
|
||||
using UnityEngine.Experimental.XR;
|
||||
#endif
|
||||
|
||||
using InputTracking = UnityEngine.XR.InputTracking;
|
||||
using Node = UnityEngine.XR.XRNode;
|
||||
using Settings = UnityEngine.XR.XRSettings;
|
||||
|
||||
/// <summary>
/// Manages an Oculus Rift head-mounted display (HMD).
/// </summary>
public class OVRDisplay
{
	/// <summary>
	/// Contains full fov information per eye
	/// Under Symmetric Fov mode, UpFov == DownFov and LeftFov == RightFov.
	/// </summary>
	public struct EyeFov
	{
		public float UpFov;
		public float DownFov;
		public float LeftFov;
		public float RightFov;
	}

	/// <summary>
	/// Specifies the size and field-of-view for one eye texture.
	/// </summary>
	public struct EyeRenderDesc
	{
		/// <summary>
		/// The horizontal and vertical size of the texture.
		/// </summary>
		public Vector2 resolution;

		/// <summary>
		/// The angle of the horizontal and vertical field of view in degrees.
		/// For Symmetric FOV interface compatibility
		/// Note this includes the fov angle from both sides
		/// </summary>
		public Vector2 fov;

		/// <summary>
		/// The full information of field of view in degrees.
		/// When Asymmetric FOV isn't enabled, this returns the maximum fov angle
		/// </summary>
		public EyeFov fullFov;
	}

	/// <summary>
	/// Contains latency measurements for a single frame of rendering.
	/// </summary>
	public struct LatencyData
	{
		/// <summary>
		/// The time it took to render both eyes in seconds.
		/// </summary>
		public float render;

		/// <summary>
		/// The time it took to perform TimeWarp in seconds.
		/// </summary>
		public float timeWarp;

		/// <summary>
		/// The time between the end of TimeWarp and scan-out in seconds.
		/// </summary>
		public float postPresent;
		public float renderError;
		public float timeWarpError;
	}

	// NOTE(review): never read or written within this class; retained for
	// source/serialization compatibility — candidate for removal.
	private bool needsConfigureTexture;
	private EyeRenderDesc[] eyeDescs = new EyeRenderDesc[2];
	private bool recenterRequested = false;
	private int recenterRequestedFrameCount = int.MaxValue;
	private int localTrackingSpaceRecenterCount = 0;

	/// <summary>
	/// Creates an instance of OVRDisplay. Called by OVRManager.
	/// </summary>
	public OVRDisplay()
	{
		UpdateTextures();
	}

	/// <summary>
	/// Updates the internal state of the OVRDisplay. Called by OVRManager.
	/// </summary>
	public void Update()
	{
		UpdateTextures();

		// A requested recenter is reported one frame late so subscribers observe
		// the already-updated poses (see RecenterPose()).
		if (recenterRequested && Time.frameCount > recenterRequestedFrameCount)
		{
			Debug.Log("Recenter event detected");
			if (RecenteredPose != null)
			{
				RecenteredPose();
			}
			recenterRequested = false;
			recenterRequestedFrameCount = int.MaxValue;
		}

		if (OVRPlugin.GetSystemHeadsetType() >= OVRPlugin.SystemHeadset.Oculus_Quest &&
			OVRPlugin.GetSystemHeadsetType() < OVRPlugin.SystemHeadset.Rift_DK1) // all Oculus Standalone headsets
		{
			// System-initiated recenters are detected by polling the plugin's counter.
			int recenterCount = OVRPlugin.GetLocalTrackingSpaceRecenterCount();
			if (localTrackingSpaceRecenterCount != recenterCount)
			{
				Debug.Log("Recenter event detected");
				if (RecenteredPose != null)
				{
					RecenteredPose();
				}
				localTrackingSpaceRecenterCount = recenterCount;
			}
		}
	}

	/// <summary>
	/// Occurs when the head pose is reset.
	/// </summary>
	public event System.Action RecenteredPose;

	/// <summary>
	/// Recenters the head pose.
	/// </summary>
	public void RecenterPose()
	{
#if USING_XR_SDK
		XRInputSubsystem currentInputSubsystem = OVRManager.GetCurrentInputSubsystem();
		if (currentInputSubsystem != null)
		{
			currentInputSubsystem.TryRecenter();
		}
#elif !REQUIRES_XR_SDK
#pragma warning disable 618
		InputTracking.Recenter();
#pragma warning restore 618
#endif

		// The current poses are cached for the current frame and won't be updated immediately
		// after UnityEngine.VR.InputTracking.Recenter(). So we need to wait until next frame
		// to trigger the RecenteredPose delegate. The application could expect the correct pose
		// when the RecenteredPose delegate get called.
		recenterRequested = true;
		recenterRequestedFrameCount = Time.frameCount;

#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
		OVRMixedReality.RecenterPose();
#endif
	}

	/// <summary>
	/// Gets the current linear acceleration of the head.
	/// </summary>
	public Vector3 acceleration
	{
		get
		{
			if (!OVRManager.isHmdPresent)
				return Vector3.zero;

			Vector3 retVec = Vector3.zero;
			if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Acceleration, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
				return retVec;
			return Vector3.zero;
		}
	}

	/// <summary>
	/// Gets the current angular acceleration of the head in radians per second per second about each axis.
	/// </summary>
	public Vector3 angularAcceleration
	{
		get
		{
			if (!OVRManager.isHmdPresent)
				return Vector3.zero;

			Vector3 retVec = Vector3.zero;
			if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.AngularAcceleration, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
				return retVec;
			return Vector3.zero;
		}
	}

	/// <summary>
	/// Gets the current linear velocity of the head in meters per second.
	/// </summary>
	public Vector3 velocity
	{
		get
		{
			if (!OVRManager.isHmdPresent)
				return Vector3.zero;

			Vector3 retVec = Vector3.zero;
			if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Velocity, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
				return retVec;
			return Vector3.zero;
		}
	}

	/// <summary>
	/// Gets the current angular velocity of the head in radians per second about each axis.
	/// </summary>
	public Vector3 angularVelocity
	{
		get
		{
			if (!OVRManager.isHmdPresent)
				return Vector3.zero;

			Vector3 retVec = Vector3.zero;
			if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.AngularVelocity, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
				return retVec;
			return Vector3.zero;
		}
	}

	/// <summary>
	/// Gets the resolution and field of view for the given eye.
	/// </summary>
	public EyeRenderDesc GetEyeRenderDesc(UnityEngine.XR.XRNode eye)
	{
		return eyeDescs[(int)eye];
	}

	/// <summary>
	/// Gets the current measured latency values.
	/// </summary>
	public LatencyData latency
	{
		get
		{
			if (!OVRManager.isHmdPresent)
				return new LatencyData();

			string latency = OVRPlugin.latency;

			var r = new Regex("Render: ([0-9]+[.][0-9]+)ms, TimeWarp: ([0-9]+[.][0-9]+)ms, PostPresent: ([0-9]+[.][0-9]+)ms", RegexOptions.None);

			var ret = new LatencyData();

			Match match = r.Match(latency);
			if (match.Success)
			{
				// Fix: parse with InvariantCulture. The captured values always use '.'
				// as the decimal separator, but culture-sensitive float.Parse mis-parses
				// or throws on systems whose locale uses ',' as the separator.
				ret.render = float.Parse(match.Groups[1].Value, System.Globalization.CultureInfo.InvariantCulture);
				ret.timeWarp = float.Parse(match.Groups[2].Value, System.Globalization.CultureInfo.InvariantCulture);
				ret.postPresent = float.Parse(match.Groups[3].Value, System.Globalization.CultureInfo.InvariantCulture);
			}

			return ret;
		}
	}

	/// <summary>
	/// Gets application's frame rate reported by oculus plugin
	/// </summary>
	public float appFramerate
	{
		get
		{
			if (!OVRManager.isHmdPresent)
				return 0;

			return OVRPlugin.GetAppFramerate();
		}
	}

	/// <summary>
	/// Gets the recommended MSAA level for optimal quality/performance the current device.
	/// </summary>
	public int recommendedMSAALevel
	{
		get
		{
			int result = OVRPlugin.recommendedMSAALevel;

			// Unity treats 0 (not 1) as "MSAA disabled".
			if (result == 1)
				result = 0;

			return result;
		}
	}

	/// <summary>
	/// Gets the list of available display frequencies supported by this hardware.
	/// </summary>
	public float[] displayFrequenciesAvailable
	{
		get { return OVRPlugin.systemDisplayFrequenciesAvailable; }
	}

	/// <summary>
	/// Gets and sets the current display frequency.
	/// </summary>
	public float displayFrequency
	{
		get
		{
			return OVRPlugin.systemDisplayFrequency;
		}
		set
		{
			OVRPlugin.systemDisplayFrequency = value;
		}
	}

	// Refreshes the cached per-eye render descriptions.
	private void UpdateTextures()
	{
		ConfigureEyeDesc(Node.LeftEye);
		ConfigureEyeDesc(Node.RightEye);
	}

	// Populates eyeDescs[eye] with the current texture resolution and fov,
	// preferring the newer Frustum2 API and falling back to the legacy frustum.
	private void ConfigureEyeDesc(Node eye)
	{
		if (!OVRManager.isHmdPresent)
			return;

		int eyeTextureWidth = Settings.eyeTextureWidth;
		int eyeTextureHeight = Settings.eyeTextureHeight;

		eyeDescs[(int)eye] = new EyeRenderDesc();
		eyeDescs[(int)eye].resolution = new Vector2(eyeTextureWidth, eyeTextureHeight);

		OVRPlugin.Frustumf2 frust;
		if (OVRPlugin.GetNodeFrustum2((OVRPlugin.Node)eye, out frust))
		{
			eyeDescs[(int)eye].fullFov.LeftFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.LeftTan);
			eyeDescs[(int)eye].fullFov.RightFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.RightTan);
			eyeDescs[(int)eye].fullFov.UpFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.UpTan);
			eyeDescs[(int)eye].fullFov.DownFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.DownTan);
		}
		else
		{
			// Legacy frustum reports full symmetric angles; halve to per-side fovs.
			OVRPlugin.Frustumf frustOld = OVRPlugin.GetEyeFrustum((OVRPlugin.Eye)eye);
			eyeDescs[(int)eye].fullFov.LeftFov = Mathf.Rad2Deg * frustOld.fovX * 0.5f;
			eyeDescs[(int)eye].fullFov.RightFov = Mathf.Rad2Deg * frustOld.fovX * 0.5f;
			eyeDescs[(int)eye].fullFov.UpFov = Mathf.Rad2Deg * frustOld.fovY * 0.5f;
			eyeDescs[(int)eye].fullFov.DownFov = Mathf.Rad2Deg * frustOld.fovY * 0.5f;
		}

		// Symmetric Fov uses the maximum fov angle
		float maxFovX = Mathf.Max(eyeDescs[(int)eye].fullFov.LeftFov, eyeDescs[(int)eye].fullFov.RightFov);
		float maxFovY = Mathf.Max(eyeDescs[(int)eye].fullFov.UpFov, eyeDescs[(int)eye].fullFov.DownFov);
		eyeDescs[(int)eye].fov.x = maxFovX * 2.0f;
		eyeDescs[(int)eye].fov.y = maxFovY * 2.0f;

		if (!OVRPlugin.AsymmetricFovEnabled)
		{
			eyeDescs[(int)eye].fullFov.LeftFov = maxFovX;
			eyeDescs[(int)eye].fullFov.RightFov = maxFovX;

			eyeDescs[(int)eye].fullFov.UpFov = maxFovY;
			eyeDescs[(int)eye].fullFov.DownFov = maxFovY;
		}
	}
}
|
||||
8
Assets/Oculus/VR/Scripts/OVRDisplay.cs.meta
Normal file
8
Assets/Oculus/VR/Scripts/OVRDisplay.cs.meta
Normal file
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: bb365ebe8e821fc4e81e9dca9d704357
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
434
Assets/Oculus/VR/Scripts/OVRGLTFAccessor.cs
Normal file
434
Assets/Oculus/VR/Scripts/OVRGLTFAccessor.cs
Normal file
@@ -0,0 +1,434 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System;
|
||||
using UnityEngine;
|
||||
using OVRSimpleJSON;
|
||||
using Unity.Jobs;
|
||||
using Unity.Collections;
|
||||
|
||||
/// <summary>
/// The element type of a glTF accessor ("type" property), e.g. a scalar,
/// a 2/3/4-component vector, or a 4x4 matrix. NONE marks an unrecognized type.
/// </summary>
public enum OVRGLTFType
{
	NONE,
	SCALAR,
	VEC2,
	VEC3,
	VEC4,
	MAT4,
}
|
||||
|
||||
/// <summary>
/// The component type of a glTF accessor ("componentType" property).
/// The numeric values are fixed by the glTF 2.0 specification and must not change.
/// </summary>
public enum OVRGLTFComponentType
{
	BYTE = 5120,
	UNSIGNED_BYTE = 5121,
	SHORT = 5122,
	UNSIGNED_SHORT = 5123,
	UNSIGNED_INT = 5125,
	FLOAT = 5126,
}
|
||||
|
||||
public class OVRGLTFAccessor
|
||||
{
|
||||
// Buffer View parameters
|
||||
private int byteOffset;
|
||||
private int byteLength;
|
||||
private int byteStride;
|
||||
private int bufferId;
|
||||
private int bufferLength;
|
||||
|
||||
// Accessor parameters
|
||||
private int additionalOffset;
|
||||
private OVRGLTFType dataType;
|
||||
private OVRGLTFComponentType componentType;
|
||||
private int dataCount;
|
||||
|
||||
public OVRGLTFAccessor(JSONNode node, JSONNode root, bool bufferViewOnly = false)
|
||||
{
|
||||
JSONNode jsonBufferView = node;
|
||||
if (!bufferViewOnly)
|
||||
{
|
||||
additionalOffset = node["byteOffset"].AsInt;
|
||||
dataType = ToOVRType(node["type"].Value);
|
||||
componentType = (OVRGLTFComponentType)node["componentType"].AsInt;
|
||||
dataCount = node["count"].AsInt;
|
||||
|
||||
int bufferViewId = node["bufferView"].AsInt;
|
||||
jsonBufferView = root["bufferViews"][bufferViewId];
|
||||
}
|
||||
|
||||
int bufferId = jsonBufferView["buffer"].AsInt;
|
||||
byteOffset = jsonBufferView["byteOffset"].AsInt;
|
||||
byteLength = jsonBufferView["byteLength"].AsInt;
|
||||
byteStride = jsonBufferView["byteStride"].AsInt;
|
||||
|
||||
var jsonBuffer = root["buffers"][bufferId];
|
||||
bufferLength = jsonBuffer["byteLength"].AsInt;
|
||||
}
|
||||
|
||||
public int GetDataCount()
|
||||
{
|
||||
return dataCount;
|
||||
}
|
||||
|
||||
private static OVRGLTFType ToOVRType(string type)
|
||||
{
|
||||
switch(type)
|
||||
{
|
||||
case "SCALAR":
|
||||
return OVRGLTFType.SCALAR;
|
||||
case "VEC2":
|
||||
return OVRGLTFType.VEC2;
|
||||
case "VEC3":
|
||||
return OVRGLTFType.VEC3;
|
||||
case "VEC4":
|
||||
return OVRGLTFType.VEC4;
|
||||
case "MAT4":
|
||||
return OVRGLTFType.MAT4;
|
||||
default:
|
||||
Debug.LogError("Unsupported accessor type.");
|
||||
return OVRGLTFType.NONE;
|
||||
}
|
||||
}
|
||||
|
||||
public void ReadAsInt(OVRBinaryChunk chunk, ref int[] data, int offset)
|
||||
{
|
||||
if (dataType != OVRGLTFType.SCALAR)
|
||||
{
|
||||
Debug.LogError("Tried to read non-scalar data as a uint array.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (chunk.chunkLength != bufferLength)
|
||||
{
|
||||
Debug.LogError("Chunk length is not equal to buffer length.");
|
||||
return;
|
||||
}
|
||||
|
||||
byte[] bufferData = new byte[byteLength];
|
||||
|
||||
chunk.chunkStream.Seek(chunk.chunkStart + byteOffset + additionalOffset, SeekOrigin.Begin);
|
||||
chunk.chunkStream.Read(bufferData, 0, byteLength);
|
||||
|
||||
int stride = byteStride > 0 ? byteStride : GetStrideForType(componentType);
|
||||
for(int i = 0; i < dataCount; i++)
|
||||
{
|
||||
data[offset + i] = (int)ReadElementAsUint(bufferData, i * stride, componentType);
|
||||
}
|
||||
}
|
||||
|
||||
public void ReadAsVector2(OVRBinaryChunk chunk, ref Vector2[] data, int offset)
|
||||
{
|
||||
if (dataType != OVRGLTFType.VEC2)
|
||||
{
|
||||
Debug.LogError("Tried to read non-vec3 data as a vec2 array.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (chunk.chunkLength != bufferLength)
|
||||
{
|
||||
Debug.LogError("Chunk length is not equal to buffer length.");
|
||||
return;
|
||||
}
|
||||
|
||||
byte[] bufferData = new byte[byteLength];
|
||||
|
||||
chunk.chunkStream.Seek(chunk.chunkStart + byteOffset + additionalOffset, SeekOrigin.Begin);
|
||||
chunk.chunkStream.Read(bufferData, 0, byteLength);
|
||||
|
||||
int dataTypeSize = GetStrideForType(componentType);
|
||||
int stride = byteStride > 0 ? byteStride : dataTypeSize * 2;
|
||||
for (int i = 0; i < dataCount; i++)
|
||||
{
|
||||
if (componentType == OVRGLTFComponentType.FLOAT)
|
||||
{
|
||||
data[offset + i].x = ReadElementAsFloat(bufferData, i * stride);
|
||||
data[offset + i].y = ReadElementAsFloat(bufferData, i * stride + dataTypeSize);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Reads this accessor's buffer data as an array of Vector3 (positions, normals, ...),
/// applying a per-component scale used to convert glTF to Unity coordinate space.
/// </summary>
/// <param name="chunk">Binary chunk of the glb container holding the buffer data.</param>
/// <param name="data">Destination array, filled starting at <paramref name="offset"/>.</param>
/// <param name="offset">First index in <paramref name="data"/> to write.</param>
/// <param name="conversionScale">Component-wise scale applied to each element (e.g. (-1,1,1)).</param>
public void ReadAsVector3(OVRBinaryChunk chunk, ref Vector3[] data, int offset, Vector3 conversionScale)
{
    if (dataType != OVRGLTFType.VEC3)
    {
        Debug.LogError("Tried to read non-vec3 data as a vec3 array.");
        return;
    }

    if (chunk.chunkLength != bufferLength)
    {
        Debug.LogError("Chunk length is not equal to buffer length.");
        return;
    }

    byte[] bufferData = new byte[byteLength];

    chunk.chunkStream.Seek(chunk.chunkStart + byteOffset + additionalOffset, SeekOrigin.Begin);
    chunk.chunkStream.Read(bufferData, 0, byteLength);

    int dataTypeSize = GetStrideForType(componentType);
    // Tightly packed when no explicit byteStride: 3 components per element.
    int stride = byteStride > 0 ? byteStride : dataTypeSize * 3;
    for (int i = 0; i < dataCount; i++)
    {
        if (componentType == OVRGLTFComponentType.FLOAT)
        {
            data[offset + i].x = ReadElementAsFloat(bufferData, i * stride);
            data[offset + i].y = ReadElementAsFloat(bufferData, i * stride + dataTypeSize);
            data[offset + i].z = ReadElementAsFloat(bufferData, i * stride + dataTypeSize * 2);
        }
        else
        {
            // Integer component types are widened to uint and stored as floats unnormalized.
            data[offset + i].x = ReadElementAsUint(bufferData, i * stride, componentType);
            data[offset + i].y = ReadElementAsUint(bufferData, i * stride + dataTypeSize, componentType);
            data[offset + i].z = ReadElementAsUint(bufferData, i * stride + dataTypeSize * 2, componentType);
        }
        data[offset + i].Scale(conversionScale);
    }
}
|
||||
|
||||
/// <summary>
/// Reads this accessor's buffer data as an array of Vector4 (tangents, joint indices, ...),
/// applying a per-component scale used to convert glTF to Unity conventions.
/// </summary>
/// <param name="chunk">Binary chunk of the glb container holding the buffer data.</param>
/// <param name="data">Destination array, filled starting at <paramref name="offset"/>.</param>
/// <param name="offset">First index in <paramref name="data"/> to write.</param>
/// <param name="conversionScale">Component-wise scale applied to each element.</param>
public void ReadAsVector4(OVRBinaryChunk chunk, ref Vector4[] data, int offset, Vector4 conversionScale)
{
    if (dataType != OVRGLTFType.VEC4)
    {
        Debug.LogError("Tried to read non-vec4 data as a vec4 array.");
        return;
    }

    if (chunk.chunkLength != bufferLength)
    {
        Debug.LogError("Chunk length is not equal to buffer length.");
        return;
    }

    byte[] bufferData = new byte[byteLength];

    chunk.chunkStream.Seek(chunk.chunkStart + byteOffset + additionalOffset, SeekOrigin.Begin);
    chunk.chunkStream.Read(bufferData, 0, byteLength);

    int dataTypeSize = GetStrideForType(componentType);
    // Tightly packed when no explicit byteStride: 4 components per element.
    int stride = byteStride > 0 ? byteStride : dataTypeSize * 4;
    for (int i = 0; i < dataCount; i++)
    {
        if (componentType == OVRGLTFComponentType.FLOAT)
        {
            data[offset + i].x = ReadElementAsFloat(bufferData, i * stride);
            data[offset + i].y = ReadElementAsFloat(bufferData, i * stride + dataTypeSize);
            data[offset + i].z = ReadElementAsFloat(bufferData, i * stride + dataTypeSize * 2);
            data[offset + i].w = ReadElementAsFloat(bufferData, i * stride + dataTypeSize * 3);
        }
        else
        {
            // Integer component types are widened to uint and stored as floats unnormalized
            // (used e.g. for JOINTS_0 bone indices).
            data[offset + i].x = ReadElementAsUint(bufferData, i * stride, componentType);
            data[offset + i].y = ReadElementAsUint(bufferData, i * stride + dataTypeSize, componentType);
            data[offset + i].z = ReadElementAsUint(bufferData, i * stride + dataTypeSize * 2, componentType);
            data[offset + i].w = ReadElementAsUint(bufferData, i * stride + dataTypeSize * 3, componentType);
        }
        data[offset + i].Scale(conversionScale);
    }
}
|
||||
|
||||
/// <summary>
/// Reads this accessor's buffer data as an array of Color. Accepts VEC3 (RGB, alpha
/// forced to 1) or VEC4 (RGBA). Integer component types are normalized to [0, 1] by
/// dividing by the type's maximum value, per the glTF normalized-accessor convention.
/// </summary>
/// <param name="chunk">Binary chunk of the glb container holding the buffer data.</param>
/// <param name="data">Destination array, filled starting at <paramref name="offset"/>.</param>
/// <param name="offset">First index in <paramref name="data"/> to write.</param>
public void ReadAsColor(OVRBinaryChunk chunk, ref Color[] data, int offset)
{
    if (dataType != OVRGLTFType.VEC4 && dataType != OVRGLTFType.VEC3)
    {
        Debug.LogError("Tried to read non-color type as a color array.");
        return;
    }

    if (chunk.chunkLength != bufferLength)
    {
        Debug.LogError("Chunk length is not equal to buffer length.");
        return;
    }

    byte[] bufferData = new byte[byteLength];

    chunk.chunkStream.Seek(chunk.chunkStart + byteOffset + additionalOffset, SeekOrigin.Begin);
    chunk.chunkStream.Read(bufferData, 0, byteLength);

    int vecSize = dataType == OVRGLTFType.VEC3 ? 3 : 4;
    int dataTypeSize = GetStrideForType(componentType);
    int stride = byteStride > 0 ? byteStride : dataTypeSize * vecSize;
    // Divisor used to normalize integer components into [0, 1].
    float maxValue = GetMaxValueForType(componentType);
    for (int i = 0; i < dataCount; i++)
    {
        if (componentType == OVRGLTFComponentType.FLOAT)
        {
            data[offset + i].r = ReadElementAsFloat(bufferData, i * stride);
            data[offset + i].g = ReadElementAsFloat(bufferData, i * stride + dataTypeSize);
            data[offset + i].b = ReadElementAsFloat(bufferData, i * stride + dataTypeSize * 2);
            // VEC3 colors carry no alpha channel; default to opaque.
            data[offset + i].a = dataType == OVRGLTFType.VEC3 ? 1.0f : ReadElementAsFloat(bufferData, i * stride + dataTypeSize * 3);
        }
        else
        {
            data[offset + i].r = ReadElementAsUint(bufferData, i * stride, componentType) / maxValue;
            data[offset + i].g = ReadElementAsUint(bufferData, i * stride + dataTypeSize, componentType) / maxValue;
            data[offset + i].b = ReadElementAsUint(bufferData, i * stride + dataTypeSize * 2, componentType) / maxValue;
            data[offset + i].a = dataType == OVRGLTFType.VEC3 ? 1.0f : ReadElementAsUint(bufferData, i * stride + dataTypeSize * 3, componentType) / maxValue;
        }
    }
}
|
||||
|
||||
/// <summary>
/// Reads this accessor's buffer data as an array of Matrix4x4 (e.g. inverse bind matrices),
/// converting each matrix from glTF to Unity space via S * M * S, where S is the diagonal
/// scale matrix built from <paramref name="conversionScale"/> (S is its own inverse when
/// the components are +/-1, so this is a change of basis).
/// </summary>
/// <param name="chunk">Binary chunk of the glb container holding the buffer data.</param>
/// <param name="data">Destination array, filled starting at <paramref name="offset"/>.</param>
/// <param name="offset">First index in <paramref name="data"/> to write.</param>
/// <param name="conversionScale">Per-axis scale used for the coordinate-space conversion.</param>
public void ReadAsMatrix4x4(OVRBinaryChunk chunk, ref Matrix4x4[] data, int offset, Vector3 conversionScale)
{
    if (dataType != OVRGLTFType.MAT4)
    {
        // Fixed copy/paste error: this message previously said "non-vec3 ... vec3".
        Debug.LogError("Tried to read non-mat4 data as a mat4 array.");
        return;
    }

    if (chunk.chunkLength != bufferLength)
    {
        Debug.LogError("Chunk length is not equal to buffer length.");
        return;
    }

    byte[] bufferData = new byte[byteLength];

    chunk.chunkStream.Seek(chunk.chunkStart + byteOffset + additionalOffset, SeekOrigin.Begin);
    chunk.chunkStream.Read(bufferData, 0, byteLength);

    int dataTypeSize = GetStrideForType(componentType);
    // Tightly packed when no explicit byteStride: 16 components per matrix.
    int stride = byteStride > 0 ? byteStride : dataTypeSize * 16;

    Matrix4x4 scale = Matrix4x4.Scale(conversionScale);
    for (int i = 0; i < dataCount; i++)
    {
        // glTF matrices are column-major; Matrix4x4's indexer is also column-major,
        // so components can be copied straight through.
        for (int m = 0; m < 16; m++)
        {
            data[offset + i][m] = ReadElementAsFloat(bufferData, i * stride + dataTypeSize * m);
        }
        data[offset + i] = scale * data[offset + i] * scale;
    }
}
|
||||
|
||||
/// <summary>
/// Reads this buffer view's raw bytes (expected to contain a KTX2 texture) out of the
/// binary chunk without any decoding.
/// </summary>
/// <param name="chunk">Binary chunk of the glb container holding the buffer data.</param>
/// <returns>The raw texture bytes, or null if the chunk length does not match.</returns>
public byte[] ReadAsKtxTexture(OVRBinaryChunk chunk)
{
    if (chunk.chunkLength != bufferLength)
    {
        Debug.LogError("Chunk length is not equal to buffer length.");
        return null;
    }

    byte[] bufferData = new byte[byteLength];
    chunk.chunkStream.Seek(chunk.chunkStart + byteOffset + additionalOffset, SeekOrigin.Begin);
    chunk.chunkStream.Read(bufferData, 0, byteLength);

    return bufferData;
}
|
||||
|
||||
/// <summary>
/// Reads this accessor's buffer data as bone weights (VEC4 of FLOAT), renormalizing each
/// element so its four weights sum to 1, as Unity's BoneWeight expects. Elements whose
/// weights sum to ~0 are left as-is to avoid division by zero.
/// </summary>
/// <param name="chunk">Binary chunk of the glb container holding the buffer data.</param>
/// <param name="data">Destination array, filled starting at <paramref name="offset"/>.</param>
/// <param name="offset">First index in <paramref name="data"/> to write.</param>
public void ReadAsBoneWeights(OVRBinaryChunk chunk, ref Vector4[] data, int offset)
{
    if (dataType != OVRGLTFType.VEC4)
    {
        Debug.LogError("Tried to read bone weights data as a non-vec4 array.");
        return;
    }

    if (chunk.chunkLength != bufferLength)
    {
        Debug.LogError("Chunk length is not equal to buffer length.");
        return;
    }

    byte[] bufferData = new byte[byteLength];

    chunk.chunkStream.Seek(chunk.chunkStart + byteOffset + additionalOffset, SeekOrigin.Begin);
    chunk.chunkStream.Read(bufferData, 0, byteLength);

    int dataTypeSize = GetStrideForType(componentType);
    // Tightly packed when no explicit byteStride: 4 components per element.
    // NOTE(review): only FLOAT weights are handled here; normalized ubyte/ushort
    // weights (allowed by glTF) would be read as garbage floats -- confirm the
    // runtime models only emit FLOAT weights.
    int stride = byteStride > 0 ? byteStride : dataTypeSize * 4;
    for (int i = 0; i < dataCount; i++)
    {
        data[offset + i].x = ReadElementAsFloat(bufferData, i * stride);
        data[offset + i].y = ReadElementAsFloat(bufferData, i * stride + dataTypeSize);
        data[offset + i].z = ReadElementAsFloat(bufferData, i * stride + dataTypeSize * 2);
        data[offset + i].w = ReadElementAsFloat(bufferData, i * stride + dataTypeSize * 3);

        // Renormalize so weights sum to 1 (skip all-zero elements).
        float weightSum = data[offset + i].x + data[offset + i].y + data[offset + i].z + data[offset + i].w;
        if (!Mathf.Approximately(weightSum, 0))
        {
            data[offset + i] /= weightSum;
        }
    }
}
|
||||
|
||||
/// <summary>
/// Returns the size in bytes of one component of the given glTF component type,
/// or 0 for an unrecognized type.
/// </summary>
private int GetStrideForType(OVRGLTFComponentType type)
{
    switch (type)
    {
        case OVRGLTFComponentType.BYTE: return sizeof(sbyte);
        case OVRGLTFComponentType.UNSIGNED_BYTE: return sizeof(byte);
        case OVRGLTFComponentType.SHORT: return sizeof(short);
        case OVRGLTFComponentType.UNSIGNED_SHORT: return sizeof(ushort);
        case OVRGLTFComponentType.UNSIGNED_INT: return sizeof(uint);
        case OVRGLTFComponentType.FLOAT: return sizeof(float);
        default: return 0;
    }
}
|
||||
|
||||
/// <summary>
/// Returns the maximum representable value of the given glTF component type as a float
/// (used as the normalization divisor for integer color components), or 0 for an
/// unrecognized type.
/// </summary>
private float GetMaxValueForType(OVRGLTFComponentType type)
{
    switch (type)
    {
        case OVRGLTFComponentType.BYTE: return sbyte.MaxValue;
        case OVRGLTFComponentType.UNSIGNED_BYTE: return byte.MaxValue;
        case OVRGLTFComponentType.SHORT: return short.MaxValue;
        case OVRGLTFComponentType.UNSIGNED_SHORT: return ushort.MaxValue;
        case OVRGLTFComponentType.UNSIGNED_INT: return uint.MaxValue;
        case OVRGLTFComponentType.FLOAT: return float.MaxValue;
        default: return 0;
    }
}
|
||||
|
||||
/// <summary>
/// Decodes a single integer component at the given byte index and widens it to uint.
/// Signed types are converted with an unchecked cast, so negative values wrap to large
/// uint values. Returns 0 (with a log message) for FLOAT or unknown types.
/// </summary>
/// <param name="data">Raw buffer bytes.</param>
/// <param name="index">Byte offset of the component within <paramref name="data"/>.</param>
/// <param name="type">glTF component type describing the encoding.</param>
private uint ReadElementAsUint(byte[] data, int index, OVRGLTFComponentType type)
{
    switch(type)
    {
        case OVRGLTFComponentType.BYTE:
            return (uint)Convert.ToSByte(data[index]);
        case OVRGLTFComponentType.UNSIGNED_BYTE:
            return data[index];
        case OVRGLTFComponentType.SHORT:
            return (uint)BitConverter.ToInt16(data, index);
        case OVRGLTFComponentType.UNSIGNED_SHORT:
            return BitConverter.ToUInt16(data, index);
        case OVRGLTFComponentType.UNSIGNED_INT:
            return BitConverter.ToUInt32(data, index);
        default:
            Debug.Log(String.Format("Failed to read Component Type {0} as a uint.", type));
            return 0;
    }
}
|
||||
|
||||
/// <summary>
/// Decodes a little-endian 32-bit float at the given byte index of the raw buffer.
/// </summary>
private float ReadElementAsFloat(byte[] data, int index)
{
    return BitConverter.ToSingle(data, index);
}
|
||||
}
|
||||
11
Assets/Oculus/VR/Scripts/OVRGLTFAccessor.cs.meta
Normal file
11
Assets/Oculus/VR/Scripts/OVRGLTFAccessor.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 28f1513cee5118748b26bb8b0ca56397
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
628
Assets/Oculus/VR/Scripts/OVRGLTFLoader.cs
Normal file
628
Assets/Oculus/VR/Scripts/OVRGLTFLoader.cs
Normal file
@@ -0,0 +1,628 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System;
|
||||
using UnityEngine;
|
||||
using OVRSimpleJSON;
|
||||
|
||||
using System.Threading.Tasks;
|
||||
|
||||
/// <summary>
|
||||
/// This is a lightweight glTF model loader that is guaranteed to work with models loaded from the Oculus runtime
|
||||
/// using OVRPlugin.LoadRenderModel. It is not recommended to be used as a general purpose glTF loader.
|
||||
/// </summary>
|
||||
|
||||
/// <summary>
/// Chunk type identifiers from the glb container format. Values are the little-endian
/// fourCC codes "JSON" and "BIN\0" read as uint32.
/// </summary>
public enum OVRChunkType
{
    JSON = 0x4E4F534A,
    BIN = 0x004E4942,
}
|
||||
|
||||
/// <summary>
/// Source encoding of a texture embedded in a glb. Only KTX2 is actually transcoded
/// by this loader; PNG/JPEG are placeholders.
/// </summary>
public enum OVRTextureFormat
{
    NONE,
    KTX2,
    PNG,
    JPEG,
}
|
||||
|
||||
/// <summary>
/// Locates the BIN chunk of a glb inside its source stream: the stream itself, the
/// chunk's payload length in bytes, and the absolute stream position where it starts.
/// </summary>
public struct OVRBinaryChunk
{
    public Stream chunkStream;
    public uint chunkLength;
    public long chunkStart;
}
|
||||
|
||||
/// <summary>
/// A Unity mesh built from one glTF mesh, paired with the material created for it
/// (material may be null when the primitive had none).
/// </summary>
public struct OVRMeshData
{
    public Mesh mesh;
    public Material material;
}
|
||||
|
||||
/// <summary>
/// Intermediate material description: the shader to instantiate, the glTF texture index
/// it samples, and the (possibly transcoded) texture payload.
/// </summary>
public struct OVRMaterialData
{
    public Shader shader;
    public int textureId;
    public OVRTextureData texture;
}
|
||||
|
||||
/// <summary>
/// Result of loading a glb: the root GameObject plus one GameObject per glTF node
/// (indexed by node id).
/// </summary>
public struct OVRGLTFScene
{
    public GameObject root;
    public List<GameObject> nodes;
}
|
||||
|
||||
/// <summary>
/// Texture payload extracted from the glb. Before transcoding, <c>data</c> holds the raw
/// source bytes and <c>format</c> its encoding; after transcoding, <c>data</c> holds raw
/// GPU-ready bytes in <c>transcodedFormat</c> at the given dimensions.
/// </summary>
public struct OVRTextureData
{
    public byte[] data;
    public int width;
    public int height;
    public OVRTextureFormat format;
    public TextureFormat transcodedFormat;
}
|
||||
|
||||
/// <summary>
/// Lightweight loader for binary glTF (.glb) models, guaranteed to work with models
/// returned by the Oculus runtime (OVRPlugin.LoadRenderModel). Not a general-purpose
/// glTF loader: only the first scene is loaded and only KTX2 textures are supported.
/// </summary>
public class OVRGLTFLoader
{
    private JSONNode m_jsonData;            // Parsed JSON chunk of the glb.
    private Stream m_glbStream;             // Source stream of the whole glb container.
    private OVRBinaryChunk m_binaryChunk;   // Location of the BIN chunk inside m_glbStream.

    // One GameObject per glTF node, indexed by node id.
    private List<GameObject> m_Nodes;

    // glTF is right-handed, Unity left-handed: mirror the X axis.
    private static readonly Vector3 GLTFToUnitySpace = new Vector3(-1, 1, 1);
    // BUGFIX: this field was declared Vector3 while initialized from a Vector4; Unity's
    // implicit Vector4->Vector3 conversion silently dropped the -1 w component (tangent
    // handedness), and the Vector3->Vector4 widening at the ReadAsVector4 call re-added
    // w = 0, zeroing every tangent's handedness. Declaring it Vector4 preserves w = -1.
    private static readonly Vector4 GLTFToUnityTangent = new Vector4(-1, 1, 1, -1);

    private Shader m_Shader = null;

    /// <summary>Creates a loader that reads the glb from a file on disk.</summary>
    public OVRGLTFLoader(string fileName)
    {
        m_glbStream = File.Open(fileName, FileMode.Open);
    }

    /// <summary>Creates a loader that reads the glb from an in-memory byte array.</summary>
    public OVRGLTFLoader(byte[] data)
    {
        // Read-only stream; publiclyVisible so the underlying buffer stays accessible.
        m_glbStream = new MemoryStream(data, 0, data.Length, false, true);
    }

    /// <summary>
    /// Parses and instantiates the glb. Closes the source stream before returning.
    /// </summary>
    /// <param name="loadMips">Whether created textures allocate a mip chain.</param>
    /// <returns>The created scene; its root is rotated 180 degrees around Y to face Unity forward.</returns>
    public OVRGLTFScene LoadGLB(bool loadMips = true)
    {
        OVRGLTFScene scene = new OVRGLTFScene();
        m_Nodes = new List<GameObject>();

        if (ValidateGLB(m_glbStream))
        {
            // First chunk must be JSON, second must be BIN (glb layout).
            byte[] jsonChunkData = ReadChunk(m_glbStream, OVRChunkType.JSON);
            if (jsonChunkData != null)
            {
                string json = System.Text.Encoding.ASCII.GetString(jsonChunkData);
                m_jsonData = JSON.Parse(json);
            }

            uint binChunkLength = 0;
            bool validBinChunk = ValidateChunk(m_glbStream, OVRChunkType.BIN, out binChunkLength);
            if (validBinChunk && m_jsonData != null)
            {
                m_binaryChunk.chunkLength = binChunkLength;
                m_binaryChunk.chunkStart = m_glbStream.Position;
                m_binaryChunk.chunkStream = m_glbStream;

                if (m_Shader == null)
                {
                    Debug.LogWarning("A shader was not set before loading the model. Using default mobile shader.");
                    m_Shader = Shader.Find("Legacy Shaders/Diffuse");
                }

                LoadGLTF(loadMips);
            }
        }
        m_glbStream.Close();

        scene.nodes = m_Nodes;
        // NOTE(review): assumes node 0 is the scene root; would throw on an empty/invalid
        // glb. Holds for runtime-provided render models -- confirm for other inputs.
        scene.root = m_Nodes[0];

        // glTF forward is +Z while Unity forward is -Z; flip the model to face forward.
        scene.root.transform.Rotate(Vector3.up, 180.0f);

        return scene;
    }

    /// <summary>Sets the shader used for all materials. Must be called before LoadGLB.</summary>
    public void SetModelShader(Shader shader)
    {
        m_Shader = shader;
    }

    /// <summary>
    /// Validates the 12-byte glb header: magic "glTF", version 2, and a total length
    /// matching the stream. Leaves the stream positioned at the first chunk.
    /// </summary>
    private bool ValidateGLB(Stream glbStream)
    {
        // Read the magic entry and ensure value matches the glTF value
        int uint32Size = sizeof(uint);
        byte[] buffer = new byte[uint32Size];
        glbStream.Read(buffer, 0, uint32Size);
        uint magic = BitConverter.ToUInt32(buffer, 0);

        if (magic != 0x46546C67) // "glTF" little-endian
        {
            Debug.LogError("Data stream was not a valid glTF format");
            return false;
        }

        // Read glTF version
        glbStream.Read(buffer, 0, uint32Size);
        uint version = BitConverter.ToUInt32(buffer, 0);

        if (version != 2)
        {
            Debug.LogError("Only glTF 2.0 is supported");
            return false;
        }

        // Read glTF file size
        glbStream.Read(buffer, 0, uint32Size);
        uint length = BitConverter.ToUInt32(buffer, 0);
        if (length != glbStream.Length)
        {
            Debug.LogError("glTF header length does not match file length");
            return false;
        }
        return true;
    }

    /// <summary>
    /// Validates the next chunk header and, on success, returns the chunk payload bytes;
    /// null otherwise.
    /// </summary>
    private byte[] ReadChunk(Stream glbStream, OVRChunkType type)
    {
        uint chunkLength;
        if (ValidateChunk(glbStream, type, out chunkLength))
        {
            byte[] chunkBuffer = new byte[chunkLength];
            glbStream.Read(chunkBuffer, 0, (int)chunkLength);
            return chunkBuffer;
        }
        return null;
    }

    /// <summary>
    /// Reads an 8-byte chunk header (length + type) and checks the type matches. On return
    /// the stream is positioned at the chunk payload.
    /// </summary>
    private bool ValidateChunk(Stream glbStream, OVRChunkType type, out uint chunkLength)
    {
        int uint32Size = sizeof(uint);
        byte[] buffer = new byte[uint32Size];
        glbStream.Read(buffer, 0, uint32Size);
        chunkLength = BitConverter.ToUInt32(buffer, 0);

        glbStream.Read(buffer, 0, uint32Size);
        uint chunkType = BitConverter.ToUInt32(buffer, 0);

        if (chunkType != (uint)type)
        {
            Debug.LogError("Read chunk does not match type.");
            return false;
        }
        return true;
    }

    /// <summary>
    /// Builds GameObjects for all nodes, then processes the node hierarchy of the FIRST
    /// scene only. Logs (but does not abort on) missing JSON or scenes.
    /// </summary>
    private void LoadGLTF(bool loadMips)
    {
        if (m_jsonData == null)
        {
            Debug.LogError("m_jsonData was null");
        }

        var scenes = m_jsonData["scenes"];
        if (scenes.Count == 0)
        {
            Debug.LogError("No valid scenes in this glTF.");
        }

        // Create GameObjects for each node in the model so that they can be referenced during processing
        var nodes = m_jsonData["nodes"].AsArray;
        for (int i = 0; i < nodes.Count; i++)
        {
            var jsonNode = m_jsonData["nodes"][i];
            GameObject go = new GameObject(jsonNode["name"]);
            m_Nodes.Add(go);
        }

        // Limit loading to just the first scene in the glTF
        var mainScene = scenes[0];
        var rootNodes = mainScene["nodes"].AsArray;
        for (int i = 0; i < rootNodes.Count; i++)
        {
            int nodeId = rootNodes[i].AsInt;
            ProcessNode(m_jsonData["nodes"][nodeId], nodeId, loadMips);
        }
    }

    /// <summary>
    /// Recursively processes one glTF node: parents/processes its children, attaches mesh
    /// renderers (skinned or static), and applies its TRS converted to Unity space.
    /// Nodes named "batteryIndicator" are destroyed and skipped.
    /// </summary>
    private void ProcessNode(JSONNode node, int nodeId, bool loadMips)
    {
        // Process the child nodes first
        var childNodes = node["children"];
        if (childNodes.Count > 0)
        {
            for (int i = 0; i < childNodes.Count; i++)
            {
                int childId = childNodes[i].AsInt;
                m_Nodes[childId].transform.SetParent(m_Nodes[nodeId].transform);
                ProcessNode(m_jsonData["nodes"][childId], childId, loadMips);
            }
        }

        string nodeName = node["name"].ToString();
        if (nodeName.Contains("batteryIndicator"))
        {
            GameObject.Destroy(m_Nodes[nodeId]);
            return;
        }

        if (node["mesh"] != null)
        {
            var meshId = node["mesh"].AsInt;
            OVRMeshData meshData = ProcessMesh(m_jsonData["meshes"][meshId], loadMips);

            if (node["skin"] != null)
            {
                var renderer = m_Nodes[nodeId].AddComponent<SkinnedMeshRenderer>();
                renderer.sharedMesh = meshData.mesh;
                renderer.sharedMaterial = meshData.material;

                var skinId = node["skin"].AsInt;
                ProcessSkin(m_jsonData["skins"][skinId], renderer);
            }
            else
            {
                var filter = m_Nodes[nodeId].AddComponent<MeshFilter>();
                filter.sharedMesh = meshData.mesh;
                var renderer = m_Nodes[nodeId].AddComponent<MeshRenderer>();
                renderer.sharedMaterial = meshData.material;
            }
        }

        var translation = node["translation"].AsArray;
        var rotation = node["rotation"].AsArray;
        var scale = node["scale"].AsArray;

        if (translation.Count > 0)
        {
            // Convert position into Unity space (mirror X).
            Vector3 position = new Vector3(
                translation[0] * GLTFToUnitySpace.x,
                translation[1] * GLTFToUnitySpace.y,
                translation[2] * GLTFToUnitySpace.z);
            m_Nodes[nodeId].transform.position = position;
        }

        if (rotation.Count > 0)
        {
            // Mirroring an axis flips rotation handedness: negate the vector part.
            Vector3 rotationAxis = new Vector3(
                rotation[0] * GLTFToUnitySpace.x,
                rotation[1] * GLTFToUnitySpace.y,
                rotation[2] * GLTFToUnitySpace.z);
            rotationAxis *= -1.0f;
            m_Nodes[nodeId].transform.rotation = new Quaternion(rotationAxis.x, rotationAxis.y, rotationAxis.z, rotation[3]);
        }

        if (scale.Count > 0)
        {
            Vector3 scaleVec = new Vector3(scale[0], scale[1], scale[2]);
            m_Nodes[nodeId].transform.localScale = scaleVec;
        }
    }

    /// <summary>
    /// Builds a single Unity Mesh (one submesh per glTF primitive) with any available
    /// vertex attributes, while the first primitive's material/texture transcodes on a
    /// worker task in parallel.
    /// </summary>
    private OVRMeshData ProcessMesh(JSONNode meshNode, bool loadMips)
    {
        OVRMeshData meshData = new OVRMeshData();

        // First pass: total vertex count so all primitives share one vertex buffer.
        int totalVertexCount = 0;
        var primitives = meshNode["primitives"];
        int[] primitiveVertexCounts = new int[primitives.Count];
        for (int i = 0; i < primitives.Count; i++)
        {
            var jsonPrimitive = primitives[i];
            var jsonAttrbite = jsonPrimitive["attributes"]["POSITION"];
            var jsonAccessor = m_jsonData["accessors"][jsonAttrbite.AsInt];

            primitiveVertexCounts[i] = jsonAccessor["count"];
            totalVertexCount += primitiveVertexCounts[i];
        }

        int[][] indicies = new int[primitives.Count][];
        Vector3[] vertices = new Vector3[totalVertexCount];

        // Optional attribute arrays are only allocated when the FIRST primitive declares
        // them; all primitives are assumed to share the same attribute set.
        Vector3[] normals = null;
        if (primitives[0]["attributes"]["NORMAL"] != null)
        {
            normals = new Vector3[totalVertexCount];
        }

        Vector4[] tangents = null;
        if (primitives[0]["attributes"]["TANGENT"] != null)
        {
            tangents = new Vector4[totalVertexCount];
        }

        Vector2[] texcoords = null;
        if (primitives[0]["attributes"]["TEXCOORD_0"] != null)
        {
            texcoords = new Vector2[totalVertexCount];
        }

        Color[] colors = null;
        if (primitives[0]["attributes"]["COLOR_0"] != null)
        {
            colors = new Color[totalVertexCount];
        }

        BoneWeight[] boneWeights = null;
        if (primitives[0]["attributes"]["WEIGHTS_0"] != null)
        {
            boneWeights = new BoneWeight[totalVertexCount];
        }

        // Begin async processing of material and its texture
        OVRMaterialData matData = default(OVRMaterialData);
        Task transcodeTask = null;
        var jsonMaterial = primitives[0]["material"];
        if (jsonMaterial != null)
        {
            matData = ProcessMaterial(jsonMaterial.AsInt);
            matData.texture = ProcessTexture(matData.textureId);
            transcodeTask = Task.Run(() => { TranscodeTexture(ref matData.texture); });
        }

        int vertexOffset = 0;
        for (int i = 0; i < primitives.Count; i++)
        {
            var jsonPrimitive = primitives[i];

            int indicesAccessorId = jsonPrimitive["indices"].AsInt;
            var jsonAccessor = m_jsonData["accessors"][indicesAccessorId];
            OVRGLTFAccessor indicesReader = new OVRGLTFAccessor(jsonAccessor, m_jsonData);

            indicies[i] = new int[indicesReader.GetDataCount()];
            indicesReader.ReadAsInt(m_binaryChunk, ref indicies[i], 0);
            // Mirroring X flips triangle winding; restore front faces.
            FlipTraingleIndices(ref indicies[i]);

            var jsonAttribute = jsonPrimitive["attributes"]["POSITION"];
            if (jsonAttribute != null)
            {
                jsonAccessor = m_jsonData["accessors"][jsonAttribute.AsInt];
                OVRGLTFAccessor dataReader = new OVRGLTFAccessor(jsonAccessor, m_jsonData);
                dataReader.ReadAsVector3(m_binaryChunk, ref vertices, vertexOffset, GLTFToUnitySpace);
            }

            jsonAttribute = jsonPrimitive["attributes"]["NORMAL"];
            if (jsonAttribute != null)
            {
                jsonAccessor = m_jsonData["accessors"][jsonAttribute.AsInt];
                OVRGLTFAccessor dataReader = new OVRGLTFAccessor(jsonAccessor, m_jsonData);
                dataReader.ReadAsVector3(m_binaryChunk, ref normals, vertexOffset, GLTFToUnitySpace);
            }

            jsonAttribute = jsonPrimitive["attributes"]["TANGENT"];
            if (jsonAttribute != null)
            {
                jsonAccessor = m_jsonData["accessors"][jsonAttribute.AsInt];
                OVRGLTFAccessor dataReader = new OVRGLTFAccessor(jsonAccessor, m_jsonData);
                dataReader.ReadAsVector4(m_binaryChunk, ref tangents, vertexOffset, GLTFToUnityTangent);
            }

            jsonAttribute = jsonPrimitive["attributes"]["TEXCOORD_0"];
            if (jsonAttribute != null)
            {
                jsonAccessor = m_jsonData["accessors"][jsonAttribute.AsInt];
                OVRGLTFAccessor dataReader = new OVRGLTFAccessor(jsonAccessor, m_jsonData);
                dataReader.ReadAsVector2(m_binaryChunk, ref texcoords, vertexOffset);
            }

            jsonAttribute = jsonPrimitive["attributes"]["COLOR_0"];
            if (jsonAttribute != null)
            {
                jsonAccessor = m_jsonData["accessors"][jsonAttribute.AsInt];
                OVRGLTFAccessor dataReader = new OVRGLTFAccessor(jsonAccessor, m_jsonData);
                dataReader.ReadAsColor(m_binaryChunk, ref colors, vertexOffset);
            }

            jsonAttribute = jsonPrimitive["attributes"]["WEIGHTS_0"];
            if (jsonAttribute != null)
            {
                jsonAccessor = m_jsonData["accessors"][jsonAttribute.AsInt];
                OVRGLTFAccessor weightReader = new OVRGLTFAccessor(jsonAccessor, m_jsonData);

                // WEIGHTS_0 and JOINTS_0 always come in pairs per glTF.
                var jointAttribute = jsonPrimitive["attributes"]["JOINTS_0"];
                var jointAccessor = m_jsonData["accessors"][jointAttribute.AsInt];
                OVRGLTFAccessor jointReader = new OVRGLTFAccessor(jointAccessor, m_jsonData);

                Vector4[] weights = new Vector4[weightReader.GetDataCount()];
                Vector4[] joints = new Vector4[jointReader.GetDataCount()];

                weightReader.ReadAsBoneWeights(m_binaryChunk, ref weights, 0);
                jointReader.ReadAsVector4(m_binaryChunk, ref joints, 0, Vector4.one);

                for (int w = 0; w < weights.Length; w++)
                {
                    boneWeights[vertexOffset + w].boneIndex0 = (int)joints[w].x;
                    boneWeights[vertexOffset + w].boneIndex1 = (int)joints[w].y;
                    boneWeights[vertexOffset + w].boneIndex2 = (int)joints[w].z;
                    boneWeights[vertexOffset + w].boneIndex3 = (int)joints[w].w;

                    boneWeights[vertexOffset + w].weight0 = weights[w].x;
                    boneWeights[vertexOffset + w].weight1 = weights[w].y;
                    boneWeights[vertexOffset + w].weight2 = weights[w].z;
                    boneWeights[vertexOffset + w].weight3 = weights[w].w;
                }
            }

            vertexOffset += primitiveVertexCounts[i];
        }

        Mesh mesh = new Mesh();
        mesh.vertices = vertices;
        mesh.normals = normals;
        mesh.tangents = tangents;
        mesh.colors = colors;
        mesh.uv = texcoords;
        mesh.boneWeights = boneWeights;
        mesh.subMeshCount = primitives.Count;

        int baseVertex = 0;
        for (int i = 0; i < primitives.Count; i++)
        {
            mesh.SetIndices(indicies[i], MeshTopology.Triangles, i, false, baseVertex);
            baseVertex += primitiveVertexCounts[i];
        }

        mesh.RecalculateBounds();
        meshData.mesh = mesh;

        if (transcodeTask != null)
        {
            // Block until the texture transcode finishes before building the material.
            transcodeTask.Wait();
            meshData.material = CreateUnityMaterial(matData, loadMips);
        }
        return meshData;
    }

    /// <summary>Swaps the first and last index of every triangle to reverse winding order.</summary>
    private static void FlipTraingleIndices(ref int[] indices)
    {
        for (int i = 0; i < indices.Length; i += 3)
        {
            int a = indices[i];
            indices[i] = indices[i + 2];
            indices[i + 2] = a;
        }
    }

    /// <summary>
    /// Wires a skin onto a SkinnedMeshRenderer: bind poses (converted to Unity space),
    /// optional root bone, and the joint-node transforms as the bone array.
    /// </summary>
    private void ProcessSkin(JSONNode skinNode, SkinnedMeshRenderer renderer)
    {
        Matrix4x4[] inverseBindMatrices = null;
        if (skinNode["inverseBindMatrices"] != null)
        {
            int inverseBindMatricesId = skinNode["inverseBindMatrices"].AsInt;
            var jsonInverseBindMatrices = m_jsonData["accessors"][inverseBindMatricesId];

            OVRGLTFAccessor dataReader = new OVRGLTFAccessor(jsonInverseBindMatrices, m_jsonData);
            inverseBindMatrices = new Matrix4x4[dataReader.GetDataCount()];
            dataReader.ReadAsMatrix4x4(m_binaryChunk, ref inverseBindMatrices, 0, GLTFToUnitySpace);
        }

        if (skinNode["skeleton"] != null)
        {
            var skeletonRootId = skinNode["skeleton"].AsInt;
            renderer.rootBone = m_Nodes[skeletonRootId].transform;
        }

        Transform[] bones = null;
        if (skinNode["joints"] != null)
        {
            var joints = skinNode["joints"].AsArray;

            bones = new Transform[joints.Count];
            for (int i = 0; i < joints.Count; i++)
            {
                bones[i] = m_Nodes[joints[i]].transform;
            }
        }

        renderer.sharedMesh.bindposes = inverseBindMatrices;
        renderer.bones = bones;
    }

    /// <summary>
    /// Extracts the texture reference for a material: prefers pbrMetallicRoughness
    /// baseColorTexture, falling back to emissiveTexture. Uses the shader set via
    /// SetModelShader (or the default chosen in LoadGLB).
    /// </summary>
    private OVRMaterialData ProcessMaterial(int matId)
    {
        OVRMaterialData matData = new OVRMaterialData();

        var jsonMaterial = m_jsonData["materials"][matId];
        var jsonPbrDetails = jsonMaterial["pbrMetallicRoughness"];

        var jsonBaseColor = jsonPbrDetails["baseColorTexture"];
        if (jsonBaseColor != null)
        {
            int textureId = jsonBaseColor["index"].AsInt;
            matData.textureId = textureId;
        }
        else
        {
            var jsonTextrure = jsonMaterial["emissiveTexture"];
            if (jsonTextrure != null)
            {
                int textureId = jsonTextrure["index"].AsInt;
                matData.textureId = textureId;
            }
        }

        matData.shader = m_Shader;
        return matData;
    }

    /// <summary>
    /// Reads the raw bytes of a texture's image from the BIN chunk. Supports the
    /// KHR_texture_basisu extension source or a plain source; only the "image/ktx2"
    /// mime type is actually read.
    /// </summary>
    private OVRTextureData ProcessTexture(int textureId)
    {
        var jsonTexture = m_jsonData["textures"][textureId];

        int imageSource = -1;
        var jsonExtensions = jsonTexture["extensions"];
        if (jsonExtensions != null)
        {
            var baisuExtension = jsonExtensions["KHR_texture_basisu"];
            if (baisuExtension != null)
            {
                imageSource = baisuExtension["source"].AsInt;
            }
        }
        else
        {
            imageSource = jsonTexture["source"].AsInt;
        }
        // NOTE(review): if "extensions" exists but lacks KHR_texture_basisu, imageSource
        // stays -1 and the lookup below targets index -1 -- behavior preserved from the
        // original; confirm runtime models never hit this path.
        var jsonSource = m_jsonData["images"][imageSource];

        int sampler = jsonTexture["sampler"].AsInt;
        var jsonSampler = m_jsonData["samplers"][sampler];

        int bufferViewId = jsonSource["bufferView"].AsInt;
        var jsonBufferView = m_jsonData["bufferViews"][bufferViewId];
        OVRGLTFAccessor dataReader = new OVRGLTFAccessor(jsonBufferView, m_jsonData, true);

        OVRTextureData textureData = new OVRTextureData();
        if (jsonSource["mimeType"].Value == "image/ktx2")
        {
            textureData.data = dataReader.ReadAsKtxTexture(m_binaryChunk);
            textureData.format = OVRTextureFormat.KTX2;
        }
        else
        {
            Debug.LogWarning("Unsupported image mimeType.");
        }
        return textureData;
    }

    /// <summary>
    /// Transcodes a KTX2 payload to a GPU-ready format in place (runs on a worker task).
    /// </summary>
    private void TranscodeTexture(ref OVRTextureData textureData)
    {
        if (textureData.format == OVRTextureFormat.KTX2)
        {
            OVRKtxTexture.Load(textureData.data, ref textureData);
        }
        else
        {
            Debug.LogWarning("Only KTX2 textures can be trascoded.");
        }
    }

    /// <summary>
    /// Instantiates the Unity material and, for KTX2 data, uploads the transcoded texture
    /// (non-readable after Apply to release the CPU copy).
    /// </summary>
    private Material CreateUnityMaterial(OVRMaterialData matData, bool loadMips)
    {
        Material mat = new Material(matData.shader);

        if (matData.texture.format == OVRTextureFormat.KTX2)
        {
            Texture2D texture;
            texture = new Texture2D(matData.texture.width, matData.texture.height, matData.texture.transcodedFormat, loadMips);
            texture.LoadRawTextureData(matData.texture.data);
            texture.Apply(false, true);
            mat.mainTexture = texture;
        }
        return mat;
    }
}
|
||||
11
Assets/Oculus/VR/Scripts/OVRGLTFLoader.cs.meta
Normal file
11
Assets/Oculus/VR/Scripts/OVRGLTFLoader.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: cc5cf61059125104eadec2fa160aa8d9
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
384
Assets/Oculus/VR/Scripts/OVRHaptics.cs
Normal file
384
Assets/Oculus/VR/Scripts/OVRHaptics.cs
Normal file
@@ -0,0 +1,384 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using UnityEngine;
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.Runtime.InteropServices;
|
||||
|
||||
/// <summary>
|
||||
/// Plays tactile effects on a tracked VR controller.
|
||||
/// </summary>
|
||||
/// <summary>
/// Plays tactile effects on a tracked VR controller.
/// Per-frame buffering and submission is driven by <see cref="Process"/>.
/// </summary>
public static class OVRHaptics
{
	// Channels[0] == LeftChannel (LTouch), Channels[1] == RightChannel (RTouch).
	public readonly static OVRHapticsChannel[] Channels;
	public readonly static OVRHapticsChannel LeftChannel;
	public readonly static OVRHapticsChannel RightChannel;

	private readonly static OVRHapticsOutput[] m_outputs;

	static OVRHaptics()
	{
		Config.Load();

		m_outputs = new OVRHapticsOutput[]
		{
			new OVRHapticsOutput((uint)OVRPlugin.Controller.LTouch),
			new OVRHapticsOutput((uint)OVRPlugin.Controller.RTouch),
		};

		Channels = new OVRHapticsChannel[]
		{
			LeftChannel = new OVRHapticsChannel(0),
			RightChannel = new OVRHapticsChannel(1),
		};
	}

	/// <summary>
	/// Determines the target format for haptics data on a specific device.
	/// Values are refreshed from the runtime by <see cref="Load"/>.
	/// </summary>
	public static class Config
	{
		public static int SampleRateHz { get; private set; }
		public static int SampleSizeInBytes { get; private set; }
		public static int MinimumSafeSamplesQueued { get; private set; }
		public static int MinimumBufferSamplesCount { get; private set; }
		public static int OptimalBufferSamplesCount { get; private set; }
		public static int MaximumBufferSamplesCount { get; private set; }

		static Config()
		{
			Load();
		}

		/// <summary>
		/// Queries the current haptics capabilities.
		/// NOTE(review): queries only the right Touch controller — assumes both
		/// controllers share the same haptics description.
		/// </summary>
		public static void Load()
		{
			OVRPlugin.HapticsDesc desc = OVRPlugin.GetControllerHapticsDesc((uint)OVRPlugin.Controller.RTouch);

			SampleRateHz = desc.SampleRateHz;
			SampleSizeInBytes = desc.SampleSizeInBytes;
			MinimumSafeSamplesQueued = desc.MinimumSafeSamplesQueued;
			MinimumBufferSamplesCount = desc.MinimumBufferSamplesCount;
			OptimalBufferSamplesCount = desc.OptimalBufferSamplesCount;
			MaximumBufferSamplesCount = desc.MaximumBufferSamplesCount;
		}
	}

	/// <summary>
	/// A track of haptics data that can be mixed or sequenced with another track.
	/// </summary>
	public class OVRHapticsChannel
	{
		private OVRHapticsOutput m_output;

		/// <summary>
		/// Constructs a channel targeting the specified output (index into m_outputs).
		/// </summary>
		public OVRHapticsChannel(uint outputIndex)
		{
			m_output = m_outputs[outputIndex];
		}

		/// <summary>
		/// Cancels any currently-playing clips and immediately plays the specified clip instead.
		/// </summary>
		public void Preempt(OVRHapticsClip clip)
		{
			m_output.Preempt(clip);
		}

		/// <summary>
		/// Enqueues the specified clip to play after any currently-playing clips finish.
		/// </summary>
		public void Queue(OVRHapticsClip clip)
		{
			m_output.Queue(clip);
		}

		/// <summary>
		/// Adds the specified clip to play simultaneously to the currently-playing clip(s).
		/// </summary>
		public void Mix(OVRHapticsClip clip)
		{
			m_output.Mix(clip);
		}

		/// <summary>
		/// Cancels any currently-playing clips.
		/// </summary>
		public void Clear()
		{
			m_output.Clear();
		}
	}

	// Per-controller playback state: pending clips, the native staging buffer,
	// and queue-drain prediction bookkeeping.
	private class OVRHapticsOutput
	{
		// Tracks how many samples of a pending clip have already been submitted.
		private class ClipPlaybackTracker
		{
			public int ReadCount { get; set; }
			public OVRHapticsClip Clip { get; set; }

			public ClipPlaybackTracker(OVRHapticsClip clip)
			{
				Clip = clip;
			}
		}

		private bool m_lowLatencyMode = true;
		private bool m_paddingEnabled = true; // pad the device queue with silence to avoid underruns
		private int m_prevSamplesQueued = 0;
		private float m_prevSamplesQueuedTime = 0;
		private int m_numPredictionHits = 0;
		private int m_numPredictionMisses = 0;
		private int m_numUnderruns = 0;
		private List<ClipPlaybackTracker> m_pendingClips = new List<ClipPlaybackTracker>();
		private uint m_controller = 0;
		private OVRNativeBuffer m_nativeBuffer = new OVRNativeBuffer(OVRHaptics.Config.MaximumBufferSamplesCount * OVRHaptics.Config.SampleSizeInBytes);
		private OVRHapticsClip m_paddingClip = new OVRHapticsClip();

		public OVRHapticsOutput(uint controller)
		{
#if UNITY_ANDROID
			m_paddingEnabled = false;
#endif
			m_controller = controller;
		}

		/// <summary>
		/// The system calls this each frame to update haptics playback: it tracks
		/// queue-drain prediction accuracy, then copies pending clip samples (plus
		/// optional silence padding) into the native buffer and submits them.
		/// </summary>
		public void Process()
		{
			var hapticsState = OVRPlugin.GetControllerHapticsState(m_controller);

			float elapsedTime = Time.realtimeSinceStartup - m_prevSamplesQueuedTime;
			if (m_prevSamplesQueued > 0)
			{
				// Predict how many of last frame's samples should still be queued.
				int expectedSamples = m_prevSamplesQueued - (int)(elapsedTime * OVRHaptics.Config.SampleRateHz + 0.5f);
				if (expectedSamples < 0)
					expectedSamples = 0;

				if ((hapticsState.SamplesQueued - expectedSamples) == 0)
					m_numPredictionHits++;
				else
					m_numPredictionMisses++;

				//Debug.Log(hapticsState.SamplesAvailable + "a " + hapticsState.SamplesQueued + "q " + expectedSamples + "e "
				//+ "Prediction Accuracy: " + m_numPredictionHits / (float)(m_numPredictionMisses + m_numPredictionHits));

				if ((expectedSamples > 0) && (hapticsState.SamplesQueued == 0))
				{
					m_numUnderruns++;
					//Debug.LogError("Samples Underrun (" + m_controller + " #" + m_numUnderruns + ") -"
					//	+ " Expected: " + expectedSamples
					//	+ " Actual: " + hapticsState.SamplesQueued);
				}

				m_prevSamplesQueued = hapticsState.SamplesQueued;
				m_prevSamplesQueuedTime = Time.realtimeSinceStartup;
			}

			// In low-latency mode, keep only enough samples queued to survive
			// until the next frame instead of the full optimal buffer.
			int desiredSamplesCount = OVRHaptics.Config.OptimalBufferSamplesCount;
			if (m_lowLatencyMode)
			{
				float sampleRateMs = 1000.0f / (float)OVRHaptics.Config.SampleRateHz;
				float elapsedMs = elapsedTime * 1000.0f;
				int samplesNeededPerFrame = (int)Mathf.Ceil(elapsedMs / sampleRateMs);
				int lowLatencySamplesCount = OVRHaptics.Config.MinimumSafeSamplesQueued + samplesNeededPerFrame;

				if (lowLatencySamplesCount < desiredSamplesCount)
					desiredSamplesCount = lowLatencySamplesCount;
			}

			if (hapticsState.SamplesQueued > desiredSamplesCount)
				return;

			if (desiredSamplesCount > OVRHaptics.Config.MaximumBufferSamplesCount)
				desiredSamplesCount = OVRHaptics.Config.MaximumBufferSamplesCount;
			if (desiredSamplesCount > hapticsState.SamplesAvailable)
				desiredSamplesCount = hapticsState.SamplesAvailable;

			// Copy samples from the pending clips, in order, into the native buffer.
			int acquiredSamplesCount = 0;
			int clipIndex = 0;
			while (acquiredSamplesCount < desiredSamplesCount && clipIndex < m_pendingClips.Count)
			{
				int numSamplesToCopy = desiredSamplesCount - acquiredSamplesCount;
				int remainingSamplesInClip = m_pendingClips[clipIndex].Clip.Count - m_pendingClips[clipIndex].ReadCount;
				if (numSamplesToCopy > remainingSamplesInClip)
					numSamplesToCopy = remainingSamplesInClip;

				if (numSamplesToCopy > 0)
				{
					int numBytes = numSamplesToCopy * OVRHaptics.Config.SampleSizeInBytes;
					int dstOffset = acquiredSamplesCount * OVRHaptics.Config.SampleSizeInBytes;
					int srcOffset = m_pendingClips[clipIndex].ReadCount * OVRHaptics.Config.SampleSizeInBytes;
					Marshal.Copy(m_pendingClips[clipIndex].Clip.Samples, srcOffset, m_nativeBuffer.GetPointer(dstOffset), numBytes);

					m_pendingClips[clipIndex].ReadCount += numSamplesToCopy;
					acquiredSamplesCount += numSamplesToCopy;
				}

				clipIndex++;
			}

			// Drop fully-consumed clips. Iterating backwards keeps removal safe.
			// fix: dropped the redundant "m_pendingClips.Count > 0" loop condition (implied by i >= 0).
			for (int i = m_pendingClips.Count - 1; i >= 0; i--)
			{
				if (m_pendingClips[i].ReadCount >= m_pendingClips[i].Clip.Count)
					m_pendingClips.RemoveAt(i);
			}

			if (m_paddingEnabled)
			{
				int desiredPadding = desiredSamplesCount - (hapticsState.SamplesQueued + acquiredSamplesCount);
				if (desiredPadding < (OVRHaptics.Config.MinimumBufferSamplesCount - acquiredSamplesCount))
					desiredPadding = (OVRHaptics.Config.MinimumBufferSamplesCount - acquiredSamplesCount);
				if (desiredPadding > hapticsState.SamplesAvailable)
					desiredPadding = hapticsState.SamplesAvailable;

				if (desiredPadding > 0)
				{
					int numBytes = desiredPadding * OVRHaptics.Config.SampleSizeInBytes;
					int dstOffset = acquiredSamplesCount * OVRHaptics.Config.SampleSizeInBytes;
					int srcOffset = 0;
					Marshal.Copy(m_paddingClip.Samples, srcOffset, m_nativeBuffer.GetPointer(dstOffset), numBytes);

					acquiredSamplesCount += desiredPadding;
				}
			}

			if (acquiredSamplesCount > 0)
			{
				OVRPlugin.HapticsBuffer hapticsBuffer;
				hapticsBuffer.Samples = m_nativeBuffer.GetPointer();
				hapticsBuffer.SamplesCount = acquiredSamplesCount;

				OVRPlugin.SetControllerHaptics(m_controller, hapticsBuffer);

				hapticsState = OVRPlugin.GetControllerHapticsState(m_controller);
				m_prevSamplesQueued = hapticsState.SamplesQueued;
				m_prevSamplesQueuedTime = Time.realtimeSinceStartup;
			}
		}

		/// <summary>
		/// Immediately plays the specified clip without waiting for any currently-playing clip to finish.
		/// </summary>
		public void Preempt(OVRHapticsClip clip)
		{
			m_pendingClips.Clear();
			m_pendingClips.Add(new ClipPlaybackTracker(clip));
		}

		/// <summary>
		/// Enqueues the specified clip to play after any currently-playing clip finishes.
		/// </summary>
		public void Queue(OVRHapticsClip clip)
		{
			m_pendingClips.Add(new ClipPlaybackTracker(clip));
		}

		/// <summary>
		/// Adds the samples from the specified clip to the ones in the currently-playing clip(s).
		/// The unread portions of the overlapped pending clips are summed (clamped to byte range)
		/// with the new clip into a single replacement clip.
		/// </summary>
		public void Mix(OVRHapticsClip clip)
		{
			int numClipsToMix = 0;
			int numSamplesToMix = 0;
			int numSamplesRemaining = clip.Count;

			// Count how many pending clips (and samples) the new clip overlaps.
			while (numSamplesRemaining > 0 && numClipsToMix < m_pendingClips.Count)
			{
				int numSamplesRemainingInClip = m_pendingClips[numClipsToMix].Clip.Count - m_pendingClips[numClipsToMix].ReadCount;
				numSamplesRemaining -= numSamplesRemainingInClip;
				numSamplesToMix += numSamplesRemainingInClip;
				numClipsToMix++;
			}

			// The new clip may extend past all currently-pending clips.
			if (numSamplesRemaining > 0)
			{
				numSamplesToMix += numSamplesRemaining;
				numSamplesRemaining = 0;
			}

			if (numClipsToMix > 0)
			{
				OVRHapticsClip mixClip = new OVRHapticsClip(numSamplesToMix);

				OVRHapticsClip a = clip;
				int aReadCount = 0;

				for (int i = 0; i < numClipsToMix; i++)
				{
					OVRHapticsClip b = m_pendingClips[i].Clip;
					for (int bReadCount = m_pendingClips[i].ReadCount; bReadCount < b.Count; bReadCount++)
					{
						if (OVRHaptics.Config.SampleSizeInBytes == 1)
						{
							byte sample = 0; // TODO support multi-byte samples
							if ((aReadCount < a.Count) && (bReadCount < b.Count))
							{
								sample = (byte)(Mathf.Clamp(a.Samples[aReadCount] + b.Samples[bReadCount], 0, System.Byte.MaxValue)); // TODO support multi-byte samples
								aReadCount++;
							}
							else if (bReadCount < b.Count)
							{
								sample = b.Samples[bReadCount]; // TODO support multi-byte samples
							}

							mixClip.WriteSample(sample); // TODO support multi-byte samples
						}
					}
				}

				// Append any remaining samples of the new clip beyond the pending ones.
				while (aReadCount < a.Count)
				{
					if (OVRHaptics.Config.SampleSizeInBytes == 1)
					{
						mixClip.WriteSample(a.Samples[aReadCount]); // TODO support multi-byte samples
					}
					aReadCount++;
				}

				// Replace the overlapped clips with the single combined clip.
				m_pendingClips[0] = new ClipPlaybackTracker(mixClip);
				for (int i = 1; i < numClipsToMix; i++)
				{
					m_pendingClips.RemoveAt(1);
				}
			}
			else
			{
				m_pendingClips.Add(new ClipPlaybackTracker(clip));
			}
		}

		public void Clear()
		{
			m_pendingClips.Clear();
		}
	}

	/// <summary>
	/// The system calls this each frame to update haptics playback.
	/// </summary>
	public static void Process()
	{
		Config.Load();

		for (int i = 0; i < m_outputs.Length; i++)
		{
			m_outputs[i].Process();
		}
	}
}
|
||||
12
Assets/Oculus/VR/Scripts/OVRHaptics.cs.meta
Normal file
12
Assets/Oculus/VR/Scripts/OVRHaptics.cs.meta
Normal file
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: d3b22b858e27329498781f145fa42610
|
||||
timeCreated: 1463018541
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
160
Assets/Oculus/VR/Scripts/OVRHapticsClip.cs
Normal file
160
Assets/Oculus/VR/Scripts/OVRHapticsClip.cs
Normal file
@@ -0,0 +1,160 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using UnityEngine;
|
||||
using System.Collections;
|
||||
|
||||
/// <summary>
|
||||
/// A PCM buffer of data for a haptics effect.
|
||||
/// </summary>
|
||||
/// <summary>
/// A PCM buffer of data for a haptics effect.
/// </summary>
public class OVRHapticsClip
{
	/// <summary>
	/// The current number of samples in the clip.
	/// </summary>
	public int Count { get; private set; }

	/// <summary>
	/// The maximum number of samples the clip can store.
	/// </summary>
	public int Capacity { get; private set; }

	/// <summary>
	/// The raw haptics data (Capacity * SampleSizeInBytes bytes).
	/// </summary>
	public byte[] Samples { get; private set; }

	/// <summary>
	/// Creates an empty clip sized to the device's maximum buffer length.
	/// </summary>
	public OVRHapticsClip()
	{
		Capacity = OVRHaptics.Config.MaximumBufferSamplesCount;
		Samples = new byte[Capacity * OVRHaptics.Config.SampleSizeInBytes];
	}

	/// <summary>
	/// Creates a clip with the specified capacity (negative values are clamped to 0).
	/// </summary>
	public OVRHapticsClip(int capacity)
	{
		Capacity = (capacity >= 0) ? capacity : 0;
		Samples = new byte[Capacity * OVRHaptics.Config.SampleSizeInBytes];
	}

	/// <summary>
	/// Creates a clip that wraps the specified raw data (the array is not copied).
	/// </summary>
	public OVRHapticsClip(byte[] samples, int samplesCount)
	{
		Samples = samples;
		Capacity = Samples.Length / OVRHaptics.Config.SampleSizeInBytes;
		Count = (samplesCount >= 0) ? samplesCount : 0;
	}

	/// <summary>
	/// Creates a clip by mixing the specified clips (sample-wise sum, clamped to byte range).
	/// </summary>
	public OVRHapticsClip(OVRHapticsClip a, OVRHapticsClip b)
	{
		int maxCount = a.Count;
		if (b.Count > maxCount)
			maxCount = b.Count;

		Capacity = maxCount;
		Samples = new byte[Capacity * OVRHaptics.Config.SampleSizeInBytes];

		for (int i = 0; i < a.Count || i < b.Count; i++)
		{
			if (OVRHaptics.Config.SampleSizeInBytes == 1)
			{
				byte sample = 0; // TODO support multi-byte samples
				if ((i < a.Count) && (i < b.Count))
					sample = (byte)(Mathf.Clamp(a.Samples[i] + b.Samples[i], 0, System.Byte.MaxValue)); // TODO support multi-byte samples
				else if (i < a.Count)
					sample = a.Samples[i]; // TODO support multi-byte samples
				else if (i < b.Count)
					sample = b.Samples[i]; // TODO support multi-byte samples

				WriteSample(sample); // TODO support multi-byte samples
			}
		}
	}

	/// <summary>
	/// Creates a haptics clip from the specified audio clip, downsampling one
	/// channel to the haptics sample rate.
	/// </summary>
	public OVRHapticsClip(AudioClip audioClip, int channel = 0)
	{
		float[] audioData = new float[audioClip.samples * audioClip.channels];
		audioClip.GetData(audioData, 0);

		InitializeFromAudioFloatTrack(audioData, audioClip.frequency, audioClip.channels, channel);
	}

	/// <summary>
	/// Adds the specified sample to the end of the clip. Writes past Capacity are silently ignored.
	/// </summary>
	public void WriteSample(byte sample) // TODO support multi-byte samples
	{
		if (Count >= Capacity)
		{
			//Debug.LogError("Attempted to write OVRHapticsClip sample out of range - Count:" + Count + " Capacity:" + Capacity);
			return;
		}

		if (OVRHaptics.Config.SampleSizeInBytes == 1)
		{
			Samples[Count * OVRHaptics.Config.SampleSizeInBytes] = sample; // TODO support multi-byte samples
		}

		Count++;
	}

	/// <summary>
	/// Clears the clip and resets its size to 0.
	/// </summary>
	public void Reset()
	{
		Count = 0;
	}

	// Downsamples one channel of interleaved float PCM into byte haptics samples,
	// carrying the fractional step error forward so long clips stay in sync.
	private void InitializeFromAudioFloatTrack(float[] sourceData, double sourceFrequency, int sourceChannelCount, int sourceChannel)
	{
		double stepSizePrecise = (sourceFrequency + 1e-6) / OVRHaptics.Config.SampleRateHz;

		// Cannot upsample: the source must be at least the haptics sample rate.
		if (stepSizePrecise < 1.0)
			return;

		int stepSize = (int)stepSizePrecise;
		double stepSizeError = stepSizePrecise - stepSize;
		double accumulatedStepSizeError = 0.0; // fix: was the float literal 0.0f assigned to a double
		int length = sourceData.Length;

		Count = 0;
		Capacity = length / sourceChannelCount / stepSize + 1;
		Samples = new byte[Capacity * OVRHaptics.Config.SampleSizeInBytes];

		int i = sourceChannel % sourceChannelCount;
		while (i < length)
		{
			if (OVRHaptics.Config.SampleSizeInBytes == 1)
			{
				WriteSample((byte)(Mathf.Clamp01(Mathf.Abs(sourceData[i])) * System.Byte.MaxValue)); // TODO support multi-byte samples
			}
			i += stepSize * sourceChannelCount;
			accumulatedStepSizeError += stepSizeError;
			if ((int)accumulatedStepSizeError > 0)
			{
				i += (int)accumulatedStepSizeError * sourceChannelCount;
				accumulatedStepSizeError = accumulatedStepSizeError - (int)accumulatedStepSizeError;
			}
		}
	}
}
|
||||
12
Assets/Oculus/VR/Scripts/OVRHapticsClip.cs.meta
Normal file
12
Assets/Oculus/VR/Scripts/OVRHapticsClip.cs.meta
Normal file
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: c123270a848515b458069b5242866451
|
||||
timeCreated: 1467575852
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
180
Assets/Oculus/VR/Scripts/OVRHeadsetEmulator.cs
Normal file
180
Assets/Oculus/VR/Scripts/OVRHeadsetEmulator.cs
Normal file
@@ -0,0 +1,180 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
|
||||
/// <summary>
/// Emulates HMD head motion with the mouse while an activation key is held,
/// by writing offsets into OVRManager.headPoseRelativeOffset{Translation,Rotation}.
/// Requires the legacy input manager (ENABLE_LEGACY_INPUT_MANAGER).
/// </summary>
public class OVRHeadsetEmulator : MonoBehaviour {
	public enum OpMode
	{
		Off,
		EditorOnly,
		AlwaysOn
	}

	public OpMode opMode = OpMode.EditorOnly;
	public bool resetHmdPoseOnRelease = true;
	public bool resetHmdPoseByMiddleMouseButton = true;

	// Hold any of these keys to drive the emulated HMD with the mouse.
	public KeyCode[] activateKeys = new KeyCode[] { KeyCode.LeftControl, KeyCode.RightControl };

	// Hold any of these keys (while active) to tweak pitch instead of yaw/roll.
	public KeyCode[] pitchKeys = new KeyCode[] { KeyCode.LeftAlt, KeyCode.RightAlt };

	OVRManager manager;

	// Mouse-to-pose scale factors.
	const float MOUSE_SCALE_X = -2.0f;
	const float MOUSE_SCALE_X_PITCH = -2.0f;
	const float MOUSE_SCALE_Y = 2.0f;
	const float MOUSE_SCALE_HEIGHT = 1.0f;
	const float MAX_ROLL = 85.0f; // NOTE(review): unused in this class — candidate for removal

	private bool lastFrameEmulationActivated = false;

	// Pose offsets saved when emulation deactivates, restored on next activation.
	private Vector3 recordedHeadPoseRelativeOffsetTranslation;
	private Vector3 recordedHeadPoseRelativeOffsetRotation;

	private bool hasSentEvent = false;
	private bool emulatorHasInitialized = false;

	private CursorLockMode previousCursorLockMode = CursorLockMode.None;

	// fix: removed the empty Start() stub — Unity invokes empty magic methods via
	// reflection, and the method had no body.

	// Update is called once per frame
	void Update () {
		//todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
		// Deferred initialization: wait until OVRManager is up before caching state.
		if (!emulatorHasInitialized)
		{
			if (OVRManager.OVRManagerinitialized)
			{
				previousCursorLockMode = Cursor.lockState;
				manager = OVRManager.instance;
				recordedHeadPoseRelativeOffsetTranslation = manager.headPoseRelativeOffsetTranslation;
				recordedHeadPoseRelativeOffsetRotation = manager.headPoseRelativeOffsetRotation;
				emulatorHasInitialized = true;
				lastFrameEmulationActivated = false;
			}
			else
				return;
		}
		bool emulationActivated = IsEmulationActivated();
		if (emulationActivated)
		{
			// On activation edge: capture cursor and restore the recorded pose.
			if (!lastFrameEmulationActivated)
			{
				previousCursorLockMode = Cursor.lockState;
				Cursor.lockState = CursorLockMode.Locked;
			}

			if (!lastFrameEmulationActivated && resetHmdPoseOnRelease)
			{
				manager.headPoseRelativeOffsetTranslation = recordedHeadPoseRelativeOffsetTranslation;
				manager.headPoseRelativeOffsetRotation = recordedHeadPoseRelativeOffsetRotation;
			}

			if (resetHmdPoseByMiddleMouseButton && Input.GetMouseButton(2))
			{
				// Middle mouse: snap the emulated pose back to identity.
				manager.headPoseRelativeOffsetTranslation = Vector3.zero;
				manager.headPoseRelativeOffsetRotation = Vector3.zero;
			}
			else
			{
				// Scroll wheel raises/lowers the emulated head.
				Vector3 emulatedTranslation = manager.headPoseRelativeOffsetTranslation;
				float deltaMouseScrollWheel = Input.GetAxis("Mouse ScrollWheel");
				float emulatedHeight = deltaMouseScrollWheel * MOUSE_SCALE_HEIGHT;
				emulatedTranslation.y += emulatedHeight;
				manager.headPoseRelativeOffsetTranslation = emulatedTranslation;

				float deltaX = Input.GetAxis("Mouse X");
				float deltaY = Input.GetAxis("Mouse Y");

				// NOTE(review): offsets are stored as (roll, yaw, pitch) in x/y/z — matches
				// the assignment below, but confirm against OVRManager's convention.
				Vector3 emulatedAngles = manager.headPoseRelativeOffsetRotation;
				float emulatedRoll = emulatedAngles.x;
				float emulatedYaw = emulatedAngles.y;
				float emulatedPitch = emulatedAngles.z;
				if (IsTweakingPitch())
				{
					emulatedPitch += deltaX * MOUSE_SCALE_X_PITCH;
				}
				else
				{
					emulatedRoll += deltaY * MOUSE_SCALE_Y;
					emulatedYaw += deltaX * MOUSE_SCALE_X;
				}

				manager.headPoseRelativeOffsetRotation = new Vector3(emulatedRoll, emulatedYaw, emulatedPitch);
			}

			// One-time telemetry on first activation.
			if (!hasSentEvent)
			{
				OVRPlugin.SendEvent("headset_emulator", "activated");
				hasSentEvent = true;
			}
		}
		else
		{
			// On deactivation edge: restore the cursor and record/reset the pose.
			if (lastFrameEmulationActivated)
			{
				Cursor.lockState = previousCursorLockMode;

				recordedHeadPoseRelativeOffsetTranslation = manager.headPoseRelativeOffsetTranslation;
				recordedHeadPoseRelativeOffsetRotation = manager.headPoseRelativeOffsetRotation;

				if (resetHmdPoseOnRelease)
				{
					manager.headPoseRelativeOffsetTranslation = Vector3.zero;
					manager.headPoseRelativeOffsetRotation = Vector3.zero;
				}
			}
		}
		lastFrameEmulationActivated = emulationActivated;
#endif
	}

	// True when the configured op mode permits emulation and an activation key is held.
	bool IsEmulationActivated()
	{
		if (opMode == OpMode.Off)
		{
			return false;
		}
		else if (opMode == OpMode.EditorOnly && !Application.isEditor)
		{
			return false;
		}

		foreach (KeyCode key in activateKeys)
		{
			if (Input.GetKey(key))
				return true;
		}

		return false;
	}

	// True when emulation is active and a pitch-modifier key is held.
	bool IsTweakingPitch()
	{
		if (!IsEmulationActivated())
			return false;

		foreach (KeyCode key in pitchKeys)
		{
			if (Input.GetKey(key))
				return true;
		}

		return false;
	}
}
|
||||
11
Assets/Oculus/VR/Scripts/OVRHeadsetEmulator.cs.meta
Normal file
11
Assets/Oculus/VR/Scripts/OVRHeadsetEmulator.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 5178bc8574ce2bf4388e787a2e2af326
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
3180
Assets/Oculus/VR/Scripts/OVRInput.cs
Normal file
3180
Assets/Oculus/VR/Scripts/OVRInput.cs
Normal file
File diff suppressed because it is too large
Load Diff
12
Assets/Oculus/VR/Scripts/OVRInput.cs.meta
Normal file
12
Assets/Oculus/VR/Scripts/OVRInput.cs.meta
Normal file
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: d210caf8a50e1954c80690fa858572ad
|
||||
timeCreated: 1438295094
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
69
Assets/Oculus/VR/Scripts/OVRKtxTexture.cs
Normal file
69
Assets/Oculus/VR/Scripts/OVRKtxTexture.cs
Normal file
@@ -0,0 +1,69 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.Runtime.InteropServices;
|
||||
using System;
|
||||
|
||||
using UnityEngine;
|
||||
|
||||
// Class that manages loading of Ktx Textures through OVRPlugin
|
||||
// Class that manages loading of Ktx Textures through OVRPlugin
public class OVRKtxTexture
{
	// basis_universal transcoder target-format codes passed to OVRPlugin.
	private const uint KTX_TTF_BC7_RGBA = 6;
	private const uint KTX_TTF_ASTC_4x4_RGBA = 10;

	/// <summary>
	/// Decodes a KTX2 container held in <paramref name="data"/> via OVRPlugin and
	/// transcodes it to the platform-preferred GPU format (ASTC 4x4 on Android
	/// devices, BC7 elsewhere). On success, fills width, height, transcodedFormat
	/// and data on <paramref name="ktxData"/> and returns true.
	/// </summary>
	public static bool Load(byte[] data, ref OVRTextureData ktxData)
	{
		// fix: the original computed Marshal.SizeOf(data[0]) * data.Length, which threw
		// IndexOutOfRangeException for an empty array; for byte[] the size is just Length.
		if (data == null || data.Length == 0)
		{
			Debug.LogError("No KTX texture data provided.");
			return false;
		}

		// Copy the managed bytes into unmanaged memory for the native loader.
		IntPtr dataPtr = Marshal.AllocHGlobal(data.Length);
		Marshal.Copy(data, 0, dataPtr, data.Length);
		IntPtr ktxTexturePtr = OVRPlugin.Ktx.LoadKtxFromMemory(dataPtr, (uint)data.Length);
		Marshal.FreeHGlobal(dataPtr);

		ktxData.width = (int)OVRPlugin.Ktx.GetKtxTextureWidth(ktxTexturePtr);
		ktxData.height = (int)OVRPlugin.Ktx.GetKtxTextureHeight(ktxTexturePtr);

		bool transcodeResult = false;
#if UNITY_ANDROID && !UNITY_EDITOR
		transcodeResult = OVRPlugin.Ktx.TranscodeKtxTexture(ktxTexturePtr, KTX_TTF_ASTC_4x4_RGBA);
		ktxData.transcodedFormat = TextureFormat.ASTC_4x4;
#else
		transcodeResult = OVRPlugin.Ktx.TranscodeKtxTexture(ktxTexturePtr, KTX_TTF_BC7_RGBA);
		ktxData.transcodedFormat = TextureFormat.BC7;
#endif
		if (!transcodeResult)
		{
			Debug.LogError("Failed to transcode KTX texture.");
			// fix: the original leaked the native texture handle on this path.
			OVRPlugin.Ktx.DestroyKtxTexture(ktxTexturePtr);
			return false;
		}

		uint textureSize = OVRPlugin.Ktx.GetKtxTextureSize(ktxTexturePtr);
		IntPtr textureDataPtr = Marshal.AllocHGlobal(sizeof(byte) * (int)textureSize);
		if (!OVRPlugin.Ktx.GetKtxTextureData(ktxTexturePtr, textureDataPtr, textureSize))
		{
			Debug.LogError("Failed to get texture data from Ktx texture reference");
			// fix: the original leaked both the unmanaged buffer and the texture handle here.
			Marshal.FreeHGlobal(textureDataPtr);
			OVRPlugin.Ktx.DestroyKtxTexture(ktxTexturePtr);
			return false;
		}

		byte[] textureData = new byte[textureSize];
		Marshal.Copy(textureDataPtr, textureData, 0, textureData.Length);
		Marshal.FreeHGlobal(textureDataPtr);
		ktxData.data = textureData;

		OVRPlugin.Ktx.DestroyKtxTexture(ktxTexturePtr);

		return true;
	}
}
|
||||
11
Assets/Oculus/VR/Scripts/OVRKtxTexture.cs.meta
Normal file
11
Assets/Oculus/VR/Scripts/OVRKtxTexture.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 23eec80b7bf73ba41a46466912cd6bb9
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
19
Assets/Oculus/VR/Scripts/OVRLayerAttribute.cs
Normal file
19
Assets/Oculus/VR/Scripts/OVRLayerAttribute.cs
Normal file
@@ -0,0 +1,19 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using UnityEngine;
|
||||
|
||||
/// <summary>
|
||||
/// Dummy subtype of PropertyAttribute for custom inspector to use.
|
||||
/// </summary>
|
||||
public class OVRLayerAttribute : PropertyAttribute {
|
||||
}
|
||||
12
Assets/Oculus/VR/Scripts/OVRLayerAttribute.cs.meta
Normal file
12
Assets/Oculus/VR/Scripts/OVRLayerAttribute.cs.meta
Normal file
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 701bfebb60063334f994e36546c103d6
|
||||
timeCreated: 1499749327
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
2481
Assets/Oculus/VR/Scripts/OVRManager.cs
Normal file
2481
Assets/Oculus/VR/Scripts/OVRManager.cs
Normal file
File diff suppressed because it is too large
Load Diff
8
Assets/Oculus/VR/Scripts/OVRManager.cs.meta
Normal file
8
Assets/Oculus/VR/Scripts/OVRManager.cs.meta
Normal file
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 7e933e81d3c20c74ea6fdc708a67e3a5
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: -100
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
138
Assets/Oculus/VR/Scripts/OVRMixedReality.cs
Normal file
138
Assets/Oculus/VR/Scripts/OVRMixedReality.cs
Normal file
@@ -0,0 +1,138 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
#if UNITY_ANDROID && !UNITY_EDITOR
|
||||
#define OVR_ANDROID_MRC
|
||||
#endif
|
||||
|
||||
using System;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
|
||||
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
|
||||
|
||||
/// <summary>
|
||||
/// Manages mix-reality elements
|
||||
/// </summary>
|
||||
internal static class OVRMixedReality
|
||||
{
|
||||
/// <summary>
|
||||
/// For Debugging purpose, we can use preset parameters to fake a camera when external camera is not available
|
||||
/// </summary>
|
||||
public static bool useFakeExternalCamera = false;
|
||||
public static Vector3 fakeCameraFloorLevelPosition = new Vector3(0.0f, 2.0f, -0.5f);
|
||||
public static Vector3 fakeCameraEyeLevelPosition = fakeCameraFloorLevelPosition - new Vector3(0.0f, 1.8f, 0.0f);
|
||||
public static Quaternion fakeCameraRotation = Quaternion.LookRotation((new Vector3(0.0f, fakeCameraFloorLevelPosition.y, 0.0f) - fakeCameraFloorLevelPosition).normalized, Vector3.up);
|
||||
public static float fakeCameraFov = 60.0f;
|
||||
public static float fakeCameraAspect = 16.0f / 9.0f;
|
||||
|
||||
/// <summary>
|
||||
/// Composition object
|
||||
/// </summary>
|
||||
public static OVRComposition currentComposition = null;
|
||||
|
||||
/// <summary>
|
||||
/// Updates the internal state of the Mixed Reality Camera. Called by OVRManager.
|
||||
/// </summary>
|
||||
|
||||
public static void Update(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration, OVRManager.TrackingOrigin trackingOrigin)
|
||||
{
|
||||
if (!OVRPlugin.initialized)
|
||||
{
|
||||
Debug.LogError("OVRPlugin not initialized");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!OVRPlugin.IsMixedRealityInitialized())
|
||||
{
|
||||
OVRPlugin.InitializeMixedReality();
|
||||
if (OVRPlugin.IsMixedRealityInitialized())
|
||||
{
|
||||
Debug.Log("OVRPlugin_MixedReality initialized");
|
||||
}
|
||||
else
|
||||
{
|
||||
Debug.LogError("Unable to initialize OVRPlugin_MixedReality");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (!OVRPlugin.IsMixedRealityInitialized())
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
OVRPlugin.UpdateExternalCamera();
|
||||
#if !OVR_ANDROID_MRC
|
||||
OVRPlugin.UpdateCameraDevices();
|
||||
#endif
|
||||
|
||||
#if OVR_ANDROID_MRC
|
||||
useFakeExternalCamera = OVRPlugin.Media.UseMrcDebugCamera();
|
||||
#endif
|
||||
|
||||
if (currentComposition != null && (currentComposition.CompositionMethod() != configuration.compositionMethod))
|
||||
{
|
||||
currentComposition.Cleanup();
|
||||
currentComposition = null;
|
||||
}
|
||||
|
||||
if (configuration.compositionMethod == OVRManager.CompositionMethod.External)
|
||||
{
|
||||
if (currentComposition == null)
|
||||
{
|
||||
currentComposition = new OVRExternalComposition(parentObject, mainCamera, configuration);
|
||||
}
|
||||
}
|
||||
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
|
||||
else if (configuration.compositionMethod == OVRManager.CompositionMethod.Direct)
|
||||
{
|
||||
if (currentComposition == null)
|
||||
{
|
||||
currentComposition = new OVRDirectComposition(parentObject, mainCamera, configuration);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
else
|
||||
{
|
||||
Debug.LogError("Unknown CompositionMethod : " + configuration.compositionMethod);
|
||||
return;
|
||||
}
|
||||
currentComposition.Update(parentObject, mainCamera, configuration, trackingOrigin);
|
||||
}
|
||||
|
||||
public static void Cleanup()
|
||||
{
|
||||
if (currentComposition != null)
|
||||
{
|
||||
currentComposition.Cleanup();
|
||||
currentComposition = null;
|
||||
}
|
||||
if (OVRPlugin.IsMixedRealityInitialized())
|
||||
{
|
||||
OVRPlugin.ShutdownMixedReality();
|
||||
}
|
||||
}
|
||||
|
||||
public static void RecenterPose()
|
||||
{
|
||||
if (currentComposition != null)
|
||||
{
|
||||
currentComposition.RecenterPose();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
12
Assets/Oculus/VR/Scripts/OVRMixedReality.cs.meta
Normal file
12
Assets/Oculus/VR/Scripts/OVRMixedReality.cs.meta
Normal file
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 5daf6258e951ab84bb8b3e3b03386396
|
||||
timeCreated: 1497574405
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
29
Assets/Oculus/VR/Scripts/OVROnCompleteListener.cs
Normal file
29
Assets/Oculus/VR/Scripts/OVROnCompleteListener.cs
Normal file
@@ -0,0 +1,29 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using UnityEngine;
|
||||
using System.Collections;
|
||||
|
||||
#if UNITY_ANDROID && !UNITY_EDITOR
|
||||
|
||||
public abstract class OVROnCompleteListener : AndroidJavaProxy
|
||||
{
|
||||
public OVROnCompleteListener() : base("com.oculus.svclib.OnCompleteListener")
|
||||
{
|
||||
}
|
||||
|
||||
public abstract void onSuccess();
|
||||
|
||||
public abstract void onFailure();
|
||||
}
|
||||
|
||||
#endif
|
||||
8
Assets/Oculus/VR/Scripts/OVROnCompleteListener.cs.meta
Normal file
8
Assets/Oculus/VR/Scripts/OVROnCompleteListener.cs.meta
Normal file
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 8730118d7f00f9b47b09be73f7e91d2b
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
1170
Assets/Oculus/VR/Scripts/OVROverlay.cs
Normal file
1170
Assets/Oculus/VR/Scripts/OVROverlay.cs
Normal file
File diff suppressed because it is too large
Load Diff
8
Assets/Oculus/VR/Scripts/OVROverlay.cs.meta
Normal file
8
Assets/Oculus/VR/Scripts/OVROverlay.cs.meta
Normal file
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 4444ce35d262aa648ad0c425a559b931
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
435
Assets/Oculus/VR/Scripts/OVROverlayMeshGenerator.cs
Normal file
435
Assets/Oculus/VR/Scripts/OVROverlayMeshGenerator.cs
Normal file
@@ -0,0 +1,435 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
|
||||
/// <summary>
|
||||
/// When attached to a GameObject with an OVROverlay component, OVROverlayMeshGenerator will use a mesh renderer
|
||||
/// to preview the appearance of the OVROverlay as it would appear as a TimeWarp overlay on a headset.
|
||||
/// </summary>
|
||||
[RequireComponent(typeof(MeshFilter))]
|
||||
[RequireComponent(typeof(MeshRenderer))]
|
||||
[ExecuteInEditMode]
|
||||
public class OVROverlayMeshGenerator : MonoBehaviour {
|
||||
|
||||
private Mesh _Mesh;
|
||||
private List<Vector3> _Verts = new List<Vector3>();
|
||||
private List<Vector2> _UV = new List<Vector2>();
|
||||
private List<int> _Tris = new List<int>();
|
||||
private OVROverlay _Overlay;
|
||||
private MeshFilter _MeshFilter;
|
||||
private MeshCollider _MeshCollider;
|
||||
private MeshRenderer _MeshRenderer;
|
||||
private Transform _CameraRoot;
|
||||
private Transform _Transform;
|
||||
|
||||
private OVROverlay.OverlayShape _LastShape;
|
||||
private Vector3 _LastPosition;
|
||||
private Quaternion _LastRotation;
|
||||
private Vector3 _LastScale;
|
||||
private Rect _LastDestRectLeft;
|
||||
private Rect _LastDestRectRight;
|
||||
private Rect _LastSrcRectLeft;
|
||||
private Texture _LastTexture;
|
||||
|
||||
private bool _Awake = false;
|
||||
|
||||
protected void Awake()
|
||||
{
|
||||
_MeshFilter = GetComponent<MeshFilter>();
|
||||
_MeshCollider = GetComponent<MeshCollider>();
|
||||
_MeshRenderer = GetComponent<MeshRenderer>();
|
||||
|
||||
_Transform = transform;
|
||||
if (Camera.main && Camera.main.transform.parent)
|
||||
{
|
||||
_CameraRoot = Camera.main.transform.parent;
|
||||
}
|
||||
|
||||
_Awake = true;
|
||||
}
|
||||
|
||||
public void SetOverlay(OVROverlay overlay) {
|
||||
_Overlay = overlay;
|
||||
}
|
||||
|
||||
private Rect GetBoundingRect(Rect a, Rect b)
|
||||
{
|
||||
float xMin = Mathf.Min(a.x, b.x);
|
||||
float xMax = Mathf.Max(a.x + a.width, b.x + b.width);
|
||||
float yMin = Mathf.Min(a.y, b.y);
|
||||
float yMax = Mathf.Max(a.y + a.height, b.y + b.height);
|
||||
|
||||
return new Rect(xMin, yMin, xMax - xMin, yMax - yMin);
|
||||
}
|
||||
|
||||
protected void OnEnable() {
|
||||
#if UNITY_EDITOR
|
||||
UnityEditor.EditorApplication.update += Update;
|
||||
#endif
|
||||
}
|
||||
|
||||
protected void OnDisable() {
|
||||
#if UNITY_EDITOR
|
||||
UnityEditor.EditorApplication.update -= Update;
|
||||
#endif
|
||||
}
|
||||
|
||||
private void Update()
|
||||
{
|
||||
if (!Application.isEditor)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
if (!_Awake)
|
||||
{
|
||||
Awake();
|
||||
}
|
||||
|
||||
if (_Overlay)
|
||||
{
|
||||
OVROverlay.OverlayShape shape = _Overlay.currentOverlayShape;
|
||||
Vector3 position = _CameraRoot ? (_Transform.position - _CameraRoot.position) : _Transform.position;
|
||||
Quaternion rotation = _Transform.rotation;
|
||||
Vector3 scale = _Transform.lossyScale;
|
||||
Rect destRectLeft = _Overlay.overrideTextureRectMatrix ? _Overlay.destRectLeft : new Rect(0, 0, 1, 1);
|
||||
Rect destRectRight = _Overlay.overrideTextureRectMatrix ? _Overlay.destRectRight : new Rect(0, 0, 1, 1);
|
||||
Rect srcRectLeft = _Overlay.overrideTextureRectMatrix ? _Overlay.srcRectLeft : new Rect(0, 0, 1, 1);
|
||||
Texture texture = _Overlay.textures[0];
|
||||
|
||||
// Re-generate the mesh if necessary
|
||||
if (_Mesh == null ||
|
||||
_LastShape != shape ||
|
||||
_LastPosition != position ||
|
||||
_LastRotation != rotation ||
|
||||
_LastScale != scale ||
|
||||
_LastDestRectLeft != destRectLeft ||
|
||||
_LastDestRectRight != destRectRight)
|
||||
{
|
||||
UpdateMesh(shape, position, rotation, scale, GetBoundingRect(destRectLeft, destRectRight));
|
||||
_LastShape = shape;
|
||||
_LastPosition = position;
|
||||
_LastRotation = rotation;
|
||||
_LastScale = scale;
|
||||
_LastDestRectLeft = destRectLeft;
|
||||
_LastDestRectRight = destRectRight;
|
||||
}
|
||||
|
||||
// Generate the material and update textures if necessary
|
||||
if (_MeshRenderer.sharedMaterial == null)
|
||||
{
|
||||
Material previewMat = new Material(Shader.Find("Unlit/Transparent"));
|
||||
_MeshRenderer.sharedMaterial = previewMat;
|
||||
}
|
||||
|
||||
if (_MeshRenderer.sharedMaterial.mainTexture != texture && !_Overlay.isExternalSurface)
|
||||
{
|
||||
_MeshRenderer.sharedMaterial.mainTexture = texture;
|
||||
}
|
||||
|
||||
if (_LastSrcRectLeft != srcRectLeft)
|
||||
{
|
||||
_MeshRenderer.sharedMaterial.mainTextureOffset = srcRectLeft.position;
|
||||
_MeshRenderer.sharedMaterial.mainTextureScale = srcRectLeft.size;
|
||||
_LastSrcRectLeft = srcRectLeft;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void UpdateMesh(OVROverlay.OverlayShape shape, Vector3 position, Quaternion rotation, Vector3 scale, Rect rect)
|
||||
{
|
||||
if (_MeshFilter)
|
||||
{
|
||||
if (_Mesh == null)
|
||||
{
|
||||
_Mesh = new Mesh() { name = "Overlay" };
|
||||
_Mesh.hideFlags = HideFlags.DontSaveInBuild | HideFlags.DontSaveInEditor;
|
||||
}
|
||||
_Mesh.Clear();
|
||||
_Verts.Clear();
|
||||
_UV.Clear();
|
||||
_Tris.Clear();
|
||||
|
||||
GenerateMesh(_Verts, _UV, _Tris, shape, position, rotation, scale, rect);
|
||||
|
||||
_Mesh.SetVertices(_Verts);
|
||||
_Mesh.SetUVs(0, _UV);
|
||||
_Mesh.SetTriangles(_Tris, 0);
|
||||
_Mesh.UploadMeshData(false);
|
||||
|
||||
_MeshFilter.sharedMesh = _Mesh;
|
||||
|
||||
if (_MeshCollider)
|
||||
{
|
||||
_MeshCollider.sharedMesh = _Mesh;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static void GenerateMesh(List<Vector3> verts, List<Vector2> uvs, List<int> tris, OVROverlay.OverlayShape shape, Vector3 position, Quaternion rotation, Vector3 scale, Rect rect)
|
||||
{
|
||||
switch (shape)
|
||||
{
|
||||
case OVROverlay.OverlayShape.Equirect:
|
||||
BuildSphere(verts, uvs, tris, position, rotation, scale, rect);
|
||||
break;
|
||||
case OVROverlay.OverlayShape.Cubemap:
|
||||
case OVROverlay.OverlayShape.OffcenterCubemap:
|
||||
BuildCube(verts, uvs, tris, position, rotation, scale);
|
||||
break;
|
||||
case OVROverlay.OverlayShape.Quad:
|
||||
BuildQuad(verts, uvs, tris, rect);
|
||||
break;
|
||||
case OVROverlay.OverlayShape.Cylinder:
|
||||
BuildHemicylinder(verts, uvs, tris, scale, rect);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
private static Vector2 GetSphereUV(float theta, float phi, float expand_coef)
|
||||
{
|
||||
float thetaU = ((theta / (2 * Mathf.PI) - 0.5f) / expand_coef) + 0.5f;
|
||||
float phiV = ((phi / Mathf.PI) / expand_coef) + 0.5f;
|
||||
return new Vector2(thetaU, phiV);
|
||||
}
|
||||
|
||||
private static Vector3 GetSphereVert(float theta, float phi)
|
||||
{
|
||||
return new Vector3(-Mathf.Sin(theta) * Mathf.Cos(phi), Mathf.Sin(phi), -Mathf.Cos(theta) * Mathf.Cos(phi));
|
||||
}
|
||||
|
||||
public static void BuildSphere(List<Vector3> verts, List<Vector2> uv, List<int> triangles, Vector3 position, Quaternion rotation, Vector3 scale, Rect rect, float worldScale = 800, int latitudes = 128, int longitudes = 128, float expand_coef = 1.0f)
|
||||
{
|
||||
position = Quaternion.Inverse(rotation) * position;
|
||||
|
||||
latitudes = Mathf.CeilToInt(latitudes * rect.height);
|
||||
longitudes = Mathf.CeilToInt(longitudes * rect.width);
|
||||
|
||||
float minTheta = Mathf.PI * 2 * ( rect.x);
|
||||
float minPhi = Mathf.PI * (0.5f - rect.y - rect.height);
|
||||
|
||||
float thetaScale = Mathf.PI * 2 * rect.width / longitudes;
|
||||
float phiScale = Mathf.PI * rect.height / latitudes;
|
||||
|
||||
for (int j = 0; j < latitudes + 1; j += 1)
|
||||
{
|
||||
for (int k = 0; k < longitudes + 1; k++)
|
||||
{
|
||||
float theta = minTheta + k * thetaScale;
|
||||
float phi = minPhi + j * phiScale;
|
||||
|
||||
Vector2 suv = GetSphereUV(theta, phi, expand_coef);
|
||||
uv.Add(new Vector2((suv.x - rect.x) / rect.width, (suv.y - rect.y) / rect.height));
|
||||
Vector3 vert = GetSphereVert(theta, phi);
|
||||
vert.x = (worldScale * vert.x - position.x) / scale.x;
|
||||
vert.y = (worldScale * vert.y - position.y) / scale.y;
|
||||
vert.z = (worldScale * vert.z - position.z) / scale.z;
|
||||
verts.Add(vert);
|
||||
}
|
||||
}
|
||||
|
||||
for (int j = 0; j < latitudes; j++)
|
||||
{
|
||||
for (int k = 0; k < longitudes; k++)
|
||||
{
|
||||
triangles.Add((j * (longitudes + 1)) + k);
|
||||
triangles.Add(((j + 1) * (longitudes + 1)) + k);
|
||||
triangles.Add(((j + 1) * (longitudes + 1)) + k + 1);
|
||||
triangles.Add(((j + 1) * (longitudes + 1)) + k + 1);
|
||||
triangles.Add((j * (longitudes + 1)) + k + 1);
|
||||
triangles.Add((j * (longitudes + 1)) + k);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private enum CubeFace
|
||||
{
|
||||
Right,
|
||||
Left,
|
||||
Top,
|
||||
Bottom,
|
||||
Front,
|
||||
Back,
|
||||
COUNT
|
||||
}
|
||||
|
||||
private static readonly Vector3[] BottomLeft = new Vector3[]
|
||||
{
|
||||
new Vector3(-0.5f, -0.5f, -0.5f),
|
||||
new Vector3(0.5f, -0.5f, 0.5f),
|
||||
new Vector3(0.5f, 0.5f, -0.5f),
|
||||
new Vector3(0.5f, -0.5f, 0.5f),
|
||||
new Vector3(0.5f, -0.5f, -0.5f),
|
||||
new Vector3(-0.5f, -0.5f, 0.5f)
|
||||
};
|
||||
|
||||
private static readonly Vector3[] RightVector = new Vector3[]
|
||||
{
|
||||
Vector3.forward,
|
||||
Vector3.back,
|
||||
Vector3.left,
|
||||
Vector3.left,
|
||||
Vector3.left,
|
||||
Vector3.right
|
||||
};
|
||||
|
||||
private static readonly Vector3[] UpVector = new Vector3[]
|
||||
{
|
||||
Vector3.up,
|
||||
Vector3.up,
|
||||
Vector3.forward,
|
||||
Vector3.back,
|
||||
Vector3.up,
|
||||
Vector3.up
|
||||
};
|
||||
|
||||
private static Vector2 GetCubeUV(CubeFace face, Vector2 sideUV, float expand_coef)
|
||||
{
|
||||
sideUV = (sideUV - 0.5f * Vector2.one) / expand_coef + 0.5f * Vector2.one;
|
||||
switch (face)
|
||||
{
|
||||
case CubeFace.Bottom:
|
||||
return new Vector2(sideUV.x / 3, sideUV.y / 2);
|
||||
case CubeFace.Front:
|
||||
return new Vector2((1 + sideUV.x) / 3, sideUV.y / 2);
|
||||
case CubeFace.Back:
|
||||
return new Vector2((2 + sideUV.x) / 3, sideUV.y / 2);
|
||||
case CubeFace.Right:
|
||||
return new Vector2(sideUV.x / 3, (1 + sideUV.y) / 2);
|
||||
case CubeFace.Left:
|
||||
return new Vector2((1 + sideUV.x) / 3, (1 + sideUV.y) / 2);
|
||||
case CubeFace.Top:
|
||||
return new Vector2((2 + sideUV.x) / 3, (1 + sideUV.y) / 2);
|
||||
default:
|
||||
return Vector2.zero;
|
||||
}
|
||||
}
|
||||
|
||||
private static Vector3 GetCubeVert(CubeFace face, Vector2 sideUV, float expand_coef)
|
||||
{
|
||||
return BottomLeft[(int)face] + sideUV.x * RightVector[(int)face] + sideUV.y * UpVector[(int)face];
|
||||
}
|
||||
|
||||
public static void BuildCube(List<Vector3> verts, List<Vector2> uv, List<int> triangles, Vector3 position, Quaternion rotation, Vector3 scale, float worldScale = 800, int subQuads = 1, float expand_coef = 1.01f)
|
||||
{
|
||||
position = Quaternion.Inverse(rotation) * position;
|
||||
|
||||
int vertsPerSide = (subQuads + 1) * (subQuads + 1);
|
||||
|
||||
for (int i = 0; i < (int)CubeFace.COUNT; i++)
|
||||
{
|
||||
for(int j = 0; j < subQuads + 1; j++)
|
||||
{
|
||||
for(int k = 0; k < subQuads + 1; k++)
|
||||
{
|
||||
float u = j / (float)subQuads;
|
||||
float v = k / (float)subQuads;
|
||||
|
||||
uv.Add(GetCubeUV((CubeFace)i, new Vector2(u, v), expand_coef));
|
||||
Vector3 vert = GetCubeVert((CubeFace)i, new Vector2(u, v), expand_coef);
|
||||
vert.x = (worldScale * vert.x - position.x) / scale.x;
|
||||
vert.y = (worldScale * vert.y - position.y) / scale.y;
|
||||
vert.z = (worldScale * vert.z - position.z) / scale.z;
|
||||
verts.Add(vert);
|
||||
}
|
||||
}
|
||||
|
||||
for(int j = 0; j < subQuads; j++)
|
||||
{
|
||||
for(int k = 0; k < subQuads; k++)
|
||||
{
|
||||
triangles.Add(vertsPerSide * i + ((j + 1) * (subQuads + 1)) + k);
|
||||
triangles.Add(vertsPerSide * i + (j * (subQuads + 1)) + k);
|
||||
triangles.Add(vertsPerSide * i + ((j + 1) * (subQuads + 1)) + k + 1);
|
||||
triangles.Add(vertsPerSide * i + ((j + 1) * (subQuads + 1)) + k + 1);
|
||||
triangles.Add(vertsPerSide * i + (j * (subQuads + 1)) + k);
|
||||
triangles.Add(vertsPerSide * i + (j * (subQuads + 1)) + k + 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static void BuildQuad(List<Vector3> verts, List<Vector2> uv, List<int> triangles, Rect rect)
|
||||
{
|
||||
verts.Add(new Vector3(rect.x - 0.5f, (1 - rect.y - rect.height) - 0.5f, 0));
|
||||
verts.Add(new Vector3(rect.x - 0.5f, (1 - rect.y) - 0.5f, 0));
|
||||
verts.Add(new Vector3(rect.x + rect.width - 0.5f, (1 - rect.y) - 0.5f, 0));
|
||||
verts.Add(new Vector3(rect.x + rect.width - 0.5f, (1 - rect.y - rect.height) - 0.5f, 0));
|
||||
|
||||
uv.Add(new Vector2(0, 0));
|
||||
uv.Add(new Vector2(0, 1));
|
||||
uv.Add(new Vector2(1, 1));
|
||||
uv.Add(new Vector2(1, 0));
|
||||
|
||||
triangles.Add(0);
|
||||
triangles.Add(1);
|
||||
triangles.Add(2);
|
||||
triangles.Add(2);
|
||||
triangles.Add(3);
|
||||
triangles.Add(0);
|
||||
}
|
||||
|
||||
public static void BuildHemicylinder(List<Vector3> verts, List<Vector2> uv, List<int> triangles, Vector3 scale, Rect rect, int longitudes = 128)
|
||||
{
|
||||
float height = Mathf.Abs(scale.y) * rect.height;
|
||||
float radius = scale.z;
|
||||
float arcLength = scale.x * rect.width;
|
||||
|
||||
float arcAngle = arcLength / radius;
|
||||
float minAngle = scale.x * (-0.5f + rect.x) / radius;
|
||||
|
||||
int columns = Mathf.CeilToInt(longitudes * arcAngle / (2 * Mathf.PI));
|
||||
|
||||
// we don't want super tall skinny triangles because that can lead to artifacting.
|
||||
// make triangles no more than 2x taller than wide
|
||||
|
||||
float triangleWidth = arcLength / columns;
|
||||
float ratio = height / triangleWidth;
|
||||
|
||||
int rows = Mathf.CeilToInt(ratio / 2);
|
||||
|
||||
for (int j = 0; j < rows + 1; j += 1)
|
||||
{
|
||||
for (int k = 0; k < columns + 1; k++)
|
||||
{
|
||||
uv.Add(new Vector2((k / (float)columns), 1 - (j / (float)rows)));
|
||||
|
||||
Vector3 vert = Vector3.zero;
|
||||
// because the scale is used to control the parameters, we need
|
||||
// to reverse multiply by scale to appear correctly
|
||||
vert.x = (Mathf.Sin(minAngle + (k * arcAngle / columns)) * radius) / scale.x;
|
||||
|
||||
vert.y = (0.5f - rect.y - rect.height + rect.height * (1 - j / (float)rows));
|
||||
vert.z = (Mathf.Cos(minAngle + (k * arcAngle / columns)) * radius) / scale.z;
|
||||
verts.Add(vert);
|
||||
}
|
||||
}
|
||||
|
||||
for (int j = 0; j < rows; j++)
|
||||
{
|
||||
for (int k = 0; k < columns; k++)
|
||||
{
|
||||
triangles.Add((j * (columns + 1)) + k);
|
||||
triangles.Add(((j + 1) * (columns + 1)) + k + 1);
|
||||
triangles.Add(((j + 1) * (columns + 1)) + k);
|
||||
triangles.Add(((j + 1) * (columns + 1)) + k + 1);
|
||||
triangles.Add((j * (columns + 1)) + k);
|
||||
triangles.Add((j * (columns + 1)) + k + 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
11
Assets/Oculus/VR/Scripts/OVROverlayMeshGenerator.cs.meta
Normal file
11
Assets/Oculus/VR/Scripts/OVROverlayMeshGenerator.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 302cd5de63ddc99458f67a786f625bad
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
848
Assets/Oculus/VR/Scripts/OVRPassthroughLayer.cs
Normal file
848
Assets/Oculus/VR/Scripts/OVRPassthroughLayer.cs
Normal file
@@ -0,0 +1,848 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Runtime.InteropServices;
|
||||
using UnityEngine;
|
||||
|
||||
using ColorMapType = OVRPlugin.InsightPassthroughColorMapType;
|
||||
|
||||
/// <summary>
|
||||
/// A layer used for passthrough.
|
||||
/// </summary>
|
||||
public class OVRPassthroughLayer : MonoBehaviour
|
||||
{
|
||||
#region Public Interface
|
||||
|
||||
/// <summary>
|
||||
/// The passthrough projection surface type: reconstructed | user defined.
|
||||
/// </summary>
|
||||
public enum ProjectionSurfaceType
|
||||
{
|
||||
Reconstructed, ///< Reconstructed surface type will render passthrough using automatic environment depth reconstruction
|
||||
UserDefined ///< UserDefined allows you to define a surface
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// The type of the surface which passthrough textures are projected on: Automatic reconstruction or user-defined geometry.
|
||||
/// This field can only be modified immediately after the component is instantiated (e.g. using `AddComponent`).
|
||||
/// Once the backing layer has been created, changes won't be reflected unless the layer is disabled and enabled again.
|
||||
/// Default is automatic reconstruction.
|
||||
/// </summary>
|
||||
public ProjectionSurfaceType projectionSurfaceType = ProjectionSurfaceType.Reconstructed;
|
||||
|
||||
/// <summary>
|
||||
/// Overlay type that defines the placement of the passthrough layer to appear on top as an overlay or beneath as an underlay of the application’s main projection layer. By default, the passthrough layer appears as an overlay.
|
||||
/// </summary>
|
||||
public OVROverlay.OverlayType overlayType = OVROverlay.OverlayType.Overlay;
|
||||
|
||||
/// <summary>
|
||||
/// The compositionDepth defines the order of the layers in composition. The layer with smaller compositionDepth would be composited in the front of the layer with larger compositionDepth. The default value is zero.
|
||||
|
||||
/// </summary>
|
||||
public int compositionDepth = 0;
|
||||
|
||||
/// <summary>
|
||||
/// Property that can hide layers when required. Should be false when present, true when hidden. By default, the value is set to false, which means the layers are present.
|
||||
|
||||
/// </summary>
|
||||
public bool hidden = false;
|
||||
|
||||
/// <summary>
|
||||
/// Specify whether `colorScale` and `colorOffset` should be applied to this layer. By default, the color scale and offset are not applied to the layer.
|
||||
/// </summary>
|
||||
public bool overridePerLayerColorScaleAndOffset = false;
|
||||
|
||||
/// <summary>
|
||||
/// Color scale is a factor applied to the pixel color values during compositing.
|
||||
/// The four components of the vector correspond to the R, G, B, and A values, default set to `{1,1,1,1}`.
|
||||
|
||||
/// </summary>
|
||||
public Vector4 colorScale = Vector4.one;
|
||||
|
||||
/// <summary>
|
||||
/// Color offset is a value which gets added to the pixel color values during compositing.
|
||||
/// The four components of the vector correspond to the R, G, B, and A values, default set to `{0,0,0,0}`.
|
||||
/// </summary>
|
||||
public Vector4 colorOffset = Vector4.zero;
|
||||
|
||||
/// <summary>
|
||||
/// Add a GameObject to the Insight Passthrough projection surface. This is only applicable
|
||||
/// if the projection surface type is `UserDefined`.
|
||||
/// When `updateTransform` parameter is set to `true`, OVRPassthroughLayer will update the transform
|
||||
/// of the surface mesh every frame. Otherwise only the initial transform is recorded.
|
||||
/// </summary>
|
||||
/// <param name="obj">The Gameobject you want to add to the Insight Passthrough projection surface.</param>
|
||||
/// <param name="updateTransform">Indicate if the transform should be updated every frame</param>
|
||||
public void AddSurfaceGeometry(GameObject obj, bool updateTransform = false)
|
||||
{
|
||||
if (projectionSurfaceType != ProjectionSurfaceType.UserDefined)
|
||||
{
|
||||
Debug.LogError("Passthrough layer is not configured for surface projected passthrough.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (surfaceGameObjects.ContainsKey(obj))
|
||||
{
|
||||
Debug.LogError("Specified GameObject has already been added as passthrough surface.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (obj.GetComponent<MeshFilter>() == null)
|
||||
{
|
||||
Debug.LogError("Specified GameObject does not have a mesh component.");
|
||||
return;
|
||||
}
|
||||
|
||||
// Mesh and instance can't be created immediately, because the compositor layer may not have been initialized yet (layerId = 0).
|
||||
// Queue creation and attempt to do it in the update loop.
|
||||
deferredSurfaceGameObjects.Add(
|
||||
new DeferredPassthroughMeshAddition
|
||||
{
|
||||
gameObject = obj,
|
||||
updateTransform = updateTransform
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
/// Removes a GameObject that was previously added using `AddSurfaceGeometry` from the projection surface.
/// </summary>
/// <param name="obj">The GameObject to remove.</param>
public void RemoveSurfaceGeometry(GameObject obj)
{
	PassthroughMeshInstance meshInstance;
	if (!surfaceGameObjects.TryGetValue(obj, out meshInstance))
	{
		// Not active yet: the object may still be waiting in the deferred-addition queue.
		if (deferredSurfaceGameObjects.RemoveAll(x => x.gameObject == obj) == 0)
		{
			Debug.LogError("Specified GameObject has not been added as passthrough surface.");
		}
		return;
	}

	// Release both native handles; only forget the object if both succeed.
	bool destroyed =
		OVRPlugin.DestroyInsightPassthroughGeometryInstance(meshInstance.instanceHandle) &&
		OVRPlugin.DestroyInsightTriangleMesh(meshInstance.meshHandle);
	if (destroyed)
	{
		surfaceGameObjects.Remove(obj);
	}
	else
	{
		Debug.LogError("GameObject could not be removed from passthrough surface.");
	}
}
|
||||
|
||||
/// <summary>
/// Returns whether the given GameObject is currently registered as a projection surface
/// (added via `AddSurfaceGeometry`), either active or still pending deferred addition.
/// </summary>
/// <returns> True if the gameobject is a surface geometry. </returns>
public bool IsSurfaceGeometry(GameObject obj)
{
	if (surfaceGameObjects.ContainsKey(obj))
	{
		return true;
	}
	return deferredSurfaceGameObjects.Exists(x => x.gameObject == obj);
}
|
||||
|
||||
/// <summary>
/// Float that defines the passthrough texture opacity.
/// </summary>
public float textureOpacity
{
	get { return textureOpacity_; }
	set
	{
		if (textureOpacity_ == value)
			return;
		textureOpacity_ = value;
		// Buffered style values are committed to the Passthrough API in LateUpdate().
		styleDirty = true;
	}
}
|
||||
|
||||
/// <summary>
/// Enable or disable the Edge rendering.
/// Use this flag to enable or disable the edge rendering but retain the previously selected color (incl. alpha)
/// in the UI when it is disabled.
/// </summary>
public bool edgeRenderingEnabled
{
	get { return edgeRenderingEnabled_; }
	set
	{
		if (edgeRenderingEnabled_ == value)
			return;
		edgeRenderingEnabled_ = value;
		// Buffered style values are committed to the Passthrough API in LateUpdate().
		styleDirty = true;
	}
}
|
||||
|
||||
/// <summary>
/// Color for the edge rendering.
/// </summary>
public Color edgeColor
{
	get { return edgeColor_; }
	set
	{
		if (edgeColor_ == value)
			return;
		edgeColor_ = value;
		// Buffered style values are committed to the Passthrough API in LateUpdate().
		styleDirty = true;
	}
}
|
||||
|
||||
/// <summary>
/// This color map method allows to recolor the grayscale camera images by specifying a color lookup table.
/// Scripts should call the designated methods to set a color map. The fields and properties
/// are only intended for the inspector UI.
/// </summary>
/// <param name="values">The color map as an array of 256 color values to map each grayscale input to a color.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="values"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown if <paramref name="values"/> does not hold exactly 256 entries.</exception>
public void SetColorMap(Color[] values)
{
	// Fail fast with a descriptive exception instead of a NullReferenceException on values.Length.
	if (values == null)
		throw new ArgumentNullException(nameof(values));
	if (values.Length != 256)
		throw new ArgumentException("Must provide exactly 256 colors");

	colorMapType = ColorMapType.MonoToRgba;
	// Mark the editor selection as custom so inspector controls don't overwrite this map.
	colorMapEditorType = ColorMapEditorType.Custom;
	AllocateColorMapData();
	for (int i = 0; i < 256; i++)
	{
		WriteColorToColorMap(i, ref values[i]);
	}

	// Buffered style values are committed to the Passthrough API in LateUpdate().
	styleDirty = true;
}
|
||||
|
||||
/// <summary>
/// This method allows to generate a color map from a set of color controls. Contrast, brightness and posterization is
/// applied to the grayscale passthrough value, which is finally mapped to a color according to
/// the provided gradient. The gradient can be null, in which case no colorization takes place.
/// </summary>
/// <param name="contrast">The contrast value. Range from -1 (minimum) to 1 (maximum). </param>
/// <param name="brightness">The brightness value. Range from 0 (minimum) to 1 (maximum). </param>
/// <param name="posterize">The posterize value. Range from 0 to 1, where 0 = no posterization (no effect), 1 = reduce to two colors. </param>
/// <param name="gradient">The gradient will be evaluated from 0 (no intensity) to 1 (maximum intensity). </param>
public void SetColorMapControls(float contrast, float brightness = 0.0f, float posterize = 0.0f, Gradient gradient = null)
{
	colorMapEditorType = ColorMapEditorType.Controls;
	colorMapEditorContrast = contrast;
	colorMapEditorBrightness = brightness;
	colorMapEditorPosterize = posterize;

	if (gradient != null)
	{
		colorMapEditorGradient = gradient;
		return;
	}

	// No gradient provided: fall back to the neutral (black-to-white) gradient. Leave the
	// current gradient untouched if it is already neutral to avoid unnecessary memory allocations.
	if (!colorMapEditorGradient.Equals(colorMapNeutralGradient))
	{
		colorMapEditorGradient = CreateNeutralColorMapGradient();
	}
}
|
||||
|
||||
/// <summary>
/// This method allows to specify the color map as an array of 256 8-bit intensity values.
/// Use this to map each grayscale input value to a grayscale output value.
/// </summary>
/// <param name="values">Array of 256 8-bit values.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="values"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown if <paramref name="values"/> does not hold exactly 256 entries.</exception>
public void SetColorMapMonochromatic(byte[] values)
{
	// Fail fast with a descriptive exception instead of a NullReferenceException on values.Length.
	if (values == null)
		throw new ArgumentNullException(nameof(values));
	if (values.Length != 256)
		throw new ArgumentException("Must provide exactly 256 values");

	colorMapType = ColorMapType.MonoToMono;
	// Mark the editor selection as custom so inspector controls don't overwrite this map.
	colorMapEditorType = ColorMapEditorType.Custom;
	AllocateColorMapData();
	// The mono map occupies only the first 256 bytes of the pinned buffer.
	Buffer.BlockCopy(values, 0, colorMapData, 0, 256);

	// Buffered style values are committed to the Passthrough API in LateUpdate().
	styleDirty = true;
}
|
||||
|
||||
/// <summary>
/// Disables color mapping. Use this to remove any effects.
/// </summary>
public void DisableColorMap()
{
	// The property setter clears colorMapType, frees the map data and flags a style update.
	colorMapEditorType = ColorMapEditorType.None;
}
|
||||
|
||||
#endregion
|
||||
|
||||
|
||||
#region Editor Interface
|
||||
/// <summary>
/// Unity editor enumerator to provide a dropdown in the inspector.
/// Selecting a value drives `colorMapEditorType`, which syncs `colorMapType`/`colorMapData`.
/// </summary>
public enum ColorMapEditorType
{
	None, ///< Will clear the colormap
	Controls, ///< Will update the colormap from the inspector controls
	Custom ///< Will not update the colormap
}
|
||||
|
||||
[SerializeField]
private ColorMapEditorType colorMapEditorType_ = ColorMapEditorType.None;

/// <summary>
/// Editor attribute to get or set the selection in the inspector.
/// Using this selection will update the `colorMapType` and `colorMapData` if needed.
/// </summary>
public ColorMapEditorType colorMapEditorType
{
	get { return colorMapEditorType_; }
	set
	{
		if (colorMapEditorType_ == value)
			return;

		colorMapEditorType_ = value;

		// Bring colorMapType and colorMapData in sync with the new editor selection.
		switch (value)
		{
			case ColorMapEditorType.None:
				colorMapType = ColorMapType.None;
				DeallocateColorMapData();
				styleDirty = true;
				break;
			case ColorMapEditorType.Controls:
				colorMapType = ColorMapType.MonoToRgba;
				UpdateColorMapFromControls(true);
				break;
			case ColorMapEditorType.Custom:
				// Nothing to do: map data is supplied via the SetColorMap*() methods.
				break;
		}
	}
}
|
||||
|
||||
/// <summary>
/// This field is not intended for public scripting. Use `SetColorMapControls()` instead.
/// </summary>
public Gradient colorMapEditorGradient = CreateNeutralColorMapGradient();

// Keep a private copy of the gradient value. Every frame, it is compared against the public one in UpdateColorMapFromControls() and updated if necessary.
private Gradient colorMapEditorGradientOld = new Gradient();

/// <summary>
/// This field is not intended for public scripting. Use `SetColorMapControls()` instead.
/// </summary>
public float colorMapEditorContrast;
// Keep a private copy of the contrast value. Every frame, it is compared against the public one in UpdateColorMapFromControls() and updated if necessary.
private float colorMapEditorContrast_ = 0;

/// <summary>
/// This field is not intended for public scripting. Use `SetColorMapControls()` instead.
/// </summary>
public float colorMapEditorBrightness;
// Keep a private copy of the brightness value. Every frame, it is compared against the public one in UpdateColorMapFromControls() and updated if necessary.
private float colorMapEditorBrightness_ = 0;

/// <summary>
/// This field is not intended for public scripting. Use `SetColorMapControls()` instead.
/// </summary>
public float colorMapEditorPosterize;
// Keep a private copy of the posterize value. Every frame, it is compared against the public one in UpdateColorMapFromControls() and updated if necessary.
private float colorMapEditorPosterize_ = 0;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Internal Methods
|
||||
private void AddDeferredSurfaceGeometries()
{
	// Walk the deferred queue in order and try to create native resources for each entry.
	// Entries are dropped once tracked in surfaceGameObjects; failed entries stay queued for a retry.
	int index = 0;
	while (index < deferredSurfaceGameObjects.Count)
	{
		var entry = deferredSurfaceGameObjects[index];
		bool tracked = surfaceGameObjects.ContainsKey(entry.gameObject);
		if (!tracked)
		{
			ulong meshHandle;
			ulong instanceHandle;
			if (CreateAndAddMesh(entry.gameObject, out meshHandle, out instanceHandle))
			{
				surfaceGameObjects.Add(entry.gameObject, new PassthroughMeshInstance
				{
					meshHandle = meshHandle,
					instanceHandle = instanceHandle,
					updateTransform = entry.updateTransform
				});
				tracked = true;
			}
			else
			{
				Debug.LogWarning("Failed to create internal resources for GameObject added to passthrough surface.");
			}
		}

		if (tracked)
		{
			// Handed over to surfaceGameObjects; removing shifts the next entry down to `index`.
			deferredSurfaceGameObjects.RemoveAt(index);
		}
		else
		{
			++index;
		}
	}
}
|
||||
|
||||
private Matrix4x4 GetTransformMatrixForPassthroughSurfaceObject(GameObject obj)
{
	// Lazily look up the camera rig once; it supplies the tracking-space transform.
	if (!cameraRigInitialized)
	{
		cameraRig = OVRManager.instance.GetComponentInParent<OVRCameraRig>();
		cameraRigInitialized = true;
	}

	Matrix4x4 worldFromObj = obj.transform.localToWorldMatrix;
	Matrix4x4 trackingSpaceFromWorld = Matrix4x4.identity;
	if (cameraRig != null)
	{
		trackingSpaceFromWorld = cameraRig.trackingSpace.worldToLocalMatrix;
	}

	// Reverse the z-axis to convert from Unity's left-handed coordinate system to the
	// right-handed one used by OpenGL / the Passthrough API.
	Matrix4x4 rightHandedFromLeftHanded = Matrix4x4.Scale(new Vector3(1, 1, -1));
	return rightHandedFromLeftHanded * trackingSpaceFromWorld * worldFromObj;
}
|
||||
|
||||
// Creates the native triangle mesh and geometry instance for `obj`.
// Returns false (with zeroed handles) when the object has no mesh or a native call fails.
private bool CreateAndAddMesh(
	GameObject obj,
	out ulong meshHandle,
	out ulong instanceHandle)
{
	Debug.Assert(passthroughOverlay != null);
	Debug.Assert(passthroughOverlay.layerId > 0);
	meshHandle = 0;
	instanceHandle = 0;

	MeshFilter meshFilter = obj.GetComponent<MeshFilter>();
	if (meshFilter == null)
	{
		Debug.LogError("Passthrough surface GameObject does not have a mesh component.");
		return false;
	}

	// TODO: evaluate using GetNativeVertexBufferPtr() instead to avoid copy
	Mesh sharedMesh = meshFilter.sharedMesh;
	Vector3[] vertexData = sharedMesh.vertices;
	int[] indexData = sharedMesh.triangles;
	Matrix4x4 T_worldInsight_model = GetTransformMatrixForPassthroughSurfaceObject(obj);

	if (!OVRPlugin.CreateInsightTriangleMesh(passthroughOverlay.layerId, vertexData, indexData, out meshHandle))
	{
		Debug.LogWarning("Failed to create triangle mesh handle.");
		return false;
	}

	if (!OVRPlugin.AddInsightPassthroughSurfaceGeometry(passthroughOverlay.layerId, meshHandle, T_worldInsight_model, out instanceHandle))
	{
		Debug.LogWarning("Failed to add mesh to passthrough surface.");
		return false;
	}

	return true;
}
|
||||
|
||||
// Destroys every active native surface geometry. With `addBackToDeferredQueue`, the objects
// are re-queued as deferred additions so an enable cycle can restore them.
private void DestroySurfaceGeometries(bool addBackToDeferredQueue = false)
{
	foreach (var entry in surfaceGameObjects)
	{
		PassthroughMeshInstance meshInstance = entry.Value;
		if (meshInstance.meshHandle == 0)
		{
			continue;
		}

		OVRPlugin.DestroyInsightPassthroughGeometryInstance(meshInstance.instanceHandle);
		OVRPlugin.DestroyInsightTriangleMesh(meshInstance.meshHandle);

		// When DestroySurfaceGeometries is called from OnDisable, we want to keep track of the existing
		// surface geometries so we can add them back when the script gets enabled again. We simply reinsert
		// them into deferredSurfaceGameObjects for that purpose.
		if (addBackToDeferredQueue)
		{
			deferredSurfaceGameObjects.Add(
				new DeferredPassthroughMeshAddition
				{
					gameObject = entry.Key,
					updateTransform = meshInstance.updateTransform
				});
		}
	}
	surfaceGameObjects.Clear();
}
|
||||
|
||||
// Pushes fresh transforms to the native side for all instances that opted into transform tracking.
private void UpdateSurfaceGeometryTransforms()
{
	foreach (var entry in surfaceGameObjects)
	{
		PassthroughMeshInstance meshInstance = entry.Value;
		if (!meshInstance.updateTransform || meshInstance.instanceHandle == 0)
		{
			continue;
		}

		Matrix4x4 T_worldInsight_model = GetTransformMatrixForPassthroughSurfaceObject(entry.Key);
		bool updated = OVRPlugin.UpdateInsightPassthroughGeometryTransform(
			meshInstance.instanceHandle,
			T_worldInsight_model);
		if (!updated)
		{
			Debug.LogWarning("Failed to update a transform of a passthrough surface");
		}
	}
}
|
||||
|
||||
// Allocates and pins the color map buffer on demand; no-op when already allocated.
private void AllocateColorMapData()
{
	if (colorMapData != null)
	{
		return;
	}

	// 4096 bytes = 256 entries * 4 channels * sizeof(float), large enough for every map type.
	colorMapData = new byte[4096];
	if (colorMapDataHandle.IsAllocated)
	{
		Debug.LogWarning("Passthrough color map data handle is not expected to be allocated at time of buffer allocation");
	}
	// Pin the buffer so its address can be handed to the native Passthrough API.
	colorMapDataHandle = GCHandle.Alloc(colorMapData, GCHandleType.Pinned);
}
|
||||
|
||||
// Ensure that Passthrough color map data is unpinned and freed.
private void DeallocateColorMapData()
{
	if (colorMapData == null)
	{
		return;
	}

	if (colorMapDataHandle.IsAllocated)
	{
		// Unpin first so the GC may reclaim the buffer.
		colorMapDataHandle.Free();
	}
	else
	{
		Debug.LogWarning("Passthrough color map data handle is expected to be allocated at time of buffer deallocation");
	}
	colorMapData = null;
}
|
||||
|
||||
// Returns a gradient from black to white.
private static Gradient CreateNeutralColorMapGradient()
{
	var colorKeys = new GradientColorKey[2]
	{
		new GradientColorKey(new Color(0, 0, 0), 0),
		new GradientColorKey(new Color(1, 1, 1), 1)
	};
	var alphaKeys = new GradientAlphaKey[2]
	{
		new GradientAlphaKey(1, 0),
		new GradientAlphaKey(1, 1)
	};
	return new Gradient() { colorKeys = colorKeys, alphaKeys = alphaKeys };
}
|
||||
|
||||
// Regenerates the 256-entry color map from the inspector controls (contrast, brightness,
// posterize, gradient). Recomputes only when a control changed since the last call, or when
// `forceUpdate` is set. No-op unless the editor type is `Controls`.
private void UpdateColorMapFromControls(bool forceUpdate = false)
{
	if (colorMapEditorType != ColorMapEditorType.Controls)
		return;

	AllocateColorMapData();

	if (forceUpdate ||
		!colorMapEditorGradient.Equals(colorMapEditorGradientOld) ||
		colorMapEditorContrast_ != colorMapEditorContrast ||
		colorMapEditorBrightness_ != colorMapEditorBrightness ||
		colorMapEditorPosterize_ != colorMapEditorPosterize)
	{
		// Snapshot the current control values for next frame's change detection.
		colorMapEditorGradientOld.CopyFrom(colorMapEditorGradient);
		colorMapEditorContrast_ = colorMapEditorContrast;
		colorMapEditorBrightness_ = colorMapEditorBrightness;
		colorMapEditorPosterize_ = colorMapEditorPosterize;

		// Hoist loop-invariant factors out of the 256-iteration loop.
		// (Also removed a redundant second AllocateColorMapData() call that used to sit here.)
		double contrastFactor = colorMapEditorContrast + 1; // UI runs from -1 to 1
		double posterizeStep = 0.0;
		if (colorMapEditorPosterize > 0.0f)
		{
			// The posterization slider feels more useful if the progression is exponential. The function is empirically tuned.
			const double posterizationBase = 50.0;
			posterizeStep = (Math.Pow(posterizationBase, colorMapEditorPosterize) - 1.0) / (posterizationBase - 1.0);
		}

		// Populate colorMapData
		for (int i = 0; i < 256; i++)
		{
			// Apply contrast, brightness and posterization on the grayscale value
			double value = (double)i / 255.0;
			// Contrast and brightness
			value = (value - 0.5) * contrastFactor + 0.5 + colorMapEditorBrightness;
			// Posterization: snap to the nearest multiple of the step size
			if (posterizeStep > 0.0)
			{
				value = Math.Round(value / posterizeStep) * posterizeStep;
			}

			// Clamp to [0, 1]
			value = Math.Min(Math.Max(value, 0.0), 1.0);

			// Map the value to a color
			Color color = colorMapEditorGradient.Evaluate((float)value);

			WriteColorToColorMap(i, ref color);
		}

		// Commit the new map to the Passthrough API in the next LateUpdate().
		styleDirty = true;
	}
}
|
||||
|
||||
// Write a single color value to the Passthrough color map at the given position.
private void WriteColorToColorMap(int colorIndex, ref Color color)
{
	// Each map entry is serialized as four 4-byte float channels (16 bytes per entry).
	int baseOffset = colorIndex * 16;
	for (int channel = 0; channel < 4; channel++)
	{
		byte[] channelBytes = BitConverter.GetBytes(color[channel]);
		Buffer.BlockCopy(channelBytes, 0, colorMapData, baseOffset + channel * 4, 4);
	}
}
|
||||
|
||||
// Mirrors this component's settings onto the proxy OVROverlay; also handles shape changes
// triggered by `projectionSurfaceType`.
private void SyncToOverlay()
{
	Debug.Assert(passthroughOverlay != null);

	passthroughOverlay.currentOverlayType = overlayType;
	passthroughOverlay.compositionDepth = compositionDepth;
	passthroughOverlay.hidden = hidden;
	passthroughOverlay.overridePerLayerColorScaleAndOffset = overridePerLayerColorScaleAndOffset;
	passthroughOverlay.colorScale = colorScale;
	passthroughOverlay.colorOffset = colorOffset;

	if (passthroughOverlay.currentOverlayShape == overlayShape)
	{
		return;
	}

	if (passthroughOverlay.layerId > 0)
	{
		Debug.LogWarning("Change to projectionSurfaceType won't take effect until the layer goes through a disable/enable cycle. ");
	}

	if (projectionSurfaceType == ProjectionSurfaceType.Reconstructed)
	{
		// Ensure there are no custom surface geometries when switching to reconstruction passthrough.
		Debug.Log("Removing user defined surface geometries");
		DestroySurfaceGeometries(false);
	}

	passthroughOverlay.currentOverlayShape = overlayShape;
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Internal Fields/Properties
|
||||
// Camera rig used to derive the tracking-space transform; looked up lazily (see
// GetTransformMatrixForPassthroughSurfaceObject) and cached via cameraRigInitialized.
private OVRCameraRig cameraRig;
private bool cameraRigInitialized = false;
// Child GameObject hosting the proxy OVROverlay; created in OnEnable, destroyed in OnDisable.
private GameObject auxGameObject;
private OVROverlay passthroughOverlay;

// Each GameObject requires a MrTriangleMesh and a MrPassthroughGeometryInstance handle.
// The structure also keeps a flag for whether transform updates should be tracked.
private struct PassthroughMeshInstance
{
	public ulong meshHandle;
	public ulong instanceHandle;
	public bool updateTransform;
}

// A structure for tracking a deferred addition of a game object to the projection surface
private struct DeferredPassthroughMeshAddition
{
	public GameObject gameObject;
	public bool updateTransform;
}

// GameObjects which are in use as Insight Passthrough projection surface.
private Dictionary<GameObject, PassthroughMeshInstance> surfaceGameObjects =
	new Dictionary<GameObject, PassthroughMeshInstance>();

// GameObjects which are pending addition to the Insight Passthrough projection surfaces.
private List<DeferredPassthroughMeshAddition> deferredSurfaceGameObjects =
	new List<DeferredPassthroughMeshAddition>();

// Backing field for the `textureOpacity` property.
[SerializeField]
private float textureOpacity_ = 1;

// Backing field for the `edgeRenderingEnabled` property.
[SerializeField]
private bool edgeRenderingEnabled_ = false;

// Backing field for the `edgeColor` property.
[SerializeField]
private Color edgeColor_ = new Color(1, 1, 1, 1);

// Internal fields which store the color map values that will be relayed to the Passthrough API in the next update.
[SerializeField]
private ColorMapType colorMapType = ColorMapType.None;

// Passthrough color map data gets allocated and deallocated on demand.
private byte[] colorMapData = null;

// Passthrough color map data gets pinned in the GC on allocation so it can be passed to the native side safely.
// It remains pinned for its lifecycle to avoid pinning per frame and the resulting memory allocation and GC pressure.
private GCHandle colorMapDataHandle;


// Flag which indicates whether the style values have changed since the last update in the Passthrough API.
// It is set to `true` initially to ensure that the local default values are applied in the Passthrough API.
private bool styleDirty = true;

// Keep a copy of a neutral gradient ready for comparison.
static readonly private Gradient colorMapNeutralGradient = CreateNeutralColorMapGradient();
|
||||
|
||||
// Overlay shape derived from `projectionSurfaceType`.
private OVROverlay.OverlayShape overlayShape
{
	get
	{
		if (projectionSurfaceType == ProjectionSurfaceType.UserDefined)
		{
			return OVROverlay.OverlayShape.SurfaceProjectedPassthrough;
		}
		return OVROverlay.OverlayShape.ReconstructionPassthrough;
	}
}
|
||||
#endregion
|
||||
|
||||
#region Unity Messages
|
||||
|
||||
void Update()
{
	// Keep the proxy overlay's settings in sync with this component every frame.
	SyncToOverlay();
}
|
||||
|
||||
// Performs all work that requires an initialized compositor layer: deferred surface
// geometry creation, transform updates, and the atomic commit of buffered style values.
void LateUpdate()
{
	Debug.Assert(passthroughOverlay != null);

	// This LateUpdate() should be called after passthroughOverlay's LateUpdate() such that the layerId has
	// become available at this point. This is achieved by setting the execution order of this script to a value
	// past the default time (in .meta).

	if (passthroughOverlay.layerId <= 0)
	{
		// Layer not initialized yet
		return;
	}

	if (projectionSurfaceType == ProjectionSurfaceType.UserDefined)
	{
		// Update the poses before adding new items to avoid redundant calls.
		UpdateSurfaceGeometryTransforms();
		// Delayed addition of passthrough surface geometries.
		AddDeferredSurfaceGeometries();
	}

	// Update passthrough color map with gradient if it was changed in the inspector.
	UpdateColorMapFromControls();

	// Passthrough style updates are buffered and committed to the API atomically here.
	if (styleDirty)
	{
		OVRPlugin.InsightPassthroughStyle style;
		style.Flags = OVRPlugin.InsightPassthroughStyleFlags.HasTextureOpacityFactor |
			OVRPlugin.InsightPassthroughStyleFlags.HasEdgeColor |
			OVRPlugin.InsightPassthroughStyleFlags.HasTextureColorMap;

		style.TextureOpacityFactor = textureOpacity;

		// When edge rendering is disabled a fully transparent color is passed
		// (presumably interpreted as "edges off" on the native side — TODO confirm).
		style.EdgeColor = edgeRenderingEnabled ? edgeColor.ToColorf() : new OVRPlugin.Colorf { r = 0, g = 0, b = 0, a = 0 };

		style.TextureColorMapType = colorMapType;
		style.TextureColorMapData = IntPtr.Zero;
		style.TextureColorMapDataSize = 0;

		// A color map type without allocated data is inconsistent; fall back to no map.
		if (style.TextureColorMapType != ColorMapType.None && colorMapData == null)
		{
			Debug.LogError("Color map not allocated");
			style.TextureColorMapType = ColorMapType.None;
		}

		if (style.TextureColorMapType != ColorMapType.None)
		{
			if (!colorMapDataHandle.IsAllocated)
			{
				Debug.LogError("Passthrough color map enabled but data isn't pinned");
			}
			else
			{
				// Pass the pinned buffer's address plus the size appropriate for the map type.
				style.TextureColorMapData = colorMapDataHandle.AddrOfPinnedObject();
				switch (style.TextureColorMapType)
				{
					case ColorMapType.MonoToRgba:
						style.TextureColorMapDataSize = 256 * 4 * 4; // 256 * sizeof(MrColor4f)
						break;
					case ColorMapType.MonoToMono:
						style.TextureColorMapDataSize = 256;
						break;
					default:
						Debug.LogError("Unexpected texture color map type");
						break;
				}
			}
		}

		OVRPlugin.SetInsightPassthroughStyle(passthroughOverlay.layerId, style);

		styleDirty = false;
	}
}
|
||||
|
||||
// Creates the auxiliary overlay object and flags all buffered state for re-application.
void OnEnable()
{
	Debug.Assert(auxGameObject == null);
	Debug.Assert(passthroughOverlay == null);

	// Create auxiliary GameObject which contains the OVROverlay component for the proxy layer (and possibly other
	// auxiliary layers in the future).
	auxGameObject = new GameObject("OVRPassthroughLayer auxiliary GameObject");

	// Auxiliary GameObject must be a child of the current GameObject s.t. it survives if `DontDestroyOnLoad` is
	// called on the current GameObject.
	auxGameObject.transform.parent = this.transform;

	// Add OVROverlay component for the passthrough proxy layer.
	passthroughOverlay = auxGameObject.AddComponent<OVROverlay>();
	passthroughOverlay.currentOverlayShape = overlayShape;
	SyncToOverlay();

	// Surface geometries have been moved to the deferred additions queue in OnDisable() and will be re-added
	// in LateUpdate().

	// Flag style to be re-applied in LateUpdate()
	styleDirty = true;
}
|
||||
|
||||
// Tears down native surface geometries (re-queueing them for a future enable) and destroys
// the auxiliary overlay object.
void OnDisable()
{
	if (OVRManager.loadedXRDevice == OVRManager.XRDevice.Oculus)
	{
		// `true`: remember the geometries so OnEnable()/LateUpdate() can restore them.
		DestroySurfaceGeometries(true);
	}

	if (auxGameObject == null)
	{
		return;
	}

	Debug.Assert(passthroughOverlay != null);
	Destroy(auxGameObject);
	auxGameObject = null;
	passthroughOverlay = null;
}
|
||||
|
||||
void OnDestroy()
{
	// Final teardown: release all native mesh/geometry handles; no re-queueing needed.
	DestroySurfaceGeometries();
}
|
||||
#endregion
|
||||
}
|
||||
11
Assets/Oculus/VR/Scripts/OVRPassthroughLayer.cs.meta
Normal file
11
Assets/Oculus/VR/Scripts/OVRPassthroughLayer.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 555725d48e9051a4bb6b8d45178c2fdd
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 100
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
119
Assets/Oculus/VR/Scripts/OVRPlatformMenu.cs
Normal file
119
Assets/Oculus/VR/Scripts/OVRPlatformMenu.cs
Normal file
@@ -0,0 +1,119 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using UnityEngine;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
|
||||
/// <summary>
|
||||
/// Shows the Oculus platform UI.
|
||||
/// </summary>
|
||||
public class OVRPlatformMenu : MonoBehaviour
{
	/// <summary>
	/// The key code.
	/// </summary>
	// Raw controller button that triggers the menu handling (the back button).
	private OVRInput.RawButton inputCode = OVRInput.RawButton.Back;

	// Behavior options for a short press of the back button.
	public enum eHandler
	{
		ShowConfirmQuit,   // show the platform "confirm quit" dialog
		RetreatOneLevel,   // go back to the previous scene that used OVRPlatformMenu
	};

	public eHandler shortPressHandler = eHandler.ShowConfirmQuit;

	/// <summary>
	/// Callback to handle short press. Returns true if ConfirmQuit menu should be shown.
	/// </summary>
	public System.Func<bool> OnShortPress;
	// Stack of scene names recorded in Awake(); consumed by RetreatOneLevel().
	// Static so it survives across scene loads.
	private static Stack<string> sceneStack = new Stack<string>();

	// Action derived from the current back-button input state.
	enum eBackButtonAction
	{
		NONE,
		SHORT_PRESS
	};

	// Polls the back button and maps its state to an action.
	eBackButtonAction HandleBackButtonState()
	{
		eBackButtonAction action = eBackButtonAction.NONE;

		if (OVRInput.GetDown(inputCode))
		{
			action = eBackButtonAction.SHORT_PRESS;
		}

		return action;
	}

	/// <summary>
	/// Instantiate the cursor timer
	/// </summary>
	void Awake()
	{
		// Default the short-press callback when the "retreat" behavior was selected
		// and the user did not install their own handler.
		if (shortPressHandler == eHandler.RetreatOneLevel && OnShortPress == null)
			OnShortPress = RetreatOneLevel;

		// Without an HMD there is no platform UI to show; disable this component.
		if (!OVRManager.isHmdPresent)
		{
			enabled = false;
			return;
		}

		// Record the active scene so RetreatOneLevel() can navigate back to it later.
		sceneStack.Push(UnityEngine.SceneManagement.SceneManager.GetActiveScene().name);
	}

	/// <summary>
	/// Show the confirm quit menu
	/// </summary>
	void ShowConfirmQuitMenu()
	{
#if UNITY_ANDROID && !UNITY_EDITOR
		Debug.Log("[PlatformUI-ConfirmQuit] Showing @ " + Time.time);
		OVRManager.PlatformUIConfirmQuit();
#endif
	}

	/// <summary>
	/// Sample handler for short press which retreats to the previous scene that used OVRPlatformMenu.
	/// Returns false when a scene load was started (press handled), true otherwise.
	/// </summary>
	private static bool RetreatOneLevel()
	{
		if (sceneStack.Count > 1)
		{
			string parentScene = sceneStack.Pop();
			UnityEngine.SceneManagement.SceneManager.LoadSceneAsync (parentScene);
			return false;
		}

		return true;
	}

	/// <summary>
	/// Tests for long-press and activates global platform menu when detected.
	/// as per the Unity integration doc, the back button responds to "mouse 1" button down/up/etc
	/// </summary>
	void Update()
	{
#if UNITY_ANDROID
		eBackButtonAction action = HandleBackButtonState();
		if (action == eBackButtonAction.SHORT_PRESS)
		{
			// Run the user callback first; show the quit menu unless it handled the press.
			if (OnShortPress == null || OnShortPress())
			{
				ShowConfirmQuitMenu();
			}
		}
#endif
	}
}
|
||||
8
Assets/Oculus/VR/Scripts/OVRPlatformMenu.cs.meta
Normal file
8
Assets/Oculus/VR/Scripts/OVRPlatformMenu.cs.meta
Normal file
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 2afcf575f4a68de4db434c7b7233c451
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
7518
Assets/Oculus/VR/Scripts/OVRPlugin.cs
Normal file
7518
Assets/Oculus/VR/Scripts/OVRPlugin.cs
Normal file
File diff suppressed because it is too large
Load Diff
8
Assets/Oculus/VR/Scripts/OVRPlugin.cs.meta
Normal file
8
Assets/Oculus/VR/Scripts/OVRPlugin.cs.meta
Normal file
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 61abd23f3aff5394ba8027ee380760b8
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
32
Assets/Oculus/VR/Scripts/OVRPointerVisualizer.cs
Normal file
32
Assets/Oculus/VR/Scripts/OVRPointerVisualizer.cs
Normal file
@@ -0,0 +1,32 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using UnityEngine;
|
||||
|
||||
public class OVRPointerVisualizer : MonoBehaviour
{
	[Tooltip("Object which points with Z axis. E.g. CentreEyeAnchor from OVRCameraRig")]
	public Transform rayTransform;
	[Header("Visual Elements")]
	[Tooltip("Line Renderer used to draw selection ray.")]
	public LineRenderer linePointer = null;
	[Tooltip("Visually, how far out should the ray be drawn.")]
	public float rayDrawDistance = 2.5f;

	void Update()
	{
		// Only show the pointer while Touch controllers are the active input device.
		linePointer.enabled = (OVRInput.GetActiveController() == OVRInput.Controller.Touch);

		// Draw the line from the ray origin along its forward axis.
		Ray pointerRay = new Ray(rayTransform.position, rayTransform.forward);
		Vector3 start = pointerRay.origin;
		Vector3 end = start + pointerRay.direction * rayDrawDistance;
		linePointer.SetPosition(0, start);
		linePointer.SetPosition(1, end);
	}
}
|
||||
12
Assets/Oculus/VR/Scripts/OVRPointerVisualizer.cs.meta
Normal file
12
Assets/Oculus/VR/Scripts/OVRPointerVisualizer.cs.meta
Normal file
@@ -0,0 +1,12 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 12283131fa5724f44b343883ae474072
|
||||
timeCreated: 1512454634
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
45
Assets/Oculus/VR/Scripts/OVRProfile.cs
Normal file
45
Assets/Oculus/VR/Scripts/OVRProfile.cs
Normal file
@@ -0,0 +1,45 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using UnityEngine;
|
||||
using System.Collections;
|
||||
using System.Threading;
|
||||
|
||||
/// <summary>
|
||||
/// (Deprecated) Contains information about the user's preferences and body dimensions.
|
||||
/// </summary>
|
||||
public class OVRProfile : Object
|
||||
{
|
||||
[System.Obsolete]
|
||||
public enum State
|
||||
{
|
||||
NOT_TRIGGERED,
|
||||
LOADING,
|
||||
READY,
|
||||
ERROR
|
||||
};
|
||||
|
||||
[System.Obsolete]
|
||||
public string id { get { return "000abc123def"; } }
|
||||
[System.Obsolete]
|
||||
public string userName { get { return "Oculus User"; } }
|
||||
[System.Obsolete]
|
||||
public string locale { get { return "en_US"; } }
|
||||
|
||||
public float ipd { get { return Vector3.Distance (OVRPlugin.GetNodePose (OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render).ToOVRPose ().position, OVRPlugin.GetNodePose (OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render).ToOVRPose ().position); } }
|
||||
public float eyeHeight { get { return OVRPlugin.eyeHeight; } }
|
||||
public float eyeDepth { get { return OVRPlugin.eyeDepth; } }
|
||||
public float neckHeight { get { return eyeHeight - 0.075f; } }
|
||||
|
||||
[System.Obsolete]
|
||||
public State state { get { return State.READY; } }
|
||||
}
|
||||
8
Assets/Oculus/VR/Scripts/OVRProfile.cs.meta
Normal file
8
Assets/Oculus/VR/Scripts/OVRProfile.cs.meta
Normal file
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 08539141953f28e439731aaf7cd5362f
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
58
Assets/Oculus/VR/Scripts/OVRResources.cs
Normal file
58
Assets/Oculus/VR/Scripts/OVRResources.cs
Normal file
@@ -0,0 +1,58 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
|
||||
public class OVRResources : MonoBehaviour
|
||||
{
|
||||
private static AssetBundle resourceBundle;
|
||||
private static List<string> assetNames;
|
||||
|
||||
public static UnityEngine.Object Load(string path)
|
||||
{
|
||||
if (Debug.isDebugBuild)
|
||||
{
|
||||
if(resourceBundle == null)
|
||||
{
|
||||
Debug.Log("[OVRResources] Resource bundle was not loaded successfully");
|
||||
return null;
|
||||
}
|
||||
|
||||
var result = assetNames.Find(s => s.Contains(path.ToLower()));
|
||||
return resourceBundle.LoadAsset(result);
|
||||
}
|
||||
return Resources.Load(path);
|
||||
}
|
||||
public static T Load<T>(string path) where T : UnityEngine.Object
|
||||
{
|
||||
if (Debug.isDebugBuild)
|
||||
{
|
||||
if (resourceBundle == null)
|
||||
{
|
||||
Debug.Log("[OVRResources] Resource bundle was not loaded successfully");
|
||||
return null;
|
||||
}
|
||||
|
||||
var result = assetNames.Find(s => s.Contains(path.ToLower()));
|
||||
return resourceBundle.LoadAsset<T>(result);
|
||||
}
|
||||
return Resources.Load<T>(path);
|
||||
}
|
||||
|
||||
public static void SetResourceBundle(AssetBundle bundle)
|
||||
{
|
||||
resourceBundle = bundle;
|
||||
assetNames = new List<string>();
|
||||
assetNames.AddRange(resourceBundle.GetAllAssetNames());
|
||||
}
|
||||
}
|
||||
11
Assets/Oculus/VR/Scripts/OVRResources.cs.meta
Normal file
11
Assets/Oculus/VR/Scripts/OVRResources.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 17584d04fbb571344a3aa2b6593287c1
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
83
Assets/Oculus/VR/Scripts/OVRRuntimeSettings.cs
Normal file
83
Assets/Oculus/VR/Scripts/OVRRuntimeSettings.cs
Normal file
@@ -0,0 +1,83 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using UnityEditor;
|
||||
|
||||
using System.IO;
|
||||
using System;
|
||||
|
||||
public class OVRRuntimeSettings : ScriptableObject
|
||||
{
|
||||
public OVRManager.ColorSpace colorSpace = OVRManager.ColorSpace.Rift_CV1;
|
||||
|
||||
#if UNITY_EDITOR
|
||||
private static string GetOculusRuntimeSettingsAssetPath()
|
||||
{
|
||||
string resourcesPath = Path.Combine(Application.dataPath, "Resources");
|
||||
if (!Directory.Exists(resourcesPath))
|
||||
{
|
||||
Directory.CreateDirectory(resourcesPath);
|
||||
}
|
||||
|
||||
string settingsAssetPath = Path.GetFullPath(Path.Combine(resourcesPath, "OculusRuntimeSettings.asset"));
|
||||
Uri configUri = new Uri(settingsAssetPath);
|
||||
Uri projectUri = new Uri(Application.dataPath);
|
||||
Uri relativeUri = projectUri.MakeRelativeUri(configUri);
|
||||
|
||||
return relativeUri.ToString();
|
||||
}
|
||||
|
||||
public static void CommitRuntimeSettings(OVRRuntimeSettings runtimeSettings)
|
||||
{
|
||||
string runtimeSettingsAssetPath = GetOculusRuntimeSettingsAssetPath();
|
||||
if (AssetDatabase.GetAssetPath(runtimeSettings) != runtimeSettingsAssetPath)
|
||||
{
|
||||
Debug.LogWarningFormat("The asset path of RuntimeSettings is wrong. Expect {0}, get {1}", runtimeSettingsAssetPath, AssetDatabase.GetAssetPath(runtimeSettings));
|
||||
}
|
||||
EditorUtility.SetDirty(runtimeSettings);
|
||||
}
|
||||
#endif
|
||||
|
||||
public static OVRRuntimeSettings GetRuntimeSettings()
|
||||
{
|
||||
OVRRuntimeSettings settings = null;
|
||||
#if UNITY_EDITOR
|
||||
string oculusRuntimeSettingsAssetPath = GetOculusRuntimeSettingsAssetPath();
|
||||
try
|
||||
{
|
||||
settings = AssetDatabase.LoadAssetAtPath(oculusRuntimeSettingsAssetPath, typeof(OVRRuntimeSettings)) as OVRRuntimeSettings;
|
||||
}
|
||||
catch (System.Exception e)
|
||||
{
|
||||
Debug.LogWarningFormat("Unable to load RuntimeSettings from {0}, error {1}", oculusRuntimeSettingsAssetPath, e.Message);
|
||||
}
|
||||
|
||||
if (settings == null && !BuildPipeline.isBuildingPlayer)
|
||||
{
|
||||
settings = ScriptableObject.CreateInstance<OVRRuntimeSettings>();
|
||||
|
||||
AssetDatabase.CreateAsset(settings, oculusRuntimeSettingsAssetPath);
|
||||
}
|
||||
#else
|
||||
settings = Resources.Load<OVRRuntimeSettings>("OculusRuntimeSettings");
|
||||
if (settings == null)
|
||||
{
|
||||
Debug.LogWarning("Failed to load runtime settings. Using default runtime settings instead.");
|
||||
settings = ScriptableObject.CreateInstance<OVRRuntimeSettings>();
|
||||
}
|
||||
#endif
|
||||
return settings;
|
||||
}
|
||||
}
|
||||
11
Assets/Oculus/VR/Scripts/OVRRuntimeSettings.cs.meta
Normal file
11
Assets/Oculus/VR/Scripts/OVRRuntimeSettings.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 3863570e7e6387a40ae4f323d83291e5
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
277
Assets/Oculus/VR/Scripts/OVRSceneLoader.cs
Normal file
277
Assets/Oculus/VR/Scripts/OVRSceneLoader.cs
Normal file
@@ -0,0 +1,277 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
|
||||
using UnityEngine;
|
||||
using UnityEngine.SceneManagement;
|
||||
using UnityEngine.UI;
|
||||
|
||||
public class OVRSceneLoader : MonoBehaviour
|
||||
{
|
||||
public const string externalStoragePath = "/sdcard/Android/data";
|
||||
public const string sceneLoadDataName = "SceneLoadData.txt";
|
||||
public const string resourceBundleName = "asset_resources";
|
||||
|
||||
public float sceneCheckIntervalSeconds = 1f;
|
||||
public float logCloseTime = 5.0f;
|
||||
|
||||
public Canvas mainCanvas;
|
||||
public Text logTextBox;
|
||||
|
||||
private AsyncOperation loadSceneOperation;
|
||||
private string formattedLogText;
|
||||
|
||||
private float closeLogTimer;
|
||||
private bool closeLogDialogue;
|
||||
|
||||
private bool canvasPosUpdated;
|
||||
|
||||
private struct SceneInfo
|
||||
{
|
||||
public List<string> scenes;
|
||||
public long version;
|
||||
|
||||
public SceneInfo(List<string> sceneList, long currentSceneEpochVersion)
|
||||
{
|
||||
scenes = sceneList;
|
||||
version = currentSceneEpochVersion;
|
||||
}
|
||||
}
|
||||
|
||||
private string scenePath = "";
|
||||
private string sceneLoadDataPath = "";
|
||||
private List<AssetBundle> loadedAssetBundles = new List<AssetBundle>();
|
||||
private SceneInfo currentSceneInfo;
|
||||
|
||||
private void Awake()
|
||||
{
|
||||
// Make it presist across scene to continue checking for changes
|
||||
DontDestroyOnLoad(this.gameObject);
|
||||
}
|
||||
|
||||
void Start()
|
||||
{
|
||||
string applicationPath = Path.Combine(externalStoragePath, Application.identifier);
|
||||
scenePath = Path.Combine(applicationPath, "cache/scenes");
|
||||
sceneLoadDataPath = Path.Combine(scenePath, sceneLoadDataName);
|
||||
|
||||
closeLogDialogue = false;
|
||||
StartCoroutine(DelayCanvasPosUpdate());
|
||||
|
||||
currentSceneInfo = GetSceneInfo();
|
||||
// Check valid scene info has been fetched, and load the scenes
|
||||
if (currentSceneInfo.version != 0 && !string.IsNullOrEmpty(currentSceneInfo.scenes[0]))
|
||||
{
|
||||
LoadScene(currentSceneInfo);
|
||||
}
|
||||
}
|
||||
|
||||
private void LoadScene(SceneInfo sceneInfo)
|
||||
{
|
||||
AssetBundle mainSceneBundle = null;
|
||||
Debug.Log("[OVRSceneLoader] Loading main scene: " + sceneInfo.scenes[0] + " with version " + sceneInfo.version.ToString());
|
||||
|
||||
logTextBox.text += "Target Scene: " + sceneInfo.scenes[0] + "\n";
|
||||
logTextBox.text += "Version: " + sceneInfo.version.ToString() + "\n";
|
||||
|
||||
// Load main scene and dependent additive scenes (if any)
|
||||
Debug.Log("[OVRSceneLoader] Loading scene bundle files.");
|
||||
// Fetch all files under scene cache path, excluding unnecessary files such as scene metadata file
|
||||
string[] bundles = Directory.GetFiles(scenePath, "*_*");
|
||||
logTextBox.text += "Loading " + bundles.Length + " bundle(s) . . . ";
|
||||
string mainSceneBundleFileName = "scene_" + sceneInfo.scenes[0].ToLower();
|
||||
try
|
||||
{
|
||||
foreach (string b in bundles)
|
||||
{
|
||||
var assetBundle = AssetBundle.LoadFromFile(b);
|
||||
if (assetBundle != null)
|
||||
{
|
||||
Debug.Log("[OVRSceneLoader] Loading file bundle: " + assetBundle.name == null ? "null" : assetBundle.name);
|
||||
loadedAssetBundles.Add(assetBundle);
|
||||
}
|
||||
else
|
||||
{
|
||||
Debug.LogError("[OVRSceneLoader] Loading file bundle failed");
|
||||
}
|
||||
|
||||
if (assetBundle.name == mainSceneBundleFileName)
|
||||
{
|
||||
mainSceneBundle = assetBundle;
|
||||
}
|
||||
|
||||
if (assetBundle.name == resourceBundleName)
|
||||
{
|
||||
OVRResources.SetResourceBundle(assetBundle);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch(Exception e)
|
||||
{
|
||||
logTextBox.text += "<color=red>" + e.Message + "</color>";
|
||||
return;
|
||||
}
|
||||
logTextBox.text += "<color=green>DONE\n</color>";
|
||||
|
||||
if (mainSceneBundle != null)
|
||||
{
|
||||
logTextBox.text += "Loading Scene: {0:P0}\n";
|
||||
formattedLogText = logTextBox.text;
|
||||
string[] scenePaths = mainSceneBundle.GetAllScenePaths();
|
||||
string sceneName = Path.GetFileNameWithoutExtension(scenePaths[0]);
|
||||
|
||||
loadSceneOperation = SceneManager.LoadSceneAsync(sceneName);
|
||||
loadSceneOperation.completed += LoadSceneOperation_completed;
|
||||
}
|
||||
else
|
||||
{
|
||||
logTextBox.text += "<color=red>Failed to get main scene bundle.\n</color>";
|
||||
}
|
||||
}
|
||||
|
||||
private void LoadSceneOperation_completed(AsyncOperation obj)
|
||||
{
|
||||
StartCoroutine(onCheckSceneCoroutine());
|
||||
StartCoroutine(DelayCanvasPosUpdate());
|
||||
|
||||
closeLogTimer = 0;
|
||||
closeLogDialogue = true;
|
||||
|
||||
logTextBox.text += "Log closing in {0} seconds.\n";
|
||||
formattedLogText = logTextBox.text;
|
||||
}
|
||||
|
||||
public void Update()
|
||||
{
|
||||
// Display scene load percentage
|
||||
if (loadSceneOperation != null)
|
||||
{
|
||||
if (!loadSceneOperation.isDone)
|
||||
{
|
||||
logTextBox.text = string.Format(formattedLogText, loadSceneOperation.progress + 0.1f);
|
||||
if (loadSceneOperation.progress >= 0.9f)
|
||||
{
|
||||
logTextBox.text = formattedLogText.Replace("{0:P0}", "<color=green>DONE</color>");
|
||||
logTextBox.text += "Transitioning to new scene.\nLoad times will vary depending on scene complexity.\n";
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
UpdateCanvasPosition();
|
||||
|
||||
// Wait a certain time before closing the log dialogue after the scene has transitioned
|
||||
if (closeLogDialogue)
|
||||
{
|
||||
if (closeLogTimer < logCloseTime)
|
||||
{
|
||||
closeLogTimer += Time.deltaTime;
|
||||
logTextBox.text = string.Format(formattedLogText, (int)(logCloseTime - closeLogTimer));
|
||||
}
|
||||
else
|
||||
{
|
||||
mainCanvas.gameObject.SetActive(false);
|
||||
closeLogDialogue = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void UpdateCanvasPosition()
|
||||
{
|
||||
// Update canvas camera reference and position if the main camera has changed
|
||||
if (mainCanvas.worldCamera != Camera.main)
|
||||
{
|
||||
mainCanvas.worldCamera = Camera.main;
|
||||
if (Camera.main != null)
|
||||
{
|
||||
Vector3 newPosition = Camera.main.transform.position + Camera.main.transform.forward * 0.3f;
|
||||
gameObject.transform.position = newPosition;
|
||||
gameObject.transform.rotation = Camera.main.transform.rotation;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private SceneInfo GetSceneInfo()
|
||||
{
|
||||
SceneInfo sceneInfo = new SceneInfo();
|
||||
try
|
||||
{
|
||||
StreamReader reader = new StreamReader(sceneLoadDataPath);
|
||||
sceneInfo.version = System.Convert.ToInt64(reader.ReadLine());
|
||||
List<string> sceneList = new List<string>();
|
||||
while (!reader.EndOfStream)
|
||||
{
|
||||
sceneList.Add(reader.ReadLine());
|
||||
}
|
||||
sceneInfo.scenes = sceneList;
|
||||
}
|
||||
catch
|
||||
{
|
||||
logTextBox.text += "<color=red>Failed to get scene info data.\n</color>";
|
||||
}
|
||||
return sceneInfo;
|
||||
}
|
||||
|
||||
// Update canvas position after a slight delay to get accurate headset position after scene transitions
|
||||
IEnumerator DelayCanvasPosUpdate()
|
||||
{
|
||||
yield return new WaitForSeconds(0.1f);
|
||||
UpdateCanvasPosition();
|
||||
}
|
||||
|
||||
IEnumerator onCheckSceneCoroutine()
|
||||
{
|
||||
SceneInfo newSceneInfo;
|
||||
while (true)
|
||||
{
|
||||
newSceneInfo = GetSceneInfo();
|
||||
if (newSceneInfo.version != currentSceneInfo.version)
|
||||
{
|
||||
Debug.Log("[OVRSceneLoader] Scene change detected.");
|
||||
|
||||
// Unload all asset bundles
|
||||
foreach (var b in loadedAssetBundles)
|
||||
{
|
||||
if (b != null)
|
||||
{
|
||||
b.Unload(true);
|
||||
}
|
||||
}
|
||||
loadedAssetBundles.Clear();
|
||||
|
||||
// Unload all scenes in the hierarchy including main scene and
|
||||
// its dependent additive scenes.
|
||||
int activeScenes = SceneManager.sceneCount;
|
||||
for (int i = 0; i < activeScenes; i++)
|
||||
{
|
||||
SceneManager.UnloadSceneAsync(SceneManager.GetSceneAt(i));
|
||||
}
|
||||
DestroyAllGameObjects();
|
||||
SceneManager.LoadSceneAsync("OVRTransitionScene");
|
||||
break;
|
||||
}
|
||||
yield return new WaitForSeconds(sceneCheckIntervalSeconds);
|
||||
}
|
||||
}
|
||||
|
||||
void DestroyAllGameObjects()
|
||||
{
|
||||
foreach (GameObject go in Resources.FindObjectsOfTypeAll(typeof(GameObject)) as GameObject[])
|
||||
{
|
||||
Destroy(go);
|
||||
}
|
||||
}
|
||||
}
|
||||
11
Assets/Oculus/VR/Scripts/OVRSceneLoader.cs.meta
Normal file
11
Assets/Oculus/VR/Scripts/OVRSceneLoader.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: a6d444f79f5ee4646b26c6d746385e80
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
8
Assets/Oculus/VR/Scripts/OVRTrackedKeyboard.meta
Normal file
8
Assets/Oculus/VR/Scripts/OVRTrackedKeyboard.meta
Normal file
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 39bd93ef6637e2f4db352893960b410c
|
||||
folderAsset: yes
|
||||
DefaultImporter:
|
||||
externalObjects: {}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
86
Assets/Oculus/VR/Scripts/OVRTrackedKeyboard/OVRKeyboard.cs
Normal file
86
Assets/Oculus/VR/Scripts/OVRTrackedKeyboard/OVRKeyboard.cs
Normal file
@@ -0,0 +1,86 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.Text;
|
||||
using UnityEngine;
|
||||
|
||||
public static class OVRKeyboard
|
||||
{
|
||||
public struct TrackedKeyboardState
|
||||
{
|
||||
public bool isPositionValid;
|
||||
public bool isPositionTracked;
|
||||
public bool isOrientationValid;
|
||||
public bool isOrientationTracked;
|
||||
public Vector3 position;
|
||||
public Quaternion rotation;
|
||||
public double timeInSeconds;
|
||||
}
|
||||
|
||||
public struct TrackedKeyboardInfo
|
||||
{
|
||||
public string Name;
|
||||
public UInt64 Identifier;
|
||||
public Vector3 Dimensions;
|
||||
public OVRPlugin.TrackedKeyboardFlags KeyboardFlags;
|
||||
public OVRPlugin.TrackedKeyboardPresentationStyles SupportedPresentationStyles;
|
||||
}
|
||||
|
||||
public static TrackedKeyboardState GetKeyboardState()
|
||||
{
|
||||
TrackedKeyboardState keyboardState;
|
||||
|
||||
OVRPlugin.KeyboardState keyboardStatePlugin;
|
||||
OVRPlugin.GetKeyboardState(OVRPlugin.Step.Render, out keyboardStatePlugin);
|
||||
keyboardState.timeInSeconds = keyboardStatePlugin.PoseState.Time;
|
||||
|
||||
OVRPose nodePose = keyboardStatePlugin.PoseState.Pose.ToOVRPose();
|
||||
keyboardState.position = nodePose.position;
|
||||
keyboardState.rotation = nodePose.orientation;
|
||||
|
||||
keyboardState.isPositionValid = (keyboardStatePlugin.PositionValid == OVRPlugin.Bool.True);
|
||||
keyboardState.isPositionTracked = (keyboardStatePlugin.PositionTracked == OVRPlugin.Bool.True);
|
||||
keyboardState.isOrientationValid = (keyboardStatePlugin.OrientationValid == OVRPlugin.Bool.True);
|
||||
keyboardState.isOrientationTracked = (keyboardStatePlugin.OrientationTracked == OVRPlugin.Bool.True);
|
||||
|
||||
return keyboardState;
|
||||
}
|
||||
|
||||
// Query for information about the system keyboards.
|
||||
public static bool GetSystemKeyboardInfo(OVRPlugin.TrackedKeyboardQueryFlags keyboardQueryFlags, out TrackedKeyboardInfo keyboardInfo)
|
||||
{
|
||||
keyboardInfo = default(TrackedKeyboardInfo);
|
||||
|
||||
OVRPlugin.KeyboardDescription keyboardDescription;
|
||||
if(OVRPlugin.GetSystemKeyboardDescription(keyboardQueryFlags, out keyboardDescription))
|
||||
{
|
||||
keyboardInfo.Name = Encoding.UTF8.GetString(keyboardDescription.Name).TrimEnd('\0');
|
||||
keyboardInfo.Identifier = keyboardDescription.TrackedKeyboardId;
|
||||
keyboardInfo.Dimensions = new Vector3(keyboardDescription.Dimensions.x, keyboardDescription.Dimensions.y, keyboardDescription.Dimensions.z);
|
||||
keyboardInfo.KeyboardFlags = keyboardDescription.KeyboardFlags;
|
||||
keyboardInfo.SupportedPresentationStyles = keyboardDescription.SupportedPresentationStyles;
|
||||
return true;
|
||||
}
|
||||
else
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static bool StopKeyboardTracking(TrackedKeyboardInfo keyboardInfo)
|
||||
{
|
||||
return OVRPlugin.StopKeyboardTracking();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: b9cee92a63d840b43b5ba9d872653170
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,902 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using UnityEngine.Assertions;
|
||||
using UnityEngine.Serialization;
|
||||
|
||||
public class OVRTrackedKeyboard : MonoBehaviour
|
||||
{
|
||||
private static readonly float underlayScaleMultX_ = 1.475f;
|
||||
private static readonly float underlayScaleConstY_ = 0.001f;
|
||||
private static readonly float underlayScaleMultZ_ = 2.138f;
|
||||
private static readonly Vector3 underlayOffset_ = new Vector3 { x = 0.0f, y = 0.0f, z = -0.028f };
|
||||
private static readonly float boundingBoxAboveKeyboardY_ = 0.08f;
|
||||
|
||||
/// <summary>
|
||||
/// Used by TrackingState property to give the current state of keyboard tracking.
|
||||
/// </summary>
|
||||
public enum TrackedKeyboardState
|
||||
{
|
||||
/// <summary>
|
||||
/// The OVRTrackedKeyboard component has not yet been initialized.
|
||||
/// </summary>
|
||||
Uninitialized,
|
||||
/// <summary>
|
||||
/// Component is initialized but user has not selected a keyboard
|
||||
/// to track in the system settings.
|
||||
/// </summary>
|
||||
NoTrackableKeyboard,
|
||||
/// <summary>
|
||||
/// Keyboard tracking has been stopped or has not yet started for current keyboard.
|
||||
/// </summary>
|
||||
Offline,
|
||||
/// <summary>
|
||||
/// Keyboard tracking has been started but no tracking data is yet available.
|
||||
/// This can occur if the keyboard is not visible to the cameras.
|
||||
/// </summary>
|
||||
StartedNotTracked,
|
||||
/// <summary>
|
||||
/// Keyboard tracking has been started but no tracking data has been available for a while.
|
||||
/// This can occur if the keyboard is no longer visible to the cameras.
|
||||
/// </summary>
|
||||
Stale,
|
||||
/// <summary>
|
||||
/// Keyboard is currently being tracked and recent tracking data is available.
|
||||
/// </summary>
|
||||
Valid,
|
||||
/// <summary>
|
||||
/// An error occurred while initializing keyboard tracking.
|
||||
/// </summary>
|
||||
Error,
|
||||
/// <summary>
|
||||
/// Was unable to retrieve system keyboard info. Can occur if required
|
||||
/// keyboard extension is not properly enabled in the application manifest.
|
||||
/// </summary>
|
||||
ErrorExtensionFailed
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines which visualization is used for the tracked keyboard.
|
||||
/// </summary>
|
||||
public enum KeyboardPresentation
|
||||
{
|
||||
/// <summary>
|
||||
/// The keyboard is rendered as an opaque model in VR and if the user's hands are
|
||||
/// placed over it, they are rendered using passthrough.
|
||||
/// </summary>
|
||||
PreferOpaque,
|
||||
/// <summary>
|
||||
/// The keyboard and hands are rendered using a rectangular passthrough window
|
||||
/// around the keyboard, and only the key labels are rendered in VR on top of the keyboard.
|
||||
/// </summary>
|
||||
PreferKeyLabels,
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines amount that keyboard is tilted from its ordinary horizontal position. For internal use.
|
||||
/// </summary>
|
||||
public float CurrentKeyboardAngleFromUp { get; private set; } = 0f;
|
||||
|
||||
/// <summary>
|
||||
/// Current state of keyboard tracking.
|
||||
/// </summary>
|
||||
public TrackedKeyboardState TrackingState { get; private set; } = TrackedKeyboardState.Uninitialized;
|
||||
/// <summary>
|
||||
/// Provides information about the keyboard currently being tracked by this
|
||||
/// OVRTrackedKeyboard component.
|
||||
/// </summary>
|
||||
public OVRKeyboard.TrackedKeyboardInfo ActiveKeyboardInfo { get; private set; }
|
||||
/// <summary>
|
||||
/// Provides information about the keyboard currently selected for tracking in
|
||||
/// the system settings. May not yet be tracked by this OVRTrackedKeyboard component.
|
||||
/// </summary>
|
||||
public OVRKeyboard.TrackedKeyboardInfo SystemKeyboardInfo { get; private set; }
|
||||
|
||||
/// <summary>
|
||||
/// Determines which visualization will be used to present the tracked keyboard
|
||||
/// to the user.
|
||||
/// </summary>
|
||||
public KeyboardPresentation Presentation
|
||||
{
|
||||
get
|
||||
{
|
||||
return presentation;
|
||||
}
|
||||
set
|
||||
{
|
||||
presentation = value;
|
||||
UpdatePresentation(GetKeyboardVisibility());
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Specifies whether or not the OVRTrackedKeyboard component will attempt to search
|
||||
/// for and track a keyboard. If true, the component will continually search
|
||||
/// for a tracked keyboard. If one is detected it will be shown. If false,
|
||||
/// no keyboard is shown and the prefab is inactive. The keyboard can still
|
||||
/// be used to enter text into input fields even though it cannot be seen in VR.
|
||||
/// </summary>
|
||||
public bool TrackingEnabled
|
||||
{
|
||||
get
|
||||
{
|
||||
return trackingEnabled;
|
||||
}
|
||||
set
|
||||
{
|
||||
trackingEnabled = value;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Specifies whether or not the keyboard must be connected via Bluetooth in
|
||||
/// order to be tracked. If set to true, the keyboard must be connected to the
|
||||
/// headset via Bluetooth in order to be tracked. The keyboard will stop being
|
||||
/// tracked if it is powered off or disconnected from the headset. If set to false,
|
||||
/// the keyboard will be tracked as long as it is visible to the headset's cameras.
|
||||
/// </summary>
|
||||
public bool ConnectionRequired
|
||||
{
|
||||
get
|
||||
{
|
||||
return connectionRequired;
|
||||
}
|
||||
set
|
||||
{
|
||||
connectionRequired = value;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Specifies whether to search for local keyboards attached to the headset
|
||||
/// or for remote keyboards not attached to the headset.
|
||||
/// </summary>
|
||||
public OVRPlugin.TrackedKeyboardQueryFlags KeyboardQueryFlags
|
||||
{
|
||||
get
|
||||
{
|
||||
return keyboardQueryFlags;
|
||||
}
|
||||
set
|
||||
{
|
||||
keyboardQueryFlags = value;
|
||||
}
|
||||
}
|
||||
|
||||
#region User settings
// Serialized settings intended to be edited by users of the prefab in the Inspector.
[Header("Settings")]
[SerializeField]
[Tooltip("If true, will continually try to track and show keyboard. If false, no keyboard will be shown.")]
private bool trackingEnabled = true;  // backing field for TrackingEnabled

[SerializeField]
[Tooltip("If true, system keyboard must be paired and connected to track.")]
private bool connectionRequired = true;  // backing field for ConnectionRequired

[SerializeField]
[Tooltip("Which type of keyboard you wish to use.")]
private OVRPlugin.TrackedKeyboardQueryFlags keyboardQueryFlags = OVRPlugin.TrackedKeyboardQueryFlags.Local;  // backing field for KeyboardQueryFlags

[SerializeField]
[Tooltip("Opaque will render a solid model of the keyboard with passthrough hands. " +
"Key Labels will render the entire keyboard in passthrough other than the key labels. " +
"These are both suggestions and may not always be available.")]
private KeyboardPresentation presentation = KeyboardPresentation.PreferOpaque;  // backing field for Presentation

/// <summary>
/// How large of a passthrough area to show surrounding the keyboard when using Key Label presentation.
/// </summary>
[Tooltip("How large of a passthrough area to show surrounding the keyboard when using Key Label presentation")]
public float PassthroughBorderMultiplier = 0.2f;

/// <summary>
/// The shader used for rendering the keyboard model in opaque mode.
/// </summary>
[Tooltip("The shader used for rendering the keyboard model")]
public Shader keyboardModelShader;
#endregion

// Presentation styles reported as supported by the currently tracked keyboard; 0 until one is loaded.
private OVRPlugin.TrackedKeyboardPresentationStyles currentKeyboardPresentationStyles = 0;
// Overlay used for passthrough hands in opaque mode; created in InitializeHandPresenceData().
private OVROverlay projectedPassthroughOpaque_;
// All renderers of the active keyboard model; toggled in UpdateKeyboardVisibility().
private MeshRenderer[] activeKeyboardRenderers_;
// Root GameObject of the loaded keyboard model; null while not tracking.
private GameObject activeKeyboardMesh_;
// Renderer whose shader is swapped between opaque and key-label modes.
private MeshRenderer activeKeyboardMeshRenderer_;
// Quad shown under the keyboard to display passthrough in key-label mode.
private GameObject passthroughQuad_;
// The model's original (opaque) shader, captured in LoadKeyboardMesh().
private Shader opaqueShader_;

// These properties generally don't need to be modified by the user of the prefab

/// <summary>
/// Internal only. The shader used to render the keyboard in key label mode.
/// </summary>
[Header("Internal")]
public Shader KeyLabelModeShader;
/// <summary>
/// Internal only. The shader used to render the passthrough rectangle in opaque mode.
/// </summary>
public Shader PassthroughShader;
|
||||
|
||||
#region MR Service Setup
// Root transform for the projected passthrough surface; scaled/positioned to the tracked keyboard.
[SerializeField] private Transform projectedPassthroughRoot;
// Mesh registered as surface geometry with the passthrough layer (see RegisterPassthroughMeshToSDK).
[SerializeField] private MeshFilter projectedPassthroughMesh;
#endregion
|
||||
|
||||
/// <summary>
/// Internal only. The passthrough layer used to render the passthrough rectangle in key label mode.
/// </summary>
public OVRPassthroughLayer ProjectedPassthroughKeyLabel;
/// <summary>
/// Internal only. The passthrough layer used to render the passthrough rectangle in opaque mode.
/// </summary>
public OVROverlay PassthroughOverlay
{
    get { return projectedPassthroughOpaque_; }
    // NOTE(review): empty setter silently discards assigned values; no assignment is
    // visible in this file — confirm the property is meant to be effectively read-only.
    private set {}
}

/// <summary>
/// Event that is dispatched when the component starts or stops actively tracking the keyboard.
/// </summary>
public Action<TrackedKeyboardSetActiveEvent> TrackedKeyboardActiveChanged = delegate { };
/// <summary>
/// Event that is dispatched when the state of keyboard tracking changes (e.g. tracking
/// becomes stale or valid as keyboard passes in/out of camera view).
/// </summary>
public Action<TrackedKeyboardVisibilityChangedEvent> TrackedKeyboardVisibilityChanged = delegate { };

/// <summary>
/// Transform that determines current position and rotation of the keyboard.
/// </summary>
public Transform ActiveKeyboardTransform;

/// <summary>
/// Internal only. Determines whether the hands are currently positioned over the keyboard.
/// In opaque presentation mode, passthrough hands are only shown when this is true.
/// </summary>
[HideInInspector]
public bool HandsOverKeyboard = false;

// Camera rig located in Start(); used each frame to align with tracking space.
private OVRCameraRig cameraRig_;

// Handle of the UpdateKeyboardPose coroutine; null when tracking is stopped.
private Coroutine updateKeyboardRoutine_;

// Collider sized to the tracked keyboard; used by GetDistanceToKeyboard().
private BoxCollider keyboardBoundingBox_;

// Seconds spent in the Stale state without hands over the keyboard.
private float staleTimeoutCounter_ = 0f;
// Seconds of staleness after which the keyboard is hidden.
private const float STALE_TIMEOUT = 10f;
// Time spent timed-out before tracking is reacquired (reset on Valid).
private float reacquisitionTimer_ = 0f;
// Window timer for batching skipped-pose reporting (see UpdateSkippedPoseTimer).
private float sendFilteredPoseEventTimer_ = 0f;
// Poses rejected by the keyboard-angle filter during the current window.
private int skippedPoseCount_ = 0;
// Length of the skipped-pose reporting window, in seconds.
private const float FILTERED_POSE_TIMEOUT = 15f;

// Exponentially-weighted average filter (EWA), smooths out changes in keyboard tracking over time.
// Null until the first accepted pose after tracking (re)starts.
private Vector3? EWAPosition = null;
private Quaternion? EWARotation = null;
// Vertical offset applied to the passthrough root; currently zero (tuning knob).
private float HAND_HEIGHT_TUNING = 0.0f;

/// <summary>
/// Determines whether rolling average filter and keyboard angle filters are applied.
/// If true, keyboard will be shown in latest tracked position at all times.
/// </summary>
[HideInInspector]
public bool UseHeuristicRollback = false;
|
||||
|
||||
// Unity coroutine entry point: locates the camera rig, seeds SystemKeyboardInfo
// with the "no keyboard" sentinel, sets up passthrough hand presence, then runs
// the tracking-state polling loop (which never returns).
private IEnumerator Start()
{
    cameraRig_ = FindObjectOfType<OVRCameraRig>();

    // Sentinel: Identifier == uint.MaxValue means "no keyboard selected yet".
    SystemKeyboardInfo = new OVRKeyboard.TrackedKeyboardInfo
    {
        Name = "None",
        Dimensions = new Vector3(0f, 0f, 0f),
        Identifier = uint.MaxValue
    };

    yield return InitializeHandPresenceData();

    // Infinite loop — Start() intentionally never completes.
    yield return UpdateTrackingStateCoroutine();
}
|
||||
|
||||
// Creates the OVROverlay used for passthrough hands in opaque presentation mode
// and starts both passthrough surfaces hidden (UpdatePresentation drives visibility).
private IEnumerator InitializeHandPresenceData()
{
    // NOTE(review): looks up the rig by GameObject name rather than reusing the
    // cameraRig_ found in Start(); assumes the rig object is literally named
    // "OVRCameraRig" — confirm this holds for customized scenes.
    GameObject ovrCameraRig = GameObject.Find("OVRCameraRig");
    if (ovrCameraRig == null)
    {
        Debug.LogError("Scene does not contain an OVRCameraRig");
        yield break;
    }

    projectedPassthroughOpaque_ = ovrCameraRig.AddComponent<OVROverlay>();

    // Dedicated overlay shape that renders passthrough hands over the keyboard.
    projectedPassthroughOpaque_.currentOverlayShape = OVROverlay.OverlayShape.KeyboardHandsPassthrough;

    // Hidden until a keyboard is actually shown.
    projectedPassthroughOpaque_.hidden = true;
    projectedPassthroughOpaque_.gameObject.SetActive(true);

    ProjectedPassthroughKeyLabel.hidden = true;
    ProjectedPassthroughKeyLabel.gameObject.SetActive(true);
}
|
||||
|
||||
// (Re-)registers the projected passthrough mesh with the key-label passthrough
// layer, removing any previously attached surface geometry first.
void RegisterPassthroughMeshToSDK()
{
    var surfaceObject = projectedPassthroughMesh.gameObject;

    if (ProjectedPassthroughKeyLabel.IsSurfaceGeometry(surfaceObject))
    {
        ProjectedPassthroughKeyLabel.RemoveSurfaceGeometry(surfaceObject);
    }

    ProjectedPassthroughKeyLabel.AddSurfaceGeometry(surfaceObject, true);
}
|
||||
|
||||
#region Public API
|
||||
|
||||
/// <summary>
/// Returns the distance from the given point to the keyboard.
/// </summary>
/// <param name="point">A 3D vector coordinate to use as the reference point</param>
/// <returns>0 when the point lies inside the keyboard bounds, the raycast distance
/// to the bounds otherwise, or Mathf.Infinity when no keyboard is tracked or the
/// ray misses.</returns>
public float GetDistanceToKeyboard(Vector3 point)
{
    if (keyboardBoundingBox_ == null)
    {
        return Mathf.Infinity;
    }

    if (keyboardBoundingBox_.bounds.Contains(point))
    {
        return 0.0f;
    }

    // Cast from the query point toward the nearest point on the bounding box.
    var towardKeyboard = keyboardBoundingBox_.ClosestPointOnBounds(point) - point;
    RaycastHit hitInfo;
    if (keyboardBoundingBox_.Raycast(new Ray(point, towardKeyboard), out hitInfo, Mathf.Infinity))
    {
        return hitInfo.distance;
    }

    return Mathf.Infinity;
}
|
||||
|
||||
/// <summary>
/// Invokes an Android broadcast to launch a keyboard selection dialog for local keyboard type.
/// </summary>
public void LaunchLocalKeyboardSelectionDialog() =>
    LaunchOverlayIntent("systemux://dialog/set-local-physical-tracked-keyboard");
|
||||
|
||||
/// <summary>
/// Invokes an Android broadcast to launch a keyboard selection dialog for remote keyboard type.
/// </summary>
public void LaunchRemoteKeyboardSelectionDialog() =>
    LaunchOverlayIntent("systemux://dialog/set-remote-physical-tracked-keyboard");
|
||||
|
||||
#endregion
|
||||
|
||||
#region Private Helpers
|
||||
// True while the tracker is in any active state (i.e. not NoTrackableKeyboard or Offline).
private bool KeyboardTrackerIsRunning() =>
    TrackingState != TrackedKeyboardState.NoTrackableKeyboard
    && TrackingState != TrackedKeyboardState.Offline;
|
||||
|
||||
// Main polling loop (runs forever at ~10 Hz): queries the system keyboard info from
// OVRPlugin and starts/stops keyboard tracking so that the running state matches
// the reported flags and the component's settings.
private IEnumerator UpdateTrackingStateCoroutine()
{
    for (;;)
    {
        // On Link this is called before initialization.
        // We don't want this on our normal flow because it breaks our tests.
#if !UNITY_ANDROID && !UNITY_EDITOR
        if(OVRPlugin.initialized) {
#endif
        OVRKeyboard.TrackedKeyboardInfo keyboardInfo;
        if (OVRKeyboard.GetSystemKeyboardInfo(KeyboardQueryFlags, out keyboardInfo))
        {
            bool systemKeyboardSwitched = false;
            // A different keyboard (or changed flags) was selected at the system level.
            if (SystemKeyboardInfo.Identifier != keyboardInfo.Identifier || SystemKeyboardInfo.KeyboardFlags != keyboardInfo.KeyboardFlags)
            {
                Debug.Log(String.Format("New System keyboard info: [{0}] {1} (Flags {2}) ({3} {4})",
                    keyboardInfo.Identifier, keyboardInfo.Name,
                    keyboardInfo.KeyboardFlags,
                    (keyboardInfo.SupportedPresentationStyles & OVRPlugin.TrackedKeyboardPresentationStyles.Opaque) != 0 ? "Supports Opaque" : "",
                    (keyboardInfo.SupportedPresentationStyles & OVRPlugin.TrackedKeyboardPresentationStyles.KeyLabel) != 0 ? "Supports Key Label" : ""));
                // Leave the terminal NoTrackableKeyboard state now that a keyboard is known.
                if (TrackingState == TrackedKeyboardState.NoTrackableKeyboard){
                    SetKeyboardState(TrackedKeyboardState.Offline);
                }
                SystemKeyboardInfo = keyboardInfo;
                systemKeyboardSwitched = true;
            }

            bool keyboardExists = (keyboardInfo.KeyboardFlags & OVRPlugin.TrackedKeyboardFlags.Exists) != 0;
            if (keyboardExists && trackingEnabled)
            {
                bool localKeyboard = (keyboardInfo.KeyboardFlags & OVRPlugin.TrackedKeyboardFlags.Local) != 0;
                bool remoteKeyboard = (keyboardInfo.KeyboardFlags & OVRPlugin.TrackedKeyboardFlags.Remote) != 0;
                bool connectedKeyboard = (keyboardInfo.KeyboardFlags & OVRPlugin.TrackedKeyboardFlags.Connected) != 0;
                // Remote keyboards always track; local ones additionally honor ConnectionRequired.
                bool shouldBeRunning = remoteKeyboard || (localKeyboard && (!connectionRequired || connectedKeyboard));

                // Restart tracking if the keyboard changed; stop it if it should no longer run.
                if(KeyboardTrackerIsRunning() && (systemKeyboardSwitched || !shouldBeRunning))
                {
                    StopKeyboardTrackingInternal();
                }

                if(!KeyboardTrackerIsRunning() && shouldBeRunning)
                {
                    yield return StartKeyboardTrackingCoroutine();
                }
            }
            else
            {
                if (KeyboardTrackerIsRunning()){
                    StopKeyboardTrackingInternal();
                }

                if (!keyboardExists)
                {
                    SetKeyboardState(TrackedKeyboardState.NoTrackableKeyboard);
                }
            }
        }
        else
        {
            // Query failed entirely — the tracked-keyboard extension is unavailable.
            if (KeyboardTrackerIsRunning()){
                StopKeyboardTrackingInternal();
            }
            SetKeyboardState(TrackedKeyboardState.ErrorExtensionFailed);
        }
        SystemKeyboardInfo = keyboardInfo;
#if !UNITY_ANDROID && !UNITY_EDITOR
        }
#endif
        yield return new WaitForSeconds(.1f);
    }
}
|
||||
|
||||
// Begins tracking the current system keyboard: registers passthrough geometry,
// starts plugin-side tracking, loads the keyboard model, and launches the
// per-frame pose-update coroutine. No-op if tracking is already running.
private IEnumerator StartKeyboardTrackingCoroutine()
{
    if (KeyboardTrackerIsRunning())
    {
        Debug.Log("StartKeyboardTracking(): Keyboard already being tracked");
        yield break;
    }

    // Tracking state must be fully torn down before a fresh start.
    Assert.IsTrue(
        !KeyboardTrackerIsRunning()
        && activeKeyboardMesh_ == null
        && activeKeyboardRenderers_ == null
        && updateKeyboardRoutine_ == null,
        $"State: {TrackingState}, Mesh: {activeKeyboardMesh_}, Coroutine: {updateKeyboardRoutine_}");

    InitializeKeyboardInfo();
    RegisterPassthroughMeshToSDK();

    Debug.Log("Calling StartKeyboardTracking with id " + SystemKeyboardInfo.Identifier);

    if (!OVRPlugin.StartKeyboardTracking(SystemKeyboardInfo.Identifier))
    {
        Debug.LogWarning("OVRKeyboard.StartKeyboardTracking Failed");
        yield break;
    }

    // Size the passthrough underlay to the physical keyboard footprint.
    // (underlayScaleMultX_/underlayScaleConstY_/underlayScaleMultZ_ are declared elsewhere in this class.)
    projectedPassthroughRoot.localScale = new Vector3 { x = SystemKeyboardInfo.Dimensions.x * underlayScaleMultX_, y = underlayScaleConstY_, z = SystemKeyboardInfo.Dimensions.z * underlayScaleMultZ_ };

    currentKeyboardPresentationStyles = SystemKeyboardInfo.SupportedPresentationStyles;
    ActiveKeyboardInfo = SystemKeyboardInfo;
    LoadKeyboardMesh();
    updateKeyboardRoutine_ = StartCoroutine(UpdateKeyboardPose());
    // Reset the EWA pose filter so stale history does not bleed into the new session.
    EWAPosition = null;
    EWARotation = null;

    TrackedKeyboardActiveChanged?.Invoke(new TrackedKeyboardSetActiveEvent(isEnabled: true));
    SetKeyboardState(TrackedKeyboardState.StartedNotTracked);
}
|
||||
|
||||
// Stops keyboard tracking: hides passthrough surfaces, notifies listeners, stops the
// pose coroutine and plugin-side tracking, destroys the model, and ends in Offline.
private void StopKeyboardTrackingInternal()
{
    // Nothing to tear down — just normalize the state.
    if (!KeyboardTrackerIsRunning() || updateKeyboardRoutine_ == null)
    {
        SetKeyboardState(TrackedKeyboardState.Offline);
        return;
    }

    projectedPassthroughOpaque_.hidden = true;
    ProjectedPassthroughKeyLabel.hidden = true;

    TrackedKeyboardActiveChanged?.Invoke(new TrackedKeyboardSetActiveEvent(isEnabled: false));

    Debug.Log($"StopKeyboardTracking {ActiveKeyboardInfo.Name}");

    StopCoroutine(updateKeyboardRoutine_);
    updateKeyboardRoutine_ = null;

    OVRKeyboard.StopKeyboardTracking(ActiveKeyboardInfo);
    // Reset ActiveKeyboardInfo back to the "no keyboard" sentinel.
    InitializeKeyboardInfo();

    if (activeKeyboardMesh_ != null)
    {
        Destroy(activeKeyboardMesh_.gameObject);
        activeKeyboardMesh_ = null;
        activeKeyboardRenderers_ = null;
        keyboardBoundingBox_ = null;
    }

    SetKeyboardState(TrackedKeyboardState.Offline);
}
|
||||
|
||||
// Per-frame coroutine while tracking is active: aligns this object with the rig's
// tracking space, reads the latest keyboard pose, smooths it with an EWA filter
// (unless UseHeuristicRollback disables filtering), applies it to the keyboard and
// passthrough transforms, and updates the tracking state (Valid/Stale).
private IEnumerator UpdateKeyboardPose()
{
    while (true)
    {
        // Keep this component's transform in sync with the tracking space so that
        // keyboard poses (reported in tracking space) can be applied as local poses.
        transform.position = cameraRig_.trackingSpace.transform.position;
        transform.rotation = cameraRig_.trackingSpace.transform.rotation;

        var poseState = OVRKeyboard.GetKeyboardState();
        TrackedKeyboardState keyboardState = TrackedKeyboardState.StartedNotTracked;

        if (poseState.isPositionValid)
        {
            if (poseState.isPositionTracked && activeKeyboardMesh_ != null)
            {
                // With heuristic rollback the angle filter is disabled (360°) and
                // smoothing is off (alpha 0 => always take the latest pose).
                float keyboardAngleFilter = UseHeuristicRollback ? 360f : 20f;
                float ewaAlpha = UseHeuristicRollback ? 0f : 0.65f;

                // Reject poses where the keyboard appears tilted too far from horizontal.
                var worldRotation = transform.rotation * poseState.rotation;
                var upRotated = worldRotation * Vector3.up;
                CurrentKeyboardAngleFromUp = Vector3.Angle(upRotated, Vector3.up);

                if (CurrentKeyboardAngleFromUp < keyboardAngleFilter)
                {
                    // Seed the position filter on first sample, then blend.
                    if (!EWAPosition.HasValue)
                    {
                        EWAPosition = poseState.position;
                    }
                    else
                    {
                        EWAPosition = ewaAlpha * EWAPosition + (1f - ewaAlpha) * poseState.position;
                    }

                    // Same for rotation, via slerp toward the newest sample.
                    if (!EWARotation.HasValue)
                    {
                        EWARotation = poseState.rotation;
                    }
                    else
                    {
                        EWARotation = Quaternion.Slerp(EWARotation.Value, poseState.rotation, 1f - ewaAlpha);
                    }

                    ActiveKeyboardTransform.localPosition = EWAPosition.Value;
                    ActiveKeyboardTransform.localRotation = EWARotation.Value;

                    // underlayOffset_ is declared elsewhere in this class.
                    projectedPassthroughRoot.localPosition = EWAPosition.Value + underlayOffset_ + new Vector3(0f, HAND_HEIGHT_TUNING, 0f);
                    projectedPassthroughRoot.localRotation = EWARotation.Value;
                }
                else
                {
                    // Pose rejected by the angle filter; counted for periodic reporting.
                    skippedPoseCount_++;
                }

            }

            keyboardState = poseState.isPositionTracked
                ? TrackedKeyboardState.Valid
                : TrackedKeyboardState.Stale;
        }

        SetKeyboardState(keyboardState);
        UpdateSkippedPoseTimer();
        yield return null;
    }
}
|
||||
|
||||
// Accumulates time and, once the reporting window has elapsed with at least one
// skipped pose, resets the skipped-pose counters (event dispatch is currently
// commented out).
private void UpdateSkippedPoseTimer()
{
    sendFilteredPoseEventTimer_ += Time.deltaTime;

    bool windowElapsed = sendFilteredPoseEventTimer_ > FILTERED_POSE_TIMEOUT;
    if (!windowElapsed || skippedPoseCount_ == 0)
    {
        return;
    }

    // dispatcher_.Dispatch(new TrackedKeyboardSkippedPoseEvent(skippedPoseCount_));
    skippedPoseCount_ = 0;
    sendFilteredPoseEventTimer_ = 0f;
}
|
||||
|
||||
// Loads the runtime keyboard model, builds a bounding box from the active keyboard
// dimensions, creates the key-label passthrough quad, and parents everything under
// ActiveKeyboardTransform. On load failure, sets the Error state and aborts.
private void LoadKeyboardMesh()
{
    Debug.Log("LoadKeyboardMesh");
    activeKeyboardMesh_ = LoadRuntimeKeyboardMesh();
    if(activeKeyboardMesh_ == null) {
        Debug.LogError("Failed to load keyboard mesh.");
        SetKeyboardState(TrackedKeyboardState.Error);
        // BUG FIX: previously fell through and dereferenced the null mesh below,
        // throwing NullReferenceException after a failed load.
        return;
    }
    keyboardBoundingBox_ = activeKeyboardMesh_.AddComponent<BoxCollider>();

    // Box is centered at half the keyboard height and extended upward (by
    // boundingBoxAboveKeyboardY_, declared elsewhere in this class) so hands
    // hovering above the keys still count as "over" the keyboard.
    keyboardBoundingBox_.center =
        new Vector3(0.0f, ActiveKeyboardInfo.Dimensions.y / 2.0f, 0.0f);
    keyboardBoundingBox_.size =
        new Vector3(ActiveKeyboardInfo.Dimensions.x,
            ActiveKeyboardInfo.Dimensions.y + boundingBoxAboveKeyboardY_,
            ActiveKeyboardInfo.Dimensions.z);

    // Capture the model's own (opaque) shader so UpdatePresentation() can restore it.
    activeKeyboardMeshRenderer_ = activeKeyboardMesh_.GetComponentInChildren<MeshRenderer>();
    opaqueShader_ = activeKeyboardMeshRenderer_.material.shader;
    activeKeyboardMeshRenderer_.material.shader = KeyLabelModeShader;

    // Quad slightly below the keyboard showing a passthrough border in key-label mode.
    passthroughQuad_ = GameObject.CreatePrimitive(PrimitiveType.Quad);
    passthroughQuad_.transform.localPosition = new Vector3(0.0f, -0.01f, 0.0f);
    passthroughQuad_.transform.parent = activeKeyboardMesh_.transform;
    passthroughQuad_.transform.localRotation = Quaternion.Euler(90.0f, 0.0f, 0.0f);
    float borderSize = ActiveKeyboardInfo.Dimensions.x * PassthroughBorderMultiplier;
    passthroughQuad_.transform.localScale = new Vector3(
        ActiveKeyboardInfo.Dimensions.x + borderSize,
        ActiveKeyboardInfo.Dimensions.z + borderSize,
        ActiveKeyboardInfo.Dimensions.y);

    MeshRenderer meshRenderer = passthroughQuad_.GetComponent<MeshRenderer>();
    meshRenderer.material.shader = PassthroughShader;

    // Re-root the model under a fresh parent GameObject before attaching it to the
    // keyboard transform.
    GameObject parent = new GameObject();
    activeKeyboardMesh_.transform.parent = parent.transform;
    activeKeyboardMesh_ = parent;

    activeKeyboardRenderers_ = activeKeyboardMesh_.GetComponentsInChildren<MeshRenderer>();
    activeKeyboardMesh_.transform.SetParent(ActiveKeyboardTransform, worldPositionStays: false);

    UpdateKeyboardVisibility();
}
|
||||
|
||||
// Applies the effective presentation mode: falls back from the preferred mode to
// the other one when the tracked keyboard does not support it, then toggles the
// model shader, passthrough quad, and the two passthrough surfaces accordingly.
void UpdatePresentation(bool isVisible)
{
    KeyboardPresentation presentationToUse = Presentation;
    // currentKeyboardPresentationStyles == 0 means "unknown"; no fallback possible.
    if(currentKeyboardPresentationStyles != 0) {
        // Preferred Opaque but unsupported -> use KeyLabels if that is supported.
        if (Presentation == KeyboardPresentation.PreferOpaque && (currentKeyboardPresentationStyles & OVRPlugin.TrackedKeyboardPresentationStyles.Opaque) == 0) {
            if((currentKeyboardPresentationStyles & OVRPlugin.TrackedKeyboardPresentationStyles.KeyLabel) != 0) {
                presentationToUse = KeyboardPresentation.PreferKeyLabels;
            }
        }
        // Preferred KeyLabels but unsupported -> use Opaque if that is supported.
        else if (Presentation == KeyboardPresentation.PreferKeyLabels && (currentKeyboardPresentationStyles & OVRPlugin.TrackedKeyboardPresentationStyles.KeyLabel) == 0) {
            if((currentKeyboardPresentationStyles & OVRPlugin.TrackedKeyboardPresentationStyles.Opaque) != 0) {
                presentationToUse = KeyboardPresentation.PreferOpaque;
            }
        }
    }

    if (!isVisible) {
        // Keyboard hidden: both passthrough surfaces off.
        projectedPassthroughOpaque_.hidden = true;
        ProjectedPassthroughKeyLabel.hidden = true;
    } else if (presentationToUse == KeyboardPresentation.PreferOpaque) {
        // Opaque: solid model, passthrough hands only while they are over the keyboard.
        activeKeyboardMeshRenderer_.material.shader = opaqueShader_;
        passthroughQuad_.SetActive(false);
        projectedPassthroughOpaque_.hidden = !GetKeyboardVisibility() || !HandsOverKeyboard;
        ProjectedPassthroughKeyLabel.hidden = true;
    } else {
        // Key labels: passthrough keyboard with labeled keys; quad provides the border.
        activeKeyboardMeshRenderer_.material.shader = KeyLabelModeShader;
        passthroughQuad_.SetActive(true);
        projectedPassthroughOpaque_.hidden = true;
        ProjectedPassthroughKeyLabel.hidden = false; // Always shown
    }
}
|
||||
|
||||
// Loads the keyboard's GLB render model from the runtime and returns its scene
// root, or null if no model could be found or loaded.
private GameObject LoadRuntimeKeyboardMesh()
{
    Debug.Log("LoadRuntimekeyboardMesh");
    string[] modelPaths = OVRPlugin.GetRenderModelPaths();
    if (modelPaths != null)
    {
        // NOTE(review): only the local keyboard model path is handled here, even
        // though remote keyboards can be tracked — confirm this is intentional.
        for (int i = 0; i < modelPaths.Length; i++)
        {
            if (modelPaths[i].Equals("/model_fb/keyboard/local"))
            {
                OVRPlugin.RenderModelProperties modelProps = new OVRPlugin.RenderModelProperties();
                if (OVRPlugin.GetRenderModelProperties(modelPaths[i], ref modelProps))
                {
                    if (modelProps.ModelKey != OVRPlugin.RENDER_MODEL_NULL_KEY)
                    {
                        byte[] data = OVRPlugin.LoadRenderModel(modelProps.ModelKey);
                        if (data != null)
                        {
                            // Parse the GLB and apply the configured keyboard shader.
                            OVRGLTFLoader gltfLoader = new OVRGLTFLoader(data);
                            gltfLoader.SetModelShader(keyboardModelShader);
                            OVRGLTFScene scene = gltfLoader.LoadGLB(false);
                            return scene.root;
                        }
                    }
                }
                // Path matched but one of the load steps above failed.
                Debug.LogError("Failed to load model. Ensure that the correct keyboard is connected.");
                break;
            }
        }
    }
    Debug.LogError("Failed to find keyboard model.");
    return null;
}
|
||||
|
||||
/// <summary>
/// Internal only. Updates rendering of keyboard based on its current visibility.
/// </summary>
public void UpdateKeyboardVisibility()
{
    bool isVisible = GetKeyboardVisibility();
    UpdatePresentation(isVisible);

    // No model loaded yet — only the passthrough surfaces were updated above.
    if (activeKeyboardRenderers_ == null)
    {
        return;
    }

    foreach (var renderer in activeKeyboardRenderers_)
    {
        renderer.enabled = isVisible;
    }
}
|
||||
|
||||
// Transitions to the given tracking state, maintaining the stale-timeout and
// reacquisition timers, dispatches a visibility event on state change or timeout,
// and re-applies keyboard visibility. Called every frame while tracking.
private void SetKeyboardState(TrackedKeyboardState state)
{
    var oldState = TrackingState;
    TrackingState = state;

    bool timedOut = false;

    switch (state)
    {
        case TrackedKeyboardState.Stale:
            // Hands over the keyboard are expected to occlude tracking, so the
            // timeout only accumulates while hands are away.
            if (!HandsOverKeyboard)
            {
                staleTimeoutCounter_ += Time.deltaTime;
                timedOut = staleTimeoutCounter_ - STALE_TIMEOUT > 0f;

                if (timedOut) {
                    // Track how long we stay timed out and drop the pose filter
                    // so reacquisition starts from the fresh pose.
                    reacquisitionTimer_ += Time.deltaTime;
                    EWAPosition = null;
                    EWARotation = null;
                }
            }
            else
            {
                reacquisitionTimer_ = 0f;
                staleTimeoutCounter_ = 0f;
            }
            break;
        case TrackedKeyboardState.Valid:
            staleTimeoutCounter_ = 0f;

            if (oldState == TrackedKeyboardState.Stale
                && reacquisitionTimer_ > 0f)
            {
                // dispatcher_.Dispatch(new TrackedKeyboardReacquiredEvent(reacquisitionTimer_));
            }
            break;
        case TrackedKeyboardState.StartedNotTracked:
        case TrackedKeyboardState.NoTrackableKeyboard:
        case TrackedKeyboardState.Offline:
            // Inactive states reset both timers.
            reacquisitionTimer_ = 0f;
            staleTimeoutCounter_ = 0f;
            break;
        default:
            break;
    }

    // Notify listeners on any state change, and also on the frame the stale
    // timeout first elapses (state stays Stale but visibility flips).
    if (oldState != state || timedOut)
    {
        DispatchVisibilityEvent(timedOut);
    }

    UpdateKeyboardVisibility();
}
|
||||
|
||||
// Whether the keyboard model should currently be rendered, derived from the
// tracking state and the stale-timeout bookkeeping.
private bool GetKeyboardVisibility()
{
    if (TrackingState == TrackedKeyboardState.Valid)
    {
        return true;
    }

    if (TrackingState == TrackedKeyboardState.Stale)
    {
        // Hands over the keyboard are expected to make tracking stale, so stay
        // visible; otherwise remain visible only until the stale timeout elapses.
        return HandsOverKeyboard || !(staleTimeoutCounter_ - STALE_TIMEOUT > 0f);
    }

    return false;
}
|
||||
|
||||
// Resets ActiveKeyboardInfo to the "no keyboard" sentinel (Identifier == uint.MaxValue).
private void InitializeKeyboardInfo()
{
    var noKeyboard = new OVRKeyboard.TrackedKeyboardInfo
    {
        Name = "None",
        Dimensions = new Vector3(0f, 0f, 0f),
        Identifier = uint.MaxValue
    };

    ActiveKeyboardInfo = noKeyboard;
}
|
||||
|
||||
// Sends an Android broadcast to the VR shell ("com.oculus.vrshell") carrying the
// given systemux dialog URI, so the dialog opens as a system overlay.
// Android-only: AndroidJavaClass/AndroidJavaObject require an Android player.
private void LaunchOverlayIntent(String dataUri)
{
    AndroidJavaObject activityClass = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
    AndroidJavaObject currentActivity = activityClass.GetStatic<AndroidJavaObject>("currentActivity");
    var intent = new AndroidJavaObject("android.content.Intent");

    intent.Call<AndroidJavaObject>("setPackage", "com.oculus.vrshell");
    intent.Call<AndroidJavaObject>("setAction", "com.oculus.vrshell.intent.action.LAUNCH");
    intent.Call<AndroidJavaObject>("putExtra", "intent_data", dataUri);

    // Broadcast instead of starting activity, so that it goes to overlay
    currentActivity.Call("sendBroadcast", intent);
}
|
||||
#endregion
|
||||
|
||||
/// <summary>
/// Stops keyboard tracking and cleans up associated resources.
/// </summary>
public void Dispose()
{
    if (KeyboardTrackerIsRunning())
    {
        StopKeyboardTrackingInternal();
    }

    // Unregister the passthrough surface geometry if it is still attached.
    if (ProjectedPassthroughKeyLabel.IsSurfaceGeometry(projectedPassthroughMesh.gameObject))
    {
        ProjectedPassthroughKeyLabel.RemoveSurfaceGeometry(projectedPassthroughMesh.gameObject);
    }

    if (activeKeyboardMesh_ != null)
    {
        Destroy(activeKeyboardMesh_.gameObject);
    }

    // NOTE(review): the OVROverlay component added in InitializeHandPresenceData()
    // is not destroyed here — confirm whether it should be cleaned up as well.
}
|
||||
|
||||
// Notifies listeners that the keyboard's tracking state / visibility changed.
private void DispatchVisibilityEvent(bool timeOut)
{
    var visibilityEvent = new TrackedKeyboardVisibilityChangedEvent(ActiveKeyboardInfo.Name, TrackingState, timeOut);
    TrackedKeyboardVisibilityChanged?.Invoke(visibilityEvent);
}
|
||||
|
||||
/// <summary>
/// Event sent when tracked keyboard changes visibility (passes in or out of camera view).
/// </summary>
public struct TrackedKeyboardVisibilityChangedEvent
{
    // Name of the keyboard the event refers to (from ActiveKeyboardInfo).
    public readonly string ActiveKeyboardName;
    // Tracking state at the time the event was dispatched.
    public readonly TrackedKeyboardState State;
    // True when the event was triggered by the stale-tracking timeout elapsing.
    public readonly bool TrackingTimeout;

    public TrackedKeyboardVisibilityChangedEvent(string keyboardModel, TrackedKeyboardState state, bool timeout)
    {
        ActiveKeyboardName = keyboardModel;
        State = state;
        TrackingTimeout = timeout;
    }
}
|
||||
|
||||
/// <summary>
/// Event sent when tracked keyboard starts or stops actively tracking.
/// </summary>
public struct TrackedKeyboardSetActiveEvent
{
    // True when tracking was started, false when it was stopped.
    public readonly bool IsEnabled;

    public TrackedKeyboardSetActiveEvent(bool isEnabled) => IsEnabled = isEnabled;
}
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: cde6014a6f09af34c934c03defe61f3a
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,502 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using UnityEngine;
|
||||
using UnityEngine.Assertions;
|
||||
|
||||
public class OVRTrackedKeyboardHands : MonoBehaviour
|
||||
{
|
||||
// Prefabs instantiated for the passthrough left/right hand-presence models.
public GameObject LeftHandPresence;
public GameObject RightHandPresence;
// Set once the hand-presence models have been instantiated and bone-mapped.
private bool handPresenceInitialized_ = false;

// Root transforms of the instantiated hand-presence models.
private Transform leftHandRoot_;
private Transform rightHandRoot_;

// The keyboard tracker whose events drive hand presence (assigned in Inspector).
public OVRTrackedKeyboard KeyboardTracker;

// Cached rig and per-hand components, resolved in Start().
private OVRCameraRig cameraRig_;
private OVRHand leftHand_;
private OVRSkeleton leftHandSkeleton_;
private OVRSkeletonRenderer leftHandSkeletonRenderer_;
private GameObject leftHandSkeletonRendererGO_;
private SkinnedMeshRenderer leftHandSkinnedMeshRenderer_;
private OVRMeshRenderer leftHandMeshRenderer_;
private OVRHand rightHand_;
private OVRSkeleton rightHandSkeleton_;
private OVRSkeletonRenderer rightHandSkeletonRenderer_;
private GameObject rightHandSkeletonRendererGO_;
private OVRMeshRenderer rightHandMeshRenderer_;
private SkinnedMeshRenderer rightHandSkinnedMeshRenderer_;

// Whether each hand is currently positioned over the keyboard.
public bool RightHandOverKeyboard { get; private set; } = false;
public bool LeftHandOverKeyboard { get; private set; } = false;

// Distance thresholds (meters). NOTE(review): their exact use (alpha fade bands,
// passthrough/model switch distances) is outside this chunk — names suggest:
// inner/outer alpha fade thresholds and max/min hand-to-keyboard distances.
private static readonly float handInnerAlphaThreshold_ = 0.08f;
private static readonly float handOuterAlphaThreshold_ = 0.20f;
private static readonly float maximumPassthroughHandsDistance_ = 0.18f;
private static readonly float minimumModelHandsDistance_ = 0.11f;

// Last visibility event received from the keyboard tracker; null before the first one.
private TrackedKeyboardHandsVisibilityChangedEvent? lastVisibilityEvent_ = null;
|
||||
// Maps one tracked-skeleton bone to the corresponding transforms and bone names
// in the left/right hand-presence models.
private struct HandBoneMapping
{
    // Bone transforms on the tracked hand skeletons (resolved at runtime).
    public Transform LeftHandTransform;
    // Matching transforms on the instantiated hand-presence models.
    public Transform LeftPresenceTransform;
    public Transform RightHandTransform;
    public Transform RightPresenceTransform;

    // Skeleton bone this entry maps.
    public OVRSkeleton.BoneId BoneName;
    // Names of the corresponding bones in the hand-presence model hierarchies.
    public string HandPresenceLeftBoneName;
    public string HandPresenceRightBoneName;
};
|
||||
|
||||
// Lookup table pairing each tracked skeleton bone with its counterpart bones in the
// hand-presence models (which follow the "b_l_*" / "b_r_*" naming convention).
// Transform fields are left default here and resolved at runtime.
private readonly HandBoneMapping[] boneMappings_ = new HandBoneMapping[]
{
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_WristRoot, HandPresenceLeftBoneName = "b_l_wrist", HandPresenceRightBoneName = "b_r_wrist" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Thumb0, HandPresenceLeftBoneName = "b_l_thumb0", HandPresenceRightBoneName = "b_r_thumb0" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Thumb1, HandPresenceLeftBoneName = "b_l_thumb1", HandPresenceRightBoneName = "b_r_thumb1" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Thumb2, HandPresenceLeftBoneName = "b_l_thumb2", HandPresenceRightBoneName = "b_r_thumb2" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Thumb3, HandPresenceLeftBoneName = "b_l_thumb3", HandPresenceRightBoneName = "b_r_thumb3" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Index1, HandPresenceLeftBoneName = "b_l_index1", HandPresenceRightBoneName = "b_r_index1" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Index2, HandPresenceLeftBoneName = "b_l_index2", HandPresenceRightBoneName = "b_r_index2" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Index3, HandPresenceLeftBoneName = "b_l_index3", HandPresenceRightBoneName = "b_r_index3" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Middle1, HandPresenceLeftBoneName = "b_l_middle1", HandPresenceRightBoneName = "b_r_middle1" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Middle2, HandPresenceLeftBoneName = "b_l_middle2", HandPresenceRightBoneName = "b_r_middle2" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Middle3, HandPresenceLeftBoneName = "b_l_middle3", HandPresenceRightBoneName = "b_r_middle3" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Ring1, HandPresenceLeftBoneName = "b_l_ring1", HandPresenceRightBoneName = "b_r_ring1" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Ring2, HandPresenceLeftBoneName = "b_l_ring2", HandPresenceRightBoneName = "b_r_ring2" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Ring3, HandPresenceLeftBoneName = "b_l_ring3", HandPresenceRightBoneName = "b_r_ring3" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Pinky0, HandPresenceLeftBoneName = "b_l_pinky0", HandPresenceRightBoneName = "b_r_pinky0" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Pinky1, HandPresenceLeftBoneName = "b_l_pinky1", HandPresenceRightBoneName = "b_r_pinky1" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Pinky2, HandPresenceLeftBoneName = "b_l_pinky2", HandPresenceRightBoneName = "b_r_pinky2" },
    new HandBoneMapping { BoneName = OVRSkeleton.BoneId.Hand_Pinky3, HandPresenceLeftBoneName = "b_l_pinky3", HandPresenceRightBoneName = "b_r_pinky3" }
};
||||
|
||||
// Material used to render the hand-presence models; LateUpdate feeds it the
// tracked keyboard's pose and scale through the shader properties below.
public Material HandsMaterial;

#region MATERIAL PROPERTIES

// Scale factors applied to the keyboard's reported x/z dimensions when
// building the _KeyboardScale vector (see LateUpdate). Values are tuning
// constants — presumably chosen to match the hands shader; confirm there.
private const float XSCALE = 0.73f;
private const float YSCALE = 0.8f;
// Offset (meters) along the keyboard's forward axis added to _KeyboardPosition.
private const float FORWARD_OFFSET = -0.02f;

// Shader property IDs resolved once in Awake() via Shader.PropertyToID,
// avoiding per-frame string lookups in LateUpdate.
private int keyboardPositionID_;
private int keyboardRotationID_;
private int keyboardScaleID_;

#endregion
|
||||
|
||||
/// <summary>
/// Subscribes to the keyboard tracker's state events and caches the shader
/// property IDs used to pass the keyboard pose/scale to HandsMaterial.
/// </summary>
private void Awake() {
    KeyboardTracker.TrackedKeyboardActiveChanged += TrackedKeyboardActiveUpdated;
    KeyboardTracker.TrackedKeyboardVisibilityChanged += TrackedKeyboardVisibilityChanged;

    // Resolve property IDs once; LateUpdate writes these every frame.
    keyboardPositionID_ = Shader.PropertyToID("_KeyboardPosition");
    keyboardRotationID_ = Shader.PropertyToID("_KeyboardRotation");
    keyboardScaleID_ = Shader.PropertyToID("_KeyboardScale");
}
|
||||
|
||||
/// <summary>
/// Caches the hand-tracking components hanging off the scene's OVRCameraRig,
/// instantiates the (initially inactive) hand-presence prefabs, and retargets
/// hand tracking onto them. The component starts disabled; it is re-enabled
/// from TrackedKeyboardVisibilityChanged once the keyboard is validly tracked.
/// </summary>
private void Start()
{
    enabled = false;

    cameraRig_ = FindObjectOfType<OVRCameraRig>();
    leftHand_ = cameraRig_.leftHandAnchor.GetComponentInChildren<OVRHand>();
    rightHand_ = cameraRig_.rightHandAnchor.GetComponentInChildren<OVRHand>();
    leftHandSkeleton_ = leftHand_.GetComponent<OVRSkeleton>();
    rightHandSkeleton_ = rightHand_.GetComponent<OVRSkeleton>();

    leftHandMeshRenderer_ = leftHand_.GetComponent<OVRMeshRenderer>();
    rightHandMeshRenderer_ = rightHand_.GetComponent<OVRMeshRenderer>();

    leftHandSkeletonRenderer_ = leftHand_.GetComponent<OVRSkeletonRenderer>();
    rightHandSkeletonRenderer_ = rightHand_.GetComponent<OVRSkeletonRenderer>();
    // Fix: GetComponent returns null when the component is absent; the original
    // dereferenced .enabled unconditionally. Treat "missing" the same as "disabled".
    if (leftHandSkeletonRenderer_ == null || !leftHandSkeletonRenderer_.enabled)
    {
        // App is not using skeleton renderer
        leftHandSkeletonRenderer_ = null;
        rightHandSkeletonRenderer_ = null;
    }

    leftHandSkinnedMeshRenderer_ = leftHand_.GetComponent<SkinnedMeshRenderer>();
    rightHandSkinnedMeshRenderer_ = rightHand_.GetComponent<SkinnedMeshRenderer>();

    var leftHand = GameObject.Instantiate(LeftHandPresence);
    var rightHand = GameObject.Instantiate(RightHandPresence);
    leftHandRoot_ = leftHand.transform;
    rightHandRoot_ = rightHand.transform;

    // Presence models stay hidden until a hand hovers over the keyboard
    // (activation is driven from LateUpdate).
    leftHand.SetActive(false);
    rightHand.SetActive(false);

#if !UNITY_EDITOR // GameObject trees for hands only available on-device
    RetargetHandTrackingToHandPresence();
#endif
}
|
||||
|
||||
// Controllers are considered active when neither hand is currently tracked.
private bool AreControllersActive =>
    !leftHand_.IsTracked && !rightHand_.IsTracked;
|
||||
|
||||
/// <summary>
/// Per-frame driver for hand presence over the tracked keyboard: retargets the
/// tracked-hand bone rotations onto the presence models, decides which hands are
/// "over" the keyboard, toggles models/passthrough accordingly, and pushes the
/// keyboard pose/scale into HandsMaterial.
/// </summary>
private void LateUpdate()
{
    // When hands aren't tracked (controllers in use), hide everything and bail.
    if (AreControllersActive)
    {
        DisableHandObjects();
        return;
    }

    // Copy tracked-hand bone rotations onto the presence-model bones.
    foreach (var boneEntry in boneMappings_)
    {
        boneEntry.LeftPresenceTransform.localRotation = boneEntry.LeftHandTransform.localRotation;
        boneEntry.RightPresenceTransform.localRotation = boneEntry.RightHandTransform.localRotation;
        // The wrist root additionally takes the world rotation and the
        // per-hand uniform scale reported by hand tracking.
        if (boneEntry.BoneName == OVRSkeleton.BoneId.Hand_WristRoot)
        {
            boneEntry.LeftPresenceTransform.rotation = boneEntry.LeftHandTransform.rotation;
            boneEntry.RightPresenceTransform.rotation = boneEntry.RightHandTransform.rotation;
            var leftScale = leftHand_.HandScale;
            var rightScale = rightHand_.HandScale;
            boneEntry.RightPresenceTransform.localScale = new Vector3(rightScale, rightScale, rightScale);
            boneEntry.LeftPresenceTransform.localScale = new Vector3(leftScale, leftScale, leftScale);
        }
    }
    // Snap the presence roots to the tracked hand anchors.
    rightHandRoot_.position = rightHand_.transform.position;
    rightHandRoot_.rotation = rightHand_.transform.rotation;
    leftHandRoot_.position = leftHand_.transform.position;
    leftHandRoot_.rotation = leftHand_.transform.rotation;

    var leftHandDistance = GetHandDistanceToKeyboard(leftHandSkeleton_);
    var rightHandDistance = GetHandDistanceToKeyboard(rightHandSkeleton_);

    LeftHandOverKeyboard = ShouldEnablePassthrough(leftHandDistance);
    RightHandOverKeyboard = ShouldEnablePassthrough(rightHandDistance);

    KeyboardTracker.HandsOverKeyboard = RightHandOverKeyboard || LeftHandOverKeyboard;

    // Note: the passthrough threshold and the model threshold are distinct, so
    // a hand can show both the model and passthrough in the transition band.
    var enableLeftModel = ShouldEnableModel(leftHandDistance);
    var enableRightModel = ShouldEnableModel(rightHandDistance);
    SetHandModelsEnabled(enableLeftModel, enableRightModel);

    if (KeyboardTracker.Presentation == OVRTrackedKeyboard.KeyboardPresentation.PreferOpaque)
    {
        // Used mixed reality service hands
        leftHandRoot_.gameObject.SetActive(false);
        rightHandRoot_.gameObject.SetActive(false);
    }
    else
    {
        leftHandRoot_.gameObject.SetActive(LeftHandOverKeyboard);
        rightHandRoot_.gameObject.SetActive(RightHandOverKeyboard);
    }

    // Push the keyboard pose into the hands material (zeroed when no keyboard
    // transform is active).
    var position = KeyboardTracker.ActiveKeyboardTransform?.position;
    var rotation = KeyboardTracker.ActiveKeyboardTransform?.rotation;
    var offset = KeyboardTracker.ActiveKeyboardTransform == null
        ? Vector3.zero
        : KeyboardTracker.ActiveKeyboardTransform.forward * FORWARD_OFFSET;

    HandsMaterial.SetVector(keyboardPositionID_, position.HasValue ? position.Value + offset : Vector3.zero);
    HandsMaterial.SetVector(keyboardRotationID_, rotation.HasValue ? rotation.Value.eulerAngles : Vector3.zero);
    HandsMaterial.SetVector(
        keyboardScaleID_,
        new Vector4(
            KeyboardTracker.ActiveKeyboardInfo.Dimensions.x * XSCALE,
            0.1f, // fixed y extent — presumably the shader's vertical bounds; confirm against shader
            KeyboardTracker.ActiveKeyboardInfo.Dimensions.z * YSCALE,
            1f
        )
    );

    // Notify the tracker only on visibility transitions, not every frame.
    if (lastVisibilityEvent_ == null
        || LeftHandOverKeyboard != lastVisibilityEvent_.Value.leftVisible
        || RightHandOverKeyboard != lastVisibilityEvent_.Value.rightVisible)
    {
        lastVisibilityEvent_ = new TrackedKeyboardHandsVisibilityChangedEvent
        {
            leftVisible = LeftHandOverKeyboard,
            rightVisible = RightHandOverKeyboard
        };
        KeyboardTracker.UpdateKeyboardVisibility();
    }

    // Fade passthrough hand intensity with distance while either hand is over
    // the keyboard.
    if (LeftHandOverKeyboard || RightHandOverKeyboard)
    {
        var handsIntensity = new OVRPlugin.InsightPassthroughKeyboardHandsIntensity
        {
            LeftHandIntensity =
                ComputeOpacity(leftHandDistance, handInnerAlphaThreshold_, handOuterAlphaThreshold_),
            RightHandIntensity =
                ComputeOpacity(rightHandDistance, handInnerAlphaThreshold_, handOuterAlphaThreshold_)
        };
        OVRPlugin.SetInsightPassthroughKeyboardHandsIntensity(KeyboardTracker.PassthroughOverlay.layerId, handsIntensity);
    }
}
|
||||
|
||||
// A hand warrants passthrough once it comes within the configured
// maximum passthrough distance of the keyboard.
private bool ShouldEnablePassthrough(float distance) =>
    distance <= maximumPassthroughHandsDistance_;
|
||||
|
||||
// The regular hand model stays visible while the hand is at least the
// configured minimum distance away from the keyboard.
private bool ShouldEnableModel(float distance) =>
    distance >= minimumModelHandsDistance_;
|
||||
|
||||
/// <summary>
/// Returns the smallest distance from the keyboard to three sample points on
/// the hand: the index fingertip, the middle-finger base, and the pinky tip.
/// </summary>
private float GetHandDistanceToKeyboard(OVRSkeleton handSkeleton)
{
    // TODO: Switch back to PointerPose once it's working in OpenXR
    var bones = handSkeleton.Bones;
    var indexTip = bones[(int) OVRSkeleton.BoneId.Hand_Index3].Transform.position;
    var middleBase = bones[(int) OVRSkeleton.BoneId.Hand_Middle1].Transform.position;
    var pinkyTip = bones[(int) OVRSkeleton.BoneId.Hand_Pinky3].Transform.position;

    float closest = KeyboardTracker.GetDistanceToKeyboard(indexTip);
    closest = Mathf.Min(closest, KeyboardTracker.GetDistanceToKeyboard(middleBase));
    return Mathf.Min(closest, KeyboardTracker.GetDistanceToKeyboard(pinkyTip));
}
|
||||
|
||||
/// <summary>
/// Linear opacity ramp: 1 at or inside innerThreshold, 0 at or beyond
/// outerThreshold, interpolated in between.
/// </summary>
private float ComputeOpacity(float distance, float innerThreshold, float outerThreshold)
{
    float ramp = (outerThreshold - distance) / (outerThreshold - innerThreshold);
    return Mathf.Clamp01(ramp);
}
|
||||
|
||||
/// <summary>
/// Shows or hides the regular hand-model renderers (mesh, skinned mesh, and —
/// if the app uses it — the skeleton renderer's generated child objects).
/// </summary>
private void SetHandModelsEnabled(bool enableLeftModel, bool enableRightModel)
{
    leftHandMeshRenderer_.enabled = enableLeftModel;
    leftHandSkinnedMeshRenderer_.enabled = enableLeftModel;
    rightHandMeshRenderer_.enabled = enableRightModel;
    rightHandSkinnedMeshRenderer_.enabled = enableRightModel;

    if (leftHandSkeletonRenderer_ == null)
    {
        // App is not using the skeleton renderer (cleared in Start()).
        return;
    }

    if (leftHandSkeletonRendererGO_ == null)
    {
        // Lazily resolve the child objects the skeleton renderer creates.
        leftHandSkeletonRendererGO_ = leftHandSkeletonRenderer_.gameObject.transform.Find("SkeletonRenderer")?.gameObject;
        rightHandSkeletonRendererGO_ = rightHandSkeletonRenderer_.gameObject.transform.Find("SkeletonRenderer")?.gameObject;
    }

    if (leftHandSkeletonRendererGO_ != null)
    {
        leftHandSkeletonRendererGO_.SetActive(enableLeftModel);
    }
    if (rightHandSkeletonRendererGO_ != null)
    {
        rightHandSkeletonRendererGO_.SetActive(enableRightModel);
    }
}
|
||||
|
||||
/// <summary>
/// Binds every entry of boneMappings_ to the matching transforms in both the
/// tracked-hand hierarchies (via OVRSkeleton bone labels) and the instantiated
/// hand-presence prefabs (via the entry's per-hand bone names), asserting each
/// lookup succeeds, then marks hand presence as initialized.
/// </summary>
private void RetargetHandTrackingToHandPresence()
{
    Assert.IsTrue(LeftHandPresence != null && RightHandPresence != null);

    for (int index = 0; index < boneMappings_.Length; index++)
    {
        var entry = boneMappings_[index];

        // Resolve the tracked-skeleton bone names for this bone id on each side.
        var ovrBoneStringLeft = OVRSkeleton.BoneLabelFromBoneId(OVRSkeleton.SkeletonType.HandLeft, entry.BoneName);
        var ovrBoneStringRight = OVRSkeleton.BoneLabelFromBoneId(OVRSkeleton.SkeletonType.HandRight, entry.BoneName);

        // NOTE(review): writes go through boneMappings_[index] rather than the
        // `entry` local — presumably so they stick if HandBoneMapping is a
        // value type; confirm against its declaration.
        boneMappings_[index].LeftHandTransform =
            leftHand_.transform.FindChildRecursive(ovrBoneStringLeft);
        boneMappings_[index].LeftPresenceTransform = leftHandRoot_.FindChildRecursive(entry.HandPresenceLeftBoneName);

        boneMappings_[index].RightHandTransform =
            rightHand_.transform.FindChildRecursive(ovrBoneStringRight);
        boneMappings_[index].RightPresenceTransform = rightHandRoot_.FindChildRecursive(entry.HandPresenceRightBoneName);

        // Fail loudly (with the offending bone names) if any lookup came back null.
        Assert.IsTrue(
            boneMappings_[index].LeftPresenceTransform != null
            && boneMappings_[index].RightPresenceTransform != null
            && boneMappings_[index].RightHandTransform != null
            && boneMappings_[index].LeftHandTransform != null,
            string.Format(
                "[tracked_keyboard] - entry.lp {0} && entry.rp {1} && entry.rt {2} && entry.lt {3}, {4}, {5}",
                boneMappings_[index].LeftPresenceTransform,
                boneMappings_[index].RightPresenceTransform,
                boneMappings_[index].RightHandTransform,
                boneMappings_[index].LeftHandTransform,
                ovrBoneStringRight,
                ovrBoneStringLeft
            )
        );
    }

    // Gate checked by TrackedKeyboardVisibilityChanged before enabling LateUpdate.
    handPresenceInitialized_ = true;
}
|
||||
|
||||
/// <summary>
/// Shuts down hand presence: stops per-frame updates, restores the regular
/// hand models, and hides the presence objects.
/// </summary>
private void StopHandPresence()
{
    enabled = false;
    // Re-enable hand models if they are disabled; from here OVRHand owns
    // controller/hands switching again.
    SetHandModelsEnabled(true, true);
    DisableHandObjects();
}
|
||||
|
||||
/// <summary>
/// Clears all "hand over keyboard" state and deactivates both presence roots.
/// </summary>
private void DisableHandObjects()
{
    LeftHandOverKeyboard = false;
    RightHandOverKeyboard = false;
    KeyboardTracker.HandsOverKeyboard = false;

    // Roots are created in Start(); they may not exist yet if tracking
    // stopped before initialization finished. (Explicit null checks rather
    // than ?. because these are UnityEngine.Object references.)
    if (leftHandRoot_ != null)
    {
        leftHandRoot_.gameObject.SetActive(false);
    }
    if (rightHandRoot_ != null)
    {
        rightHandRoot_.gameObject.SetActive(false);
    }
}
|
||||
|
||||
/// <summary>
/// Event handler: when keyboard tracking is switched off, tears down hand presence.
/// Enabling is handled separately via TrackedKeyboardVisibilityChanged.
/// </summary>
public void TrackedKeyboardActiveUpdated(OVRTrackedKeyboard.TrackedKeyboardSetActiveEvent e)
{
    if (e.IsEnabled)
    {
        return;
    }
    StopHandPresence();
}
|
||||
|
||||
/// <summary>
/// Event handler for keyboard tracking state transitions: enables per-frame
/// hand presence only while the keyboard is validly tracked, and stops it in
/// all offline/error/idle states.
/// </summary>
public void TrackedKeyboardVisibilityChanged(OVRTrackedKeyboard.TrackedKeyboardVisibilityChangedEvent e)
{
    switch (e.State)
    {
        case OVRTrackedKeyboard.TrackedKeyboardState.Offline:
        case OVRTrackedKeyboard.TrackedKeyboardState.NoTrackableKeyboard:
        case OVRTrackedKeyboard.TrackedKeyboardState.StartedNotTracked:
            StopHandPresence();
            break;
        case OVRTrackedKeyboard.TrackedKeyboardState.Valid:
            // Only enable once bone retargeting has completed (see
            // RetargetHandTrackingToHandPresence).
            enabled = handPresenceInitialized_;
            break;
        case OVRTrackedKeyboard.TrackedKeyboardState.Stale:
            // A stale pose alone is tolerated; only stop after a tracking timeout.
            if (e.TrackingTimeout)
            {
                StopHandPresence();
            }
            break;
        case OVRTrackedKeyboard.TrackedKeyboardState.Uninitialized:
        case OVRTrackedKeyboard.TrackedKeyboardState.Error:
        case OVRTrackedKeyboard.TrackedKeyboardState.ErrorExtensionFailed:
            // States that should not be delivered here: stop and warn.
            StopHandPresence();
            Debug.LogWarning("Invalid state passed into TrackedKeyboardVisibilityChanged " + e.State.ToString());
            break;
        default:
            throw new System.Exception(
                $"[tracked_keyboard] - unhandled state: TrackedKeyboardVisibilityChanged {e.State}"
            );
    }
}
|
||||
|
||||
/// <summary>
/// Snapshot of which hand-presence models are currently shown over the
/// tracked keyboard; a new value is recorded whenever either flag changes.
/// </summary>
public struct TrackedKeyboardHandsVisibilityChangedEvent
{
    // True when the left hand is within passthrough range of the keyboard.
    public bool leftVisible;
    // True when the right hand is within passthrough range of the keyboard.
    public bool rightVisible;
}
|
||||
}
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: f856490bcef82364d9770bece066fcf5
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
@@ -0,0 +1,81 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using UnityEngine;
|
||||
using UnityEngine.UI;
|
||||
|
||||
/// <summary>
/// UI controller for the tracked-keyboard sample scene: displays the system
/// keyboard's name, connection flag, and tracking state, and exposes
/// toggles/buttons that drive the attached OVRTrackedKeyboard component.
/// </summary>
public class OVRTrackedKeyboardSampleControls : MonoBehaviour
{
	public OVRTrackedKeyboard trackedKeyboard;
	public InputField StartingFocusField;
	public Text NameValue;
	public Text ConnectedValue;
	public Text StateValue;
	public Color GoodStateColor = new Color(0.25f, 1, 0.25f, 1);
	public Color BadStateColor = new Color(1, 0.25f, 0.25f, 1);
	public Toggle TrackingToggle;
	public Toggle ConnectionToggle;

	void Start()
	{
		// Give the sample's input field keyboard focus immediately.
		StartingFocusField.Select();
		StartingFocusField.ActivateInputField();

		// Sync the UI toggles with the component's current settings. Assign only
		// when they differ — presumably to avoid redundant onValueChanged
		// callbacks; confirm against the toggles' listeners.
		if (TrackingToggle.isOn != trackedKeyboard.TrackingEnabled)
		{
			TrackingToggle.isOn = trackedKeyboard.TrackingEnabled;
		}
		if (ConnectionToggle.isOn != trackedKeyboard.ConnectionRequired)
		{
			ConnectionToggle.isOn = trackedKeyboard.ConnectionRequired;
		}
	}

	void Update()
	{
		NameValue.text = trackedKeyboard.SystemKeyboardInfo.Name;
		// (Cleanup: removed a redundant (bool) cast around this already-boolean
		// flag test.)
		bool connected =
			(trackedKeyboard.SystemKeyboardInfo.KeyboardFlags & OVRPlugin.TrackedKeyboardFlags.Connected) != 0;
		ConnectedValue.text = connected.ToString();

		StateValue.text = trackedKeyboard.TrackingState.ToString();
		switch (trackedKeyboard.TrackingState)
		{
			// Error / not-tracking states render red; everything else green.
			case OVRTrackedKeyboard.TrackedKeyboardState.Uninitialized:
			case OVRTrackedKeyboard.TrackedKeyboardState.Error:
			case OVRTrackedKeyboard.TrackedKeyboardState.ErrorExtensionFailed:
			case OVRTrackedKeyboard.TrackedKeyboardState.StartedNotTracked:
			case OVRTrackedKeyboard.TrackedKeyboardState.Stale:
				StateValue.color = BadStateColor;
				break;
			default:
				StateValue.color = GoodStateColor;
				break;
		}
	}

	/// <summary>Button handler: prefer the opaque keyboard presentation.</summary>
	public void SetPresentationOpaque()
	{
		trackedKeyboard.Presentation = OVRTrackedKeyboard.KeyboardPresentation.PreferOpaque;
	}

	/// <summary>Button handler: prefer the key-label keyboard presentation.</summary>
	public void SetPresentationKeyLabels()
	{
		trackedKeyboard.Presentation = OVRTrackedKeyboard.KeyboardPresentation.PreferKeyLabels;
	}

	/// <summary>Button handler: opens the local keyboard selection dialog.</summary>
	public void LaunchKeyboardSelection()
	{
		trackedKeyboard.LaunchLocalKeyboardSelectionDialog();
	}

	/// <summary>Toggle handler: enables or disables keyboard tracking.</summary>
	public void SetTrackingEnabled(bool value)
	{
		trackedKeyboard.TrackingEnabled = value;
	}
}
|
||||
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 58529f2accd8ef7449ea22070cdfd2ca
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
189
Assets/Oculus/VR/Scripts/OVRTracker.cs
Normal file
189
Assets/Oculus/VR/Scripts/OVRTracker.cs
Normal file
@@ -0,0 +1,189 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
using System;
|
||||
using System.Runtime.InteropServices;
|
||||
using UnityEngine;
|
||||
|
||||
/// <summary>
/// An infrared camera that tracks the position of a head-mounted display.
/// </summary>
public class OVRTracker
{
	/// <summary>
	/// The (symmetric) visible area in front of the sensor.
	/// </summary>
	public struct Frustum
	{
		/// <summary>
		/// The sensor's minimum supported distance to the HMD.
		/// </summary>
		public float nearZ;
		/// <summary>
		/// The sensor's maximum supported distance to the HMD.
		/// </summary>
		public float farZ;
		/// <summary>
		/// The sensor's horizontal and vertical fields of view in degrees.
		/// </summary>
		public Vector2 fov;
	}

	/// <summary>
	/// If true, a sensor is attached to the system.
	/// </summary>
	public bool isPresent => OVRManager.isHmdPresent && OVRPlugin.positionSupported;

	/// <summary>
	/// If true, the sensor is actively tracking the HMD's position. Otherwise the HMD may be temporarily occluded, the system may not support position tracking, etc.
	/// </summary>
	public bool isPositionTracked => OVRPlugin.positionTracked;

	/// <summary>
	/// If this is true and a sensor is available, the system will use position tracking when isPositionTracked is also true.
	/// </summary>
	public bool isEnabled
	{
		get
		{
			return OVRManager.isHmdPresent && OVRPlugin.position;
		}
		set
		{
			// Silently ignore writes when no HMD is present.
			if (!OVRManager.isHmdPresent)
				return;
			OVRPlugin.position = value;
		}
	}

	/// <summary>
	/// Returns the number of sensors currently connected to the system.
	/// </summary>
	public int count
	{
		get
		{
			int present = 0;
			for (int i = 0; i < (int)OVRPlugin.Tracker.Count; ++i)
			{
				if (GetPresent(i))
					present++;
			}
			return present;
		}
	}

	// Maps a tracker index to its OVRPlugin node. Returns false for
	// out-of-range indices (callers then fall back to their defaults).
	private static bool TryGetTrackerNode(int tracker, out OVRPlugin.Node node)
	{
		switch (tracker)
		{
			case 0: node = OVRPlugin.Node.TrackerZero; return true;
			case 1: node = OVRPlugin.Node.TrackerOne; return true;
			case 2: node = OVRPlugin.Node.TrackerTwo; return true;
			case 3: node = OVRPlugin.Node.TrackerThree; return true;
			default: node = default(OVRPlugin.Node); return false;
		}
	}

	/// <summary>
	/// Gets the sensor's viewing frustum.
	/// </summary>
	public Frustum GetFrustum(int tracker = 0)
	{
		if (!OVRManager.isHmdPresent)
			return new Frustum();

		return OVRPlugin.GetTrackerFrustum((OVRPlugin.Tracker)tracker).ToFrustum();
	}

	/// <summary>
	/// Gets the sensor's pose, relative to the head's pose at the time of the last pose recentering.
	/// </summary>
	public OVRPose GetPose(int tracker = 0)
	{
		OVRPlugin.Node node;
		if (!OVRManager.isHmdPresent || !TryGetTrackerNode(tracker, out node))
			return OVRPose.identity;

		OVRPose p = OVRPlugin.GetNodePose(node, OVRPlugin.Step.Render).ToOVRPose();

		// Flip the reported orientation 180 degrees about Y before returning.
		return new OVRPose()
		{
			position = p.position,
			orientation = p.orientation * Quaternion.Euler(0, 180, 0)
		};
	}

	/// <summary>
	/// If true, the pose of the sensor is valid and is ready to be queried.
	/// </summary>
	public bool GetPoseValid(int tracker = 0)
	{
		OVRPlugin.Node node;
		if (!OVRManager.isHmdPresent || !TryGetTrackerNode(tracker, out node))
			return false;

		return OVRPlugin.GetNodePositionTracked(node);
	}

	/// <summary>
	/// If true, the sensor at the given index is connected to the system.
	/// </summary>
	public bool GetPresent(int tracker = 0)
	{
		OVRPlugin.Node node;
		if (!OVRManager.isHmdPresent || !TryGetTrackerNode(tracker, out node))
			return false;

		return OVRPlugin.GetNodePresent(node);
	}
}
|
||||
8
Assets/Oculus/VR/Scripts/OVRTracker.cs.meta
Normal file
8
Assets/Oculus/VR/Scripts/OVRTracker.cs.meta
Normal file
@@ -0,0 +1,8 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 7cb3c9d4cb0970e448c655096649e814
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
39
Assets/Oculus/VR/Scripts/OVRXRSDKNative.cs
Normal file
39
Assets/Oculus/VR/Scripts/OVRXRSDKNative.cs
Normal file
@@ -0,0 +1,39 @@
|
||||
/************************************************************************************
|
||||
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
|
||||
|
||||
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
|
||||
https://developer.oculus.com/licenses/oculussdk/
|
||||
|
||||
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
|
||||
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
||||
ANY KIND, either express or implied. See the License for the specific language governing
|
||||
permissions and limitations under the License.
|
||||
************************************************************************************/
|
||||
|
||||
#if USING_XR_MANAGEMENT && USING_XR_SDK_OCULUS
|
||||
#define USING_XR_SDK
|
||||
#endif
|
||||
|
||||
using System.Runtime.InteropServices;
|
||||
|
||||
// C# wrapper for Unity XR SDK Native APIs.
|
||||
|
||||
#if USING_XR_SDK
// P/Invoke bindings into the native "OculusXRPlugin" library shipped with the
// Unity Oculus XR Plugin. Only compiled when both XR Management and the Oculus
// XR SDK packages are present (see the USING_XR_SDK define above).
// NOTE(review): the semantics below are implemented natively — confirm against
// the Oculus XR Plugin package documentation.
public static class OculusXRPlugin
{
	// Sets a per-channel (RGBA) color scale on the native plugin.
	[DllImport("OculusXRPlugin")]
	public static extern void SetColorScale(float x, float y, float z, float w);

	// Sets a per-channel (RGBA) color offset on the native plugin.
	[DllImport("OculusXRPlugin")]
	public static extern void SetColorOffset(float x, float y, float z, float w);

	// Enables or disables Application SpaceWarp.
	[DllImport("OculusXRPlugin")]
	public static extern void SetSpaceWarp(OVRPlugin.Bool on);

	// Reports the app-space position to the native plugin (used with SpaceWarp).
	[DllImport("OculusXRPlugin")]
	public static extern void SetAppSpacePosition(float x, float y, float z);

	// Reports the app-space rotation (quaternion x,y,z,w) to the native plugin.
	[DllImport("OculusXRPlugin")]
	public static extern void SetAppSpaceRotation(float x, float y, float z, float w);
}
#endif
|
||||
11
Assets/Oculus/VR/Scripts/OVRXRSDKNative.cs.meta
Normal file
11
Assets/Oculus/VR/Scripts/OVRXRSDKNative.cs.meta
Normal file
@@ -0,0 +1,11 @@
|
||||
fileFormatVersion: 2
|
||||
guid: 903a593623dfcbf4a81205c0f7386ea9
|
||||
MonoImporter:
|
||||
externalObjects: {}
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
||||
5
Assets/Oculus/VR/Scripts/Util.meta
Normal file
5
Assets/Oculus/VR/Scripts/Util.meta
Normal file
@@ -0,0 +1,5 @@
|
||||
fileFormatVersion: 2
|
||||
guid: c0c7a593695f68e4bbe0cabb0f4f93f2
|
||||
folderAsset: yes
|
||||
DefaultImporter:
|
||||
userData:
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user