clean project

This commit is contained in:
Helar Jaadla
2022-03-07 17:52:41 +02:00
parent a174b45bd2
commit cbeb10ec35
5100 changed files with 837159 additions and 0 deletions

View File

@@ -0,0 +1,61 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Test behaviour: every <see cref="period"/> seconds, toggles the attached
/// renderer's material color between green and red and plays the attached
/// AudioSource, making periodic audio playback visually verifiable.
/// </summary>
public class OVRAudioSourceTest : MonoBehaviour
{
    /// <summary>Seconds between color toggles / audio playbacks.</summary>
    public float period = 2.0f;

    private float nextActionTime;

    // Components cached in Start() so Update() does not repeat GetComponent lookups.
    private Renderer cachedRenderer;
    private AudioSource cachedAudioSource;

    // Start is called before the first frame update
    void Start()
    {
        cachedRenderer = GetComponent<Renderer>();
        cachedAudioSource = GetComponent<AudioSource>();

        // Instantiate a private copy of the material so the shared asset is not tinted.
        Material templateMaterial = cachedRenderer.material;
        Material newMaterial = Instantiate<Material>(templateMaterial);
        newMaterial.color = Color.green;
        cachedRenderer.material = newMaterial;

        nextActionTime = Time.time + period;
    }

    // Update is called once per frame
    void Update()
    {
        if (Time.time > nextActionTime)
        {
            nextActionTime = Time.time + period;

            // Flip between green and red so each periodic action is visible.
            Material mat = cachedRenderer.material;
            mat.color = (mat.color == Color.green) ? Color.red : Color.green;

            if (cachedAudioSource == null)
            {
                Debug.LogError("Unable to find AudioSource");
            }
            else
            {
                cachedAudioSource.Play();
            }
        }
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 358b12a21a8aa9540b435051f334fe9b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,46 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// If there is a game object under the main camera which should not be cloned under Mixed Reality Capture,
// attaching this component would auto destroy that after the MRC camera get cloned
public class OVRAutoDestroyInMRC : MonoBehaviour
{
    // Checks once at startup whether any ancestor transform belongs to a cloned
    // Mixed Reality Capture camera rig (clone roots are named with the
    // "OculusMRC_" prefix) and, if so, destroys this GameObject so it does not
    // appear in the capture.
    void Start()
    {
        for (Transform ancestor = transform.parent; ancestor != null; ancestor = ancestor.parent)
        {
            // Ordinal comparison: the prefix is a fixed identifier, not user-facing text.
            if (ancestor.gameObject.name.StartsWith("OculusMRC_", System.StringComparison.Ordinal))
            {
                Destroy(gameObject);
                break;
            }
        }
    }

    // NOTE: the original empty Update() was removed — Unity invokes empty magic
    // methods via reflection every frame, which is pure per-frame overhead.
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 42a68265e2d624d49ae7fced6a7e4d91
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,47 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Allows you to toggle chromatic aberration correction with a gamepad button press.
/// </summary>
public class OVRChromaticAberration : MonoBehaviour
{
    /// <summary>
    /// The button that will toggle chromatic aberration correction.
    /// </summary>
    public OVRInput.RawButton toggleButton = OVRInput.RawButton.X;

    // Current correction state; pushed to OVRManager whenever it changes.
    private bool chromatic = false;

    void Start()
    {
        // Apply the initial (disabled) state on startup.
        // NOTE: Enabling Chromatic Aberration for mobile has a large performance cost.
        ApplyState();
    }

    void Update()
    {
        // NOTE: some of the buttons defined in OVRInput.RawButton are not available on the Android game pad controller
        if (!OVRInput.GetDown(toggleButton))
        {
            return;
        }

        // Flip and re-apply chromatic aberration correction.
        chromatic = !chromatic;
        ApplyState();
    }

    // Pushes the current toggle state to the OVRManager singleton.
    private void ApplyState()
    {
        OVRManager.instance.chromatic = chromatic;
    }
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 3b56515a831f2fb44bc7ae02679aeebc
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,183 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Simple helper script that conditionally enables rendering of a controller if it is connected.
/// </summary>
public class OVRControllerHelper : MonoBehaviour
{
    /// <summary>
    /// The root GameObject that represents the Oculus Touch for Quest And RiftS Controller model (Left).
    /// </summary>
    public GameObject m_modelOculusTouchQuestAndRiftSLeftController;

    /// <summary>
    /// The root GameObject that represents the Oculus Touch for Quest And RiftS Controller model (Right).
    /// </summary>
    public GameObject m_modelOculusTouchQuestAndRiftSRightController;

    /// <summary>
    /// The root GameObject that represents the Oculus Touch for Rift Controller model (Left).
    /// </summary>
    public GameObject m_modelOculusTouchRiftLeftController;

    /// <summary>
    /// The root GameObject that represents the Oculus Touch for Rift Controller model (Right).
    /// </summary>
    public GameObject m_modelOculusTouchRiftRightController;

    /// <summary>
    /// The root GameObject that represents the Oculus Touch for Quest 2 Controller model (Left).
    /// </summary>
    public GameObject m_modelOculusTouchQuest2LeftController;

    /// <summary>
    /// The root GameObject that represents the Oculus Touch for Quest 2 Controller model (Right).
    /// </summary>
    public GameObject m_modelOculusTouchQuest2RightController;

    /// <summary>
    /// The controller that determines whether or not to enable rendering of the controller model.
    /// </summary>
    public OVRInput.Controller m_controller;

    /// <summary>
    /// The animator component that contains the controller animation controller for animating buttons and triggers.
    /// </summary>
    private Animator m_animator;

    // Root of the model currently selected for this controller (left or right).
    private GameObject m_activeController;

    private bool m_hasInputFocus = true;
    private bool m_hasInputFocusPrev = false;

    // Which controller model family to render, chosen from the headset type.
    private enum ControllerType
    {
        QuestAndRiftS = 1,
        Rift = 2,
        Quest2 = 3,
    }

    private ControllerType activeControllerType = ControllerType.Rift;

    // Cached connection state so Update() only touches the hierarchy on change.
    private bool m_prevControllerConnected = false;
    private bool m_prevControllerConnectedCached = false;

    void Start()
    {
        // Pick the controller model family that matches the headset we launched on.
        OVRPlugin.SystemHeadset headset = OVRPlugin.GetSystemHeadsetType();
        switch (headset)
        {
            case OVRPlugin.SystemHeadset.Rift_CV1:
                activeControllerType = ControllerType.Rift;
                break;
            case OVRPlugin.SystemHeadset.Oculus_Quest_2:
                activeControllerType = ControllerType.Quest2;
                break;
            default:
                activeControllerType = ControllerType.QuestAndRiftS;
                break;
        }

        Debug.LogFormat("OVRControllerHelp: Active controller type: {0} for product {1}", activeControllerType, OVRPlugin.productName);

        // Hide all controller models until controller get connected
        m_modelOculusTouchQuestAndRiftSLeftController.SetActive(false);
        m_modelOculusTouchQuestAndRiftSRightController.SetActive(false);
        m_modelOculusTouchRiftLeftController.SetActive(false);
        m_modelOculusTouchRiftRightController.SetActive(false);
        m_modelOculusTouchQuest2LeftController.SetActive(false);
        m_modelOculusTouchQuest2RightController.SetActive(false);

        OVRManager.InputFocusAcquired += InputFocusAquired;
        OVRManager.InputFocusLost += InputFocusLost;
    }

    void OnDestroy()
    {
        // BUGFIX: unsubscribe from the static OVRManager events; the original
        // never did, leaking this (destroyed) component through the delegates.
        OVRManager.InputFocusAcquired -= InputFocusAquired;
        OVRManager.InputFocusLost -= InputFocusLost;
    }

    void Update()
    {
        bool controllerConnected = OVRInput.IsControllerConnected(m_controller);

        // Only re-select models when connection or focus state changed (or on the first run).
        if ((controllerConnected != m_prevControllerConnected) || !m_prevControllerConnectedCached || (m_hasInputFocus != m_hasInputFocusPrev))
        {
            switch (activeControllerType)
            {
                case ControllerType.Rift:
                    ShowControllerModel(controllerConnected, m_modelOculusTouchRiftLeftController, m_modelOculusTouchRiftRightController);
                    break;
                case ControllerType.Quest2:
                    ShowControllerModel(controllerConnected, m_modelOculusTouchQuest2LeftController, m_modelOculusTouchQuest2RightController);
                    break;
                default: // ControllerType.QuestAndRiftS
                    ShowControllerModel(controllerConnected, m_modelOculusTouchQuestAndRiftSLeftController, m_modelOculusTouchQuestAndRiftSRightController);
                    break;
            }

            m_activeController.SetActive(m_hasInputFocus && controllerConnected);

            m_prevControllerConnected = controllerConnected;
            m_prevControllerConnectedCached = true;
            m_hasInputFocusPrev = m_hasInputFocus;
        }

        if (m_animator != null)
        {
            // Drive the controller animation from the live button/stick/trigger state.
            m_animator.SetFloat("Button 1", OVRInput.Get(OVRInput.Button.One, m_controller) ? 1.0f : 0.0f);
            m_animator.SetFloat("Button 2", OVRInput.Get(OVRInput.Button.Two, m_controller) ? 1.0f : 0.0f);
            m_animator.SetFloat("Button 3", OVRInput.Get(OVRInput.Button.Start, m_controller) ? 1.0f : 0.0f);
            m_animator.SetFloat("Joy X", OVRInput.Get(OVRInput.Axis2D.PrimaryThumbstick, m_controller).x);
            m_animator.SetFloat("Joy Y", OVRInput.Get(OVRInput.Axis2D.PrimaryThumbstick, m_controller).y);
            m_animator.SetFloat("Trigger", OVRInput.Get(OVRInput.Axis1D.PrimaryIndexTrigger, m_controller));
            m_animator.SetFloat("Grip", OVRInput.Get(OVRInput.Axis1D.PrimaryHandTrigger, m_controller));
        }
    }

    // Shows the model pair for the active controller family (left or right depending
    // on m_controller and connection state), hides every other model, and caches the
    // matching Animator and active root object. Replaces three duplicated branches.
    private void ShowControllerModel(bool controllerConnected, GameObject leftModel, GameObject rightModel)
    {
        GameObject[] allModels =
        {
            m_modelOculusTouchQuestAndRiftSLeftController,
            m_modelOculusTouchQuestAndRiftSRightController,
            m_modelOculusTouchRiftLeftController,
            m_modelOculusTouchRiftRightController,
            m_modelOculusTouchQuest2LeftController,
            m_modelOculusTouchQuest2RightController,
        };
        foreach (GameObject model in allModels)
        {
            // Skip the chosen pair so an already-visible model is not toggled
            // off and back on (which would fire OnDisable/OnEnable).
            if (model != leftModel && model != rightModel)
            {
                model.SetActive(false);
            }
        }

        bool isLeftTouch = (m_controller == OVRInput.Controller.LTouch);
        leftModel.SetActive(controllerConnected && isLeftTouch);
        rightModel.SetActive(controllerConnected && (m_controller == OVRInput.Controller.RTouch));
        // Matches original behavior: anything other than LTouch selects the right model.
        m_animator = isLeftTouch ? leftModel.GetComponent<Animator>() : rightModel.GetComponent<Animator>();
        m_activeController = isLeftTouch ? leftModel : rightModel;
    }

    public void InputFocusAquired()
    {
        m_hasInputFocus = true;
    }

    public void InputFocusLost()
    {
        m_hasInputFocus = false;
    }
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: aed62bf3ae2456c408f247f96808ce96
timeCreated: 1486166271
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,191 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
using System.Collections.Generic;
using System.Text;
// On-screen diagnostic that polls OVRInput every frame and dumps controller
// state (battery, framerate, poses, buttons, axes) into a UI Text element.
public class OVRControllerTest : MonoBehaviour
{
    // Tracks a single boolean input signal and remembers, for a short display
    // window, whether it changed recently (rendered with *asterisks* in the UI).
    public class BoolMonitor
    {
        public delegate bool BoolGenerator();

        private string m_name = "";
        private BoolGenerator m_generator;
        private bool m_prevValue = false;
        private bool m_currentValue = false;
        private bool m_currentValueRecentlyChanged = false;
        private float m_displayTimeout = 0.0f; // seconds the "recently changed" highlight stays on
        private float m_displayTimer = 0.0f;   // countdown for the highlight

        // name: label shown in the UI; generator: delegate polled once per Update();
        // displayTimeout: how long a value change stays highlighted.
        public BoolMonitor(string name, BoolGenerator generator, float displayTimeout = 0.5f)
        {
            m_name = name;
            m_generator = generator;
            m_displayTimeout = displayTimeout;
        }

        // Polls the generator and updates the recently-changed highlight timer.
        public void Update()
        {
            m_prevValue = m_currentValue;
            m_currentValue = m_generator();

            if (m_currentValue != m_prevValue)
            {
                m_currentValueRecentlyChanged = true;
                m_displayTimer = m_displayTimeout;
            }

            if (m_displayTimer > 0.0f)
            {
                m_displayTimer -= Time.deltaTime;

                if (m_displayTimer <= 0.0f)
                {
                    m_currentValueRecentlyChanged = false;
                    m_displayTimer = 0.0f;
                }
            }
        }

        // Appends "name: value" to sb; recently-changed values are wrapped in asterisks.
        public void AppendToStringBuilder(ref StringBuilder sb)
        {
            sb.Append(m_name);

            if (m_currentValue && m_currentValueRecentlyChanged)
                sb.Append(": *True*\n");
            else if (m_currentValue)
                sb.Append(": True \n");
            else if (!m_currentValue && m_currentValueRecentlyChanged)
                sb.Append(": *False*\n");
            else if (!m_currentValue)
                sb.Append(": False \n");
        }
    }

    // Target text element; rich text is disabled in Start() so asterisks render literally.
    public Text uiText;
    private List<BoolMonitor> monitors;
    // Reused per-frame buffer for the diagnostic text (avoids per-frame allocations).
    private StringBuilder data;

    void Start()
    {
        if (uiText != null)
        {
            uiText.supportRichText = false;
        }

        data = new StringBuilder(2048);

        // One monitor per input signal of interest: virtual (controller-relative)
        // mappings first, then raw (physical) buttons.
        monitors = new List<BoolMonitor>()
        {
            // virtual
            new BoolMonitor("One",                                  () => OVRInput.Get(OVRInput.Button.One)),
            new BoolMonitor("OneDown",                              () => OVRInput.GetDown(OVRInput.Button.One)),
            new BoolMonitor("OneUp",                                () => OVRInput.GetUp(OVRInput.Button.One)),
            new BoolMonitor("One (Touch)",                          () => OVRInput.Get(OVRInput.Touch.One)),
            new BoolMonitor("OneDown (Touch)",                      () => OVRInput.GetDown(OVRInput.Touch.One)),
            new BoolMonitor("OneUp (Touch)",                        () => OVRInput.GetUp(OVRInput.Touch.One)),
            new BoolMonitor("Two",                                  () => OVRInput.Get(OVRInput.Button.Two)),
            new BoolMonitor("TwoDown",                              () => OVRInput.GetDown(OVRInput.Button.Two)),
            new BoolMonitor("TwoUp",                                () => OVRInput.GetUp(OVRInput.Button.Two)),
            new BoolMonitor("PrimaryIndexTrigger",                  () => OVRInput.Get(OVRInput.Button.PrimaryIndexTrigger)),
            new BoolMonitor("PrimaryIndexTriggerDown",              () => OVRInput.GetDown(OVRInput.Button.PrimaryIndexTrigger)),
            new BoolMonitor("PrimaryIndexTriggerUp",                () => OVRInput.GetUp(OVRInput.Button.PrimaryIndexTrigger)),
            new BoolMonitor("PrimaryIndexTrigger (Touch)",          () => OVRInput.Get(OVRInput.Touch.PrimaryIndexTrigger)),
            new BoolMonitor("PrimaryIndexTriggerDown (Touch)",      () => OVRInput.GetDown(OVRInput.Touch.PrimaryIndexTrigger)),
            new BoolMonitor("PrimaryIndexTriggerUp (Touch)",        () => OVRInput.GetUp(OVRInput.Touch.PrimaryIndexTrigger)),
            new BoolMonitor("PrimaryHandTrigger",                   () => OVRInput.Get(OVRInput.Button.PrimaryHandTrigger)),
            new BoolMonitor("PrimaryHandTriggerDown",               () => OVRInput.GetDown(OVRInput.Button.PrimaryHandTrigger)),
            new BoolMonitor("PrimaryHandTriggerUp",                 () => OVRInput.GetUp(OVRInput.Button.PrimaryHandTrigger)),
            new BoolMonitor("Up",                                   () => OVRInput.Get(OVRInput.Button.Up)),
            new BoolMonitor("Down",                                 () => OVRInput.Get(OVRInput.Button.Down)),
            new BoolMonitor("Left",                                 () => OVRInput.Get(OVRInput.Button.Left)),
            new BoolMonitor("Right",                                () => OVRInput.Get(OVRInput.Button.Right)),
            // raw
            new BoolMonitor("Start",                                () => OVRInput.Get(OVRInput.RawButton.Start)),
            new BoolMonitor("StartDown",                            () => OVRInput.GetDown(OVRInput.RawButton.Start)),
            new BoolMonitor("StartUp",                              () => OVRInput.GetUp(OVRInput.RawButton.Start)),
            new BoolMonitor("Back",                                 () => OVRInput.Get(OVRInput.RawButton.Back)),
            new BoolMonitor("BackDown",                             () => OVRInput.GetDown(OVRInput.RawButton.Back)),
            new BoolMonitor("BackUp",                               () => OVRInput.GetUp(OVRInput.RawButton.Back)),
            new BoolMonitor("A",                                    () => OVRInput.Get(OVRInput.RawButton.A)),
            new BoolMonitor("ADown",                                () => OVRInput.GetDown(OVRInput.RawButton.A)),
            new BoolMonitor("AUp",                                  () => OVRInput.GetUp(OVRInput.RawButton.A)),
        };
    }

    // Connected-controller set from the previous frame, as a string; the monitor
    // below compares against it to flag hot-plug (connect/disconnect) events.
    static string prevConnected = "";
    static BoolMonitor controllers = new BoolMonitor("Controllers Changed", () => { return OVRInput.GetConnectedControllers().ToString() != prevConnected; });

    void Update()
    {
        OVRInput.Controller activeController = OVRInput.GetActiveController();

        // Reuse the buffer rather than allocating a new StringBuilder each frame.
        data.Length = 0;
        byte battery = OVRInput.GetControllerBatteryPercentRemaining();
        data.AppendFormat("Battery: {0}\n", battery);

        float framerate = OVRPlugin.GetAppFramerate();
        data.AppendFormat("Framerate: {0:F2}\n", framerate);

        string activeControllerName = activeController.ToString();
        data.AppendFormat("Active: {0}\n", activeControllerName);

        string connectedControllerNames = OVRInput.GetConnectedControllers().ToString();
        data.AppendFormat("Connected: {0}\n", connectedControllerNames);

        data.AppendFormat("PrevConnected: {0}\n", prevConnected);

        // Update the hot-plug monitor before overwriting prevConnected below.
        controllers.Update();
        controllers.AppendToStringBuilder(ref data);

        prevConnected = connectedControllerNames;

        // Pose and motion data for the currently active controller.
        Quaternion rot = OVRInput.GetLocalControllerRotation(activeController);
        data.AppendFormat("Orientation: ({0:F2}, {1:F2}, {2:F2}, {3:F2})\n", rot.x, rot.y, rot.z, rot.w);

        Vector3 angVel = OVRInput.GetLocalControllerAngularVelocity(activeController);
        data.AppendFormat("AngVel: ({0:F2}, {1:F2}, {2:F2})\n", angVel.x, angVel.y, angVel.z);

        Vector3 angAcc = OVRInput.GetLocalControllerAngularAcceleration(activeController);
        data.AppendFormat("AngAcc: ({0:F2}, {1:F2}, {2:F2})\n", angAcc.x, angAcc.y, angAcc.z);

        Vector3 pos = OVRInput.GetLocalControllerPosition(activeController);
        data.AppendFormat("Position: ({0:F2}, {1:F2}, {2:F2})\n", pos.x, pos.y, pos.z);

        Vector3 vel = OVRInput.GetLocalControllerVelocity(activeController);
        data.AppendFormat("Vel: ({0:F2}, {1:F2}, {2:F2})\n", vel.x, vel.y, vel.z);

        Vector3 acc = OVRInput.GetLocalControllerAcceleration(activeController);
        data.AppendFormat("Acc: ({0:F2}, {1:F2}, {2:F2})\n", acc.x, acc.y, acc.z);

        // Analog trigger values (0..1).
        float indexTrigger = OVRInput.Get(OVRInput.Axis1D.PrimaryIndexTrigger);
        data.AppendFormat("PrimaryIndexTriggerAxis1D: ({0:F2})\n", indexTrigger);

        float handTrigger = OVRInput.Get(OVRInput.Axis1D.PrimaryHandTrigger);
        data.AppendFormat("PrimaryHandTriggerAxis1D: ({0:F2})\n", handTrigger);

        // Poll and print every boolean monitor, in declaration order.
        for (int i = 0; i < monitors.Count; i++)
        {
            monitors[i].Update();
            monitors[i].AppendToStringBuilder(ref data);
        }

        if (uiText != null)
        {
            uiText.text = data.ToString();
        }
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e0a6abd1cb88e9245bd78dac49d7fd6e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,283 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
using System.IO;
/// <summary>
/// Helper script for capture cubemap and save it into PNG or JPG file
/// </summary>
/// <description>
/// How it works:
/// 1) This script can be attached to a existing game object, you can also use prefab Assets\OVR\Prefabs\OVRCubemapCaptureProbe
/// There are 2 ways to trigger a capture if you attached this script to a game object.
/// * Automatic capturing: if [autoTriggerAfterLaunch] is true, a automatic capturing will be triggered after [autoTriggerDelay] seconds.
/// * Keyboard trigger: press key [triggeredByKey], a capturing will be triggered.
/// 2) If you like to trigger the screen capture in your code logic, just call static function [OVRCubemapCapture.TriggerCubemapCapture] with proper input arguments.
/// </description>
public class OVRCubemapCapture : MonoBehaviour
{
    /// <summary>
    /// Enable the automatic screenshot trigger, which will capture a cubemap after autoTriggerDelay (seconds)
    /// </summary>
    public bool autoTriggerAfterLaunch = true;
    public float autoTriggerDelay = 1.0f;
    private float autoTriggerElapse = 0.0f;

    /// <summary>
    /// Trigger cubemap screenshot if user pressed key triggeredByKey
    /// </summary>
    public KeyCode triggeredByKey = KeyCode.F8;

    /// <summary>
    /// The complete file path for saving the cubemap screenshot, including the filename and extension
    /// if pathName is blank, screenshots will be saved into %USERPROFILE%\Documents\OVR_ScreenShot360
    /// </summary>
    public string pathName;

    /// <summary>
    /// The cube face resolution
    /// </summary>
    public int cubemapSize = 2048;

    // Update is called once per frame
    void Update()
    {
        // Trigger once after autoTriggerDelay seconds have elapsed.
        if (autoTriggerAfterLaunch)
        {
            autoTriggerElapse += Time.deltaTime;
            if (autoTriggerElapse >= autoTriggerDelay)
            {
                autoTriggerAfterLaunch = false;
                TriggerCubemapCapture(transform.position, cubemapSize, pathName);
            }
        }

        //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
        // Trigger by press triggeredByKey
        if (Input.GetKeyDown(triggeredByKey))
        {
            TriggerCubemapCapture(transform.position, cubemapSize, pathName);
        }
#endif
    }

    /// <summary>
    /// Generate unity cubemap at specific location and save into JPG/PNG
    /// </summary>
    /// <description>
    /// Default save folder: your app's persistentDataPath
    /// Default file name: using current time OVR_hh_mm_ss.png
    /// Note1: this will take a few seconds to finish
    /// Note2: if you only want to specify path not filename, please end [pathName] with "/"
    /// </description>
    public static void TriggerCubemapCapture(Vector3 capturePos, int cubemapSize = 2048, string pathName = null)
    {
        // Temporary, hidden camera used only for the six face renders.
        GameObject ownerObj = new GameObject("CubemapCamera", typeof(Camera));
        ownerObj.hideFlags = HideFlags.HideAndDontSave;
        ownerObj.transform.position = capturePos;
        ownerObj.transform.rotation = Quaternion.identity;

        Camera camComponent = ownerObj.GetComponent<Camera>();
        camComponent.farClipPlane = 10000.0f;
        camComponent.enabled = false; // rendered manually via Camera.Render()

        Cubemap cubemap = new Cubemap(cubemapSize, TextureFormat.RGB24, false);
        RenderIntoCubemap(camComponent, cubemap);
        SaveCubemapCapture(cubemap, pathName);

        // Clean up the temporary resources immediately.
        DestroyImmediate(cubemap);
        DestroyImmediate(ownerObj);
    }

    // Renders the scene from ownerCamera into the six faces of outCubemap,
    // restoring all modified camera/render-target state afterwards.
    public static void RenderIntoCubemap(Camera ownerCamera, Cubemap outCubemap)
    {
        int width = outCubemap.width;
        int height = outCubemap.height;

        CubemapFace[] faces = new CubemapFace[] { CubemapFace.PositiveX, CubemapFace.NegativeX, CubemapFace.PositiveY, CubemapFace.NegativeY, CubemapFace.PositiveZ, CubemapFace.NegativeZ };
        // Camera euler angles that look down each corresponding cubemap face direction.
        Vector3[] faceAngles = new Vector3[] { new Vector3(0.0f, 90.0f, 0.0f), new Vector3(0.0f, -90.0f, 0.0f), new Vector3(-90.0f, 0.0f, 0.0f), new Vector3(90.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 180.0f, 0.0f) };

        // Backup states
        RenderTexture backupRenderTex = RenderTexture.active;
        float backupFieldOfView = ownerCamera.fieldOfView;
        float backupAspect = ownerCamera.aspect;
        Quaternion backupRot = ownerCamera.transform.rotation;
        RenderTexture backupRT = ownerCamera.targetTexture;

        // Enable 8X MSAA
        RenderTexture faceTexture = new RenderTexture(width, height, 24);
        faceTexture.antiAliasing = 8;
        faceTexture.dimension = UnityEngine.Rendering.TextureDimension.Tex2D;
        faceTexture.hideFlags = HideFlags.HideAndDontSave;

        // For intermediate saving
        Texture2D swapTex = new Texture2D(width, height, TextureFormat.RGB24, false);
        swapTex.hideFlags = HideFlags.HideAndDontSave;

        // Capture 6 Directions
        ownerCamera.targetTexture = faceTexture;
        ownerCamera.fieldOfView = 90;
        ownerCamera.aspect = 1.0f;

        Color[] mirroredPixels = new Color[swapTex.height * swapTex.width];
        for (int i = 0; i < faces.Length; i++)
        {
            ownerCamera.transform.eulerAngles = faceAngles[i];
            ownerCamera.Render();
            RenderTexture.active = faceTexture;
            swapTex.ReadPixels(new Rect(0, 0, width, height), 0, 0);

            // Mirror vertically to meet the standard of unity cubemap
            Color[] originalPixels = swapTex.GetPixels();
            for (int y1 = 0; y1 < height; y1++)
            {
                for (int x1 = 0; x1 < width; x1++)
                {
                    mirroredPixels[y1 * width + x1] = originalPixels[((height - 1 - y1) * width) + x1];
                }
            }
            outCubemap.SetPixels(mirroredPixels, faces[i]);
        }
        outCubemap.SmoothEdges();

        // Restore states
        RenderTexture.active = backupRenderTex;
        ownerCamera.fieldOfView = backupFieldOfView;
        ownerCamera.aspect = backupAspect;
        ownerCamera.transform.rotation = backupRot;
        // BUGFIX: restore the camera's own original target texture. The original
        // code assigned the backed-up RenderTexture.active here (the intended
        // backupRT variable had been commented out), leaving the camera pointed
        // at whatever RenderTexture happened to be active before the capture.
        ownerCamera.targetTexture = backupRT;

        DestroyImmediate(swapTex);
        DestroyImmediate(faceTexture);
    }

    /// <summary>
    /// Save unity cubemap into NPOT 6x1 cubemap/texture atlas in the following format PX NX PY NY PZ NZ
    /// </summary>
    /// <description>
    /// Supported format: PNG/JPG
    /// Default file name: using current time OVR_hh_mm_ss.png
    /// </description>
    public static bool SaveCubemapCapture(Cubemap cubemap, string pathName = null)
    {
        string fileName;
        string dirName;
        int width = cubemap.width;
        int height = cubemap.height;
        int x = 0;
        int y = 0;
        bool saveToPNG = true;

        if (string.IsNullOrEmpty(pathName))
        {
            dirName = Application.persistentDataPath + "/OVR_ScreenShot360/";
            fileName = null;
        }
        else
        {
            dirName = Path.GetDirectoryName(pathName);
            fileName = Path.GetFileName(pathName);
            if (string.IsNullOrEmpty(dirName))
            {
                // pathName was a bare file name: GetDirectoryName returned "",
                // which the original code would have indexed out of range.
                dirName = "./";
            }
            // BUGFIX: the original condition used "||" ("!= '/' || != '\\'"),
            // which is always true, so a separator was appended even when one
            // was already present.
            else if (dirName[dirName.Length - 1] != '/' && dirName[dirName.Length - 1] != '\\')
            {
                dirName += "/";
            }
        }

        if (string.IsNullOrEmpty(fileName))
            fileName = "OVR_" + System.DateTime.Now.ToString("hh_mm_ss") + ".png";

        // Case-insensitive extension check so ".PNG"/".Jpg" are accepted too
        // (backward compatible: all previously accepted inputs still work).
        string extName = Path.GetExtension(fileName);
        if (string.Equals(extName, ".png", System.StringComparison.OrdinalIgnoreCase))
        {
            saveToPNG = true;
        }
        else if (string.Equals(extName, ".jpg", System.StringComparison.OrdinalIgnoreCase))
        {
            saveToPNG = false;
        }
        else
        {
            Debug.LogError("Unsupported file format" + extName);
            return false;
        }

        // Validate path
        try
        {
            System.IO.Directory.CreateDirectory(dirName);
        }
        catch (System.Exception e)
        {
            Debug.LogError("Failed to create path " + dirName + " since " + e.ToString());
            return false;
        }

        // Create the new texture
        Texture2D tex = new Texture2D(width * 6, height, TextureFormat.RGB24, false);
        if (tex == null)
        {
            Debug.LogError("[OVRScreenshotWizard] Failed creating the texture!");
            return false;
        }

        // Merge all the cubemap faces into the texture
        // Reference cubemap format: http://docs.unity3d.com/Manual/class-Cubemap.html
        CubemapFace[] faces = new CubemapFace[] { CubemapFace.PositiveX, CubemapFace.NegativeX, CubemapFace.PositiveY, CubemapFace.NegativeY, CubemapFace.PositiveZ, CubemapFace.NegativeZ };
        for (int i = 0; i < faces.Length; i++)
        {
            // get the pixels from the cubemap
            Color[] srcPixels = null;
            Color[] pixels = cubemap.GetPixels(faces[i]);

            // flip them vertically, as they are ordered left to right, bottom to top
            srcPixels = new Color[pixels.Length];
            for (int y1 = 0; y1 < height; y1++)
            {
                for (int x1 = 0; x1 < width; x1++)
                {
                    srcPixels[y1 * width + x1] = pixels[((height - 1 - y1) * width) + x1];
                }
            }

            // Copy them to the dest texture
            tex.SetPixels(x, y, width, height, srcPixels);
            x += width;
        }

        try
        {
            // Encode the texture and save it to disk
            byte[] bytes = saveToPNG ? tex.EncodeToPNG() : tex.EncodeToJPG();
            System.IO.File.WriteAllBytes(dirName + fileName, bytes);
            Debug.Log("Cubemap file created " + dirName + fileName);
        }
        catch (System.Exception e)
        {
            Debug.LogError("Failed to save cubemap file since " + e.ToString());
            return false;
        }

        DestroyImmediate(tex);
        return true;
    }
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 7a489178b0acf0147846b3873447beaf
timeCreated: 1464728890
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,22 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
using UnityEngine.EventSystems;
using UnityEngine.UI;
// Base class for UI cursors driven by the OVR input/raycasting system.
// Concrete cursors implement how a pointing ray or an explicit start/end
// segment is turned into an on-screen cursor pose.
abstract public class OVRCursor : MonoBehaviour
{
    // Position/orient the cursor from a pointing ray transform.
    public abstract void SetCursorRay(Transform ray);
    // Position the cursor from an explicit start point, destination point, and
    // surface normal at the destination.
    public abstract void SetCursorStartDest(Vector3 start, Vector3 dest, Vector3 normal);
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f2233ce673fcb9f41bd0753f867b7f70
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,129 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// An OVRSkeleton variant whose bones are bound to a user-supplied list of
/// Transforms (e.g. the rig of a custom skinned hand mesh) rather than to
/// auto-generated bone GameObjects.
/// </summary>
[DefaultExecutionOrder(-80)]
public class OVRCustomSkeleton : OVRSkeleton
{
    // When true, InitializeBones writes bone local positions as well as rotations.
    [SerializeField]
    private bool _applyBoneTranslations = true;
    // One Transform slot per BoneId; filled in the inspector or by TryAutoMapBonesByName.
    [HideInInspector]
    [SerializeField]
    private List<Transform> _customBones_V2 = new List<Transform>(new Transform[(int)BoneId.Max]);
#if UNITY_EDITOR
    // Pieces used to reconstruct bone names from the source FBX rig:
    //   regular bones: b_ + (l_|r_) + <bone name>
    //   finger tips:   (l_|r_) + <finger name> + _finger_tip_marker
    // NOTE(review): _fbxHandSidePrefix is indexed by (int)skeletonType, which assumes
    // SkeletonType.HandLeft == 0 and HandRight == 1 -- confirm against OVRSkeleton.
    private static readonly string[] _fbxHandSidePrefix = { "l_", "r_" };
    private static readonly string _fbxHandBonePrefix = "b_";
    private static readonly string[] _fbxHandBoneNames =
    {
        "wrist",
        "forearm_stub",
        "thumb0",
        "thumb1",
        "thumb2",
        "thumb3",
        "index1",
        "index2",
        "index3",
        "middle1",
        "middle2",
        "middle3",
        "ring1",
        "ring2",
        "ring3",
        "pinky0",
        "pinky1",
        "pinky2",
        "pinky3"
    };
    private static readonly string[] _fbxHandFingerNames =
    {
        "thumb",
        "index",
        "middle",
        "ring",
        "pinky"
    };
#endif
    /// <summary>The user-assigned bone Transforms, indexed by BoneId.</summary>
    public List<Transform> CustomBones { get { return _customBones_V2; } }
#if UNITY_EDITOR
    /// <summary>
    /// Editor helper: for every bone id in the skeleton's current range, searches this
    /// transform's children recursively for a child named like the corresponding FBX
    /// bone and assigns it to the matching CustomBones slot. Slots whose bone name is
    /// not found are left unchanged.
    /// </summary>
    public void TryAutoMapBonesByName()
    {
        BoneId start = GetCurrentStartBoneId();
        BoneId end = GetCurrentEndBoneId();
        SkeletonType skeletonType = GetSkeletonType();
        if (start != BoneId.Invalid && end != BoneId.Invalid)
        {
            // End bone id is exclusive here (bi < end).
            for (int bi = (int)start; bi < (int)end; ++bi)
            {
                string fbxBoneName = FbxBoneNameFromBoneId(skeletonType, (BoneId)bi);
                Transform t = transform.FindChildRecursive(fbxBoneName);
                if (t != null)
                {
                    _customBones_V2[(int)bi] = t;
                }
            }
        }
    }
    // Maps a BoneId to the bone's name in the source FBX rig (see naming notes above).
    private static string FbxBoneNameFromBoneId(SkeletonType skeletonType, BoneId bi)
    {
        {
            // Finger-tip bones use a different naming scheme than regular bones.
            if (bi >= BoneId.Hand_ThumbTip && bi <= BoneId.Hand_PinkyTip)
            {
                return _fbxHandSidePrefix[(int)skeletonType] + _fbxHandFingerNames[(int)bi - (int)BoneId.Hand_ThumbTip] + "_finger_tip_marker";
            }
            else
            {
                return _fbxHandBonePrefix + _fbxHandSidePrefix[(int)skeletonType] + _fbxHandBoneNames[(int)bi];
            }
        }
    }
#endif
    /// <summary>
    /// Binds the skeleton's bone list to the user-assigned Transforms and applies the
    /// skeleton's pose to them (rotations always; translations only when enabled).
    /// </summary>
    protected override void InitializeBones()
    {
        // Hand skeleton poses are authored X-flipped; pick the matching conversion helpers.
        bool flipX = (_skeletonType == SkeletonType.HandLeft || _skeletonType == SkeletonType.HandRight);
        if (_bones == null || _bones.Count != _skeleton.NumBones)
        {
            _bones = new List<OVRBone>(new OVRBone[_skeleton.NumBones]);
            Bones = _bones.AsReadOnly();
        }
        for (int i = 0; i < _bones.Count; ++i)
        {
            // Reuse existing OVRBone entries; create one only where the slot is null.
            OVRBone bone = _bones[i] ?? (_bones[i] = new OVRBone());
            bone.Id = (OVRSkeleton.BoneId)_skeleton.Bones[i].Id;
            bone.ParentBoneIndex = _skeleton.Bones[i].ParentBoneIndex;
            // NOTE(review): assumes a Transform has been assigned for every bone id in
            // range; a missing CustomBones entry will throw below -- confirm setup.
            bone.Transform = _customBones_V2[(int)bone.Id];
            if (_applyBoneTranslations)
            {
                bone.Transform.localPosition = flipX ? _skeleton.Bones[i].Pose.Position.FromFlippedXVector3f() : _skeleton.Bones[i].Pose.Position.FromFlippedZVector3f();
            }
            bone.Transform.localRotation = flipX ? _skeleton.Bones[i].Pose.Orientation.FromFlippedXQuatf() : _skeleton.Bones[i].Pose.Orientation.FromFlippedZQuatf();
        }
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 674a40251fe8ad841b18517ac5209957
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,421 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
using UnityEngine.UI;
//-------------------------------------------------------------------------------------
/// <summary>
/// Shows debug information on a heads-up display.
/// </summary>
public class OVRDebugInfo : MonoBehaviour
{
    #region GameObjects for Debug Information UIs
    // Root world-space canvas, parented under "LeftEyeAnchor" in Awake.
    GameObject debugUIManager;
    // Container for the per-variable rows, created lazily in InitUIComponents.
    GameObject debugUIObject;
    GameObject riftPresent;
    GameObject fps;
    GameObject ipd;
    GameObject fov;
    GameObject height;
    GameObject depth;
    GameObject resolutionEyeTexture;
    GameObject latencies;
    // Most recently created "TextBox" child; reused by ComponentComposition.
    GameObject texts;
    #endregion
    #region Debug strings
    string strRiftPresent = null; // "VR DISABLED"
    string strFPS = null; // "FPS: 0";
    string strIPD = null; // "IPD: 0.000";
    string strFOV = null; // "FOV: 0.0f";
    string strHeight = null; // "Height: 0.0f";
    string strDepth = null; // "Depth: 0.0f";
    string strResolutionEyeTexture = null; // "Resolution : {0} x {1}"
    string strLatencies = null; // "R: {0:F3} TW: {1:F3} PP: {2:F3} RE: {3:F3} TWE: {4:F3}"
    #endregion
    /// <summary>
    /// Variables for FPS
    /// </summary>
    float updateInterval = 0.5f; // seconds between FPS text refreshes
    float accum = 0.0f; // unscaled time accumulated this interval
    int frames = 0; // frames counted this interval
    float timeLeft = 0.0f; // time remaining in the current interval
    /// <summary>
    /// Managing for UI initialization
    /// </summary>
    bool initUIComponent = false; // set true to (re)build the UI on the next Update
    bool isInited = false; // true once InitUIComponents has run
    /// <summary>
    /// UIs Y offset
    /// </summary>
    float offsetY = 55.0f;
    /// <summary>
    /// Managing for rift detection UI
    /// </summary>
    float riftPresentTimeout = 0.0f;
    /// <summary>
    /// Turn on / off VR variables
    /// </summary>
    bool showVRVars = false;
    #region MonoBehaviour handler
    /// <summary>
    /// Initialization: builds the world-space canvas under the left eye anchor.
    /// NOTE(review): GameObject.Find("LeftEyeAnchor") requires such an object in the
    /// scene (e.g. from an OVRCameraRig); otherwise this throws -- confirm setup.
    /// </summary>
    void Awake()
    {
        // Create canvas for using new GUI
        debugUIManager = new GameObject();
        debugUIManager.name = "DebugUIManager";
        debugUIManager.transform.parent = GameObject.Find("LeftEyeAnchor").transform;
        RectTransform rectTransform = debugUIManager.AddComponent<RectTransform>();
        rectTransform.sizeDelta = new Vector2(100f, 100f);
        rectTransform.localScale = new Vector3(0.001f, 0.001f, 0.001f);
        rectTransform.localPosition = new Vector3(0.01f, 0.17f, 0.53f);
        rectTransform.localEulerAngles = Vector3.zero;
        Canvas canvas = debugUIManager.AddComponent<Canvas>();
        canvas.renderMode = RenderMode.WorldSpace;
        canvas.pixelPerfect = false;
    }
    /// <summary>
    /// Updating VR variables and managing UI present.
    /// Space toggles the overlay (legacy input manager only).
    /// </summary>
    void Update()
    {
        // Deferred UI build: requested by the toggle below, performed here once.
        if (initUIComponent && !isInited)
        {
            InitUIComponents();
        }
        //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
        if (Input.GetKeyDown(KeyCode.Space) && riftPresentTimeout < 0.0f)
        {
            initUIComponent = true;
            showVRVars ^= true; // toggle visibility
        }
#endif
        UpdateDeviceDetection();
        // Presenting VR variables
        if (showVRVars)
        {
            debugUIManager.SetActive(true);
            UpdateVariable();
            UpdateStrings();
        }
        else
        {
            debugUIManager.SetActive(false);
        }
    }
    /// <summary>
    /// Initialize isInited value on OnDestroy
    /// </summary>
    void OnDestroy()
    {
        isInited = false;
    }
    #endregion
    #region Private Functions
    /// <summary>
    /// Initialize UI GameObjects: one text row per debug string that is non-empty
    /// at build time. Rows are laid out top-to-bottom, offsetY apart.
    /// </summary>
    void InitUIComponents()
    {
        float posY = 0.0f;
        int fontSize = 20;
        debugUIObject = new GameObject();
        debugUIObject.name = "DebugInfo";
        debugUIObject.transform.parent = GameObject.Find("DebugUIManager").transform;
        debugUIObject.transform.localPosition = new Vector3(0.0f, 100.0f, 0.0f);
        debugUIObject.transform.localEulerAngles = Vector3.zero;
        debugUIObject.transform.localScale = new Vector3(1.0f, 1.0f, 1.0f);
        // Print out for FPS
        if (!string.IsNullOrEmpty(strFPS))
        {
            fps = VariableObjectManager(fps, "FPS", posY -= offsetY, strFPS, fontSize);
        }
        // Print out for IPD
        if (!string.IsNullOrEmpty(strIPD))
        {
            ipd = VariableObjectManager(ipd, "IPD", posY -= offsetY, strIPD, fontSize);
        }
        // Print out for FOV
        if (!string.IsNullOrEmpty(strFOV))
        {
            fov = VariableObjectManager(fov, "FOV", posY -= offsetY, strFOV, fontSize);
        }
        // Print out for Height
        if (!string.IsNullOrEmpty(strHeight))
        {
            height = VariableObjectManager(height, "Height", posY -= offsetY, strHeight, fontSize);
        }
        // Print out for Depth
        if (!string.IsNullOrEmpty(strDepth))
        {
            depth = VariableObjectManager(depth, "Depth", posY -= offsetY, strDepth, fontSize);
        }
        // Print out for Resoulution of Eye Texture
        if (!string.IsNullOrEmpty(strResolutionEyeTexture))
        {
            resolutionEyeTexture = VariableObjectManager(resolutionEyeTexture, "Resolution", posY -= offsetY, strResolutionEyeTexture, fontSize);
        }
        // Print out for Latency
        if (!string.IsNullOrEmpty(strLatencies))
        {
            latencies = VariableObjectManager(latencies, "Latency", posY -= offsetY, strLatencies, 17);
            posY = 0.0f;
        }
        initUIComponent = false;
        isInited = true;
    }
    /// <summary>
    /// Update VR Variables
    /// </summary>
    void UpdateVariable()
    {
        UpdateIPD();
        UpdateEyeHeightOffset();
        UpdateEyeDepthOffset();
        UpdateFOV();
        UpdateResolutionEyeTexture();
        UpdateLatencyValues();
        UpdateFPS();
    }
    /// <summary>
    /// Update Strings: pushes the freshly formatted strings into the Text components.
    /// </summary>
    void UpdateStrings()
    {
        if (debugUIObject == null)
            return;
        if (!string.IsNullOrEmpty(strFPS))
            fps.GetComponentInChildren<Text>().text = strFPS;
        if (!string.IsNullOrEmpty(strIPD))
            ipd.GetComponentInChildren<Text>().text = strIPD;
        if (!string.IsNullOrEmpty(strFOV))
            fov.GetComponentInChildren<Text>().text = strFOV;
        if (!string.IsNullOrEmpty(strResolutionEyeTexture))
            resolutionEyeTexture.GetComponentInChildren<Text>().text = strResolutionEyeTexture;
        if (!string.IsNullOrEmpty(strLatencies))
        {
            latencies.GetComponentInChildren<Text>().text = strLatencies;
            // Latency line is long; shrink below the size set at creation (17).
            latencies.GetComponentInChildren<Text>().fontSize = 14;
        }
        if (!string.IsNullOrEmpty(strHeight))
            height.GetComponentInChildren<Text>().text = strHeight;
        if (!string.IsNullOrEmpty(strDepth))
            depth.GetComponentInChildren<Text>().text = strDepth;
    }
    /// <summary>
    /// It's for rift present GUI.
    /// NOTE(review): no caller is visible in this file -- possibly dead code; verify
    /// before removing.
    /// </summary>
    void RiftPresentGUI(GameObject guiMainOBj)
    {
        riftPresent = ComponentComposition(riftPresent);
        riftPresent.transform.SetParent(guiMainOBj.transform);
        riftPresent.name = "RiftPresent";
        RectTransform rectTransform = riftPresent.GetComponent<RectTransform>();
        rectTransform.localPosition = new Vector3(0.0f, 0.0f, 0.0f);
        rectTransform.localScale = new Vector3(1.0f, 1.0f, 1.0f);
        rectTransform.localEulerAngles = Vector3.zero;
        Text text = riftPresent.GetComponentInChildren<Text>();
        text.text = strRiftPresent;
        text.fontSize = 20;
    }
    /// <summary>
    /// Updates the device detection.
    /// </summary>
    void UpdateDeviceDetection()
    {
        if (riftPresentTimeout >= 0.0f)
        {
            riftPresentTimeout -= Time.deltaTime;
        }
    }
    /// <summary>
    /// Object Manager for Variables: (re)builds one labeled text row and positions it.
    /// NOTE(review): posY is decremented by offsetY at every call site AND again below
    /// when computing localPosition, so each row sits one extra offsetY lower than the
    /// caller's running position -- looks unintended but is long-standing behavior;
    /// confirm before changing.
    /// </summary>
    /// <returns> gameobject for each Variable </returns>
    GameObject VariableObjectManager(GameObject gameObject, string name, float posY, string str, int fontSize)
    {
        gameObject = ComponentComposition(gameObject);
        gameObject.name = name;
        gameObject.transform.SetParent(debugUIObject.transform);
        RectTransform rectTransform = gameObject.GetComponent<RectTransform>();
        rectTransform.localPosition = new Vector3(0.0f, posY -= offsetY, 0.0f);
        Text text = gameObject.GetComponentInChildren<Text>();
        text.text = str;
        text.fontSize = fontSize;
        gameObject.transform.localEulerAngles = Vector3.zero;
        rectTransform.localScale = new Vector3(1.0f, 1.0f, 1.0f);
        return gameObject;
    }
    /// <summary>
    /// Component composition: creates a background panel GameObject with a centered
    /// "TextBox" Text child. The passed-in reference is ignored and replaced.
    /// </summary>
    /// <returns> Composed gameobject. </returns>
    GameObject ComponentComposition(GameObject GO)
    {
        GO = new GameObject();
        GO.AddComponent<RectTransform>();
        GO.AddComponent<CanvasRenderer>();
        GO.AddComponent<Image>();
        GO.GetComponent<RectTransform>().sizeDelta = new Vector2(350f, 50f);
        GO.GetComponent<Image>().color = new Color(7f / 255f, 45f / 255f, 71f / 255f, 200f / 255f);
        texts = new GameObject();
        texts.AddComponent<RectTransform>();
        texts.AddComponent<CanvasRenderer>();
        texts.AddComponent<Text>();
        texts.GetComponent<RectTransform>().sizeDelta = new Vector2(350f, 50f);
        texts.GetComponent<Text>().font = Resources.GetBuiltinResource(typeof(Font), "Arial.ttf") as Font;
        texts.GetComponent<Text>().alignment = TextAnchor.MiddleCenter;
        texts.transform.SetParent(GO.transform);
        texts.name = "TextBox";
        return GO;
    }
    #endregion
    #region Debugging variables handler
    /// <summary>
    /// Updates the IPD.
    /// </summary>
    void UpdateIPD()
    {
        strIPD = System.String.Format("IPD (mm): {0:F4}", OVRManager.profile.ipd * 1000.0f);
    }
    /// <summary>
    /// Updates the eye height offset.
    /// </summary>
    void UpdateEyeHeightOffset()
    {
        float eyeHeight = OVRManager.profile.eyeHeight;
        strHeight = System.String.Format("Eye Height (m): {0:F3}", eyeHeight);
    }
    /// <summary>
    /// Updates the eye depth offset.
    /// </summary>
    void UpdateEyeDepthOffset()
    {
        float eyeDepth = OVRManager.profile.eyeDepth;
        strDepth = System.String.Format("Eye Depth (m): {0:F3}", eyeDepth);
    }
    /// <summary>
    /// Updates the FOV (vertical, left eye, in degrees).
    /// </summary>
    void UpdateFOV()
    {
        OVRDisplay.EyeRenderDesc eyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.XR.XRNode.LeftEye);
        strFOV = System.String.Format("FOV (deg): {0:F3}", eyeDesc.fov.y);
    }
    /// <summary>
    /// Updates resolution of eye texture: combined width of both eyes, max height,
    /// scaled by the current render viewport scale.
    /// </summary>
    void UpdateResolutionEyeTexture()
    {
        OVRDisplay.EyeRenderDesc leftEyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.XR.XRNode.LeftEye);
        OVRDisplay.EyeRenderDesc rightEyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.XR.XRNode.RightEye);
        float scale = UnityEngine.XR.XRSettings.renderViewportScale;
        float w = (int)(scale * (float)(leftEyeDesc.resolution.x + rightEyeDesc.resolution.x));
        float h = (int)(scale * (float)Mathf.Max(leftEyeDesc.resolution.y, rightEyeDesc.resolution.y));
        strResolutionEyeTexture = System.String.Format("Resolution : {0} x {1}", w, h);
    }
    /// <summary>
    /// Updates latency values (not available on Android outside the editor).
    /// </summary>
    void UpdateLatencyValues()
    {
#if !UNITY_ANDROID || UNITY_EDITOR
        OVRDisplay.LatencyData latency = OVRManager.display.latency;
        // Near-zero readings across the board mean the runtime has no data yet.
        if (latency.render < 0.000001f && latency.timeWarp < 0.000001f && latency.postPresent < 0.000001f)
            strLatencies = System.String.Format("Latency values are not available.");
        else
            strLatencies = System.String.Format("Render: {0:F3} TimeWarp: {1:F3} Post-Present: {2:F3}\nRender Error: {3:F3} TimeWarp Error: {4:F3}",
                latency.render,
                latency.timeWarp,
                latency.postPresent,
                latency.renderError,
                latency.timeWarpError);
#endif
    }
    /// <summary>
    /// Updates the FPS, averaged over updateInterval using unscaled time.
    /// </summary>
    void UpdateFPS()
    {
        timeLeft -= Time.unscaledDeltaTime;
        accum += Time.unscaledDeltaTime;
        ++frames;
        // Interval ended - update GUI text and start new interval
        if (timeLeft <= 0.0)
        {
            // display two fractional digits (f2 format)
            float fps = frames / accum;
            strFPS = System.String.Format("FPS: {0:F2}", fps);
            timeLeft += updateInterval;
            accum = 0.0f;
            frames = 0;
        }
    }
    #endregion
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: b71d1996d67004241a3b69960856ffcb
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,272 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
using UnityEngine.EventSystems;
using UnityEngine.UI;
/// <summary>
/// UI pointer driven by gaze input.
/// </summary>
public class OVRGazePointer : OVRCursor {
    private Transform gazeIcon; //the transform that rotates according to our movement
    [Tooltip("Should the pointer be hidden when not over interactive objects.")]
    public bool hideByDefault = true;
    [Tooltip("Time after leaving interactive object before pointer fades.")]
    public float showTimeoutPeriod = 1;
    [Tooltip("Time after mouse pointer becoming inactive before pointer unfades.")]
    public float hideTimeoutPeriod = 0.1f;
    [Tooltip("Keep a faint version of the pointer visible while using a mouse")]
    public bool dimOnHideRequest = true;
    [Tooltip("Angular scale of pointer")]
    public float depthScaleMultiplier = 0.03f;
    // When true, the cursor tilts to match the surface normal reported by physics hits.
    public bool matchNormalOnPhysicsColliders;
    /// <summary>
    /// The gaze ray.
    /// </summary>
    public Transform rayTransform;
    /// <summary>
    /// Is gaze pointer current visible
    /// </summary>
    public bool hidden { get; private set; }
    /// <summary>
    /// Current scale applied to pointer
    /// </summary>
    public float currentScale { get; private set; }
    /// <summary>
    /// Current depth of pointer from camera
    /// </summary>
    private float depth;
    // NOTE(review): declared but never read or written in this class -- verify intent.
    private float hideUntilTime;
    /// <summary>
    /// How many times position has been set this frame. Used to detect when there are no position sets in a frame.
    /// </summary>
    private int positionSetsThisFrame = 0;
    /// <summary>
    /// Last time code requested the pointer be shown. Usually when pointer passes over interactive elements.
    /// </summary>
    private float lastShowRequestTime;
    /// <summary>
    /// Last time pointer was requested to be hidden. Usually mouse pointer activity.
    /// </summary>
    private float lastHideRequestTime;
    // Optionally present GUI element displaying progress when using gaze-to-select mechanics
    private OVRProgressIndicator progressIndicator;
    private static OVRGazePointer _instance;
    /// <summary>
    /// Lazy singleton accessor. Instantiates the "Prefabs/GazePointerRing" resource on
    /// first access if no instance exists yet.
    /// </summary>
    public static OVRGazePointer instance
    {
        // If there's no GazePointer already in the scene, instanciate one now.
        get
        {
            if (_instance == null)
            {
                // NOTE(review): string.Format with no placeholders and an unused arg;
                // message also misspells "Instantiating" -- candidate for cleanup.
                Debug.Log(string.Format("Instanciating GazePointer", 0));
                _instance = (OVRGazePointer)GameObject.Instantiate((OVRGazePointer)Resources.Load("Prefabs/GazePointerRing", typeof(OVRGazePointer)));
            }
            return _instance;
        }
    }
    /// <summary>
    /// Used to determine alpha level of gaze cursor. Could also be used to determine cursor size, for example, as the cursor fades out.
    /// </summary>
    public float visibilityStrength
    {
        get
        {
            // It's possible there are reasons to show the cursor - such as it hovering over some UI - and reasons to hide
            // the cursor - such as another input method (e.g. mouse) being used. We take both of these in to account.
            float strengthFromShowRequest;
            if (hideByDefault)
            {
                // fade the cursor out with time
                strengthFromShowRequest = Mathf.Clamp01(1 - (Time.time - lastShowRequestTime) / showTimeoutPeriod);
            }
            else
            {
                // keep it fully visible
                strengthFromShowRequest = 1;
            }
            // Now consider factors requesting pointer to be hidden
            float strengthFromHideRequest;
            strengthFromHideRequest = (lastHideRequestTime + hideTimeoutPeriod > Time.time) ? (dimOnHideRequest ? 0.1f : 0) : 1;
            // Hide requests take priority
            return Mathf.Min(strengthFromShowRequest, strengthFromHideRequest);
        }
    }
    /// <summary>
    /// Progress (0..1) of the optional gaze-to-select indicator; 0 when none is attached.
    /// </summary>
    public float SelectionProgress
    {
        get
        {
            return progressIndicator ? progressIndicator.currentProgress : 0;
        }
        set
        {
            if (progressIndicator)
                progressIndicator.currentProgress = value;
        }
    }
    /// <summary>
    /// Enforces the singleton (destroying duplicates) and caches child references.
    /// </summary>
    public void Awake()
    {
        currentScale = 1;
        // Only allow one instance at runtime.
        if (_instance != null && _instance != this)
        {
            enabled = false;
            DestroyImmediate(this);
            return;
        }
        _instance = this;
        gazeIcon = transform.Find("GazeIcon");
        progressIndicator = transform.GetComponent<OVRProgressIndicator>();
    }
    /// <summary>
    /// Keeps the cursor centered along the gaze ray at the last recorded depth and
    /// toggles visibility based on visibilityStrength.
    /// </summary>
    void Update ()
    {
        // Fall back to the main camera as the ray source if none was assigned.
        if (rayTransform == null && Camera.main != null)
            rayTransform = Camera.main.transform;
        // Move the gaze cursor to keep it in the middle of the view
        transform.position = rayTransform.position + rayTransform.forward * depth;
        // Should we show or hide the gaze cursor?
        if (visibilityStrength == 0 && !hidden)
        {
            Hide();
        }
        else if (visibilityStrength > 0 && hidden)
        {
            Show();
        }
    }
    /// <summary>
    /// Set position and orientation of pointer
    /// </summary>
    /// <param name="pos"></param>
    /// <param name="normal"></param>
    public override void SetCursorStartDest(Vector3 _, Vector3 pos, Vector3 normal)
    {
        transform.position = pos;
        if (!matchNormalOnPhysicsColliders) normal = rayTransform.forward;
        // Set the rotation to match the normal of the surface it's on.
        Quaternion newRot = transform.rotation;
        newRot.SetLookRotation(normal, rayTransform.up);
        transform.rotation = newRot;
        // record depth so that distance doesn't pop when pointer leaves an object
        depth = (rayTransform.position - pos).magnitude;
        //set scale based on depth
        currentScale = depth * depthScaleMultiplier;
        transform.localScale = new Vector3(currentScale, currentScale, currentScale);
        positionSetsThisFrame++;
        RequestShow();
    }
    /// <summary>
    /// No-op: the ray source is resolved in Update instead.
    /// </summary>
    public override void SetCursorRay(Transform ray)
    {
        // We don't do anything here, because we already set this properly by default in Update.
    }
    /// <summary>
    /// Runs after all Updates: if nothing positioned the cursor this frame, faces it
    /// at the camera, then aligns the icon and resets the per-frame counter.
    /// </summary>
    void LateUpdate()
    {
        // This happens after all Updates so we know that if positionSetsThisFrame is zero then nothing set the position this frame
        if (positionSetsThisFrame == 0)
        {
            // No geometry intersections, so gazing into space. Make the cursor face directly at the camera
            Quaternion newRot = transform.rotation;
            newRot.SetLookRotation(rayTransform.forward, rayTransform.up);
            transform.rotation = newRot;
        }
        Quaternion iconRotation = gazeIcon.rotation;
        iconRotation.SetLookRotation(transform.rotation * new Vector3(0, 0, 1));
        gazeIcon.rotation = iconRotation;
        positionSetsThisFrame = 0;
    }
    /// <summary>
    /// Request the pointer be hidden
    /// </summary>
    public void RequestHide()
    {
        if (!dimOnHideRequest)
        {
            Hide();
        }
        lastHideRequestTime = Time.time;
    }
    /// <summary>
    /// Request the pointer be shown. Hide requests take priority
    /// </summary>
    public void RequestShow()
    {
        Show();
        lastShowRequestTime = Time.time;
    }
    // Disable/Enable child elements when we show/hide the cursor. For performance reasons.
    void Hide()
    {
        foreach (Transform child in transform)
        {
            child.gameObject.SetActive(false);
        }
        if (GetComponent<Renderer>())
            GetComponent<Renderer>().enabled = false;
        hidden = true;
    }
    void Show()
    {
        foreach (Transform child in transform)
        {
            child.gameObject.SetActive(true);
        }
        if (GetComponent<Renderer>())
            GetComponent<Renderer>().enabled = true;
        hidden = false;
    }
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 30530ad0e40d0a64ea26d753ee4996ea
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,18 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Empty placeholder for the former GearVR controller test behaviour; kept so
/// scenes/prefabs referencing this script type do not break.
/// </summary>
public class OVRGearVrControllerTest : MonoBehaviour
{
    // Deprecated since SDK 1.51
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 7acc4619d4cb5e64e9ed05e5a7a8099f
timeCreated: 1486173066
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,160 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System;
using UnityEngine;
/// <summary>
/// An object that can be grabbed and thrown by OVRGrabber.
/// </summary>
public class OVRGrabbable : MonoBehaviour
{
    [SerializeField]
    protected bool m_allowOffhandGrab = true;
    [SerializeField]
    protected bool m_snapPosition = false;
    [SerializeField]
    protected bool m_snapOrientation = false;
    [SerializeField]
    protected Transform m_snapOffset;
    [SerializeField]
    protected Collider[] m_grabPoints = null;
    protected bool m_grabbedKinematic = false;
    protected Collider m_grabbedCollider = null;
    protected OVRGrabber m_grabbedBy = null;

    /// <summary>
    /// If true, the object can currently be grabbed.
    /// </summary>
    public bool allowOffhandGrab => m_allowOffhandGrab;

    /// <summary>
    /// If true, the object is currently grabbed.
    /// </summary>
    public bool isGrabbed => m_grabbedBy != null;

    /// <summary>
    /// If true, the object's position will snap to match snapOffset when grabbed.
    /// </summary>
    public bool snapPosition => m_snapPosition;

    /// <summary>
    /// If true, the object's orientation will snap to match snapOffset when grabbed.
    /// </summary>
    public bool snapOrientation => m_snapOrientation;

    /// <summary>
    /// An offset relative to the OVRGrabber where this object can snap when grabbed.
    /// </summary>
    public Transform snapOffset => m_snapOffset;

    /// <summary>
    /// Returns the OVRGrabber currently grabbing this object.
    /// </summary>
    public OVRGrabber grabbedBy => m_grabbedBy;

    /// <summary>
    /// The transform at which this object was grabbed.
    /// </summary>
    public Transform grabbedTransform => m_grabbedCollider.transform;

    /// <summary>
    /// The Rigidbody of the collider that was used to grab this object.
    /// </summary>
    public Rigidbody grabbedRigidbody => m_grabbedCollider.attachedRigidbody;

    /// <summary>
    /// The contact point(s) where the object was grabbed.
    /// </summary>
    public Collider[] grabPoints => m_grabPoints;

    /// <summary>
    /// Notifies the object that it has been grabbed: records the grabber and the
    /// contact collider, and makes the Rigidbody kinematic while held.
    /// </summary>
    virtual public void GrabBegin(OVRGrabber hand, Collider grabPoint)
    {
        m_grabbedCollider = grabPoint;
        m_grabbedBy = hand;
        GetComponent<Rigidbody>().isKinematic = true;
    }

    /// <summary>
    /// Notifies the object that it has been released: restores the original kinematic
    /// state and applies the throw velocities, then clears the grab bookkeeping.
    /// </summary>
    virtual public void GrabEnd(Vector3 linearVelocity, Vector3 angularVelocity)
    {
        Rigidbody body = GetComponent<Rigidbody>();
        body.isKinematic = m_grabbedKinematic;
        body.velocity = linearVelocity;
        body.angularVelocity = angularVelocity;
        m_grabbedCollider = null;
        m_grabbedBy = null;
    }

    void Awake()
    {
        // Already configured with explicit grab points -- nothing to do.
        if (m_grabPoints.Length != 0)
            return;
        // Otherwise fall back to this object's own collider as the single grab point.
        Collider ownCollider = GetComponent<Collider>();
        if (ownCollider == null)
        {
            throw new ArgumentException("Grabbables cannot have zero grab points and no collider -- please add a grab point or collider.");
        }
        m_grabPoints = new Collider[] { ownCollider };
    }

    protected virtual void Start()
    {
        // Remember the authored kinematic state so GrabEnd can restore it.
        m_grabbedKinematic = GetComponent<Rigidbody>().isKinematic;
    }

    void OnDestroy()
    {
        // A held object that gets destroyed must tell its grabber to let go.
        if (m_grabbedBy != null)
        {
            m_grabbedBy.ForceRelease(this);
        }
    }
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 02d61468f8b77ae4b92c344bc9a600fb
timeCreated: 1481833527
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,414 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Allows grabbing and throwing of objects with the OVRGrabbable component on them.
/// </summary>
[RequireComponent(typeof(Rigidbody))]
public class OVRGrabber : MonoBehaviour
{
// Grip trigger thresholds for picking up objects, with some hysteresis.
public float grabBegin = 0.55f;
public float grabEnd = 0.35f;
// Demonstrates parenting the held object to the hand's transform when grabbed.
// When false, the grabbed object is moved every FixedUpdate using MovePosition.
// Note that MovePosition is required for proper physics simulation. If you set this to true, you can
// easily observe broken physics simulation by, for example, moving the bottom cube of a stacked
// tower and noting a complete loss of friction.
[SerializeField]
protected bool m_parentHeldObject = false;
// If true, this script will move the hand to the transform specified by m_parentTransform, using MovePosition in
// Update. This allows correct physics behavior, at the cost of some latency. In this usage scenario, you
// should NOT parent the hand to the hand anchor.
// (If m_moveHandPosition is false, this script will NOT update the game object's position.
// The hand gameObject can simply be attached to the hand anchor, which updates position in LateUpdate,
// gaining us a few ms of reduced latency.)
[SerializeField]
protected bool m_moveHandPosition = false;
// Child/attached transforms of the grabber, indicating where to snap held objects to (if you snap them).
// Also used for ranking grab targets in case of multiple candidates.
[SerializeField]
protected Transform m_gripTransform = null;
// Child/attached Colliders to detect candidate grabbable objects.
[SerializeField]
protected Collider[] m_grabVolumes = null;
// Should be OVRInput.Controller.LTouch or OVRInput.Controller.RTouch.
[SerializeField]
protected OVRInput.Controller m_controller;
// You can set this explicitly in the inspector if you're using m_moveHandPosition.
// Otherwise, you should typically leave this null and simply parent the hand to the hand anchor
// in your scene, using Unity's inspector.
[SerializeField]
protected Transform m_parentTransform;
[SerializeField]
protected GameObject m_player;
protected bool m_grabVolumeEnabled = true;
protected Vector3 m_lastPos;
protected Quaternion m_lastRot;
protected Quaternion m_anchorOffsetRotation;
protected Vector3 m_anchorOffsetPosition;
protected float m_prevFlex;
protected OVRGrabbable m_grabbedObj = null;
protected Vector3 m_grabbedObjectPosOff;
protected Quaternion m_grabbedObjectRotOff;
protected Dictionary<OVRGrabbable, int> m_grabCandidates = new Dictionary<OVRGrabbable, int>();
protected bool m_operatingWithoutOVRCameraRig = true;
/// <summary>
/// The currently grabbed object.
/// </summary>
public OVRGrabbable grabbedObject
{
get { return m_grabbedObj; }
}
public void ForceRelease(OVRGrabbable grabbable)
{
bool canRelease = (
(m_grabbedObj != null) &&
(m_grabbedObj == grabbable)
);
if (canRelease)
{
GrabEnd();
}
}
protected virtual void Awake()
{
m_anchorOffsetPosition = transform.localPosition;
m_anchorOffsetRotation = transform.localRotation;
if(!m_moveHandPosition)
{
// If we are being used with an OVRCameraRig, let it drive input updates, which may come from Update or FixedUpdate.
OVRCameraRig rig = transform.GetComponentInParent<OVRCameraRig>();
if (rig != null)
{
rig.UpdatedAnchors += (r) => {OnUpdatedAnchors();};
m_operatingWithoutOVRCameraRig = false;
}
}
}
protected virtual void Start()
{
m_lastPos = transform.position;
m_lastRot = transform.rotation;
if(m_parentTransform == null)
{
m_parentTransform = gameObject.transform;
}
// We're going to setup the player collision to ignore the hand collision.
SetPlayerIgnoreCollision(gameObject, true);
}
// Using Update instead of FixedUpdate. Doing this in FixedUpdate causes visible judder even with
// somewhat high tick rates, because variable numbers of ticks per frame will give hand poses of
// varying recency. We want a single hand pose sampled at the same time each frame.
// Note that this can lead to its own side effects. For example, if m_parentHeldObject is false, the
// grabbed objects will be moved with MovePosition. If this is called in Update while the physics
// tick rate is dramatically different from the application frame rate, other objects touched by
// the held object will see an incorrect velocity (because the move will occur over the time of the
// physics tick, not the render tick), and will respond to the incorrect velocity with potentially
// visible artifacts.
// When no OVRCameraRig is driving anchor updates, sample the hand pose here once per frame.
virtual public void Update()
{
    if (!m_operatingWithoutOVRCameraRig)
    {
        return;
    }
    OnUpdatedAnchors();
}
// Hands follow the touch anchors by calling MovePosition each frame to reach the anchor.
// This is done instead of parenting to achieve workable physics. If you don't require physics on
// your hands or held objects, you may wish to switch to parenting.
// Hands follow the touch anchors by calling MovePosition each frame to reach the anchor.
// This is done instead of parenting to achieve workable physics. If you don't require physics on
// your hands or held objects, you may wish to switch to parenting.
void OnUpdatedAnchors()
{
    Vector3 destPos = m_parentTransform.TransformPoint(m_anchorOffsetPosition);
    Quaternion destRot = m_parentTransform.rotation * m_anchorOffsetRotation;
    if (m_moveHandPosition)
    {
        // Fetch the rigidbody once per frame instead of once per call (original queried it twice).
        Rigidbody handRigidbody = GetComponent<Rigidbody>();
        handRigidbody.MovePosition(destPos);
        handRigidbody.MoveRotation(destRot);
    }
    if (!m_parentHeldObject)
    {
        MoveGrabbedObject(destPos, destRot);
    }
    m_lastPos = transform.position;
    m_lastRot = transform.rotation;
    float prevFlex = m_prevFlex;
    // Update values from inputs
    m_prevFlex = OVRInput.Get(OVRInput.Axis1D.PrimaryHandTrigger, m_controller);
    CheckForGrabOrRelease(prevFlex);
}
// Ensure any held object is released when this grabber is destroyed, so it is not left
// permanently attached to a dead hand.
void OnDestroy()
{
    if (m_grabbedObj == null)
    {
        return;
    }
    GrabEnd();
}
// Registers a grabbable whose collider entered the grab volume as a grab candidate.
void OnTriggerEnter(Collider otherCollider)
{
    // Find the grabbable on the collider itself, falling back to its parents.
    // FIX: use an explicit null check instead of '??'. UnityEngine.Object overloads '=='
    // (destroyed objects compare equal to null) but not the null-coalescing operator,
    // so '??' could keep a reference to a destroyed component alive.
    OVRGrabbable grabbable = otherCollider.GetComponent<OVRGrabbable>();
    if (grabbable == null)
    {
        grabbable = otherCollider.GetComponentInParent<OVRGrabbable>();
    }
    if (grabbable == null) return;
    // Reference-count the candidate: one grabbable may have several colliders inside the volume.
    int refCount = 0;
    m_grabCandidates.TryGetValue(grabbable, out refCount);
    m_grabCandidates[grabbable] = refCount + 1;
}
// Decrements the overlap count for a grabbable whose collider left the grab volume,
// removing it from the candidate set once no colliders remain inside.
void OnTriggerExit(Collider otherCollider)
{
    // FIX: explicit null check instead of '??' — UnityEngine.Object overloads '==' but not
    // the null-coalescing operator (destroyed-object pitfall); see OnTriggerEnter.
    OVRGrabbable grabbable = otherCollider.GetComponent<OVRGrabbable>();
    if (grabbable == null)
    {
        grabbable = otherCollider.GetComponentInParent<OVRGrabbable>();
    }
    if (grabbable == null) return;
    // Remove the grabbable
    int refCount = 0;
    bool found = m_grabCandidates.TryGetValue(grabbable, out refCount);
    if (!found)
    {
        return;
    }
    if (refCount > 1)
    {
        m_grabCandidates[grabbable] = refCount - 1;
    }
    else
    {
        m_grabCandidates.Remove(grabbable);
    }
}
// Detects threshold crossings of the hand-trigger flex value between the previous frame
// (prevFlex) and the current frame (m_prevFlex), starting or ending a grab accordingly.
protected void CheckForGrabOrRelease(float prevFlex)
{
    bool crossedGrabThreshold = m_prevFlex >= grabBegin && prevFlex < grabBegin;
    bool crossedReleaseThreshold = m_prevFlex <= grabEnd && prevFlex > grabEnd;
    if (crossedGrabThreshold)
    {
        GrabBegin();
    }
    else if (crossedReleaseThreshold)
    {
        GrabEnd();
    }
}
// Starts a grab: picks the closest eligible candidate (by distance from the grip transform to
// each grab-point collider's bounds), takes it from any other hand holding it, records the
// object's pose offset relative to this hand, and snaps it into place.
protected virtual void GrabBegin()
{
float closestMagSq = float.MaxValue;
OVRGrabbable closestGrabbable = null;
Collider closestGrabbableCollider = null;
// Iterate grab candidates and find the closest grabbable candidate
foreach (OVRGrabbable grabbable in m_grabCandidates.Keys)
{
// Skip objects already held by another hand unless they allow off-hand grabbing.
bool canGrab = !(grabbable.isGrabbed && !grabbable.allowOffhandGrab);
if (!canGrab)
{
continue;
}
for (int j = 0; j < grabbable.grabPoints.Length; ++j)
{
Collider grabbableCollider = grabbable.grabPoints[j];
// Store the closest grabbable
Vector3 closestPointOnBounds = grabbableCollider.ClosestPointOnBounds(m_gripTransform.position);
float grabbableMagSq = (m_gripTransform.position - closestPointOnBounds).sqrMagnitude;
if (grabbableMagSq < closestMagSq)
{
closestMagSq = grabbableMagSq;
closestGrabbable = grabbable;
closestGrabbableCollider = grabbableCollider;
}
}
}
// Disable grab volumes to prevent overlaps
GrabVolumeEnable(false);
if (closestGrabbable != null)
{
// If another hand holds the object, notify it so it releases before we take over.
if (closestGrabbable.isGrabbed)
{
closestGrabbable.grabbedBy.OffhandGrabbed(closestGrabbable);
}
m_grabbedObj = closestGrabbable;
m_grabbedObj.GrabBegin(this, closestGrabbableCollider);
m_lastPos = transform.position;
m_lastRot = transform.rotation;
// Set up offsets for grabbed object desired position relative to hand.
if(m_grabbedObj.snapPosition)
{
m_grabbedObjectPosOff = m_gripTransform.localPosition;
if(m_grabbedObj.snapOffset)
{
Vector3 snapOffset = m_grabbedObj.snapOffset.position;
// Mirror the snap offset on the X axis for the left hand.
if (m_controller == OVRInput.Controller.LTouch) snapOffset.x = -snapOffset.x;
m_grabbedObjectPosOff += snapOffset;
}
}
else
{
// No snapping: keep the object's current pose relative to the hand.
Vector3 relPos = m_grabbedObj.transform.position - transform.position;
relPos = Quaternion.Inverse(transform.rotation) * relPos;
m_grabbedObjectPosOff = relPos;
}
if (m_grabbedObj.snapOrientation)
{
m_grabbedObjectRotOff = m_gripTransform.localRotation;
if(m_grabbedObj.snapOffset)
{
m_grabbedObjectRotOff = m_grabbedObj.snapOffset.rotation * m_grabbedObjectRotOff;
}
}
else
{
Quaternion relOri = Quaternion.Inverse(transform.rotation) * m_grabbedObj.transform.rotation;
m_grabbedObjectRotOff = relOri;
}
// NOTE: force teleport on grab, to avoid high-speed travel to dest which hits a lot of other objects at high
// speed and sends them flying. The grabbed object may still teleport inside of other objects, but fixing that
// is beyond the scope of this demo.
MoveGrabbedObject(m_lastPos, m_lastRot, true);
// NOTE: This is to get around having to setup collision layers, but in your own project you might
// choose to remove this line in favor of your own collision layer setup.
SetPlayerIgnoreCollision(m_grabbedObj.gameObject, true);
if (m_parentHeldObject)
{
m_grabbedObj.transform.parent = transform;
}
}
}
/// <summary>
/// Moves the held object to follow the hand pose (pos/rot), applying the pose offsets
/// captured at grab time. When <paramref name="forceTeleport"/> is true the transform is
/// set directly; otherwise the rigidbody is driven so physics interactions stay sane.
/// </summary>
protected virtual void MoveGrabbedObject(Vector3 pos, Quaternion rot, bool forceTeleport = false)
{
    if (m_grabbedObj == null)
    {
        return;
    }
    Rigidbody body = m_grabbedObj.grabbedRigidbody;
    Vector3 targetPosition = pos + rot * m_grabbedObjectPosOff;
    Quaternion targetRotation = rot * m_grabbedObjectRotOff;
    if (forceTeleport)
    {
        body.transform.position = targetPosition;
        body.transform.rotation = targetRotation;
    }
    else
    {
        body.MovePosition(targetPosition);
        body.MoveRotation(targetRotation);
    }
}
// Ends the current grab (if any): computes the controller's velocity in world/tracking space
// and releases the object with it so it can be thrown, then re-enables the grab volumes.
protected void GrabEnd()
{
if (m_grabbedObj != null)
{
// Compose the controller's local pose with the anchor offset, then derive the tracking-space
// orientation so the controller-space velocities can be rotated into world space.
OVRPose localPose = new OVRPose { position = OVRInput.GetLocalControllerPosition(m_controller), orientation = OVRInput.GetLocalControllerRotation(m_controller) };
OVRPose offsetPose = new OVRPose { position = m_anchorOffsetPosition, orientation = m_anchorOffsetRotation };
localPose = localPose * offsetPose;
OVRPose trackingSpace = transform.ToOVRPose() * localPose.Inverse();
Vector3 linearVelocity = trackingSpace.orientation * OVRInput.GetLocalControllerVelocity(m_controller);
Vector3 angularVelocity = trackingSpace.orientation * OVRInput.GetLocalControllerAngularVelocity(m_controller);
GrabbableRelease(linearVelocity, angularVelocity);
}
// Re-enable grab volumes to allow overlap events
GrabVolumeEnable(true);
}
// Hands the throw velocities to the held object, detaches it if it was parented to the hand,
// and clears our reference. Callers must ensure m_grabbedObj is non-null.
protected void GrabbableRelease(Vector3 linearVelocity, Vector3 angularVelocity)
{
    m_grabbedObj.GrabEnd(linearVelocity, angularVelocity);
    if (m_parentHeldObject)
    {
        m_grabbedObj.transform.parent = null;
    }
    m_grabbedObj = null;
}
// Enables or disables all grab-volume colliders. Disabling also clears the candidate set,
// since no overlap events can maintain it while the volumes are off.
protected virtual void GrabVolumeEnable(bool enabled)
{
    if (m_grabVolumeEnabled == enabled)
    {
        return;
    }
    m_grabVolumeEnabled = enabled;
    foreach (Collider volume in m_grabVolumes)
    {
        volume.enabled = enabled;
    }
    if (!enabled)
    {
        m_grabCandidates.Clear();
    }
}
// Called by another grabber that is taking over the given object; release it without
// imparting any velocity so the hand-off is clean.
protected virtual void OffhandGrabbed(OVRGrabbable grabbable)
{
    if (m_grabbedObj != grabbable)
    {
        return;
    }
    GrabbableRelease(Vector3.zero, Vector3.zero);
}
// Sets or clears collision ignorance between every non-trigger collider under the player
// and every non-trigger collider under the grabbable. No-op when no player is assigned.
protected void SetPlayerIgnoreCollision(GameObject grabbable, bool ignore)
{
    if (m_player == null)
    {
        return;
    }
    // Fetch each collider set once: the original re-queried the grabbable's colliders
    // inside the loop over player colliders, which is needless repeated work.
    Collider[] playerColliders = m_player.GetComponentsInChildren<Collider>();
    Collider[] grabbableColliders = grabbable.GetComponentsInChildren<Collider>();
    foreach (Collider playerCollider in playerColliders)
    {
        foreach (Collider grabbableCollider in grabbableColliders)
        {
            if (!grabbableCollider.isTrigger && !playerCollider.isTrigger)
                Physics.IgnoreCollision(grabbableCollider, playerCollider, ignore);
        }
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: fd425c2d06f39bf4899d07c05d0f10eb
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 200
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,189 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
/// <summary>
/// Diagnostic display with a regular grid of cubes for visual testing of
/// tracking and distortion.
/// </summary>
public class OVRGridCube : MonoBehaviour
{
    /// <summary>
    /// The key that toggles the grid of cubes.
    /// </summary>
    public KeyCode GridKey = KeyCode.G;

    // Root object holding all grid cubes; created lazily the first time the grid is shown.
    private GameObject CubeGrid = null;
    private bool CubeGridOn = false;

    // Current / previous "tracking lost" state; cubes are recolored only when it changes.
    private bool CubeSwitchColorOld = false;
    private bool CubeSwitchColor = false;

    // Grid extents (cubes span [-size, +size] per axis), spacing, and base cube size.
    private int gridSizeX = 6;
    private int gridSizeY = 4;
    private int gridSizeZ = 6;
    private float gridScale = 0.3f;
    private float cubeScale = 0.03f;

    // Handle to OVRCameraRig
    private OVRCameraRig CameraController = null;

    /// <summary>
    /// Update this instance.
    /// </summary>
    void Update()
    {
        UpdateCubeGrid();
    }

    /// <summary>
    /// Sets the OVR camera controller.
    /// </summary>
    /// <param name="cameraController">Camera controller.</param>
    public void SetOVRCameraController(ref OVRCameraRig cameraController)
    {
        CameraController = cameraController;
    }

    // Handles the toggle key and keeps cube colors in sync with positional tracking state.
    void UpdateCubeGrid()
    {
        //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
        // Toggle the grid cube display on 'G'
        if (Input.GetKeyDown(GridKey))
        {
            if (CubeGridOn == false)
            {
                CubeGridOn = true;
                Debug.LogWarning("CubeGrid ON");
                if (CubeGrid != null)
                    CubeGrid.SetActive(true);
                else
                    CreateCubeGrid();
            }
            else
            {
                CubeGridOn = false;
                Debug.LogWarning("CubeGrid OFF");
                if (CubeGrid != null)
                    CubeGrid.SetActive(false);
            }
        }
#endif
        if (CubeGrid != null)
        {
            // Set cube colors to let user know if camera is tracking
            CubeSwitchColor = !OVRManager.tracker.isPositionTracked;
            if (CubeSwitchColor != CubeSwitchColorOld)
                CubeGridSwitchColor(CubeSwitchColor);
            CubeSwitchColorOld = CubeSwitchColor;
        }
    }

    // Builds the full grid of cubes (colored by type: red = off-axis, white = axis, yellow = center),
    // shadowless and collider-free, parented under a single "CubeGrid" root object.
    void CreateCubeGrid()
    {
        Debug.LogWarning("Create CubeGrid");
        // Create the visual cube grid
        CubeGrid = new GameObject("CubeGrid");
        // Set a layer to target a specific camera
        CubeGrid.layer = CameraController.gameObject.layer;
        for (int x = -gridSizeX; x <= gridSizeX; x++)
            for (int y = -gridSizeY; y <= gridSizeY; y++)
                for (int z = -gridSizeZ; z <= gridSizeZ; z++)
                {
                    // Set the cube type:
                    // 0 = non-axis cube
                    // 1 = axis cube
                    // 2 = center cube
                    int CubeType = 0;
                    if ((x == 0 && y == 0) || (x == 0 && z == 0) || (y == 0 && z == 0))
                    {
                        if ((x == 0) && (y == 0) && (z == 0))
                            CubeType = 2;
                        else
                            CubeType = 1;
                    }
                    GameObject cube = GameObject.CreatePrimitive(PrimitiveType.Cube);
                    BoxCollider bc = cube.GetComponent<BoxCollider>();
                    bc.enabled = false;
                    cube.layer = CameraController.gameObject.layer;
                    // No shadows
                    Renderer r = cube.GetComponent<Renderer>();
#if UNITY_4_0 || UNITY_4_1 || UNITY_4_2 || UNITY_4_3 || UNITY_4_5 || UNITY_4_6
                    // Renderer.castShadows was deprecated starting in Unity 5.0
                    r.castShadows = false;
#else
                    r.shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
#endif
                    r.receiveShadows = false;
                    // Cube line is white down the middle
                    if (CubeType == 0)
                        r.material.color = Color.red;
                    else if (CubeType == 1)
                        r.material.color = Color.white;
                    else
                        r.material.color = Color.yellow;
                    cube.transform.position =
                        new Vector3(((float)x * gridScale),
                                    ((float)y * gridScale),
                                    ((float)z * gridScale));
                    float s = 0.7f;
                    // Axis cubes are bigger
                    if (CubeType == 1)
                        s = 1.0f;
                    // Center cube is the largest
                    if (CubeType == 2)
                        s = 2.0f;
                    cube.transform.localScale =
                        new Vector3(cubeScale * s, cubeScale * s, cubeScale * s);
                    cube.transform.parent = CubeGrid.transform;
                }
    }

    /// <summary>
    /// Switch the Cube grid color.
    /// </summary>
    /// <param name="switchToBlue">If set to <c>true</c>, recolor red cubes blue (tracking lost); otherwise back to red.</param>
    void CubeGridSwitchColor(bool switchToBlue)
    {
        // FIX: the parameter previously shadowed the CubeSwitchColor field, which was confusing
        // and error-prone; it is renamed to describe what it actually selects.
        Color c = switchToBlue ? Color.blue : Color.red;
        foreach (Transform child in CubeGrid.transform)
        {
            Material m = child.GetComponent<Renderer>().material;
            // Only the red/blue (non-axis) cubes change; white/yellow axis cubes keep their color.
            if (m.color == Color.red || m.color == Color.blue)
                m.color = c;
        }
    }
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 4988596c8a187f94f8e6a345ebb4254b
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,222 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Wraps OVRPlugin hand-tracking state for one hand and exposes it both as public properties and
// through the skeleton/mesh data-provider interfaces consumed by OVRSkeleton(Renderer)/OVRMesh(Renderer).
// Runs early (-90) so dependents read fresh data the same frame.
[DefaultExecutionOrder(-90)]
public class OVRHand : MonoBehaviour,
OVRSkeleton.IOVRSkeletonDataProvider,
OVRSkeletonRenderer.IOVRSkeletonRendererDataProvider,
OVRMesh.IOVRMeshDataProvider,
OVRMeshRenderer.IOVRMeshRendererDataProvider
{
// Mirrors OVRPlugin.Hand so the inspector does not depend on plugin types directly.
public enum Hand
{
None = OVRPlugin.Hand.None,
HandLeft = OVRPlugin.Hand.HandLeft,
HandRight = OVRPlugin.Hand.HandRight,
}
// Mirrors OVRPlugin.HandFinger; values double as bit indices into _handState.Pinches.
public enum HandFinger
{
Thumb = OVRPlugin.HandFinger.Thumb,
Index = OVRPlugin.HandFinger.Index,
Middle = OVRPlugin.HandFinger.Middle,
Ring = OVRPlugin.HandFinger.Ring,
Pinky = OVRPlugin.HandFinger.Pinky,
Max = OVRPlugin.HandFinger.Max,
}
// Mirrors OVRPlugin.TrackingConfidence.
public enum TrackingConfidence
{
Low = OVRPlugin.TrackingConfidence.Low,
High = OVRPlugin.TrackingConfidence.High
}
// Which hand this component reads; set in the inspector.
[SerializeField]
private Hand HandType = Hand.None;
// Optional parent for the pointer-pose transform created in Awake.
[SerializeField]
private Transform _pointerPoseRoot = null;
private GameObject _pointerPoseGO;
// Scratch struct refilled by OVRPlugin.GetHandState each update.
private OVRPlugin.HandState _handState = new OVRPlugin.HandState();
public bool IsDataValid { get; private set; }
public bool IsDataHighConfidence { get; private set; }
public bool IsTracked { get; private set; }
public bool IsSystemGestureInProgress { get; private set; }
public bool IsPointerPoseValid { get; private set; }
// Transform holding the system pointer pose for this hand; created in Awake.
public Transform PointerPose { get; private set; }
public float HandScale { get; private set; }
public TrackingConfidence HandConfidence { get; private set; }
public bool IsDominantHand { get; private set; }
private void Awake()
{
// Create the pointer-pose carrier up front and fetch an initial state so properties
// are populated before the first Update.
_pointerPoseGO = new GameObject();
PointerPose = _pointerPoseGO.transform;
if (_pointerPoseRoot != null)
{
PointerPose.SetParent(_pointerPoseRoot, false);
}
GetHandState(OVRPlugin.Step.Render);
}
private void Update()
{
GetHandState(OVRPlugin.Step.Render);
}
private void FixedUpdate()
{
// Physics-step sampling is only meaningful on the legacy (non-OpenXR) backend.
if (OVRPlugin.nativeXrApi != OVRPlugin.XrApi.OpenXR)
{
GetHandState(OVRPlugin.Step.Physics);
}
}
// Refreshes _handState from the plugin and derives all public properties from its status flags;
// on failure, resets everything to safe "not tracked" defaults.
private void GetHandState(OVRPlugin.Step step)
{
if (OVRPlugin.GetHandState(step, (OVRPlugin.Hand)HandType, ref _handState))
{
IsTracked = (_handState.Status & OVRPlugin.HandStatus.HandTracked) != 0;
IsSystemGestureInProgress = (_handState.Status & OVRPlugin.HandStatus.SystemGestureInProgress) != 0;
IsPointerPoseValid = (_handState.Status & OVRPlugin.HandStatus.InputStateValid) != 0;
IsDominantHand = (_handState.Status & OVRPlugin.HandStatus.DominantHand) != 0;
// Plugin poses are in a flipped-Z convention; convert into Unity's coordinate space.
PointerPose.localPosition = _handState.PointerPose.Position.FromFlippedZVector3f();
PointerPose.localRotation = _handState.PointerPose.Orientation.FromFlippedZQuatf();
HandScale = _handState.HandScale;
HandConfidence = (TrackingConfidence)_handState.HandConfidence;
IsDataValid = true;
IsDataHighConfidence = IsTracked && HandConfidence == TrackingConfidence.High;
}
else
{
IsTracked = false;
IsSystemGestureInProgress = false;
IsPointerPoseValid = false;
PointerPose.localPosition = Vector3.zero;
PointerPose.localRotation = Quaternion.identity;
HandScale = 1.0f;
HandConfidence = TrackingConfidence.Low;
IsDataValid = false;
IsDataHighConfidence = false;
}
}
/// <summary>
/// True when the given finger is currently pinching (bit test against the plugin's pinch mask).
/// </summary>
public bool GetFingerIsPinching(HandFinger finger)
{
return IsDataValid && (((int)_handState.Pinches & (1 << (int)finger)) != 0);
}
/// <summary>
/// Pinch strength for the given finger in [0, 1]; 0 when data is invalid or malformed.
/// </summary>
public float GetFingerPinchStrength(HandFinger finger)
{
if (IsDataValid
&& _handState.PinchStrength != null
&& _handState.PinchStrength.Length == (int)OVRPlugin.HandFinger.Max)
{
return _handState.PinchStrength[(int)finger];
}
return 0.0f;
}
/// <summary>
/// Per-finger tracking confidence; Low when data is invalid or malformed.
/// </summary>
public TrackingConfidence GetFingerConfidence(HandFinger finger)
{
if (IsDataValid
&& _handState.FingerConfidences != null
&& _handState.FingerConfidences.Length == (int)OVRPlugin.HandFinger.Max)
{
return (TrackingConfidence)_handState.FingerConfidences[(int)finger];
}
return TrackingConfidence.Low;
}
// Maps HandType onto the corresponding skeleton type for OVRSkeleton consumers.
OVRSkeleton.SkeletonType OVRSkeleton.IOVRSkeletonDataProvider.GetSkeletonType()
{
switch (HandType)
{
case Hand.HandLeft:
return OVRSkeleton.SkeletonType.HandLeft;
case Hand.HandRight:
return OVRSkeleton.SkeletonType.HandRight;
case Hand.None:
default:
return OVRSkeleton.SkeletonType.None;
}
}
// Supplies root pose, scale, and bone rotations for skeleton updates; fields beyond
// IsDataValid are only populated when data is valid.
OVRSkeleton.SkeletonPoseData OVRSkeleton.IOVRSkeletonDataProvider.GetSkeletonPoseData()
{
var data = new OVRSkeleton.SkeletonPoseData();
data.IsDataValid = IsDataValid;
if (IsDataValid)
{
data.RootPose = _handState.RootPose;
data.RootScale = _handState.HandScale;
data.BoneRotations = _handState.BoneRotations;
data.IsDataHighConfidence = IsTracked && HandConfidence == TrackingConfidence.High;
}
return data;
}
// Supplies scale/confidence/system-gesture info for the skeleton renderer.
OVRSkeletonRenderer.SkeletonRendererData OVRSkeletonRenderer.IOVRSkeletonRendererDataProvider.GetSkeletonRendererData()
{
var data = new OVRSkeletonRenderer.SkeletonRendererData();
data.IsDataValid = IsDataValid;
if (IsDataValid)
{
data.RootScale = _handState.HandScale;
data.IsDataHighConfidence = IsTracked && HandConfidence == TrackingConfidence.High;
data.ShouldUseSystemGestureMaterial = IsSystemGestureInProgress;
}
return data;
}
// Maps HandType onto the corresponding mesh type for OVRMesh consumers.
OVRMesh.MeshType OVRMesh.IOVRMeshDataProvider.GetMeshType()
{
switch (HandType)
{
case Hand.None:
return OVRMesh.MeshType.None;
case Hand.HandLeft:
return OVRMesh.MeshType.HandLeft;
case Hand.HandRight:
return OVRMesh.MeshType.HandRight;
default:
return OVRMesh.MeshType.None;
}
}
// Supplies confidence/system-gesture info for the hand mesh renderer.
OVRMeshRenderer.MeshRendererData OVRMeshRenderer.IOVRMeshRendererDataProvider.GetMeshRendererData()
{
var data = new OVRMeshRenderer.MeshRendererData();
data.IsDataValid = IsDataValid;
if (IsDataValid)
{
data.IsDataHighConfidence = IsTracked && HandConfidence == TrackingConfidence.High;
data.ShouldUseSystemGestureMaterial = IsSystemGestureInProgress;
}
return data;
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: cb7623a8f0b49cf4dbaa40aea4d4be64
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,186 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
using System.Collections.Generic;
using System.Text;
// Debug overlay that dumps controller and hand-tracking state (hand state, skeletons, meshes)
// into a UI Text element every frame. The exact output strings are part of the tool's display
// format and must not be altered.
public class OVRHandTest : MonoBehaviour
{
// Tracks a boolean signal over time and remembers (for a short display window) that it
// recently changed, so transient flips remain visible in the text dump.
public class BoolMonitor
{
public delegate bool BoolGenerator();
private string m_name = "";
private BoolGenerator m_generator;
private bool m_prevValue = false;
private bool m_currentValue = false;
private bool m_currentValueRecentlyChanged = false;
// How long (seconds) a change stays highlighted, and the countdown timer for it.
private float m_displayTimeout = 0.0f;
private float m_displayTimer = 0.0f;
public BoolMonitor(string name, BoolGenerator generator, float displayTimeout = 0.5f)
{
m_name = name;
m_generator = generator;
m_displayTimeout = displayTimeout;
}
// Samples the generator and restarts the highlight timer on any value change.
public void Update()
{
m_prevValue = m_currentValue;
m_currentValue = m_generator();
if (m_currentValue != m_prevValue)
{
m_currentValueRecentlyChanged = true;
m_displayTimer = m_displayTimeout;
}
if (m_displayTimer > 0.0f)
{
m_displayTimer -= Time.deltaTime;
if (m_displayTimer <= 0.0f)
{
m_currentValueRecentlyChanged = false;
m_displayTimer = 0.0f;
}
}
}
// Appends "Name: True/False" to the builder; asterisks mark a recent change.
public void AppendToStringBuilder(ref StringBuilder sb)
{
sb.Append(m_name);
if (m_currentValue && m_currentValueRecentlyChanged)
sb.Append(": *True*\n");
else if (m_currentValue)
sb.Append(": True \n");
else if (!m_currentValue && m_currentValueRecentlyChanged)
sb.Append(": *False*\n");
else if (!m_currentValue)
sb.Append(": False \n");
}
}
// Target UI element receiving the per-frame diagnostic text.
public Text uiText;
private List<BoolMonitor> monitors;
private StringBuilder data;
// Cached query results; hand states are refreshed every frame, skeletons/meshes once in Start.
private OVRPlugin.HandState hs_LH = new OVRPlugin.HandState();
private OVRPlugin.HandState hs_RH = new OVRPlugin.HandState();
private OVRPlugin.Skeleton skel_LH = new OVRPlugin.Skeleton();
private OVRPlugin.Skeleton skel_RH = new OVRPlugin.Skeleton();
private OVRPlugin.Mesh mesh_LH = new OVRPlugin.Mesh();
private OVRPlugin.Mesh mesh_RH = new OVRPlugin.Mesh();
private bool result_skel_LH = false;
private bool result_skel_RH = false;
private bool result_mesh_LH = false;
private bool result_mesh_RH = false;
void Start()
{
if (uiText != null)
{
// Asterisk markers in the dump would otherwise be interpreted as rich text.
uiText.supportRichText = false;
}
data = new StringBuilder(2048);
monitors = new List<BoolMonitor>()
{
new BoolMonitor("One", () => OVRInput.Get(OVRInput.Button.One)),
};
// Skeleton and mesh data are static; query them once.
result_skel_LH = OVRPlugin.GetSkeleton(OVRPlugin.SkeletonType.HandLeft, out skel_LH);
result_skel_RH = OVRPlugin.GetSkeleton(OVRPlugin.SkeletonType.HandRight, out skel_RH);
result_mesh_LH = OVRPlugin.GetMesh(OVRPlugin.MeshType.HandLeft, out mesh_LH);
result_mesh_RH = OVRPlugin.GetMesh(OVRPlugin.MeshType.HandRight, out mesh_RH);
}
// Previous frame's connected-controller string, shared across instances to detect changes.
static string prevConnected = "";
static BoolMonitor controllers = new BoolMonitor("Controllers Changed", () => { return OVRInput.GetConnectedControllers().ToString() != prevConnected; });
void Update()
{
data.Length = 0;
OVRInput.Controller activeController = OVRInput.GetActiveController();
string activeControllerName = activeController.ToString();
data.AppendFormat("Active: {0}\n", activeControllerName);
string connectedControllerNames = OVRInput.GetConnectedControllers().ToString();
data.AppendFormat("Connected: {0}\n", connectedControllerNames);
data.AppendFormat("PrevConnected: {0}\n", prevConnected);
controllers.Update();
controllers.AppendToStringBuilder(ref data);
prevConnected = connectedControllerNames;
Vector3 pos = OVRInput.GetLocalControllerPosition(activeController);
data.AppendFormat("Position: ({0:F2}, {1:F2}, {2:F2})\n", pos.x, pos.y, pos.z);
Quaternion rot = OVRInput.GetLocalControllerRotation(activeController);
data.AppendFormat("Orientation: ({0:F2}, {1:F2}, {2:F2}, {3:F2})\n", rot.x, rot.y, rot.z, rot.w);
data.AppendFormat("HandTrackingEnabled: {0}\n", OVRPlugin.GetHandTrackingEnabled());
bool result_hs_LH = OVRPlugin.GetHandState(OVRPlugin.Step.Render, OVRPlugin.Hand.HandLeft, ref hs_LH);
data.AppendFormat("LH HS Query Res: {0}\n", result_hs_LH);
data.AppendFormat("LH HS Status: {0}\n", hs_LH.Status);
data.AppendFormat("LH HS Pose: {0}\n", hs_LH.RootPose);
data.AppendFormat("LH HS HandConf: {0}\n", hs_LH.HandConfidence);
bool result_hs_RH = OVRPlugin.GetHandState(OVRPlugin.Step.Render, OVRPlugin.Hand.HandRight, ref hs_RH);
data.AppendFormat("RH HS Query Res: {0}\n", result_hs_RH);
data.AppendFormat("RH HS Status: {0}\n", hs_RH.Status);
data.AppendFormat("RH HS Pose: {0}\n", hs_RH.RootPose);
data.AppendFormat("RH HS HandConf: {0}\n", hs_RH.HandConfidence);
data.AppendFormat("LH Skel Query Res: {0}\n", result_skel_LH);
data.AppendFormat("LH Skel Type: {0}\n", skel_LH.Type);
data.AppendFormat("LH Skel NumBones: {0}\n", skel_LH.NumBones);
data.AppendFormat("RH Skel Query Res: {0}\n", result_skel_RH);
data.AppendFormat("RH Skel Type: {0}\n", skel_RH.Type);
data.AppendFormat("RH Skel NumBones: {0}\n", skel_RH.NumBones);
data.AppendFormat("LH Mesh Query Res: {0}\n", result_mesh_LH);
data.AppendFormat("LH Mesh Type: {0}\n", mesh_LH.Type);
data.AppendFormat("LH Mesh NumVers: {0}\n", mesh_LH.NumVertices);
data.AppendFormat("RH Mesh Query Res: {0}\n", result_mesh_RH);
data.AppendFormat("RH Mesh Type: {0}\n", mesh_RH.Type);
data.AppendFormat("RH Mesh NumVers: {0}\n", mesh_RH.NumVertices);
for (int i = 0; i < monitors.Count; i++)
{
monitors[i].Update();
monitors[i].AppendToStringBuilder(ref data);
}
if (uiText != null)
{
uiText.text = data.ToString();
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 12a1e4a79af8fe849b7a2769ff0d3886
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,891 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System;
using System.Collections.Generic;
namespace UnityEngine.EventSystems
{
/// <summary>
/// VR extension of PointerInputModule which supports gaze and controller pointing.
/// </summary>
public class OVRInputModule : PointerInputModule
{
// Ray origin for pointer interaction; rays are cast along this transform's +Z axis.
[Tooltip("Object which points with Z axis. E.g. CentreEyeAnchor from OVRCameraRig")]
public Transform rayTransform;
// Cursor visual updated to follow the pointer hit point.
public OVRCursor m_Cursor;
[Tooltip("Gamepad button to act as gaze click")]
public OVRInput.Button joyPadClickButton = OVRInput.Button.One;
[Tooltip("Keyboard button to act as gaze click")]
public KeyCode gazeClickKey = KeyCode.Space;
[Header("Physics")]
[Tooltip("Perform an sphere cast to determine correct depth for gaze pointer")]
public bool performSphereCastForGazepointer;
[Header("Gamepad Stick Scroll")]
[Tooltip("Enable scrolling with the right stick on a gamepad")]
public bool useRightStickScroll = true;
[Tooltip("Deadzone for right stick to prevent accidental scrolling")]
public float rightStickDeadZone = 0.15f;
[Header("Touchpad Swipe Scroll")]
[Tooltip("Enable scrolling by swiping the touchpad")]
public bool useSwipeScroll = true;
[Tooltip("Minimum trackpad movement in pixels to start swiping")]
public float swipeDragThreshold = 2;
[Tooltip("Distance scrolled when swipe scroll occurs")]
public float swipeDragScale = 1f;
[Tooltip("Invert X axis on touchpad")]
public bool InvertSwipeXAxis = false;
// The raycaster that gets to do pointer interaction (e.g. with a mouse), gaze interaction always works
[NonSerialized]
public OVRRaycaster activeGraphicRaycaster;
[Header("Dragging")]
[Tooltip("Minimum pointer movement in degrees to start dragging")]
public float angleDragThreshold = 1;
// Radius used when performSphereCastForGazepointer is enabled.
[SerializeField]
private float m_SpherecastRadius = 1.0f;
// The following region contains code exactly the same as the implementation
// of StandaloneInputModule. It is copied here rather than inheriting from StandaloneInputModule
// because most of StandaloneInputModule is private so it isn't possible to easily derive from.
// Future changes from Unity to StandaloneInputModule will make it possible for this class to
// derive from StandaloneInputModule instead of PointerInput module.
//
// The following functions are not present in the following region since they have modified
// versions in the next region:
// Process
// ProcessMouseEvent
// UseMouse
#region StandaloneInputModule code
// Earliest time (unscaled) at which the next repeated move event may fire.
private float m_NextAction;
// Mouse positions for this and the previous frame, used to detect mouse movement.
private Vector2 m_LastMousePosition;
private Vector2 m_MousePosition;
// Protected to prevent direct instantiation, matching StandaloneInputModule.
protected OVRInputModule()
{}
#if UNITY_EDITOR
// Editor-only default: allow activation on mobile so the module works in device simulators.
protected override void Reset()
{
allowActivationOnMobileDevice = true;
}
#endif
// Legacy API kept for source compatibility; the module now handles mouse and keyboard together.
[Obsolete("Mode is no longer needed on input module as it handles both mouse and keyboard simultaneously.", false)]
public enum InputMode
{
Mouse,
Buttons
}
[Obsolete("Mode is no longer needed on input module as it handles both mouse and keyboard simultaneously.", false)]
public InputMode inputMode
{
get { return InputMode.Mouse; }
}
[Header("Standalone Input Module")]
/// <summary>
/// Name of the horizontal axis for movement (if axis events are used).
/// </summary>
[SerializeField]
private string m_HorizontalAxis = "Horizontal";
/// <summary>
/// Name of the vertical axis for movement (if axis events are used).
/// </summary>
[SerializeField]
private string m_VerticalAxis = "Vertical";
/// <summary>
/// Name of the submit button.
/// </summary>
[SerializeField]
private string m_SubmitButton = "Submit";
/// <summary>
/// Name of the cancel button.
/// </summary>
[SerializeField]
private string m_CancelButton = "Cancel";
// Maximum rate at which repeated move events fire while an axis is held.
[SerializeField]
private float m_InputActionsPerSecond = 10;
// When true, the module may activate even without a mouse present (see IsModuleSupported).
[SerializeField]
private bool m_AllowActivationOnMobileDevice;
public bool allowActivationOnMobileDevice
{
get { return m_AllowActivationOnMobileDevice; }
set { m_AllowActivationOnMobileDevice = value; }
}
public float inputActionsPerSecond
{
get { return m_InputActionsPerSecond; }
set { m_InputActionsPerSecond = value; }
}
/// <summary>
/// Name of the horizontal axis for movement (if axis events are used).
/// </summary>
public string horizontalAxis
{
get { return m_HorizontalAxis; }
set { m_HorizontalAxis = value; }
}
/// <summary>
/// Name of the vertical axis for movement (if axis events are used).
/// </summary>
public string verticalAxis
{
get { return m_VerticalAxis; }
set { m_VerticalAxis = value; }
}
public string submitButton
{
get { return m_SubmitButton; }
set { m_SubmitButton = value; }
}
public string cancelButton
{
get { return m_CancelButton; }
set { m_CancelButton = value; }
}
// Rolls the mouse position history forward each frame so ShouldActivateModule can detect movement.
public override void UpdateModule()
{
m_LastMousePosition = m_MousePosition;
m_MousePosition = Input.mousePosition;
}
public override bool IsModuleSupported()
{
// Check for mouse presence instead of whether touch is supported,
// as you can connect mouse to a tablet and in that case we'd want
// to use StandaloneInputModule for non-touch input events.
return m_AllowActivationOnMobileDevice || Input.mousePresent;
}
// Activates on any submit/cancel press, axis movement, mouse movement, or a left click,
// provided the base module would activate at all.
public override bool ShouldActivateModule()
{
    if (!base.ShouldActivateModule())
    {
        return false;
    }
    bool mouseMoved = (m_MousePosition - m_LastMousePosition).sqrMagnitude > 0.0f;
    return Input.GetButtonDown(m_SubmitButton)
        || Input.GetButtonDown(m_CancelButton)
        || !Mathf.Approximately(Input.GetAxisRaw(m_HorizontalAxis), 0.0f)
        || !Mathf.Approximately(Input.GetAxisRaw(m_VerticalAxis), 0.0f)
        || mouseMoved
        || Input.GetMouseButtonDown(0);
}
// Resets the mouse history and restores (or initializes) the event system's selection.
public override void ActivateModule()
{
    base.ActivateModule();
    m_MousePosition = Input.mousePosition;
    m_LastMousePosition = Input.mousePosition;
    GameObject selection = eventSystem.currentSelectedGameObject;
    if (selection == null)
    {
        selection = eventSystem.firstSelectedGameObject;
    }
    eventSystem.SetSelectedGameObject(selection, GetBaseEventData());
}
// Clears pointer selection state when another input module takes over.
public override void DeactivateModule()
{
base.DeactivateModule();
ClearSelection();
}
/// <summary>
/// Process submit keys: forwards submit/cancel presses to the currently selected object.
/// Returns true when the event was consumed.
/// </summary>
private bool SendSubmitEventToSelectedObject()
{
    GameObject selected = eventSystem.currentSelectedGameObject;
    if (selected == null)
    {
        return false;
    }
    var eventData = GetBaseEventData();
    if (Input.GetButtonDown(m_SubmitButton))
    {
        ExecuteEvents.Execute(selected, eventData, ExecuteEvents.submitHandler);
    }
    if (Input.GetButtonDown(m_CancelButton))
    {
        ExecuteEvents.Execute(selected, eventData, ExecuteEvents.cancelHandler);
    }
    return eventData.used;
}
/// <summary>
/// Decide whether a navigation move event may be processed this frame.
/// A fresh button press is always allowed; held axes are rate-limited by
/// m_NextAction.
/// </summary>
private bool AllowMoveEventProcessing(float time)
{
    if (Input.GetButtonDown(m_HorizontalAxis))
        return true;
    if (Input.GetButtonDown(m_VerticalAxis))
        return true;
    return time > m_NextAction;
}
/// <summary>
/// Read the raw movement axes, snapping each component to +/-1 on the frame
/// its button was first pressed (a zero component stays zero).
/// </summary>
private Vector2 GetRawMoveVector()
{
    float x = Input.GetAxisRaw(m_HorizontalAxis);
    float y = Input.GetAxisRaw(m_VerticalAxis);

    if (Input.GetButtonDown(m_HorizontalAxis) && x != 0f)
    {
        x = x < 0f ? -1f : 1f;
    }
    if (Input.GetButtonDown(m_VerticalAxis) && y != 0f)
    {
        y = y < 0f ? -1f : 1f;
    }

    return new Vector2(x, y);
}
/// <summary>
/// Process keyboard events. Converts axis input into a move event for the
/// currently selected object, rate-limited by m_InputActionsPerSecond.
/// </summary>
/// <returns>True if the move event was used.</returns>
private bool SendMoveEventToSelectedObject()
{
    float time = Time.unscaledTime;

    if (!AllowMoveEventProcessing(time))
        return false;

    Vector2 movement = GetRawMoveVector();
    // 0.6f is the dead zone passed to GetAxisEventData.
    var axisEventData = GetAxisEventData(movement.x, movement.y, 0.6f);
    if (!Mathf.Approximately(axisEventData.moveVector.x, 0f)
        || !Mathf.Approximately(axisEventData.moveVector.y, 0f))
    {
        ExecuteEvents.Execute(eventSystem.currentSelectedGameObject, axisEventData, ExecuteEvents.moveHandler);
    }
    // Throttle the next repeat regardless of whether a move was sent.
    m_NextAction = time + 1f / m_InputActionsPerSecond;
    return axisEventData.used;
}
/// <summary>
/// Send an updateSelected event to the currently selected object, if any.
/// </summary>
/// <returns>True if the event was used.</returns>
private bool SendUpdateEventToSelectedObject()
{
    if (eventSystem.currentSelectedGameObject == null)
        return false;

    var data = GetBaseEventData();
    ExecuteEvents.Execute(eventSystem.currentSelectedGameObject, data, ExecuteEvents.updateSelectedHandler);
    return data.used;
}
/// <summary>
/// Process the current mouse press. Handles pointer-down (press/click-count
/// bookkeeping, drag initialization) and pointer-up (click, drop, end-drag,
/// enter/exit refresh) for one button.
/// </summary>
private void ProcessMousePress(MouseButtonEventData data)
{
    var pointerEvent = data.buttonData;
    var currentOverGo = pointerEvent.pointerCurrentRaycast.gameObject;

    // PointerDown notification
    if (data.PressedThisFrame())
    {
        pointerEvent.eligibleForClick = true;
        pointerEvent.delta = Vector2.zero;
        pointerEvent.dragging = false;
        pointerEvent.useDragThreshold = true;
        pointerEvent.pressPosition = pointerEvent.position;
        if (pointerEvent.IsVRPointer())
        {
            // Remember where the press started on the touchpad so swipe-based
            // scrolling/dragging can measure movement relative to this point.
            pointerEvent.SetSwipeStart(Input.mousePosition);
        }
        pointerEvent.pointerPressRaycast = pointerEvent.pointerCurrentRaycast;

        DeselectIfSelectionChanged(currentOverGo, pointerEvent);

        // search for the control that will receive the press
        // if we can't find a press handler set the press
        // handler to be what would receive a click.
        var newPressed = ExecuteEvents.ExecuteHierarchy(currentOverGo, pointerEvent, ExecuteEvents.pointerDownHandler);

        // didn't find a press handler... search for a click handler
        if (newPressed == null)
            newPressed = ExecuteEvents.GetEventHandler<IPointerClickHandler>(currentOverGo);

        float time = Time.unscaledTime;

        // Repeat presses on the same object within 0.3s count as multi-clicks.
        if (newPressed == pointerEvent.lastPress)
        {
            var diffTime = time - pointerEvent.clickTime;
            if (diffTime < 0.3f)
                ++pointerEvent.clickCount;
            else
                pointerEvent.clickCount = 1;
            pointerEvent.clickTime = time;
        }
        else
        {
            pointerEvent.clickCount = 1;
        }

        pointerEvent.pointerPress = newPressed;
        pointerEvent.rawPointerPress = currentOverGo;
        pointerEvent.clickTime = time;

        // Save the drag handler as well
        pointerEvent.pointerDrag = ExecuteEvents.GetEventHandler<IDragHandler>(currentOverGo);
        if (pointerEvent.pointerDrag != null)
            ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.initializePotentialDrag);
    }

    // PointerUp notification
    if (data.ReleasedThisFrame())
    {
        ExecuteEvents.Execute(pointerEvent.pointerPress, pointerEvent, ExecuteEvents.pointerUpHandler);

        // see if we mouse up on the same element that we clicked on...
        var pointerUpHandler = ExecuteEvents.GetEventHandler<IPointerClickHandler>(currentOverGo);

        // PointerClick and Drop events
        if (pointerEvent.pointerPress == pointerUpHandler && pointerEvent.eligibleForClick)
        {
            ExecuteEvents.Execute(pointerEvent.pointerPress, pointerEvent, ExecuteEvents.pointerClickHandler);
        }
        else if (pointerEvent.pointerDrag != null)
        {
            // Released over a different object while dragging: treat as a drop.
            ExecuteEvents.ExecuteHierarchy(currentOverGo, pointerEvent, ExecuteEvents.dropHandler);
        }

        pointerEvent.eligibleForClick = false;
        pointerEvent.pointerPress = null;
        pointerEvent.rawPointerPress = null;

        if (pointerEvent.pointerDrag != null && pointerEvent.dragging)
            ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.endDragHandler);

        pointerEvent.dragging = false;
        pointerEvent.pointerDrag = null;

        // redo pointer enter / exit to refresh state
        // so that if we moused over something that ignored it before
        // due to having pressed on something else
        // it now gets it.
        if (currentOverGo != pointerEvent.pointerEnter)
        {
            HandlePointerExitAndEnter(pointerEvent, null);
            HandlePointerExitAndEnter(pointerEvent, currentOverGo);
        }
    }
}
#endregion
#region Modified StandaloneInputModule methods
/// <summary>
/// Process all mouse events. This is the same as the StandaloneInputModule version except that
/// it takes MouseState as a parameter, allowing it to be used for both Gaze and Mouse
/// pointers.
/// </summary>
private void ProcessMouseEvent(MouseState mouseData)
{
    var pressed = mouseData.AnyPressesThisFrame();
    var released = mouseData.AnyReleasesThisFrame();

    var leftButtonData = mouseData.GetButtonState(PointerEventData.InputButton.Left).eventData;

    if (!UseMouse(pressed, released, leftButtonData.buttonData))
        return;

    // Process the first mouse button fully
    ProcessMousePress(leftButtonData);
    ProcessMove(leftButtonData.buttonData);
    ProcessDrag(leftButtonData.buttonData);

    // Now process right / middle clicks (press and drag only; move events are
    // handled once via the left-button pointer above).
    ProcessMousePress(mouseData.GetButtonState(PointerEventData.InputButton.Right).eventData);
    ProcessDrag(mouseData.GetButtonState(PointerEventData.InputButton.Right).eventData.buttonData);
    ProcessMousePress(mouseData.GetButtonState(PointerEventData.InputButton.Middle).eventData);
    ProcessDrag(mouseData.GetButtonState(PointerEventData.InputButton.Middle).eventData.buttonData);

    // Route scroll events to the hierarchy under the left-button pointer.
    if (!Mathf.Approximately(leftButtonData.buttonData.scrollDelta.sqrMagnitude, 0.0f))
    {
        var scrollHandler = ExecuteEvents.GetEventHandler<IScrollHandler>(leftButtonData.buttonData.pointerCurrentRaycast.gameObject);
        ExecuteEvents.ExecuteHierarchy(scrollHandler, leftButtonData.buttonData, ExecuteEvents.scrollHandler);
    }
}
/// <summary>
/// Process this InputModule. Same as the StandaloneInputModule version, except that it calls
/// ProcessMouseEvent twice, once for gaze pointers, and once for mouse pointers.
/// </summary>
public override void Process()
{
    bool usedEvent = SendUpdateEventToSelectedObject();

    // Keyboard/gamepad navigation, only attempted if nothing used the update event.
    if (eventSystem.sendNavigationEvents)
    {
        if (!usedEvent)
            usedEvent |= SendMoveEventToSelectedObject();

        if (!usedEvent)
            SendSubmitEventToSelectedObject();
    }

    ProcessMouseEvent(GetGazePointerData());
#if !UNITY_ANDROID
    // Canvas-surface mouse pointer processing is compiled out on Android.
    ProcessMouseEvent(GetCanvasPointerData());
#endif
}
/// <summary>
/// Decide if mouse events need to be processed this frame. Same as StandaloneInputModule except
/// that the IsPointerMoving method from this class is used, instead of the method on PointerEventData.
/// </summary>
private static bool UseMouse(bool pressed, bool released, PointerEventData pointerData)
{
    // Process whenever a button changed state, the pointer moved, or the
    // user is scrolling. (Collapsed from the original if/return-true/return-false.)
    return pressed || released || IsPointerMoving(pointerData) || pointerData.IsScrolling();
}
#endregion
/// <summary>
/// Convenience function for cloning pointer event state between two
/// OVRPointerEventData instances (including the world-space ray).
/// </summary>
/// <param name="from">Copy this value</param>
/// <param name="to">to this object</param>
protected void CopyFromTo(OVRPointerEventData from, OVRPointerEventData to)
{
    to.position = from.position;
    to.delta = from.delta;
    to.scrollDelta = from.scrollDelta;
    to.pointerCurrentRaycast = from.pointerCurrentRaycast;
    to.pointerEnter = from.pointerEnter;
    to.worldSpaceRay = from.worldSpaceRay;
}
/// <summary>
/// Convenience function for cloning pointer event state between two
/// PointerEventData instances.
/// </summary>
/// <param name="from">Copy this value</param>
/// <param name="to">to this object</param>
protected new void CopyFromTo(PointerEventData from, PointerEventData to)
{
    to.position = from.position;
    to.delta = from.delta;
    to.scrollDelta = from.scrollDelta;
    to.pointerCurrentRaycast = from.pointerCurrentRaycast;
    to.pointerEnter = from.pointerEnter;
}
// In the following region we extend the PointerEventData system implemented in PointerInputModule
// We define an additional dictionary for ray(e.g. gaze) based pointers. Mouse pointers still use the dictionary
// in PointerInputModule
#region PointerEventData pool
// Pool of OVRPointerEventData for ray based pointers, keyed by pointer id
// (kMouseLeftId / kMouseRightId / kMouseMiddleId).
protected Dictionary<int, OVRPointerEventData> m_VRRayPointerData = new Dictionary<int, OVRPointerEventData>();
/// <summary>
/// Look up (and optionally create) the OVRPointerEventData for a pointer id.
/// </summary>
/// <returns>True only when a new entry was created.</returns>
protected bool GetPointerData(int id, out OVRPointerEventData data, bool create)
{
    // Existing entry: reuse it.
    if (m_VRRayPointerData.TryGetValue(id, out data))
        return false;

    // Missing and caller did not ask for creation: data stays null.
    if (!create)
        return false;

    data = new OVRPointerEventData(eventSystem)
    {
        pointerId = id,
    };
    m_VRRayPointerData.Add(id, data);
    return true;
}
/// <summary>
/// Clear pointer state for both types of pointer (base mouse pointers and
/// OVR ray pointers) and deselect the current UI selection.
/// </summary>
protected new void ClearSelection()
{
    var baseEventData = GetBaseEventData();

    foreach (var pointer in m_PointerData.Values)
    {
        // clear all selection
        HandlePointerExitAndEnter(pointer, null);
    }
    foreach (var pointer in m_VRRayPointerData.Values)
    {
        // clear all selection
        HandlePointerExitAndEnter(pointer, null);
    }

    // NOTE(review): only the base m_PointerData dictionary is cleared here;
    // m_VRRayPointerData entries are kept (and later reused by GetPointerData).
    // Confirm this asymmetry is intentional.
    m_PointerData.Clear();
    eventSystem.SetSelectedGameObject(null, baseEventData);
}
#endregion
/// <summary>
/// For a RectTransform, calculate its normal in world space.
/// </summary>
static Vector3 GetRectTransformNormal(RectTransform rectTransform)
{
    // GetWorldCorners fills: [0] bottom-left, [1] top-left, [2] top-right,
    // [3] bottom-right. (The original called it a second time after computing
    // the edges; that redundant call is removed — the corners are unchanged.)
    Vector3[] corners = new Vector3[4];
    rectTransform.GetWorldCorners(corners);
    Vector3 bottomEdge = corners[3] - corners[0];
    Vector3 leftEdge = corners[1] - corners[0];
    return Vector3.Cross(bottomEdge, leftEdge).normalized;
}
// Reusable state object returned by GetGazePointerData/GetCanvasPointerData
// each frame (avoids per-frame allocation).
private readonly MouseState m_MouseState = new MouseState();
// The following 2 functions are equivalent to PointerInputModule.GetMousePointerEventData but are customized to
// get data for ray pointers and canvas mouse pointers.
/// <summary>
/// State for a pointer controlled by a world space ray. E.g. gaze pointer.
/// Builds the per-frame MouseState from the ray transform: raycasts into the
/// scene, positions the gaze cursor, and fills right/middle slots with copies
/// of the left pointer's data for compatibility.
/// </summary>
/// <returns></returns>
virtual protected MouseState GetGazePointerData()
{
    // Get the OVRRayPointerEventData reference
    OVRPointerEventData leftData;
    GetPointerData(kMouseLeftId, out leftData, true);
    leftData.Reset();

    //Now set the world space ray. This ray is what the user uses to point at UI elements
    leftData.worldSpaceRay = new Ray(rayTransform.position, rayTransform.forward);
    leftData.scrollDelta = GetExtraScrollDelta();

    //Populate some default values
    leftData.button = PointerEventData.InputButton.Left;
    leftData.useDragThreshold = true;

    // Perform raycast to find intersections with world
    eventSystem.RaycastAll(leftData, m_RaycastResultCache);
    var raycast = FindFirstRaycast(m_RaycastResultCache);
    leftData.pointerCurrentRaycast = raycast;
    m_RaycastResultCache.Clear();

    m_Cursor.SetCursorRay(rayTransform);

    OVRRaycaster ovrRaycaster = raycast.module as OVRRaycaster;
    // We're only interested in intersections from OVRRaycasters
    if (ovrRaycaster)
    {
        // The Unity UI system expects event data to have a screen position
        // so even though this raycast came from a world space ray we must get a screen
        // space position for the camera attached to this raycaster for compatibility
        leftData.position = ovrRaycaster.GetScreenPosition(raycast);

        // Find the world position and normal of the Graphic the ray intersected
        RectTransform graphicRect = raycast.gameObject.GetComponent<RectTransform>();
        if (graphicRect != null)
        {
            // Set our gaze indicator with this world position and normal
            Vector3 worldPos = raycast.worldPosition;
            Vector3 normal = GetRectTransformNormal(graphicRect);
            m_Cursor.SetCursorStartDest(rayTransform.position, worldPos, normal);
        }
    }

    // Now process physical raycast intersections
    OVRPhysicsRaycaster physicsRaycaster = raycast.module as OVRPhysicsRaycaster;
    if (physicsRaycaster)
    {
        Vector3 position = raycast.worldPosition;

        if (performSphereCastForGazepointer)
        {
            // Here we cast a sphere into the scene rather than a ray. This gives a more accurate depth
            // for positioning a circular gaze pointer
            List<RaycastResult> results = new List<RaycastResult>();
            physicsRaycaster.Spherecast(leftData, results, m_SpherecastRadius);
            if (results.Count > 0 && results[0].distance < raycast.distance)
            {
                position = results[0].worldPosition;
            }
        }

        leftData.position = physicsRaycaster.GetScreenPos(raycast.worldPosition);

        m_Cursor.SetCursorStartDest(rayTransform.position, position, raycast.worldNormal);
    }

    // Stick default data values in right and middle slots for compatibility
    // copy the appropriate data into right and middle slots
    OVRPointerEventData rightData;
    GetPointerData(kMouseRightId, out rightData, true);
    CopyFromTo(leftData, rightData);
    rightData.button = PointerEventData.InputButton.Right;

    OVRPointerEventData middleData;
    GetPointerData(kMouseMiddleId, out middleData, true);
    CopyFromTo(leftData, middleData);
    middleData.button = PointerEventData.InputButton.Middle;

    // Only the left slot carries a live press state; right/middle are inert.
    m_MouseState.SetButtonState(PointerEventData.InputButton.Left, GetGazeButtonState(), leftData);
    m_MouseState.SetButtonState(PointerEventData.InputButton.Right, PointerEventData.FramePressState.NotChanged, rightData);
    m_MouseState.SetButtonState(PointerEventData.InputButton.Middle, PointerEventData.FramePressState.NotChanged, middleData);
    return m_MouseState;
}
/// <summary>
/// Get state for pointer which is a pointer moving in world space across the surface of a world space canvas.
/// Uses the active graphic raycaster to resolve the pointer's position on its canvas.
/// </summary>
/// <returns></returns>
protected MouseState GetCanvasPointerData()
{
    // Get the PointerEventData reference (base PointerInputModule pool)
    PointerEventData leftData;
    GetPointerData(kMouseLeftId, out leftData, true);
    leftData.Reset();

    // Setup default values here. Set position to zero because we don't actually know the pointer
    // positions. Each canvas knows the position of its canvas pointer.
    leftData.position = Vector2.zero;
    leftData.scrollDelta = Input.mouseScrollDelta;
    leftData.button = PointerEventData.InputButton.Left;

    if (activeGraphicRaycaster)
    {
        // Let the active raycaster find intersections on its canvas
        activeGraphicRaycaster.RaycastPointer(leftData, m_RaycastResultCache);
        var raycast = FindFirstRaycast(m_RaycastResultCache);
        leftData.pointerCurrentRaycast = raycast;
        m_RaycastResultCache.Clear();

        OVRRaycaster ovrRaycaster = raycast.module as OVRRaycaster;
        if (ovrRaycaster) // raycast may not actually contain a result
        {
            // The Unity UI system expects event data to have a screen position
            // so even though this raycast came from a world space ray we must get a screen
            // space position for the camera attached to this raycaster for compatibility
            Vector2 position = ovrRaycaster.GetScreenPosition(raycast);

            leftData.delta = position - leftData.position;
            leftData.position = position;
        }
    }

    // copy the appropriate data into right and middle slots
    PointerEventData rightData;
    GetPointerData(kMouseRightId, out rightData, true);
    CopyFromTo(leftData, rightData);
    rightData.button = PointerEventData.InputButton.Right;

    PointerEventData middleData;
    GetPointerData(kMouseMiddleId, out middleData, true);
    CopyFromTo(leftData, middleData);
    middleData.button = PointerEventData.InputButton.Middle;

    m_MouseState.SetButtonState(PointerEventData.InputButton.Left, StateForMouseButton(0), leftData);
    m_MouseState.SetButtonState(PointerEventData.InputButton.Right, StateForMouseButton(1), rightData);
    m_MouseState.SetButtonState(PointerEventData.InputButton.Middle, StateForMouseButton(2), middleData);
    return m_MouseState;
}
/// <summary>
/// New version of ShouldStartDrag implemented first in PointerInputModule. This version differs in that
/// for ray based pointers it makes a decision about whether a drag should start based on the angular change
/// the pointer has made so far, as seen from the camera. This also works when the world space ray is
/// translated rather than rotated, since the beginning and end of the movement are considered as angle from
/// the same point.
/// </summary>
private bool ShouldStartDrag(PointerEventData pointerEvent)
{
    if (!pointerEvent.useDragThreshold)
        return true;

    if (!pointerEvent.IsVRPointer())
    {
        // Same as original behaviour for canvas based pointers
        return (pointerEvent.pressPosition - pointerEvent.position).sqrMagnitude >= eventSystem.pixelDragThreshold * eventSystem.pixelDragThreshold;
    }
    else
    {
#if UNITY_ANDROID && !UNITY_EDITOR // On android allow swiping to start drag
        if (useSwipeScroll && ((Vector3)pointerEvent.GetSwipeStart() - Input.mousePosition).magnitude > swipeDragThreshold)
        {
            return true;
        }
#endif
        // When it's not a screen space pointer we have to look at the angle it moved rather than the pixels distance
        // For gaze based pointing screen-space distance moved will always be near 0
        Vector3 cameraPos = pointerEvent.pressEventCamera.transform.position;
        Vector3 pressDir = (pointerEvent.pointerPressRaycast.worldPosition - cameraPos).normalized;
        Vector3 currentDir = (pointerEvent.pointerCurrentRaycast.worldPosition - cameraPos).normalized;
        // dot < cos(threshold) means the angle between press and current
        // directions exceeds angleDragThreshold degrees.
        return Vector3.Dot(pressDir, currentDir) < Mathf.Cos(Mathf.Deg2Rad * (angleDragThreshold));
    }
}
/// <summary>
/// The purpose of this function is to allow us to switch between using the standard IsPointerMoving
/// method for mouse driven pointers, but to always return true when it's a ray based pointer.
/// All real-world ray-based input devices are always moving so for simplicity we just return true
/// for them.
///
/// If PointerEventData.IsPointerMoving was virtual we could just override that in
/// OVRRayPointerEventData.
/// </summary>
/// <param name="pointerEvent"></param>
/// <returns></returns>
static bool IsPointerMoving(PointerEventData pointerEvent)
{
    return pointerEvent.IsVRPointer() || pointerEvent.IsPointerMoving();
}
/// <summary>
/// Offset a pointer position by the touchpad swipe since the press started,
/// scaled by swipeDragScale. On non-Android platforms (and in the editor)
/// this returns the position unchanged.
/// </summary>
protected Vector2 SwipeAdjustedPosition(Vector2 originalPosition, PointerEventData pointerEvent)
{
#if UNITY_ANDROID && !UNITY_EDITOR
    // On android we use the touchpad position (accessed through Input.mousePosition) to modify
    // the effective cursor position for events related to dragging. This allows the user to
    // use the touchpad to drag draggable UI elements
    if (useSwipeScroll)
    {
        Vector2 delta = (Vector2)Input.mousePosition - pointerEvent.GetSwipeStart();
        if (InvertSwipeXAxis)
            delta.x *= -1;
        return originalPosition + delta * swipeDragScale;
    }
#endif
    return originalPosition;
}
/// <summary>
/// Exactly the same as the code from PointerInputModule, except that we call our own
/// IsPointerMoving.
///
/// This would also not be necessary if PointerEventData.IsPointerMoving was virtual
/// </summary>
/// <param name="pointerEvent"></param>
protected override void ProcessDrag(PointerEventData pointerEvent)
{
    Vector2 originalPosition = pointerEvent.position;
    bool moving = IsPointerMoving(pointerEvent);

    // Begin-drag: pointer is moving, has a drag target, isn't dragging yet,
    // and has passed the drag threshold.
    if (moving && pointerEvent.pointerDrag != null
        && !pointerEvent.dragging
        && ShouldStartDrag(pointerEvent))
    {
        if (pointerEvent.IsVRPointer())
        {
            //adjust the position used based on swiping action. Allowing the user to
            //drag items by swiping on the touchpad
            pointerEvent.position = SwipeAdjustedPosition(originalPosition, pointerEvent);
        }
        ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.beginDragHandler);
        pointerEvent.dragging = true;
    }

    // Drag notification
    if (pointerEvent.dragging && moving && pointerEvent.pointerDrag != null)
    {
        if (pointerEvent.IsVRPointer())
        {
            pointerEvent.position = SwipeAdjustedPosition(originalPosition, pointerEvent);
        }
        // Before doing drag we should cancel any pointer down state
        // And clear selection!
        if (pointerEvent.pointerPress != pointerEvent.pointerDrag)
        {
            ExecuteEvents.Execute(pointerEvent.pointerPress, pointerEvent, ExecuteEvents.pointerUpHandler);

            pointerEvent.eligibleForClick = false;
            pointerEvent.pointerPress = null;
            pointerEvent.rawPointerPress = null;
        }
        ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.dragHandler);
    }
}
/// <summary>
/// Get state of button corresponding to gaze pointer. Combines the configured
/// gaze-click key / gamepad button (and, on Android devices, the touchpad)
/// into a single FramePressState for this frame.
/// </summary>
/// <returns></returns>
virtual protected PointerEventData.FramePressState GetGazeButtonState()
{
    //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
    var pressed = Input.GetKeyDown(gazeClickKey) || OVRInput.GetDown(joyPadClickButton);
    var released = Input.GetKeyUp(gazeClickKey) || OVRInput.GetUp(joyPadClickButton);
#if UNITY_ANDROID && !UNITY_EDITOR
    pressed |= Input.GetMouseButtonDown(0);
    released |= Input.GetMouseButtonUp(0);
#endif
#else
    // Without the legacy input manager, only the OVR gamepad button is available.
    var pressed = OVRInput.GetDown(joyPadClickButton);
    var released = OVRInput.GetUp(joyPadClickButton);
#endif

    if (pressed && released)
        return PointerEventData.FramePressState.PressedAndReleased;
    if (pressed)
        return PointerEventData.FramePressState.Pressed;
    if (released)
        return PointerEventData.FramePressState.Released;
    return PointerEventData.FramePressState.NotChanged;
}
/// <summary>
/// Get extra scroll delta from gamepad (right thumbstick), with a dead zone
/// applied per axis. Returns zero when right-stick scrolling is disabled.
/// </summary>
protected Vector2 GetExtraScrollDelta()
{
    if (!useRightStickScroll)
    {
        return new Vector2();
    }

    Vector2 stick = OVRInput.Get(OVRInput.Axis2D.SecondaryThumbstick);
    // Zero out axes within the dead zone so a slightly off-center stick
    // doesn't cause scrolling.
    if (Mathf.Abs(stick.x) < rightStickDeadZone) stick.x = 0;
    if (Mathf.Abs(stick.y) < rightStickDeadZone) stick.y = 0;
    return stick;
}
};
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 8f1a9a1d119a5944aacfb87d1ec283a2
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,154 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
[DefaultExecutionOrder(-80)]
public class OVRMesh : MonoBehaviour
{
    /// <summary>
    /// Optional component that supplies the mesh type to load (overrides the
    /// serialized _meshType in Awake).
    /// </summary>
    public interface IOVRMeshDataProvider
    {
        MeshType GetMeshType();
    }

    public enum MeshType
    {
        None = OVRPlugin.MeshType.None,
        HandLeft = OVRPlugin.MeshType.HandLeft,
        HandRight = OVRPlugin.MeshType.HandRight,
    }

    [SerializeField]
    private IOVRMeshDataProvider _dataProvider;

    [SerializeField]
    private MeshType _meshType = MeshType.None;

    private Mesh _mesh;

    // True once the mesh has been successfully retrieved from OVRPlugin.
    public bool IsInitialized { get; private set; }

    /// <summary>
    /// The runtime-provided mesh (vertices, UVs, triangles, normals and bone
    /// weights filled from OVRPlugin).
    /// </summary>
    public Mesh Mesh
    {
        get { return _mesh; }
    }

    private void Awake()
    {
        if (_dataProvider == null)
        {
            _dataProvider = GetComponent<IOVRMeshDataProvider>();
        }

        // A data provider on the same GameObject overrides the serialized mesh type.
        if (_dataProvider != null)
        {
            _meshType = _dataProvider.GetMeshType();
        }

        if (ShouldInitialize())
        {
            Initialize(_meshType);
        }
    }

    /// <summary>
    /// Whether initialization should be attempted now: not yet initialized,
    /// a mesh type is configured, and (for hand meshes in the editor) hand
    /// tracking is active.
    /// </summary>
    private bool ShouldInitialize()
    {
        if (IsInitialized)
        {
            return false;
        }

        if (_meshType == MeshType.None)
        {
            return false;
        }
        else if (_meshType == MeshType.HandLeft || _meshType == MeshType.HandRight)
        {
#if UNITY_EDITOR
            // In the editor the hand mesh is only available while hands are connected.
            return OVRInput.IsControllerConnected(OVRInput.Controller.Hands);
#else
            return true;
#endif
        }
        else
        {
            return true;
        }
    }

    /// <summary>
    /// Fetch the mesh data for <paramref name="meshType"/> from OVRPlugin and
    /// build a Unity Mesh from it. Sets IsInitialized on success.
    /// </summary>
    private void Initialize(MeshType meshType)
    {
        _mesh = new Mesh();

        var ovrpMesh = new OVRPlugin.Mesh();
        // Fix: use the meshType parameter (the original read the _meshType field
        // here, ignoring the parameter; all callers pass _meshType so behavior
        // is unchanged, but the parameter is now honored).
        if (OVRPlugin.GetMesh((OVRPlugin.MeshType)meshType, out ovrpMesh))
        {
            // Positions are converted from the runtime's flipped-X space.
            var vertices = new Vector3[ovrpMesh.NumVertices];
            for (int i = 0; i < ovrpMesh.NumVertices; ++i)
            {
                vertices[i] = ovrpMesh.VertexPositions[i].FromFlippedXVector3f();
            }
            _mesh.vertices = vertices;

            // V coordinate is negated to match Unity's UV convention.
            var uv = new Vector2[ovrpMesh.NumVertices];
            for (int i = 0; i < ovrpMesh.NumVertices; ++i)
            {
                uv[i] = new Vector2(ovrpMesh.VertexUV0[i].x, -ovrpMesh.VertexUV0[i].y);
            }
            _mesh.uv = uv;

            // Index order is reversed, flipping the triangle winding to match
            // the flipped-X coordinate conversion above.
            var triangles = new int[ovrpMesh.NumIndices];
            for (int i = 0; i < ovrpMesh.NumIndices; ++i)
            {
                triangles[i] = ovrpMesh.Indices[ovrpMesh.NumIndices - i - 1];
            }
            _mesh.triangles = triangles;

            var normals = new Vector3[ovrpMesh.NumVertices];
            for (int i = 0; i < ovrpMesh.NumVertices; ++i)
            {
                normals[i] = ovrpMesh.VertexNormals[i].FromFlippedXVector3f();
            }
            _mesh.normals = normals;

            // Up to four bone influences per vertex, copied straight from the runtime.
            var boneWeights = new BoneWeight[ovrpMesh.NumVertices];
            for (int i = 0; i < ovrpMesh.NumVertices; ++i)
            {
                var currentBlendWeight = ovrpMesh.BlendWeights[i];
                var currentBlendIndices = ovrpMesh.BlendIndices[i];

                boneWeights[i].boneIndex0 = (int)currentBlendIndices.x;
                boneWeights[i].weight0 = currentBlendWeight.x;
                boneWeights[i].boneIndex1 = (int)currentBlendIndices.y;
                boneWeights[i].weight1 = currentBlendWeight.y;
                boneWeights[i].boneIndex2 = (int)currentBlendIndices.z;
                boneWeights[i].weight2 = currentBlendWeight.z;
                boneWeights[i].boneIndex3 = (int)currentBlendIndices.w;
                boneWeights[i].weight3 = currentBlendWeight.w;
            }
            _mesh.boneWeights = boneWeights;

            IsInitialized = true;
        }
    }

#if UNITY_EDITOR
    // In the editor the mesh may become available after Awake (e.g. once hand
    // tracking connects), so keep retrying until initialized.
    private void Update()
    {
        if (ShouldInitialize())
        {
            Initialize(_meshType);
        }
    }
#endif
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 5cc5c234723e3a54d8dd09b131117743
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,195 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Drives a SkinnedMeshRenderer for an OVRMesh (e.g. a tracked hand):
/// binds the mesh to an optional OVRSkeleton, toggles rendering based on
/// tracking confidence, and can swap materials during a system gesture.
/// </summary>
public class OVRMeshRenderer : MonoBehaviour
{
    /// <summary>
    /// Optional component that supplies per-frame validity/confidence/gesture data.
    /// </summary>
    public interface IOVRMeshRendererDataProvider
    {
        MeshRendererData GetMeshRendererData();
    }

    public struct MeshRendererData
    {
        // Whether the tracking data backing the mesh is valid this frame.
        public bool IsDataValid { get; set; }
        // Whether the tracking data is high confidence this frame.
        public bool IsDataHighConfidence { get; set; }
        // Whether the system-gesture material should be shown.
        public bool ShouldUseSystemGestureMaterial { get; set; }
    }

    public enum ConfidenceBehavior
    {
        None,
        ToggleRenderer,
    }

    public enum SystemGestureBehavior
    {
        None,
        SwapMaterial,
    }

    [SerializeField]
    private IOVRMeshRendererDataProvider _dataProvider;

    [SerializeField]
    private OVRMesh _ovrMesh;
    [SerializeField]
    private OVRSkeleton _ovrSkeleton;
    [SerializeField]
    private ConfidenceBehavior _confidenceBehavior = ConfidenceBehavior.ToggleRenderer;
    [SerializeField]
    private SystemGestureBehavior _systemGestureBehavior = SystemGestureBehavior.SwapMaterial;
    [SerializeField]
    private Material _systemGestureMaterial = null;
    // Material in use before a system-gesture swap, restored afterwards.
    private Material _originalMaterial = null;

    private SkinnedMeshRenderer _skinnedMeshRenderer;

    public bool IsInitialized { get; private set; }
    public bool IsDataValid { get; private set; }
    public bool IsDataHighConfidence { get; private set; }
    public bool ShouldUseSystemGestureMaterial { get; private set; }

    private void Awake()
    {
        // Fall back to sibling components for anything not assigned in the inspector.
        if (_dataProvider == null)
        {
            _dataProvider = GetComponent<IOVRMeshRendererDataProvider>();
        }

        if (_ovrMesh == null)
        {
            _ovrMesh = GetComponent<OVRMesh>();
        }

        if (_ovrSkeleton == null)
        {
            _ovrSkeleton = GetComponent<OVRSkeleton>();
        }
    }

    private void Start()
    {
        if (_ovrMesh == null)
        {
            // disable if no mesh configured
            this.enabled = false;
            return;
        }

        if (ShouldInitialize())
        {
            Initialize();
        }
    }

    /// <summary>
    /// True when not yet initialized and both the mesh and the (optional)
    /// skeleton are ready.
    /// </summary>
    private bool ShouldInitialize()
    {
        if (IsInitialized)
        {
            return false;
        }

        if ((_ovrMesh == null) || ((_ovrMesh != null) && !_ovrMesh.IsInitialized) || ((_ovrSkeleton != null) && !_ovrSkeleton.IsInitialized))
        {
            // do not initialize if mesh or optional skeleton are not initialized
            return false;
        }

        return true;
    }

    /// <summary>
    /// Attach the OVRMesh to a SkinnedMeshRenderer and, when a skeleton is
    /// present, compute bind poses and bone bindings.
    /// </summary>
    private void Initialize()
    {
        _skinnedMeshRenderer = GetComponent<SkinnedMeshRenderer>();
        if (!_skinnedMeshRenderer)
        {
            _skinnedMeshRenderer = gameObject.AddComponent<SkinnedMeshRenderer>();
        }
        _skinnedMeshRenderer.sharedMesh = _ovrMesh.Mesh;
        _originalMaterial = _skinnedMeshRenderer.sharedMaterial;

        if ((_ovrSkeleton != null))
        {
            int numSkinnableBones = _ovrSkeleton.GetCurrentNumSkinnableBones();
            var bindPoses = new Matrix4x4[numSkinnableBones];
            var bones = new Transform[numSkinnableBones];
            var localToWorldMatrix = transform.localToWorldMatrix;
            for (int i = 0; i < numSkinnableBones && i < _ovrSkeleton.Bones.Count; ++i)
            {
                bones[i] = _ovrSkeleton.Bones[i].Transform;
                // Bind pose maps this renderer's local space into each bone's space.
                bindPoses[i] = _ovrSkeleton.BindPoses[i].Transform.worldToLocalMatrix * localToWorldMatrix;
            }
            _ovrMesh.Mesh.bindposes = bindPoses;
            _skinnedMeshRenderer.bones = bones;
            // Bone-driven meshes can move outside the original bounds; keep rendering.
            _skinnedMeshRenderer.updateWhenOffscreen = true;
        }

        IsInitialized = true;
    }

    private void Update()
    {
#if UNITY_EDITOR
        // In the editor, dependencies may become ready after Start; keep retrying.
        if (ShouldInitialize())
        {
            Initialize();
        }
#endif

        IsDataValid = false;
        IsDataHighConfidence = false;
        ShouldUseSystemGestureMaterial = false;

        if (IsInitialized)
        {
            bool shouldRender = false;

            if (_dataProvider != null)
            {
                var data = _dataProvider.GetMeshRendererData();

                IsDataValid = data.IsDataValid;
                IsDataHighConfidence = data.IsDataHighConfidence;
                ShouldUseSystemGestureMaterial = data.ShouldUseSystemGestureMaterial;

                // Only render when tracking is both valid and high confidence.
                shouldRender = data.IsDataValid && data.IsDataHighConfidence;
            }

            if (_confidenceBehavior == ConfidenceBehavior.ToggleRenderer)
            {
                if (_skinnedMeshRenderer != null && _skinnedMeshRenderer.enabled != shouldRender)
                {
                    _skinnedMeshRenderer.enabled = shouldRender;
                }
            }

            if (_systemGestureBehavior == SystemGestureBehavior.SwapMaterial)
            {
                if (_skinnedMeshRenderer != null)
                {
                    if (ShouldUseSystemGestureMaterial && _systemGestureMaterial != null && _skinnedMeshRenderer.sharedMaterial != _systemGestureMaterial)
                    {
                        _skinnedMeshRenderer.sharedMaterial = _systemGestureMaterial;
                    }
                    else if (!ShouldUseSystemGestureMaterial && _originalMaterial != null && _skinnedMeshRenderer.sharedMaterial != _originalMaterial)
                    {
                        _skinnedMeshRenderer.sharedMaterial = _originalMaterial;
                    }
                }
            }
        }
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 73eac66b128fc8749a21623225c34541
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,93 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if UNITY_ANDROID && !UNITY_EDITOR
#define OVR_ANDROID_MRC
#endif
using System;
using UnityEngine;
/// <summary>
/// Settings contract shared by components that configure mixed reality capture
/// (MRC). The members only exist on Windows and Android build targets; on
/// other platforms the interface is empty.
/// </summary>
public interface OVRMixedRealityCaptureConfiguration
{
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
    // General capture settings
    bool enableMixedReality { get; set; }
    LayerMask extraHiddenLayers { get; set; }
    LayerMask extraVisibleLayers { get; set; }
    bool dynamicCullingMask { get; set; }
    OVRManager.CompositionMethod compositionMethod { get; set; }
    Color externalCompositionBackdropColorRift { get; set; }
    Color externalCompositionBackdropColorQuest { get; set; }

    // Physical camera / direct composition settings
    OVRManager.CameraDevice capturingCameraDevice { get; set; }
    bool flipCameraFrameHorizontally { get; set; }
    bool flipCameraFrameVertically { get; set; }
    float handPoseStateLatency { get; set; }
    float sandwichCompositionRenderLatency { get; set; }
    int sandwichCompositionBufferedFrames { get; set; }

    // Chroma key settings
    Color chromaKeyColor { get; set; }
    float chromaKeySimilarity { get; set; }
    float chromaKeySmoothRange { get; set; }
    float chromaKeySpillRange { get; set; }

    // Dynamic lighting settings
    bool useDynamicLighting { get; set; }
    OVRManager.DepthQuality depthQuality { get; set; }
    float dynamicLightingSmoothFactor { get; set; }
    float dynamicLightingDepthVariationClampingValue { get; set; }

    // Virtual green screen settings
    OVRManager.VirtualGreenScreenType virtualGreenScreenType { get; set; }
    float virtualGreenScreenTopY { get; set; }
    float virtualGreenScreenBottomY { get; set; }
    bool virtualGreenScreenApplyDepthCulling { get; set; }
    float virtualGreenScreenDepthTolerance { get; set; }

    // Activation / camera instantiation
    OVRManager.MrcActivationMode mrcActivationMode { get; set; }
    OVRManager.InstantiateMrcCameraDelegate instantiateMixedRealityCameraGameObject { get; set; }
#endif
}
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
/// <summary>
/// Extension helpers for copying one OVRMixedRealityCaptureConfiguration into another.
/// </summary>
public static class OVRMixedRealityCaptureConfigurationExtensions
{
    /// <summary>
    /// Copies every MRC property from <paramref name="source"/> into <paramref name="dest"/>.
    /// Alias of <see cref="ReadFrom"/> kept for call-site readability.
    /// </summary>
    public static void ApplyTo(this OVRMixedRealityCaptureConfiguration dest, OVRMixedRealityCaptureConfiguration source)
    {
        dest.ReadFrom(source);
    }

    /// <summary>
    /// Copies every MRC property from <paramref name="source"/> into <paramref name="dest"/>.
    /// </summary>
    public static void ReadFrom(this OVRMixedRealityCaptureConfiguration dest, OVRMixedRealityCaptureConfiguration source)
    {
        dest.enableMixedReality = source.enableMixedReality;
        dest.compositionMethod = source.compositionMethod;
        dest.extraHiddenLayers = source.extraHiddenLayers;
        // Fix: extraVisibleLayers and dynamicCullingMask are part of the interface but were
        // previously skipped, leaving the destination partially initialized after a copy.
        dest.extraVisibleLayers = source.extraVisibleLayers;
        dest.dynamicCullingMask = source.dynamicCullingMask;
        dest.externalCompositionBackdropColorRift = source.externalCompositionBackdropColorRift;
        dest.externalCompositionBackdropColorQuest = source.externalCompositionBackdropColorQuest;
        dest.capturingCameraDevice = source.capturingCameraDevice;
        dest.flipCameraFrameHorizontally = source.flipCameraFrameHorizontally;
        dest.flipCameraFrameVertically = source.flipCameraFrameVertically;
        dest.handPoseStateLatency = source.handPoseStateLatency;
        dest.sandwichCompositionRenderLatency = source.sandwichCompositionRenderLatency;
        dest.sandwichCompositionBufferedFrames = source.sandwichCompositionBufferedFrames;
        dest.chromaKeyColor = source.chromaKeyColor;
        dest.chromaKeySimilarity = source.chromaKeySimilarity;
        dest.chromaKeySmoothRange = source.chromaKeySmoothRange;
        dest.chromaKeySpillRange = source.chromaKeySpillRange;
        dest.useDynamicLighting = source.useDynamicLighting;
        dest.depthQuality = source.depthQuality;
        dest.dynamicLightingSmoothFactor = source.dynamicLightingSmoothFactor;
        dest.dynamicLightingDepthVariationClampingValue = source.dynamicLightingDepthVariationClampingValue;
        dest.virtualGreenScreenType = source.virtualGreenScreenType;
        dest.virtualGreenScreenTopY = source.virtualGreenScreenTopY;
        dest.virtualGreenScreenBottomY = source.virtualGreenScreenBottomY;
        dest.virtualGreenScreenApplyDepthCulling = source.virtualGreenScreenApplyDepthCulling;
        dest.virtualGreenScreenDepthTolerance = source.virtualGreenScreenDepthTolerance;
        dest.mrcActivationMode = source.mrcActivationMode;
        dest.instantiateMixedRealityCameraGameObject = source.instantiateMixedRealityCameraGameObject;
    }
}
#endif

View File

@@ -0,0 +1,3 @@
fileFormatVersion: 2
guid: 3daa35105ec74ccb94bc0756d34b2d55
timeCreated: 1605293235

View File

@@ -0,0 +1,124 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if UNITY_ANDROID && !UNITY_EDITOR
#define OVR_ANDROID_MRC
#endif
using UnityEngine;
using System;
using System.IO;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
/// <summary>
/// ScriptableObject that stores the project's Mixed Reality Capture settings and
/// explicitly implements OVRMixedRealityCaptureConfiguration by forwarding to the
/// serialized fields below. On Windows (Rift) builds the settings can additionally be
/// written to / merged from a JSON side-car file ("mrc.config") under Application.dataPath.
/// </summary>
public class OVRMixedRealityCaptureSettings : ScriptableObject, OVRMixedRealityCaptureConfiguration
{
    // Serialized settings. Field names/order matter: JsonUtility maps the config file
    // onto them in CombineWithConfigurationFile().
    public bool enableMixedReality = false;
    public LayerMask extraHiddenLayers;
    public LayerMask extraVisibleLayers;
    public bool dynamicCullingMask = true;
    public OVRManager.CompositionMethod compositionMethod = OVRManager.CompositionMethod.External;
    public Color externalCompositionBackdropColorRift = Color.green;
    public Color externalCompositionBackdropColorQuest = Color.clear;
    public OVRManager.CameraDevice capturingCameraDevice = OVRManager.CameraDevice.WebCamera0;
    public bool flipCameraFrameHorizontally = false;
    public bool flipCameraFrameVertically = false;
    public float handPoseStateLatency = 0.0f;
    public float sandwichCompositionRenderLatency = 0.0f;
    public int sandwichCompositionBufferedFrames = 8;
    public Color chromaKeyColor = Color.green;
    public float chromaKeySimilarity = 0.6f;
    public float chromaKeySmoothRange = 0.03f;
    public float chromaKeySpillRange = 0.04f;
    public bool useDynamicLighting = false;
    public OVRManager.DepthQuality depthQuality = OVRManager.DepthQuality.Medium;
    public float dynamicLightingSmoothFactor = 8.0f;
    public float dynamicLightingDepthVariationClampingValue = 0.001f;
    public OVRManager.VirtualGreenScreenType virtualGreenScreenType = OVRManager.VirtualGreenScreenType.Off;
    public float virtualGreenScreenTopY;
    public float virtualGreenScreenBottomY;
    public bool virtualGreenScreenApplyDepthCulling = false;
    public float virtualGreenScreenDepthTolerance = 0.2f;
    public OVRManager.MrcActivationMode mrcActivationMode;

    // OVRMixedRealityCaptureConfiguration Interface implementation.
    // Explicit implementations forward straight to the serialized fields above, so the
    // interface is only usable through an interface-typed reference.
    bool OVRMixedRealityCaptureConfiguration.enableMixedReality { get { return enableMixedReality; } set { enableMixedReality = value; } }
    LayerMask OVRMixedRealityCaptureConfiguration.extraHiddenLayers { get { return extraHiddenLayers; } set { extraHiddenLayers = value; } }
    LayerMask OVRMixedRealityCaptureConfiguration.extraVisibleLayers { get { return extraVisibleLayers; } set { extraVisibleLayers = value; } }
    bool OVRMixedRealityCaptureConfiguration.dynamicCullingMask { get { return dynamicCullingMask; } set { dynamicCullingMask = value; } }
    OVRManager.CompositionMethod OVRMixedRealityCaptureConfiguration.compositionMethod { get { return compositionMethod; } set { compositionMethod = value; } }
    Color OVRMixedRealityCaptureConfiguration.externalCompositionBackdropColorRift { get { return externalCompositionBackdropColorRift; } set { externalCompositionBackdropColorRift = value; } }
    Color OVRMixedRealityCaptureConfiguration.externalCompositionBackdropColorQuest { get { return externalCompositionBackdropColorQuest; } set { externalCompositionBackdropColorQuest = value; } }
    OVRManager.CameraDevice OVRMixedRealityCaptureConfiguration.capturingCameraDevice { get { return capturingCameraDevice; } set { capturingCameraDevice = value; } }
    bool OVRMixedRealityCaptureConfiguration.flipCameraFrameHorizontally { get { return flipCameraFrameHorizontally; } set { flipCameraFrameHorizontally = value; } }
    bool OVRMixedRealityCaptureConfiguration.flipCameraFrameVertically { get { return flipCameraFrameVertically; } set { flipCameraFrameVertically = value; } }
    float OVRMixedRealityCaptureConfiguration.handPoseStateLatency { get { return handPoseStateLatency; } set { handPoseStateLatency = value; } }
    float OVRMixedRealityCaptureConfiguration.sandwichCompositionRenderLatency { get { return sandwichCompositionRenderLatency; } set { sandwichCompositionRenderLatency = value; } }
    int OVRMixedRealityCaptureConfiguration.sandwichCompositionBufferedFrames { get { return sandwichCompositionBufferedFrames; } set { sandwichCompositionBufferedFrames = value; } }
    Color OVRMixedRealityCaptureConfiguration.chromaKeyColor { get { return chromaKeyColor; } set { chromaKeyColor = value; } }
    float OVRMixedRealityCaptureConfiguration.chromaKeySimilarity { get { return chromaKeySimilarity; } set { chromaKeySimilarity = value; } }
    float OVRMixedRealityCaptureConfiguration.chromaKeySmoothRange { get { return chromaKeySmoothRange; } set { chromaKeySmoothRange = value; } }
    float OVRMixedRealityCaptureConfiguration.chromaKeySpillRange { get { return chromaKeySpillRange; } set { chromaKeySpillRange = value; } }
    bool OVRMixedRealityCaptureConfiguration.useDynamicLighting { get { return useDynamicLighting; } set { useDynamicLighting = value; } }
    OVRManager.DepthQuality OVRMixedRealityCaptureConfiguration.depthQuality { get { return depthQuality; } set { depthQuality = value; } }
    float OVRMixedRealityCaptureConfiguration.dynamicLightingSmoothFactor { get { return dynamicLightingSmoothFactor; } set { dynamicLightingSmoothFactor = value; } }
    float OVRMixedRealityCaptureConfiguration.dynamicLightingDepthVariationClampingValue { get { return dynamicLightingDepthVariationClampingValue; } set { dynamicLightingDepthVariationClampingValue = value; } }
    OVRManager.VirtualGreenScreenType OVRMixedRealityCaptureConfiguration.virtualGreenScreenType { get { return virtualGreenScreenType; } set { virtualGreenScreenType = value; } }
    float OVRMixedRealityCaptureConfiguration.virtualGreenScreenTopY { get { return virtualGreenScreenTopY; } set { virtualGreenScreenTopY = value; } }
    float OVRMixedRealityCaptureConfiguration.virtualGreenScreenBottomY { get { return virtualGreenScreenBottomY; } set { virtualGreenScreenBottomY = value; } }
    bool OVRMixedRealityCaptureConfiguration.virtualGreenScreenApplyDepthCulling { get { return virtualGreenScreenApplyDepthCulling; } set { virtualGreenScreenApplyDepthCulling = value; } }
    float OVRMixedRealityCaptureConfiguration.virtualGreenScreenDepthTolerance { get { return virtualGreenScreenDepthTolerance; } set { virtualGreenScreenDepthTolerance = value; } }
    OVRManager.MrcActivationMode OVRMixedRealityCaptureConfiguration.mrcActivationMode { get { return mrcActivationMode; } set { mrcActivationMode = value; } }
    // NOTE(review): auto-property, not backed by a serialized field — the camera
    // instantiation delegate is runtime-only state and is not persisted to JSON.
    OVRManager.InstantiateMrcCameraDelegate OVRMixedRealityCaptureConfiguration.instantiateMixedRealityCameraGameObject { get; set; }

#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN // Rift MRC only
    // Name of the JSON side-car file, resolved relative to Application.dataPath.
    const string configFileName = "mrc.config";

    /// <summary>
    /// Serializes this object to pretty-printed JSON and writes it to "mrc.config" under
    /// Application.dataPath. Any exception is caught and logged as a warning.
    /// </summary>
    public void WriteToConfigurationFile()
    {
        string text = JsonUtility.ToJson(this, true);
        try
        {
            string configPath = Path.Combine(Application.dataPath, configFileName);
            Debug.Log("Write OVRMixedRealityCaptureSettings to " + configPath);
            File.WriteAllText(configPath, text);
        }
        catch(Exception e)
        {
            Debug.LogWarning("Exception caught " + e.Message);
        }
    }

    /// <summary>
    /// If "mrc.config" exists under Application.dataPath, overwrites the matching fields
    /// on this object with its JSON contents (JsonUtility.FromJsonOverwrite); otherwise
    /// logs that the file is absent. Any exception is caught and logged as a warning.
    /// </summary>
    public void CombineWithConfigurationFile()
    {
        try
        {
            string configPath = Path.Combine(Application.dataPath, configFileName);
            if (File.Exists(configPath))
            {
                Debug.Log("MixedRealityCapture configuration file found at " + configPath);
                string text = File.ReadAllText(configPath);
                Debug.Log("Apply MixedRealityCapture configuration");
                JsonUtility.FromJsonOverwrite(text, this);
            }
            else
            {
                Debug.Log("MixedRealityCapture configuration file doesn't exist at " + configPath);
            }
        }
        catch(Exception e)
        {
            Debug.LogWarning("Exception caught " + e.Message);
        }
    }
#endif
}
#endif

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 99bbd170d56da4248941de890e6d7af5
timeCreated: 1501004238
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,259 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if UNITY_ANDROID && !UNITY_EDITOR
#define OVR_ANDROID_MRC
#endif
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Test behaviour for Mixed Reality Capture: registers <see cref="defaultExternalCamera"/>
/// with OVRPlugin as a manual external camera, then cycles through three camera modes
/// (normal / doubled FOV override / third-person static pose) when OVRInput.Button.One
/// is pressed.
/// </summary>
public class OVRMixedRealityCaptureTest : MonoBehaviour {
    // Set once MRC is available and the manual external camera has been registered.
    bool inited = false;

    // Camera behaviours cycled by the "One" button (Normal -> OverrideFov -> ThirdPerson -> Normal).
    enum CameraMode
    {
        Normal = 0,
        OverrideFov,
        ThirdPerson,
    }
    CameraMode currentMode = CameraMode.Normal;

    // Scene camera whose pose/FOV are pushed to OVRPlugin as the external camera.
    public Camera defaultExternalCamera;
    // FOV reported by OVRPlugin for camera 0 at initialization time; used by OverrideFov mode.
    OVRPlugin.Fovf defaultFov;

    /// <summary>
    /// Validates setup and (on Rift) force-enables mixed reality on the OVRManager.
    /// </summary>
    void Start () {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        if (!defaultExternalCamera)
        {
            Debug.LogWarning("defaultExternalCamera undefined");
        }
#if !OVR_ANDROID_MRC
        // On Quest, we enable MRC automatically through the configuration
        if (!OVRManager.instance.enableMixedReality)
        {
            OVRManager.instance.enableMixedReality = true;
        }
#endif
#endif
    }

    /// <summary>
    /// Deferred initialization: waits until MRC is ready, then registers the manual
    /// external camera and caches its default FOV. Called from Update() until it succeeds.
    /// </summary>
    void Initialize()
    {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        if (inited)
            return;

#if OVR_ANDROID_MRC
        if (!OVRPlugin.Media.GetInitialized())
            return;
#else
        if (!OVRPlugin.IsMixedRealityInitialized())
            return;
#endif

        OVRPlugin.ResetDefaultExternalCamera();
        Debug.LogFormat("GetExternalCameraCount before adding manual external camera {0}", OVRPlugin.GetExternalCameraCount());
        UpdateDefaultExternalCamera();
        Debug.LogFormat("GetExternalCameraCount after adding manual external camera {0}", OVRPlugin.GetExternalCameraCount());

        // obtain default FOV
        {
            OVRPlugin.CameraIntrinsics cameraIntrinsics;
            OVRPlugin.CameraExtrinsics cameraExtrinsics;
            OVRPlugin.GetMixedRealityCameraInfo(0, out cameraExtrinsics, out cameraIntrinsics);
            defaultFov = cameraIntrinsics.FOVPort;
        }

        inited = true;
#endif
    }

    /// <summary>
    /// Pushes defaultExternalCamera's intrinsics (FOV, clip planes, a fixed 1080p sensor
    /// resolution) and extrinsics (pose relative to the tracking space) to OVRPlugin.
    /// </summary>
    void UpdateDefaultExternalCamera()
    {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        // always build a 1080p external camera
        const int cameraPixelWidth = 1920;
        const int cameraPixelHeight = 1080;
        const float cameraAspect = (float)cameraPixelWidth / cameraPixelHeight;

        string cameraName = "UnityExternalCamera";
        OVRPlugin.CameraIntrinsics cameraIntrinsics = new OVRPlugin.CameraIntrinsics();
        OVRPlugin.CameraExtrinsics cameraExtrinsics = new OVRPlugin.CameraExtrinsics();

        // intrinsics: derive symmetric FOV tangents from the Unity camera's vertical FOV.
        cameraIntrinsics.IsValid = OVRPlugin.Bool.True;
        cameraIntrinsics.LastChangedTimeSeconds = Time.time;

        float vFov = defaultExternalCamera.fieldOfView * Mathf.Deg2Rad;
        float hFov = Mathf.Atan(Mathf.Tan(vFov * 0.5f) * cameraAspect) * 2.0f;
        OVRPlugin.Fovf fov = new OVRPlugin.Fovf();
        fov.UpTan = fov.DownTan = Mathf.Tan(vFov * 0.5f);
        fov.LeftTan = fov.RightTan = Mathf.Tan(hFov * 0.5f);
        cameraIntrinsics.FOVPort = fov;
        cameraIntrinsics.VirtualNearPlaneDistanceMeters = defaultExternalCamera.nearClipPlane;
        cameraIntrinsics.VirtualFarPlaneDistanceMeters = defaultExternalCamera.farClipPlane;
        cameraIntrinsics.ImageSensorPixelResolution.w = cameraPixelWidth;
        cameraIntrinsics.ImageSensorPixelResolution.h = cameraPixelHeight;

        // extrinsics: camera pose expressed relative to the rig's tracking space
        // (further converted to stage space on Android MRC).
        cameraExtrinsics.IsValid = OVRPlugin.Bool.True;
        cameraExtrinsics.LastChangedTimeSeconds = Time.time;
        cameraExtrinsics.CameraStatusData = OVRPlugin.CameraStatus.CameraStatus_Calibrated;
        cameraExtrinsics.AttachedToNode = OVRPlugin.Node.None;

        // NOTE(review): assumes Camera.main exists — GetComponentInParent below would
        // throw if no camera is tagged MainCamera. Confirm against scene setup.
        Camera mainCamera = Camera.main;
        OVRCameraRig cameraRig = mainCamera.GetComponentInParent<OVRCameraRig>();
        if (cameraRig)
        {
            Transform trackingSpace = cameraRig.trackingSpace;
            OVRPose trackingSpacePose = trackingSpace.ToOVRPose(false);
            OVRPose cameraPose = defaultExternalCamera.transform.ToOVRPose(false);
            OVRPose relativePose = trackingSpacePose.Inverse() * cameraPose;
#if OVR_ANDROID_MRC
            OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
            OVRPose localToStagePose = stageToLocalPose.Inverse();
            relativePose = localToStagePose * relativePose;
#endif
            cameraExtrinsics.RelativePose = relativePose.ToPosef();
        }
        else
        {
            cameraExtrinsics.RelativePose = OVRPlugin.Posef.identity;
        }

        if (!OVRPlugin.SetDefaultExternalCamera(cameraName, ref cameraIntrinsics, ref cameraExtrinsics))
        {
            Debug.LogError("SetDefaultExternalCamera() failed");
        }
#endif
    }

    /// <summary>
    /// Drives deferred initialization, cycles the camera mode on Button.One, and applies
    /// the FOV/pose overrides appropriate for the current mode every frame.
    /// </summary>
    void Update () {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
        if (!inited)
        {
            Initialize();
            return;
        }

        if (!defaultExternalCamera)
        {
            return;
        }

#if OVR_ANDROID_MRC
        if (!OVRPlugin.Media.GetInitialized())
        {
            return;
        }
#else
        if (!OVRPlugin.IsMixedRealityInitialized())
        {
            return;
        }
#endif

        if (OVRInput.GetDown(OVRInput.Button.One))
        {
            // Cycle Normal -> OverrideFov -> ThirdPerson and back to Normal.
            if (currentMode == CameraMode.ThirdPerson)
            {
                currentMode = CameraMode.Normal;
            }
            else
            {
                currentMode = currentMode + 1;
            }
            Debug.LogFormat("Camera mode change to {0}", currentMode);
        }

        if (currentMode == CameraMode.Normal)
        {
            // Track the scene camera and clear both overrides.
            UpdateDefaultExternalCamera();
            OVRPlugin.OverrideExternalCameraFov(0, false, new OVRPlugin.Fovf());
            OVRPlugin.OverrideExternalCameraStaticPose(0, false, OVRPlugin.Posef.identity);
        }
        else if (currentMode == CameraMode.OverrideFov)
        {
            // Keep the pose but double every FOV tangent of the cached default.
            OVRPlugin.Fovf fov = defaultFov;
            OVRPlugin.Fovf newFov = new OVRPlugin.Fovf();
            newFov.LeftTan = fov.LeftTan * 2.0f;
            newFov.RightTan = fov.RightTan * 2.0f;
            newFov.UpTan = fov.UpTan * 2.0f;
            newFov.DownTan = fov.DownTan * 2.0f;
            OVRPlugin.OverrideExternalCameraFov(0, true, newFov);
            OVRPlugin.OverrideExternalCameraStaticPose(0, false, OVRPlugin.Posef.identity);
            if (!OVRPlugin.GetUseOverriddenExternalCameraFov(0))
            {
                Debug.LogWarning("FOV not overridden");
            }
        }
        else if (currentMode == CameraMode.ThirdPerson)
        {
            // Use the camera attached to this GameObject as a static third-person view.
            Camera camera = GetComponent<Camera>();
            if (camera == null)
            {
                return;
            }

            float vFov = camera.fieldOfView * Mathf.Deg2Rad;
            float hFov = Mathf.Atan(Mathf.Tan(vFov * 0.5f) * camera.aspect) * 2.0f;
            OVRPlugin.Fovf fov = new OVRPlugin.Fovf();
            fov.UpTan = fov.DownTan = Mathf.Tan(vFov * 0.5f);
            fov.LeftTan = fov.RightTan = Mathf.Tan(hFov * 0.5f);
            OVRPlugin.OverrideExternalCameraFov(0, true, fov);

            Camera mainCamera = Camera.main;
            OVRCameraRig cameraRig = mainCamera.GetComponentInParent<OVRCameraRig>();
            if (cameraRig)
            {
                // Convert this transform's pose into stage space before overriding.
                Transform trackingSpace = cameraRig.trackingSpace;
                OVRPose trackingSpacePose = trackingSpace.ToOVRPose(false);
                OVRPose cameraPose = transform.ToOVRPose(false);
                OVRPose relativePose = trackingSpacePose.Inverse() * cameraPose;
                OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
                OVRPose localToStagePose = stageToLocalPose.Inverse();
                OVRPose relativePoseInStage = localToStagePose * relativePose;
                OVRPlugin.Posef relativePosef = relativePoseInStage.ToPosef();
                OVRPlugin.OverrideExternalCameraStaticPose(0, true, relativePosef);
            }
            else
            {
                OVRPlugin.OverrideExternalCameraStaticPose(0, false, OVRPlugin.Posef.identity);
            }
            if (!OVRPlugin.GetUseOverriddenExternalCameraFov(0))
            {
                Debug.LogWarning("FOV not overridden");
            }
            if (!OVRPlugin.GetUseOverriddenExternalCameraStaticPose(0))
            {
                Debug.LogWarning("StaticPose not overridden");
            }
        }
#endif
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 95a5f282b22a9d846bd1a9d2de25079c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,77 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Logs when the application enters power save mode and allows you to a low-power CPU/GPU level with a button press.
/// </summary>
/// <summary>
/// Logs when the application enters power save mode and lets you drop to a
/// low-power CPU/GPU level with a gamepad button press.
/// </summary>
public class OVRModeParms : MonoBehaviour
{
    #region Member Variables

    /// <summary>
    /// The gamepad button that will switch the application to CPU level 0 and GPU level 1.
    /// </summary>
    public OVRInput.RawButton resetButton = OVRInput.RawButton.X;

    #endregion

    /// <summary>
    /// Starts the periodic power-state check, or disables this component when no HMD
    /// is present.
    /// </summary>
    void Start()
    {
        if (!OVRManager.isHmdPresent)
        {
            enabled = false;
            return;
        }

        // Call TestPowerStateMode after 10 seconds and repeat every 10 seconds.
        // Fix: use nameof instead of a magic string so the call survives renames,
        // and correct the old comment that referenced a nonexistent "TestPowerLevelState".
        InvokeRepeating(nameof(TestPowerStateMode), 10, 10.0f);
    }

    /// <summary>
    /// Changes the default VR mode parms (CPU/GPU levels) dynamically on button press.
    /// </summary>
    void Update()
    {
        // NOTE: some of the buttons defined in OVRInput.RawButton are not available
        // on the Android game pad controller.
        if (OVRInput.GetDown(resetButton))
        {
            //*************************
            // Dynamically change VrModeParms cpu and gpu level.
            // NOTE: Reset will cause 1 frame of flicker as it leaves
            // and re-enters Vr mode.
            //*************************
            OVRPlugin.cpuLevel = 0;
            OVRPlugin.gpuLevel = 1;
        }
    }

    /// <summary>
    /// Checks the current power state and logs when the device has been throttled.
    /// Invoked repeatedly via InvokeRepeating.
    /// </summary>
    void TestPowerStateMode()
    {
        if (OVRPlugin.powerSaving)
        {
            // The device has been throttled
            Debug.Log("POWER SAVE MODE ACTIVATED");
        }
    }
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 6a6ae8e8def81df429a8fdfc00f63e5c
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,44 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Allows you to toggle monoscopic rendering with a gamepad button press.
/// </summary>
/// <summary>
/// Allows you to toggle monoscopic rendering with a gamepad button press.
/// </summary>
public class OVRMonoscopic : MonoBehaviour
{
    /// <summary>
    /// The gamepad button that will toggle monoscopic rendering.
    /// </summary>
    public OVRInput.RawButton toggleButton = OVRInput.RawButton.B;

    // Local cache of the current rendering mode; pushed to OVRManager on each toggle.
    private bool monoscopic = false;

    /// <summary>
    /// Polls the toggle button each frame and flips monoscopic rendering when pressed.
    /// See the input mapping setup in the Unity Integration guide.
    /// </summary>
    void Update()
    {
        // NOTE: some of the buttons defined in OVRInput.RawButton are not available
        // on the Android game pad controller.
        if (!OVRInput.GetDown(toggleButton))
        {
            return;
        }

        // Flip the cached state and hand it to the manager.
        monoscopic = !monoscopic;
        OVRManager.instance.monoscopic = monoscopic;
    }
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 06ef2a389c534554c848533f88dbb32c
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,431 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Threading;
using UnityEngine;
using Debug = UnityEngine.Debug;
public class OVRNetwork
{
public const int MaxBufferLength = 65536;
public const int MaxPayloadLength = MaxBufferLength - FrameHeader.StructSize;
public const uint FrameHeaderMagicIdentifier = 0x5283A76B;
/// <summary>
/// Wire header preceding every payload: magic identifier, payload type, payload length.
/// Packed to 1-byte alignment so the managed layout matches the on-wire layout exactly.
/// Endianness conversion is NOT handled since all our current mobile/PC devices are
/// little-endian.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 1)]
struct FrameHeader
{
    public uint protocolIdentifier;
    public int payloadType;
    public int payloadLength;

    // Exact serialized size of this struct (4 + 4 + 4 bytes).
    public const int StructSize = sizeof(uint) + sizeof(int) + sizeof(int);

    /// <summary>Serializes this header into a StructSize-byte array.</summary>
    public byte[] ToBytes()
    {
        int byteCount = Marshal.SizeOf(this);
        Trace.Assert(byteCount == StructSize);

        byte[] encoded = new byte[byteCount];
        IntPtr scratch = Marshal.AllocHGlobal(byteCount);
        Marshal.StructureToPtr(this, scratch, true);
        Marshal.Copy(scratch, encoded, 0, byteCount);
        Marshal.FreeHGlobal(scratch);
        return encoded;
    }

    /// <summary>Deserializes a header from the first StructSize bytes of the array.</summary>
    public static FrameHeader FromBytes(byte[] arr)
    {
        FrameHeader decoded = new FrameHeader();
        int byteCount = Marshal.SizeOf(decoded);
        Trace.Assert(byteCount == StructSize);

        IntPtr scratch = Marshal.AllocHGlobal(byteCount);
        Marshal.Copy(arr, 0, scratch, byteCount);
        decoded = (FrameHeader)Marshal.PtrToStructure(scratch, decoded.GetType());
        Marshal.FreeHGlobal(scratch);
        return decoded;
    }
}
/// <summary>
/// Minimal TCP broadcast server: accepts any number of clients and pushes framed
/// messages (FrameHeader + payload bytes) to every connected client asynchronously.
/// </summary>
public class OVRNetworkTcpServer
{
    public TcpListener tcpListener = null;

    // Guards all access to 'clients'; accept/write callbacks run on thread-pool threads.
    private readonly object clientsLock = new object();
    public readonly List<TcpClient> clients = new List<TcpClient>();

    /// <summary>
    /// Starts listening on the given port and begins accepting clients asynchronously.
    /// Logs a warning and leaves the server stopped if a listener is already active or
    /// the port cannot be bound.
    /// </summary>
    public void StartListening(int listeningPort)
    {
        if (tcpListener != null)
        {
            Debug.LogWarning("[OVRNetworkTcpServer] tcpListener is not null");
            return;
        }

        IPAddress localAddr = IPAddress.Any;
        tcpListener = new TcpListener(localAddr, listeningPort);
        try
        {
            tcpListener.Start();
            Debug.LogFormat("TcpListener started. Local endpoint: {0}", tcpListener.LocalEndpoint.ToString());
        }
        catch (SocketException e)
        {
            // Fix: corrected "Unsable" typo in the warning text.
            Debug.LogWarningFormat("[OVRNetworkTcpServer] Unable to start TcpListener. Socket exception: {0}", e.Message);
            Debug.LogWarning("It could be caused by multiple instances listening at the same port, or the port is forwarded to the Android device through ADB");
            Debug.LogWarning("If the port is forwarded through ADB, use the Android Tools in Tools/Oculus/System Metrics Profiler to kill the server");
            tcpListener = null;
        }

        if (tcpListener != null)
        {
            Debug.LogFormat("[OVRNetworkTcpServer] Start Listening on port {0}", listeningPort);
            try
            {
                tcpListener.BeginAcceptTcpClient(new AsyncCallback(DoAcceptTcpClientCallback), tcpListener);
            }
            catch (Exception e)
            {
                Debug.LogWarningFormat("[OVRNetworkTcpServer] can't accept new client: {0}", e.Message);
            }
        }
    }

    /// <summary>
    /// Stops the listener and drops all clients. Each client socket is closed first so
    /// the connections do not leak (previously they were only removed from the list).
    /// </summary>
    public void StopListening()
    {
        if (tcpListener == null)
        {
            Debug.LogWarning("[OVRNetworkTcpServer] tcpListener is null");
            return;
        }

        lock (clientsLock)
        {
            // Fix: close each client's connection instead of silently leaking the sockets.
            foreach (TcpClient client in clients)
            {
                try
                {
                    client.Close();
                }
                catch (Exception e)
                {
                    Debug.LogWarning("[OVRNetworkTcpServer] " + e.Message);
                }
            }
            clients.Clear();
        }

        tcpListener.Stop();
        tcpListener = null;
        Debug.Log("[OVRNetworkTcpServer] Stopped listening");
    }

    // Accept callback: registers the new client and immediately re-arms the accept.
    private void DoAcceptTcpClientCallback(IAsyncResult ar)
    {
        TcpListener listener = ar.AsyncState as TcpListener;
        try
        {
            TcpClient client = listener.EndAcceptTcpClient(ar);
            lock (clientsLock)
            {
                clients.Add(client);
                Debug.Log("[OVRNetworkTcpServer] client added");
            }
            try
            {
                tcpListener.BeginAcceptTcpClient(new AsyncCallback(DoAcceptTcpClientCallback), tcpListener);
            }
            catch (Exception e)
            {
                Debug.LogWarningFormat("[OVRNetworkTcpServer] can't accept new client: {0}", e.Message);
            }
        }
        catch (ObjectDisposedException)
        {
            // Do nothing. It happens when stop preview in editor, which is normal behavior.
        }
        catch (Exception e)
        {
            Debug.LogWarningFormat("[OVRNetworkTcpServer] EndAcceptTcpClient failed: {0}", e.Message);
        }
    }

    /// <summary>Returns true if at least one accepted client is still connected.</summary>
    public bool HasConnectedClient()
    {
        lock (clientsLock)
        {
            foreach (TcpClient client in clients)
            {
                if (client.Connected)
                {
                    return true;
                }
            }
        }
        return false;
    }

    /// <summary>
    /// Frames the payload (FrameHeader + bytes) and asynchronously writes it to every
    /// connected client. Payloads larger than OVRNetwork.MaxPayloadLength are dropped.
    /// </summary>
    public void Broadcast(int payloadType, byte[] payload)
    {
        if (payload.Length > OVRNetwork.MaxPayloadLength)
        {
            Debug.LogWarningFormat("[OVRNetworkTcpServer] drop payload because it's too long: {0} bytes", payload.Length);
            // Fix: actually drop the oversized payload — previously the warning claimed
            // the payload was dropped but it was still broadcast, overflowing the
            // receiver's fixed MaxBufferLength buffer.
            return;
        }

        FrameHeader header = new FrameHeader();
        header.protocolIdentifier = FrameHeaderMagicIdentifier;
        header.payloadType = payloadType;
        header.payloadLength = payload.Length;

        byte[] headerBuffer = header.ToBytes();
        byte[] dataBuffer = new byte[headerBuffer.Length + payload.Length];
        headerBuffer.CopyTo(dataBuffer, 0);
        payload.CopyTo(dataBuffer, headerBuffer.Length);

        lock (clientsLock)
        {
            foreach (TcpClient client in clients)
            {
                if (client.Connected)
                {
                    try
                    {
                        client.GetStream().BeginWrite(dataBuffer, 0, dataBuffer.Length, new AsyncCallback(DoWriteDataCallback), client.GetStream());
                    }
                    catch (SocketException e)
                    {
                        Debug.LogWarningFormat("[OVRNetworkTcpServer] close client because of socket error: {0}", e.Message);
                        client.GetStream().Close();
                        client.Close();
                    }
                }
            }
        }
    }

    // Completion callback for BeginWrite.
    private void DoWriteDataCallback(IAsyncResult ar)
    {
        NetworkStream stream = ar.AsyncState as NetworkStream;
        stream.EndWrite(ar);
    }
}
public class OVRNetworkTcpClient
{
public Action connectionStateChangedCallback;
public Action<int, byte[], int, int> payloadReceivedCallback;
public enum ConnectionState
{
Disconnected,
Connected,
Connecting
}
public ConnectionState connectionState
{
get
{
if (tcpClient == null)
{
return ConnectionState.Disconnected;
}
else
{
if (tcpClient.Connected)
{
return ConnectionState.Connected;
}
else
{
return ConnectionState.Connecting;
}
}
}
}
public bool Connected
{
get
{
return connectionState == ConnectionState.Connected;
}
}
TcpClient tcpClient = null;
byte[][] receivedBuffers = { new byte[OVRNetwork.MaxBufferLength], new byte[OVRNetwork.MaxBufferLength] };
int receivedBufferIndex = 0;
int receivedBufferDataSize = 0;
ManualResetEvent readyReceiveDataEvent = new ManualResetEvent(true);
public void Connect(int listeningPort)
{
if (tcpClient == null)
{
receivedBufferIndex = 0;
receivedBufferDataSize = 0;
readyReceiveDataEvent.Set();
string remoteAddress = "127.0.0.1";
tcpClient = new TcpClient(AddressFamily.InterNetwork);
tcpClient.BeginConnect(remoteAddress, listeningPort, new AsyncCallback(ConnectCallback), tcpClient);
if (connectionStateChangedCallback != null)
{
connectionStateChangedCallback();
}
}
else
{
Debug.LogWarning("[OVRNetworkTcpClient] already connected");
}
}
void ConnectCallback(IAsyncResult ar)
{
try
{
TcpClient client = ar.AsyncState as TcpClient;
client.EndConnect(ar);
Debug.LogFormat("[OVRNetworkTcpClient] connected to {0}", client.ToString());
}
catch (Exception e)
{
Debug.LogWarningFormat("[OVRNetworkTcpClient] connect error {0}", e.Message);
}
if (connectionStateChangedCallback != null)
{
connectionStateChangedCallback();
}
}
public void Disconnect()
{
if (tcpClient != null)
{
if (!readyReceiveDataEvent.WaitOne(5))
{
Debug.LogWarning("[OVRNetworkTcpClient] readyReceiveDataEvent not signaled. data receiving timeout?");
}
Debug.Log("[OVRNetworkTcpClient] close tcpClient");
try
{
tcpClient.GetStream().Close();
tcpClient.Close();
}
catch (Exception e)
{
Debug.LogWarning("[OVRNetworkTcpClient] " + e.Message);
}
tcpClient = null;
if (connectionStateChangedCallback != null)
{
connectionStateChangedCallback();
}
}
else
{
Debug.LogWarning("[OVRNetworkTcpClient] not connected");
}
}
/// <summary>
/// Per-frame pump: when connected, no read is pending, and data is available,
/// kicks off an async read into the remaining space of the active buffer.
/// </summary>
public void Tick()
{
    if (tcpClient == null || !tcpClient.Connected)
    {
        return;
    }

    // Only start a new read once the previous one has completed.
    if (readyReceiveDataEvent.WaitOne(TimeSpan.Zero))
    {
        if (tcpClient.GetStream().DataAvailable)
        {
            if (receivedBufferDataSize >= OVRNetwork.MaxBufferLength)
            {
                Debug.LogWarning("[OVRNetworkTcpClient] receive buffer overflow. It should not happen since we have the constraint on message size");
                Disconnect();
                return;
            }
            readyReceiveDataEvent.Reset();
            // Fixed: the read cap was computed from OVRSystemPerfMetrics.MaxBufferLength
            // while the buffers are allocated with OVRNetwork.MaxBufferLength. Size the
            // cap from the actual array so an overrun is impossible if the constants diverge.
            int maximumDataSize = receivedBuffers[receivedBufferIndex].Length - receivedBufferDataSize;
            tcpClient.GetStream().BeginRead(receivedBuffers[receivedBufferIndex], receivedBufferDataSize, maximumDataSize, new AsyncCallback(OnReadDataCallback), tcpClient.GetStream());
        }
    }
}
/// <summary>
/// Completion callback for BeginRead: appends the received bytes, extracts every
/// complete frame from the buffer (validating the header magic and payload length),
/// dispatches payloads to payloadReceivedCallback, and re-arms the read event.
/// Disconnects on a malformed header or socket error.
/// </summary>
void OnReadDataCallback(IAsyncResult ar)
{
    NetworkStream stream = ar.AsyncState as NetworkStream;
    try
    {
        int numBytes = stream.EndRead(ar);
        receivedBufferDataSize += numBytes;

        // Extract as many complete frames as the buffer currently holds.
        while (receivedBufferDataSize >= FrameHeader.StructSize)
        {
            FrameHeader header = FrameHeader.FromBytes(receivedBuffers[receivedBufferIndex]);
            if (header.protocolIdentifier != OVRNetwork.FrameHeaderMagicIdentifier)
            {
                Debug.LogWarning("[OVRNetworkTcpClient] header mismatch");
                Disconnect();
                return;
            }

            if (header.payloadLength < 0 || header.payloadLength > OVRNetwork.MaxPayloadLength)
            {
                // Fixed: the format string used printf-style "%d", which .NET composite
                // formatting does not substitute, so the offending length was never printed.
                Debug.LogWarningFormat("[OVRNetworkTcpClient] Sanity check failed. PayloadLength {0}", header.payloadLength);
                Disconnect();
                return;
            }

            if (receivedBufferDataSize < FrameHeader.StructSize + header.payloadLength)
            {
                // Fixed: only part of the payload has arrived. The original code spun
                // forever here (the loop condition stayed true but the body changed
                // nothing); break and wait for the next read to complete the frame.
                break;
            }

            if (payloadReceivedCallback != null)
            {
                payloadReceivedCallback(header.payloadType, receivedBuffers[receivedBufferIndex], FrameHeader.StructSize, header.payloadLength);
            }

            // Swap receive buffers, carrying any bytes of the next frame over
            // to the start of the other buffer.
            int newBufferIndex = 1 - receivedBufferIndex;
            int newBufferDataSize = receivedBufferDataSize - (FrameHeader.StructSize + header.payloadLength);
            if (newBufferDataSize > 0)
            {
                Array.Copy(receivedBuffers[receivedBufferIndex], (FrameHeader.StructSize + header.payloadLength), receivedBuffers[newBufferIndex], 0, newBufferDataSize);
            }
            receivedBufferIndex = newBufferIndex;
            receivedBufferDataSize = newBufferDataSize;
        }
        readyReceiveDataEvent.Set();
    }
    catch (SocketException e)
    {
        Debug.LogErrorFormat("[OVRNetworkTcpClient] OnReadDataCallback: socket error: {0}", e.Message);
        Disconnect();
    }
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: df7e9afcfd21ebd44951ca0eeb5cd692
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,175 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System.Collections.Generic;
namespace UnityEngine.EventSystems
{
    /// <summary>
    /// Simple event system using physics raycasts. Very closely based on UnityEngine.EventSystems.PhysicsRaycaster
    /// </summary>
    [RequireComponent(typeof(OVRCameraRig))]
    public class OVRPhysicsRaycaster : BaseRaycaster
    {
        /// <summary>
        /// Const to use for clarity when no event mask is set
        /// </summary>
        protected const int kNoEventMaskSet = -1;

        /// <summary>
        /// Layer mask used to filter events. Always combined with the camera's culling mask if a camera is used.
        /// </summary>
        [SerializeField]
        protected LayerMask m_EventMask = kNoEventMaskSet;

        protected OVRPhysicsRaycaster()
        { }

        // The left-eye camera of the attached rig serves as the event camera.
        public override Camera eventCamera
        {
            get
            {
                return GetComponent<OVRCameraRig>().leftEyeCamera;
            }
        }

        /// <summary>
        /// Depth used to determine the order of event processing.
        /// </summary>
        public virtual int depth
        {
            get { return (eventCamera != null) ? (int)eventCamera.depth : 0xFFFFFF; }
        }

        public int sortOrder = 0;
        public override int sortOrderPriority
        {
            get
            {
                return sortOrder;
            }
        }

        /// <summary>
        /// Event mask used to determine which objects will receive events.
        /// </summary>
        public int finalEventMask
        {
            get { return (eventCamera != null) ? eventCamera.cullingMask & m_EventMask : kNoEventMaskSet; }
        }

        /// <summary>
        /// Layer mask used to filter events. Always combined with the camera's culling mask if a camera is used.
        /// </summary>
        public LayerMask eventMask
        {
            get { return m_EventMask; }
            set { m_EventMask = value; }
        }

        /// <summary>
        /// Perform a raycast using the worldSpaceRay in eventData.
        /// </summary>
        /// <param name="eventData"></param>
        /// <param name="resultAppendList"></param>
        public override void Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList)
        {
            // This function is closely based on PhysicsRaycaster.Raycast
            if (eventCamera == null)
                return;

            if (!eventData.IsVRPointer())
                return;

            var ray = eventData.GetRay();

            float dist = eventCamera.farClipPlane - eventCamera.nearClipPlane;

            var hits = Physics.RaycastAll(ray, dist, finalEventMask);

            if (hits.Length > 1)
                System.Array.Sort(hits, (r1, r2) => r1.distance.CompareTo(r2.distance));

            if (hits.Length != 0)
            {
                for (int b = 0, bmax = hits.Length; b < bmax; ++b)
                {
                    var result = new RaycastResult
                    {
                        gameObject = hits[b].collider.gameObject,
                        module = this,
                        distance = hits[b].distance,
                        index = resultAppendList.Count,
                        // Fixed: use the current hit's point/normal. The original copied
                        // hits[0] for every result, so every result after the first carried
                        // the closest hit's position and normal instead of its own.
                        worldPosition = hits[b].point,
                        worldNormal = hits[b].normal,
                    };
                    resultAppendList.Add(result);
                }
            }
        }

        /// <summary>
        /// Perform a Spherecast using the worldSpaceRay in eventData.
        /// </summary>
        /// <param name="eventData"></param>
        /// <param name="resultAppendList"></param>
        /// <param name="radius">Radius of the sphere</param>
        public void Spherecast(PointerEventData eventData, List<RaycastResult> resultAppendList, float radius)
        {
            if (eventCamera == null)
                return;

            if (!eventData.IsVRPointer())
                return;

            var ray = eventData.GetRay();

            float dist = eventCamera.farClipPlane - eventCamera.nearClipPlane;

            var hits = Physics.SphereCastAll(ray, radius, dist, finalEventMask);

            if (hits.Length > 1)
                System.Array.Sort(hits, (r1, r2) => r1.distance.CompareTo(r2.distance));

            if (hits.Length != 0)
            {
                for (int b = 0, bmax = hits.Length; b < bmax; ++b)
                {
                    var result = new RaycastResult
                    {
                        gameObject = hits[b].collider.gameObject,
                        module = this,
                        distance = hits[b].distance,
                        index = resultAppendList.Count,
                        // Fixed: per-hit point/normal (was hits[0] — see Raycast above).
                        worldPosition = hits[b].point,
                        worldNormal = hits[b].normal,
                    };
                    resultAppendList.Add(result);
                }
            }
        }

        /// <summary>
        /// Get screen position of this world position as seen by the event camera of this OVRPhysicsRaycaster
        /// </summary>
        /// <param name="worldPosition"></param>
        /// <returns></returns>
        public Vector2 GetScreenPos(Vector3 worldPosition)
        {
            // In future versions of Unity RaycastResult will contain screenPosition so this will not be necessary
            return eventCamera.WorldToScreenPoint(worldPosition);
        }
    }
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: f8e7ff1cdf4c4e74db00c3684108bc9a
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,636 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System;
using UnityEngine;
/// <summary>
/// Controls the player's movement in virtual reality.
/// </summary>
[RequireComponent(typeof(CharacterController))]
public class OVRPlayerController : MonoBehaviour
{
    /// <summary>
    /// The rate acceleration during movement.
    /// </summary>
    public float Acceleration = 0.1f;

    /// <summary>
    /// The rate of damping on movement.
    /// </summary>
    public float Damping = 0.3f;

    /// <summary>
    /// The rate of additional damping when moving sideways or backwards.
    /// </summary>
    public float BackAndSideDampen = 0.5f;

    /// <summary>
    /// The force applied to the character when jumping.
    /// </summary>
    public float JumpForce = 0.3f;

    /// <summary>
    /// The rate of rotation when using a gamepad.
    /// </summary>
    public float RotationAmount = 1.5f;

    /// <summary>
    /// The rate of rotation when using the keyboard.
    /// </summary>
    public float RotationRatchet = 45.0f;

    /// <summary>
    /// The player will rotate in fixed steps if Snap Rotation is enabled.
    /// </summary>
    [Tooltip("The player will rotate in fixed steps if Snap Rotation is enabled.")]
    public bool SnapRotation = true;

    /// <summary>
    /// [Deprecated] When enabled, snap rotation will happen about the guardian rather
    /// than the player/camera viewpoint.
    /// </summary>
    [Tooltip("[Deprecated] When enabled, snap rotation will happen about the center of the " +
        "guardian rather than the center of the player/camera viewpoint. This (legacy) " +
        "option should be left off except for edge cases that require extreme behavioral " +
        "backwards compatibility.")]
    public bool RotateAroundGuardianCenter = false;

    /// <summary>
    /// How many fixed speeds to use with linear movement? 0=linear control
    /// </summary>
    [Tooltip("How many fixed speeds to use with linear movement? 0=linear control")]
    public int FixedSpeedSteps;

    /// <summary>
    /// If true, reset the initial yaw of the player controller when the Hmd pose is recentered.
    /// </summary>
    public bool HmdResetsY = true;

    /// <summary>
    /// If true, tracking data from a child OVRCameraRig will update the direction of movement.
    /// </summary>
    public bool HmdRotatesY = true;

    /// <summary>
    /// Modifies the strength of gravity.
    /// </summary>
    public float GravityModifier = 0.379f;

    /// <summary>
    /// If true, each OVRPlayerController will use the player's physical height.
    /// </summary>
    public bool useProfileData = true;

    /// <summary>
    /// The CameraHeight is the actual height of the HMD and can be used to adjust the height of the character controller, which will affect the
    /// ability of the character to move into areas with a low ceiling.
    /// </summary>
    [NonSerialized]
    public float CameraHeight;

    /// <summary>
    /// This event is raised after the character controller is moved. This is used by the OVRAvatarLocomotion script to keep the avatar transform synchronized
    /// with the OVRPlayerController.
    /// </summary>
    public event Action<Transform> TransformUpdated;

    /// <summary>
    /// This bool is set to true whenever the player controller has been teleported. It is reset after every frame. Some systems, such as
    /// CharacterCameraConstraint, test this boolean in order to disable logic that moves the character controller immediately
    /// following the teleport.
    /// </summary>
    [NonSerialized] // This doesn't need to be visible in the inspector.
    public bool Teleported;

    /// <summary>
    /// This event is raised immediately after the camera transform has been updated, but before movement is updated.
    /// </summary>
    public event Action CameraUpdated;

    /// <summary>
    /// This event is raised right before the character controller is actually moved in order to provide other systems the opportunity to
    /// move the character controller in response to things other than user input, such as movement of the HMD. See CharacterCameraConstraint.cs
    /// for an example of this.
    /// </summary>
    public event Action PreCharacterMove;

    /// <summary>
    /// When true, user input will be applied to linear movement. Set this to false whenever the player controller needs to ignore input for
    /// linear movement.
    /// </summary>
    public bool EnableLinearMovement = true;

    /// <summary>
    /// When true, user input will be applied to rotation. Set this to false whenever the player controller needs to ignore input for rotation.
    /// </summary>
    public bool EnableRotation = true;

    /// <summary>
    /// Rotation defaults to secondary thumbstick. You can allow either here. Note that this won't behave well if EnableLinearMovement is true.
    /// </summary>
    public bool RotationEitherThumbstick = false;

    protected CharacterController Controller = null;
    protected OVRCameraRig CameraRig = null;

    private float MoveScale = 1.0f;
    private Vector3 MoveThrottle = Vector3.zero;
    private float FallSpeed = 0.0f;
    private OVRPose? InitialPose;
    public float InitialYRotation { get; private set; }
    private float MoveScaleMultiplier = 1.0f;
    private float RotationScaleMultiplier = 1.0f;
    private bool SkipMouseRotation = true; // It is rare to want to use mouse movement in VR, so ignore the mouse by default.
    private bool HaltUpdateMovement = false;
    private bool prevHatLeft = false;
    private bool prevHatRight = false;
    private float SimulationRate = 60f;
    private float buttonRotation = 0f;
    private bool ReadyToSnapTurn; // Set to true when a snap turn has occurred, code requires one frame of centered thumbstick to enable another snap turn.
    private bool playerControllerEnabled = false;

    void Start()
    {
        // Add eye-depth as a camera offset from the player controller.
        // Fixed: guard against a missing OVRCameraRig. Awake() only logs a warning
        // when no rig (or more than one) is attached, so dereferencing CameraRig
        // here unconditionally would throw a NullReferenceException.
        if (CameraRig != null)
        {
            var p = CameraRig.transform.localPosition;
            p.z = OVRManager.profile.eyeDepth;
            CameraRig.transform.localPosition = p;
        }
    }

    void Awake()
    {
        Controller = gameObject.GetComponent<CharacterController>();

        if (Controller == null)
            Debug.LogWarning("OVRPlayerController: No CharacterController attached.");

        // We use OVRCameraRig to set rotations to cameras,
        // and to be influenced by rotation
        OVRCameraRig[] CameraRigs = gameObject.GetComponentsInChildren<OVRCameraRig>();

        if (CameraRigs.Length == 0)
            Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached.");
        else if (CameraRigs.Length > 1)
            Debug.LogWarning("OVRPlayerController: More then 1 OVRCameraRig attached.");
        else
            CameraRig = CameraRigs[0];

        InitialYRotation = transform.rotation.eulerAngles.y;
    }

    void OnEnable()
    {
    }

    void OnDisable()
    {
        // Undo the event subscriptions made lazily in Update().
        if (playerControllerEnabled)
        {
            OVRManager.display.RecenteredPose -= ResetOrientation;

            if (CameraRig != null)
            {
                CameraRig.UpdatedAnchors -= UpdateTransform;
            }
            playerControllerEnabled = false;
        }
    }

    void Update()
    {
        // Event subscription is deferred until OVRManager has finished initializing.
        if (!playerControllerEnabled)
        {
            if (OVRManager.OVRManagerinitialized)
            {
                OVRManager.display.RecenteredPose += ResetOrientation;

                if (CameraRig != null)
                {
                    CameraRig.UpdatedAnchors += UpdateTransform;
                }
                playerControllerEnabled = true;
            }
            else
                return;
        }

        //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
        //Use keys to ratchet rotation
        if (Input.GetKeyDown(KeyCode.Q))
            buttonRotation -= RotationRatchet;

        if (Input.GetKeyDown(KeyCode.E))
            buttonRotation += RotationRatchet;
#endif
    }

    protected virtual void UpdateController()
    {
        if (useProfileData)
        {
            if (InitialPose == null)
            {
                // Save the initial pose so it can be recovered if useProfileData
                // is turned off later.
                InitialPose = new OVRPose()
                {
                    position = CameraRig.transform.localPosition,
                    orientation = CameraRig.transform.localRotation
                };
            }

            var p = CameraRig.transform.localPosition;
            if (OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel)
            {
                p.y = OVRManager.profile.eyeHeight - (0.5f * Controller.height) + Controller.center.y;
            }
            else if (OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.FloorLevel)
            {
                p.y = -(0.5f * Controller.height) + Controller.center.y;
            }
            CameraRig.transform.localPosition = p;
        }
        else if (InitialPose != null)
        {
            // Return to the initial pose if useProfileData was turned off at runtime
            CameraRig.transform.localPosition = InitialPose.Value.position;
            CameraRig.transform.localRotation = InitialPose.Value.orientation;
            InitialPose = null;
        }

        CameraHeight = CameraRig.centerEyeAnchor.localPosition.y;

        if (CameraUpdated != null)
        {
            CameraUpdated();
        }

        UpdateMovement();

        Vector3 moveDirection = Vector3.zero;

        float motorDamp = (1.0f + (Damping * SimulationRate * Time.deltaTime));

        MoveThrottle.x /= motorDamp;
        MoveThrottle.y = (MoveThrottle.y > 0.0f) ? (MoveThrottle.y / motorDamp) : MoveThrottle.y;
        MoveThrottle.z /= motorDamp;

        moveDirection += MoveThrottle * SimulationRate * Time.deltaTime;

        // Gravity
        if (Controller.isGrounded && FallSpeed <= 0)
            FallSpeed = ((Physics.gravity.y * (GravityModifier * 0.002f)));
        else
            FallSpeed += ((Physics.gravity.y * (GravityModifier * 0.002f)) * SimulationRate * Time.deltaTime);

        moveDirection.y += FallSpeed * SimulationRate * Time.deltaTime;

        if (Controller.isGrounded && MoveThrottle.y <= transform.lossyScale.y * 0.001f)
        {
            // Offset correction for uneven ground
            float bumpUpOffset = Mathf.Max(Controller.stepOffset, new Vector3(moveDirection.x, 0, moveDirection.z).magnitude);
            moveDirection -= bumpUpOffset * Vector3.up;
        }

        if (PreCharacterMove != null)
        {
            PreCharacterMove();
            Teleported = false;
        }

        Vector3 predictedXZ = Vector3.Scale((Controller.transform.localPosition + moveDirection), new Vector3(1, 0, 1));

        // Move controller
        Controller.Move(moveDirection);
        Vector3 actualXZ = Vector3.Scale(Controller.transform.localPosition, new Vector3(1, 0, 1));

        // Cancel out any throttle the physics world rejected (e.g. hitting a wall).
        if (predictedXZ != actualXZ)
            MoveThrottle += (actualXZ - predictedXZ) / (SimulationRate * Time.deltaTime);
    }

    public virtual void UpdateMovement()
    {
        //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
        if (HaltUpdateMovement)
            return;

        if (EnableLinearMovement)
        {
            bool moveForward = Input.GetKey(KeyCode.W) || Input.GetKey(KeyCode.UpArrow);
            bool moveLeft = Input.GetKey(KeyCode.A) || Input.GetKey(KeyCode.LeftArrow);
            bool moveRight = Input.GetKey(KeyCode.D) || Input.GetKey(KeyCode.RightArrow);
            bool moveBack = Input.GetKey(KeyCode.S) || Input.GetKey(KeyCode.DownArrow);

            bool dpad_move = false;

            if (OVRInput.Get(OVRInput.Button.DpadUp))
            {
                moveForward = true;
                dpad_move = true;
            }

            if (OVRInput.Get(OVRInput.Button.DpadDown))
            {
                moveBack = true;
                dpad_move = true;
            }

            MoveScale = 1.0f;

            // Normalize diagonal movement (1/sqrt(2)).
            if ((moveForward && moveLeft) || (moveForward && moveRight) ||
                (moveBack && moveLeft) || (moveBack && moveRight))
                MoveScale = 0.70710678f;

            // No positional movement if we are in the air
            if (!Controller.isGrounded)
                MoveScale = 0.0f;

            MoveScale *= SimulationRate * Time.deltaTime;

            // Compute this for key movement
            float moveInfluence = Acceleration * 0.1f * MoveScale * MoveScaleMultiplier;

            // Run!
            if (dpad_move || Input.GetKey(KeyCode.LeftShift) || Input.GetKey(KeyCode.RightShift))
                moveInfluence *= 2.0f;

            Quaternion ort = transform.rotation;
            Vector3 ortEuler = ort.eulerAngles;
            ortEuler.z = ortEuler.x = 0f;
            ort = Quaternion.Euler(ortEuler);

            if (moveForward)
                MoveThrottle += ort * (transform.lossyScale.z * moveInfluence * Vector3.forward);
            if (moveBack)
                MoveThrottle += ort * (transform.lossyScale.z * moveInfluence * BackAndSideDampen * Vector3.back);
            if (moveLeft)
                MoveThrottle += ort * (transform.lossyScale.x * moveInfluence * BackAndSideDampen * Vector3.left);
            if (moveRight)
                MoveThrottle += ort * (transform.lossyScale.x * moveInfluence * BackAndSideDampen * Vector3.right);

            moveInfluence = Acceleration * 0.1f * MoveScale * MoveScaleMultiplier;

#if !UNITY_ANDROID // LeftTrigger not avail on Android game pad
            moveInfluence *= 1.0f + OVRInput.Get(OVRInput.Axis1D.PrimaryIndexTrigger);
#endif

            Vector2 primaryAxis = OVRInput.Get(OVRInput.Axis2D.PrimaryThumbstick);

            // If speed quantization is enabled, adjust the input to the number of fixed speed steps.
            if (FixedSpeedSteps > 0)
            {
                primaryAxis.y = Mathf.Round(primaryAxis.y * FixedSpeedSteps) / FixedSpeedSteps;
                primaryAxis.x = Mathf.Round(primaryAxis.x * FixedSpeedSteps) / FixedSpeedSteps;
            }

            if (primaryAxis.y > 0.0f)
                MoveThrottle += ort * (primaryAxis.y * transform.lossyScale.z * moveInfluence * Vector3.forward);

            if (primaryAxis.y < 0.0f)
                MoveThrottle += ort * (Mathf.Abs(primaryAxis.y) * transform.lossyScale.z * moveInfluence *
                    BackAndSideDampen * Vector3.back);

            if (primaryAxis.x < 0.0f)
                MoveThrottle += ort * (Mathf.Abs(primaryAxis.x) * transform.lossyScale.x * moveInfluence *
                    BackAndSideDampen * Vector3.left);

            if (primaryAxis.x > 0.0f)
                MoveThrottle += ort * (primaryAxis.x * transform.lossyScale.x * moveInfluence * BackAndSideDampen *
                    Vector3.right);
        }

        if (EnableRotation)
        {
            Vector3 euler = RotateAroundGuardianCenter ? transform.rotation.eulerAngles : Vector3.zero;
            float rotateInfluence = SimulationRate * Time.deltaTime * RotationAmount * RotationScaleMultiplier;

            bool curHatLeft = OVRInput.Get(OVRInput.Button.PrimaryShoulder);

            if (curHatLeft && !prevHatLeft)
                euler.y -= RotationRatchet;

            prevHatLeft = curHatLeft;

            bool curHatRight = OVRInput.Get(OVRInput.Button.SecondaryShoulder);

            if (curHatRight && !prevHatRight)
                euler.y += RotationRatchet;

            prevHatRight = curHatRight;

            euler.y += buttonRotation;
            buttonRotation = 0f;

#if !UNITY_ANDROID || UNITY_EDITOR
            if (!SkipMouseRotation)
                euler.y += Input.GetAxis("Mouse X") * rotateInfluence * 3.25f;
#endif

            if (SnapRotation)
            {
                if (OVRInput.Get(OVRInput.Button.SecondaryThumbstickLeft) ||
                    (RotationEitherThumbstick && OVRInput.Get(OVRInput.Button.PrimaryThumbstickLeft)))
                {
                    if (ReadyToSnapTurn)
                    {
                        euler.y -= RotationRatchet;
                        ReadyToSnapTurn = false;
                    }
                }
                else if (OVRInput.Get(OVRInput.Button.SecondaryThumbstickRight) ||
                    (RotationEitherThumbstick && OVRInput.Get(OVRInput.Button.PrimaryThumbstickRight)))
                {
                    if (ReadyToSnapTurn)
                    {
                        euler.y += RotationRatchet;
                        ReadyToSnapTurn = false;
                    }
                }
                else
                {
                    // Require one frame of centered thumbstick between snap turns.
                    ReadyToSnapTurn = true;
                }
            }
            else
            {
                Vector2 secondaryAxis = OVRInput.Get(OVRInput.Axis2D.SecondaryThumbstick);
                if (RotationEitherThumbstick)
                {
                    Vector2 altSecondaryAxis = OVRInput.Get(OVRInput.Axis2D.PrimaryThumbstick);
                    if (secondaryAxis.sqrMagnitude < altSecondaryAxis.sqrMagnitude)
                    {
                        secondaryAxis = altSecondaryAxis;
                    }
                }
                euler.y += secondaryAxis.x * rotateInfluence;
            }

            if (RotateAroundGuardianCenter)
            {
                transform.rotation = Quaternion.Euler(euler);
            }
            else
            {
                transform.RotateAround(CameraRig.centerEyeAnchor.position, Vector3.up, euler.y);
            }
        }
#endif
    }

    /// <summary>
    /// Invoked by OVRCameraRig's UpdatedAnchors callback. Allows the Hmd rotation to update the facing direction of the player.
    /// </summary>
    public void UpdateTransform(OVRCameraRig rig)
    {
        Transform root = CameraRig.trackingSpace;
        Transform centerEye = CameraRig.centerEyeAnchor;

        if (HmdRotatesY && !Teleported)
        {
            // Rotate the body to the HMD's yaw without disturbing the tracking space.
            Vector3 prevPos = root.position;
            Quaternion prevRot = root.rotation;

            transform.rotation = Quaternion.Euler(0.0f, centerEye.rotation.eulerAngles.y, 0.0f);

            root.position = prevPos;
            root.rotation = prevRot;
        }

        UpdateController();
        if (TransformUpdated != null)
        {
            TransformUpdated(root);
        }
    }

    /// <summary>
    /// Jump! Must be enabled manually.
    /// </summary>
    public bool Jump()
    {
        if (!Controller.isGrounded)
            return false;

        MoveThrottle += new Vector3(0, transform.lossyScale.y * JumpForce, 0);

        return true;
    }

    /// <summary>
    /// Stop this instance.
    /// </summary>
    public void Stop()
    {
        Controller.Move(Vector3.zero);
        MoveThrottle = Vector3.zero;
        FallSpeed = 0.0f;
    }

    /// <summary>
    /// Gets the move scale multiplier.
    /// </summary>
    /// <param name="moveScaleMultiplier">Move scale multiplier.</param>
    public void GetMoveScaleMultiplier(ref float moveScaleMultiplier)
    {
        moveScaleMultiplier = MoveScaleMultiplier;
    }

    /// <summary>
    /// Sets the move scale multiplier.
    /// </summary>
    /// <param name="moveScaleMultiplier">Move scale multiplier.</param>
    public void SetMoveScaleMultiplier(float moveScaleMultiplier)
    {
        MoveScaleMultiplier = moveScaleMultiplier;
    }

    /// <summary>
    /// Gets the rotation scale multiplier.
    /// </summary>
    /// <param name="rotationScaleMultiplier">Rotation scale multiplier.</param>
    public void GetRotationScaleMultiplier(ref float rotationScaleMultiplier)
    {
        rotationScaleMultiplier = RotationScaleMultiplier;
    }

    /// <summary>
    /// Sets the rotation scale multiplier.
    /// </summary>
    /// <param name="rotationScaleMultiplier">Rotation scale multiplier.</param>
    public void SetRotationScaleMultiplier(float rotationScaleMultiplier)
    {
        RotationScaleMultiplier = rotationScaleMultiplier;
    }

    /// <summary>
    /// Gets the allow mouse rotation.
    /// </summary>
    /// <param name="skipMouseRotation">Allow mouse rotation.</param>
    public void GetSkipMouseRotation(ref bool skipMouseRotation)
    {
        skipMouseRotation = SkipMouseRotation;
    }

    /// <summary>
    /// Sets the allow mouse rotation.
    /// </summary>
    /// <param name="skipMouseRotation">If set to <c>true</c> allow mouse rotation.</param>
    public void SetSkipMouseRotation(bool skipMouseRotation)
    {
        SkipMouseRotation = skipMouseRotation;
    }

    /// <summary>
    /// Gets the halt update movement.
    /// </summary>
    /// <param name="haltUpdateMovement">Halt update movement.</param>
    public void GetHaltUpdateMovement(ref bool haltUpdateMovement)
    {
        haltUpdateMovement = HaltUpdateMovement;
    }

    /// <summary>
    /// Sets the halt update movement.
    /// </summary>
    /// <param name="haltUpdateMovement">If set to <c>true</c> halt update movement.</param>
    public void SetHaltUpdateMovement(bool haltUpdateMovement)
    {
        HaltUpdateMovement = haltUpdateMovement;
    }

    /// <summary>
    /// Resets the player look rotation when the device orientation is reset.
    /// </summary>
    public void ResetOrientation()
    {
        if (HmdResetsY && !HmdRotatesY)
        {
            Vector3 euler = transform.rotation.eulerAngles;
            euler.y = InitialYRotation;
            transform.rotation = Quaternion.Euler(euler);
        }
    }
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 0950df82e7936c84983497630bde5b54
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,90 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System;
using System.Text;
using UnityEngine;
using UnityEngine.Assertions;
namespace UnityEngine.EventSystems
{
/// <summary>
/// Extension of Unity's PointerEventData to support ray based pointing and also touchpad swiping
/// </summary>
public class OVRPointerEventData : PointerEventData
{
    public OVRPointerEventData(EventSystem eventSystem)
        : base(eventSystem)
    {
    }

    // The world-space ray used for pointing instead of a 2D screen position.
    public Ray worldSpaceRay;
    // Touchpad position where the current swipe gesture started.
    public Vector2 swipeStart;

    /// <summary>
    /// Human-readable dump of the event state for debugging.
    /// </summary>
    public override string ToString()
    {
        var builder = new StringBuilder();
        builder.AppendLine("<b>Position</b>: " + position)
               .AppendLine("<b>delta</b>: " + delta)
               .AppendLine("<b>eligibleForClick</b>: " + eligibleForClick)
               .AppendLine("<b>pointerEnter</b>: " + pointerEnter)
               .AppendLine("<b>pointerPress</b>: " + pointerPress)
               .AppendLine("<b>lastPointerPress</b>: " + lastPress)
               .AppendLine("<b>pointerDrag</b>: " + pointerDrag)
               .AppendLine("<b>worldSpaceRay</b>: " + worldSpaceRay)
               .AppendLine("<b>swipeStart</b>: " + swipeStart)
               .AppendLine("<b>Use Drag Threshold</b>: " + useDragThreshold);
        return builder.ToString();
    }
}
/// <summary>
/// Static helpers for OVRPointerEventData.
/// </summary>
public static class PointerEventDataExtension
{
    // True when the event originated from a VR (ray-based) pointer.
    public static bool IsVRPointer(this PointerEventData pointerEventData)
    {
        return pointerEventData is OVRPointerEventData;
    }

    // Returns the world-space ray of a VR pointer event; asserts if the event is not VR.
    public static Ray GetRay(this PointerEventData pointerEventData)
    {
        var ovrEventData = pointerEventData as OVRPointerEventData;
        Assert.IsNotNull(ovrEventData);
        return ovrEventData.worldSpaceRay;
    }

    // Returns the recorded swipe start position; asserts if the event is not VR.
    public static Vector2 GetSwipeStart(this PointerEventData pointerEventData)
    {
        var ovrEventData = pointerEventData as OVRPointerEventData;
        Assert.IsNotNull(ovrEventData);
        return ovrEventData.swipeStart;
    }

    // Records the swipe start position; asserts if the event is not VR.
    public static void SetSwipeStart(this PointerEventData pointerEventData, Vector2 start)
    {
        var ovrEventData = pointerEventData as OVRPointerEventData;
        Assert.IsNotNull(ovrEventData);
        ovrEventData.swipeStart = start;
    }
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 646c937ce12610744adc2b5e487f77ac
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,13 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
// Moved to /Scripts/Editor. This stub only exists to overwrite previous instances of OVRProfiler.

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 3303d4232ee59ac40a9fdc223870fbbc
timeCreated: 1520636357
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,40 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
/// <summary>
/// Visualizes progress for operations such as loading.
/// </summary>
public class OVRProgressIndicator : MonoBehaviour
{
    // Renderer whose material's alpha cutoff is driven by the progress value.
    public MeshRenderer progressImage;

    [Range(0, 1)]
    public float currentProgress = 0.7f;

    // Cached shader property ID: avoids re-hashing the property name string every frame.
    private static readonly int alphaCutoffId = Shader.PropertyToID("_AlphaCutoff");

    void Awake()
    {
        // Sort late so the indicator renders on top of other geometry.
        progressImage.sortingOrder = 150;
    }

    // Update is called once per frame
    void Update()
    {
        // The shader discards pixels below the cutoff, so higher progress lowers the cutoff.
        progressImage.sharedMaterial.SetFloat(alphaCutoffId, 1 - currentProgress);
    }
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: f14ece5575e2b1e4d80619901d65b428
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,324 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.EventSystems;
using UnityEngine.Serialization;
/// <summary>
/// Extension of GraphicRaycaster to support ray casting with world space rays instead of just screen-space
/// pointer positions
/// </summary>
[RequireComponent(typeof(Canvas))]
public class OVRRaycaster : GraphicRaycaster, IPointerEnterHandler
{
[Tooltip("A world space pointer for this canvas")]
public GameObject pointer;

// Tie-break priority among raycasters; higher sortOrder is processed first.
public int sortOrder = 0;

// Protected: Unity instantiates components itself; direct construction is not supported.
protected OVRRaycaster()
{ }
[NonSerialized]
private Canvas m_Canvas;
private Canvas canvas
{
get
{
if (m_Canvas != null)
return m_Canvas;
m_Canvas = GetComponent<Canvas>();
return m_Canvas;
}
}
public override Camera eventCamera
{
get
{
return canvas.worldCamera;
}
}
public override int sortOrderPriority
{
get
{
return sortOrder;
}
}
protected override void Start()
{
if(!canvas.worldCamera)
{
Debug.Log("Canvas does not have an event camera attached. Attaching OVRCameraRig.centerEyeAnchor as default.");
OVRCameraRig rig = FindObjectOfType<OVRCameraRig>();
canvas.worldCamera = rig.centerEyeAnchor.gameObject.GetComponent<Camera>();
}
}
/// <summary>
/// For the given ray, find graphics on this canvas which it intersects and are not blocked by other
/// world objects
/// </summary>
[NonSerialized]
private List<RaycastHit> m_RaycastResults = new List<RaycastHit>();
private void Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList, Ray ray, bool checkForBlocking)
{
//This function is closely based on
//void GraphicRaycaster.Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList)
if (canvas == null)
return;
float hitDistance = float.MaxValue;
if (checkForBlocking && blockingObjects != BlockingObjects.None)
{
float dist = eventCamera.farClipPlane;
if (blockingObjects == BlockingObjects.ThreeD || blockingObjects == BlockingObjects.All)
{
var hits = Physics.RaycastAll(ray, dist, m_BlockingMask);
if (hits.Length > 0 && hits[0].distance < hitDistance)
{
hitDistance = hits[0].distance;
}
}
if (blockingObjects == BlockingObjects.TwoD || blockingObjects == BlockingObjects.All)
{
var hits = Physics2D.GetRayIntersectionAll(ray, dist, m_BlockingMask);
if (hits.Length > 0 && hits[0].fraction * dist < hitDistance)
{
hitDistance = hits[0].fraction * dist;
}
}
}
m_RaycastResults.Clear();
GraphicRaycast(canvas, ray, m_RaycastResults);
for (var index = 0; index < m_RaycastResults.Count; index++)
{
var go = m_RaycastResults[index].graphic.gameObject;
bool appendGraphic = true;
if (ignoreReversedGraphics)
{
// If we have a camera compare the direction against the cameras forward.
var cameraFoward = ray.direction;
var dir = go.transform.rotation * Vector3.forward;
appendGraphic = Vector3.Dot(cameraFoward, dir) > 0;
}
// Ignore points behind us (can happen with a canvas pointer)
if (eventCamera.transform.InverseTransformPoint(m_RaycastResults[index].worldPos).z <= 0)
{
appendGraphic = false;
}
if (appendGraphic)
{
float distance = Vector3.Distance(ray.origin, m_RaycastResults[index].worldPos);
if (distance >= hitDistance)
{
continue;
}
var castResult = new RaycastResult
{
gameObject = go,
module = this,
distance = distance,
index = resultAppendList.Count,
depth = m_RaycastResults[index].graphic.depth,
worldPosition = m_RaycastResults[index].worldPos
};
resultAppendList.Add(castResult);
}
}
}
/// <summary>
/// Performs a raycast using eventData.worldSpaceRay
/// </summary>
/// <param name="eventData"></param>
/// <param name="resultAppendList"></param>
public override void Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList)
{
if (eventData.IsVRPointer())
{
Raycast(eventData, resultAppendList, eventData.GetRay(), true);
}
}
/// <summary>
/// Performs a raycast using the pointer object attached to this OVRRaycaster
/// </summary>
/// <param name="eventData"></param>
/// <param name="resultAppendList"></param>
public void RaycastPointer(PointerEventData eventData, List<RaycastResult> resultAppendList)
{
if (pointer != null && pointer.activeInHierarchy)
{
Raycast(eventData, resultAppendList, new Ray(eventCamera.transform.position, (pointer.transform.position - eventCamera.transform.position).normalized), false);
}
}
/// <summary>
/// Perform a raycast into the screen and collect all graphics underneath it.
/// </summary>
[NonSerialized]
static readonly List<RaycastHit> s_SortedGraphics = new List<RaycastHit>();
private void GraphicRaycast(Canvas canvas, Ray ray, List<RaycastHit> results)
{
//This function is based closely on :
// void GraphicRaycaster.Raycast(Canvas canvas, Camera eventCamera, Vector2 pointerPosition, List<Graphic> results)
// But modified to take a Ray instead of a canvas pointer, and also to explicitly ignore
// the graphic associated with the pointer
// Necessary for the event system
var foundGraphics = GraphicRegistry.GetGraphicsForCanvas(canvas);
s_SortedGraphics.Clear();
for (int i = 0; i < foundGraphics.Count; ++i)
{
Graphic graphic = foundGraphics[i];
// -1 means it hasn't been processed by the canvas, which means it isn't actually drawn
if (graphic.depth == -1 || (pointer == graphic.gameObject))
continue;
Vector3 worldPos;
if (RayIntersectsRectTransform(graphic.rectTransform, ray, out worldPos))
{
//Work out where this is on the screen for compatibility with existing Unity UI code
Vector2 screenPos = eventCamera.WorldToScreenPoint(worldPos);
// mask/image intersection - See Unity docs on eventAlphaThreshold for when this does anything
if (graphic.Raycast(screenPos, eventCamera))
{
RaycastHit hit;
hit.graphic = graphic;
hit.worldPos = worldPos;
hit.fromMouse = false;
s_SortedGraphics.Add(hit);
}
}
}
s_SortedGraphics.Sort((g1, g2) => g2.graphic.depth.CompareTo(g1.graphic.depth));
for (int i = 0; i < s_SortedGraphics.Count; ++i)
{
results.Add(s_SortedGraphics[i]);
}
}
/// <summary>
/// Get screen position of worldPosition contained in this RaycastResult
/// </summary>
/// <param name="worldPosition"></param>
/// <returns></returns>
public Vector2 GetScreenPosition(RaycastResult raycastResult)
{
// In future versions of Uinty RaycastResult will contain screenPosition so this will not be necessary
return eventCamera.WorldToScreenPoint(raycastResult.worldPosition);
}
/// <summary>
/// Detects whether a ray intersects a RectTransform and if it does also
/// returns the world position of the intersection.
/// </summary>
/// <param name="rectTransform"></param>
/// <param name="ray"></param>
/// <param name="worldPos"></param>
/// <returns></returns>
static bool RayIntersectsRectTransform(RectTransform rectTransform, Ray ray, out Vector3 worldPos)
{
Vector3[] corners = new Vector3[4];
rectTransform.GetWorldCorners(corners);
Plane plane = new Plane(corners[0], corners[1], corners[2]);
float enter;
if (!plane.Raycast(ray, out enter))
{
worldPos = Vector3.zero;
return false;
}
Vector3 intersection = ray.GetPoint(enter);
Vector3 BottomEdge = corners[3] - corners[0];
Vector3 LeftEdge = corners[1] - corners[0];
float BottomDot = Vector3.Dot(intersection - corners[0], BottomEdge);
float LeftDot = Vector3.Dot(intersection - corners[0], LeftEdge);
if (BottomDot < BottomEdge.sqrMagnitude && // Can use sqrMag because BottomEdge is not normalized
LeftDot < LeftEdge.sqrMagnitude &&
BottomDot >= 0 &&
LeftDot >= 0)
{
worldPos = corners[0] + LeftDot * LeftEdge / LeftEdge.sqrMagnitude + BottomDot * BottomEdge / BottomEdge.sqrMagnitude;
return true;
}
else
{
worldPos = Vector3.zero;
return false;
}
}
struct RaycastHit
{
public Graphic graphic;
public Vector3 worldPos;
public bool fromMouse;
};
/// <summary>
/// Is this the currently focussed Raycaster according to the InputModule
/// </summary>
/// <returns></returns>
public bool IsFocussed()
{
OVRInputModule inputModule = EventSystem.current.currentInputModule as OVRInputModule;
return inputModule && inputModule.activeGraphicRaycaster == this;
}
public void OnPointerEnter(PointerEventData e)
{
if (e.IsVRPointer())
{
// Gaze has entered this canvas. We'll make it the active one so that canvas-mouse pointer can be used.
OVRInputModule inputModule = EventSystem.current.currentInputModule as OVRInputModule;
if(inputModule != null)
{
inputModule.activeGraphicRaycaster = this;
}
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 7aaf960227867044282d921171d2d7ac
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,67 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Assets.OVR.Scripts
{
    /// <summary>
    /// A report entry consisting of a sort key, a category label, and a message.
    /// </summary>
    public class Record
    {
        public int sortOrder;
        public string category;
        public string message;

        public Record(int order, string cat, string msg)
        {
            this.sortOrder = order;
            this.category = cat;
            this.message = msg;
        }
    }

    /// <summary>
    /// A record that additionally carries a numeric value together with its
    /// associated min/max bounds.
    /// </summary>
    public class RangedRecord : Record
    {
        public float value;
        public float min;
        public float max;

        public RangedRecord(int order, string cat, string msg, float val, float minVal, float maxVal)
            : base(order, cat, msg)
        {
            this.value = val;
            this.min = minVal;
            this.max = maxVal;
        }
    }

    /// <summary>
    /// Callback invoked to apply a fix to a target object.
    /// </summary>
    public delegate void FixMethodDelegate(UnityEngine.Object obj, bool isLastInSet, int selectedIndex);

    /// <summary>
    /// A record describing a fixable issue: the target object, the fix
    /// callback, button labels, and whether edit mode is required.
    /// </summary>
    public class FixRecord : Record
    {
        public FixMethodDelegate fixMethod;
        public UnityEngine.Object targetObject;
        public string[] buttonNames;
        public bool editModeRequired;
        public bool complete;

        public FixRecord(int order, string cat, string msg, FixMethodDelegate fix, UnityEngine.Object target, bool editRequired, string[] buttons)
            : base(order, cat, msg)
        {
            // Fixes start out incomplete.
            this.complete = false;
            this.fixMethod = fix;
            this.targetObject = target;
            this.buttonNames = buttons;
            this.editModeRequired = editRequired;
        }
    }
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 63f0fe0d60ddeb54f9f43d701286af2d
timeCreated: 1520636357
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,40 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Allows you to reset VR input tracking with a gamepad button press.
/// </summary>
public class OVRResetOrientation : MonoBehaviour
{
    /// <summary>
    /// The gamepad button that will reset VR input tracking.
    /// </summary>
    public OVRInput.RawButton resetButton = OVRInput.RawButton.Y;

    /// <summary>
    /// Checks input each frame and recenters the pose when the reset button
    /// is pressed. See the input mapping setup in the Unity Integration guide.
    /// </summary>
    void Update()
    {
        // NOTE: some of the buttons defined in OVRInput.RawButton are not
        // available on the Android game pad controller.
        if (!OVRInput.GetDown(resetButton))
            return;

        //*************************
        // reset orientation
        //*************************
        OVRManager.display.RecenterPose();
    }
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 09bb0a17b6a704298b65be4fb08ef480
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,134 @@
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class OVRRuntimeController : MonoBehaviour
{
    /// <summary>
    /// The controller that determines whether or not to enable rendering of the controller model.
    /// </summary>
    public OVRInput.Controller m_controller;

    /// <summary>
    /// Shader that will be used for the controller model
    /// </summary>
    public Shader m_controllerModelShader;

    // Root of the runtime-loaded controller model; null until successfully loaded.
    private GameObject m_controllerObject;

    private static string leftControllerModelPath = "/model_fb/controller/left";
    private static string rightControllerModelPath = "/model_fb/controller/right";
    private string m_controllerModelPath;

    // Whether the runtime exposes a render model for the configured controller.
    private bool m_modelSupported = false;

    // Focus/connection state tracking so visibility is only updated on change.
    private bool m_hasInputFocus = true;
    private bool m_hasInputFocusPrev = false;
    private bool m_controllerConnectedPrev = false;

    // Picks the model path for the configured controller, verifies runtime
    // support, starts the polling coroutine, and subscribes to focus events.
    void Start()
    {
        if (m_controller == OVRInput.Controller.LTouch)
            m_controllerModelPath = leftControllerModelPath;
        else if (m_controller == OVRInput.Controller.RTouch)
            m_controllerModelPath = rightControllerModelPath;

        m_modelSupported = IsModelSupported(m_controllerModelPath);
        if (m_modelSupported)
        {
            StartCoroutine(UpdateControllerModel());
        }

        OVRManager.InputFocusAcquired += InputFocusAquired;
        OVRManager.InputFocusLost += InputFocusLost;
    }

    // FIX: unsubscribe from the static OVRManager events so a destroyed
    // instance is not kept reachable (or called back) by lingering delegates.
    void OnDestroy()
    {
        OVRManager.InputFocusAcquired -= InputFocusAquired;
        OVRManager.InputFocusLost -= InputFocusLost;
    }

    // Shows/hides the loaded model whenever focus or connection state changes.
    void Update()
    {
        bool controllerConnected = OVRInput.IsControllerConnected(m_controller);
        if (m_hasInputFocus != m_hasInputFocusPrev || controllerConnected != m_controllerConnectedPrev)
        {
            if (m_controllerObject != null)
            {
                m_controllerObject.SetActive(controllerConnected && m_hasInputFocus);
            }
            m_hasInputFocusPrev = m_hasInputFocus;
            m_controllerConnectedPrev = controllerConnected;
        }
    }

    // Returns true when modelPath is among the render model paths the runtime exposes.
    private bool IsModelSupported(string modelPath)
    {
        string[] modelPaths = OVRPlugin.GetRenderModelPaths();
        if (modelPaths.Length == 0)
        {
            Debug.LogError("Failed to enumerate model paths from the runtime. Check that the render model feature is enabled in OVRManager.");
            return false;
        }
        for (int i = 0; i < modelPaths.Length; i++)
        {
            if (modelPaths[i].Equals(modelPath))
                return true;
        }
        Debug.LogError("Render model path not supported by this device.");
        return false;
    }

    // Loads the glTF controller model for modelPath and parents it under this
    // transform. Returns true on success.
    // FIX: each failure mode now logs an accurate message — previously the
    // (misspelled) "Retrived a null model key." error was also logged when the
    // key was valid but the model data failed to load or parse.
    private bool LoadControllerModel(string modelPath)
    {
        var modelProperties = new OVRPlugin.RenderModelProperties();
        if (!OVRPlugin.GetRenderModelProperties(modelPath, ref modelProperties))
        {
            Debug.LogError("Failed to load controller model properties.");
            return false;
        }

        if (modelProperties.ModelKey == OVRPlugin.RENDER_MODEL_NULL_KEY)
        {
            Debug.LogError("Retrieved a null model key.");
            return false;
        }

        byte[] modelData = OVRPlugin.LoadRenderModel(modelProperties.ModelKey);
        if (modelData == null)
        {
            Debug.LogError("Failed to load controller model data.");
            return false;
        }

        OVRGLTFLoader loader = new OVRGLTFLoader(modelData);
        loader.SetModelShader(m_controllerModelShader);
        m_controllerObject = loader.LoadGLB().root;
        if (m_controllerObject == null)
        {
            Debug.LogError("Failed to load controller model");
            return false;
        }

        m_controllerObject.transform.SetParent(transform, false);
        // Apply the OpenXR grip pose offset so runtime controller models are in the right position
        m_controllerObject.transform.parent.localPosition = new Vector3(0.0f, -0.03f, -0.04f);
        m_controllerObject.transform.parent.localRotation = Quaternion.AngleAxis(-60.0f, new Vector3(1.0f, 0.0f, 0.0f));
        return true;
    }

    // Polls every half second until the controller is connected and its model loads.
    private IEnumerator UpdateControllerModel()
    {
        while (true)
        {
            bool controllerConnected = OVRInput.IsControllerConnected(m_controller);
            if (m_controllerObject == null && controllerConnected)
            {
                LoadControllerModel(m_controllerModelPath);
            }
            yield return new WaitForSeconds(.5f);
        }
    }

    // Event handler for OVRManager.InputFocusAcquired. (Name kept, including
    // its historical spelling, because it is part of the public interface.)
    public void InputFocusAquired()
    {
        m_hasInputFocus = true;
    }

    // Event handler for OVRManager.InputFocusLost.
    public void InputFocusLost()
    {
        m_hasInputFocus = false;
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: ed24508d182abcf4ba02d983e302d34e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,220 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
/// <summary>
/// Sample that allows you to play with various VR settings.
/// </summary>
public class OVRSceneSampleController : MonoBehaviour
{
    /// <summary>
    /// The key that quits the application.
    /// </summary>
    public KeyCode quitKey = KeyCode.Escape;

    /// <summary>
    /// An optional texture that appears before the menu fades in.
    /// </summary>
    public Texture fadeInTexture = null;

    /// <summary>
    /// Controls how quickly the player's speed and rotation change based on input.
    /// </summary>
    public float speedRotationIncrement = 0.05f;

    // Optional OVRPlayerController found among children; speed/rotation
    // adjustments only run when one is present.
    private OVRPlayerController playerController = null;

    // Handle to OVRCameraRig
    private OVRCameraRig cameraController = null;

    /// <summary>
    /// We can set the layer to be anything we want to, this allows
    /// a specific camera to render it.
    /// </summary>
    public string layerName = "Default";

    // Vision mode on/off (toggled with F2; drives OVRManager.tracker.isEnabled)
    private bool visionMode = true;

    // We want to hold onto GridCube, for potential sharing
    // of the menu RenderTarget
    OVRGridCube gridCube = null;

#if SHOW_DK2_VARIABLES
    private string strVisionMode = "Vision Enabled: ON";
#endif

    #region MonoBehaviour Message Handlers

    /// <summary>
    /// Awake this instance: locate the single expected OVRCameraRig and
    /// OVRPlayerController among children, warning when zero or multiple exist.
    /// </summary>
    void Awake()
    {
        // Find camera controller
        OVRCameraRig[] cameraControllers;
        cameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>();

        if (cameraControllers.Length == 0)
        {
            Debug.LogWarning("OVRMainMenu: No OVRCameraRig attached.");
        }
        else if (cameraControllers.Length > 1)
        {
            Debug.LogWarning("OVRMainMenu: More then 1 OVRCameraRig attached.");
        }
        else
        {
            cameraController = cameraControllers[0];
        }

        // Find player controller
        OVRPlayerController[] playerControllers;
        playerControllers = gameObject.GetComponentsInChildren<OVRPlayerController>();

        if (playerControllers.Length == 0)
        {
            Debug.LogWarning("OVRMainMenu: No OVRPlayerController attached.");
        }
        else if (playerControllers.Length > 1)
        {
            Debug.LogWarning("OVRMainMenu: More then 1 OVRPlayerController attached.");
        }
        else
        {
            playerController = playerControllers[0];
        }
    }

    /// <summary>
    /// Start this instance: hide the cursor outside the editor and attach a
    /// debug grid cube when a camera rig was found.
    /// </summary>
    void Start()
    {
        // Make sure to hide cursor
        if (Application.isEditor == false)
        {
            Cursor.visible = false;
            Cursor.lockState = CursorLockMode.Locked;
        }

        // CameraController updates
        if (cameraController != null)
        {
            // Add a GridCube component to this object
            gridCube = gameObject.AddComponent<OVRGridCube>();
            gridCube.SetOVRCameraController(ref cameraController);
        }
    }

    /// <summary>
    /// Update this instance: recenter pose, vision-mode toggle, speed/rotation
    /// scaling, and (legacy input only) fullscreen/device-view/quit hotkeys.
    /// </summary>
    void Update()
    {
        // Recenter pose
        UpdateRecenterPose();

        // Turn On/Off Vision Mode
        UpdateVisionMode();

        // Update Speed and Rotation Scale
        if (playerController != null)
            UpdateSpeedAndRotationScaleMultiplier();

        //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
        // Toggle Fullscreen
        if (Input.GetKeyDown(KeyCode.F11))
            Screen.fullScreen = !Screen.fullScreen;

        if (Input.GetKeyDown(KeyCode.M))
            UnityEngine.XR.XRSettings.showDeviceView = !UnityEngine.XR.XRSettings.showDeviceView;

#if !UNITY_ANDROID || UNITY_EDITOR
        // Escape Application
        if (Input.GetKeyDown(quitKey))
            Application.Quit();
#endif
#endif
    }

    #endregion

    /// <summary>
    /// Updates the vision mode: F2 toggles the flag and applies it to the tracker.
    /// </summary>
    void UpdateVisionMode()
    {
        //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
        if (Input.GetKeyDown(KeyCode.F2))
        {
            // BUGFIX: was "visionMode ^= visionMode", which XORs the flag with
            // itself and always yields false — the mode could never be toggled
            // back on. Negation performs the intended toggle.
            visionMode = !visionMode;
            OVRManager.tracker.isEnabled = visionMode;
        }
#endif
    }

    /// <summary>
    /// Updates the speed and rotation scale multiplier: keys 7/8 decrease/increase
    /// move scale, keys 9/0 decrease/increase rotation scale.
    /// </summary>
    void UpdateSpeedAndRotationScaleMultiplier()
    {
        float moveScaleMultiplier = 0.0f;
        playerController.GetMoveScaleMultiplier(ref moveScaleMultiplier);

        //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
        if (Input.GetKeyDown(KeyCode.Alpha7))
        {
            moveScaleMultiplier -= speedRotationIncrement;
        }
        else if (Input.GetKeyDown(KeyCode.Alpha8))
        {
            moveScaleMultiplier += speedRotationIncrement;
        }
#endif
        playerController.SetMoveScaleMultiplier(moveScaleMultiplier);

        float rotationScaleMultiplier = 0.0f;
        playerController.GetRotationScaleMultiplier(ref rotationScaleMultiplier);

        //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
        if (Input.GetKeyDown(KeyCode.Alpha9))
        {
            rotationScaleMultiplier -= speedRotationIncrement;
        }
        else if (Input.GetKeyDown(KeyCode.Alpha0))
        {
            rotationScaleMultiplier += speedRotationIncrement;
        }
#endif
        playerController.SetRotationScaleMultiplier(rotationScaleMultiplier);
    }

    /// <summary>
    /// Recenter pose when the R key is pressed (legacy input only).
    /// </summary>
    void UpdateRecenterPose()
    {
        //todo: enable for Unity Input System
#if ENABLE_LEGACY_INPUT_MANAGER
        if (Input.GetKeyDown(KeyCode.R))
            OVRManager.display.RecenterPose();
#endif
    }
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 4f07515ada089df47868559a20dd6783
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,228 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections; // required for Coroutines
/// <summary>
/// Fades the screen from black after a new scene is loaded. Fade can also be controlled mid-scene using SetUIFade and SetFadeLevel
/// </summary>
public class OVRScreenFade : MonoBehaviour
{
    // Globally-accessible instance; assigned at the end of Start(), cleared in OnDestroy().
    public static OVRScreenFade instance { get; private set; }

    [Tooltip("Fade duration")]
    public float fadeTime = 2.0f;

    [Tooltip("Screen color at maximum fade")]
    public Color fadeColor = new Color(0.01f, 0.01f, 0.01f, 1.0f);

    // Whether to automatically fade in from black when the component starts.
    public bool fadeOnStart = true;

    /// <summary>
    /// The render queue used by the fade mesh. Reduce this if you need to render on top of it.
    /// </summary>
    public int renderQueue = 5000;

    /// <summary>
    /// Renders the current alpha value being used to fade the screen.
    /// </summary>
    public float currentAlpha { get { return Mathf.Max(explicitFadeAlpha, animatedFadeAlpha, uiFadeAlpha); } }

    // Three independent fade contributions; the strongest (maximum) wins.
    private float explicitFadeAlpha = 0.0f;  // set via SetExplicitFade()
    private float animatedFadeAlpha = 0.0f;  // driven by the Fade() coroutine
    private float uiFadeAlpha = 0.0f;        // set via SetUIFade()

    private MeshRenderer fadeRenderer;
    private MeshFilter fadeMesh;
    private Material fadeMaterial = null;
    private bool isFading = false;

    /// <summary>
    /// Builds the full-screen fade quad and material, then automatically
    /// starts a fade in (when fadeOnStart is set).
    /// </summary>
    void Start()
    {
        // Mixed-reality-capture clone objects should not fade; strip this component.
        if (gameObject.name.StartsWith("OculusMRC_"))
        {
            Destroy(this);
            return;
        }

        // create the fade material
        fadeMaterial = new Material(Shader.Find("Oculus/Unlit Transparent Color"));
        fadeMesh = gameObject.AddComponent<MeshFilter>();
        fadeRenderer = gameObject.AddComponent<MeshRenderer>();

        // Build a 4x4-unit quad at depth 1 in front of this transform — large
        // enough to cover the view.
        var mesh = new Mesh();
        fadeMesh.mesh = mesh;

        Vector3[] vertices = new Vector3[4];

        float width = 2f;
        float height = 2f;
        float depth = 1f;

        vertices[0] = new Vector3(-width, -height, depth);
        vertices[1] = new Vector3(width, -height, depth);
        vertices[2] = new Vector3(-width, height, depth);
        vertices[3] = new Vector3(width, height, depth);

        mesh.vertices = vertices;

        // Two triangles wound to face the camera.
        int[] tri = new int[6];

        tri[0] = 0;
        tri[1] = 2;
        tri[2] = 1;
        tri[3] = 2;
        tri[4] = 3;
        tri[5] = 1;

        mesh.triangles = tri;

        Vector3[] normals = new Vector3[4];

        normals[0] = -Vector3.forward;
        normals[1] = -Vector3.forward;
        normals[2] = -Vector3.forward;
        normals[3] = -Vector3.forward;

        mesh.normals = normals;

        Vector2[] uv = new Vector2[4];

        uv[0] = new Vector2(0, 0);
        uv[1] = new Vector2(1, 0);
        uv[2] = new Vector2(0, 1);
        uv[3] = new Vector2(1, 1);

        mesh.uv = uv;

        // Reset all fade contributions before any initial fade starts.
        explicitFadeAlpha = 0.0f;
        animatedFadeAlpha = 0.0f;
        uiFadeAlpha = 0.0f;

        if (fadeOnStart)
        {
            FadeIn();
        }

        instance = this;
    }

    /// <summary>
    /// Start a fade in
    /// </summary>
    public void FadeIn()
    {
        StartCoroutine(Fade(1.0f, 0.0f));
    }

    /// <summary>
    /// Start a fade out
    /// </summary>
    public void FadeOut()
    {
        StartCoroutine(Fade(0, 1));
    }

    /// <summary>
    /// Starts a fade in when a new level is loaded
    /// </summary>
    // NOTE(review): nothing in this class registers this with a scene-load
    // callback — presumably wired externally or legacy; confirm before removing.
    void OnLevelFinishedLoading(int level)
    {
        FadeIn();
    }

    // When no start-fade is configured, clear any stale fade state on re-enable.
    void OnEnable()
    {
        if (!fadeOnStart)
        {
            explicitFadeAlpha = 0.0f;
            animatedFadeAlpha = 0.0f;
            uiFadeAlpha = 0.0f;
        }
    }

    /// <summary>
    /// Cleans up the fade material
    /// </summary>
    void OnDestroy()
    {
        instance = null;

        if (fadeRenderer != null)
            Destroy(fadeRenderer);

        if (fadeMaterial != null)
            Destroy(fadeMaterial);

        if (fadeMesh != null)
            Destroy(fadeMesh);
    }

    /// <summary>
    /// Set the UI fade level - fade due to UI in foreground
    /// </summary>
    public void SetUIFade(float level)
    {
        uiFadeAlpha = Mathf.Clamp01(level);
        SetMaterialAlpha();
    }

    /// <summary>
    /// Override current fade level
    /// </summary>
    /// <param name="level">Fade alpha; note this value is not clamped here.</param>
    public void SetExplicitFade(float level)
    {
        explicitFadeAlpha = level;
        SetMaterialAlpha();
    }

    /// <summary>
    /// Animates the fade alpha from startAlpha to endAlpha over fadeTime seconds.
    /// </summary>
    IEnumerator Fade(float startAlpha, float endAlpha)
    {
        float elapsedTime = 0.0f;
        while (elapsedTime < fadeTime)
        {
            elapsedTime += Time.deltaTime;
            animatedFadeAlpha = Mathf.Lerp(startAlpha, endAlpha, Mathf.Clamp01(elapsedTime / fadeTime));
            SetMaterialAlpha();
            yield return new WaitForEndOfFrame();
        }
        // Snap to the exact end value to avoid lerp residue.
        animatedFadeAlpha = endAlpha;
        SetMaterialAlpha();
    }

    /// <summary>
    /// Update material alpha. UI fade and the current fade due to fade in/out animations (or explicit control)
    /// both affect the fade. (The max is taken)
    /// </summary>
    private void SetMaterialAlpha()
    {
        Color color = fadeColor;
        color.a = currentAlpha;
        isFading = color.a > 0;
        if (fadeMaterial != null)
        {
            fadeMaterial.color = color;
            fadeMaterial.renderQueue = renderQueue;
            fadeRenderer.material = fadeMaterial;
            // Only draw the quad while some fade contribution is active.
            fadeRenderer.enabled = isFading;
        }
    }
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: df8e1d778abf442e4bec449c360e9e1c
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: -100
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,606 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System;
using System.Collections.Generic;
using UnityEngine;
[DefaultExecutionOrder(-80)]
public class OVRSkeleton : MonoBehaviour
{
public interface IOVRSkeletonDataProvider
{
SkeletonType GetSkeletonType();
SkeletonPoseData GetSkeletonPoseData();
}
public struct SkeletonPoseData
{
public OVRPlugin.Posef RootPose { get; set; }
public float RootScale { get; set; }
public OVRPlugin.Quatf[] BoneRotations { get; set; }
public bool IsDataValid { get; set; }
public bool IsDataHighConfidence { get; set; }
public int SkeletonChangedCount { get; set; }
}
public enum SkeletonType
{
None = OVRPlugin.SkeletonType.None,
HandLeft = OVRPlugin.SkeletonType.HandLeft,
HandRight = OVRPlugin.SkeletonType.HandRight,
}
public enum BoneId
{
Invalid = OVRPlugin.BoneId.Invalid,
// hand bones
Hand_Start = OVRPlugin.BoneId.Hand_Start,
Hand_WristRoot = OVRPlugin.BoneId.Hand_WristRoot, // root frame of the hand, where the wrist is located
Hand_ForearmStub = OVRPlugin.BoneId.Hand_ForearmStub, // frame for user's forearm
Hand_Thumb0 = OVRPlugin.BoneId.Hand_Thumb0, // thumb trapezium bone
Hand_Thumb1 = OVRPlugin.BoneId.Hand_Thumb1, // thumb metacarpal bone
Hand_Thumb2 = OVRPlugin.BoneId.Hand_Thumb2, // thumb proximal phalange bone
Hand_Thumb3 = OVRPlugin.BoneId.Hand_Thumb3, // thumb distal phalange bone
Hand_Index1 = OVRPlugin.BoneId.Hand_Index1, // index proximal phalange bone
Hand_Index2 = OVRPlugin.BoneId.Hand_Index2, // index intermediate phalange bone
Hand_Index3 = OVRPlugin.BoneId.Hand_Index3, // index distal phalange bone
Hand_Middle1 = OVRPlugin.BoneId.Hand_Middle1, // middle proximal phalange bone
Hand_Middle2 = OVRPlugin.BoneId.Hand_Middle2, // middle intermediate phalange bone
Hand_Middle3 = OVRPlugin.BoneId.Hand_Middle3, // middle distal phalange bone
Hand_Ring1 = OVRPlugin.BoneId.Hand_Ring1, // ring proximal phalange bone
Hand_Ring2 = OVRPlugin.BoneId.Hand_Ring2, // ring intermediate phalange bone
Hand_Ring3 = OVRPlugin.BoneId.Hand_Ring3, // ring distal phalange bone
Hand_Pinky0 = OVRPlugin.BoneId.Hand_Pinky0, // pinky metacarpal bone
Hand_Pinky1 = OVRPlugin.BoneId.Hand_Pinky1, // pinky proximal phalange bone
Hand_Pinky2 = OVRPlugin.BoneId.Hand_Pinky2, // pinky intermediate phalange bone
Hand_Pinky3 = OVRPlugin.BoneId.Hand_Pinky3, // pinky distal phalange bone
Hand_MaxSkinnable = OVRPlugin.BoneId.Hand_MaxSkinnable,
// Bone tips are position only. They are not used for skinning but are useful for hit-testing.
// NOTE: Hand_ThumbTip == Hand_MaxSkinnable since the extended tips need to be contiguous
Hand_ThumbTip = OVRPlugin.BoneId.Hand_ThumbTip, // tip of the thumb
Hand_IndexTip = OVRPlugin.BoneId.Hand_IndexTip, // tip of the index finger
Hand_MiddleTip = OVRPlugin.BoneId.Hand_MiddleTip, // tip of the middle finger
Hand_RingTip = OVRPlugin.BoneId.Hand_RingTip, // tip of the ring finger
Hand_PinkyTip = OVRPlugin.BoneId.Hand_PinkyTip, // tip of the pinky
Hand_End = OVRPlugin.BoneId.Hand_End,
// add new bones here
Max = OVRPlugin.BoneId.Max
}
[SerializeField]
protected SkeletonType _skeletonType = SkeletonType.None;
[SerializeField]
private IOVRSkeletonDataProvider _dataProvider;
[SerializeField]
private bool _updateRootPose = false;
[SerializeField]
private bool _updateRootScale = false;
[SerializeField]
private bool _enablePhysicsCapsules = false;
private GameObject _bonesGO;
private GameObject _bindPosesGO;
private GameObject _capsulesGO;
protected List<OVRBone> _bones;
private List<OVRBone> _bindPoses;
private List<OVRBoneCapsule> _capsules;
protected OVRPlugin.Skeleton2 _skeleton = new OVRPlugin.Skeleton2();
private readonly Quaternion wristFixupRotation = new Quaternion(0.0f, 1.0f, 0.0f, 0.0f);
public bool IsInitialized { get; private set; }
public bool IsDataValid { get; private set; }
public bool IsDataHighConfidence { get; private set; }
public IList<OVRBone> Bones { get; protected set; }
public IList<OVRBone> BindPoses { get; private set; }
public IList<OVRBoneCapsule> Capsules { get; private set; }
public SkeletonType GetSkeletonType() { return _skeletonType; }
public int SkeletonChangedCount { get; private set; }
// Resolves the data provider (falling back to a component on this GameObject)
// and exposes read-only views over the internal bone/bind-pose/capsule lists.
private void Awake()
{
    if (_dataProvider == null)
    {
        _dataProvider = GetComponent<IOVRSkeletonDataProvider>();
    }

    _bones = new List<OVRBone>();
    Bones = _bones.AsReadOnly();

    _bindPoses = new List<OVRBone>();
    BindPoses = _bindPoses.AsReadOnly();

    _capsules = new List<OVRBoneCapsule>();
    Capsules = _capsules.AsReadOnly();
}
// Attempts one-time skeleton initialization at startup, gated by ShouldInitialize().
private void Start()
{
    if (ShouldInitialize())
    {
        Initialize();
    }
}
// Decides whether Initialize() should run yet: false when already initialized
// or no skeleton type is configured; in the editor, hand skeletons additionally
// wait until hand tracking is reported as connected.
private bool ShouldInitialize()
{
    if (IsInitialized)
    {
        return false;
    }

    if (_skeletonType == SkeletonType.None)
    {
        return false;
    }
    else if (_skeletonType == SkeletonType.HandLeft || _skeletonType == SkeletonType.HandRight)
    {
#if UNITY_EDITOR
        // In the editor, defer until hands are actually connected.
        return OVRInput.IsControllerConnected(OVRInput.Controller.Hands);
#else
        return true;
#endif
    }
    else
    {
        return true;
    }
}
// Fetches the skeleton definition from OVRPlugin and builds bones, bind poses and capsules.
// Leaves IsInitialized false when the plugin query fails so initialization can be retried.
private void Initialize()
{
    if (!OVRPlugin.GetSkeleton2((OVRPlugin.SkeletonType)_skeletonType, ref _skeleton))
    {
        return;
    }

    InitializeBones();
    InitializeBindPose();
    InitializeCapsules();
    IsInitialized = true;
}
// Builds (or rebuilds) one GameObject per skeleton bone under a "Bones" root, filling _bones
// with id/parent-index/transform data, then parents the transforms into the bone hierarchy.
protected virtual void InitializeBones()
{
// Hand skeletons are converted with the flipped-X helpers; all others with flipped-Z.
bool flipX = (_skeletonType == SkeletonType.HandLeft || _skeletonType == SkeletonType.HandRight);
if (!_bonesGO)
{
_bonesGO = new GameObject("Bones");
_bonesGO.transform.SetParent(transform, false);
_bonesGO.transform.localPosition = Vector3.zero;
_bonesGO.transform.localRotation = Quaternion.identity;
}
// Resize the bone list only when the skeleton definition changed size; reuse entries otherwise.
if (_bones == null || _bones.Count != _skeleton.NumBones)
{
_bones = new List<OVRBone>(new OVRBone[_skeleton.NumBones]);
Bones = _bones.AsReadOnly();
}
// pre-populate bones list before attempting to apply bone hierarchy
for (int i = 0; i < _bones.Count; ++i)
{
// Reuse an existing OVRBone (and its GameObject) where possible; otherwise create them.
OVRBone bone = _bones[i] ?? (_bones[i] = new OVRBone());
bone.Id = (OVRSkeleton.BoneId)_skeleton.Bones[i].Id;
bone.ParentBoneIndex = _skeleton.Bones[i].ParentBoneIndex;
Transform trans = bone.Transform ??
(bone.Transform = new GameObject(BoneLabelFromBoneId(_skeletonType, bone.Id)).transform);
trans.localPosition = flipX ? _skeleton.Bones[i].Pose.Position.FromFlippedXVector3f() : _skeleton.Bones[i].Pose.Position.FromFlippedZVector3f();
trans.localRotation = flipX ? _skeleton.Bones[i].Pose.Orientation.FromFlippedXQuatf() : _skeleton.Bones[i].Pose.Orientation.FromFlippedZQuatf();
}
// Second pass: parent each transform. Roots (Invalid parent index) attach to the "Bones" root;
// this must run after the first pass so every parent transform already exists.
for (int i = 0; i < _bones.Count; ++i)
{
if ((BoneId)_bones[i].ParentBoneIndex == BoneId.Invalid)
{
_bones[i].Transform.SetParent(_bonesGO.transform, false);
}
else
{
_bones[i].Transform.SetParent(_bones[_bones[i].ParentBoneIndex].Transform, false);
}
}
}
// Snapshots the current local pose of every bone into a parallel "BindPoses" hierarchy,
// mirroring the structure built by InitializeBones. Must run after InitializeBones.
private void InitializeBindPose()
{
if (!_bindPosesGO)
{
_bindPosesGO = new GameObject("BindPoses");
_bindPosesGO.transform.SetParent(transform, false);
_bindPosesGO.transform.localPosition = Vector3.zero;
_bindPosesGO.transform.localRotation = Quaternion.identity;
}
// Keep the bind-pose list the same length as the bone list; reuse entries otherwise.
if (_bindPoses == null || _bindPoses.Count != _bones.Count)
{
_bindPoses = new List<OVRBone>(new OVRBone[_bones.Count]);
BindPoses = _bindPoses.AsReadOnly();
}
// pre-populate bones list before attempting to apply bone hierarchy
for (int i = 0; i < _bindPoses.Count; ++i)
{
OVRBone bone = _bones[i];
OVRBone bindPoseBone = _bindPoses[i] ?? (_bindPoses[i] = new OVRBone());
bindPoseBone.Id = bone.Id;
bindPoseBone.ParentBoneIndex = bone.ParentBoneIndex;
Transform trans = bindPoseBone.Transform ?? (bindPoseBone.Transform =
new GameObject(BoneLabelFromBoneId(_skeletonType, bindPoseBone.Id)).transform);
// Copy the bone's local pose; at this point it still holds the skeleton's bind data.
trans.localPosition = bone.Transform.localPosition;
trans.localRotation = bone.Transform.localRotation;
}
// Second pass: parent the bind-pose transforms exactly like the live bone hierarchy.
for (int i = 0; i < _bindPoses.Count; ++i)
{
if ((BoneId)_bindPoses[i].ParentBoneIndex == BoneId.Invalid)
{
_bindPoses[i].Transform.SetParent(_bindPosesGO.transform, false);
}
else
{
_bindPoses[i].Transform.SetParent(_bindPoses[_bindPoses[i].ParentBoneIndex].Transform, false);
}
}
}
// When physics capsules are enabled, builds one kinematic Rigidbody + CapsuleCollider pair per
// skeleton bone capsule under a "Capsules" root. Must run after InitializeBones.
private void InitializeCapsules()
{
// Hand skeletons are converted with the flipped-X helpers; all others with flipped-Z.
bool flipX = (_skeletonType == SkeletonType.HandLeft || _skeletonType == SkeletonType.HandRight);
if (_enablePhysicsCapsules)
{
if (!_capsulesGO)
{
_capsulesGO = new GameObject("Capsules");
_capsulesGO.transform.SetParent(transform, false);
_capsulesGO.transform.localPosition = Vector3.zero;
_capsulesGO.transform.localRotation = Quaternion.identity;
}
if (_capsules == null || _capsules.Count != _skeleton.NumBoneCapsules)
{
_capsules = new List<OVRBoneCapsule>(new OVRBoneCapsule[_skeleton.NumBoneCapsules]);
Capsules = _capsules.AsReadOnly();
}
for (int i = 0; i < _capsules.Count; ++i)
{
OVRBone bone = _bones[_skeleton.BoneCapsules[i].BoneIndex];
OVRBoneCapsule capsule = _capsules[i] ?? (_capsules[i] = new OVRBoneCapsule());
capsule.BoneIndex = _skeleton.BoneCapsules[i].BoneIndex;
// Kinematic, gravity-free body: it is driven from bone poses in FixedUpdate, not by physics.
if (capsule.CapsuleRigidbody == null)
{
capsule.CapsuleRigidbody = new GameObject(BoneLabelFromBoneId(_skeletonType, bone.Id) + "_CapsuleRigidbody").AddComponent<Rigidbody>();
capsule.CapsuleRigidbody.mass = 1.0f;
capsule.CapsuleRigidbody.isKinematic = true;
capsule.CapsuleRigidbody.useGravity = false;
capsule.CapsuleRigidbody.collisionDetectionMode = CollisionDetectionMode.ContinuousSpeculative;
}
GameObject rbGO = capsule.CapsuleRigidbody.gameObject;
rbGO.transform.SetParent(_capsulesGO.transform, false);
rbGO.transform.position = bone.Transform.position;
rbGO.transform.rotation = bone.Transform.rotation;
if (capsule.CapsuleCollider == null)
{
capsule.CapsuleCollider = new GameObject(BoneLabelFromBoneId(_skeletonType, bone.Id) + "_CapsuleCollider").AddComponent<CapsuleCollider>();
capsule.CapsuleCollider.isTrigger = false;
}
// Orient the collider along its local X axis (direction = 0), spanning start to end point,
// with the height padded by one radius at each end so the hemispherical caps cover the tips.
var p0 = flipX ? _skeleton.BoneCapsules[i].StartPoint.FromFlippedXVector3f() : _skeleton.BoneCapsules[i].StartPoint.FromFlippedZVector3f();
var p1 = flipX ? _skeleton.BoneCapsules[i].EndPoint.FromFlippedXVector3f() : _skeleton.BoneCapsules[i].EndPoint.FromFlippedZVector3f();
var delta = p1 - p0;
var mag = delta.magnitude;
var rot = Quaternion.FromToRotation(Vector3.right, delta);
capsule.CapsuleCollider.radius = _skeleton.BoneCapsules[i].Radius;
capsule.CapsuleCollider.height = mag + _skeleton.BoneCapsules[i].Radius * 2.0f;
capsule.CapsuleCollider.direction = 0;
capsule.CapsuleCollider.center = Vector3.right * mag * 0.5f;
GameObject ccGO = capsule.CapsuleCollider.gameObject;
ccGO.transform.SetParent(rbGO.transform, false);
ccGO.transform.localPosition = p0;
ccGO.transform.localRotation = rot;
}
}
}
// Pulls the latest skeleton pose from the data provider and applies it to the bone transforms.
// Also re-initializes when the provider reports a changed skeleton definition.
private void Update()
{
#if UNITY_EDITOR
// In the editor, keep retrying initialization (hand tracking may connect late).
if (ShouldInitialize())
{
Initialize();
}
#endif
if (!IsInitialized || _dataProvider == null)
{
IsDataValid = false;
IsDataHighConfidence = false;
return;
}
var data = _dataProvider.GetSkeletonPoseData();
IsDataValid = data.IsDataValid;
if (data.IsDataValid)
{
// The provider's skeleton definition changed: rebuild bones/bind poses/capsules in place.
if (SkeletonChangedCount != data.SkeletonChangedCount)
{
SkeletonChangedCount = data.SkeletonChangedCount;
IsInitialized = false;
Initialize();
}
IsDataHighConfidence = data.IsDataHighConfidence;
if (_updateRootPose)
{
transform.localPosition = data.RootPose.Position.FromFlippedZVector3f();
transform.localRotation = data.RootPose.Orientation.FromFlippedZQuatf();
}
if (_updateRootScale)
{
transform.localScale = new Vector3(data.RootScale, data.RootScale, data.RootScale);
}
// Apply per-bone rotations; hands use the flipped-X conversion, other skeletons flipped-Z.
// NOTE(review): assumes data.BoneRotations has at least _bones.Count entries — not checked here.
for (var i = 0; i < _bones.Count; ++i)
{
if (_bones[i].Transform != null)
{
if (_skeletonType == SkeletonType.HandLeft || _skeletonType == SkeletonType.HandRight)
{
_bones[i].Transform.localRotation = data.BoneRotations[i].FromFlippedXQuatf();
// The wrist root gets an extra fixup rotation on top of the tracked rotation.
if (_bones[i].Id == BoneId.Hand_WristRoot)
{
_bones[i].Transform.localRotation *= wristFixupRotation;
}
}
else
{
_bones[i].Transform.localRotation = data.BoneRotations[i].FromFlippedZQuatf();
}
}
}
}
}
// Physics-step update: refreshes bone poses, then drives the kinematic capsule rigidbodies to
// follow their bones; capsules are hidden while tracking data is invalid or low-confidence.
private void FixedUpdate()
{
if (!IsInitialized || _dataProvider == null)
{
IsDataValid = false;
IsDataHighConfidence = false;
return;
}
// Explicitly re-runs the Update logic here — presumably so bone transforms are current
// within the physics step before the rigidbodies are moved. TODO(review): confirm intent.
Update();
if (_enablePhysicsCapsules)
{
var data = _dataProvider.GetSkeletonPoseData();
IsDataValid = data.IsDataValid;
IsDataHighConfidence = data.IsDataHighConfidence;
for (int i = 0; i < _capsules.Count; ++i)
{
OVRBoneCapsule capsule = _capsules[i];
var capsuleGO = capsule.CapsuleRigidbody.gameObject;
if (data.IsDataValid && data.IsDataHighConfidence)
{
Transform bone = _bones[(int)capsule.BoneIndex].Transform;
if (capsuleGO.activeSelf)
{
// Active capsule: move via the physics API so interpolation/collision work as expected.
capsule.CapsuleRigidbody.MovePosition(bone.position);
capsule.CapsuleRigidbody.MoveRotation(bone.rotation);
}
else
{
// Re-activating: teleport directly so the body doesn't sweep from its stale pose.
capsuleGO.SetActive(true);
capsule.CapsuleRigidbody.position = bone.position;
capsule.CapsuleRigidbody.rotation = bone.rotation;
}
}
else
{
// No reliable tracking: disable the capsule instead of leaving it at a stale pose.
if (capsuleGO.activeSelf)
{
capsuleGO.SetActive(false);
}
}
}
}
}
// First bone id of the active skeleton's range, or Invalid when no skeleton is selected.
public BoneId GetCurrentStartBoneId()
{
    bool isHand = _skeletonType == SkeletonType.HandLeft || _skeletonType == SkeletonType.HandRight;
    return isHand ? BoneId.Hand_Start : BoneId.Invalid;
}
// One-past-the-last bone id of the active skeleton's range, or Invalid when none is selected.
public BoneId GetCurrentEndBoneId()
{
    bool isHand = _skeletonType == SkeletonType.HandLeft || _skeletonType == SkeletonType.HandRight;
    return isHand ? BoneId.Hand_End : BoneId.Invalid;
}
// Upper bound of the skinnable bone range for the active skeleton, or Invalid when none is selected.
private BoneId GetCurrentMaxSkinnableBoneId()
{
    bool isHand = _skeletonType == SkeletonType.HandLeft || _skeletonType == SkeletonType.HandRight;
    return isHand ? BoneId.Hand_MaxSkinnable : BoneId.Invalid;
}
// Total bone count of the active skeleton (end minus start id), or 0 when none is selected.
public int GetCurrentNumBones()
{
    if (_skeletonType != SkeletonType.HandLeft && _skeletonType != SkeletonType.HandRight)
    {
        return 0;
    }
    return GetCurrentEndBoneId() - GetCurrentStartBoneId();
}
// Skinnable bone count of the active skeleton, or 0 when none is selected.
public int GetCurrentNumSkinnableBones()
{
    if (_skeletonType != SkeletonType.HandLeft && _skeletonType != SkeletonType.HandRight)
    {
        return 0;
    }
    return GetCurrentMaxSkinnableBoneId() - GetCurrentStartBoneId();
}
// force aliased enum values to the more appropriate value
// Maps a bone id to a stable display/GameObject name. An explicit switch is required here
// because BoneId contains aliased values (e.g. range markers sharing numeric values with real
// bones), so Enum.ToString() would not reliably pick the intended name.
public static string BoneLabelFromBoneId(OVRSkeleton.SkeletonType skeletonType, BoneId boneId)
{
if (skeletonType == OVRSkeleton.SkeletonType.HandLeft || skeletonType == OVRSkeleton.SkeletonType.HandRight)
{
switch (boneId)
{
case OVRSkeleton.BoneId.Hand_WristRoot:
return "Hand_WristRoot";
case OVRSkeleton.BoneId.Hand_ForearmStub:
return "Hand_ForearmStub";
case OVRSkeleton.BoneId.Hand_Thumb0:
return "Hand_Thumb0";
case OVRSkeleton.BoneId.Hand_Thumb1:
return "Hand_Thumb1";
case OVRSkeleton.BoneId.Hand_Thumb2:
return "Hand_Thumb2";
case OVRSkeleton.BoneId.Hand_Thumb3:
return "Hand_Thumb3";
case OVRSkeleton.BoneId.Hand_Index1:
return "Hand_Index1";
case OVRSkeleton.BoneId.Hand_Index2:
return "Hand_Index2";
case OVRSkeleton.BoneId.Hand_Index3:
return "Hand_Index3";
case OVRSkeleton.BoneId.Hand_Middle1:
return "Hand_Middle1";
case OVRSkeleton.BoneId.Hand_Middle2:
return "Hand_Middle2";
case OVRSkeleton.BoneId.Hand_Middle3:
return "Hand_Middle3";
case OVRSkeleton.BoneId.Hand_Ring1:
return "Hand_Ring1";
case OVRSkeleton.BoneId.Hand_Ring2:
return "Hand_Ring2";
case OVRSkeleton.BoneId.Hand_Ring3:
return "Hand_Ring3";
case OVRSkeleton.BoneId.Hand_Pinky0:
return "Hand_Pinky0";
case OVRSkeleton.BoneId.Hand_Pinky1:
return "Hand_Pinky1";
case OVRSkeleton.BoneId.Hand_Pinky2:
return "Hand_Pinky2";
case OVRSkeleton.BoneId.Hand_Pinky3:
return "Hand_Pinky3";
case OVRSkeleton.BoneId.Hand_ThumbTip:
return "Hand_ThumbTip";
case OVRSkeleton.BoneId.Hand_IndexTip:
return "Hand_IndexTip";
case OVRSkeleton.BoneId.Hand_MiddleTip:
return "Hand_MiddleTip";
case OVRSkeleton.BoneId.Hand_RingTip:
return "Hand_RingTip";
case OVRSkeleton.BoneId.Hand_PinkyTip:
return "Hand_PinkyTip";
default:
return "Hand_Unknown";
}
}
else
{
return "Skeleton_Unknown";
}
}
}
/// <summary>
/// One bone of an OVRSkeleton: its id, its parent's index in the bone list, and the
/// Transform that tracking drives each frame.
/// </summary>
public class OVRBone
{
    public OVRSkeleton.BoneId Id { get; set; }
    public short ParentBoneIndex { get; set; }
    public Transform Transform { get; set; }

    /// <summary>Creates an empty bone; its fields are filled in later (see OVRSkeleton.InitializeBones).</summary>
    public OVRBone() { }

    /// <summary>Creates a fully specified bone.</summary>
    public OVRBone(OVRSkeleton.BoneId id, short parentBoneIndex, Transform trans)
    {
        Transform = trans;
        ParentBoneIndex = parentBoneIndex;
        Id = id;
    }
}
/// <summary>
/// The physics representation of one skeleton bone capsule: the index of the bone it follows
/// plus the kinematic Rigidbody and CapsuleCollider that OVRSkeleton drives in FixedUpdate.
/// </summary>
public class OVRBoneCapsule
{
    public short BoneIndex { get; set; }
    public Rigidbody CapsuleRigidbody { get; set; }
    public CapsuleCollider CapsuleCollider { get; set; }

    /// <summary>Creates an empty capsule; its fields are filled in later (see OVRSkeleton.InitializeCapsules).</summary>
    public OVRBoneCapsule() { }

    /// <summary>Creates a fully specified capsule.</summary>
    public OVRBoneCapsule(short boneIndex, Rigidbody capsuleRigidBody, CapsuleCollider capsuleCollider)
    {
        CapsuleCollider = capsuleCollider;
        CapsuleRigidbody = capsuleRigidBody;
        BoneIndex = boneIndex;
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 2609c54f376cffc4da1ab9401cc1a36f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,369 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Debug/visualization component: draws an OVRSkeleton as line-rendered bones and (optionally)
// capsule primitives, toggling visibility on tracking confidence and swapping to a dedicated
// material while the system gesture is active. Runs early (-70) so visuals update before
// default-order scripts read them.
[DefaultExecutionOrder(-70)]
public class OVRSkeletonRenderer : MonoBehaviour
{
// Supplies per-frame validity/confidence/scale/gesture state for rendering.
public interface IOVRSkeletonRendererDataProvider
{
SkeletonRendererData GetSkeletonRendererData();
}
public struct SkeletonRendererData
{
public float RootScale { get; set; }
public bool IsDataValid { get; set; }
public bool IsDataHighConfidence { get; set; }
public bool ShouldUseSystemGestureMaterial { get; set; }
}
// How low tracking confidence affects the visuals (hide vs. ignore).
public enum ConfidenceBehavior
{
None,
ToggleRenderer,
}
// How the system-gesture state affects the visuals (swap material vs. ignore).
public enum SystemGestureBehavior
{
None,
SwapMaterial,
}
[SerializeField]
private IOVRSkeletonRendererDataProvider _dataProvider;
[SerializeField]
private ConfidenceBehavior _confidenceBehavior = ConfidenceBehavior.ToggleRenderer;
[SerializeField]
private SystemGestureBehavior _systemGestureBehavior = SystemGestureBehavior.SwapMaterial;
[SerializeField]
private bool _renderPhysicsCapsules = false;
// Each *_Default material is created in Initialize when no material was assigned, and is the
// only one destroyed in OnDestroy (user-assigned materials are never destroyed here).
[SerializeField]
private Material _skeletonMaterial;
private Material _skeletonDefaultMaterial;
[SerializeField]
private Material _capsuleMaterial;
private Material _capsuleDefaultMaterial;
[SerializeField]
private Material _systemGestureMaterial = null;
private Material _systemGestureDefaultMaterial;
// Base width of the bone lines, scaled by the skeleton's root scale each frame.
private const float LINE_RENDERER_WIDTH = 0.005f;
private List<BoneVisualization> _boneVisualizations;
private List<CapsuleVisualization> _capsuleVisualizations;
private OVRSkeleton _ovrSkeleton;
private GameObject _skeletonGO;
private float _scale;
// Unity's capsule primitive is Y-aligned; this 90-degree Z offset aligns it with the
// X-aligned bone capsule colliders (see OVRSkeleton.InitializeCapsules).
private static readonly Quaternion _capsuleRotationOffset = Quaternion.Euler(0, 0, 90);
public bool IsInitialized { get; private set; }
public bool IsDataValid { get; private set; }
public bool IsDataHighConfidence { get; private set; }
public bool ShouldUseSystemGestureMaterial { get; private set; }
// One line renderer spanning a bone transform and its parent.
private class BoneVisualization
{
private GameObject BoneGO;
private Transform BoneBegin;
private Transform BoneEnd;
private LineRenderer Line;
private Material RenderMaterial;
private Material SystemGestureMaterial;
public BoneVisualization(GameObject rootGO,
Material renderMat,
Material systemGestureMat,
float scale,
Transform begin,
Transform end)
{
RenderMaterial = renderMat;
SystemGestureMaterial = systemGestureMat;
BoneBegin = begin;
BoneEnd = end;
BoneGO = new GameObject(begin.name);
BoneGO.transform.SetParent(rootGO.transform, false);
Line = BoneGO.AddComponent<LineRenderer>();
Line.sharedMaterial = RenderMaterial;
// World-space positions: the line follows the bone transforms directly each frame.
Line.useWorldSpace = true;
Line.positionCount = 2;
Line.SetPosition(0, BoneBegin.position);
Line.SetPosition(1, BoneEnd.position);
Line.startWidth = LINE_RENDERER_WIDTH * scale;
Line.endWidth = LINE_RENDERER_WIDTH * scale;
}
// Re-syncs the line to the bone endpoints and applies confidence/gesture behavior.
public void Update(float scale,
bool shouldRender,
bool shouldUseSystemGestureMaterial,
ConfidenceBehavior confidenceBehavior,
SystemGestureBehavior systemGestureBehavior)
{
Line.SetPosition(0, BoneBegin.position);
Line.SetPosition(1, BoneEnd.position);
Line.startWidth = LINE_RENDERER_WIDTH * scale;
Line.endWidth = LINE_RENDERER_WIDTH * scale;
if (confidenceBehavior == ConfidenceBehavior.ToggleRenderer)
{
Line.enabled = shouldRender;
}
// Swap sharedMaterial only when it actually changes, to avoid redundant assignments.
if (systemGestureBehavior == SystemGestureBehavior.SwapMaterial)
{
if (shouldUseSystemGestureMaterial && Line.sharedMaterial != SystemGestureMaterial)
{
Line.sharedMaterial = SystemGestureMaterial;
}
else if (!shouldUseSystemGestureMaterial && Line.sharedMaterial != RenderMaterial)
{
Line.sharedMaterial = RenderMaterial;
}
}
}
}
// One capsule primitive mirroring a physics bone capsule's pose and dimensions.
private class CapsuleVisualization
{
private GameObject CapsuleGO;
private OVRBoneCapsule BoneCapsule;
private Vector3 capsuleScale;
private MeshRenderer Renderer;
private Material RenderMaterial;
private Material SystemGestureMaterial;
public CapsuleVisualization(GameObject rootGO,
Material renderMat,
Material systemGestureMat,
float scale,
OVRBoneCapsule boneCapsule)
{
RenderMaterial = renderMat;
SystemGestureMaterial = systemGestureMat;
BoneCapsule = boneCapsule;
CapsuleGO = GameObject.CreatePrimitive(PrimitiveType.Capsule);
// The primitive's own collider is unwanted — the physics capsule lives on OVRSkeleton.
CapsuleCollider collider = CapsuleGO.GetComponent<CapsuleCollider>();
Destroy(collider);
Renderer = CapsuleGO.GetComponent<MeshRenderer>();
Renderer.sharedMaterial = RenderMaterial;
// Unity's capsule primitive is 2 units tall / 1 unit wide, hence height/2 and radius*2.
capsuleScale = Vector3.one;
capsuleScale.y = boneCapsule.CapsuleCollider.height / 2;
capsuleScale.x = boneCapsule.CapsuleCollider.radius * 2;
capsuleScale.z = boneCapsule.CapsuleCollider.radius * 2;
CapsuleGO.transform.localScale = capsuleScale * scale;
}
// Re-syncs the primitive to the physics capsule and applies confidence/gesture behavior.
public void Update(float scale,
bool shouldRender,
bool shouldUseSystemGestureMaterial,
ConfidenceBehavior confidenceBehavior,
SystemGestureBehavior systemGestureBehavior)
{
if (confidenceBehavior == ConfidenceBehavior.ToggleRenderer)
{
if (CapsuleGO.activeSelf != shouldRender)
{
CapsuleGO.SetActive(shouldRender);
}
}
CapsuleGO.transform.rotation = BoneCapsule.CapsuleCollider.transform.rotation * _capsuleRotationOffset;
CapsuleGO.transform.position = BoneCapsule.CapsuleCollider.transform.TransformPoint(BoneCapsule.CapsuleCollider.center);
CapsuleGO.transform.localScale = capsuleScale * scale;
if (systemGestureBehavior == SystemGestureBehavior.SwapMaterial)
{
if (shouldUseSystemGestureMaterial && Renderer.sharedMaterial != SystemGestureMaterial)
{
Renderer.sharedMaterial = SystemGestureMaterial;
}
else if (!shouldUseSystemGestureMaterial && Renderer.sharedMaterial != RenderMaterial)
{
Renderer.sharedMaterial = RenderMaterial;
}
}
}
}
// Resolve the data provider and skeleton from this GameObject when not assigned.
private void Awake()
{
if (_dataProvider == null)
{
_dataProvider = GetComponent<IOVRSkeletonRendererDataProvider>();
}
if (_ovrSkeleton == null)
{
_ovrSkeleton = GetComponent<OVRSkeleton>();
}
}
// Disables itself without a skeleton; otherwise tries to build the visualization.
private void Start()
{
if (_ovrSkeleton == null)
{
this.enabled = false;
return;
}
if (ShouldInitialize())
{
Initialize();
}
}
// Initialization waits until the underlying OVRSkeleton has built its bones.
private bool ShouldInitialize()
{
if (IsInitialized)
{
return false;
}
return _ovrSkeleton.IsInitialized;
}
// Creates the render hierarchy: default materials where needed, then one visualization
// per bone and (optionally) per physics capsule.
private void Initialize()
{
_boneVisualizations = new List<BoneVisualization>();
_capsuleVisualizations = new List<CapsuleVisualization>();
_ovrSkeleton = GetComponent<OVRSkeleton>();
_skeletonGO = new GameObject("SkeletonRenderer");
_skeletonGO.transform.SetParent(transform, false);
if (_skeletonMaterial == null)
{
_skeletonDefaultMaterial = new Material(Shader.Find("Diffuse"));
_skeletonMaterial = _skeletonDefaultMaterial;
}
if (_capsuleMaterial == null)
{
_capsuleDefaultMaterial = new Material(Shader.Find("Diffuse"));
_capsuleMaterial = _capsuleDefaultMaterial;
}
if (_systemGestureMaterial == null)
{
_systemGestureDefaultMaterial = new Material(Shader.Find("Diffuse"));
_systemGestureDefaultMaterial.color = Color.blue;
_systemGestureMaterial = _systemGestureDefaultMaterial;
}
if (_ovrSkeleton.IsInitialized)
{
// Each bone line runs from a bone transform to its parent transform.
for (int i = 0; i < _ovrSkeleton.Bones.Count; i++)
{
var boneVis = new BoneVisualization(
_skeletonGO,
_skeletonMaterial,
_systemGestureMaterial,
_scale,
_ovrSkeleton.Bones[i].Transform,
_ovrSkeleton.Bones[i].Transform.parent);
_boneVisualizations.Add(boneVis);
}
if (_renderPhysicsCapsules && _ovrSkeleton.Capsules != null)
{
for (int i = 0; i < _ovrSkeleton.Capsules.Count; i++)
{
var capsuleVis = new CapsuleVisualization(
_skeletonGO,
_capsuleMaterial,
_systemGestureMaterial,
_scale,
_ovrSkeleton.Capsules[i]);
_capsuleVisualizations.Add(capsuleVis);
}
}
IsInitialized = true;
}
}
// Pulls the renderer data each frame and forwards it to every visualization.
public void Update()
{
#if UNITY_EDITOR
// In the editor, keep retrying until the skeleton finishes initializing.
if (ShouldInitialize())
{
Initialize();
}
#endif
IsDataValid = false;
IsDataHighConfidence = false;
ShouldUseSystemGestureMaterial = false;
if (IsInitialized)
{
bool shouldRender = false;
if (_dataProvider != null)
{
var data = _dataProvider.GetSkeletonRendererData();
IsDataValid = data.IsDataValid;
IsDataHighConfidence = data.IsDataHighConfidence;
ShouldUseSystemGestureMaterial = data.ShouldUseSystemGestureMaterial;
// Only render while tracking is both valid and high-confidence.
shouldRender = data.IsDataValid && data.IsDataHighConfidence;
if (data.IsDataValid)
{
_scale = data.RootScale;
}
}
for (int i = 0; i < _boneVisualizations.Count; i++)
{
_boneVisualizations[i].Update(_scale, shouldRender, ShouldUseSystemGestureMaterial, _confidenceBehavior, _systemGestureBehavior);
}
for (int i = 0; i < _capsuleVisualizations.Count; i++)
{
_capsuleVisualizations[i].Update(_scale, shouldRender, ShouldUseSystemGestureMaterial, _confidenceBehavior, _systemGestureBehavior);
}
}
}
// Destroys only the materials this component created; assigned materials are left alone.
private void OnDestroy()
{
if (_skeletonDefaultMaterial != null)
{
DestroyImmediate(_skeletonDefaultMaterial, false);
}
if (_capsuleDefaultMaterial != null)
{
DestroyImmediate(_capsuleDefaultMaterial, false);
}
if (_systemGestureDefaultMaterial != null)
{
DestroyImmediate(_systemGestureDefaultMaterial, false);
}
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 54c16b381e28e8d479237771d234dbae
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,290 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if UNITY_ANDROID && !UNITY_EDITOR
#define OVR_ANDROID_MRC
#endif
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// Test scene component for Mixed Reality Capture spectator mode: registers a manual external
// camera with OVRPlugin and, on Quest (OVR_ANDROID_MRC), lets the user orbit a spectator
// camera on a dome around an anchor using the thumbsticks, toggling spectator mode with the
// index triggers.
public class OVRSpectatorModeDomeTest : MonoBehaviour {
// Set once the external camera has been registered with OVRPlugin.
bool inited = false;
public Camera defaultExternalCamera;
OVRPlugin.Fovf defaultFov;
public Transform SpectatorAnchor;
public Transform Head;
#if OVR_ANDROID_MRC
private OVRPlugin.Media.PlatformCameraMode camMode = OVRPlugin.Media.PlatformCameraMode.Disabled;
// Debounce flag: cleared while TimerCoroutine's 2-second cooldown is running.
private bool readyToSwitch = false;
private Transform SpectatorCamera;
// Dome sphere representation
// Spherical coordinates of the spectator camera relative to SpectatorAnchor.
private float distance = 0.8f;
private float elevation = 0.0f;
private float polar = 90.0f;
private const float distance_near = 0.5f;
private const float distance_far = 1.2f;
private const float elevationLimit = 30.0f;
#endif
// On Quest, mark the platform as initialized and cache the spectator camera rig
// (the parent of the external camera).
void Awake()
{
#if OVR_ANDROID_MRC
OVRPlugin.Media.SetPlatformInitialized();
SpectatorCamera = defaultExternalCamera.transform.parent;
#endif
}
// Use this for initialization
void Start ()
{
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
if (!defaultExternalCamera)
{
Debug.LogWarning("defaultExternalCamera undefined");
}
#if !OVR_ANDROID_MRC
// On Quest, we enable MRC automatically through the configuration
if (!OVRManager.instance.enableMixedReality)
{
OVRManager.instance.enableMixedReality = true;
}
#endif
#endif
}
// One-time registration of the manual external camera; retried from Update until the
// MRC/mixed-reality subsystem reports it is initialized.
void Initialize()
{
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
if (inited)
return;
#if OVR_ANDROID_MRC
if (!OVRPlugin.Media.GetInitialized())
return;
#else
if (!OVRPlugin.IsMixedRealityInitialized())
return;
#endif
OVRPlugin.ResetDefaultExternalCamera();
Debug.LogFormat("GetExternalCameraCount before adding manual external camera {0}", OVRPlugin.GetExternalCameraCount());
UpdateDefaultExternalCamera();
Debug.LogFormat("GetExternalCameraCount after adding manual external camera {0}", OVRPlugin.GetExternalCameraCount());
// obtain default FOV
{
OVRPlugin.CameraIntrinsics cameraIntrinsics;
OVRPlugin.CameraExtrinsics cameraExtrinsics;
OVRPlugin.GetMixedRealityCameraInfo(0, out cameraExtrinsics, out cameraIntrinsics);
defaultFov = cameraIntrinsics.FOVPort;
}
inited = true;
#if OVR_ANDROID_MRC
readyToSwitch = true;
#endif
#endif
}
// Pushes the Unity camera's current FOV/clip planes and tracking-space-relative pose to
// OVRPlugin as the default external camera (re-sent every frame from Update).
void UpdateDefaultExternalCamera()
{
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
// always build a 1080p external camera
const int cameraPixelWidth = 1920;
const int cameraPixelHeight = 1080;
const float cameraAspect = (float)cameraPixelWidth / cameraPixelHeight;
string cameraName = "UnityExternalCamera";
OVRPlugin.CameraIntrinsics cameraIntrinsics = new OVRPlugin.CameraIntrinsics();
OVRPlugin.CameraExtrinsics cameraExtrinsics = new OVRPlugin.CameraExtrinsics();
// intrinsics
cameraIntrinsics.IsValid = OVRPlugin.Bool.True;
cameraIntrinsics.LastChangedTimeSeconds = Time.time;
// Derive the horizontal FOV from the camera's vertical FOV and the fixed 16:9 aspect.
float vFov = defaultExternalCamera.fieldOfView * Mathf.Deg2Rad;
float hFov = Mathf.Atan(Mathf.Tan(vFov * 0.5f) * cameraAspect) * 2.0f;
OVRPlugin.Fovf fov = new OVRPlugin.Fovf();
fov.UpTan = fov.DownTan = Mathf.Tan(vFov * 0.5f);
fov.LeftTan = fov.RightTan = Mathf.Tan(hFov * 0.5f);
cameraIntrinsics.FOVPort = fov;
cameraIntrinsics.VirtualNearPlaneDistanceMeters = defaultExternalCamera.nearClipPlane;
cameraIntrinsics.VirtualFarPlaneDistanceMeters = defaultExternalCamera.farClipPlane;
cameraIntrinsics.ImageSensorPixelResolution.w = cameraPixelWidth;
cameraIntrinsics.ImageSensorPixelResolution.h = cameraPixelHeight;
// extrinsics
cameraExtrinsics.IsValid = OVRPlugin.Bool.True;
cameraExtrinsics.LastChangedTimeSeconds = Time.time;
cameraExtrinsics.CameraStatusData = OVRPlugin.CameraStatus.CameraStatus_Calibrated;
cameraExtrinsics.AttachedToNode = OVRPlugin.Node.None;
Camera mainCamera = Camera.main;
OVRCameraRig cameraRig = mainCamera.GetComponentInParent<OVRCameraRig>();
if (cameraRig)
{
// Express the external camera's pose relative to the rig's tracking space.
Transform trackingSpace = cameraRig.trackingSpace;
OVRPose trackingSpacePose = trackingSpace.ToOVRPose(false);
OVRPose cameraPose = defaultExternalCamera.transform.ToOVRPose(false);
OVRPose relativePose = trackingSpacePose.Inverse() * cameraPose;
#if OVR_ANDROID_MRC
// On Quest, additionally convert from local tracking origin to stage space.
OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
OVRPose localToStagePose = stageToLocalPose.Inverse();
relativePose = localToStagePose * relativePose;
#endif
cameraExtrinsics.RelativePose = relativePose.ToPosef();
}
else
{
cameraExtrinsics.RelativePose = OVRPlugin.Posef.identity;
}
if (!OVRPlugin.SetDefaultExternalCamera(cameraName, ref cameraIntrinsics, ref cameraExtrinsics))
{
Debug.LogError("SetDefaultExternalCamera() failed");
}
#endif
}
// Quest-only per-frame input handling: toggles spectator mode on index-trigger press
// (debounced by TimerCoroutine) and moves the spectator camera on the dome surface.
private void UpdateSpectatorCameraStatus()
{
#if OVR_ANDROID_MRC
// Trigger to switch between 1st person and spectator mode during casting to phone
if (OVRInput.GetDown(OVRInput.Button.PrimaryIndexTrigger) || OVRInput.GetDown(OVRInput.Button.SecondaryIndexTrigger))
{
// camMode holds the mode read before toggling, so only one of the branches below runs.
camMode = OVRPlugin.Media.GetPlatformCameraMode();
if (camMode == OVRPlugin.Media.PlatformCameraMode.Disabled && readyToSwitch)
{
OVRPlugin.Media.SetMrcFrameImageFlipped(false);
OVRPlugin.Media.SetPlatformCameraMode(OVRPlugin.Media.PlatformCameraMode.Initialized);
StartCoroutine(TimerCoroutine());
}
if (camMode == OVRPlugin.Media.PlatformCameraMode.Initialized && readyToSwitch)
{
OVRPlugin.Media.SetMrcFrameImageFlipped(true);
OVRPlugin.Media.SetPlatformCameraMode(OVRPlugin.Media.PlatformCameraMode.Disabled);
StartCoroutine(TimerCoroutine());
}
}
// Keep spectator camera on dome surface
// Right stick orbits (polar/elevation, with a 0.2 deadzone); left stick Y zooms (0.1 deadzone).
Vector2 axis = OVRInput.Get(OVRInput.Axis2D.SecondaryThumbstick);
if (Mathf.Abs(axis.x) > 0.2f)
{
polar = polar - axis.x * 0.5f;
}
if (Mathf.Abs(axis.y) > 0.2f)
{
elevation = elevation + axis.y * 0.5f;
if (elevation < -90.0f + elevationLimit) elevation = -90.0f + elevationLimit;
if (elevation > 90.0f) elevation = 90.0f;
}
axis = OVRInput.Get(OVRInput.Axis2D.PrimaryThumbstick);
if (Mathf.Abs(axis.y) > 0.1f)
{
distance = axis.y * 0.05f + distance;
if (distance > distance_far) distance = distance_far;
if (distance < distance_near) distance = distance_near;
}
// Place the spectator camera on the dome, looking back at the anchor.
SpectatorCamera.position = SpectatorCameraDomePosition(SpectatorAnchor.position, distance, elevation, polar);
SpectatorCamera.rotation = Quaternion.LookRotation(SpectatorCamera.position - SpectatorAnchor.position);
Head.position = SpectatorAnchor.position;
Head.rotation = SpectatorAnchor.rotation;
#endif
}
// Spherical-to-Cartesian conversion: distance d, elevation e and polar angle p (degrees)
// around the anchor position.
Vector3 SpectatorCameraDomePosition(Vector3 spectatorAnchorPosition, float d, float e, float p)
{
float x = d * Mathf.Cos(Mathf.Deg2Rad * e) * Mathf.Cos(Mathf.Deg2Rad * p);
float y = d * Mathf.Sin(Mathf.Deg2Rad * e);
float z = d * Mathf.Cos(Mathf.Deg2Rad * e) * Mathf.Sin(Mathf.Deg2Rad * p);
return new Vector3(x + spectatorAnchorPosition.x, y + spectatorAnchorPosition.y, z + spectatorAnchorPosition.z);
}
// 2-second cooldown between spectator-mode switches (see readyToSwitch).
IEnumerator TimerCoroutine()
{
#if OVR_ANDROID_MRC
readyToSwitch = false;
#endif
yield return new WaitForSeconds(2);
#if OVR_ANDROID_MRC
readyToSwitch = true;
#endif
}
// Update is called once per frame
void Update () {
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || OVR_ANDROID_MRC
if (!inited)
{
Initialize();
return;
}
if (!defaultExternalCamera)
{
return;
}
#if OVR_ANDROID_MRC
if (!OVRPlugin.Media.GetInitialized())
{
return;
}
#else
if (!OVRPlugin.IsMixedRealityInitialized())
{
return;
}
#endif
UpdateSpectatorCameraStatus();
UpdateDefaultExternalCamera();
// Clear any FOV/pose overrides so the manual external camera set above is used as-is.
OVRPlugin.OverrideExternalCameraFov(0, false, new OVRPlugin.Fovf());
OVRPlugin.OverrideExternalCameraStaticPose(0, false, OVRPlugin.Posef.identity);
#endif
}
// Reset spectator mode on pause/quit so the platform camera isn't left enabled.
void OnApplicationPause()
{
#if OVR_ANDROID_MRC
OVRPlugin.Media.SetMrcFrameImageFlipped(true);
OVRPlugin.Media.SetPlatformCameraMode(OVRPlugin.Media.PlatformCameraMode.Disabled);
#endif
}
void OnApplicationQuit()
{
#if OVR_ANDROID_MRC
OVRPlugin.Media.SetMrcFrameImageFlipped(true);
OVRPlugin.Media.SetPlatformCameraMode(OVRPlugin.Media.PlatformCameraMode.Disabled);
#endif
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: c4f8903397a67414fbb142fa1bfacede
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,272 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using UnityEngine;
using UnityEngine.Networking;
using Debug = UnityEngine.Debug;
/// <summary>
/// Defines the wire format and a TCP server for streaming per-frame VR
/// performance metrics (frame timing, compositor stats, system utilization,
/// clock levels) from a headset to a remote tool.
/// </summary>
public class OVRSystemPerfMetrics
{
	// TCP port the metrics server listens on.
	public const int TcpListeningPort = 32419;
	// Payload-type tag attached to each broadcast metrics packet.
	public const int PayloadTypeMetrics = 100;
	// Maximum network buffer size in bytes.
	public const int MaxBufferLength = 65536;
	// Maximum message length, reserving room for a 4-byte length prefix.
	public const int MaxMessageLength = MaxBufferLength - sizeof(int);

	/// <summary>
	/// A snapshot of one frame's performance metrics. Each optional metric is
	/// paired with a *_IsValid flag because OVRPlugin may not report it on all
	/// devices/runtimes; invalid metrics are omitted from the JSON payload.
	/// </summary>
	public class PerfMetrics
	{
		public int frameCount;
		public float frameTime;
		public float deltaFrameTime;

		public bool appCpuTime_IsValid;
		public float appCpuTime;
		public bool appGpuTime_IsValid;
		public float appGpuTime;
		public bool compositorCpuTime_IsValid;
		public float compositorCpuTime;
		public bool compositorGpuTime_IsValid;
		public float compositorGpuTime;
		public bool compositorDroppedFrameCount_IsValid;
		public int compositorDroppedFrameCount;
		public bool systemGpuUtilPercentage_IsValid;
		public float systemGpuUtilPercentage;
		public bool systemCpuUtilAveragePercentage_IsValid;
		public float systemCpuUtilAveragePercentage;
		public bool systemCpuUtilWorstPercentage_IsValid;
		public float systemCpuUtilWorstPercentage;
		public bool deviceCpuClockFrequencyInMHz_IsValid;
		public float deviceCpuClockFrequencyInMHz;
		public bool deviceGpuClockFrequencyInMHz_IsValid;
		public float deviceGpuClockFrequencyInMHz;
		public bool deviceCpuClockLevel_IsValid;
		public int deviceCpuClockLevel;
		public bool deviceGpuClockLevel_IsValid;
		public int deviceGpuClockLevel;

		/// <summary>
		/// Serializes this snapshot to a JSON object string. Optional metrics
		/// are only emitted when their corresponding *_IsValid flag is set.
		/// </summary>
		public string ToJSON()
		{
			OVRSimpleJSON.JSONObject jsonNode = new OVRSimpleJSON.JSONObject();
			jsonNode.Add("frameCount", new OVRSimpleJSON.JSONNumber(frameCount));
			jsonNode.Add("frameTime", new OVRSimpleJSON.JSONNumber(frameTime));
			jsonNode.Add("deltaFrameTime", new OVRSimpleJSON.JSONNumber(deltaFrameTime));
			if (appCpuTime_IsValid)
			{
				jsonNode.Add("appCpuTime", new OVRSimpleJSON.JSONNumber(appCpuTime));
			}
			if (appGpuTime_IsValid)
			{
				jsonNode.Add("appGpuTime", new OVRSimpleJSON.JSONNumber(appGpuTime));
			}
			if (compositorCpuTime_IsValid)
			{
				jsonNode.Add("compositorCpuTime", new OVRSimpleJSON.JSONNumber(compositorCpuTime));
			}
			if (compositorGpuTime_IsValid)
			{
				jsonNode.Add("compositorGpuTime", new OVRSimpleJSON.JSONNumber(compositorGpuTime));
			}
			if (compositorDroppedFrameCount_IsValid)
			{
				jsonNode.Add("compositorDroppedFrameCount", new OVRSimpleJSON.JSONNumber(compositorDroppedFrameCount));
			}
			if (systemGpuUtilPercentage_IsValid)
			{
				jsonNode.Add("systemGpuUtilPercentage", new OVRSimpleJSON.JSONNumber(systemGpuUtilPercentage));
			}
			if (systemCpuUtilAveragePercentage_IsValid)
			{
				jsonNode.Add("systemCpuUtilAveragePercentage", new OVRSimpleJSON.JSONNumber(systemCpuUtilAveragePercentage));
			}
			if (systemCpuUtilWorstPercentage_IsValid)
			{
				jsonNode.Add("systemCpuUtilWorstPercentage", new OVRSimpleJSON.JSONNumber(systemCpuUtilWorstPercentage));
			}
			if (deviceCpuClockFrequencyInMHz_IsValid)
			{
				jsonNode.Add("deviceCpuClockFrequencyInMHz", new OVRSimpleJSON.JSONNumber(deviceCpuClockFrequencyInMHz));
			}
			if (deviceGpuClockFrequencyInMHz_IsValid)
			{
				jsonNode.Add("deviceGpuClockFrequencyInMHz", new OVRSimpleJSON.JSONNumber(deviceGpuClockFrequencyInMHz));
			}
			if (deviceCpuClockLevel_IsValid)
			{
				jsonNode.Add("deviceCpuClockLevel", new OVRSimpleJSON.JSONNumber(deviceCpuClockLevel));
			}
			if (deviceGpuClockLevel_IsValid)
			{
				jsonNode.Add("deviceGpuClockLevel", new OVRSimpleJSON.JSONNumber(deviceGpuClockLevel));
			}
			string str = jsonNode.ToString();
			return str;
		}

		/// <summary>
		/// Populates this snapshot from a JSON string produced by ToJSON.
		/// Missing keys clear the matching *_IsValid flag and zero the value.
		/// </summary>
		/// <returns>false if the input is not a JSON object; true otherwise.</returns>
		public bool LoadFromJSON(string json)
		{
			OVRSimpleJSON.JSONObject jsonNode = OVRSimpleJSON.JSONObject.Parse(json) as OVRSimpleJSON.JSONObject;
			if (jsonNode == null)
			{
				return false;
			}
			frameCount = jsonNode["frameCount"] != null ? jsonNode["frameCount"].AsInt : 0;
			frameTime = jsonNode["frameTime"] != null ? jsonNode["frameTime"].AsFloat : 0;
			deltaFrameTime = jsonNode["deltaFrameTime"] != null ? jsonNode["deltaFrameTime"].AsFloat : 0;
			appCpuTime_IsValid = jsonNode["appCpuTime"] != null;
			appCpuTime = appCpuTime_IsValid ? jsonNode["appCpuTime"].AsFloat : 0;
			appGpuTime_IsValid = jsonNode["appGpuTime"] != null;
			appGpuTime = appGpuTime_IsValid ? jsonNode["appGpuTime"].AsFloat : 0;
			compositorCpuTime_IsValid = jsonNode["compositorCpuTime"] != null;
			compositorCpuTime = compositorCpuTime_IsValid ? jsonNode["compositorCpuTime"].AsFloat : 0;
			compositorGpuTime_IsValid = jsonNode["compositorGpuTime"] != null;
			compositorGpuTime = compositorGpuTime_IsValid ? jsonNode["compositorGpuTime"].AsFloat : 0;
			compositorDroppedFrameCount_IsValid = jsonNode["compositorDroppedFrameCount"] != null;
			// BUGFIX: the lookup key was misspelled "ompositorDroppedFrameCount",
			// which made this field deserialize to 0 even when the payload carried
			// a valid dropped-frame count.
			compositorDroppedFrameCount = compositorDroppedFrameCount_IsValid ? jsonNode["compositorDroppedFrameCount"].AsInt : 0;
			systemGpuUtilPercentage_IsValid = jsonNode["systemGpuUtilPercentage"] != null;
			systemGpuUtilPercentage = systemGpuUtilPercentage_IsValid ? jsonNode["systemGpuUtilPercentage"].AsFloat : 0;
			systemCpuUtilAveragePercentage_IsValid = jsonNode["systemCpuUtilAveragePercentage"] != null;
			systemCpuUtilAveragePercentage = systemCpuUtilAveragePercentage_IsValid ? jsonNode["systemCpuUtilAveragePercentage"].AsFloat : 0;
			systemCpuUtilWorstPercentage_IsValid = jsonNode["systemCpuUtilWorstPercentage"] != null;
			systemCpuUtilWorstPercentage = systemCpuUtilWorstPercentage_IsValid ? jsonNode["systemCpuUtilWorstPercentage"].AsFloat : 0;
			deviceCpuClockFrequencyInMHz_IsValid = jsonNode["deviceCpuClockFrequencyInMHz"] != null;
			deviceCpuClockFrequencyInMHz = deviceCpuClockFrequencyInMHz_IsValid ? jsonNode["deviceCpuClockFrequencyInMHz"].AsFloat : 0;
			deviceGpuClockFrequencyInMHz_IsValid = jsonNode["deviceGpuClockFrequencyInMHz"] != null;
			deviceGpuClockFrequencyInMHz = deviceGpuClockFrequencyInMHz_IsValid ? jsonNode["deviceGpuClockFrequencyInMHz"].AsFloat : 0;
			deviceCpuClockLevel_IsValid = jsonNode["deviceCpuClockLevel"] != null;
			deviceCpuClockLevel = deviceCpuClockLevel_IsValid ? jsonNode["deviceCpuClockLevel"].AsInt : 0;
			deviceGpuClockLevel_IsValid = jsonNode["deviceGpuClockLevel"] != null;
			deviceGpuClockLevel = deviceGpuClockLevel_IsValid ? jsonNode["deviceGpuClockLevel"].AsInt : 0;
			return true;
		}
	}

	/// <summary>
	/// Singleton MonoBehaviour that gathers a metrics snapshot every frame and
	/// broadcasts it as UTF-8 JSON to all connected TCP clients.
	/// </summary>
	public class OVRSystemPerfMetricsTcpServer : MonoBehaviour
	{
		public static OVRSystemPerfMetricsTcpServer singleton = null;

		private OVRNetwork.OVRNetworkTcpServer tcpServer = new OVRNetwork.OVRNetworkTcpServer();

		public int listeningPort = OVRSystemPerfMetrics.TcpListeningPort;

		void OnEnable()
		{
			// Enforce the singleton: a second enabled instance is a setup error.
			if (singleton != null)
			{
				// BUGFIX: corrected "Mutiple" typo in the error message.
				Debug.LogError("Multiple OVRSystemPerfMetricsTcpServer exists");
				return;
			}
			else
			{
				singleton = this;
			}
			// Keep serving metrics while the editor window is unfocused.
			if (Application.isEditor)
			{
				Application.runInBackground = true;
			}
			tcpServer.StartListening(listeningPort);
		}

		void OnDisable()
		{
			tcpServer.StopListening();
			singleton = null;
			Debug.Log("[OVRSystemPerfMetricsTcpServer] server destroyed");
		}

		private void Update()
		{
			// Only pay the gather/serialize cost when someone is listening.
			if (tcpServer.HasConnectedClient())
			{
				PerfMetrics metrics = GatherPerfMetrics();
				string json = metrics.ToJSON();
				byte[] bytes = Encoding.UTF8.GetBytes(json);
				tcpServer.Broadcast(OVRSystemPerfMetrics.PayloadTypeMetrics, bytes);
			}
		}

		// Queries OVRPlugin for every supported metric; each nullable result's
		// HasValue drives the snapshot's *_IsValid flag.
		PerfMetrics GatherPerfMetrics()
		{
			PerfMetrics metrics = new PerfMetrics();

			metrics.frameCount = Time.frameCount;
			metrics.frameTime = Time.unscaledTime;
			metrics.deltaFrameTime = Time.unscaledDeltaTime;

			float? floatValue;
			int? intValue;

			floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.App_CpuTime_Float);
			metrics.appCpuTime_IsValid = floatValue.HasValue;
			metrics.appCpuTime = floatValue.GetValueOrDefault();

			floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.App_GpuTime_Float);
			metrics.appGpuTime_IsValid = floatValue.HasValue;
			metrics.appGpuTime = floatValue.GetValueOrDefault();

			floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.Compositor_CpuTime_Float);
			metrics.compositorCpuTime_IsValid = floatValue.HasValue;
			metrics.compositorCpuTime = floatValue.GetValueOrDefault();

			floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.Compositor_GpuTime_Float);
			metrics.compositorGpuTime_IsValid = floatValue.HasValue;
			metrics.compositorGpuTime = floatValue.GetValueOrDefault();

			intValue = OVRPlugin.GetPerfMetricsInt(OVRPlugin.PerfMetrics.Compositor_DroppedFrameCount_Int);
			metrics.compositorDroppedFrameCount_IsValid = intValue.HasValue;
			metrics.compositorDroppedFrameCount = intValue.GetValueOrDefault();

			floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.System_GpuUtilPercentage_Float);
			metrics.systemGpuUtilPercentage_IsValid = floatValue.HasValue;
			metrics.systemGpuUtilPercentage = floatValue.GetValueOrDefault();

			floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.System_CpuUtilAveragePercentage_Float);
			metrics.systemCpuUtilAveragePercentage_IsValid = floatValue.HasValue;
			metrics.systemCpuUtilAveragePercentage = floatValue.GetValueOrDefault();

			floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.System_CpuUtilWorstPercentage_Float);
			metrics.systemCpuUtilWorstPercentage_IsValid = floatValue.HasValue;
			metrics.systemCpuUtilWorstPercentage = floatValue.GetValueOrDefault();

			floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.Device_CpuClockFrequencyInMHz_Float);
			metrics.deviceCpuClockFrequencyInMHz_IsValid = floatValue.HasValue;
			metrics.deviceCpuClockFrequencyInMHz = floatValue.GetValueOrDefault();

			floatValue = OVRPlugin.GetPerfMetricsFloat(OVRPlugin.PerfMetrics.Device_GpuClockFrequencyInMHz_Float);
			metrics.deviceGpuClockFrequencyInMHz_IsValid = floatValue.HasValue;
			metrics.deviceGpuClockFrequencyInMHz = floatValue.GetValueOrDefault();

			intValue = OVRPlugin.GetPerfMetricsInt(OVRPlugin.PerfMetrics.Device_CpuClockLevel_Int);
			metrics.deviceCpuClockLevel_IsValid = intValue.HasValue;
			metrics.deviceCpuClockLevel = intValue.GetValueOrDefault();

			intValue = OVRPlugin.GetPerfMetricsInt(OVRPlugin.PerfMetrics.Device_GpuClockLevel_Int);
			metrics.deviceGpuClockLevel_IsValid = intValue.HasValue;
			metrics.deviceGpuClockLevel = intValue.GetValueOrDefault();

			return metrics;
		}
	}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a987a7ebe2d568f459dab8dfb866479f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,444 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering;
/// <summary>
/// A component to apply a Colored vignette effect to the camera
/// </summary>
[RequireComponent(typeof(Camera))]
[ExecuteInEditMode]
public class OVRVignette : MonoBehaviour
{
	/// <summary>
	/// Controls the number of triangles in the vignette mesh.
	/// </summary>
	public enum MeshComplexityLevel
	{
		VerySimple,
		Simple,
		Normal,
		Detailed,
		VeryDetailed
	}

	/// <summary>
	/// Controls the falloff appearance.
	/// </summary>
	public enum FalloffType
	{
		Linear,
		Quadratic
	}

	// Shader keyword that switches the transparent ring to quadratic falloff.
	private static readonly string QUADRATIC_FALLOFF = "QUADRATIC_FALLOFF";

	[SerializeField]
	[HideInInspector]
	private Shader VignetteShader;

	// These are only used at startup.
	[SerializeField]
	[Tooltip("Controls the number of triangles used for the vignette mesh." +
		" Normal is best for most purposes.")]
	private MeshComplexityLevel MeshComplexity = MeshComplexityLevel.Normal;

	[SerializeField]
	[Tooltip("Controls how the falloff looks.")]
	private FalloffType Falloff = FalloffType.Linear;

	// These can be controlled dynamically at runtime
	[Tooltip("The Vertical FOV of the vignette")]
	public float VignetteFieldOfView = 60;

	[Tooltip("The Aspect ratio of the vignette controls the " +
		"Horizontal FOV. (Larger numbers are wider)")]
	public float VignetteAspectRatio = 1f;

	[Tooltip("The width of the falloff for the vignette in degrees")]
	public float VignetteFalloffDegrees = 10f;

	[ColorUsage(false)]
	[Tooltip("The color of the vignette. Alpha value is ignored")]
	public Color VignetteColor;

	private Camera _Camera;
	private MeshFilter _OpaqueMeshFilter;
	private MeshFilter _TransparentMeshFilter;
	private MeshRenderer _OpaqueMeshRenderer;
	private MeshRenderer _TransparentMeshRenderer;
	private Mesh _OpaqueMesh;
	private Mesh _TransparentMesh;
	private Material _OpaqueMaterial;
	private Material _TransparentMaterial;
	private int _ShaderScaleAndOffset0Property;
	private int _ShaderScaleAndOffset1Property;

	// Per-eye (scaleX, scaleY, offsetX, offsetY) values fed to the shader;
	// index 0/1 correspond to the left/right stereo eyes.
	private Vector4[] _TransparentScaleAndOffset0 = new Vector4[2];
	private Vector4[] _TransparentScaleAndOffset1 = new Vector4[2];
	private Vector4[] _OpaqueScaleAndOffset0 = new Vector4[2];
	private Vector4[] _OpaqueScaleAndOffset1 = new Vector4[2];

	private bool _OpaqueVignetteVisible = false;
	private bool _TransparentVignetteVisible = false;

#if UNITY_EDITOR
	// in the editor, allow these to be changed at runtime
	private MeshComplexityLevel _InitialMeshComplexity;
	private FalloffType _InitialFalloff;
#endif

	// Maps the MeshComplexity setting to a vertex/index budget.
	private int GetTriangleCount()
	{
		switch (MeshComplexity)
		{
			case MeshComplexityLevel.VerySimple: return 32;
			case MeshComplexityLevel.Simple: return 64;
			case MeshComplexityLevel.Normal: return 128;
			case MeshComplexityLevel.Detailed: return 256;
			case MeshComplexityLevel.VeryDetailed: return 512;
			default: return 128;
		}
	}

	// Builds the two ring meshes: an opaque outer ring and a transparent
	// falloff ring. Vertices sit on the unit circle (duplicated per spoke);
	// the UV values select which shader scale/offset applies to each vertex,
	// and the vertex shader positions the rings per eye.
	private void BuildMeshes()
	{
#if UNITY_EDITOR
		_InitialMeshComplexity = MeshComplexity;
#endif
		int triangleCount = GetTriangleCount();
		Vector3[] innerVerts = new Vector3[triangleCount];
		Vector2[] innerUVs = new Vector2[triangleCount];
		Vector3[] outerVerts = new Vector3[triangleCount];
		Vector2[] outerUVs = new Vector2[triangleCount];
		int[] tris = new int[triangleCount * 3];

		// Two vertices per spoke, six indices (two triangles) per quad segment.
		for (int i = 0; i < triangleCount; i += 2)
		{
			float angle = 2 * i * Mathf.PI / triangleCount;
			float x = Mathf.Cos(angle);
			float y = Mathf.Sin(angle);
			outerVerts[i] = new Vector3(x, y, 0);
			outerVerts[i + 1] = new Vector3(x, y, 0);
			outerUVs[i] = new Vector2(0, 1);
			outerUVs[i + 1] = new Vector2(1, 1);
			innerVerts[i] = new Vector3(x, y, 0);
			innerVerts[i + 1] = new Vector3(x, y, 0);
			innerUVs[i] = new Vector2(0, 1);
			innerUVs[i + 1] = new Vector2(1, 0);
			int ti = i * 3;
			tris[ti] = i;
			tris[ti + 1] = i + 1;
			tris[ti + 2] = (i + 2) % triangleCount;
			tris[ti + 3] = i + 1;
			tris[ti + 4] = (i + 3) % triangleCount;
			tris[ti + 5] = (i + 2) % triangleCount;
		}

		// Rebuilding in the editor: release the previous meshes first.
		if (_OpaqueMesh != null)
		{
			DestroyImmediate(_OpaqueMesh);
		}
		if (_TransparentMesh != null)
		{
			DestroyImmediate(_TransparentMesh);
		}

		_OpaqueMesh = new Mesh()
		{
			name = "Opaque Vignette Mesh",
			hideFlags = HideFlags.HideAndDontSave
		};
		_TransparentMesh = new Mesh()
		{
			name = "Transparent Vignette Mesh",
			hideFlags = HideFlags.HideAndDontSave
		};

		_OpaqueMesh.vertices = outerVerts;
		_OpaqueMesh.uv = outerUVs;
		_OpaqueMesh.triangles = tris;
		_OpaqueMesh.UploadMeshData(true);
		// Huge bounds so the renderer is never frustum-culled; the shader
		// repositions vertices per eye.
		_OpaqueMesh.bounds = new Bounds(Vector3.zero, Vector3.one * 10000);
		_OpaqueMeshFilter.sharedMesh = _OpaqueMesh;

		_TransparentMesh.vertices = innerVerts;
		_TransparentMesh.uv = innerUVs;
		_TransparentMesh.triangles = tris;
		_TransparentMesh.UploadMeshData(true);
		_TransparentMesh.bounds = new Bounds(Vector3.zero, Vector3.one * 10000);
		_TransparentMeshFilter.sharedMesh = _TransparentMesh;
	}

	// Creates (or refreshes) the opaque and transparent ring materials and
	// applies the falloff keyword. Logs an error and leaves materials null if
	// the vignette shader cannot be found.
	private void BuildMaterials()
	{
#if UNITY_EDITOR
		_InitialFalloff = Falloff;
#endif
		if (VignetteShader == null)
		{
			VignetteShader = Shader.Find("Oculus/OVRVignette");
		}
		if (VignetteShader == null)
		{
			Debug.LogError("Could not find Vignette Shader! Vignette will not be drawn!");
			return;
		}

		if (_OpaqueMaterial == null)
		{
			_OpaqueMaterial = new Material(VignetteShader)
			{
				name = "Opaque Vignette Material",
				hideFlags = HideFlags.HideAndDontSave,
				renderQueue = (int)RenderQueue.Background
			};
			// Opaque ring: no blending, writes depth so it occludes the scene early.
			_OpaqueMaterial.SetFloat("_BlendSrc", (float)BlendMode.One);
			_OpaqueMaterial.SetFloat("_BlendDst", (float)BlendMode.Zero);
			_OpaqueMaterial.SetFloat("_ZWrite", 1);
		}
		_OpaqueMeshRenderer.sharedMaterial = _OpaqueMaterial;

		if (_TransparentMaterial == null)
		{
			_TransparentMaterial = new Material(VignetteShader)
			{
				name = "Transparent Vignette Material",
				hideFlags = HideFlags.HideAndDontSave,
				renderQueue = (int)RenderQueue.Overlay
			};
			// Falloff ring: standard alpha blending, drawn last without depth writes.
			_TransparentMaterial.SetFloat("_BlendSrc", (float)BlendMode.SrcAlpha);
			_TransparentMaterial.SetFloat("_BlendDst", (float)BlendMode.OneMinusSrcAlpha);
			_TransparentMaterial.SetFloat("_ZWrite", 0);
		}
		if (Falloff == FalloffType.Quadratic)
		{
			_TransparentMaterial.EnableKeyword(QUADRATIC_FALLOFF);
		}
		else
		{
			_TransparentMaterial.DisableKeyword(QUADRATIC_FALLOFF);
		}
		_TransparentMeshRenderer.sharedMaterial = _TransparentMaterial;
	}

	private void OnEnable()
	{
		// Scriptable render pipelines don't invoke OnPreCull/OnPostRender, so
		// hook the SRP camera callback instead.
#if UNITY_2019_1_OR_NEWER
		RenderPipelineManager.beginCameraRendering += OnBeginCameraRendering;
#elif UNITY_2018_1_OR_NEWER
		UnityEngine.Experimental.Rendering.RenderPipeline.beginCameraRendering += OnBeginCameraRendering;
#endif
	}

	private void OnDisable()
	{
#if UNITY_2019_1_OR_NEWER
		RenderPipelineManager.beginCameraRendering -= OnBeginCameraRendering;
#elif UNITY_2018_1_OR_NEWER
		UnityEngine.Experimental.Rendering.RenderPipeline.beginCameraRendering -= OnBeginCameraRendering;
#endif
		DisableRenderers();
	}

	// Creates the hidden child objects that carry the vignette meshes and
	// configures their renderers, then builds the meshes and materials.
	private void Awake()
	{
		_Camera = GetComponent<Camera>();

		_ShaderScaleAndOffset0Property = Shader.PropertyToID("_ScaleAndOffset0");
		_ShaderScaleAndOffset1Property = Shader.PropertyToID("_ScaleAndOffset1");

		GameObject opaqueObject = new GameObject("Opaque Vignette") { hideFlags = HideFlags.HideAndDontSave };
		opaqueObject.transform.SetParent(_Camera.transform, false);
		_OpaqueMeshFilter = opaqueObject.AddComponent<MeshFilter>();
		_OpaqueMeshRenderer = opaqueObject.AddComponent<MeshRenderer>();
		// Overlay geometry: opt out of every lighting/culling feature.
		_OpaqueMeshRenderer.receiveShadows = false;
		_OpaqueMeshRenderer.shadowCastingMode = ShadowCastingMode.Off;
		_OpaqueMeshRenderer.lightProbeUsage = LightProbeUsage.Off;
		_OpaqueMeshRenderer.reflectionProbeUsage = ReflectionProbeUsage.Off;
		_OpaqueMeshRenderer.allowOcclusionWhenDynamic = false;
		_OpaqueMeshRenderer.enabled = false;

		GameObject transparentObject = new GameObject("Transparent Vignette") { hideFlags = HideFlags.HideAndDontSave };
		transparentObject.transform.SetParent(_Camera.transform, false);
		_TransparentMeshFilter = transparentObject.AddComponent<MeshFilter>();
		_TransparentMeshRenderer = transparentObject.AddComponent<MeshRenderer>();
		_TransparentMeshRenderer.receiveShadows = false;
		_TransparentMeshRenderer.shadowCastingMode = ShadowCastingMode.Off;
		_TransparentMeshRenderer.lightProbeUsage = LightProbeUsage.Off;
		_TransparentMeshRenderer.reflectionProbeUsage = ReflectionProbeUsage.Off;
		_TransparentMeshRenderer.allowOcclusionWhenDynamic = false;
		_TransparentMeshRenderer.enabled = false;

		BuildMeshes();
		BuildMaterials();
	}

	// Extracts per-eye FOV tangents and center offsets from the (possibly
	// asymmetric) stereo projection matrix by unprojecting the frustum edges.
	private void GetTanFovAndOffsetForStereoEye(Camera.StereoscopicEye eye, out float tanFovX, out float tanFovY, out float offsetX, out float offsetY)
	{
		var pt = _Camera.GetStereoProjectionMatrix(eye).transpose;

		var right = pt * new Vector4(-1, 0, 0, 1);
		var left = pt * new Vector4(1, 0, 0, 1);
		var up = pt * new Vector4(0, -1, 0, 1);
		var down = pt * new Vector4(0, 1, 0, 1);

		float rightTanFovX = right.z / right.x;
		float leftTanFovX = left.z / left.x;
		float upTanFovY = up.z / up.y;
		float downTanFovY = down.z / down.y;
		offsetX = -(rightTanFovX + leftTanFovX) / 2;
		offsetY = -(upTanFovY + downTanFovY) / 2;
		tanFovX = (rightTanFovX - leftTanFovX) / 2;
		tanFovY = (upTanFovY - downTanFovY) / 2;
	}

	private void GetTanFovAndOffsetForMonoEye(out float tanFovX, out float tanFovY, out float offsetX, out float offsetY)
	{
		// When calculating from Unity's camera fields, this is the calculation used.
		// We can't use this for stereo eyes because VR projection matrices are usually asymmetric.
		tanFovY = Mathf.Tan(Mathf.Deg2Rad * _Camera.fieldOfView * 0.5f);
		tanFovX = tanFovY * _Camera.aspect;
		offsetX = 0f;
		offsetY = 0f;
	}

	private bool VisibilityTest(float scaleX, float scaleY, float offsetX, float offsetY)
	{
		// because the corners of our viewport are the furthest from the center of our vignette,
		// we only need to test that the farthest corner is outside the vignette ring.
		return new Vector2((1 + Mathf.Abs(offsetX)) / scaleX, (1 + Mathf.Abs(offsetY)) / scaleY).sqrMagnitude > 1.0f;
	}

	// Recomputes the per-eye scale/offset shader parameters and visibility
	// flags from the current FOV/falloff settings.
	private void Update()
	{
#if UNITY_EDITOR
		if (MeshComplexity != _InitialMeshComplexity)
		{
			// rebuild meshes
			BuildMeshes();
		}
		if (Falloff != _InitialFalloff)
		{
			// rebuild materials
			BuildMaterials();
		}
#endif
		// The opaque material could not be created, so just return
		if (_OpaqueMaterial == null)
		{
			return;
		}

		float tanInnerFovY = Mathf.Tan(VignetteFieldOfView * Mathf.Deg2Rad * 0.5f);
		float tanInnerFovX = tanInnerFovY * VignetteAspectRatio;
		// BUGFIX: the middle (falloff) ring previously computed its X tangent from
		// the vertical half-angle and then multiplied by the aspect ratio to get Y,
		// the inverse of the inner-ring computation above. With any aspect ratio
		// other than 1 that inverted the falloff ellipse's aspect. Compute Y from
		// the vertical half-angle and derive X, matching the inner ring.
		float tanMiddleFovY = Mathf.Tan((VignetteFieldOfView + VignetteFalloffDegrees) * Mathf.Deg2Rad * 0.5f);
		float tanMiddleFovX = tanMiddleFovY * VignetteAspectRatio;

		_TransparentVignetteVisible = false;
		_OpaqueVignetteVisible = false;

		// Index 0/1: left/right stereo eyes (both identical when not in stereo).
		for (int i = 0; i < 2; i++)
		{
			float tanFovX, tanFovY, offsetX, offsetY;
			if (_Camera.stereoEnabled)
			{
				GetTanFovAndOffsetForStereoEye((Camera.StereoscopicEye)i, out tanFovX, out tanFovY, out offsetX, out offsetY);
			}
			else
			{
				GetTanFovAndOffsetForMonoEye(out tanFovX, out tanFovY, out offsetX, out offsetY);
			}

			// Scale for the outermost edge: just past the farthest viewport corner.
			float borderScale = new Vector2((1 + Mathf.Abs(offsetX)) / VignetteAspectRatio, 1 + Mathf.Abs(offsetY)).magnitude * 1.01f;

			float innerScaleX = tanInnerFovX / tanFovX;
			float innerScaleY = tanInnerFovY / tanFovY;
			float middleScaleX = tanMiddleFovX / tanFovX;
			float middleScaleY = tanMiddleFovY / tanFovY;
			float outerScaleX = borderScale * VignetteAspectRatio;
			float outerScaleY = borderScale;

			// test for visibility.
			_TransparentVignetteVisible |= VisibilityTest(innerScaleX, innerScaleY, offsetX, offsetY);
			_OpaqueVignetteVisible |= VisibilityTest(middleScaleX, middleScaleY, offsetX, offsetY);

			_OpaqueScaleAndOffset0[i] = new Vector4(outerScaleX, outerScaleY, offsetX, offsetY);
			_OpaqueScaleAndOffset1[i] = new Vector4(middleScaleX, middleScaleY, offsetX, offsetY);
			_TransparentScaleAndOffset0[i] = new Vector4(middleScaleX, middleScaleY, offsetX, offsetY);
			_TransparentScaleAndOffset1[i] = new Vector4(innerScaleX, innerScaleY, offsetX, offsetY);
		}

		// if the vignette falloff is less than or equal to zero, we don't need to draw
		// the transparent mesh.
		_TransparentVignetteVisible &= VignetteFalloffDegrees > 0.0f;

		_OpaqueMaterial.SetVectorArray(_ShaderScaleAndOffset0Property, _OpaqueScaleAndOffset0);
		_OpaqueMaterial.SetVectorArray(_ShaderScaleAndOffset1Property, _OpaqueScaleAndOffset1);
		_OpaqueMaterial.color = VignetteColor;
		_TransparentMaterial.SetVectorArray(_ShaderScaleAndOffset0Property, _TransparentScaleAndOffset0);
		_TransparentMaterial.SetVectorArray(_ShaderScaleAndOffset1Property, _TransparentScaleAndOffset1);
		_TransparentMaterial.color = VignetteColor;
	}

	private void EnableRenderers()
	{
		_OpaqueMeshRenderer.enabled = _OpaqueVignetteVisible;
		_TransparentMeshRenderer.enabled = _TransparentVignetteVisible;
	}

	private void DisableRenderers()
	{
		_OpaqueMeshRenderer.enabled = false;
		_TransparentMeshRenderer.enabled = false;
	}

	// Objects are enabled on pre cull and disabled on post render so they only draw in this camera
	private void OnPreCull()
	{
		EnableRenderers();
	}

	private void OnPostRender()
	{
		DisableRenderers();
	}

#if UNITY_2019_1_OR_NEWER
	private void OnBeginCameraRendering(ScriptableRenderContext context, Camera camera)
#else
	private void OnBeginCameraRendering(Camera camera)
#endif
	{
		// SRP path: only show the vignette for the camera this component lives on.
		if (camera == _Camera)
		{
			EnableRenderers();
		}
		else
		{
			DisableRenderers();
		}
	}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: ead80e53296fc6244b6e5c18100c2c2a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences:
- VignetteShader: {fileID: 4800000, guid: 9d034fa3f535a1648b5059e6907c647d, type: 3}
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,29 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Rotates this GameObject at a given speed.
/// </summary>
public class OVRWaitCursor : MonoBehaviour
{
	// Degrees per second around each local axis; default spins clockwise about Z.
	public Vector3 rotateSpeeds = new Vector3(0.0f, 0.0f, -60.0f);

	/// <summary>
	/// Applies this frame's share of the rotation, scaled by the smoothed
	/// delta time so the spin rate is frame-rate independent.
	/// </summary>
	void Update()
	{
		Vector3 frameRotation = rotateSpeeds * Time.smoothDeltaTime;
		transform.Rotate(frameRotation);
	}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: d313011a8bc474fe49260bde01cffcd3
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData: