clean project

Helar Jaadla
2022-03-07 17:52:41 +02:00
parent a174b45bd2
commit cbeb10ec35
5100 changed files with 837159 additions and 0 deletions


@@ -0,0 +1,330 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using UnityEngine.Rendering;
using System.Collections;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
public abstract class OVRCameraComposition : OVRComposition {
protected GameObject cameraFramePlaneObject = null;
protected float cameraFramePlaneDistance;
protected readonly bool hasCameraDeviceOpened = false;
internal readonly OVRPlugin.CameraDevice cameraDevice = OVRPlugin.CameraDevice.WebCamera0;
private Mesh boundaryMesh = null;
private float boundaryMeshTopY = 0.0f;
private float boundaryMeshBottomY = 0.0f;
private OVRManager.VirtualGreenScreenType boundaryMeshType = OVRManager.VirtualGreenScreenType.Off;
private OVRCameraFrameCompositionManager cameraFrameCompositionManager = null;
protected OVRCameraComposition(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
: base(parentObject, mainCamera, configuration)
{
cameraDevice = OVRCompositionUtil.ConvertCameraDevice(configuration.capturingCameraDevice);
Debug.Assert(!hasCameraDeviceOpened);
Debug.Assert(!OVRPlugin.IsCameraDeviceAvailable(cameraDevice) || !OVRPlugin.HasCameraDeviceOpened(cameraDevice));
hasCameraDeviceOpened = false;
bool cameraSupportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);
if (configuration.useDynamicLighting && !cameraSupportsDepth)
{
Debug.LogWarning("The camera device doesn't support depth. The result of dynamic lighting might not be correct");
}
if (OVRPlugin.IsCameraDeviceAvailable(cameraDevice))
{
OVRPlugin.CameraExtrinsics extrinsics;
OVRPlugin.CameraIntrinsics intrinsics;
if (OVRPlugin.GetExternalCameraCount() > 0 && OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
{
OVRPlugin.SetCameraDevicePreferredColorFrameSize(cameraDevice, intrinsics.ImageSensorPixelResolution.w, intrinsics.ImageSensorPixelResolution.h);
}
if (configuration.useDynamicLighting)
{
OVRPlugin.SetCameraDeviceDepthSensingMode(cameraDevice, OVRPlugin.CameraDeviceDepthSensingMode.Fill);
OVRPlugin.CameraDeviceDepthQuality quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
if (configuration.depthQuality == OVRManager.DepthQuality.Low)
{
quality = OVRPlugin.CameraDeviceDepthQuality.Low;
}
else if (configuration.depthQuality == OVRManager.DepthQuality.Medium)
{
quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
}
else if (configuration.depthQuality == OVRManager.DepthQuality.High)
{
quality = OVRPlugin.CameraDeviceDepthQuality.High;
}
else
{
Debug.LogWarning("Unknown depth quality");
}
OVRPlugin.SetCameraDevicePreferredDepthQuality(cameraDevice, quality);
}
Debug.LogFormat("Opening camera device {0}", cameraDevice);
OVRPlugin.OpenCameraDevice(cameraDevice);
if (OVRPlugin.HasCameraDeviceOpened(cameraDevice))
{
Debug.LogFormat("Opened camera device {0}", cameraDevice);
hasCameraDeviceOpened = true;
}
}
}
public override void Cleanup()
{
OVRCompositionUtil.SafeDestroy(ref cameraFramePlaneObject);
if (hasCameraDeviceOpened)
{
Debug.LogFormat("Close camera device {0}", cameraDevice);
OVRPlugin.CloseCameraDevice(cameraDevice);
}
}
public override void RecenterPose()
{
boundaryMesh = null;
}
protected void RefreshCameraFramePlaneObject(GameObject parentObject, Camera mixedRealityCamera, OVRMixedRealityCaptureConfiguration configuration)
{
OVRCompositionUtil.SafeDestroy(ref cameraFramePlaneObject);
Debug.Assert(cameraFramePlaneObject == null);
cameraFramePlaneObject = GameObject.CreatePrimitive(PrimitiveType.Quad);
cameraFramePlaneObject.name = "OculusMRC_CameraFrame";
cameraFramePlaneObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
cameraFramePlaneObject.GetComponent<Collider>().enabled = false;
cameraFramePlaneObject.GetComponent<MeshRenderer>().shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
Material cameraFrameMaterial = new Material(Shader.Find(configuration.useDynamicLighting ? "Oculus/OVRMRCameraFrameLit" : "Oculus/OVRMRCameraFrame"));
cameraFramePlaneObject.GetComponent<MeshRenderer>().material = cameraFrameMaterial;
cameraFrameMaterial.SetColor("_Color", Color.white);
cameraFrameMaterial.SetFloat("_Visible", 0.0f);
cameraFramePlaneObject.transform.localScale = new Vector3(4, 4, 4);
cameraFramePlaneObject.SetActive(true);
cameraFrameCompositionManager = mixedRealityCamera.gameObject.AddComponent<OVRCameraFrameCompositionManager>();
cameraFrameCompositionManager.configuration = configuration;
cameraFrameCompositionManager.cameraFrameGameObj = cameraFramePlaneObject;
cameraFrameCompositionManager.composition = this;
}
private bool nullcameraRigWarningDisplayed = false;
protected void UpdateCameraFramePlaneObject(Camera mainCamera, Camera mixedRealityCamera, OVRMixedRealityCaptureConfiguration configuration, RenderTexture boundaryMeshMaskTexture)
{
cameraFrameCompositionManager.configuration = configuration;
bool hasError = false;
Material cameraFrameMaterial = cameraFramePlaneObject.GetComponent<MeshRenderer>().material;
Texture2D colorTexture = Texture2D.blackTexture;
Texture2D depthTexture = Texture2D.whiteTexture;
if (OVRPlugin.IsCameraDeviceColorFrameAvailable(cameraDevice))
{
colorTexture = OVRPlugin.GetCameraDeviceColorFrameTexture(cameraDevice);
}
else
{
Debug.LogWarning("Camera: color frame not ready");
hasError = true;
}
bool cameraSupportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);
if (configuration.useDynamicLighting && cameraSupportsDepth)
{
if (OVRPlugin.IsCameraDeviceDepthFrameAvailable(cameraDevice))
{
depthTexture = OVRPlugin.GetCameraDeviceDepthFrameTexture(cameraDevice);
}
else
{
Debug.LogWarning("Camera: depth frame not ready");
hasError = true;
}
}
if (!hasError)
{
Vector3 offset = mainCamera.transform.position - mixedRealityCamera.transform.position;
float distance = Vector3.Dot(mixedRealityCamera.transform.forward, offset);
cameraFramePlaneDistance = distance;
cameraFramePlaneObject.transform.position = mixedRealityCamera.transform.position + mixedRealityCamera.transform.forward * distance;
cameraFramePlaneObject.transform.rotation = mixedRealityCamera.transform.rotation;
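// Size the quad so it exactly fills the camera frustum at this distance:
// height = 2 * distance * tan(fovY / 2), width = height * aspect.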
float tanFov = Mathf.Tan(mixedRealityCamera.fieldOfView * Mathf.Deg2Rad * 0.5f);
cameraFramePlaneObject.transform.localScale = new Vector3(distance * mixedRealityCamera.aspect * tanFov * 2.0f, distance * tanFov * 2.0f, 1.0f);
float worldHeight = distance * tanFov * 2.0f;
float worldWidth = worldHeight * mixedRealityCamera.aspect;
float cullingDistance = float.MaxValue;
if (OVRManager.instance.virtualGreenScreenType != OVRManager.VirtualGreenScreenType.Off)
{
RefreshBoundaryMesh(mixedRealityCamera, configuration, out cullingDistance);
}
cameraFrameMaterial.mainTexture = colorTexture;
cameraFrameMaterial.SetTexture("_DepthTex", depthTexture);
cameraFrameMaterial.SetVector("_FlipParams", new Vector4((configuration.flipCameraFrameHorizontally ? 1.0f : 0.0f), (configuration.flipCameraFrameVertically ? 1.0f : 0.0f), 0.0f, 0.0f));
cameraFrameMaterial.SetColor("_ChromaKeyColor", configuration.chromaKeyColor);
cameraFrameMaterial.SetFloat("_ChromaKeySimilarity", configuration.chromaKeySimilarity);
cameraFrameMaterial.SetFloat("_ChromaKeySmoothRange", configuration.chromaKeySmoothRange);
cameraFrameMaterial.SetFloat("_ChromaKeySpillRange", configuration.chromaKeySpillRange);
cameraFrameMaterial.SetVector("_TextureDimension", new Vector4(colorTexture.width, colorTexture.height, 1.0f / colorTexture.width, 1.0f / colorTexture.height));
cameraFrameMaterial.SetVector("_TextureWorldSize", new Vector4(worldWidth, worldHeight, 0, 0));
cameraFrameMaterial.SetFloat("_SmoothFactor", configuration.dynamicLightingSmoothFactor);
cameraFrameMaterial.SetFloat("_DepthVariationClamp", configuration.dynamicLightingDepthVariationClampingValue);
cameraFrameMaterial.SetFloat("_CullingDistance", cullingDistance);
if (configuration.virtualGreenScreenType == OVRManager.VirtualGreenScreenType.Off || boundaryMesh == null || boundaryMeshMaskTexture == null)
{
cameraFrameMaterial.SetTexture("_MaskTex", Texture2D.whiteTexture);
}
else
{
if (cameraRig == null)
{
if (!nullcameraRigWarningDisplayed)
{
Debug.LogWarning("Could not find the OVRCameraRig/CenterEyeAnchor object. Please check if the OVRCameraRig has been setup properly. The virtual green screen has been temporarily disabled");
nullcameraRigWarningDisplayed = true;
}
cameraFrameMaterial.SetTexture("_MaskTex", Texture2D.whiteTexture);
}
else
{
if (nullcameraRigWarningDisplayed)
{
Debug.Log("OVRCameraRig/CenterEyeAnchor object found. Virtual green screen is activated");
nullcameraRigWarningDisplayed = false;
}
cameraFrameMaterial.SetTexture("_MaskTex", boundaryMeshMaskTexture);
}
}
}
}
protected void RefreshBoundaryMesh(Camera camera, OVRMixedRealityCaptureConfiguration configuration, out float cullingDistance)
{
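// The culling distance is the farthest boundary point along the camera's forward
// axis, padded by the configured depth tolerance; it is fed to the camera frame
// shader as _CullingDistance.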
float depthTolerance = configuration.virtualGreenScreenApplyDepthCulling ? configuration.virtualGreenScreenDepthTolerance : float.PositiveInfinity;
cullingDistance = OVRCompositionUtil.GetMaximumBoundaryDistance(camera, OVRCompositionUtil.ToBoundaryType(configuration.virtualGreenScreenType)) + depthTolerance;
if (boundaryMesh == null || boundaryMeshType != configuration.virtualGreenScreenType || boundaryMeshTopY != configuration.virtualGreenScreenTopY || boundaryMeshBottomY != configuration.virtualGreenScreenBottomY)
{
boundaryMeshTopY = configuration.virtualGreenScreenTopY;
boundaryMeshBottomY = configuration.virtualGreenScreenBottomY;
boundaryMesh = OVRCompositionUtil.BuildBoundaryMesh(OVRCompositionUtil.ToBoundaryType(configuration.virtualGreenScreenType), boundaryMeshTopY, boundaryMeshBottomY);
boundaryMeshType = configuration.virtualGreenScreenType;
// Creating a GameObject for testing purposes only
//GameObject boundaryMeshObject = new GameObject("BoundaryMeshObject");
//boundaryMeshObject.AddComponent<MeshFilter>().mesh = boundaryMesh;
//boundaryMeshObject.AddComponent<MeshRenderer>();
}
}
public class OVRCameraFrameCompositionManager : MonoBehaviour {
public OVRMixedRealityCaptureConfiguration configuration;
public GameObject cameraFrameGameObj;
public OVRCameraComposition composition;
public RenderTexture boundaryMeshMaskTexture;
private Material cameraFrameMaterial;
private Material whiteMaterial;
#if UNITY_2019_1_OR_NEWER
private Camera mixedRealityCamera;
#endif
void Start()
{
Shader shader = Shader.Find("Oculus/Unlit");
if (!shader)
{
Debug.LogError("Oculus/Unlit shader does not exist");
return;
}
whiteMaterial = new Material(shader);
whiteMaterial.color = Color.white;
#if UNITY_2019_1_OR_NEWER
// Attach to render pipeline callbacks when on URP
if(GraphicsSettings.renderPipelineAsset != null)
{
RenderPipelineManager.beginCameraRendering += OnCameraBeginRendering;
RenderPipelineManager.endCameraRendering += OnCameraEndRendering;
mixedRealityCamera = GetComponent<Camera>();
}
#endif
}
void OnPreRender()
{
if (configuration != null && configuration.virtualGreenScreenType != OVRManager.VirtualGreenScreenType.Off && boundaryMeshMaskTexture != null && composition.boundaryMesh != null)
{
RenderTexture oldRT = RenderTexture.active;
RenderTexture.active = boundaryMeshMaskTexture;
// The camera matrices haven't been set up yet when OnPreRender() is executed, so load the projection matrix manually
GL.PushMatrix();
GL.LoadProjectionMatrix(GetComponent<Camera>().projectionMatrix);
GL.Clear(false, true, Color.black);
for (int i = 0; i < whiteMaterial.passCount; ++i)
{
if (whiteMaterial.SetPass(i))
{
Graphics.DrawMeshNow(composition.boundaryMesh, composition.cameraRig.ComputeTrackReferenceMatrix());
}
}
GL.PopMatrix();
RenderTexture.active = oldRT;
}
if (cameraFrameGameObj)
{
if (cameraFrameMaterial == null)
cameraFrameMaterial = cameraFrameGameObj.GetComponent<MeshRenderer>().material;
cameraFrameMaterial.SetFloat("_Visible", 1.0f);
}
}
void OnPostRender()
{
if (cameraFrameGameObj)
{
Debug.Assert(cameraFrameMaterial);
cameraFrameMaterial.SetFloat("_Visible", 0.0f);
}
}
#if UNITY_2019_1_OR_NEWER
private void OnCameraBeginRendering(ScriptableRenderContext renderContext, Camera camera)
{
if (mixedRealityCamera != null && mixedRealityCamera == camera)
OnPreRender();
}
private void OnCameraEndRendering(ScriptableRenderContext renderContext, Camera camera)
{
if (mixedRealityCamera != null && mixedRealityCamera == camera)
OnPostRender();
}
#endif
}
}
#endif
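
The quad sizing in UpdateCameraFramePlaneObject above reduces to basic frustum trigonometry: a quad placed distance units in front of the camera must be 2 * distance * tan(fovY / 2) tall, and aspect times that wide. A minimal sketch of the same math in isolation (FrustumQuadSizing is an illustrative name, not part of the SDK):

using UnityEngine;

// Illustrative sketch, not part of the SDK: the frustum-fill math used by
// UpdateCameraFramePlaneObject to size a unit quad placed 'distance' units
// in front of a camera so that it exactly covers the view.
public static class FrustumQuadSizing
{
    public static Vector3 QuadScaleAtDistance(Camera cam, float distance)
    {
        float tanHalfFov = Mathf.Tan(cam.fieldOfView * Mathf.Deg2Rad * 0.5f);
        float height = 2.0f * distance * tanHalfFov; // world-space frustum height at 'distance'
        float width = height * cam.aspect;           // width follows the aspect ratio
        return new Vector3(width, height, 1.0f);
    }
}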


@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 70818bad1fe6859439b190a61dfb6eb8
timeCreated: 1503089686
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,100 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if UNITY_ANDROID && !UNITY_EDITOR
#define OVR_ANDROID_MRC
#endif
using UnityEngine;
using System.Collections;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
public abstract class OVRComposition {
public bool cameraInTrackingSpace = false;
public OVRCameraRig cameraRig = null;
protected OVRComposition(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration) {
RefreshCameraRig(parentObject, mainCamera);
}
public abstract OVRManager.CompositionMethod CompositionMethod();
public abstract void Update(GameObject gameObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration, OVRManager.TrackingOrigin trackingOrigin);
public abstract void Cleanup();
public virtual void RecenterPose() { }
protected bool usingLastAttachedNodePose = false;
protected OVRPose lastAttachedNodePose = new OVRPose(); // Sometimes the attached node's pose is not readable (lost tracking, low battery, etc.); fall back to the last known pose when that happens
public void RefreshCameraRig(GameObject parentObject, Camera mainCamera)
{
OVRCameraRig cameraRig = mainCamera.GetComponentInParent<OVRCameraRig>();
if (cameraRig == null)
{
cameraRig = parentObject.GetComponent<OVRCameraRig>();
}
cameraInTrackingSpace = (cameraRig != null && cameraRig.trackingSpace != null);
this.cameraRig = cameraRig;
Debug.Log(cameraRig == null ? "[OVRComposition] CameraRig not found" : "[OVRComposition] CameraRig found");
}
public OVRPose ComputeCameraWorldSpacePose(OVRPlugin.CameraExtrinsics extrinsics)
{
OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
OVRPose worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
return worldSpacePose;
}
public OVRPose ComputeCameraTrackingSpacePose(OVRPlugin.CameraExtrinsics extrinsics)
{
OVRPose trackingSpacePose = new OVRPose();
OVRPose cameraTrackingSpacePose = extrinsics.RelativePose.ToOVRPose();
#if OVR_ANDROID_MRC
OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
cameraTrackingSpacePose = stageToLocalPose * cameraTrackingSpacePose;
#endif
trackingSpacePose = cameraTrackingSpacePose;
if (extrinsics.AttachedToNode != OVRPlugin.Node.None && OVRPlugin.GetNodePresent(extrinsics.AttachedToNode))
{
if (usingLastAttachedNodePose)
{
Debug.Log("The camera attached node get tracked");
usingLastAttachedNodePose = false;
}
OVRPose attachedNodePose = OVRPlugin.GetNodePose(extrinsics.AttachedToNode, OVRPlugin.Step.Render).ToOVRPose();
lastAttachedNodePose = attachedNodePose;
trackingSpacePose = attachedNodePose * trackingSpacePose;
}
else
{
if (extrinsics.AttachedToNode != OVRPlugin.Node.None)
{
if (!usingLastAttachedNodePose)
{
Debug.LogWarning("The camera attached node could not be tracked, using the last pose");
usingLastAttachedNodePose = true;
}
trackingSpacePose = lastAttachedNodePose * trackingSpacePose;
}
}
return trackingSpacePose;
}
}
#endif
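
The pose chaining above (stageToLocalPose * cameraTrackingSpacePose, attachedNodePose * trackingSpacePose) is ordinary parent-child transform composition. A sketch of the same operation using Unity's built-in Pose struct (PoseComposition is an illustrative name, not part of the SDK):

using UnityEngine;

// Illustrative sketch, not part of the SDK: compose a tracked node's pose with a
// pose expressed relative to that node, mirroring what OVRPose's * operator does
// in ComputeCameraTrackingSpacePose.
public static class PoseComposition
{
    public static Pose Compose(Pose parent, Pose relativeToParent)
    {
        // Rotate the child's offset into the parent's frame, then translate;
        // orientations multiply in the same parent-first order.
        return new Pose(
            parent.position + parent.rotation * relativeToParent.position,
            parent.rotation * relativeToParent.rotation);
    }
}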


@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 829a382f3380d4b46ad9670463232a0b
timeCreated: 1502990005
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,164 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections.Generic;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
internal class OVRCompositionUtil {
public static void SafeDestroy(GameObject obj)
{
if (Application.isPlaying)
{
GameObject.Destroy(obj);
}
else
{
GameObject.DestroyImmediate(obj);
}
}
public static void SafeDestroy(ref GameObject obj)
{
if (obj != null)
{
SafeDestroy(obj);
obj = null;
}
}
public static OVRPlugin.CameraDevice ConvertCameraDevice(OVRManager.CameraDevice cameraDevice)
{
if (cameraDevice == OVRManager.CameraDevice.WebCamera0)
{
return OVRPlugin.CameraDevice.WebCamera0;
}
else if (cameraDevice == OVRManager.CameraDevice.WebCamera1)
{
return OVRPlugin.CameraDevice.WebCamera1;
}
else if (cameraDevice == OVRManager.CameraDevice.ZEDCamera)
{
return OVRPlugin.CameraDevice.ZEDCamera;
}
else
{
return OVRPlugin.CameraDevice.None;
}
}
public static OVRBoundary.BoundaryType ToBoundaryType(OVRManager.VirtualGreenScreenType type)
{
/*if (type == OVRManager.VirtualGreenScreenType.OuterBoundary)
{
return OVRBoundary.BoundaryType.OuterBoundary;
}
else */if (type == OVRManager.VirtualGreenScreenType.PlayArea)
{
return OVRBoundary.BoundaryType.PlayArea;
}
else
{
Debug.LogWarning("Unmatched VirtualGreenScreenType");
return OVRBoundary.BoundaryType.PlayArea;
}
}
public static Vector3 GetWorldPosition(Vector3 trackingSpacePosition)
{
OVRPose tsPose;
tsPose.position = trackingSpacePosition;
tsPose.orientation = Quaternion.identity;
OVRPose wsPose = OVRExtensions.ToWorldSpacePose(tsPose);
Vector3 pos = wsPose.position;
return pos;
}
public static float GetMaximumBoundaryDistance(Camera camera, OVRBoundary.BoundaryType boundaryType)
{
if (!OVRManager.boundary.GetConfigured())
{
return float.MaxValue;
}
Vector3[] geometry = OVRManager.boundary.GetGeometry(boundaryType);
if (geometry.Length == 0)
{
return float.MaxValue;
}
float maxDistance = -float.MaxValue;
foreach (Vector3 v in geometry)
{
Vector3 pos = GetWorldPosition(v);
float distance = Vector3.Dot(camera.transform.forward, pos);
if (maxDistance < distance)
{
maxDistance = distance;
}
}
return maxDistance;
}
public static Mesh BuildBoundaryMesh(OVRBoundary.BoundaryType boundaryType, float topY, float bottomY)
{
if (!OVRManager.boundary.GetConfigured())
{
return null;
}
List<Vector3> geometry = new List<Vector3>(OVRManager.boundary.GetGeometry(boundaryType));
if (geometry.Count == 0)
{
return null;
}
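// Close the outline loop by repeating the first point, then extrude the outline
// into a vertical ribbon: one ring of vertices at bottomY followed by one at topY.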
geometry.Add(geometry[0]);
int numPoints = geometry.Count;
Vector3[] vertices = new Vector3[numPoints * 2];
Vector2[] uvs = new Vector2[numPoints * 2];
for (int i = 0; i < numPoints; ++i)
{
Vector3 v = geometry[i];
vertices[i] = new Vector3(v.x, bottomY, v.z);
vertices[i + numPoints] = new Vector3(v.x, topY, v.z);
uvs[i] = new Vector2((float)i / (numPoints - 1), 0.0f);
uvs[i + numPoints] = new Vector2(uvs[i].x, 1.0f);
}
int[] triangles = new int[(numPoints - 1) * 2 * 3];
for (int i = 0; i < numPoints - 1; ++i)
{
// The outline is wound clockwise; only the back faces should be rendered into the camera frame mask
triangles[i * 6 + 0] = i;
triangles[i * 6 + 1] = i + numPoints;
triangles[i * 6 + 2] = i + 1 + numPoints;
triangles[i * 6 + 3] = i;
triangles[i * 6 + 4] = i + 1 + numPoints;
triangles[i * 6 + 5] = i + 1;
}
Mesh mesh = new Mesh();
mesh.vertices = vertices;
mesh.uv = uvs;
mesh.triangles = triangles;
return mesh;
}
}
#endif
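
As a usage sketch, the ribbon mesh produced by BuildBoundaryMesh can be previewed in a scene much like the commented-out test code in OVRCameraComposition.RefreshBoundaryMesh does. BoundaryMeshPreview and the 2 m wall height are illustrative, and the component must live in the same assembly as OVRCompositionUtil (the class is internal) under the same platform defines:

using UnityEngine;

// Illustrative debug helper, not part of the SDK: visualize the virtual green screen
// ribbon generated from the Guardian play area. Attach to an empty GameObject.
public class BoundaryMeshPreview : MonoBehaviour
{
    void Start()
    {
        // topY/bottomY are assumed values: floor at 0, wall top at 2 m.
        Mesh mesh = OVRCompositionUtil.BuildBoundaryMesh(
            OVRBoundary.BoundaryType.PlayArea, topY: 2.0f, bottomY: 0.0f);
        if (mesh == null)
        {
            Debug.LogWarning("Boundary not configured; no mesh generated");
            return;
        }
        gameObject.AddComponent<MeshFilter>().mesh = mesh;
        gameObject.AddComponent<MeshRenderer>(); // assign a material as needed
    }
}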


@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 43bf91d46b2eb874a842be95aee2cc9a
timeCreated: 1502992822
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,195 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
using System;
using UnityEngine;
using System.Collections;
using Object = UnityEngine.Object;
#if USING_URP
using UnityEngine.Rendering.Universal;
#endif
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
public class OVRDirectComposition : OVRCameraComposition
{
private GameObject previousMainCameraObject = null;
public GameObject directCompositionCameraGameObject = null;
public Camera directCompositionCamera = null;
public RenderTexture boundaryMeshMaskTexture = null;
public override OVRManager.CompositionMethod CompositionMethod() { return OVRManager.CompositionMethod.Direct; }
public OVRDirectComposition(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
: base(parentObject, mainCamera, configuration)
{
RefreshCameraObjects(parentObject, mainCamera, configuration);
}
private void RefreshCameraObjects(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
{
if (!hasCameraDeviceOpened)
{
Debug.LogWarning("[OVRDirectComposition] RefreshCameraObjects(): Unable to open camera device " + cameraDevice);
return;
}
if (mainCamera.gameObject != previousMainCameraObject)
{
Debug.LogFormat("[OVRDirectComposition] Camera refreshed. Rebind camera to {0}", mainCamera.gameObject.name);
OVRCompositionUtil.SafeDestroy(ref directCompositionCameraGameObject);
directCompositionCamera = null;
RefreshCameraRig(parentObject, mainCamera);
Debug.Assert(directCompositionCameraGameObject == null);
if (configuration.instantiateMixedRealityCameraGameObject != null)
{
directCompositionCameraGameObject = configuration.instantiateMixedRealityCameraGameObject(mainCamera.gameObject, OVRManager.MrcCameraType.Normal);
}
else
{
directCompositionCameraGameObject = Object.Instantiate(mainCamera.gameObject);
}
directCompositionCameraGameObject.name = "OculusMRC_DirectCompositionCamera";
directCompositionCameraGameObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
if (directCompositionCameraGameObject.GetComponent<AudioListener>())
{
Object.Destroy(directCompositionCameraGameObject.GetComponent<AudioListener>());
}
if (directCompositionCameraGameObject.GetComponent<OVRManager>())
{
Object.Destroy(directCompositionCameraGameObject.GetComponent<OVRManager>());
}
directCompositionCamera = directCompositionCameraGameObject.GetComponent<Camera>();
#if USING_MRC_COMPATIBLE_URP_VERSION
var directCamData = directCompositionCamera.GetUniversalAdditionalCameraData();
if (directCamData != null)
{
directCamData.allowXRRendering = false;
}
#elif USING_URP
Debug.LogError("Using URP with MRC is only supported with URP version 10.0.0 or higher. Consider using Unity 2020 or higher.");
#else
directCompositionCamera.stereoTargetEye = StereoTargetEyeMask.None;
#endif
directCompositionCamera.depth = float.MaxValue;
directCompositionCamera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
directCompositionCamera.cullingMask = (directCompositionCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
Debug.Log("DirectComposition activated : useDynamicLighting " + (configuration.useDynamicLighting ? "ON" : "OFF"));
RefreshCameraFramePlaneObject(parentObject, directCompositionCamera, configuration);
previousMainCameraObject = mainCamera.gameObject;
}
}
public override void Update(GameObject gameObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration, OVRManager.TrackingOrigin trackingOrigin)
{
if (!hasCameraDeviceOpened)
{
return;
}
RefreshCameraObjects(gameObject, mainCamera, configuration);
if (!OVRPlugin.SetHandNodePoseStateLatency(configuration.handPoseStateLatency))
{
Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + configuration.handPoseStateLatency);
}
directCompositionCamera.clearFlags = mainCamera.clearFlags;
directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
if (configuration.dynamicCullingMask)
{
directCompositionCamera.cullingMask = (mainCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
}
directCompositionCamera.nearClipPlane = mainCamera.nearClipPlane;
directCompositionCamera.farClipPlane = mainCamera.farClipPlane;
if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
{
OVRPose trackingSpacePose = new OVRPose();
trackingSpacePose.position = trackingOrigin == OVRManager.TrackingOrigin.EyeLevel ?
OVRMixedReality.fakeCameraEyeLevelPosition :
OVRMixedReality.fakeCameraFloorLevelPosition;
trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
directCompositionCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
directCompositionCamera.aspect = OVRMixedReality.fakeCameraAspect;
if (cameraInTrackingSpace)
{
directCompositionCamera.transform.FromOVRPose(trackingSpacePose, true);
}
else
{
OVRPose worldSpacePose = new OVRPose();
worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
directCompositionCamera.transform.FromOVRPose(worldSpacePose);
}
}
else
{
OVRPlugin.CameraExtrinsics extrinsics;
OVRPlugin.CameraIntrinsics intrinsics;
// Only one external camera is supported for MR so far; always use camera index 0
if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
{
float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
directCompositionCamera.fieldOfView = fovY;
directCompositionCamera.aspect = aspect;
if (cameraInTrackingSpace)
{
OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
directCompositionCamera.transform.FromOVRPose(trackingSpacePose, true);
}
else
{
OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
directCompositionCamera.transform.FromOVRPose(worldSpacePose);
}
}
else
{
Debug.LogWarning("Failed to get external camera information");
}
}
if (hasCameraDeviceOpened)
{
if (boundaryMeshMaskTexture == null || boundaryMeshMaskTexture.width != Screen.width || boundaryMeshMaskTexture.height != Screen.height)
{
boundaryMeshMaskTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
boundaryMeshMaskTexture.Create();
}
UpdateCameraFramePlaneObject(mainCamera, directCompositionCamera, configuration, boundaryMeshMaskTexture);
directCompositionCamera.GetComponent<OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = boundaryMeshMaskTexture;
}
}
public override void Cleanup()
{
base.Cleanup();
OVRCompositionUtil.SafeDestroy(ref directCompositionCameraGameObject);
directCompositionCamera = null;
Debug.Log("DirectComposition deactivated");
}
}
#endif
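
Both composition types derive the Unity camera projection from the external camera intrinsics the same way: the vertical field of view comes from the up half-angle tangent, and the aspect ratio from the left/up tangent ratio, implicitly assuming a symmetric frustum. The conversion in isolation (CameraIntrinsicsMath is an illustrative name, not part of the SDK):

using UnityEngine;

// Illustrative sketch, not part of the SDK: convert half-angle tangents from
// external camera intrinsics into Unity's vertical FOV and aspect ratio.
// Assumes a symmetric frustum (UpTan == DownTan, LeftTan == RightTan), as the SDK code does.
public static class CameraIntrinsicsMath
{
    public static void ApplyTo(Camera cam, float upTan, float leftTan)
    {
        cam.fieldOfView = 2.0f * Mathf.Atan(upTan) * Mathf.Rad2Deg; // vertical FOV in degrees
        cam.aspect = leftTan / upTan;                               // width / height
    }
}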


@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 8e9d1c62d6c68c7429ce265558cfd2b2
timeCreated: 1502990248
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,567 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if UNITY_ANDROID && !UNITY_EDITOR
#define OVR_ANDROID_MRC
#endif
using System;
using UnityEngine;
using System.Collections.Generic;
using System.Threading;
using UnityEngine.Rendering;
using Object = UnityEngine.Object;
#if USING_URP
using UnityEngine.Rendering.Universal;
#endif
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_ANDROID
public class OVRExternalComposition : OVRComposition
{
private GameObject previousMainCameraObject = null;
public GameObject foregroundCameraGameObject = null;
public Camera foregroundCamera = null;
public GameObject backgroundCameraGameObject = null;
public Camera backgroundCamera = null;
#if OVR_ANDROID_MRC
private bool skipFrame = false;
private float fpsThreshold = 80.0f;
private bool isFrameSkipped = true;
public bool renderCombinedFrame = false;
public AudioListener audioListener;
public OVRMRAudioFilter audioFilter;
public RenderTexture[] mrcRenderTextureArray = new RenderTexture[2];
public int frameIndex;
public int lastMrcEncodeFrameSyncId;
// When renderCombinedFrame is false, mrcRenderTextureArray stores only the background frame; the foreground is rendered into the separate textures below
public RenderTexture[] mrcForegroundRenderTextureArray = new RenderTexture[2];
// Used for a moving MRC camera: stores the camera pose timestamp so the client can synchronize the in-game camera position with its own during composition
public double[] cameraPoseTimeArray = new double[2];
#endif
public override OVRManager.CompositionMethod CompositionMethod() { return OVRManager.CompositionMethod.External; }
public OVRExternalComposition(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
: base(parentObject, mainCamera, configuration)
{
#if OVR_ANDROID_MRC
renderCombinedFrame = false;
int frameWidth;
int frameHeight;
OVRPlugin.Media.GetMrcFrameSize(out frameWidth, out frameHeight);
Debug.LogFormat("[OVRExternalComposition] Create render texture {0}, {1}", renderCombinedFrame ? frameWidth : frameWidth/2, frameHeight);
for (int i=0; i<2; ++i)
{
mrcRenderTextureArray[i] = new RenderTexture(renderCombinedFrame ? frameWidth : frameWidth/2, frameHeight, 24, RenderTextureFormat.ARGB32);
mrcRenderTextureArray[i].Create();
cameraPoseTimeArray[i] = 0.0;
}
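// Above fpsThreshold the headset refreshes faster than the capture needs,
// so skip every other MRC frame to halve the encode rate.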
skipFrame = OVRManager.display.displayFrequency > fpsThreshold;
OVRManager.DisplayRefreshRateChanged += DisplayRefreshRateChanged;
frameIndex = 0;
lastMrcEncodeFrameSyncId = -1;
if (!renderCombinedFrame)
{
Debug.LogFormat("[OVRExternalComposition] Create extra render textures for foreground");
for (int i = 0; i < 2; ++i)
{
mrcForegroundRenderTextureArray[i] = new RenderTexture(frameWidth / 2, frameHeight, 24, RenderTextureFormat.ARGB32);
mrcForegroundRenderTextureArray[i].Create();
}
}
#endif
RefreshCameraObjects(parentObject, mainCamera, configuration);
}
private void RefreshCameraObjects(GameObject parentObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration)
{
if (mainCamera.gameObject != previousMainCameraObject)
{
Debug.LogFormat("[OVRExternalComposition] Camera refreshed. Rebind camera to {0}", mainCamera.gameObject.name);
OVRCompositionUtil.SafeDestroy(ref backgroundCameraGameObject);
backgroundCamera = null;
OVRCompositionUtil.SafeDestroy(ref foregroundCameraGameObject);
foregroundCamera = null;
RefreshCameraRig(parentObject, mainCamera);
Debug.Assert(backgroundCameraGameObject == null);
if (configuration.instantiateMixedRealityCameraGameObject != null)
{
backgroundCameraGameObject = configuration.instantiateMixedRealityCameraGameObject(mainCamera.gameObject, OVRManager.MrcCameraType.Background);
}
else
{
backgroundCameraGameObject = Object.Instantiate(mainCamera.gameObject);
}
backgroundCameraGameObject.name = "OculusMRC_BackgroundCamera";
backgroundCameraGameObject.transform.parent =
cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
if (backgroundCameraGameObject.GetComponent<AudioListener>()) {
Object.Destroy(backgroundCameraGameObject.GetComponent<AudioListener>());
}
if (backgroundCameraGameObject.GetComponent<OVRManager>()) {
Object.Destroy(backgroundCameraGameObject.GetComponent<OVRManager>());
}
backgroundCamera = backgroundCameraGameObject.GetComponent<Camera>();
backgroundCamera.tag = "Untagged";
#if USING_MRC_COMPATIBLE_URP_VERSION
var backgroundCamData = backgroundCamera.GetUniversalAdditionalCameraData();
if (backgroundCamData != null)
{
backgroundCamData.allowXRRendering = false;
}
#elif USING_URP
Debug.LogError("Using URP with MRC is only supported with URP version 10.0.0 or higher. Consider using Unity 2020 or higher.");
#else
backgroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
#endif
backgroundCamera.depth = 99990.0f;
backgroundCamera.rect = new Rect(0.0f, 0.0f, 0.5f, 1.0f);
backgroundCamera.cullingMask = (backgroundCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
#if OVR_ANDROID_MRC
backgroundCamera.targetTexture = mrcRenderTextureArray[0];
if (!renderCombinedFrame)
{
backgroundCamera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
}
#endif
Debug.Assert(foregroundCameraGameObject == null);
if (configuration.instantiateMixedRealityCameraGameObject != null)
{
foregroundCameraGameObject = configuration.instantiateMixedRealityCameraGameObject(mainCamera.gameObject, OVRManager.MrcCameraType.Foreground);
}
else
{
foregroundCameraGameObject = Object.Instantiate(mainCamera.gameObject);
}
foregroundCameraGameObject.name = "OculusMRC_ForgroundCamera";
foregroundCameraGameObject.transform.parent = cameraInTrackingSpace ? cameraRig.trackingSpace : parentObject.transform;
if (foregroundCameraGameObject.GetComponent<AudioListener>())
{
Object.Destroy(foregroundCameraGameObject.GetComponent<AudioListener>());
}
if (foregroundCameraGameObject.GetComponent<OVRManager>())
{
Object.Destroy(foregroundCameraGameObject.GetComponent<OVRManager>());
}
foregroundCamera = foregroundCameraGameObject.GetComponent<Camera>();
foregroundCamera.tag = "Untagged";
#if USING_MRC_COMPATIBLE_URP_VERSION
var foregroundCamData = foregroundCamera.GetUniversalAdditionalCameraData();
if (foregroundCamData != null)
{
foregroundCamData.allowXRRendering = false;
}
#elif USING_URP
Debug.LogError("Using URP with MRC is only supported with URP version 10.0.0 or higher. Consider using Unity 2020 or higher.");
#else
foregroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
#endif
foregroundCamera.depth = backgroundCamera.depth + 1.0f; // ensure the foreground is rendered after the background
foregroundCamera.rect = new Rect(0.5f, 0.0f, 0.5f, 1.0f);
foregroundCamera.clearFlags = CameraClearFlags.Color;
#if OVR_ANDROID_MRC
foregroundCamera.backgroundColor = configuration.externalCompositionBackdropColorQuest;
#else
foregroundCamera.backgroundColor = configuration.externalCompositionBackdropColorRift;
#endif
foregroundCamera.cullingMask = (foregroundCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
#if OVR_ANDROID_MRC
if (renderCombinedFrame)
{
foregroundCamera.targetTexture = mrcRenderTextureArray[0];
}
else
{
foregroundCamera.targetTexture = mrcForegroundRenderTextureArray[0];
foregroundCamera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
}
#endif
previousMainCameraObject = mainCamera.gameObject;
}
}
#if OVR_ANDROID_MRC
private void RefreshAudioFilter()
{
if (cameraRig != null && (audioListener == null || !audioListener.enabled || !audioListener.gameObject.activeInHierarchy))
{
CleanupAudioFilter();
AudioListener tmpAudioListener = cameraRig.centerEyeAnchor.gameObject.activeInHierarchy ? cameraRig.centerEyeAnchor.GetComponent<AudioListener>() : null;
if (tmpAudioListener != null && !tmpAudioListener.enabled) tmpAudioListener = null;
if (tmpAudioListener == null)
{
if (Camera.main != null && Camera.main.gameObject.activeInHierarchy)
{
tmpAudioListener = Camera.main.GetComponent<AudioListener>();
if (tmpAudioListener != null && !tmpAudioListener.enabled) tmpAudioListener = null;
}
}
if (tmpAudioListener == null)
{
Object[] allListeners = Object.FindObjectsOfType<AudioListener>();
foreach (var l in allListeners)
{
AudioListener al = l as AudioListener;
if (al != null && al.enabled && al.gameObject.activeInHierarchy)
{
tmpAudioListener = al;
break;
}
}
}
if (tmpAudioListener == null)
{
Debug.LogWarning("[OVRExternalComposition] No AudioListener in scene");
}
else
{
Debug.LogFormat("[OVRExternalComposition] AudioListener found, obj {0}", tmpAudioListener.gameObject.name);
}
audioListener = tmpAudioListener;
if(audioListener != null)
{
audioFilter = audioListener.gameObject.AddComponent<OVRMRAudioFilter>();
audioFilter.composition = this;
Debug.LogFormat("OVRMRAudioFilter added");
}
}
}
private float[] cachedAudioDataArray = null;
private int CastMrcFrame(int castTextureIndex)
{
int audioFrames;
int audioChannels;
GetAndResetAudioData(ref cachedAudioDataArray, out audioFrames, out audioChannels);
int syncId = -1;
//Debug.Log("EncodeFrameThreadObject EncodeMrcFrame");
bool ret = false;
if (OVRPlugin.Media.GetMrcInputVideoBufferType() == OVRPlugin.Media.InputVideoBufferType.TextureHandle)
{
ret = OVRPlugin.Media.EncodeMrcFrame(mrcRenderTextureArray[castTextureIndex].GetNativeTexturePtr(),
renderCombinedFrame ? System.IntPtr.Zero : mrcForegroundRenderTextureArray[castTextureIndex].GetNativeTexturePtr(),
cachedAudioDataArray, audioFrames, audioChannels, AudioSettings.dspTime, cameraPoseTimeArray[castTextureIndex], ref syncId);
}
else
{
ret = OVRPlugin.Media.EncodeMrcFrame(mrcRenderTextureArray[castTextureIndex], cachedAudioDataArray, audioFrames, audioChannels, AudioSettings.dspTime, cameraPoseTimeArray[castTextureIndex], ref syncId);
}
if (!ret)
{
Debug.LogWarning("EncodeMrcFrame failed. Likely caused by OBS plugin disconnection");
return -1;
}
return syncId;
}
private void SetCameraTargetTexture(int drawTextureIndex)
{
if (renderCombinedFrame)
{
RenderTexture texture = mrcRenderTextureArray[drawTextureIndex];
if (backgroundCamera.targetTexture != texture)
{
backgroundCamera.targetTexture = texture;
}
if (foregroundCamera.targetTexture != texture)
{
foregroundCamera.targetTexture = texture;
}
}
else
{
RenderTexture bgTexture = mrcRenderTextureArray[drawTextureIndex];
RenderTexture fgTexture = mrcForegroundRenderTextureArray[drawTextureIndex];
if (backgroundCamera.targetTexture != bgTexture)
{
backgroundCamera.targetTexture = bgTexture;
}
if (foregroundCamera.targetTexture != fgTexture)
{
foregroundCamera.targetTexture = fgTexture;
}
}
}
#endif
public override void Update(GameObject gameObject, Camera mainCamera, OVRMixedRealityCaptureConfiguration configuration, OVRManager.TrackingOrigin trackingOrigin)
{
#if OVR_ANDROID_MRC
if (skipFrame && OVRPlugin.Media.IsCastingToRemoteClient()) {
isFrameSkipped = !isFrameSkipped;
if(isFrameSkipped) { return; }
}
#endif
RefreshCameraObjects(gameObject, mainCamera, configuration);
OVRPlugin.SetHandNodePoseStateLatency(0.0); // HandNodePoseStateLatency doesn't apply to external composition; always force it to 0.0
// Compute headset and controller poses in stage space so the MRC client's third-person camera stays consistent across tracking anchors
OVRPose stageToLocalPose = OVRPlugin.GetTrackingTransformRelativePose(OVRPlugin.TrackingOrigin.Stage).ToOVRPose();
OVRPose localToStagePose = stageToLocalPose.Inverse();
OVRPose head = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.Head, OVRPlugin.Step.Render).ToOVRPose();
OVRPose leftC = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandLeft, OVRPlugin.Step.Render).ToOVRPose();
OVRPose rightC = localToStagePose * OVRPlugin.GetNodePose(OVRPlugin.Node.HandRight, OVRPlugin.Step.Render).ToOVRPose();
OVRPlugin.Media.SetMrcHeadsetControllerPose(head.ToPosef(), leftC.ToPosef(), rightC.ToPosef());
#if OVR_ANDROID_MRC
RefreshAudioFilter();
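// Double-buffered capture: render into one texture set while the other is handed
// to the encoder. Background and foreground cameras render on alternate frames,
// so one composited MRC frame spans two game frames.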
int drawTextureIndex = (frameIndex / 2) % 2;
int castTextureIndex = 1 - drawTextureIndex;
backgroundCamera.enabled = (frameIndex % 2) == 0;
foregroundCamera.enabled = (frameIndex % 2) == 1;
if (frameIndex % 2 == 0)
{
if (lastMrcEncodeFrameSyncId != -1)
{
OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
lastMrcEncodeFrameSyncId = -1;
}
lastMrcEncodeFrameSyncId = CastMrcFrame(castTextureIndex);
SetCameraTargetTexture(drawTextureIndex);
}
++ frameIndex;
#endif
backgroundCamera.clearFlags = mainCamera.clearFlags;
backgroundCamera.backgroundColor = mainCamera.backgroundColor;
if (configuration.dynamicCullingMask)
{
backgroundCamera.cullingMask = (mainCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
}
backgroundCamera.nearClipPlane = mainCamera.nearClipPlane;
backgroundCamera.farClipPlane = mainCamera.farClipPlane;
if (configuration.dynamicCullingMask)
{
foregroundCamera.cullingMask = (mainCamera.cullingMask & ~configuration.extraHiddenLayers) | configuration.extraVisibleLayers;
}
foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
foregroundCamera.farClipPlane = mainCamera.farClipPlane;
if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
{
OVRPose worldSpacePose = new OVRPose();
OVRPose trackingSpacePose = new OVRPose();
trackingSpacePose.position = trackingOrigin == OVRManager.TrackingOrigin.EyeLevel ?
OVRMixedReality.fakeCameraEyeLevelPosition :
OVRMixedReality.fakeCameraFloorLevelPosition;
trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
backgroundCamera.aspect = OVRMixedReality.fakeCameraAspect;
foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
foregroundCamera.aspect = OVRMixedReality.fakeCameraAspect;
if (cameraInTrackingSpace)
{
backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
}
else
{
backgroundCamera.transform.FromOVRPose(worldSpacePose);
foregroundCamera.transform.FromOVRPose(worldSpacePose);
}
}
else
{
OVRPlugin.CameraExtrinsics extrinsics;
OVRPlugin.CameraIntrinsics intrinsics;
// Only one external camera is supported for MR so far; always use camera index 0
if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
{
float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
backgroundCamera.fieldOfView = fovY;
backgroundCamera.aspect = aspect;
foregroundCamera.fieldOfView = fovY;
foregroundCamera.aspect = aspect;
if (cameraInTrackingSpace)
{
OVRPose trackingSpacePose = ComputeCameraTrackingSpacePose(extrinsics);
backgroundCamera.transform.FromOVRPose(trackingSpacePose, true);
foregroundCamera.transform.FromOVRPose(trackingSpacePose, true);
}
else
{
OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
backgroundCamera.transform.FromOVRPose(worldSpacePose);
foregroundCamera.transform.FromOVRPose(worldSpacePose);
}
#if OVR_ANDROID_MRC
cameraPoseTimeArray[drawTextureIndex] = extrinsics.LastChangedTimeSeconds;
#endif
}
else
{
Debug.LogError("Failed to get external camera information");
return;
}
}
Vector3 headToExternalCameraVec = mainCamera.transform.position - foregroundCamera.transform.position;
float clipDistance = Vector3.Dot(headToExternalCameraVec, foregroundCamera.transform.forward);
foregroundCamera.farClipPlane = Mathf.Max(foregroundCamera.nearClipPlane + 0.001f, clipDistance);
}
#if OVR_ANDROID_MRC
private void CleanupAudioFilter()
{
if (audioFilter)
{
audioFilter.composition = null;
Object.Destroy(audioFilter);
Debug.LogFormat("OVRMRAudioFilter destroyed");
audioFilter = null;
}
}
#endif
public override void Cleanup()
{
OVRCompositionUtil.SafeDestroy(ref backgroundCameraGameObject);
backgroundCamera = null;
OVRCompositionUtil.SafeDestroy(ref foregroundCameraGameObject);
foregroundCamera = null;
Debug.Log("ExternalComposition deactivated");
#if OVR_ANDROID_MRC
if (lastMrcEncodeFrameSyncId != -1)
{
OVRPlugin.Media.SyncMrcFrame(lastMrcEncodeFrameSyncId);
lastMrcEncodeFrameSyncId = -1;
}
CleanupAudioFilter();
for (int i=0; i<2; ++i)
{
mrcRenderTextureArray[i].Release();
mrcRenderTextureArray[i] = null;
if (!renderCombinedFrame)
{
mrcForegroundRenderTextureArray[i].Release();
mrcForegroundRenderTextureArray[i] = null;
}
}
OVRManager.DisplayRefreshRateChanged -= DisplayRefreshRateChanged;
frameIndex = 0;
#endif
}
private readonly object audioDataLock = new object();
private List<float> cachedAudioData = new List<float>(16384);
private int cachedChannels = 0;
public void CacheAudioData(float[] data, int channels)
{
lock(audioDataLock)
{
if (channels != cachedChannels)
{
cachedAudioData.Clear();
}
cachedChannels = channels;
cachedAudioData.AddRange(data);
//Debug.LogFormat("[CacheAudioData] dspTime {0} indata {1} channels {2} accu_len {3}", AudioSettings.dspTime, data.Length, channels, cachedAudioData.Count);
}
}
public void GetAndResetAudioData(ref float[] audioData, out int audioFrames, out int channels)
{
lock(audioDataLock)
{
//Debug.LogFormat("[GetAndResetAudioData] dspTime {0} accu_len {1}", AudioSettings.dspTime, cachedAudioData.Count);
if (audioData == null || audioData.Length < cachedAudioData.Count)
{
audioData = new float[cachedAudioData.Capacity];
}
cachedAudioData.CopyTo(audioData);
audioFrames = cachedAudioData.Count;
channels = cachedChannels;
cachedAudioData.Clear();
}
}
#if OVR_ANDROID_MRC
private void DisplayRefreshRateChanged(float fromRefreshRate, float toRefreshRate)
{
skipFrame = toRefreshRate > fpsThreshold;
}
#endif
}
#if OVR_ANDROID_MRC
public class OVRMRAudioFilter : MonoBehaviour
{
private bool running = false;
public OVRExternalComposition composition;
void Start()
{
running = true;
}
void OnAudioFilterRead(float[] data, int channels)
{
if (!running)
return;
if (composition != null)
{
composition.CacheAudioData(data, channels);
}
}
}
#endif
#endif
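
OnAudioFilterRead runs on Unity's audio thread while EncodeMrcFrame is driven from the main thread, so CacheAudioData/GetAndResetAudioData above form a lock-guarded producer/consumer handoff. The same pattern in isolation (AudioSampleBuffer is an illustrative name, not part of the SDK):

using System.Collections.Generic;

// Illustrative sketch, not part of the SDK: the audio thread appends interleaved
// samples, and the main thread drains them once per encoded MRC frame.
public class AudioSampleBuffer
{
    private readonly object gate = new object();
    private readonly List<float> samples = new List<float>(16384);
    private int channels;

    // Producer side; called from OnAudioFilterRead on the audio thread.
    public void Append(float[] data, int channelCount)
    {
        lock (gate)
        {
            if (channelCount != channels) samples.Clear(); // channel layout changed; drop stale data
            channels = channelCount;
            samples.AddRange(data);
        }
    }

    // Consumer side; called from the main thread. Reuses 'dst' when it is large enough.
    public int Drain(ref float[] dst, out int channelCount)
    {
        lock (gate)
        {
            if (dst == null || dst.Length < samples.Count)
                dst = new float[samples.Count];
            samples.CopyTo(dst);
            channelCount = channels;
            int count = samples.Count; // number of cached float samples (all channels interleaved)
            samples.Clear();
            return count;
        }
    }
}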


@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 2c109ff55176f71418ec2c06d1b5d28e
timeCreated: 1502990231
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:


@@ -0,0 +1,16 @@
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
public class OVRSandwichComposition
{
// deprecated since SDK 1.41
}


@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 3c02efcdd3fb2aa4e9c641b0c2a54b9a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: