clean project

This commit is contained in:
Helar Jaadla
2022-03-07 17:52:41 +02:00
parent a174b45bd2
commit cbeb10ec35
5100 changed files with 837159 additions and 0 deletions

View File

@@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 27d84f95a4766db44a26aea09cc67373
folderAsset: yes
timeCreated: 1444949045
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,224 @@
/************************************************************************************
Filename : OVRLipSyncDebugConsole.cs
Content : Write to a text string, used by UI.Text
Created : May 22, 2015
Copyright : Copyright Facebook Technologies, LLC and its affiliates.
All rights reserved.
Licensed under the Oculus Audio SDK License Version 3.3 (the "License");
you may not use the Oculus Audio SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/audio-3.3/
Unless required by applicable law or agreed to in writing, the Oculus Audio SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
public class OVRLipSyncDebugConsole : MonoBehaviour
{
public ArrayList messages = new ArrayList();
public int maxMessages = 15; // The max number of messages displayed
public Text textMsg; // text string to display
// Our instance to allow this script to be called without a direct connection.
private static OVRLipSyncDebugConsole s_Instance = null;
// Clear timeout
private bool clearTimeoutOn = false;
private float clearTimeout = 0.0f;
/// <summary>
/// Gets the instance.
/// </summary>
/// <value>The instance.</value>
public static OVRLipSyncDebugConsole instance
{
get
{
if (s_Instance == null)
{
s_Instance = FindObjectOfType(typeof(OVRLipSyncDebugConsole)) as OVRLipSyncDebugConsole;
if (s_Instance == null)
{
GameObject console = new GameObject();
console.AddComponent<OVRLipSyncDebugConsole>();
console.name = "OVRLipSyncDebugConsole";
s_Instance = FindObjectOfType(typeof(OVRLipSyncDebugConsole)) as OVRLipSyncDebugConsole;
}
}
return s_Instance;
}
}
/// <summary>
/// Awake this instance.
/// </summary>
void Awake()
{
s_Instance = this;
Init();
}
/// <summary>
/// Update this instance.
/// </summary>
void Update()
{
if (clearTimeoutOn)
{
clearTimeout -= Time.deltaTime;
if(clearTimeout < 0.0f)
{
Clear();
clearTimeout = 0.0f;
clearTimeoutOn = false;
}
}
}
/// <summary>
/// Init this instance.
/// </summary>
public void Init()
{
if(textMsg == null)
{
Debug.LogWarning("DebugConsole Init WARNING::UI text not set. Will not be able to display anything.");
}
Clear();
}
//+++++++++ INTERFACE FUNCTIONS ++++++++++++++++++++++++++++++++
/// <summary>
/// Log the specified message.
/// </summary>
/// <param name="message">Message.</param>
public static void Log(string message)
{
OVRLipSyncDebugConsole.instance.AddMessage(message, Color.white);
}
/// <summary>
/// Log the specified message and color.
/// </summary>
/// <param name="message">Message.</param>
/// <param name="color">Color.</param>
public static void Log(string message, Color color)
{
OVRLipSyncDebugConsole.instance.AddMessage(message, color);
}
/// <summary>
/// Clear this instance.
/// </summary>
public static void Clear()
{
OVRLipSyncDebugConsole.instance.ClearMessages();
}
/// <summary>
/// Calls clear after a certain time.
/// </summary>
/// <param name="timeToClear">Time to clear.</param>
public static void ClearTimeout(float timeToClear)
{
OVRLipSyncDebugConsole.instance.SetClearTimeout(timeToClear);
}
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
/// <summary>
/// Adds the message.
/// </summary>
/// <param name="message">Message.</param>
/// <param name="color">Color.</param>
public void AddMessage(string message, Color color)
{
messages.Add(message);
if(textMsg != null)
textMsg.color = color;
Display();
}
/// <summary>
/// Clears the messages.
/// </summary>
public void ClearMessages()
{
messages.Clear();
Display();
}
/// <summary>
/// Sets the clear timeout.
/// </summary>
/// <param name="timeout">Timeout.</param>
public void SetClearTimeout(float timeout)
{
clearTimeout = timeout;
clearTimeoutOn = true;
}
/// <summary>
/// Prunes the message list to fit within the maxMessages limit.
/// </summary>
void Prune()
{
if (messages.Count > maxMessages)
{
int diff = messages.Count - maxMessages;
messages.RemoveRange(0, diff);
}
}
/// <summary>
/// Display this instance.
/// </summary>
void Display()
{
if (messages.Count > maxMessages)
{
Prune();
}
if(textMsg != null)
{
textMsg.text = ""; // Clear text out
for (int x = 0; x < messages.Count; x++)
{
textMsg.text += (string)messages[x];
textMsg.text += '\n';
}
}
}
}
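A minimal usage sketch for the console above (not part of the commit): the component name and the Inspector-assigned Text are illustrative; only the instance/Log/ClearTimeout calls come from the class itself.

using UnityEngine;
using UnityEngine.UI;

public class DebugConsoleUsageExample : MonoBehaviour
{
    public Text consoleText; // assign a UI.Text in the Inspector

    void Start()
    {
        // The singleton creates itself on first access; give it a Text to draw into.
        OVRLipSyncDebugConsole.instance.textMsg = consoleText;
        OVRLipSyncDebugConsole.Log("Console ready", Color.green);
        OVRLipSyncDebugConsole.ClearTimeout(2.0f); // auto-clear after two seconds
    }
}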

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: c60ad94815c68aa41a786306cd588495
timeCreated: 1439845885
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,29 @@
/************************************************************************************
Filename : OVRNamedArrayAttribute.cs
Content : Adds support for a named array attribute in the editor
Created : May 17th, 2018
Copyright : Copyright Facebook Technologies, LLC and its affiliates.
All rights reserved.
Licensed under the Oculus Audio SDK License Version 3.3 (the "License");
you may not use the Oculus Audio SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/audio-3.3/
Unless required by applicable law or agreed to in writing, the Oculus Audio SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
// Adds support for a named array attribute in the editor
public class OVRNamedArrayAttribute : PropertyAttribute {
public readonly string[] names;
public OVRNamedArrayAttribute( string[] names ) { this.names = names; }
}
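A usage sketch for the attribute above, with illustrative field and labels; note that relabeling in the Inspector assumes a matching editor PropertyDrawer, which this runtime class by itself does not provide. OVRLipSyncContextTextureFlip later in this commit applies it to its viseme texture array the same way.

using UnityEngine;

public class NamedArrayUsageExample : MonoBehaviour
{
    // With a suitable drawer, each element shows its label instead of "Element N".
    [OVRNamedArray(new string[] { "Low", "Mid", "High" })]
    public float[] bandGains = new float[3];
}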

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 7537accd8e2c6024c860b20e3e7e3424
timeCreated: 1534993516
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,201 @@
/************************************************************************************
Filename : OVRTouchpad.cs
Content : Interface to touchpad
Created : November 13, 2013
Copyright : Copyright Facebook Technologies, LLC and its affiliates.
All rights reserved.
Licensed under the Oculus Audio SDK License Version 3.3 (the "License");
you may not use the Oculus Audio SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/audio-3.3/
Unless required by applicable law or agreed to in writing, the Oculus Audio SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System;
//-------------------------------------------------------------------------------------
// ***** OVRTouchpad
//
// OVRTouchpad is an interface class to a touchpad.
//
public static class OVRTouchpad
{
//-------------------------
// Input enums
public enum TouchEvent { SingleTap, DoubleTap, Left, Right, Up, Down };
// mouse
static Vector3 moveAmountMouse;
static float minMovMagnitudeMouse = 25.0f;
public delegate void OVRTouchpadCallback<TouchEvent>(TouchEvent arg);
static public Delegate touchPadCallbacks = null;
//Disable the unused variable warning
#pragma warning disable 0414
//Ensures that the TouchpadHelper will be created automatically upon start of the game.
static private OVRTouchpadHelper touchpadHelper =
( new GameObject("OVRTouchpadHelper") ).AddComponent< OVRTouchpadHelper >();
#pragma warning restore 0414
// We will call this to create the TouchpadHelper class. This will
// add the Touchpad game object into the world and we can call into
// TouchEvent static functions to hook delegates into for touch capture
static public void Create()
{
// Does nothing but call constructor to add game object into scene
}
// Update
static public void Update()
{
// MOUSE INPUT
if(Input.GetMouseButtonDown(0))
{
moveAmountMouse = Input.mousePosition;
}
else if(Input.GetMouseButtonUp(0))
{
moveAmountMouse -= Input.mousePosition;
HandleInputMouse(ref moveAmountMouse);
}
}
// OnDisable
static public void OnDisable()
{
}
// HandleInputMouse
static void HandleInputMouse(ref Vector3 move)
{
if (touchPadCallbacks == null)
{
return;
}
OVRTouchpadCallback<TouchEvent> callback = touchPadCallbacks as OVRTouchpadCallback<TouchEvent>;
if (callback == null)
{
return;
}
// move is (press position - release position): a small magnitude is a tap,
// and positive x/y components correspond to leftward/downward swipes.
if (move.magnitude < minMovMagnitudeMouse)
{
callback(TouchEvent.SingleTap);
}
else
{
move.Normalize();
// Left/Right
if (Mathf.Abs(move.x) > Mathf.Abs(move.y))
{
if (move.x > 0.0f)
callback(TouchEvent.Left);
else
callback(TouchEvent.Right);
}
// Up/Down
else
{
if (move.y > 0.0f)
callback(TouchEvent.Down);
else
callback(TouchEvent.Up);
}
}
}
static public void AddListener(OVRTouchpadCallback<TouchEvent> handler)
{
touchPadCallbacks = (OVRTouchpadCallback<TouchEvent>)touchPadCallbacks + handler;
}
}
//-------------------------------------------------------------------------------------
// ***** OVRTouchpadHelper
//
// This singleton class gets created and stays resident in the application. It is used to
// trap the touchpad values, which get broadcast to any listener on the "Touchpad" channel.
//
// This class also demonstrates how to make calls from any class that needs these events by
// setting up a listener to "Touchpad" channel.
public sealed class OVRTouchpadHelper : MonoBehaviour
{
void Awake ()
{
DontDestroyOnLoad(gameObject);
}
void Start ()
{
// Add a listener to the OVRTouchpad for testing
OVRTouchpad.AddListener(LocalTouchEventCallback);
}
void Update ()
{
OVRTouchpad.Update();
}
public void OnDisable()
{
OVRTouchpad.OnDisable();
}
// LocalTouchEventCallback
void LocalTouchEventCallback(OVRTouchpad.TouchEvent touchEvent)
{
switch(touchEvent)
{
case(OVRTouchpad.TouchEvent.SingleTap):
// OVRLipSyncDebugConsole.Clear();
// OVRLipSyncDebugConsole.ClearTimeout(1.5f);
// OVRLipSyncDebugConsole.Log("TP-SINGLE TAP");
break;
case(OVRTouchpad.TouchEvent.DoubleTap):
// OVRLipSyncDebugConsole.Clear();
// OVRLipSyncDebugConsole.ClearTimeout(1.5f);
// OVRLipSyncDebugConsole.Log("TP-DOUBLE TAP");
break;
case(OVRTouchpad.TouchEvent.Left):
// OVRLipSyncDebugConsole.Clear();
// OVRLipSyncDebugConsole.ClearTimeout(1.5f);
// OVRLipSyncDebugConsole.Log("TP-SWIPE LEFT");
break;
case(OVRTouchpad.TouchEvent.Right):
// OVRLipSyncDebugConsole.Clear();
// OVRLipSyncDebugConsole.ClearTimeout(1.5f);
// OVRLipSyncDebugConsole.Log("TP-SWIPE RIGHT");
break;
case(OVRTouchpad.TouchEvent.Up):
// OVRLipSyncDebugConsole.Clear();
// OVRLipSyncDebugConsole.ClearTimeout(1.5f);
// OVRLipSyncDebugConsole.Log("TP-SWIPE UP");
break;
case(OVRTouchpad.TouchEvent.Down):
// OVRLipSyncDebugConsole.Clear();
// OVRLipSyncDebugConsole.ClearTimeout(1.5f);
// OVRLipSyncDebugConsole.Log("TP-SWIPE DOWN");
break;
}
}
}
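A minimal listener sketch for the touchpad interface above; the class name and the reaction are illustrative, while Create/AddListener are the calls this file exposes.

using UnityEngine;

public class TouchpadListenerExample : MonoBehaviour
{
    void Start()
    {
        // Ensure the helper GameObject exists, then subscribe to touch events.
        OVRTouchpad.Create();
        OVRTouchpad.AddListener(OnTouch);
    }

    void OnTouch(OVRTouchpad.TouchEvent touchEvent)
    {
        if (touchEvent == OVRTouchpad.TouchEvent.SingleTap)
            Debug.Log("Tap detected");
    }
}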

View File

@@ -0,0 +1,13 @@
fileFormatVersion: 2
guid: edde1cb2a78471f409fce5084e6c720c
timeCreated: 1528830158
licenseType: Store
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,441 @@
/************************************************************************************
Filename : OVRLipSync.cs
Content : Interface to Oculus Lip Sync engine
Created : August 4th, 2015
Copyright : Copyright Facebook Technologies, LLC and its affiliates.
All rights reserved.
Licensed under the Oculus Audio SDK License Version 3.3 (the "License");
you may not use the Oculus Audio SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/audio-3.3/
Unless required by applicable law or agreed to in writing, the Oculus Audio SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System;
using System.Runtime.InteropServices;
//-------------------------------------------------------------------------------------
// ***** OVRLipSync
//
/// <summary>
/// OVRLipSync interfaces into the Oculus lip sync engine. This component should be added
/// into the scene once.
///
/// </summary>
public class OVRLipSync : MonoBehaviour
{
// Error codes that may return from Lip Sync engine
public enum Result
{
Success = 0,
Unknown = -2200, //< An unknown error has occurred
CannotCreateContext = -2201, //< Unable to create a context
InvalidParam = -2202, //< An invalid parameter, e.g. NULL pointer or out of range
BadSampleRate = -2203, //< An unsupported sample rate was declared
MissingDLL = -2204, //< The DLL or shared library could not be found
BadVersion = -2205, //< Mismatched versions between header and libs
UndefinedFunction = -2206 //< An undefined function
};
// Audio buffer data type
public enum AudioDataType
{
// Signed 16-bit integer mono audio stream
S16_Mono,
// Signed 16-bit integer stereo audio stream
S16_Stereo,
// 32-bit float mono audio stream
F32_Mono,
// 32-bit float stereo audio stream
F32_Stereo
};
// Various visemes
public enum Viseme
{
sil,
PP,
FF,
TH,
DD,
kk,
CH,
SS,
nn,
RR,
aa,
E,
ih,
oh,
ou
};
public static readonly int VisemeCount = Enum.GetNames(typeof(Viseme)).Length;
// Enum for sending lip-sync engine specific signals
public enum Signals
{
VisemeOn,
VisemeOff,
VisemeAmount,
VisemeSmoothing,
LaughterAmount
};
public static readonly int SignalCount = Enum.GetNames(typeof(Signals)).Length;
// Enum for provider context to create
public enum ContextProviders
{
Original,
Enhanced,
Enhanced_with_Laughter,
};
/// NOTE: Opaque typedef for lip-sync context is an unsigned int (uint)
/// Current phoneme frame results
[System.Serializable]
public class Frame
{
public void CopyInput(Frame input)
{
frameNumber = input.frameNumber;
frameDelay = input.frameDelay;
input.Visemes.CopyTo(Visemes, 0);
laughterScore = input.laughterScore;
}
public void Reset()
{
frameNumber = 0;
frameDelay = 0;
Array.Clear(Visemes, 0, VisemeCount);
laughterScore = 0;
}
public int frameNumber; // count from start of recognition
public int frameDelay; // in ms
public float[] Visemes = new float[VisemeCount]; // Array of floats for viseme frame. Size of Viseme Count, above
public float laughterScore; // probability of laughter presence.
};
// * * * * * * * * * * * * *
// Import functions
#if !UNITY_IOS || UNITY_EDITOR
public const string strOVRLS = "OVRLipSync";
#else
public const string strOVRLS = "__Internal";
#endif
[DllImport(strOVRLS)]
private static extern int ovrLipSyncDll_Initialize(int samplerate, int buffersize);
[DllImport(strOVRLS)]
private static extern void ovrLipSyncDll_Shutdown();
[DllImport(strOVRLS)]
private static extern IntPtr ovrLipSyncDll_GetVersion(ref int Major,
ref int Minor,
ref int Patch);
[DllImport(strOVRLS)]
private static extern int ovrLipSyncDll_CreateContextEx(ref uint context,
ContextProviders provider,
int sampleRate,
bool enableAcceleration);
[DllImport(strOVRLS)]
private static extern int ovrLipSyncDll_CreateContextWithModelFile(ref uint context,
ContextProviders provider,
string modelPath,
int sampleRate,
bool enableAcceleration);
[DllImport(strOVRLS)]
private static extern int ovrLipSyncDll_DestroyContext(uint context);
[DllImport(strOVRLS)]
private static extern int ovrLipSyncDll_ResetContext(uint context);
[DllImport(strOVRLS)]
private static extern int ovrLipSyncDll_SendSignal(uint context,
Signals signal,
int arg1, int arg2);
[DllImport(strOVRLS)]
private static extern int ovrLipSyncDll_ProcessFrameEx(
uint context,
IntPtr audioBuffer,
uint bufferSize,
AudioDataType dataType,
ref int frameNumber,
ref int frameDelay,
float[] visemes,
int visemeCount,
ref float laughterScore,
float[] laughterCategories,
int laughterCategoriesLength);
// * * * * * * * * * * * * *
// Public members
// * * * * * * * * * * * * *
// Static members
private static Result sInitialized = Result.Unknown;
// Access the singleton interface through this static member.
public static OVRLipSync sInstance = null;
// * * * * * * * * * * * * *
// MonoBehaviour overrides
/// <summary>
/// Awake this instance.
/// </summary>
void Awake()
{
// We can only have one instance of OVRLipSync in a scene (use this for local property query)
if (sInstance == null)
{
sInstance = this;
}
else
{
Debug.LogWarning("OVRLipSync Awake: Only one instance of OVRLipSync can exist in the scene.");
return;
}
if (IsInitialized() != Result.Success)
{
sInitialized = Initialize();
if (sInitialized != Result.Success)
{
Debug.LogWarning("OVRLipSync Awake: Failed to initialize the lip sync engine.");
}
}
// Important: Use the touchpad mechanism for input, call Create on the OVRTouchpad helper class
OVRTouchpad.Create();
}
/// <summary>
/// Raises the destroy event.
/// </summary>
void OnDestroy()
{
if (sInstance != this)
{
Debug.LogWarning(
"OVRLipSync OnDestroy: This is not the correct OVRLipSync instance.");
return;
}
// Do not shut down at this time
// ovrLipSyncDll_Shutdown();
// sInitialized = (int)Result.Unknown;
}
// * * * * * * * * * * * * *
// Public Functions
public static Result Initialize()
{
int sampleRate;
int bufferSize;
int numbuf;
// Get the current sample rate
sampleRate = AudioSettings.outputSampleRate;
// Get the current buffer size and number of buffers
AudioSettings.GetDSPBufferSize(out bufferSize, out numbuf);
String str = System.String.Format
("OVRLipSync Initialize: Queried SampleRate: {0:F0} BufferSize: {1:F0}", sampleRate, bufferSize);
Debug.LogWarning(str);
sInitialized = (Result)ovrLipSyncDll_Initialize(sampleRate, bufferSize);
return sInitialized;
}
public static Result Initialize(int sampleRate, int bufferSize)
{
String str = System.String.Format
("OVRLipSync Initialize: Queried SampleRate: {0:F0} BufferSize: {1:F0}", sampleRate, bufferSize);
Debug.LogWarning(str);
sInitialized = (Result)ovrLipSyncDll_Initialize(sampleRate, bufferSize);
return sInitialized;
}
public static void Shutdown()
{
ovrLipSyncDll_Shutdown();
sInitialized = Result.Unknown;
}
/// <summary>
/// Determines if is initialized.
/// </summary>
/// <returns><c>true</c> if is initialized; otherwise, <c>false</c>.</returns>
public static Result IsInitialized()
{
return sInitialized;
}
/// <summary>
/// Creates a lip-sync context.
/// </summary>
/// <returns>error code</returns>
/// <param name="context">Context.</param>
/// <param name="provider">Provider.</param>
/// <param name="enableAcceleration">Enable DSP Acceleration.</param>
public static Result CreateContext(
ref uint context,
ContextProviders provider,
int sampleRate = 0,
bool enableAcceleration = false)
{
if (IsInitialized() != Result.Success && Initialize() != Result.Success)
return Result.CannotCreateContext;
return (Result)ovrLipSyncDll_CreateContextEx(ref context, provider, sampleRate, enableAcceleration);
}
/// <summary>
/// Creates a lip-sync context with specified model file.
/// </summary>
/// <returns>error code</returns>
/// <param name="context">Context.</param>
/// <param name="provider">Provider.</param>
/// <param name="modelPath">Model Dir.</param>
/// <param name="sampleRate">Sampling Rate.</param>
/// <param name="enableAcceleration">Enable DSP Acceleration.</param>
public static Result CreateContextWithModelFile(
ref uint context,
ContextProviders provider,
string modelPath,
int sampleRate = 0,
bool enableAcceleration = false)
{
if (IsInitialized() != Result.Success && Initialize() != Result.Success)
return Result.CannotCreateContext;
return (Result)ovrLipSyncDll_CreateContextWithModelFile(
ref context,
provider,
modelPath,
sampleRate,
enableAcceleration);
}
/// <summary>
/// Destroy a lip-sync context.
/// </summary>
/// <returns>The context.</returns>
/// <param name="context">Context.</param>
public static Result DestroyContext(uint context)
{
if (IsInitialized() != Result.Success)
return Result.Unknown;
return (Result)ovrLipSyncDll_DestroyContext(context);
}
/// <summary>
/// Resets the context.
/// </summary>
/// <returns>error code</returns>
/// <param name="context">Context.</param>
public static Result ResetContext(uint context)
{
if (IsInitialized() != Result.Success)
return Result.Unknown;
return (Result)ovrLipSyncDll_ResetContext(context);
}
/// <summary>
/// Sends a signal to the lip-sync engine.
/// </summary>
/// <returns>error code</returns>
/// <param name="context">Context.</param>
/// <param name="signal">Signal.</param>
/// <param name="arg1">Arg1.</param>
/// <param name="arg2">Arg2.</param>
public static Result SendSignal(uint context, Signals signal, int arg1, int arg2)
{
if (IsInitialized() != Result.Success)
return Result.Unknown;
return (Result)ovrLipSyncDll_SendSignal(context, signal, arg1, arg2);
}
/// <summary>
/// Process float[] audio buffer by lip-sync engine.
/// </summary>
/// <returns>error code</returns>
/// <param name="context">Context.</param>
/// <param name="audioBuffer"> PCM audio buffer.</param>
/// <param name="frame">Lip-sync Frame.</param>
/// <param name="stereo">Whether buffer is part of stereo or mono stream.</param>
public static Result ProcessFrame(
uint context, float[] audioBuffer, Frame frame, bool stereo = true)
{
if (IsInitialized() != Result.Success)
return Result.Unknown;
var dataType = stereo ? AudioDataType.F32_Stereo : AudioDataType.F32_Mono;
var numSamples = (uint)(stereo ? audioBuffer.Length / 2 : audioBuffer.Length);
var handle = GCHandle.Alloc(audioBuffer, GCHandleType.Pinned);
var rc = ovrLipSyncDll_ProcessFrameEx(context,
handle.AddrOfPinnedObject(), numSamples, dataType,
ref frame.frameNumber, ref frame.frameDelay,
frame.Visemes, frame.Visemes.Length,
ref frame.laughterScore,
null, 0
);
handle.Free();
return (Result)rc;
}
/// <summary>
/// Process short[] audio buffer by lip-sync engine.
/// </summary>
/// <returns>error code</returns>
/// <param name="context">Context.</param>
/// <param name="audioBuffer"> PCM audio buffer.</param>
/// <param name="frame">Lip-sync Frame.</param>
/// <param name="stereo">Whether buffer is part of stereo or mono stream.</param>
public static Result ProcessFrame(
uint context, short[] audioBuffer, Frame frame, bool stereo = true)
{
if (IsInitialized() != Result.Success)
return Result.Unknown;
var dataType = stereo ? AudioDataType.S16_Stereo : AudioDataType.S16_Mono;
var numSamples = (uint)(stereo ? audioBuffer.Length / 2 : audioBuffer.Length);
var handle = GCHandle.Alloc(audioBuffer, GCHandleType.Pinned);
var rc = ovrLipSyncDll_ProcessFrameEx(context,
handle.AddrOfPinnedObject(), numSamples, dataType,
ref frame.frameNumber, ref frame.frameDelay,
frame.Visemes, frame.Visemes.Length,
ref frame.laughterScore,
null, 0
);
handle.Free();
return (Result)rc;
}
}
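A sketch of driving the static API above directly from a custom audio path, assuming mono float PCM; the component and method names are illustrative. CreateContext initializes the engine on demand if Initialize has not run yet.

using UnityEngine;

public class LipSyncRawProcessingExample : MonoBehaviour
{
    private uint context = 0;
    private OVRLipSync.Frame frame = new OVRLipSync.Frame();

    void Start()
    {
        if (OVRLipSync.CreateContext(ref context, OVRLipSync.ContextProviders.Enhanced)
            != OVRLipSync.Result.Success)
            Debug.LogError("Could not create lip sync context");
    }

    // Call with a block of mono F32 PCM samples from any capture source.
    public void OnAudioBlock(float[] monoSamples)
    {
        if (OVRLipSync.ProcessFrame(context, monoSamples, frame, stereo: false)
            == OVRLipSync.Result.Success)
            Debug.Log("Laughter score: " + frame.laughterScore);
    }

    void OnDestroy()
    {
        if (context != 0)
            OVRLipSync.DestroyContext(context);
    }
}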

View File

@@ -0,0 +1,13 @@
fileFormatVersion: 2
guid: 82aa5cb7a870de440baadff9083be41c
timeCreated: 1444935432
licenseType: Store
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,391 @@
/************************************************************************************
Filename : OVRLipSyncContext.cs
Content : Interface to Oculus Lip-Sync engine
Created : August 6th, 2015
Copyright : Copyright Facebook Technologies, LLC and its affiliates.
All rights reserved.
Licensed under the Oculus Audio SDK License Version 3.3 (the "License");
you may not use the Oculus Audio SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/audio-3.3/
Unless required by applicable law or agreed to in writing, the Oculus Audio SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
[RequireComponent(typeof(AudioSource))]
//-------------------------------------------------------------------------------------
// ***** OVRLipSyncContext
//
/// <summary>
/// OVRLipSyncContext interfaces into the Oculus phoneme recognizer.
/// This component should be added into the scene once for each Audio Source.
///
/// </summary>
public class OVRLipSyncContext : OVRLipSyncContextBase
{
// * * * * * * * * * * * * *
// Public members
[Tooltip("Allow capturing of keyboard input to control operation.")]
public bool enableKeyboardInput = false;
[Tooltip("Register a mouse/touch callback to control loopback and gain (requires script restart).")]
public bool enableTouchInput = false;
[Tooltip("Play input audio back through audio output.")]
public bool audioLoopback = false;
[Tooltip("Key to toggle audio loopback.")]
public KeyCode loopbackKey = KeyCode.L;
[Tooltip("Show viseme scores in an OVRLipSyncDebugConsole display.")]
public bool showVisemes = false;
[Tooltip("Key to toggle viseme score display.")]
public KeyCode debugVisemesKey = KeyCode.D;
[Tooltip("Skip data from the Audio Source. Use if you intend to pass audio data in manually.")]
public bool skipAudioSource = false;
[Tooltip("Adjust the linear audio gain multiplier before processing lipsync")]
public float gain = 1.0f;
private bool hasDebugConsole = false;
public KeyCode debugLaughterKey = KeyCode.H;
public bool showLaughter = false;
public float laughterScore = 0.0f;
// * * * * * * * * * * * * *
// Private members
/// <summary>
/// Start this instance.
/// Note: make sure to always have a Start function for classes that have editor scripts.
/// </summary>
void Start()
{
// Add a listener to the OVRTouchpad for touch events
if (enableTouchInput)
{
OVRTouchpad.AddListener(LocalTouchEventCallback);
}
// Find console
OVRLipSyncDebugConsole[] consoles = FindObjectsOfType<OVRLipSyncDebugConsole>();
if (consoles.Length > 0)
{
hasDebugConsole = consoles[0] != null;
}
}
/// <summary>
/// Handle keyboard input
/// </summary>
void HandleKeyboard()
{
// Turn loopback on/off
if (Input.GetKeyDown(loopbackKey))
{
ToggleAudioLoopback();
}
else if (Input.GetKeyDown(debugVisemesKey))
{
showVisemes = !showVisemes;
if (showVisemes)
{
if (hasDebugConsole)
{
Debug.Log("DEBUG SHOW VISEMES: ENABLED");
}
else
{
Debug.LogWarning("Warning: No OVRLipSyncDebugConsole in the scene!");
showVisemes = false;
}
}
else
{
if (hasDebugConsole)
{
OVRLipSyncDebugConsole.Clear();
}
Debug.Log("DEBUG SHOW VISEMES: DISABLED");
}
}
else if (Input.GetKeyDown(debugLaughterKey))
{
showLaughter = !showLaughter;
if (showLaughter)
{
if (hasDebugConsole)
{
Debug.Log("DEBUG SHOW LAUGHTER: ENABLED");
}
else
{
Debug.LogWarning("Warning: No OVRLipSyncDebugConsole in the scene!");
showLaughter = false;
}
}
else
{
if (hasDebugConsole)
{
OVRLipSyncDebugConsole.Clear();
}
Debug.Log("DEBUG SHOW LAUGHTER: DISABLED");
}
}
else if (Input.GetKeyDown(KeyCode.LeftArrow))
{
gain -= 1.0f;
if (gain < 1.0f) gain = 1.0f;
string g = "LINEAR GAIN: ";
g += gain;
if (hasDebugConsole)
{
OVRLipSyncDebugConsole.Clear();
OVRLipSyncDebugConsole.Log(g);
OVRLipSyncDebugConsole.ClearTimeout(1.5f);
}
}
else if (Input.GetKeyDown(KeyCode.RightArrow))
{
gain += 1.0f;
if (gain > 15.0f)
gain = 15.0f;
string g = "LINEAR GAIN: ";
g += gain;
if (hasDebugConsole)
{
OVRLipSyncDebugConsole.Clear();
OVRLipSyncDebugConsole.Log(g);
OVRLipSyncDebugConsole.ClearTimeout(1.5f);
}
}
}
/// <summary>
/// Run processes that need to be updated in our game thread
/// </summary>
void Update()
{
if (enableKeyboardInput)
{
HandleKeyboard();
}
laughterScore = this.Frame.laughterScore;
DebugShowVisemesAndLaughter();
}
/// <summary>
/// Preprocess F32 PCM audio buffer
/// </summary>
/// <param name="data">Data.</param>
/// <param name="channels">Channels.</param>
public void PreprocessAudioSamples(float[] data, int channels)
{
// Increase the gain of the input
for (int i = 0; i < data.Length; ++i)
{
data[i] = data[i] * gain;
}
}
/// <summary>
/// Postprocess F32 PCM audio buffer
/// </summary>
/// <param name="data">Data.</param>
/// <param name="channels">Channels.</param>
public void PostprocessAudioSamples(float[] data, int channels)
{
// Turn off output (so that we don't get feedback from mics too close to speakers)
if (!audioLoopback)
{
for (int i = 0; i < data.Length; ++i)
data[i] = data[i] * 0.0f;
}
}
/// <summary>
/// Pass F32 PCM audio buffer to the lip sync module
/// </summary>
/// <param name="data">Data.</param>
/// <param name="channels">Channels.</param>
public void ProcessAudioSamplesRaw(float[] data, int channels)
{
// Send data into Phoneme context for processing (if context is not 0)
lock (this)
{
if (Context == 0 || OVRLipSync.IsInitialized() != OVRLipSync.Result.Success)
{
return;
}
var frame = this.Frame;
OVRLipSync.ProcessFrame(Context, data, frame, channels == 2);
}
}
/// <summary>
/// Pass S16 PCM audio buffer to the lip sync module
/// </summary>
/// <param name="data">Data.</param>
/// <param name="channels">Channels.</param>
public void ProcessAudioSamplesRaw(short[] data, int channels)
{
// Send data into Phoneme context for processing (if context is not 0)
lock (this)
{
if (Context == 0 || OVRLipSync.IsInitialized() != OVRLipSync.Result.Success)
{
return;
}
var frame = this.Frame;
OVRLipSync.ProcessFrame(Context, data, frame, channels == 2);
}
}
/// <summary>
/// Process F32 audio sample and pass it to the lip sync module for computation
/// </summary>
/// <param name="data">Data.</param>
/// <param name="channels">Channels.</param>
public void ProcessAudioSamples(float[] data, int channels)
{
// Do not process if we are not initialized, or if there is no
// audio source attached to game object
if ((OVRLipSync.IsInitialized() != OVRLipSync.Result.Success) || audioSource == null)
{
return;
}
PreprocessAudioSamples(data, channels);
ProcessAudioSamplesRaw(data, channels);
PostprocessAudioSamples(data, channels);
}
/// <summary>
/// Raises the audio filter read event.
/// </summary>
/// <param name="data">Data.</param>
/// <param name="channels">Channels.</param>
void OnAudioFilterRead(float[] data, int channels)
{
if (!skipAudioSource)
{
ProcessAudioSamples(data, channels);
}
}
/// <summary>
/// Print the visemes and laughter score to game window
/// </summary>
void DebugShowVisemesAndLaughter()
{
if (hasDebugConsole)
{
string seq = "";
if (showLaughter)
{
seq += "Laughter:";
int count = (int)(50.0f * this.Frame.laughterScore);
for (int c = 0; c < count; c++)
seq += "*";
seq += "\n";
}
if (showVisemes)
{
for (int i = 0; i < this.Frame.Visemes.Length; i++)
{
seq += ((OVRLipSync.Viseme)i).ToString();
seq += ":";
int count = (int)(50.0f * this.Frame.Visemes[i]);
for (int c = 0; c < count; c++)
seq += "*";
seq += "\n";
}
}
OVRLipSyncDebugConsole.Clear();
if (seq != "")
{
OVRLipSyncDebugConsole.Log(seq);
}
}
}
void ToggleAudioLoopback()
{
audioLoopback = !audioLoopback;
if (hasDebugConsole)
{
OVRLipSyncDebugConsole.Clear();
OVRLipSyncDebugConsole.ClearTimeout(1.5f);
if (audioLoopback)
OVRLipSyncDebugConsole.Log("LOOPBACK MODE: ENABLED");
else
OVRLipSyncDebugConsole.Log("LOOPBACK MODE: DISABLED");
}
}
// LocalTouchEventCallback
void LocalTouchEventCallback(OVRTouchpad.TouchEvent touchEvent)
{
string g = "LINEAR GAIN: ";
switch (touchEvent)
{
case (OVRTouchpad.TouchEvent.SingleTap):
ToggleAudioLoopback();
break;
case (OVRTouchpad.TouchEvent.Up):
gain += 1.0f;
if (gain > 15.0f)
gain = 15.0f;
g += gain;
if (hasDebugConsole)
{
OVRLipSyncDebugConsole.Clear();
OVRLipSyncDebugConsole.Log(g);
OVRLipSyncDebugConsole.ClearTimeout(1.5f);
}
break;
case (OVRTouchpad.TouchEvent.Down):
gain -= 1.0f;
if (gain < 1.0f) gain = 1.0f;
g += gain;
if (hasDebugConsole)
{
OVRLipSyncDebugConsole.Clear();
OVRLipSyncDebugConsole.Log(g);
OVRLipSyncDebugConsole.ClearTimeout(1.5f);
}
break;
}
}
}
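A sketch of feeding this context manually instead of through OnAudioFilterRead, assuming skipAudioSource is enabled so the Audio Source path does not also consume the buffer; the wrapper name is illustrative.

using UnityEngine;

public class ManualAudioFeedExample : MonoBehaviour
{
    public OVRLipSyncContext lipSyncContext; // assign in the Inspector

    void Start()
    {
        lipSyncContext.skipAudioSource = true; // we push audio in ourselves
    }

    // Call with interleaved stereo F32 PCM from any custom capture path.
    public void FeedSamples(float[] interleavedStereo)
    {
        lipSyncContext.ProcessAudioSamplesRaw(interleavedStereo, 2);
    }
}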

View File

@@ -0,0 +1,13 @@
fileFormatVersion: 2
guid: f43c520a9bad8a3489109c869f454576
timeCreated: 1438890961
licenseType: Store
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,208 @@
/************************************************************************************
Filename : OVRLipSyncContextBase.cs
Content : Interface to Oculus Lip-Sync engine
Created : August 6th, 2015
Copyright : Copyright Facebook Technologies, LLC and its affiliates.
All rights reserved.
Licensed under the Oculus Audio SDK License Version 3.3 (the "License");
you may not use the Oculus Audio SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/audio-3.3/
Unless required by applicable law or agreed to in writing, the Oculus Audio SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
[RequireComponent(typeof(AudioSource))]
//-------------------------------------------------------------------------------------
// ***** OVRLipSyncContextBase
//
/// <summary>
/// OVRLipSyncContextBase interfaces into the Oculus phoneme recognizer.
/// This component should be added into the scene once for each Audio Source.
///
/// </summary>
public class OVRLipSyncContextBase : MonoBehaviour
{
// * * * * * * * * * * * * *
// Public members
public AudioSource audioSource = null;
[Tooltip("Which lip sync provider to use for viseme computation.")]
public OVRLipSync.ContextProviders provider = OVRLipSync.ContextProviders.Enhanced;
[Tooltip("Enable DSP offload on supported Android devices.")]
public bool enableAcceleration = true;
// * * * * * * * * * * * * *
// Private members
private OVRLipSync.Frame frame = new OVRLipSync.Frame();
private uint context = 0; // 0 is no context
private int _smoothing;
public int Smoothing
{
set
{
OVRLipSync.Result result =
OVRLipSync.SendSignal(context, OVRLipSync.Signals.VisemeSmoothing, value, 0);
if (result != OVRLipSync.Result.Success)
{
if (result == OVRLipSync.Result.InvalidParam)
{
Debug.LogError("OVRLipSyncContextBase.SetSmoothing: A viseme smoothing" +
" parameter is invalid, it should be between 1 and 100!");
}
else
{
Debug.LogError("OVRLipSyncContextBase.SetSmoothing: An unexpected" +
" error occured.");
}
}
_smoothing = value;
}
get
{
return _smoothing;
}
}
public uint Context
{
get
{
return context;
}
}
protected OVRLipSync.Frame Frame
{
get
{
return frame;
}
}
/// <summary>
/// Awake this instance.
/// </summary>
void Awake()
{
// Cache the audio source we are going to use to pump data into the lip sync engine
if (!audioSource)
{
audioSource = GetComponent<AudioSource>();
}
lock (this)
{
if (context == 0)
{
if (OVRLipSync.CreateContext(ref context, provider, 0, enableAcceleration)
!= OVRLipSync.Result.Success)
{
Debug.LogError("OVRLipSyncContextBase.Start ERROR: Could not create" +
" Phoneme context.");
return;
}
}
}
}
/// <summary>
/// Raises the destroy event.
/// </summary>
void OnDestroy()
{
// Destroy the context we created for feeding the audio buffer
lock (this)
{
if (context != 0)
{
if (OVRLipSync.DestroyContext(context) != OVRLipSync.Result.Success)
{
Debug.LogError("OVRLipSyncContextBase.OnDestroy ERROR: Could not delete" +
" Phoneme context.");
}
}
}
}
// * * * * * * * * * * * * *
// Public Functions
/// <summary>
/// Gets the current phoneme frame.
/// </summary>
/// <returns>The current frame.</returns>
public OVRLipSync.Frame GetCurrentPhonemeFrame()
{
return frame;
}
/// <summary>
/// Sets a given viseme id blend weight to a given amount
/// </summary>
/// <param name="viseme">Integer viseme ID</param>
/// <param name="amount">Integer viseme amount</param>
public void SetVisemeBlend(int viseme, int amount)
{
OVRLipSync.Result result =
OVRLipSync.SendSignal(context, OVRLipSync.Signals.VisemeAmount, viseme, amount);
if (result != OVRLipSync.Result.Success)
{
if (result == OVRLipSync.Result.InvalidParam)
{
Debug.LogError("OVRLipSyncContextBase.SetVisemeBlend: Viseme ID is invalid.");
}
else
{
Debug.LogError("OVRLipSyncContextBase.SetVisemeBlend: An unexpected" +
" error occured.");
}
}
}
/// <summary>
/// Sets the laughter blend weight to a given amount
/// </summary>
/// <param name="amount">Integer laughter amount</param>
public void SetLaughterBlend(int amount)
{
OVRLipSync.Result result =
OVRLipSync.SendSignal(context, OVRLipSync.Signals.LaughterAmount, amount, 0);
if (result != OVRLipSync.Result.Success)
{
Debug.LogError("OVRLipSyncContextBase.SetLaughterBlend: An unexpected" +
" error occured.");
}
}
/// <summary>
/// Resets the context.
/// </summary>
/// <returns>error code</returns>
public OVRLipSync.Result ResetContext()
{
// Reset visemes to silence etc.
frame.Reset();
return OVRLipSync.ResetContext(context);
}
}
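A sketch that polls a context for its current frame and picks the strongest viseme, e.g. to drive custom visuals; the selection logic is illustrative.

using UnityEngine;

public class DominantVisemeExample : MonoBehaviour
{
    public OVRLipSyncContextBase context; // assign in the Inspector

    void Update()
    {
        OVRLipSync.Frame frame = context.GetCurrentPhonemeFrame();
        int best = 0;
        for (int i = 1; i < frame.Visemes.Length; i++)
            if (frame.Visemes[i] > frame.Visemes[best])
                best = i;
        Debug.Log("Dominant viseme: " + (OVRLipSync.Viseme)best);
    }
}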

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: c0d528b758a004fcaac677043e8de6ad
timeCreated: 1496772358
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,51 @@
/************************************************************************************
Filename : OVRLipSyncContextCanned.cs
Content : Interface to Oculus Lip-Sync engine
Created : August 6th, 2015
Copyright : Copyright Facebook Technologies, LLC and its affiliates.
All rights reserved.
Licensed under the Oculus Audio SDK License Version 3.3 (the "License");
you may not use the Oculus Audio SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/audio-3.3/
Unless required by applicable law or agreed to in writing, the Oculus Audio SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
[RequireComponent(typeof(AudioSource))]
//-------------------------------------------------------------------------------------
// ***** OVRLipSyncContextCanned
//
/// <summary>
/// OVRLipSyncContextCanned drives a canned phoneme sequence based on a pre-generated asset.
///
/// </summary>
public class OVRLipSyncContextCanned : OVRLipSyncContextBase
{
[Tooltip("Pre-computed viseme sequence asset. Compute from audio in Unity with Tools -> Oculus -> Generate Lip Sync Assets.")]
public OVRLipSyncSequence currentSequence;
/// <summary>
/// Run processes that need to be updated in game thread
/// </summary>
void Update()
{
if (audioSource.isPlaying && currentSequence != null)
{
OVRLipSync.Frame currentFrame = currentSequence.GetFrameAtTime(audioSource.time);
this.Frame.CopyInput(currentFrame);
}
}
}
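A wiring sketch for canned playback, assuming a sequence asset already baked from the clip via Tools -> Oculus -> Generate Lip Sync Assets; field names are illustrative.

using UnityEngine;

public class CannedPlaybackExample : MonoBehaviour
{
    public OVRLipSyncContextCanned cannedContext; // on the same object as the AudioSource
    public OVRLipSyncSequence sequence;           // pre-generated viseme asset
    public AudioClip clip;                        // the audio the sequence was baked from

    void Start()
    {
        cannedContext.currentSequence = sequence;
        AudioSource source = cannedContext.GetComponent<AudioSource>();
        source.clip = clip;
        source.Play(); // the context's Update() now copies baked frames by playback time
    }
}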

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: e073e338e215b4ae9a7fcdf6891e7955
timeCreated: 1496772358
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,227 @@
/************************************************************************************
Filename : OVRLipSyncContextMorphTarget.cs
Content : This bridges the viseme output to the morph targets
Created : August 7th, 2015
Copyright : Copyright Facebook Technologies, LLC and its affiliates.
All rights reserved.
Licensed under the Oculus Audio SDK License Version 3.3 (the "License");
you may not use the Oculus Audio SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/audio-3.3/
Unless required by applicable law or agreed to in writing, the Oculus Audio SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Linq;
public class OVRLipSyncContextMorphTarget : MonoBehaviour
{
// PUBLIC
// Manually assign the skinned mesh renderer to this script
[Tooltip("Skinned Mesh Rendered target to be driven by Oculus Lipsync")]
public SkinnedMeshRenderer skinnedMeshRenderer = null;
// Set the blendshape index to go to (-1 means no blendshape is assigned)
[Tooltip("Blendshape index to trigger for each viseme.")]
public int [] visemeToBlendTargets = Enumerable.Range(0, OVRLipSync.VisemeCount).ToArray();
// enable/disable sending signals to viseme engine
[Tooltip("Enable using the test keys defined below to manually trigger each viseme.")]
public bool enableVisemeTestKeys = false;
[Tooltip("Test keys used to manually trigger an individual viseme - by " +
"default the QWERTY row of a US keyboard.")]
public KeyCode[] visemeTestKeys =
{
KeyCode.BackQuote,
KeyCode.Tab,
KeyCode.Q,
KeyCode.W,
KeyCode.E,
KeyCode.R,
KeyCode.T,
KeyCode.Y,
KeyCode.U,
KeyCode.I,
KeyCode.O,
KeyCode.P,
KeyCode.LeftBracket,
KeyCode.RightBracket,
KeyCode.Backslash,
};
[Tooltip("Test key used to manually trigger laughter and visualise the results")]
public KeyCode laughterKey = KeyCode.CapsLock;
[Tooltip("Blendshape index to trigger for laughter")]
public int laughterBlendTarget = OVRLipSync.VisemeCount;
[Range(0.0f, 1.0f)]
[Tooltip("Laughter probability threshold above which the laughter blendshape will be activated")]
public float laughterThreshold = 0.5f;
[Range(0.0f, 3.0f)]
[Tooltip("Laughter animation linear multiplier, the final output will be clamped to 1.0")]
public float laughterMultiplier = 1.5f;
// smoothing amount
[Range(1, 100)]
[Tooltip("Smoothing of 1 will yield only the current predicted viseme, 100 will yield an extremely smooth viseme response.")]
public int smoothAmount = 70;
// PRIVATE
// Look for a lip-sync Context (should be set at the same level as this component)
private OVRLipSyncContextBase lipsyncContext = null;
/// <summary>
/// Start this instance.
/// </summary>
void Start ()
{
// morph target needs to be set manually; possibly other components will need the same
if(skinnedMeshRenderer == null)
{
Debug.LogError("LipSyncContextMorphTarget.Start Error: " +
"Please set the target Skinned Mesh Renderer to be controlled!");
return;
}
// make sure there is a phoneme context assigned to this object
lipsyncContext = GetComponent<OVRLipSyncContextBase>();
if(lipsyncContext == null)
{
Debug.LogError("LipSyncContextMorphTarget.Start Error: " +
"No OVRLipSyncContext component on this object!");
}
else
{
// Send smoothing amount to context
lipsyncContext.Smoothing = smoothAmount;
}
}
/// <summary>
/// Update this instance.
/// </summary>
void Update ()
{
if((lipsyncContext != null) && (skinnedMeshRenderer != null))
{
// get the current viseme frame
OVRLipSync.Frame frame = lipsyncContext.GetCurrentPhonemeFrame();
if (frame != null)
{
SetVisemeToMorphTarget(frame);
SetLaughterToMorphTarget(frame);
}
// TEST visemes by capturing key inputs and sending a signal
CheckForKeys();
// Update smoothing value
if (smoothAmount != lipsyncContext.Smoothing)
{
lipsyncContext.Smoothing = smoothAmount;
}
}
}
/// <summary>
/// Checks the test keys and sends viseme/laughter signals.
/// </summary>
void CheckForKeys()
{
if (enableVisemeTestKeys)
{
for (int i = 0; i < OVRLipSync.VisemeCount; ++i)
{
CheckVisemeKey(visemeTestKeys[i], i, 100);
}
}
CheckLaughterKey();
}
/// <summary>
/// Sets the viseme to morph target.
/// </summary>
void SetVisemeToMorphTarget(OVRLipSync.Frame frame)
{
for (int i = 0; i < visemeToBlendTargets.Length; i++)
{
if (visemeToBlendTargets[i] != -1)
{
// Viseme blend weights are in the range 0..1; blendshape weights expect 0..100
skinnedMeshRenderer.SetBlendShapeWeight(
visemeToBlendTargets[i],
frame.Visemes[i] * 100.0f);
}
}
}
/// <summary>
/// Sets the laughter to morph target.
/// </summary>
void SetLaughterToMorphTarget(OVRLipSync.Frame frame)
{
if (laughterBlendTarget != -1)
{
// Laughter score will be raw classifier output in [0,1]
float laughterScore = frame.laughterScore;
// Threshold, then re-map the remaining [0, 1-threshold] range to [0,1]
laughterScore = laughterScore < laughterThreshold ? 0.0f : laughterScore - laughterThreshold;
if (laughterThreshold < 1.0f)
{
laughterScore /= 1.0f - laughterThreshold;
}
// Apply the multiplier last and clamp so the final output never exceeds 1.0
laughterScore = Mathf.Min(laughterScore * laughterMultiplier, 1.0f);
skinnedMeshRenderer.SetBlendShapeWeight(
laughterBlendTarget,
laughterScore * 100.0f);
}
}
/// <summary>
/// Sends the viseme signal while the test key is held.
/// </summary>
/// <param name="key">Key.</param>
/// <param name="viseme">Viseme index.</param>
/// <param name="amount">Blend amount applied while the key is down.</param>
void CheckVisemeKey(KeyCode key, int viseme, int amount)
{
if (Input.GetKeyDown(key))
{
lipsyncContext.SetVisemeBlend(visemeToBlendTargets[viseme], amount);
}
if (Input.GetKeyUp(key))
{
lipsyncContext.SetVisemeBlend(visemeToBlendTargets[viseme], 0);
}
}
/// <summary>
/// Sends the laughter signal.
/// </summary>
void CheckLaughterKey()
{
if (Input.GetKeyDown(laughterKey))
{
lipsyncContext.SetLaughterBlend(100);
}
if (Input.GetKeyUp(laughterKey))
{
lipsyncContext.SetLaughterBlend(0);
}
}
}
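A setup sketch for the morph target bridge above; the blendshape index offset is an illustrative assumption about the target mesh, and the object is assumed to already carry an audio-driven OVRLipSyncContext.

using UnityEngine;

public class MorphTargetSetupExample : MonoBehaviour
{
    void Start()
    {
        var morph = gameObject.AddComponent<OVRLipSyncContextMorphTarget>();
        morph.skinnedMeshRenderer = GetComponentInChildren<SkinnedMeshRenderer>();
        // Hypothetical mesh layout: visemes stored at blendshape indices 3..17.
        for (int i = 0; i < morph.visemeToBlendTargets.Length; i++)
            morph.visemeToBlendTargets[i] = i + 3;
    }
}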

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: b0b97b38f2f1fd24185315141a6c6a56
timeCreated: 1438985973
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,150 @@
/************************************************************************************
Filename : OVRLipSyncContextTextureFlip.cs
Content : This bridges the phoneme/viseme output to texture flip targets
Created : August 7th, 2015
Copyright : Copyright Facebook Technologies, LLC and its affiliates.
All rights reserved.
Licensed under the Oculus Audio SDK License Version 3.3 (the "License");
you may not use the Oculus Audio SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/audio-3.3/
Unless required by applicable law or agreed to in writing, the Oculus Audio SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
public class OVRLipSyncContextTextureFlip : MonoBehaviour
{
// PUBLIC
// Manually assign the material
public Material material = null;
// Set the textures for each viseme. We should follow the viseme order as specified
// by the Phoneme list
[Tooltip("The texture used for each viseme.")]
[OVRNamedArray(new string[] { "sil", "PP", "FF", "TH", "DD", "kk", "CH",
"SS", "nn", "RR", "aa", "E", "ih", "oh", "ou" })]
public Texture[] Textures = new Texture[OVRLipSync.VisemeCount];
// smoothing amount
[Range(1, 100)]
[Tooltip("Smoothing of 1 will yield only the current predicted viseme," +
"100 will yield an extremely smooth viseme response.")]
public int smoothAmount = 70;
// PRIVATE
// Look for a Phoneme Context (should be set at the same level as this component)
private OVRLipSyncContextBase lipsyncContext = null;
// Capture the old viseme frame (we will write back into this one)
private OVRLipSync.Frame oldFrame = new OVRLipSync.Frame();
/// <summary>
/// Start this instance.
/// </summary>
void Start()
{
// make sure there is a phoneme context assigned to this object
lipsyncContext = GetComponent<OVRLipSyncContextBase>();
if (lipsyncContext == null)
{
Debug.LogWarning("LipSyncContextTextureFlip.Start WARNING:" +
" No lip sync context component set to object");
}
else
{
// Send smoothing amount to context
lipsyncContext.Smoothing = smoothAmount;
}
if (material == null)
{
Debug.LogWarning("LipSyncContextTextureFlip.Start WARNING:" +
" Lip sync context texture flip has no material target to control!");
}
}
/// <summary>
/// Update this instance.
/// </summary>
void Update ()
{
if((lipsyncContext != null) && (material != null))
{
// trap inputs and send signals to phoneme engine for testing purposes
// get the current viseme frame
OVRLipSync.Frame frame = lipsyncContext.GetCurrentPhonemeFrame();
if (frame != null)
{
// Perform smoothing here if on original provider
if (lipsyncContext.provider == OVRLipSync.ContextProviders.Original)
{
// Go through the current and old
for (int i = 0; i < frame.Visemes.Length; i++)
{
// Map smoothAmount 1-100 to a weight of 0.00-0.99 on the old frame
float smoothing = ((smoothAmount - 1) / 100.0f);
oldFrame.Visemes[i] =
oldFrame.Visemes[i] * smoothing +
frame.Visemes[i] * (1.0f - smoothing);
}
}
else
{
// Copy rather than alias, so smoothing never writes into the engine's live frame
frame.Visemes.CopyTo(oldFrame.Visemes, 0);
}
SetVisemeToTexture();
}
}
// Update smoothing value in context
if ((lipsyncContext != null) && (smoothAmount != lipsyncContext.Smoothing))
{
lipsyncContext.Smoothing = smoothAmount;
}
}
/// <summary>
/// Sets the viseme to texture.
/// </summary>
void SetVisemeToTexture()
{
// This setting will run through all the Visemes, find the
// one with the greatest amplitude and set it to max value.
// all other visemes will be set to zero.
int gV = -1;
float gA = 0.0f;
for (int i = 0; i < oldFrame.Visemes.Length; i++)
{
if(oldFrame.Visemes[i] > gA)
{
gV = i;
gA = oldFrame.Visemes[i];
}
}
if ((gV != -1) && (gV < Textures.Length))
{
Texture t = Textures[gV];
if(t != null)
{
material.SetTexture("_MainTex", t);
}
}
}
}
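The Original-provider smoothing above is a simple exponential moving average; a standalone restatement with illustrative names:

public static class VisemeSmoothingExample
{
    // smoothAmount 1..100 maps to a weight of 0.00..0.99 on the previous value:
    // 1 passes the new prediction straight through, 100 changes very slowly.
    public static float Smooth(float oldValue, float newValue, int smoothAmount)
    {
        float smoothing = (smoothAmount - 1) / 100.0f;
        return oldValue * smoothing + newValue * (1.0f - smoothing);
    }
}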

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: bc30a49f30010eb42a8b59ec685eac57
timeCreated: 1444164852
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,372 @@
/************************************************************************************
Filename : OVRLipSyncMicInput.cs
Content : Interface to microphone input
Created : May 12, 2015
Copyright : Copyright Facebook Technologies, LLC and its affiliates.
All rights reserved.
Licensed under the Oculus Audio SDK License Version 3.3 (the "License");
you may not use the Oculus Audio SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/audio-3.3/
Unless required by applicable law or agreed to in writing, the Oculus Audio SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using UnityEngine;
using System.Diagnostics;
using Debug = UnityEngine.Debug;
using System.Threading;
[RequireComponent(typeof(AudioSource))]
public class OVRLipSyncMicInput : MonoBehaviour
{
public enum micActivation
{
HoldToSpeak,
PushToSpeak,
ConstantSpeak
}
// PUBLIC MEMBERS
[Tooltip("Manual specification of Audio Source - " +
"by default will use any attached to the same object.")]
public AudioSource audioSource = null;
[Tooltip("Enable a keypress to toggle the microphone device selection GUI.")]
public bool enableMicSelectionGUI = false;
[Tooltip("Key to toggle the microphone selection GUI if enabled.")]
public KeyCode micSelectionGUIKey = KeyCode.M;
[SerializeField]
[Range(0.0f, 100.0f)]
[Tooltip("Microphone input volume control.")]
private float micInputVolume = 100;
[SerializeField]
[Tooltip("Requested microphone input frequency")]
private int micFrequency = 48000;
public float MicFrequency
{
get { return micFrequency; }
set { micFrequency = (int)Mathf.Clamp((float)value, 0, 96000); }
}
[Tooltip("Microphone input control method. Hold To Speak and Push" +
" To Speak are driven with the Mic Activation Key.")]
public micActivation micControl = micActivation.ConstantSpeak;
[Tooltip("Key used to drive Hold To Speak and Push To Speak methods" +
" of microphone input control.")]
public KeyCode micActivationKey = KeyCode.Space;
[Tooltip("Will contain the string name of the selected microphone device - read only.")]
public string selectedDevice;
// PRIVATE MEMBERS
private bool micSelected = false;
private int minFreq, maxFreq;
private bool focused = true;
private bool initialized = false;
//----------------------------------------------------
// MONOBEHAVIOUR OVERRIDE FUNCTIONS
//----------------------------------------------------
/// <summary>
/// Awake this instance.
/// </summary>
void Awake()
{
// First thing to do, cache the unity audio source (can be managed by the
// user if audio source can change)
if (!audioSource) audioSource = GetComponent<AudioSource>();
if (!audioSource) return; // this should never happen
}
/// <summary>
/// Start this instance.
/// </summary>
void Start()
{
audioSource.loop = true; // Set the AudioClip to loop
audioSource.mute = false;
InitializeMicrophone();
}
/// <summary>
/// Initializes the microphone.
/// </summary>
private void InitializeMicrophone()
{
if (initialized)
{
return;
}
if (Microphone.devices.Length == 0)
{
return;
}
selectedDevice = Microphone.devices[0].ToString();
micSelected = true;
GetMicCaps();
initialized = true;
}
/// <summary>
/// Update this instance.
/// </summary>
void Update()
{
if (!focused)
{
if (Microphone.IsRecording(selectedDevice))
{
StopMicrophone();
}
return;
}
if (!Application.isPlaying)
{
StopMicrophone();
return;
}
// Lazy Microphone initialization (needed on Android)
if (!initialized)
{
InitializeMicrophone();
}
audioSource.volume = (micInputVolume / 100);
//Hold To Speak
if (micControl == micActivation.HoldToSpeak)
{
if (Input.GetKey(micActivationKey))
{
if (!Microphone.IsRecording(selectedDevice))
{
StartMicrophone();
}
}
else
{
if (Microphone.IsRecording(selectedDevice))
{
StopMicrophone();
}
}
}
//Push To Talk
if (micControl == micActivation.PushToSpeak)
{
if (Input.GetKeyDown(micActivationKey))
{
if (Microphone.IsRecording(selectedDevice))
{
StopMicrophone();
}
else if (!Microphone.IsRecording(selectedDevice))
{
StartMicrophone();
}
}
}
//Constant Speak
if (micControl == micActivation.ConstantSpeak)
{
if (!Microphone.IsRecording(selectedDevice))
{
StartMicrophone();
}
}
// Re-open the mic selection GUI on keypress
if (enableMicSelectionGUI)
{
if (Input.GetKeyDown(micSelectionGUIKey))
{
micSelected = false;
}
}
}
/// <summary>
/// Raises the application focus event.
/// </summary>
/// <param name="focus">If set to <c>true</c>: focused.</param>
void OnApplicationFocus(bool focus)
{
focused = focus;
if (!focused)
StopMicrophone();
}
/// <summary>
/// Raises the application pause event.
/// </summary>
/// <param name="pauseStatus">If set to <c>true</c>: paused.</param>
void OnApplicationPause(bool pauseStatus)
{
focused = !pauseStatus;
if (!focused)
StopMicrophone();
}
void OnDisable()
{
StopMicrophone();
}
/// <summary>
    /// Raises the GUI event.
/// </summary>
void OnGUI()
{
MicDeviceGUI((Screen.width / 2) - 150, (Screen.height / 2) - 75, 300, 50, 10, -300);
}
//----------------------------------------------------
// PUBLIC FUNCTIONS
//----------------------------------------------------
/// <summary>
    /// Draws the microphone device selection GUI.
/// </summary>
/// <param name="left">Left.</param>
/// <param name="top">Top.</param>
/// <param name="width">Width.</param>
/// <param name="height">Height.</param>
/// <param name="buttonSpaceTop">Button space top.</param>
/// <param name="buttonSpaceLeft">Button space left.</param>
public void MicDeviceGUI(
float left,
float top,
float width,
float height,
float buttonSpaceTop,
float buttonSpaceLeft)
{
        // If any capture devices exist, the GUI is enabled, and no mic has
        // been picked yet, list the devices as buttons
        if (Microphone.devices.Length >= 1 && enableMicSelectionGUI && !micSelected)
{
for (int i = 0; i < Microphone.devices.Length; ++i)
{
if (GUI.Button(new Rect(left + ((width + buttonSpaceLeft) * i),
top + ((height + buttonSpaceTop) * i), width, height),
Microphone.devices[i].ToString()))
{
StopMicrophone();
selectedDevice = Microphone.devices[i].ToString();
micSelected = true;
GetMicCaps();
StartMicrophone();
}
}
}
}
/// <summary>
/// Gets the mic caps.
/// </summary>
public void GetMicCaps()
{
if (micSelected == false) return;
        // Query the device's supported frequency range
        Microphone.GetDeviceCaps(selectedDevice, out minFreq, out maxFreq);
        if (minFreq == 0 && maxFreq == 0)
        {
            // Unity reports 0/0 when a device supports any frequency;
            // fall back to 44.1 kHz in that case
            Debug.LogWarning("GetMicCaps: device supports any frequency; defaulting to 44100 Hz");
            minFreq = 44100;
            maxFreq = 44100;
}
if (micFrequency > maxFreq)
micFrequency = maxFreq;
}
/// <summary>
/// Starts the microphone.
/// </summary>
public void StartMicrophone()
{
if (micSelected == false) return;
//Starts recording
audioSource.clip = Microphone.Start(selectedDevice, true, 1, micFrequency);
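        // Note (added clarification): Microphone.Start returns a looping
        // one-second AudioClip that Unity fills as a ring buffer; its data is
        // valid only once GetPosition reports a positive offset, hence the wait below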
Stopwatch timer = Stopwatch.StartNew();
// Wait until the recording has started
        while (!(Microphone.GetPosition(selectedDevice) > 0) &&
               timer.Elapsed.TotalMilliseconds < 1000)
        {
            Thread.Sleep(50);
        }
if (Microphone.GetPosition(selectedDevice) <= 0)
{
throw new Exception("Timeout initializing microphone " + selectedDevice);
}
// Play the audio source
audioSource.Play();
}
/// <summary>
/// Stops the microphone.
/// </summary>
public void StopMicrophone()
{
if (micSelected == false) return;
        // If the source was overridden with some other clip, leave it playing;
        // only stop it when it is still playing the microphone clip
if ((audioSource != null) &&
(audioSource.clip != null) &&
(audioSource.clip.name == "Microphone"))
{
audioSource.Stop();
}
        // Reset the lip sync context, if present, so the mouth stops moving
        OVRLipSyncContext context = GetComponent<OVRLipSyncContext>();
        if (context != null)
        {
            context.ResetContext();
        }
Microphone.End(selectedDevice);
}
//----------------------------------------------------
// PRIVATE FUNCTIONS
//----------------------------------------------------
/// <summary>
/// Gets the averaged volume.
/// </summary>
/// <returns>The averaged volume.</returns>
float GetAveragedVolume()
{
        // Placeholder: a speech-recognition backend could supply the average
        // volume here, e.g. return OVRSpeechRec.GetAverageVolume();
return 0.0f;
}
}
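
// --------------------------------------------------------------------
// Illustrative usage sketch (not part of the original SDK file). The
// "MicSetupExample" class and the key/frequency values chosen below are
// hypothetical; only the public OVRLipSyncMicInput members declared
// above are assumed.
// --------------------------------------------------------------------
[RequireComponent(typeof(OVRLipSyncMicInput))]
public class MicSetupExample : MonoBehaviour
{
    void Start()
    {
        OVRLipSyncMicInput micInput = GetComponent<OVRLipSyncMicInput>();
        // Capture only while a key is held down
        micInput.micControl = OVRLipSyncMicInput.micActivation.HoldToSpeak;
        micInput.micActivationKey = KeyCode.V;
        // Request 44.1 kHz; the setter clamps the value to [0, 96000]
        micInput.MicFrequency = 44100;
    }
}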

View File

@@ -0,0 +1,13 @@
fileFormatVersion: 2
guid: 02d5ed157083b494e85013bad8fd5e12
timeCreated: 1437430905
licenseType: Store
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,162 @@
/************************************************************************************
Filename : OVRLipSyncSequence.cs
Content : LipSync frames container
Created : May 17th, 2018
Copyright : Copyright Facebook Technologies, LLC and its affiliates.
All rights reserved.
Licensed under the Oculus Audio SDK License Version 3.3 (the "License");
you may not use the Oculus Audio SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/audio-3.3/
Unless required by applicable law or agreed to in writing, the Oculus Audio SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using System.Collections.Generic;
using System.IO;
using UnityEngine;
// Sequence - holds ordered entries for playback
[System.Serializable]
public class OVRLipSyncSequence : ScriptableObject
{
public List<OVRLipSync.Frame> entries = new List<OVRLipSync.Frame>();
public float length; // in seconds
public OVRLipSync.Frame GetFrameAtTime(float time)
{
OVRLipSync.Frame frame = null;
if (time < length && entries.Count > 0)
{
float percentComplete = time / length;
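            // Map normalized playback time to a frame index; since
            // time < length here, the index stays within list bounds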
frame = entries[(int)(entries.Count * percentComplete)];
}
return frame;
}
#if UNITY_EDITOR
private static readonly int sSampleSize = 1024;
public static OVRLipSyncSequence CreateSequenceFromAudioClip(
AudioClip clip, bool useOfflineModel = false)
{
OVRLipSyncSequence sequence = null;
if (clip.channels > 2)
{
Debug.LogError(clip.name +
": Cannot process phonemes from an audio clip with " +
"more than 2 channels");
return null;
}
if (clip.loadType != AudioClipLoadType.DecompressOnLoad)
{
Debug.LogError(clip.name +
": Cannot process phonemes from an audio clip unless " +
"its load type is set to DecompressOnLoad.");
return null;
}
if (OVRLipSync.Initialize(clip.frequency, sSampleSize) != OVRLipSync.Result.Success)
{
Debug.LogError("Could not create Lip Sync engine.");
return null;
}
if (clip.loadState != AudioDataLoadState.Loaded)
{
Debug.LogError("Clip is not loaded!");
return null;
}
uint context = 0;
OVRLipSync.Result result = useOfflineModel
? OVRLipSync.CreateContextWithModelFile(
ref context,
OVRLipSync.ContextProviders.Enhanced,
Path.Combine(Application.dataPath, "Oculus/LipSync/Assets/OfflineModel/ovrlipsync_offline_model.pb"))
: OVRLipSync.CreateContext(ref context, OVRLipSync.ContextProviders.Enhanced);
if (result != OVRLipSync.Result.Success)
{
Debug.LogError("Could not create Phoneme context. (" + result + ")");
OVRLipSync.Shutdown();
return null;
}
List<OVRLipSync.Frame> frames = new List<OVRLipSync.Frame>();
float[] samples = new float[sSampleSize * clip.channels];
        // Process one empty frame to query the engine's latency (frameDelay)
        OVRLipSync.Frame dummyFrame = new OVRLipSync.Frame();
        OVRLipSync.ProcessFrame(
            context,
            samples,
            dummyFrame,
            clip.channels == 2
        );
// frame delay in ms
float frameDelayInMs = dummyFrame.frameDelay;
int frameOffset = (int)(frameDelayInMs * clip.frequency / 1000);
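        // frameOffset compensates for that latency: the loop below runs
        // frameOffset extra samples past the end of the clip and drops the
        // frames produced for the first frameOffset samples, so the visemes
        // line up with the audio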
int totalSamples = clip.samples;
for (int x = 0; x < totalSamples + frameOffset; x += sSampleSize)
{
int remainingSamples = totalSamples - x;
            if (remainingSamples >= sSampleSize)
            {
                clip.GetData(samples, x);
            }
            else if (remainingSamples > 0)
            {
                // Partial final block: copy what remains and zero-pad the rest
                float[] samples_clip = new float[remainingSamples * clip.channels];
                clip.GetData(samples_clip, x);
                Array.Copy(samples_clip, samples, samples_clip.Length);
                Array.Clear(samples, samples_clip.Length, samples.Length - samples_clip.Length);
            }
            else
            {
                // Past the end of the clip: feed silence
                Array.Clear(samples, 0, samples.Length);
            }
OVRLipSync.Frame frame = new OVRLipSync.Frame();
if (clip.channels == 2)
{
                // stereo data is interleaved: alternating left/right floats
OVRLipSync.ProcessFrame(context, samples, frame);
}
else
{
// mono
OVRLipSync.ProcessFrame(context, samples, frame, false);
}
if (x < frameOffset)
{
continue;
}
frames.Add(frame);
}
Debug.Log(clip.name + " produced " + frames.Count +
" viseme frames, playback rate is " + (frames.Count / clip.length) +
" fps");
OVRLipSync.DestroyContext(context);
OVRLipSync.Shutdown();
sequence = ScriptableObject.CreateInstance<OVRLipSyncSequence>();
sequence.entries = frames;
sequence.length = clip.length;
return sequence;
}
#endif
}
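
// --------------------------------------------------------------------
// Illustrative usage sketch (not part of the original SDK file): a
// minimal editor menu item that bakes the selected AudioClip into a
// sequence asset via CreateSequenceFromAudioClip. The class name, menu
// path, and output asset path are hypothetical.
// --------------------------------------------------------------------
#if UNITY_EDITOR
public static class OVRLipSyncSequenceBakeExample
{
    [UnityEditor.MenuItem("Tools/LipSync/Bake Selected Clip (Example)")]
    private static void BakeSelectedClip()
    {
        AudioClip clip = UnityEditor.Selection.activeObject as AudioClip;
        if (clip == null)
        {
            Debug.LogWarning("Select an AudioClip in the Project window first.");
            return;
        }
        // CreateSequenceFromAudioClip validates channel count and load type
        OVRLipSyncSequence sequence =
            OVRLipSyncSequence.CreateSequenceFromAudioClip(clip);
        if (sequence != null)
        {
            UnityEditor.AssetDatabase.CreateAsset(
                sequence, "Assets/" + clip.name + "_lipSyncSequence.asset");
            UnityEditor.AssetDatabase.SaveAssets();
        }
    }
}
#endif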

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 354250b5dc6a14f49b541724e9dd3c37
timeCreated: 1496772358
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: