1
0
forked from cgvr/DeltaVR

non-vr lobby, version fix

This commit is contained in:
joonasp
2022-06-29 14:45:17 +03:00
parent 5774be9822
commit 04baadfad1
1774 changed files with 573069 additions and 1533 deletions

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: bd72d08421bfd144c9627c0d3e37d0f6
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,77 @@
#if UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace Photon.Voice.MacOS
{
public class AudioInChangeNotifier : IAudioInChangeNotifier
{
    public bool IsSupported => true;

    const string lib_name = "AudioIn";

    [DllImport(lib_name)]
    private static extern IntPtr Photon_Audio_In_CreateChangeNotifier(int instanceID, Action<int> callback);
    [DllImport(lib_name)]
    private static extern IntPtr Photon_Audio_In_DestroyChangeNotifier(IntPtr handle);

    private delegate void CallbackDelegate(int instanceID);

    IntPtr handle;
    int instanceID;
    Action callback;

    /// <summary>Creates a native change notifier and registers this instance to receive its callbacks.</summary>
    /// <param name="callback">Invoked when the native layer reports an audio input change.</param>
    /// <param name="logger">Logger (not used by this implementation).</param>
    public AudioInChangeNotifier(Action callback, ILogger logger)
    {
        this.callback = callback;
        // BUGFIX: reserve the ID and register under the same lock that guards instanceCnt.
        // Previously instanceCnt was read OUTSIDE the lock before the native call, so two
        // concurrently constructed notifiers could be created with the same instance ID.
        lock (instancePerHandle)
        {
            this.instanceID = instanceCnt;
            this.handle = Photon_Audio_In_CreateChangeNotifier(this.instanceID, nativeCallback);
            instancePerHandle.Add(instanceCnt++, this);
        }
    }

    // IL2CPP does not support marshaling delegates that point to instance methods to native code.
    // Using static method and per instance table.
    static int instanceCnt;
    private static Dictionary<int, AudioInChangeNotifier> instancePerHandle = new Dictionary<int, AudioInChangeNotifier>();

    [MonoPInvokeCallbackAttribute(typeof(CallbackDelegate))]
    private static void nativeCallback(int instanceID)
    {
        AudioInChangeNotifier instance;
        bool ok;
        lock (instancePerHandle)
        {
            ok = instancePerHandle.TryGetValue(instanceID, out instance);
        }
        if (ok)
        {
            // Invoke outside the lock so a long-running user callback cannot block other instances.
            instance.callback();
        }
    }

    /// <summary>If not null, the enumerator is in invalid state.</summary>
    public string Error { get; private set; }

    /// <summary>Disposes enumerator.
    /// Call it to free native resources.
    /// </summary>
    public void Dispose()
    {
        lock (instancePerHandle)
        {
            instancePerHandle.Remove(instanceID);
        }
        if (handle != IntPtr.Zero)
        {
            Photon_Audio_In_DestroyChangeNotifier(handle);
            handle = IntPtr.Zero;
        }
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 893b0d66c964e974798c7da6a2e88cd0
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,96 @@
#if UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace Photon.Voice.MacOS
{
/// <summary>Marker attribute for static methods that are passed to native code as callbacks;
/// required so AOT/IL2CPP builds keep and correctly marshal the annotated method.</summary>
public class MonoPInvokeCallbackAttribute : System.Attribute
{
    // Delegate type the annotated method is marshaled as; kept for AOT tooling, not read at runtime.
    private Type type;

    public MonoPInvokeCallbackAttribute(Type t)
    {
        type = t;
    }
}
public class AudioInPusher : IAudioPusher<float>
{
    const string lib_name = "AudioIn";

    [DllImport(lib_name)]
    private static extern IntPtr Photon_Audio_In_CreatePusher(int instanceID, int deviceID, Action<int, IntPtr, int> pushCallback);
    [DllImport(lib_name)]
    private static extern void Photon_Audio_In_Destroy(IntPtr handler);

    private delegate void CallbackDelegate(int instanceID, IntPtr buf, int len);

    /// <summary>Creates a native audio pusher for the given capture device.</summary>
    /// <param name="deviceID">Native device ID (as produced by the device enumerator).</param>
    /// <param name="logger">Used to report construction errors.</param>
    public AudioInPusher(int deviceID, ILogger logger)
    {
        this.deviceID = deviceID;
        try
        {
            // BUGFIX: instancePerHandle is also read from the native callback thread, so all
            // reads/writes must be guarded by the same lock; previously the constructor
            // mutated the table (and read instanceCnt) with no synchronization at all.
            lock (instancePerHandle)
            {
                this.instanceID = instanceCnt;
                handle = Photon_Audio_In_CreatePusher(this.instanceID, deviceID, nativePushCallback);
                instancePerHandle.Add(instanceCnt++, this);
            }
        }
        catch (Exception e)
        {
            Error = e.ToString();
            if (Error == null) // should never happen but since Error used as validity flag, make sure that it's not null
            {
                Error = "Exception in AudioInPusher constructor";
            }
            logger.LogError("[PV] AudioInPusher: " + Error);
        }
    }

    private int deviceID;
    private int instanceID;

    // IL2CPP does not support marshaling delegates that point to instance methods to native code.
    // Using static method and per instance table.
    static int instanceCnt;
    private static Dictionary<int, AudioInPusher> instancePerHandle = new Dictionary<int, AudioInPusher>();

    [MonoPInvokeCallbackAttribute(typeof(CallbackDelegate))]
    private static void nativePushCallback(int instanceID, IntPtr buf, int len)
    {
        AudioInPusher instance;
        bool ok;
        lock (instancePerHandle)
        {
            ok = instancePerHandle.TryGetValue(instanceID, out instance);
        }
        if (ok)
        {
            instance.push(buf, len);
        }
    }

    IntPtr handle;
    Action<float[]> pushCallback;
    ObjectFactory<float[], int> bufferFactory;

    // Supposed to be called once at voice initialization.
    // Otherwise recreate native object (instead of adding 'set callback' method to native interface)
    public void SetCallback(Action<float[]> callback, ObjectFactory<float[], int> bufferFactory)
    {
        this.bufferFactory = bufferFactory;
        this.pushCallback = callback;
    }

    // Copies the native sample buffer into a managed array and forwards it to the registered callback.
    private void push(IntPtr buf, int len)
    {
        if (this.pushCallback != null)
        {
            var bufManaged = bufferFactory.New(len);
            Marshal.Copy(buf, bufManaged, 0, len);
            pushCallback(bufManaged);
        }
    }

    public int Channels { get { return 1; } }
    public int SamplingRate { get { return 44100; } }

    /// <summary>If not null, the pusher is in invalid state.</summary>
    public string Error { get; private set; }

    /// <summary>Unregisters the instance and destroys the native pusher.</summary>
    public void Dispose()
    {
        // BUGFIX: resolves the old "TODO: Remove this from instancePerHandle" — the entry was
        // never removed, leaking the dictionary slot and keeping the instance alive forever.
        lock (instancePerHandle)
        {
            instancePerHandle.Remove(instanceID);
        }
        if (handle != IntPtr.Zero)
        {
            Photon_Audio_In_Destroy(handle);
            handle = IntPtr.Zero;
        }
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b0e2dc3e0f865a747b515b1a0450cc12
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,56 @@
#if UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX
using System;
using System.Runtime.InteropServices;
namespace Photon.Voice.MacOS
{
/// <summary>Reads 44.1 kHz mono float samples from a macOS capture device via the AudioIn native plugin.</summary>
public class AudioInReader : IAudioReader<float>
{
    const string lib_name = "AudioIn";

    [DllImport(lib_name)]
    private static extern IntPtr Photon_Audio_In_CreateReader(int deviceID);
    [DllImport(lib_name)]
    private static extern void Photon_Audio_In_Destroy(IntPtr handler);
    [DllImport(lib_name)]
    private static extern bool Photon_Audio_In_Read(IntPtr handle, float[] buf, int len);

    IntPtr audioIn;

    /// <summary>Creates the native reader; on failure records the error and logs it.</summary>
    /// <param name="deviceID">Native capture device ID.</param>
    /// <param name="logger">Used to report construction errors.</param>
    public AudioInReader(int deviceID, ILogger logger)
    {
        try
        {
            audioIn = Photon_Audio_In_CreateReader(deviceID);
        }
        catch (Exception e)
        {
            var msg = e.ToString();
            if (msg == null) // should never happen but since Error used as validity flag, make sure that it's not null
            {
                msg = "Exception in AudioInReader constructor";
            }
            Error = msg;
            logger.LogError("[PV] AudioInReader: " + Error);
        }
    }

    public int Channels { get { return 1; } }
    public int SamplingRate { get { return 44100; } }

    /// <summary>If not null, the reader is in invalid state.</summary>
    public string Error { get; private set; }

    /// <summary>Destroys the native reader. Call it to free native resources.</summary>
    public void Dispose()
    {
        if (audioIn == IntPtr.Zero)
        {
            return;
        }
        Photon_Audio_In_Destroy(audioIn);
        audioIn = IntPtr.Zero;
    }

    /// <summary>Fills <paramref name="buf"/> with samples; false when disposed or no data is available.</summary>
    public bool Read(float[] buf)
    {
        if (audioIn == IntPtr.Zero)
        {
            return false;
        }
        return Photon_Audio_In_Read(audioIn, buf, buf.Length);
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7f22e06cf781af34abf9e24bf6010fce
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,70 @@
#if UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace Photon.Voice.MacOS
{
/// <summary>Enumerates microphones available on device.
/// </summary>
/// <summary>Enumerates microphones available on device.
/// </summary>
public class AudioInEnumerator : DeviceEnumeratorBase
{
    const string lib_name = "AudioIn";

    [DllImport(lib_name)]
    private static extern IntPtr Photon_Audio_In_CreateMicEnumerator();
    [DllImport(lib_name)]
    private static extern void Photon_Audio_In_DestroyMicEnumerator(IntPtr handle);
    [DllImport(lib_name)]
    private static extern int Photon_Audio_In_MicEnumerator_Count(IntPtr handle);
    [DllImport(lib_name)]
    private static extern IntPtr Photon_Audio_In_MicEnumerator_NameAtIndex(IntPtr handle, int idx);
    [DllImport(lib_name)]
    private static extern int Photon_Audio_In_MicEnumerator_IDAtIndex(IntPtr handle, int idx);

    IntPtr handle;

    public AudioInEnumerator(ILogger logger) : base(logger)
    {
        Refresh();
    }

    /// <summary>Refreshes the microphones list.
    /// </summary>
    public override void Refresh()
    {
        // Release any previous native enumerator before creating a new one.
        Dispose();
        try
        {
            handle = Photon_Audio_In_CreateMicEnumerator();
            var count = Photon_Audio_In_MicEnumerator_Count(handle);
            devices = new List<DeviceInfo>();
            for (int i = 0; i < count; i++)
            {
                devices.Add(new DeviceInfo(Photon_Audio_In_MicEnumerator_IDAtIndex(handle, i), Marshal.PtrToStringAuto(Photon_Audio_In_MicEnumerator_NameAtIndex(handle, i))));
            }
            Error = null;
        }
        catch (Exception e)
        {
            Error = e.ToString();
            if (Error == null) // should never happen but since Error used as validity flag, make sure that it's not null
            {
                Error = "Exception in AudioInEnumerator.Refresh()";
            }
        }
    }

    /// <summary>Disposes enumerator.
    /// Call it to free native resources.
    /// </summary>
    public override void Dispose()
    {
        // BUGFIX: destroy the native enumerator whenever a handle exists. Previously the
        // destroy call was skipped when Error != null, leaking the handle if an error
        // occurred AFTER the enumerator had been created (e.g. while reading device names).
        if (handle != IntPtr.Zero)
        {
            Photon_Audio_In_DestroyMicEnumerator(handle);
            handle = IntPtr.Zero;
        }
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7dcf1a594e0ea9845b5ec7ae2931428b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,83 @@
#if (UNITY_IOS && !UNITY_EDITOR) || __IOS__
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace Photon.Voice.IOS
{
public class AudioInChangeNotifier : IAudioInChangeNotifier
{
    public bool IsSupported => true;

    const string lib_name = "__Internal";

    [DllImport(lib_name)]
    private static extern IntPtr Photon_Audio_In_CreateChangeNotifier(int instanceID, Action<int> callback);
    [DllImport(lib_name)]
    private static extern IntPtr Photon_Audio_In_DestroyChangeNotifier(IntPtr handle);

    private delegate void CallbackDelegate(int instanceID);

    IntPtr handle;
    int instanceID;
    Action callback;

    /// <summary>Creates a native change notifier and registers this instance to receive its callbacks.</summary>
    /// <param name="callback">Invoked when the native layer reports an audio input change.</param>
    /// <param name="logger">Logger (not used by this implementation).</param>
    public AudioInChangeNotifier(Action callback, ILogger logger)
    {
        this.callback = callback;
        // BUGFIX: reserve the ID and register under the same lock that guards instanceCnt.
        // Previously instanceCnt was read OUTSIDE the lock before the native call, so two
        // concurrently constructed notifiers could be created with the same instance ID.
        lock (instancePerHandle)
        {
            this.instanceID = instanceCnt;
            this.handle = Photon_Audio_In_CreateChangeNotifier(this.instanceID, nativeCallback);
            instancePerHandle.Add(instanceCnt++, this);
        }
    }

    /// <summary>Marker attribute for static methods passed to native code as callbacks (AOT/IL2CPP requirement).</summary>
    public class MonoPInvokeCallbackAttribute : System.Attribute
    {
        private Type type;
        public MonoPInvokeCallbackAttribute(Type t) { type = t; }
    }

    // IL2CPP does not support marshaling delegates that point to instance methods to native code.
    // Using static method and per instance table.
    static int instanceCnt;
    private static Dictionary<int, AudioInChangeNotifier> instancePerHandle = new Dictionary<int, AudioInChangeNotifier>();

    [MonoPInvokeCallbackAttribute(typeof(CallbackDelegate))]
    private static void nativeCallback(int instanceID)
    {
        AudioInChangeNotifier instance;
        bool ok;
        lock (instancePerHandle)
        {
            ok = instancePerHandle.TryGetValue(instanceID, out instance);
        }
        if (ok)
        {
            // Invoke outside the lock so a long-running user callback cannot block other instances.
            instance.callback();
        }
    }

    /// <summary>If not null, the enumerator is in invalid state.</summary>
    public string Error { get; private set; }

    /// <summary>Disposes enumerator.
    /// Call it to free native resources.
    /// </summary>
    public void Dispose()
    {
        lock (instancePerHandle)
        {
            instancePerHandle.Remove(instanceID);
        }
        if (handle != IntPtr.Zero)
        {
            Photon_Audio_In_DestroyChangeNotifier(handle);
            handle = IntPtr.Zero;
        }
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 18c26ffb450f01444a49013a7455b8e7
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,139 @@
#if (UNITY_IOS && !UNITY_EDITOR) || __IOS__
using System;
using System.Threading;
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace Photon.Voice.IOS
{
/// <summary>Marker attribute for static methods that are passed to native code as callbacks;
/// required so AOT/IL2CPP builds keep and correctly marshal the annotated method.</summary>
public class MonoPInvokeCallbackAttribute : System.Attribute
{
    // Delegate type the annotated method is marshaled as; kept for AOT tooling, not read at runtime.
    private Type type;

    public MonoPInvokeCallbackAttribute(Type t)
    {
        type = t;
    }
}
public class AudioInPusher : IAudioPusher<float>, IResettable
{
    const string lib_name = "__Internal";

    [DllImport(lib_name)]
    private static extern IntPtr Photon_Audio_In_CreatePusher(int instanceID, Action<int, IntPtr, int> pushCallback, int sessionCategory, int sessionMode, int sessionCategoryOptions);
    [DllImport(lib_name)]
    private static extern void Photon_Audio_In_Reset(IntPtr handler);
    [DllImport(lib_name)]
    private static extern void Photon_Audio_In_Destroy(IntPtr handler);

    private delegate void CallbackDelegate(int instanceID, IntPtr buf, int len);

    // volatile: set by the initialization thread, polled by Dispose() on another thread.
    private volatile bool initializationFinished;

    /// <summary>Starts asynchronous creation of the native pusher with the given audio session parameters.</summary>
    /// <param name="sessParam">AVAudioSession category / mode / options applied by the native plugin.</param>
    /// <param name="logger">Used to report construction errors.</param>
    public AudioInPusher(AudioSessionParameters sessParam, ILogger logger)
    {
        // initialization in a separate thread to avoid 0.5 - 1 sec. pauses in main thread execution
        var t = new Thread(() =>
        {
            lock (instancePerHandle) // prevent concurrent initialization
            {
                try
                {
                    var handle = Photon_Audio_In_CreatePusher(instanceCnt, nativePushCallback, (int)sessParam.Category, (int)sessParam.Mode, sessParam.CategoryOptionsToInt());
                    this.handle = handle;
                    this.instanceID = instanceCnt;
                    instancePerHandle.Add(instanceCnt++, this);
                }
                catch (Exception e)
                {
                    Error = e.ToString();
                    if (Error == null) // should never happen but since Error used as validity flag, make sure that it's not null
                    {
                        Error = "Exception in AudioInPusher constructor";
                    }
                    logger.LogError("[PV] AudioInPusher: " + Error);
                }
                finally
                {
                    initializationFinished = true;
                }
            }
        });
        Util.SetThreadName(t, "[PV] IOSAudioInPusherCtr");
        t.Start();
    }

    // IL2CPP does not support marshaling delegates that point to instance methods to native code.
    // Using static method and per instance table.
    static int instanceCnt;
    private static Dictionary<int, AudioInPusher> instancePerHandle = new Dictionary<int, AudioInPusher>();

    [MonoPInvokeCallbackAttribute(typeof(CallbackDelegate))]
    private static void nativePushCallback(int instanceID, IntPtr buf, int len)
    {
        AudioInPusher instance;
        bool ok;
        lock (instancePerHandle)
        {
            ok = instancePerHandle.TryGetValue(instanceID, out instance);
        }
        if (ok)
        {
            instance.push(buf, len);
        }
    }

    IntPtr handle;
    int instanceID;
    Action<float[]> pushCallback;
    ObjectFactory<float[], int> bufferFactory;

    // Supposed to be called once at voice initialization.
    // Otherwise recreate native object (instead of adding 'set callback' method to native interface)
    public void SetCallback(Action<float[]> callback, ObjectFactory<float[], int> bufferFactory)
    {
        this.bufferFactory = bufferFactory;
        this.pushCallback = callback;
    }

    // Copies the native sample buffer into a managed array and forwards it to the registered callback.
    private void push(IntPtr buf, int len)
    {
        if (this.pushCallback != null)
        {
            var bufManaged = bufferFactory.New(len);
            Marshal.Copy(buf, bufManaged, 0, len);
            pushCallback(bufManaged);
        }
    }

    public int Channels { get { return 1; } }
    public int SamplingRate { get { return 48000; } }

    /// <summary>If not null, the pusher is in invalid state.</summary>
    public string Error { get; private set; }

    /// <summary>Resets the native audio input (e.g. after an audio route change).</summary>
    public void Reset()
    {
        lock (instancePerHandle)
        {
            if (handle != IntPtr.Zero)
            {
                Photon_Audio_In_Reset(handle);
            }
        }
    }

    /// <summary>Waits for initialization to complete, then unregisters and destroys the native pusher.</summary>
    public void Dispose()
    {
        // BUGFIX: wait for the constructor's initialization thread OUTSIDE the lock.
        // Previously the wait loop ran while holding the lock: if Dispose() acquired the
        // lock before the initialization thread entered its locked region, the flag could
        // never be set and Dispose() spun forever (deadlock). Waiting first also guarantees
        // instanceID has been assigned before it is removed from the table.
        while (!initializationFinished)
        {
            Thread.Sleep(1);
        }
        lock (instancePerHandle)
        {
            instancePerHandle.Remove(instanceID);
            if (handle != IntPtr.Zero)
            {
                Photon_Audio_In_Destroy(handle);
                handle = IntPtr.Zero;
            }
        }
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: a173aa7f3a7bbc94aa78a6747d55f3f6
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,96 @@
#if (UNITY_IOS && !UNITY_EDITOR) || __IOS__
using System;
using System.Threading;
using System.Runtime.InteropServices;
namespace Photon.Voice.IOS
{
public class AudioInReader : IAudioReader<float>, IResettable
{
    const string lib_name = "__Internal";

    [DllImport(lib_name)]
    private static extern IntPtr Photon_Audio_In_CreateReader(int sessionCategory, int sessionMode, int sessionCategoryOptions);
    [DllImport(lib_name)]
    private static extern void Photon_Audio_In_Reset(IntPtr handler);
    [DllImport(lib_name)]
    private static extern void Photon_Audio_In_Destroy(IntPtr handler);
    [DllImport(lib_name)]
    private static extern bool Photon_Audio_In_Read(IntPtr handle, float[] buf, int len);

    IntPtr audioIn;

    // volatile: set by the initialization thread, polled by Dispose() on another thread.
    private volatile bool initializationFinished;

    /// <summary>Starts asynchronous creation of the native reader with the given audio session parameters.</summary>
    /// <param name="sessParam">AVAudioSession category / mode / options applied by the native plugin.</param>
    /// <param name="logger">Used to report construction errors.</param>
    public AudioInReader(AudioSessionParameters sessParam, ILogger logger)
    {
        // initialization in a separate thread to avoid 0.5 - 1 sec. pauses in main thread execution
        var t = new Thread(() =>
        {
            lock (this)
            {
                try
                {
                    // NOTE: a redundant nested lock (this) around the assignment was removed;
                    // this code already runs inside the locked region.
                    this.audioIn = Photon_Audio_In_CreateReader((int)sessParam.Category, (int)sessParam.Mode, sessParam.CategoryOptionsToInt());
                }
                catch (Exception e)
                {
                    Error = e.ToString();
                    if (Error == null) // should never happen but since Error used as validity flag, make sure that it's not null
                    {
                        Error = "Exception in AudioInReader constructor";
                    }
                    logger.LogError("[PV] AudioInReader: " + Error);
                }
                finally
                {
                    initializationFinished = true;
                }
            }
        });
        Util.SetThreadName(t, "[PV] IOSAudioInReaderCtr");
        t.Start();
    }

    public int Channels { get { return 1; } }
    public int SamplingRate { get { return 48000; } }

    /// <summary>If not null, the reader is in invalid state.</summary>
    public string Error { get; private set; }

    /// <summary>Resets the native audio input (e.g. after an audio route change).</summary>
    public void Reset()
    {
        lock (this)
        {
            if (audioIn != IntPtr.Zero)
            {
                Photon_Audio_In_Reset(audioIn);
            }
        }
    }

    /// <summary>Waits for initialization to complete, then destroys the native reader.</summary>
    public void Dispose()
    {
        // BUGFIX: wait for the constructor's initialization thread OUTSIDE the lock.
        // Previously the wait loop ran while holding the lock: if Dispose() acquired the
        // lock before the initialization thread entered its locked region, the flag could
        // never be set and Dispose() spun forever (deadlock).
        while (!initializationFinished)
        {
            Thread.Sleep(1);
        }
        lock (this)
        {
            if (audioIn != IntPtr.Zero)
            {
                Photon_Audio_In_Destroy(audioIn);
                audioIn = IntPtr.Zero;
            }
        }
    }

    /// <summary>Fills <paramref name="buf"/> with samples; false when disposed, not yet initialized, or no data.</summary>
    public bool Read(float[] buf)
    {
        return audioIn != IntPtr.Zero && Photon_Audio_In_Read(audioIn, buf, buf.Length);
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7c99541283e96384ea52e265269b1dee
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,346 @@
namespace Photon.Voice.IOS
{
/// <summary>AVAudioSession category passed through to the native iOS audio plugin.
/// Numeric values mirror the corresponding enums in AudioIn.mm and must stay in sync with them.</summary>
public enum AudioSessionCategory // values are the same as in AudioIn.mm enums
{
/// <summary>
/// Use this category for background sounds such as rain, car engine noise, etc.
/// Mixes with other music.
/// </summary>
/// <remarks>API_AVAILABLE(ios(3.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);</remarks>
Ambient = 0,
/// <summary> Use this category for background sounds. Other music will stop playing. </summary>
/// <remarks>API_AVAILABLE(ios(3.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);</remarks>
SoloAmbient = 1,
/// <summary> Use this category for music tracks. </summary>
/// <remarks>API_AVAILABLE(ios(3.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);</remarks>
Playback = 2,
/// <summary> Use this category when recording audio. </summary>
/// <remarks>API_AVAILABLE(ios(3.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);</remarks>
Record = 3,
/// <summary> Use this category when recording and playing back audio. </summary>
/// <remarks>API_AVAILABLE(ios(3.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);</remarks>
PlayAndRecord = 4,
/// <summary> Use this category when using a hardware codec or signal processor while
/// not playing or recording audio. </summary>
/// <remarks>API_DEPRECATED("No longer supported", ios(3.0, 10.0)) API_UNAVAILABLE(watchos, tvos) API_UNAVAILABLE(macos);</remarks>
AudioProcessing = 5,
/// <summary> Use this category to customize the usage of available audio accessories and built-in audio hardware.
/// For example, this category provides an application with the ability to use an available USB output
/// and headphone output simultaneously for separate, distinct streams of audio data. Use of
/// this category by an application requires a more detailed knowledge of, and interaction with,
/// the capabilities of the available audio routes. May be used for input, output, or both.
/// Note that not all output types and output combinations are eligible for multi-route. Input is limited
/// to the last-in input port. Eligible inputs consist of the following:
/// AVAudioSessionPortUSBAudio, AVAudioSessionPortHeadsetMic, and AVAudioSessionPortBuiltInMic.
/// Eligible outputs consist of the following:
/// AVAudioSessionPortUSBAudio, AVAudioSessionPortLineOut, AVAudioSessionPortHeadphones, AVAudioSessionPortHDMI,
/// and AVAudioSessionPortBuiltInSpeaker.
/// Note that AVAudioSessionPortBuiltInSpeaker is only allowed to be used when there are no other eligible
/// outputs connected. </summary>
/// <remarks>API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);</remarks>
MultiRoute = 6,
}
/// <summary>AVAudioSession mode passed through to the native iOS audio plugin.
/// Numeric values mirror the corresponding enums in AudioIn.mm and must stay in sync with them.</summary>
public enum AudioSessionMode // values are the same as in AudioIn.mm enums
{
/// <summary>
/// Modes modify the audio category in order to introduce behavior that is tailored to the specific
/// use of audio within an application. Available in iOS 5.0 and greater.
/// </summary>
/// <remarks>
/// The default mode
/// API_AVAILABLE(ios(5.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
/// </remarks>
Default = 0,
/// <summary>
/// Only valid with AVAudioSessionCategoryPlayAndRecord. Appropriate for Voice over IP
/// (VoIP) applications. Reduces the number of allowable audio routes to be only those
/// that are appropriate for VoIP applications and may engage appropriate system-supplied
/// signal processing. Has the side effect of setting AVAudioSessionCategoryOptionAllowBluetooth
/// </summary>
/// <remarks>
/// API_AVAILABLE(ios(5.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
/// </remarks>
VoiceChat = 1,
/* Set by Game Kit on behalf of an application that uses a GKVoiceChat object; valid
only with the AVAudioSessionCategoryPlayAndRecord category.
Do not set this mode directly. If you need similar behavior and are not using
a GKVoiceChat object, use AVAudioSessionModeVoiceChat instead. */
// GameChat = 2, // API_AVAILABLE(ios(5.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
/// <summary>
/// Only valid with AVAudioSessionCategoryPlayAndRecord or AVAudioSessionCategoryRecord.
/// Modifies the audio routing options and may engage appropriate system-supplied signal processing.
/// </summary>
/// <remarks>
/// API_AVAILABLE(ios(5.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
/// </remarks>
VideoRecording = 3,
/// <summary>
/// Appropriate for applications that wish to minimize the effect of system-supplied signal
/// processing for input and/or output audio signals.
/// </summary>
/// <remarks>
/// API_AVAILABLE(ios(5.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
/// </remarks>
Measurement = 4,
/// <summary>
/// Engages appropriate output signal processing for movie playback scenarios. Currently
/// only applied during playback over built-in speaker.
/// </summary>
/// <remarks>
/// API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
/// </remarks>
MoviePlayback = 5,
/// <summary>
/// Only valid with kAudioSessionCategory_PlayAndRecord. Reduces the number of allowable audio
/// routes to be only those that are appropriate for video chat applications. May engage appropriate
/// system-supplied signal processing. Has the side effect of setting
/// AVAudioSessionCategoryOptionAllowBluetooth and AVAudioSessionCategoryOptionDefaultToSpeaker.
/// </summary>
/// <remarks>
/// API_AVAILABLE(ios(7.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
/// </remarks>
VideoChat = 6,
/* Appropriate for applications which play spoken audio and wish to be paused (via audio session interruption) rather than ducked
if another app (such as a navigation app) plays a spoken audio prompt. Examples of apps that would use this are podcast players and
audio books. For more information, see the related category option AVAudioSessionCategoryOptionInterruptSpokenAudioAndMixWithOthers. */
// SpokenAudio = 7, // API_AVAILABLE(ios(9.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
/* Appropriate for applications which play audio using text to speech. Setting this mode allows for different routing behaviors when
connected to certain audio devices such as CarPlay. An example of an app that would use this mode is a turn by turn navigation app that
plays short prompts to the user. Typically, these same types of applications would also configure their session to use
AVAudioSessionCategoryOptionDuckOthers and AVAudioSessionCategoryOptionInterruptSpokenAudioAndMixWithOthers */
// VoicePrompt = 8, // API_AVAILABLE(ios(12.0), watchos(5.0), tvos(12.0)) API_UNAVAILABLE(macos);
}
public enum AudioSessionCategoryOption // values as defined in Apple Audio Session API
{
/*
AVAudioSessionCategoryOptionMixWithOthers --
This allows an application to set whether or not other active audio apps will be interrupted or mixed with
when your app's audio session goes active. The typical cases are:
(1) AVAudioSessionCategoryPlayAndRecord or AVAudioSessionCategoryMultiRoute
this will default to false, but can be set to true. This would allow other applications to play in the background
while an app had both audio input and output enabled
(2) AVAudioSessionCategoryPlayback
this will default to false, but can be set to true. This would allow other applications to play in the background,
but an app will still be able to play regardless of the setting of the ringer switch
(3) Other categories
this defaults to false and cannot be changed (that is, the mix with others setting of these categories
cannot be overridden. An application must be prepared for setting this property to fail as behaviour
may change in future releases. If an application changes their category, they should reassert the
option (it is not sticky across category changes).
AVAudioSessionCategoryOptionDuckOthers --
This allows an application to set whether or not other active audio apps will be ducked when when your app's audio
session goes active. An example of this is the Nike app, which provides periodic updates to its user (it reduces the
volume of any music currently being played while it provides its status). This defaults to off. Note that the other
audio will be ducked for as long as the current session is active. You will need to deactivate your audio
session when you want full volume playback of the other audio.
If your category is AVAudioSessionCategoryPlayback, AVAudioSessionCategoryPlayAndRecord, or
AVAudioSessionCategoryMultiRoute, by default the audio session will be non-mixable and non-ducking.
Setting this option will also make your category mixable with others (AVAudioSessionCategoryOptionMixWithOthers
will be set).
AVAudioSessionCategoryOptionAllowBluetooth --
This allows an application to change the default behaviour of some audio session categories with regards to showing
bluetooth Hands-Free Profile (HFP) devices as available routes. The current category behavior is:
(1) AVAudioSessionCategoryPlayAndRecord
this will default to false, but can be set to true. This will allow a paired bluetooth HFP device to show up as
an available route for input, while playing through the category-appropriate output
(2) AVAudioSessionCategoryRecord
this will default to false, but can be set to true. This will allow a paired bluetooth HFP device to show up
as an available route for input
(3) Other categories
this defaults to false and cannot be changed (that is, enabling bluetooth for input in these categories is
not allowed)
An application must be prepared for setting this option to fail as behaviour may change in future releases.
If an application changes their category or mode, they should reassert the override (it is not sticky
across category and mode changes).
AVAudioSessionCategoryOptionDefaultToSpeaker --
This allows an application to change the default behaviour of some audio session categories with regards to
the audio route. The current category behavior is:
(1) AVAudioSessionCategoryPlayAndRecord category
this will default to false, but can be set to true. this will route to Speaker (instead of Receiver)
when no other audio route is connected.
(2) Other categories
this defaults to false and cannot be changed (that is, the default to speaker setting of these
categories cannot be overridden
An application must be prepared for setting this property to fail as behaviour may change in future releases.
If an application changes their category, they should reassert the override (it is not sticky across
category and mode changes).
AVAudioSessionCategoryOptionInterruptSpokenAudioAndMixWithOthers --
If another app's audio session mode is set to AVAudioSessionModeSpokenAudio (podcast playback in the background for example),
then that other app's audio will be interrupted when the current application's audio session goes active. An example of this
is a navigation app that provides navigation prompts to its user (it pauses any spoken audio currently being played while it
plays the prompt). This defaults to off. Note that the other app's audio will be paused for as long as the current session is
active. You will need to deactivate your audio session to allow the other audio to resume playback.
Setting this option will also make your category mixable with others (AVAudioSessionCategoryOptionMixWithOthers
will be set). If you want other non-spoken audio apps to duck their audio when your app's session goes active, also set
AVAudioSessionCategoryOptionDuckOthers.
AVAudioSessionCategoryOptionAllowBluetoothA2DP --
This allows an application to change the default behaviour of some audio session categories with regards to showing
bluetooth Advanced Audio Distribution Profile (A2DP), i.e. stereo Bluetooth, devices as available routes. The current
category behavior is:
(1) AVAudioSessionCategoryPlayAndRecord
this will default to false, but can be set to true. This will allow a paired bluetooth A2DP device to show up as
an available route for output, while recording through the category-appropriate input
(2) AVAudioSessionCategoryMultiRoute and AVAudioSessionCategoryRecord
this will default to false, and cannot be set to true.
(3) Other categories
this defaults to true and cannot be changed (that is, bluetooth A2DP ports are always supported in output-only categories).
An application must be prepared for setting this option to fail as behaviour may change in future releases.
If an application changes their category or mode, they should reassert the override (it is not sticky
across category and mode changes).
Setting both AVAudioSessionCategoryOptionAllowBluetooth and AVAudioSessionCategoryOptionAllowBluetoothA2DP is allowed. In cases
where a single Bluetooth device supports both HFP and A2DP, the HFP ports will be given a higher priority for routing. For HFP
and A2DP ports on separate hardware devices, the last-in wins rule applies.
AVAudioSessionCategoryOptionAllowAirPlay --
This allows an application to change the default behaviour of some audio session categories with regards to showing
AirPlay devices as available routes. See the documentation of AVAudioSessionCategoryOptionAllowBluetoothA2DP for details on
how this option applies to specific categories.
*/
/// <summary>
/// This allows an application to set whether or not other active audio apps will be interrupted or mixed with
/// when your app's audio session goes active. The typical cases are:
/// (1) AVAudioSessionCategoryPlayAndRecord or AVAudioSessionCategoryMultiRoute
/// this will default to false, but can be set to true. This would allow other applications to play in the background
/// while an app had both audio input and output enabled
/// (2) AVAudioSessionCategoryPlayback
/// this will default to false, but can be set to true. This would allow other applications to play in the background,
/// but an app will still be able to play regardless of the setting of the ringer switch
/// (3) Other categories
/// this defaults to false and cannot be changed (that is, the mix with others setting of these categories
/// cannot be overridden. An application must be prepared for setting this property to fail as behaviour
/// may change in future releases. If an application changes their category, they should reassert the
/// option (it is not sticky across category changes).
/// MixWithOthers is only valid with AVAudioSessionCategoryPlayAndRecord, AVAudioSessionCategoryPlayback, and AVAudioSessionCategoryMultiRoute
/// </summary>
MixWithOthers = 0x1,
/// <summary>
/// This allows an application to set whether or not other active audio apps will be ducked when when your app's audio
/// session goes active. An example of this is the Nike app, which provides periodic updates to its user (it reduces the
/// volume of any music currently being played while it provides its status). This defaults to off. Note that the other
/// audio will be ducked for as long as the current session is active. You will need to deactivate your audio
/// session when you want full volume playback of the other audio.
/// If your category is AVAudioSessionCategoryPlayback, AVAudioSessionCategoryPlayAndRecord, or
/// AVAudioSessionCategoryMultiRoute, by default the audio session will be non-mixable and non-ducking.
/// Setting this option will also make your category mixable with others (AVAudioSessionCategoryOptionMixWithOthers
/// will be set).
/// DuckOthers is only valid with AVAudioSessionCategoryAmbient, AVAudioSessionCategoryPlayAndRecord, AVAudioSessionCategoryPlayback, and AVAudioSessionCategoryMultiRoute
/// </summary>
DuckOthers = 0x2,
/// <summary>
/// This allows an application to change the default behaviour of some audio session categories with regards to showing
/// bluetooth Hands-Free Profile (HFP) devices as available routes. The current category behavior is:
/// (1) AVAudioSessionCategoryPlayAndRecord
/// this will default to false, but can be set to true. This will allow a paired bluetooth HFP device to show up as
/// an available route for input, while playing through the category-appropriate output
/// (2) AVAudioSessionCategoryRecord
/// this will default to false, but can be set to true. This will allow a paired bluetooth HFP device to show up
/// as an available route for input
/// (3) Other categories
/// this defaults to false and cannot be changed (that is, enabling bluetooth for input in these categories is
/// not allowed)
/// An application must be prepared for setting this option to fail as behaviour may change in future releases.
/// If an application changes their category or mode, they should reassert the override (it is not sticky
/// across category and mode changes).
/// AllowBluetooth is only valid with AVAudioSessionCategoryRecord and AVAudioSessionCategoryPlayAndRecord
/// </summary>
AllowBluetooth = 0x4, // API_UNAVAILABLE(tvos, watchos, macos)
/// <summary>
/// This allows an application to change the default behaviour of some audio session categories with regards to
/// the audio route. The current category behavior is:
/// (1) AVAudioSessionCategoryPlayAndRecord category
/// this will default to false, but can be set to true. this will route to Speaker (instead of Receiver)
/// when no other audio route is connected.
/// (2) Other categories
/// this defaults to false and cannot be changed (that is, the default to speaker setting of these
/// categories cannot be overridden
/// An application must be prepared for setting this property to fail as behaviour may change in future releases.
/// If an application changes their category, they should reassert the override (it is not sticky across
/// category and mode changes).
/// DefaultToSpeaker is only valid with AVAudioSessionCategoryPlayAndRecord
/// </summary>
DefaultToSpeaker = 0x8, // API_UNAVAILABLE(tvos, watchos, macos)
/* InterruptSpokenAudioAndMixWithOthers is only valid with AVAudioSessionCategoryPlayAndRecord, AVAudioSessionCategoryPlayback, and AVAudioSessionCategoryMultiRoute */
// InterruptSpokenAudioAndMixWithOthers = 0x11, // API_AVAILABLE(ios(9.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos)
/* AllowBluetoothA2DP is only valid with AVAudioSessionCategoryPlayAndRecord */
// AllowBluetoothA2DP = 0x20, // API_AVAILABLE(ios(10.0), watchos(3.0), tvos(10.0)) API_UNAVAILABLE(macos)
/* AllowAirPlay is only valid with AVAudioSessionCategoryPlayAndRecord */
// AllowAirPlay = 0x40, // API_AVAILABLE(ios(10.0), tvos(10.0)) API_UNAVAILABLE(watchos, macos)
}
/// <summary>
/// Serializable bundle of AVAudioSession settings: category, mode and category options.
/// </summary>
[System.Serializable]
public struct AudioSessionParameters
{
    /// <summary>Audio session category (typically PlayAndRecord for voice).</summary>
    public AudioSessionCategory Category;
    /// <summary>Audio session mode (typically Default or VoiceChat).</summary>
    public AudioSessionMode Mode;
    /// <summary>Category options applied on top of the category; may be null.</summary>
    public AudioSessionCategoryOption[] CategoryOptions;

    /// <summary>Folds all category options into a single native bit mask (0 when none are set).</summary>
    public int CategoryOptionsToInt()
    {
        int mask = 0;
        if (CategoryOptions != null)
        {
            foreach (var option in CategoryOptions)
            {
                mask |= (int)option;
            }
        }
        return mask;
    }

    /// <summary>Human-readable dump of category, mode and the option list.</summary>
    public override string ToString()
    {
        var optionsText = CategoryOptions == null
            ? "[]"
            : "[" + string.Join(", ", CategoryOptions) + "]";
        return string.Format("category = {0}, mode = {1}, options = {2}", Category, Mode, optionsText);
    }
}
/// <summary>Ready-made AVAudioSession configurations for common use cases.</summary>
public static class AudioSessionParametersPresets
{
    /// <summary>Game preset: PlayAndRecord with speaker output and Bluetooth input allowed.</summary>
    public static AudioSessionParameters Game = new AudioSessionParameters
    {
        Category = AudioSessionCategory.PlayAndRecord,
        Mode = AudioSessionMode.Default,
        CategoryOptions = new[] { AudioSessionCategoryOption.DefaultToSpeaker, AudioSessionCategoryOption.AllowBluetooth }
    };

    /// <summary>
    /// VoIP preset: PlayAndRecord in VoiceChat mode.
    /// VoiceChat should have the side effect of setting AVAudioSessionCategoryOptionAllowBluetooth
    /// according to the docs, but tests don't confirm this, so the option is set explicitly.
    /// </summary>
    public static AudioSessionParameters VoIP = new AudioSessionParameters
    {
        Category = AudioSessionCategory.PlayAndRecord,
        Mode = AudioSessionMode.VoiceChat,
        CategoryOptions = new[] { AudioSessionCategoryOption.AllowBluetooth }
    };
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 76e6623773113024186d4ccda649c0a7
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 077edf4fc00f33f4bbe7674e2f12544b
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,205 @@
#if PHOTON_VOICE_FMOD_ENABLE
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using FMODLib = FMOD;
namespace Photon.Voice.FMOD
{
// Captures microphone input through FMOD: records into a looping user-created Sound
// (a ring buffer of BUF_LENGTH_MS) and copies new samples out on each Read() call.
// T must be short (PCM16) or float (PCMFLOAT); any other type sets Error and disables the reader.
public class AudioInReader<T> : IAudioReader<T>
{
    readonly int sizeofT = Marshal.SizeOf(default(T));
    const int BUF_LENGTH_MS = 2000; // length of the FMOD ring buffer in milliseconds
    const string LOG_PREFIX = "[PV] [FMOD] AudioIn: ";
    private int device;
    ILogger logger;
    FMODLib.System coreSystem;
    FMODLib.Sound sound;
    readonly FMODLib.SOUND_FORMAT soundFormat;
    public bool isRecording;
    int samplingRate;
    int channels;
    int bufLengthSamples; // ring buffer length in samples (per channel)

    /// <summary>
    /// Creates the reader and immediately starts recording from the given record driver.
    /// On any failure, Error is set and the instance is inert.
    /// </summary>
    /// <param name="coreSystem">FMOD core system used for all calls.</param>
    /// <param name="device">FMOD record driver id; -1 selects the default device (0).</param>
    /// <param name="suggestedFrequency">Requested sampling rate; FMOD resamples to it.</param>
    /// <param name="logger">Logger for errors and status.</param>
    public AudioInReader(FMODLib.System coreSystem, int device, int suggestedFrequency, ILogger logger)
    {
        if (sizeofT == 2)// (typeof(T) == typeof(short)) // sometimes T is Int16 even if short passed, checking size is more reliable
        {
            soundFormat = FMODLib.SOUND_FORMAT.PCM16;
        }
        else if (sizeofT == 4)// (typeof(T) == typeof(float))
        {
            soundFormat = FMODLib.SOUND_FORMAT.PCMFLOAT;
        }
        else
        {
            Error = "only float and short buffers are supported: " + typeof(T);
            logger.LogError(LOG_PREFIX + Error);
            return;
        }
        try
        {
            if (device == -1) // default device
            {
                device = 0;
            }
            FMODLib.RESULT res;
            this.coreSystem = coreSystem;
            this.device = device;
            this.logger = logger;
            // use given frequency, fmod will resample
            this.samplingRate = suggestedFrequency;
            // set this.channels to driver's value (name is not needed, hence length 1)
            res = this.coreSystem.getRecordDriverInfo(device, out string name, 1, out Guid guid, out int systemrate, out FMODLib.SPEAKERMODE speakermode, out this.channels, out FMODLib.DRIVER_STATE state);
            if (res != FMODLib.RESULT.OK)
            {
                Error = "failed to getRecordDriverInfo: " + res;
                logger.LogError(LOG_PREFIX + Error);
                return;
            }
            FMODLib.CREATESOUNDEXINFO exinfo = new FMODLib.CREATESOUNDEXINFO();
            exinfo.cbsize = Marshal.SizeOf(exinfo);
            exinfo.numchannels = channels;
            exinfo.format = soundFormat;
            exinfo.defaultfrequency = samplingRate;
            bufLengthSamples = samplingRate * BUF_LENGTH_MS / 1000;
            exinfo.length = (uint)(bufLengthSamples * channels * sizeofT);
            FMODLib.MODE soundMode = FMODLib.MODE.OPENUSER | FMODLib.MODE.LOOP_NORMAL;
            res = this.coreSystem.createSound("Photon AudioIn", soundMode, ref exinfo, out sound);
            if (res != FMODLib.RESULT.OK)
            {
                Error = "failed to createSound: " + res;
                logger.LogError(LOG_PREFIX + Error);
                return;
            }
            // fix: record from the selected driver instead of always driver 0
            // (getRecordDriverInfo above and Dispose() below already use 'device')
            res = this.coreSystem.recordStart(device, sound, true);
            if (res != FMODLib.RESULT.OK)
            {
                Error = "failed to startrecord: " + res;
                logger.LogError(LOG_PREFIX + Error);
                return;
            }
            else
            {
                isRecording = true;
            }
            //test play
            //this.coreSystem.playSound(sound, channelGroup, false, out channel);
            logger.LogInfo("[PV] [FMOD] Mic: microphone '{0}' initialized, frequency = {1}, channels = {2}.", device, samplingRate, channels);
        }
        catch (Exception e)
        {
            Error = e.ToString();
            if (Error == null) // should never happen but since Error used as validity flag, make sure that it's not null
            {
                Error = "Exception in [FMOD] Mic constructor";
            }
            logger.LogError(LOG_PREFIX + Error);
        }
    }

    /// <summary>Sampling rate of the captured signal; 0 if the reader failed to initialize.</summary>
    public int SamplingRate { get { return Error == null ? this.samplingRate : 0; } }
    /// <summary>Number of channels of the captured signal; 0 if the reader failed to initialize.</summary>
    public int Channels { get { return Error == null ? this.channels : 0; } }
    /// <summary>Null while the reader is valid; otherwise a description of the failure.</summary>
    public string Error { get; private set; }

    /// <summary>Stops recording and releases the FMOD sound.</summary>
    public void Dispose()
    {
        this.coreSystem.recordStop(device);
        sound.release();
    }

    private uint micPrevPos;     // last observed record position, for wrap detection
    private int micLoopCnt;      // number of times the record position wrapped the ring buffer
    private uint readAbsPos;     // pos in sample (sample size in bytes = sizeofT * channels)

    /// <summary>
    /// Copies the next readBuf.Length values (readBuf.Length / channels samples) out of the
    /// ring buffer if that many new samples are available. Returns false when there is not
    /// enough new data yet or on error (Error is set in the latter case).
    /// NOTE(review): if the reader falls behind by more than BUF_LENGTH_MS, overwritten
    /// (stale) data will be read — presumably acceptable for voice; confirm.
    /// </summary>
    public bool Read(T[] readBuf)
    {
        if (Error != null)
        {
            return false;
        }
        uint micPos;
        // fix: query the position of the driver we record from, not driver 0
        FMODLib.RESULT res = this.coreSystem.getRecordPosition(device, out micPos);
        if (res != FMODLib.RESULT.OK)
        {
            Error = "failed to getRecordPosition: " + res;
            logger.LogError(LOG_PREFIX + Error);
            return false;
        }
        // loop detection: position moving backwards means the ring buffer wrapped
        if (micPos < micPrevPos)
        {
            micLoopCnt++;
        }
        micPrevPos = micPos;
        var micAbsPos = micLoopCnt * bufLengthSamples + micPos; // absolute (unwrapped) record position in samples
        var nextReadPos = this.readAbsPos + readBuf.Length / channels;
        if (nextReadPos < micAbsPos)
        {
            // lock the region to read; FMOD returns 2 pointers when the region wraps the buffer end
            IntPtr ptr1, ptr2;
            uint len1, len2;
            res = sound.@lock((uint)(this.readAbsPos % bufLengthSamples * sizeofT * channels), (uint)(readBuf.Length * sizeofT), out ptr1, out ptr2, out len1, out len2);
            if (res != FMODLib.RESULT.OK)
            {
                Error = "failed to lock sound buffer: " + res;
                logger.LogError(LOG_PREFIX + Error);
                return false;
            }
            int len1T = (int)len1 / sizeofT;
            int len2T = (int)len2 / sizeofT;
            if (soundFormat == FMODLib.SOUND_FORMAT.PCM16)
            {
                Marshal.Copy(ptr1, readBuf as short[], 0, len1T);
                if (ptr2 != IntPtr.Zero)
                {
                    Marshal.Copy(ptr2, readBuf as short[], len1T, len2T);
                }
            }
            else if (soundFormat == FMODLib.SOUND_FORMAT.PCMFLOAT)
            {
                Marshal.Copy(ptr1, readBuf as float[], 0, len1T);
                if (ptr2 != IntPtr.Zero)
                {
                    Marshal.Copy(ptr2, readBuf as float[], len1T, len2T);
                }
            }
            res = sound.unlock(ptr1, ptr2, len1, len2);
            if (res != FMODLib.RESULT.OK)
            {
                Error = "failed to unlock sound buffer: " + res;
                logger.LogError(LOG_PREFIX + Error);
                return false;
            }
            this.readAbsPos = (uint)nextReadPos;
            return true;
        }
        else
        {
            return false;
        }
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 210999343d484d745965ceb31acc2b8e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,46 @@
#if PHOTON_VOICE_FMOD_ENABLE
using System;
using System.Collections.Generic;
using FMODLib = FMOD;
namespace Photon.Voice.FMOD
{
/// <summary>Enumerates FMOD recording (microphone) drivers.</summary>
public class AudioInEnumerator : DeviceEnumeratorBase
{
    const int NAME_MAX_LENGTH = 1000; // max characters fetched for a driver name
    const string LOG_PREFIX = "[PV] [FMOD] AudioInEnumerator: ";

    public AudioInEnumerator(ILogger logger) : base(logger)
    {
        Refresh();
    }

    /// <summary>Re-queries FMOD for the current list of record drivers; sets Error on failure.</summary>
    public override void Refresh()
    {
        int driverCount;
        int connectedCount; // reported by FMOD but not used here
        var result = FMODUnity.RuntimeManager.CoreSystem.getRecordNumDrivers(out driverCount, out connectedCount);
        if (result != FMODLib.RESULT.OK)
        {
            Error = "failed to getRecordNumDrivers: " + result;
            logger.LogError(LOG_PREFIX + Error);
            return;
        }
        devices = new List<DeviceInfo>();
        for (int id = 0; id < driverCount; id++)
        {
            string name;
            Guid guid;
            int systemRate;
            FMODLib.SPEAKERMODE speakerMode;
            int speakerModeChannels;
            FMODLib.DRIVER_STATE state;
            result = FMODUnity.RuntimeManager.CoreSystem.getRecordDriverInfo(id, out name, NAME_MAX_LENGTH, out guid, out systemRate, out speakerMode, out speakerModeChannels, out state);
            if (result != FMODLib.RESULT.OK)
            {
                Error = "failed to getRecordDriverInfo: " + result;
                logger.LogError(LOG_PREFIX + Error);
                return;
            }
            devices.Add(new DeviceInfo(id, name));
        }
    }

    public override void Dispose()
    {
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 9b872d752deb56f4e9943f33887d38ca
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,258 @@
#if PHOTON_VOICE_FMOD_ENABLE
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using FMODLib = FMOD;
namespace Photon.Voice.FMOD
{
// Plays back input audio via FMOD Sound.
// Incoming frames are written into a looping user-created Sound which is played on the
// master channel group; the base class AudioOutDelayControl<T> drives write offsets,
// delay handling, and fields such as 'channels' used below (declared in the base —
// see AudioOutDelayControl for details).
public class AudioOut<T> : AudioOutDelayControl<T>
{
    protected readonly int sizeofT = Marshal.SizeOf(default(T));
    FMODLib.System coreSystem;
    FMODLib.Sound sound;
    FMODLib.Channel channel;
    FMODLib.SOUND_FORMAT soundFormat; // PCM16 for short, PCMFLOAT for float
    // FMOD handles exposed for callers that need direct access (e.g. AudioOutEvent).
    public FMODLib.Sound Sound { get { return sound; } }
    public FMODLib.Channel Channel { get { return channel; } }

    // Determines the FMOD sample format from sizeof(T); any other element size sets Error
    // and leaves the player inert.
    public AudioOut(FMODLib.System coreSystem, PlayDelayConfig playDelayConfig, ILogger logger, string logPrefix, bool debugInfo)
        : base(false, playDelayConfig, logger, "[PV] [FMOD] AudioOut" + (logPrefix == "" ? "" : " ") + logPrefix + " ", debugInfo)
    {
        if (sizeofT == 2)// (typeof(T) == typeof(short)) // sometimes T is Int16 even if short passed, checking is more reliable
        {
            soundFormat = FMODLib.SOUND_FORMAT.PCM16;
        }
        else if (sizeofT == 4)// (typeof(T) == typeof(float))
        {
            soundFormat = FMODLib.SOUND_FORMAT.PCMFLOAT;
        }
        else
        {
            Error = "only float and short buffers are supported: " + typeof(T);
            logger.LogError(logPrefix + Error);
            return;
        }
        this.coreSystem = coreSystem;
    }

    // Creates the looping playback ring buffer (OPENUSER Sound) sized bufferSamples per channel.
    // Called by the base class once stream parameters are known.
    override public void OutCreate(int samplingRate, int channels, int bufferSamples)
    {
        FMODLib.RESULT res;
        FMODLib.CREATESOUNDEXINFO exinfo = new FMODLib.CREATESOUNDEXINFO();
        exinfo.cbsize = Marshal.SizeOf(exinfo);
        exinfo.numchannels = channels;
        exinfo.format = soundFormat;
        exinfo.defaultfrequency = samplingRate;
        exinfo.length = (uint)(bufferSamples * channels * sizeofT);
        FMODLib.MODE soundMode = FMODLib.MODE.OPENUSER | FMODLib.MODE.LOOP_NORMAL;
        res = coreSystem.createSound("Photon AudioOut", soundMode, ref exinfo, out sound);
        if (res != FMODLib.RESULT.OK)
        {
            Error = "failed to createSound: " + res;
            logger.LogError(logPrefix + Error);
            return;
        }
        logger.LogInfo(logPrefix + "Sound Created" + sound.handle);
    }

    // Starts playback of the ring buffer on the master channel group.
    override public void OutStart()
    {
        FMODLib.ChannelGroup master;
        coreSystem.getMasterChannelGroup(out master);
        FMODLib.RESULT res = coreSystem.playSound(sound, master, false, out channel);
        if (res != FMODLib.RESULT.OK)
        {
            Error = "failed to playSound: " + res;
            logger.LogError(logPrefix + Error);
            return;
        }
    }

    // Current playback position in samples (PCM bytes divided by frame size).
    // 'channels' is expected to be set by the base class before this is queried.
    override public int OutPos
    {
        get
        {
            channel.getPosition(out uint pos, FMODLib.TIMEUNIT.PCMBYTES);
            return (int)(pos / channels / sizeofT);
        }
    }

    // Writes one frame into the ring buffer at the given sample offset.
    // Uses Sound.lock/unlock; FMOD returns two pointers when the locked region wraps
    // around the end of the buffer, hence the ptr2 handling.
    override public void OutWrite(T[] frame, int offsetSamples)
    {
        if (Error != null)
        {
            return;
        }
        FMODLib.RESULT res;
        IntPtr ptr1, ptr2;
        uint len1, len2;
        res = sound.@lock((uint)(offsetSamples * sizeofT * channels), (uint)(frame.Length * sizeofT), out ptr1, out ptr2, out len1, out len2);
        if (res != FMODLib.RESULT.OK)
        {
            Error = "failed to lock sound buffer: " + res;
            logger.LogError(logPrefix + Error);
            return;
        }
        int len1T = (int)len1 / sizeofT;
        int len2T = (int)len2 / sizeofT;
        if (soundFormat == FMODLib.SOUND_FORMAT.PCM16)
        {
            Marshal.Copy(frame as short[], 0, ptr1, len1T);
            if (ptr2 != IntPtr.Zero)
            {
                Marshal.Copy(frame as short[], len1T, ptr2, len2T);
            }
        }
        else if (soundFormat == FMODLib.SOUND_FORMAT.PCMFLOAT)
        {
            Marshal.Copy(frame as float[], 0, ptr1, len1T);
            if (ptr2 != IntPtr.Zero)
            {
                Marshal.Copy(frame as float[], len1T, ptr2, len2T);
            }
        }
        res = sound.unlock(ptr1, ptr2, len1, len2);
        if (res != FMODLib.RESULT.OK)
        {
            Error = "failed to unlock sound buffer: " + res;
            logger.LogError(logPrefix + Error);
            return;
        }
    }

    // Stops playback (base) and releases the FMOD sound.
    override public void Stop()
    {
        base.Stop();
        sound.release();
    }

    // Null while the player is valid; otherwise a description of the failure.
    public string Error { get; private set; }
}
// Plays back input audio via FMOD Programmer Instrument.
// Provide an event with looped Programmer Instrument. AudioOutEvent<T> creates a Sound,
// assigns it to the Event and fires it on each Start() call.
public class AudioOutEvent<T> : AudioOut<T>
{
    FMODLib.Studio.EventInstance fmodEvent;

    public AudioOutEvent(FMODLib.System coreSystem, FMODLib.Studio.EventInstance fmodEvent, PlayDelayConfig playDelayConfig, ILogger logger, string logPrefix, bool debugInfo)
        : base(coreSystem, playDelayConfig, logger, "(Event)" + (logPrefix == "" ? "" : " ") + logPrefix, debugInfo)
    {
        this.fmodEvent = fmodEvent;
    }

    // Playback position derived from the event's timeline (ms) converted to samples and
    // wrapped to the ring buffer length. 'frequency' and 'bufferSamples' come from the base class.
    override public int OutPos
    {
        get
        {
            if (fmodEvent.handle == IntPtr.Zero)
            {
                return 0;
            }
            else
            {
                fmodEvent.getTimelinePosition(out int position);
                return (int)(position * (long)this.frequency / 1000 % this.bufferSamples);
            }
        }
    }

    // IL2CPP cannot marshal instance-method delegates to native code, so the static
    // FMOD callback resolves the target instance through this id -> instance table,
    // keyed by the integer stored in the event's user data.
    static int instCnt = 0;
    static Dictionary<int, AudioOutEvent<T>> instTable = new Dictionary<int, AudioOutEvent<T>>();

    // Registers this instance in the table, stores its id in the event's user data,
    // installs the static callback and starts the event.
    override public void OutStart()
    {
        fmodEvent.setCallback(FMODEventCallback);
        IntPtr ud;
        lock (instTable)
        {
            instTable[instCnt] = this;
            ud = new IntPtr(instCnt);
            instCnt++;
        }
        fmodEvent.setUserData(ud);
        fmodEvent.start();
        logger.LogInfo(logPrefix + "Event Started");
    }

    // Static trampoline: looks the instance up by the id stored in the event's user data
    // and forwards to the instance-level handler.
    [AOT.MonoPInvokeCallback(typeof(FMODLib.Studio.EVENT_CALLBACK))]
    static FMODLib.RESULT FMODEventCallback(FMODLib.Studio.EVENT_CALLBACK_TYPE type, IntPtr instance, IntPtr parameterPtr)
    {
        var evDummy = new FMODLib.Studio.EventInstance();
        evDummy.handle = instance;
        evDummy.getUserData(out IntPtr userdata);
        AudioOutEvent<T> audioOut;
        lock (instTable)
        {
            if (!instTable.TryGetValue(userdata.ToInt32(), out audioOut))
            {
                // should not happen because we deregister callback before removing the instance from the table
                return FMODLib.RESULT.ERR_NOTREADY;
            }
        }
        return audioOut.fmodEventCallback(type, instance, parameterPtr);
    }

    // Instance-level event handler: hands the ring-buffer Sound to the programmer
    // instrument when it is created; Sound release is handled in Stop() instead of
    // DESTROY_PROGRAMMER_SOUND.
    FMODLib.RESULT fmodEventCallback(FMODLib.Studio.EVENT_CALLBACK_TYPE type, IntPtr instance, IntPtr parameterPtr)
    {
        logger.LogInfo(logPrefix + "EventCallback " + type);
        switch (type)
        {
            case FMODLib.Studio.EVENT_CALLBACK_TYPE.CREATE_PROGRAMMER_SOUND:
                {
                    var parameter = Marshal.PtrToStructure<FMODLib.Studio.PROGRAMMER_SOUND_PROPERTIES>(parameterPtr);
                    parameter.sound = Sound.handle;
                    parameter.subsoundIndex = -1;
                    Marshal.StructureToPtr(parameter, parameterPtr, false);
                    logger.LogInfo(logPrefix + "Sound Assigned to Event Parameter");
                }
                break;
            case FMODLib.Studio.EVENT_CALLBACK_TYPE.DESTROY_PROGRAMMER_SOUND:
                {
                    // sound is released in Stop()
                    //var parameter = Marshal.PtrToStructure<FMODLib.Studio.PROGRAMMER_SOUND_PROPERTIES>(parameterPtr);
                    //var sound = new FMODLib.Sound();
                    //sound.handle = parameter.sound;
                    //sound.release();
                }
                break;
            case FMODLib.Studio.EVENT_CALLBACK_TYPE.DESTROYED:
                // Now the event has been destroyed, unpin the string memory so it can be garbage collected
                break;
        }
        return FMODLib.RESULT.OK;
    }

    // Deregisters the callback first, then removes this instance from the lookup table
    // (order matters: see the comment in FMODEventCallback). 'break' after Remove keeps
    // the enumeration valid.
    override public void Stop()
    {
        base.Stop();
        fmodEvent.setCallback(null);
        lock (instTable)
        {
            foreach (var i in instTable)
            {
                if (i.Value == this)
                {
                    instTable.Remove(i.Key);
                    break;
                }
            }
        }
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f1d84cf4f41d37743b72ce9f90a1b671
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: a7df7893176fe6843a116117c3d407ed
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,148 @@
#if WINDOWS_UWP || ENABLE_WINMD_SUPPORT
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using Windows.Media.MediaProperties;
namespace Photon.Voice.UWP
{
// Pushes microphone audio captured via UWP MediaCapture (wrapped by CaptureDevice)
// to a callback as 16-bit PCM converted to short[] frames.
public class AudioInPusher : IAudioPusher<short>
{
    ILogger logger;
    int samplingRate;
    int channels;
    CaptureDevice device = null;
    // NOTE(review): this field is never assigned; SetCallback uses its own
    // bufferFactory parameter instead — confirm the field can be removed.
    ObjectFactory<short[], int> bufferFactory;

    // Stores parameters and creates the capture device wrapper; actual device
    // initialization is deferred to init(), called from SetCallback.
    public AudioInPusher(ILogger logger, int samplingRate, int channels, string deviceID)
    {
        this.logger = logger;
        this.samplingRate = samplingRate;
        this.channels = channels;
        device = new CaptureDevice(logger, CaptureDevice.Media.Audio, deviceID);
    }

    // Initializes the capture device, unwrapping up to 3 levels of AggregateException
    // to surface the innermost message in Error; UnauthorizedAccessException
    // (mic permission denied) additionally sets ErrorAccess.
    void init()
    {
        try
        {
            device.Initialize();
            device.CaptureFailed += Device_CaptureFailed;
        }
        catch (AggregateException e)
        {
            logger.LogError("[PV] [AI] Device initialization Error: (HResult=" + e.HResult + ") " + e);
            e.Handle((x) =>
            {
                if (x is UnauthorizedAccessException)
                {
                    ErrorAccess = true;
                }
                Error = x.Message;
                logger.LogError("[PV] [AI] Device initialization Error (Inner Level 2): (HResult=" + x.HResult + ") " + x);
                if (x is AggregateException)
                {
                    (x as AggregateException).Handle((y) =>
                    {
                        Error = y.Message;
                        logger.LogError("[PV] [AI] Device initialization Error (Inner Level 3): (HResult=" + y.HResult + ") " + y);
                        return true;
                    });
                }
                return true;
            });
        }
        catch (Exception e)
        {
            Error = e.Message;
            logger.LogError("[PV] [AI] Device initialization Error: " + e);
        }
        if (Error == null)
        {
            logger.LogInfo("[PV] [AI] AudioIn successfully created");
        }
    }

    // Capture failure during recording: record the message as Error.
    private void Device_CaptureFailed(object sender, Windows.Media.Capture.MediaCaptureFailedEventArgs e)
    {
        Error = e.Message;
        logger.LogError("[PV] [AI] Error: " + Error);
    }

    public int SamplingRate { get { return samplingRate; } }
    /// <summary>Number of channels in the audio signal.</summary>
    public int Channels { get { return channels; } }

    // Initializes the device and starts recording; each raw byte buffer from the sink
    // is converted into a pooled short[] (2 bytes per sample) and passed to 'callback'.
    public void SetCallback(Action<short[]> callback, ObjectFactory<short[], int> bufferFactory)
    {
        init();
        if (Error != null)
        {
            return;
        }
        // Build a PCM-only audio encoding profile (no video, no container).
        var mep = new MediaEncodingProfile();
        mep.Audio = AudioEncodingProperties.CreatePcm((uint)samplingRate, (uint)channels, 16);
        mep.Video = null;
        mep.Container = null;
        device.StartRecordingAsync(mep, (buf, flags) =>
        {
            // logger.LogInfo("[PV] [AI] " + buf.Length + ": " + BitConverter.ToString(buf, 0, buf.Length > 20 ? 20 : buf.Length));
            if (buf != null)
            {
                var sb = bufferFactory.New(buf.Length / 2);
                Buffer.BlockCopy(buf, 0, sb, 0, buf.Length);
                callback(sb);
            }
        }).ContinueWith((t) =>
        {
            if (t.Exception == null)
            {
                logger.LogInfo("[PV] [AI] Recording successfully started");
            }
            else
            {
                t.Exception.Handle((x) =>
                {
                    Error = x.Message;
                    logger.LogError("[PV] [AI] Recording starting Error: " + Error);
                    return true;
                });
            }
        });
    }

    private static readonly ArraySegment<byte> EmptyBuffer = new ArraySegment<byte>(new byte[] { });

    // Pusher produces no pull-style output; always returns an empty segment.
    public ArraySegment<byte> DequeueOutput(out FrameFlags flags)
    {
        flags = 0;
        return EmptyBuffer;
    }

    /// <summary>Null while the pusher is valid; otherwise a description of the failure.</summary>
    public string Error { get; private set; }
    /// <summary>True when initialization failed due to denied microphone access.</summary>
    public bool ErrorAccess { get; private set; }

    public void EndOfStream()
    {
    }

    public I GetPlatformAPI<I>() where I : class
    {
        return null;
    }

    // Stops recording asynchronously; note Dispose returns before the stop completes.
    public void Dispose()
    {
        device.StopRecordingAsync().ContinueWith((t) =>
        {
            logger.LogInfo("[PV] [AI] AudioIn disposed");
        });
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7162c9f5b427069429a3363d20f3deba
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,340 @@
#if WINDOWS_UWP || ENABLE_WINMD_SUPPORT
using System;
using System.Linq;
using System.Threading.Tasks;
using Windows.Devices.Enumeration;
using Windows.Foundation;
using Windows.Media.Capture;
using Windows.Media.MediaProperties;
namespace Photon.Voice.UWP
{
/// <summary>Raised when MediaCapture initialization finishes; ok is true on success.
/// (Typos in the delegate and parameter names are kept for compatibility with existing callers.)</summary>
public delegate void MediaCaptureInitConmpleted(MediaCapture mediaCpture, bool ok);
// Thin wrapper around Windows.Media.Capture.MediaCapture that records audio or video
// to a custom media sink (MediaSinkProxy) and forwards encoded packets to a callback.
class CaptureDevice
{
    public enum Media
    {
        Audio,
        Video
    }

    private Media media;
    private string deviceID;
    // Media capture object
    private MediaCapture mediaCapture;
    // Custom media sink
    private MediaExtensions.MediaSinkProxy mediaSink;
    // Flag indicating if recording to custom sink has started
    private bool recordingStarted = false;
    // Gates forwarding of MediaCapture.Failed to CaptureFailed subscribers.
    private bool forwardEvents = false;
    private ILogger logger;

    internal MediaCapture MediaCapture { get { return mediaCapture; } }

    // Wraps the capture failed and media sink incoming connection events
    public event EventHandler<MediaCaptureFailedEventArgs> CaptureFailed;

    public CaptureDevice(ILogger logger, Media media, string deviceID)
    {
        this.logger = logger;
        this.media = media;
        this.deviceID = deviceID;
    }

    /// <summary>
    /// Handler for the wrapped MediaCapture object's Failed event. It just wraps and forward's MediaCapture's
    /// Failed event as own CaptureFailed event
    /// </summary>
    private void mediaCapture_Failed(MediaCapture sender, MediaCaptureFailedEventArgs errorEventArgs)
    {
        if (CaptureFailed != null && forwardEvents) CaptureFailed(this, errorEventArgs);
    }

    /// <summary>
    /// Cleans up the sink resources and resets the recording flag.
    /// </summary>
    private void CleanupSink()
    {
        if (mediaSink != null)
        {
            mediaSink.Dispose();
            mediaSink = null;
            recordingStarted = false;
        }
    }

    // Detaches from MediaCapture and releases the sink.
    private void DoCleanup()
    {
        if (mediaCapture != null)
        {
            mediaCapture.Failed -= mediaCapture_Failed;
            mediaCapture = null;
        }
        CleanupSink();
    }

    /// <summary>Initializes the capture device (blocking; see InitializeAsync).</summary>
    public void Initialize()
    {
        InitializeAsync();
    }

    // NOTE(review): despite its name, this method blocks on InitializeAsync via
    // t.AsTask().Wait() — confirm callers do not expect true asynchrony.
    public void InitializeAsync()
    {
        try
        {
            var settings = new MediaCaptureInitializationSettings();
            if (media == Media.Video)
            {
                settings.StreamingCaptureMode = StreamingCaptureMode.Video;
                settings.VideoDeviceId = deviceID;
            }
            else
            {
                settings.StreamingCaptureMode = StreamingCaptureMode.Audio;
                settings.AudioDeviceId = deviceID;
            }
            forwardEvents = true;
            if (mediaCapture != null)
            {
                throw new InvalidOperationException("Camera is already initialized");
            }
            mediaCapture = new MediaCapture();
            mediaCapture.Failed += mediaCapture_Failed;
            var t = mediaCapture.InitializeAsync(settings);
            t.AsTask().Wait();
            lock (mediaCaptureInitedLock)
            {
                mediaCaptureInited = true;
                lastMediaCaptureInitStatus = t.Status == AsyncStatus.Completed;
                if (MediaCaptureInitCompleted != null)
                {
                    MediaCaptureInitCompleted(mediaCapture, t.Status == AsyncStatus.Completed);
                }
            }
        }
        catch (Exception)
        {
            DoCleanup();
            throw; // fix: was 'throw e;' which resets the stack trace
        }
    }

    internal event MediaCaptureInitConmpleted MediaCaptureInitCompleted;
    object mediaCaptureInitedLock = new object();
    bool mediaCaptureInited;
    bool lastMediaCaptureInitStatus;

    // Subscribes 'x' to the init-completed event; if initialization already finished,
    // invokes it immediately with the last status.
    // NOTE(review): the handler is invoked AND subscribed, so it fires again if
    // initialization completes after subscription — confirm this double-fire is intended.
    internal void MediaCaptureInitCompletedAdd(MediaCaptureInitConmpleted x)
    {
        lock (mediaCaptureInitedLock)
        {
            if (mediaCaptureInited)
            {
                x.Invoke(mediaCapture, lastMediaCaptureInitStatus);
            }
            MediaCaptureInitCompleted += x;
        }
    }

    /// <summary>
    /// Asynchronous method cleaning up resources and stopping recording if necessary.
    /// </summary>
    public async Task CleanUpAsync()
    {
        try
        {
            // NOTE(review): enabling event forwarding during cleanup looks inverted
            // (false would suppress failure events while tearing down) — confirm.
            forwardEvents = true;
            if (mediaCapture == null && mediaSink == null) return;
            if (recordingStarted)
            {
                await mediaCapture.StopRecordAsync();
            }
            DoCleanup();
        }
        catch (Exception)
        {
            DoCleanup();
        }
    }

    /// <summary>
    /// Creates url object from MediaCapture
    /// </summary>
    public MediaCapture CaptureSource
    {
        get { return mediaCapture; }
    }

    /// <summary>
    /// Allow selection of camera settings.
    /// </summary>
    /// <param name="mediaStreamType" type="Windows.Media.Capture.MediaStreamType">
    /// Type of a the media stream.
    /// </param>
    /// <param name="filterSettings" type="Func<Windows.Media.MediaProperties.IMediaEncodingProperties, bool>">
    /// A predicate function, which will be called to filter the correct settings.
    /// </param>
    public async Task<IMediaEncodingProperties> SelectPreferredCameraStreamSettingAsync(MediaStreamType mediaStreamType, Func<IMediaEncodingProperties, bool> filterSettings)
    {
        IMediaEncodingProperties previewEncodingProperties = null;
        if (mediaStreamType == MediaStreamType.Audio || mediaStreamType == MediaStreamType.Photo)
        {
            throw new ArgumentException("mediaStreamType value of MediaStreamType.Audio or MediaStreamType.Photo is not supported", "mediaStreamType");
        }
        if (filterSettings == null)
        {
            throw new ArgumentNullException("filterSettings");
        }
        var properties = mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(mediaStreamType);
        var filterredProperties = properties.Where(filterSettings);
        var preferredSettings = filterredProperties.ToArray();
        // Sort ascending by width and pick the narrowest matching setting.
        Array.Sort<IMediaEncodingProperties>(preferredSettings, (x, y) =>
        {
            return (int)(((x as VideoEncodingProperties).Width) -
                (y as VideoEncodingProperties).Width);
        });
        if (preferredSettings.Length > 0)
        {
            previewEncodingProperties = preferredSettings[0];
            await mediaCapture.VideoDeviceController.SetMediaStreamPropertiesAsync(mediaStreamType, preferredSettings[0]);
        }
        return previewEncodingProperties;
    }

    /// <summary>
    /// Starts media recording asynchronously
    /// </summary>
    /// <param name="encodingProfile">
    /// Encoding profile used for the recording session
    /// </param>
    /// <param name="encoderCallback">
    /// Invoked for each encoded packet with its payload and keyframe flag; may be null.
    /// </param>
    public async Task StartRecordingAsync(MediaEncodingProfile encodingProfile, Action<byte[], FrameFlags> encoderCallback)
    {
        try
        {
            // We cannot start recording twice.
            if (mediaSink != null && recordingStarted)
            {
                throw new InvalidOperationException("Recording already started.");
            }
            // Release sink if there is one already.
            CleanupSink();
            // Create new sink
            mediaSink = new MediaExtensions.MediaSinkProxy();
            if (encoderCallback != null)
            {
                mediaSink.OutgoingPacketEvent += (object sender, MediaExtensions.Packet p) =>
                {
                    encoderCallback(p.Buffer, p.Keyframe ? FrameFlags.KeyFrame : 0);
                };
            }
            var mfExtension = await mediaSink.InitializeAsync(encodingProfile.Audio, encodingProfile.Video);
            await mediaCapture.StartRecordToCustomSinkAsync(encodingProfile, mfExtension);
            //var file = await Windows.Storage.KnownFolders.CameraRoll.CreateFileAsync("pop.mp4", Windows.Storage.CreationCollisionOption.GenerateUniqueName);
            //await mediaCapture.StartRecordToStorageFileAsync(encodingProfile, file);
            recordingStarted = true;
        }
        catch (Exception)
        {
            CleanupSink();
            throw; // fix: was 'throw e;' which resets the stack trace
        }
    }

    /// <summary>
    /// Stops recording asynchronously
    /// </summary>
    public async Task StopRecordingAsync()
    {
        if (recordingStarted)
        {
            try
            {
                await mediaCapture.StopRecordAsync();
                CleanupSink();
            }
            catch (Exception)
            {
                CleanupSink();
            }
        }
    }

    /// <summary>Returns true if at least one video capture device is present.</summary>
    public static async Task<bool> CheckForRecordingDeviceAsync()
    {
        var cameraFound = false;
        var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
        if (devices.Count > 0)
        {
            cameraFound = true;
        }
        return cameraFound;
    }
}
/// <summary>Enumerates UWP devices of a given DeviceClass (audio or video capture).</summary>
public class DeviceEnumerator : DeviceEnumeratorBase
{
    Windows.Devices.Enumeration.DeviceClass deviceClass;

    public DeviceEnumerator(ILogger logger, Windows.Devices.Enumeration.DeviceClass deviceClass) : base(logger)
    {
        this.deviceClass = deviceClass;
        Refresh();
    }

    /// <summary>Blocks on FindAllAsync and rebuilds the device list; sets Error on failure.</summary>
    public override void Refresh()
    {
        var findOp = Windows.Devices.Enumeration.DeviceInformation.FindAllAsync(deviceClass);
        findOp.AsTask().Wait();
        if (findOp.Status == Windows.Foundation.AsyncStatus.Error)
        {
            Error = findOp.ErrorCode.Message;
            return;
        }
        devices = new System.Collections.Generic.List<DeviceInfo>();
        foreach (var info in findOp.GetResults())
        {
            devices.Add(new DeviceInfo(info.Id, info.Name));
        }
    }

    public override void Dispose()
    {
    }
}
/// <summary>Enumerates audio capture (microphone) devices.</summary>
public class AudioInEnumerator : DeviceEnumerator
{
    public AudioInEnumerator(ILogger logger)
        : base(logger, Windows.Devices.Enumeration.DeviceClass.AudioCapture)
    {
    }
}
/// <summary>Enumerates video capture (camera) devices.</summary>
public class VideoInEnumerator : DeviceEnumerator
{
    public VideoInEnumerator(ILogger logger)
        : base(logger, Windows.Devices.Enumeration.DeviceClass.VideoCapture)
    {
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b7c50b954b279bb43a2d7a6c6b062644
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 840de0178004b5c4a87eccafbf3b96dc
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 00a273964bddafc47ba8e7e50c23662c
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,151 @@
using System;
using UnityEngine;
namespace Photon.Voice.Unity
{
/// <summary>Toggles for Android microphone input effects.</summary>
public class AndroidAudioInParameters
{
    public bool EnableAEC = false; // acoustic echo cancellation
    public bool EnableAGC = false; // automatic gain control
    public bool EnableNS = false;  // noise suppression
}
// depends on Unity's AndroidJavaProxy
// Android microphone capture via a Java helper object
// (com.exitgames.photon.audioinaec.AudioInAEC) that can enable the platform's
// AEC, AGC and NS effects. Audio frames arrive through a shared java buffer.
public class AndroidAudioInAEC : Voice.IAudioPusher<short>, IResettable
{
    // Java-side callback proxy: notified when the shared java buffer has been
    // filled; copies the data out and forwards it to the managed callback.
    class DataCallback : AndroidJavaProxy
    {
        Action<short[]> callback;
        IntPtr javaBuf;
        public DataCallback() : base("com.exitgames.photon.audioinaec.AudioInAEC$DataCallback") { }
        public void SetCallback(Action<short[]> callback, IntPtr javaBuf)
        {
            this.callback = callback;
            this.javaBuf = javaBuf;
        }
        // Invoked from java for each captured frame.
        public void OnData()
        {
            if (callback != null)
            {
                //TODO: copy to LocalVoiceFramed.PushDataBufferPool element instead
                var buf = AndroidJNI.FromShortArray(javaBuf);
                cntFrame++;
                cntShort += buf.Length;
                this.callback(buf);
            }
        }
        // Invoked from java when capture stops; releases the global buffer reference.
        public void OnStop()
        {
            AndroidJNI.DeleteGlobalRef(javaBuf);
        }
        // simple capture counters
        int cntFrame;
        int cntShort;
    }
    AndroidJavaObject audioIn;
    IntPtr javaBuf;
    Voice.ILogger logger;
    // sample rate reported by the java recorder after Start; 0 until then
    int audioInSampleRate = 0;
    // Creates the java recorder and starts capture. On failure, Error is set
    // (used as the validity flag throughout this class).
    public AndroidAudioInAEC(Voice.ILogger logger, bool enableAEC = false, bool enableAGC = false, bool enableNS = false)
    {
        // true means to use a route-dependent value which is usually the sample rate of the source
        // otherwise, 44100 Hz requested
        // On Android 4.4.4 (probably on all < 6.0), auto does not work: java.lang.IllegalArgumentException: 0Hz is not a supported sample rate.
        const bool SAMPLE_RATE_AUTO = false;
        // 44100Hz is currently the only rate that is guaranteed to work on all devices
        // used for GetMinBufferSize call even if SAMPLE_RATE_AUTO = true
        const int SAMPLE_RATE_44100 = 44100;
        const int SAMPLE_RATE_UNSPECIFIED = 0;
        const int SAMPLE_RATE_REQUEST = SAMPLE_RATE_AUTO ? SAMPLE_RATE_UNSPECIFIED : SAMPLE_RATE_44100;
        this.logger = logger;
        try
        {
            this.callback = new DataCallback();
            audioIn = new AndroidJavaObject("com.exitgames.photon.audioinaec.AudioInAEC");
            //bool aecAvailable = audioIn.Call<bool>("AECIsAvailable");
            int minBufSize = audioIn.Call<int>("GetMinBufferSize", SAMPLE_RATE_44100, Channels);
            logger.LogInfo("[PV] AndroidAudioInAEC: AndroidJavaObject created: aec: {0}/{1}, agc: {2}/{3}, ns: {4}/{5} minBufSize: {6}",
                enableAEC, audioIn.Call<bool>("AECIsAvailable"),
                enableAGC, audioIn.Call<bool>("AGCIsAvailable"),
                enableNS, audioIn.Call<bool>("NSIsAvailable"),
                minBufSize);
            AndroidJavaClass app = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
            AndroidJavaObject activity = app.GetStatic<AndroidJavaObject>("currentActivity");
            // Set buffer IntPtr reference separately via pure jni call, pass other values and start capture via AndroidJavaObject helper
            var ok = audioIn.Call<bool>("Start", activity, this.callback, SAMPLE_RATE_REQUEST, Channels, minBufSize * 4, enableAEC, enableAGC, enableNS);
            if (ok)
            {
                audioInSampleRate = audioIn.Call<int>("GetSampleRate");
                logger.LogInfo("[PV] AndroidAudioInAEC: AndroidJavaObject started: {0}, sampling rate: {1}, channels: {2}, record buffer size: {3}", ok, SamplingRate, Channels, minBufSize * 4);
            }
            else
            {
                Error = "[PV] AndroidAudioInAEC constructor: calling Start java method failure";
                logger.LogError("[PV] AndroidAudioInAEC: {0}", Error);
            }
        }
        catch (Exception e)
        {
            Error = e.ToString();
            if (Error == null) // should never happen but since Error used as validity flag, make sure that it's not null
            {
                Error = "Exception in AndroidAudioInAEC constructor";
            }
            logger.LogError("[PV] AndroidAudioInAEC: {0}", Error);
        }
    }
    // Supposed to be called once at voice initialization.
    // Otherwise recreate native object (instead of adding 'set callback' method to java interface)
    public void SetCallback(Action<short[]> callback, ObjectFactory<short[], int> bufferFactory)
    {
        if (Error == null)
        {
            var voiceFrameSize = bufferFactory.Info;
            // setting to voice FrameSize lets to avoid framing procedure
            javaBuf = AndroidJNI.NewGlobalRef(AndroidJNI.NewShortArray(voiceFrameSize));
            this.callback.SetCallback(callback, javaBuf);
            var meth = AndroidJNI.GetMethodID(audioIn.GetRawClass(), "SetBuffer", "([S)Z");
            bool ok = AndroidJNI.CallBooleanMethod(audioIn.GetRawObject(), meth, new jvalue[] { new jvalue() { l = javaBuf } });
            if (!ok)
            {
                Error = "AndroidAudioInAEC.SetCallback(): calling SetBuffer java method failure";
            }
        }
        if (Error != null)
        {
            logger.LogError("[PV] AndroidAudioInAEC: {0}", Error);
        }
    }
    DataCallback callback;
    // Capture is always mono.
    public int Channels { get { return 1; } }
    public int SamplingRate { get { return audioInSampleRate; } }
    public string Error { get; private set; }
    // Asks the java recorder to restart capture.
    public void Reset()
    {
        if (audioIn != null)
        {
            audioIn.Call("Reset");
        }
    }
    // Stops capture; the java side triggers DataCallback.OnStop which frees javaBuf.
    public void Dispose()
    {
        if (audioIn != null)
        {
            audioIn.Call<bool>("Stop");
        }
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4b148f821e0a63242b85815126cf5c6a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,63 @@
using UnityEngine;
namespace Photon.Voice.Unity
{
// Wraps UnityEngine.AudioClip with Voice.IAudioStream interface.
// Used for playing back audio clips via Photon Voice.
// Wraps UnityEngine.AudioClip with the Voice.IAudioReader interface so a clip
// can be streamed through Photon Voice. Playback is paced against wall-clock time.
public class AudioClipWrapper : IAudioReader<float>
{
    private AudioClip audioClip;
    private int readPos;        // next sample offset to read from the clip
    private float startTime;    // wall-clock time playback (re)started
    private bool playing = true;

    /// <summary>When true, playback restarts from the beginning after the clip ends.</summary>
    public bool Loop { get; set; }

    public AudioClipWrapper(AudioClip audioClip)
    {
        this.audioClip = audioClip;
        this.startTime = Time.time;
    }

    // Fills 'buffer' with the next chunk of samples once enough playback time has
    // elapsed; returns false when no chunk is due yet or playback has finished.
    public bool Read(float[] buffer)
    {
        if (!this.playing)
        {
            return false;
        }
        int samplesPerBuffer = buffer.Length / this.audioClip.channels;
        int virtualPlayPos = (int)((Time.time - this.startTime) * this.audioClip.frequency);
        if (virtualPlayPos <= this.readPos + samplesPerBuffer)
        {
            return false;
        }
        this.audioClip.GetData(buffer, this.readPos);
        this.readPos += samplesPerBuffer;
        if (this.readPos >= this.audioClip.samples)
        {
            if (this.Loop)
            {
                this.readPos = 0;
                this.startTime = Time.time;
            }
            else
            {
                this.playing = false;
            }
        }
        return true;
    }

    public int SamplingRate { get { return this.audioClip.frequency; } }
    public int Channels { get { return this.audioClip.channels; } }
    public string Error { get; private set; }

    public void Dispose()
    {
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 634203506e5489f44a94fb127aca71bb
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1 @@
// placeholder for deleted file

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e98313bf43e36d241a837456f38e4077
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,18 @@
using UnityEngine;
using System;
namespace Photon.Voice.Unity
{
/// <summary>Exposes Unity's OnAudioFilterRead callback as a C# event so listeners
/// can capture the audio frames flowing through the attached AudioSource.</summary>
public class AudioOutCapture : MonoBehaviour
{
    /// <summary>Raised per audio frame with the sample buffer and channel count.</summary>
    public event Action<float[], int> OnAudioFrame;

    // Unity audio-thread callback: forward the frame to subscribers, if any.
    void OnAudioFilterRead(float[] frame, int channels)
    {
        this.OnAudioFrame?.Invoke(frame, channels);
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4d8a5b2f1284e0e49b5df5c6300295d0
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,9 @@

namespace Photon.Voice.Unity
{
// Data passed along when a local Photon voice has been created.
public class PhotonVoiceCreatedParams
{
    // The created local (outgoing) voice.
    public Voice.LocalVoice Voice { get; set; }
    // Description of the audio source the voice was created from.
    public Voice.IAudioDesc AudioDesc { get; set; }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 64814f7cb6abfbf40b6d4db1fa05642e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,66 @@
using UnityEngine;
using System;
using System.Linq;
using System.Collections.Generic;
using System.Collections;
namespace Photon.Voice.Unity
{
/// <summary>Lists audio input (microphone) devices reported by Unity.</summary>
public class AudioInEnumerator : DeviceEnumeratorBase
{
    public AudioInEnumerator(ILogger logger) : base(logger)
    {
        Refresh();
    }

    /// <summary>Rebuilds the device list from UnityMicrophone.devices.</summary>
    public override void Refresh()
    {
        devices = new List<DeviceInfo>();
        foreach (var deviceName in UnityMicrophone.devices)
        {
            devices.Add(new DeviceInfo(deviceName));
        }
    }
#if UNITY_WEBGL
    public override bool IsSupported => false;
    public override string Error { get { return "Current platform " + Application.platform + " is not supported by AudioInEnumerator."; } }
#else
    public override string Error { get { return null; } }
#endif
    public override void Dispose()
    {
    }
}
#if PHOTON_VOICE_VIDEO_ENABLE
/// <summary>Lists video input (webcam) devices reported by Unity.</summary>
public class VideoInEnumerator : DeviceEnumeratorBase
{
    public VideoInEnumerator(ILogger logger) : base(logger)
    {
        Refresh();
    }

    /// <summary>Rebuilds the device list from WebCamTexture.devices.</summary>
    public override void Refresh()
    {
        devices = new List<DeviceInfo>();
        foreach (var cam in UnityEngine.WebCamTexture.devices)
        {
            devices.Add(new DeviceInfo(cam.name));
        }
    }

    public override string Error { get { return null; } }

    public override void Dispose()
    {
    }
}
#endif
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 0a7107a0602d95947b9dba4a3d31b1a1
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,12 @@
using UnityEngine;
namespace Photon.Voice.Unity
{
/// <summary>ILogger implementation forwarding Photon Voice log calls to UnityEngine.Debug.</summary>
public class Logger : ILogger
{
    public void LogError(string fmt, params object[] args)
    {
        Debug.LogErrorFormat(fmt, args);
    }

    public void LogWarning(string fmt, params object[] args)
    {
        Debug.LogWarningFormat(fmt, args);
    }

    public void LogInfo(string fmt, params object[] args)
    {
        Debug.LogFormat(fmt, args);
    }

    // Unity has no dedicated debug level; map it to the regular log.
    public void LogDebug(string fmt, params object[] args)
    {
        Debug.LogFormat(fmt, args);
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b2fbbad5642e37e45b778dfa7336b55c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,107 @@
using UnityEngine;
using System;
using System.Linq;
namespace Photon.Voice.Unity
{
// Wraps UnityEngine.Microphone with Voice.IAudioStream interface.
// Wraps Unity's Microphone API with the Voice.IAudioReader interface: records
// continuously into a looped AudioClip and copies new samples out on each Read().
public class MicWrapper : IAudioReader<float>
{
    private AudioClip mic;      // looped recording clip returned by UnityMicrophone.Start
    private string device;      // device name as passed by the caller (used for End/GetPosition)
    ILogger logger;
    // Starts recording on 'device' (null/empty = default) at 'suggestedFrequency'
    // if supported, otherwise at the device's maximum supported frequency.
    // On failure, Error is set (used as the validity flag throughout this class).
    public MicWrapper(string device, int suggestedFrequency, ILogger logger)
    {
        try
        {
            this.device = device;
            this.logger = logger;
            if (UnityMicrophone.devices.Length < 1)
            {
                Error = "No microphones found (UnityMicrophone.devices is empty)";
                logger.LogError("[PV] MicWrapper: " + Error);
                return;
            }
            if (!string.IsNullOrEmpty(device) && !UnityMicrophone.devices.Contains(device))
            {
                logger.LogError(string.Format("[PV] MicWrapper: \"{0}\" is not a valid Unity microphone device, falling back to default one", device));
                device = null; // null selects the default device in the calls below
            }
            int minFreq;
            int maxFreq;
            logger.LogInfo("[PV] MicWrapper: initializing microphone '{0}', suggested frequency = {1}).", device, suggestedFrequency);
            UnityMicrophone.GetDeviceCaps(device, out minFreq, out maxFreq);
            var frequency = suggestedFrequency;
            // minFreq = maxFreq = 44100; // test like android client
            if (suggestedFrequency < minFreq || maxFreq != 0 && suggestedFrequency > maxFreq)
            {
                logger.LogWarning("[PV] MicWrapper does not support suggested frequency {0} (min: {1}, max: {2}). Setting to {2}",
                    suggestedFrequency, minFreq, maxFreq);
                frequency = maxFreq;
            }
            // 1-second looped clip; Read() tracks wrap-arounds below
            this.mic = UnityMicrophone.Start(device, true, 1, frequency);
            logger.LogInfo("[PV] MicWrapper: microphone '{0}' initialized, frequency = {1}, channels = {2}.", device, this.mic.frequency, this.mic.channels);
        }
        catch (Exception e)
        {
            Error = e.ToString();
            if (Error == null) // should never happen but since Error used as validity flag, make sure that it's not null
            {
                Error = "Exception in MicWrapper constructor";
            }
            logger.LogError("[PV] MicWrapper: " + Error);
        }
    }
    // Capture format of the recording clip; 0 while in error state.
    public int SamplingRate { get { return Error == null ? this.mic.frequency : 0; } }
    public int Channels { get { return Error == null ? this.mic.channels : 0; } }
    public string Error { get; private set; }
    public void Dispose()
    {
        UnityMicrophone.End(this.device);
    }
    private int micPrevPos;   // last observed mic position, for wrap-around detection
    private int micLoopCnt;   // number of times the looped clip wrapped
    private int readAbsPos;   // absolute (unwrapped) read position in samples
    // Copies the next buffer's worth of samples if the mic has produced enough
    // data since the last call; returns false when no full buffer is available.
    public bool Read(float[] buffer)
    {
        if (Error != null)
        {
            return false;
        }
        int micPos = UnityMicrophone.GetPosition(this.device);
        // loop detection
        if (micPos < micPrevPos)
        {
            micLoopCnt++;
        }
        micPrevPos = micPos;
        // absolute write position of the recorder across clip wrap-arounds
        var micAbsPos = micLoopCnt * this.mic.samples + micPos;
        if (mic.channels == 0)
        {
            Error = "Number of channels is 0 in Read()";
            logger.LogError("[PV] MicWrapper: " + Error);
            return false;
        }
        var bufferSamplesCount = buffer.Length / mic.channels;
        var nextReadPos = this.readAbsPos + bufferSamplesCount;
        // only read when the recorder is strictly ahead of the requested chunk
        if (nextReadPos < micAbsPos)
        {
            this.mic.GetData(buffer, this.readAbsPos % this.mic.samples);
            this.readAbsPos = nextReadPos;
            return true;
        }
        else
        {
            return false;
        }
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 5d3c3acd76d02834cab32fd59144e27b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,343 @@
using UnityEngine;
using System;
using System.Linq;
namespace Photon.Voice.Unity
{
// Captures microphone input by routing it through a muted Unity AudioSource and an
// AudioOutCapture component, pushing each rendered audio frame to Photon Voice.
// The output format (sampling rate, channels) follows the project's audio output
// settings, not the microphone's native format.
public class MicWrapperPusher : IAudioPusher<float>
{
    private AudioSource audioSource;
    private AudioClip mic;
    private string device;
    private ILogger logger;
    private AudioOutCapture audioOutCapture;
    private int sampleRate;
    private int channels;
    private bool destroyGameObjectOnStop;

    /// <summary>Creates a pusher that reuses an existing AudioSource for mic playback capture.</summary>
    public MicWrapperPusher(string device, AudioSource aS, int suggestedFrequency, ILogger lg, bool destroyOnStop = true)
    {
        try
        {
            this.logger = lg;
            this.device = device;
            this.audioSource = aS;
            this.destroyGameObjectOnStop = destroyOnStop;
            int frequency;
            if (!this.InitInput(ref device, suggestedFrequency, out frequency))
            {
                return;
            }
            if (!this.PrepareAudioSource())
            {
                return;
            }
            this.StartMic(device, frequency);
        }
        catch (Exception e)
        {
            this.SetErrorFromException(e);
        }
    }

    /// <summary>Creates a pusher using (or creating) an AudioSource on the given GameObject.</summary>
    public MicWrapperPusher(string device, GameObject gO, int suggestedFrequency, ILogger lg, bool destroyOnStop = true)
    {
        try
        {
            this.logger = lg;
            this.device = device;
            this.destroyGameObjectOnStop = destroyOnStop;
            int frequency;
            if (!this.InitInput(ref device, suggestedFrequency, out frequency))
            {
                return;
            }
            if (!gO || gO == null) // Unity fake-null check for destroyed objects
            {
                this.logger.LogWarning("[PV] MicWrapperPusher: AudioSource GameObject is destroyed or null. Creating a new one.");
                gO = new GameObject("[PV] MicWrapperPusher: AudioSource + AudioOutCapture");
                this.audioSource = gO.AddComponent<AudioSource>();
                this.audioOutCapture = this.audioSource.gameObject.AddComponent<AudioOutCapture>();
            }
            else
            {
                if (!gO.activeSelf)
                {
                    this.logger.LogWarning("[PV] MicWrapperPusher: AudioSource GameObject inactive, activating it.");
                    gO.SetActive(true);
                }
                if (!gO.activeInHierarchy)
                {
                    this.Error = "AudioSource GameObject is not active in hierarchy, audio input can't work.";
                    this.logger.LogError("[PV] MicWrapperPusher: {0}", this.Error);
                    return;
                }
                this.audioSource = gO.GetComponent<AudioSource>();
                if (ReferenceEquals(null, this.audioSource) || !this.audioSource)
                {
                    this.audioSource = gO.AddComponent<AudioSource>();
                }
                if (!this.PrepareAudioSource())
                {
                    return;
                }
            }
            this.StartMic(device, frequency);
        }
        catch (Exception e)
        {
            this.SetErrorFromException(e);
        }
    }

    /// <summary>Creates a pusher on a fresh GameObject, optionally parented to the given transform.</summary>
    public MicWrapperPusher(string device, Transform parentTransform, int suggestedFrequency, ILogger lg, bool destroyOnStop = true)
    {
        try
        {
            this.logger = lg;
            this.device = device;
            this.destroyGameObjectOnStop = destroyOnStop;
            int frequency;
            if (!this.InitInput(ref device, suggestedFrequency, out frequency))
            {
                return;
            }
            GameObject gO = new GameObject("[PV] MicWrapperPusher: AudioSource + AudioOutCapture");
            if (ReferenceEquals(null, parentTransform) || !parentTransform)
            {
                this.logger.LogWarning("[PV] MicWrapperPusher: Parent transform passed is destroyed or null. Creating AudioSource GameObject at root.");
            }
            else
            {
                gO.transform.SetParent(parentTransform, false);
                if (!gO.activeSelf)
                {
                    this.logger.LogWarning("[PV] MicWrapperPusher: AudioSource GameObject inactive, activating it.");
                    gO.gameObject.SetActive(true);
                }
                if (!gO.activeInHierarchy)
                {
                    this.Error = "AudioSource GameObject is not active in hierarchy, audio input can't work.";
                    this.logger.LogError("[PV] MicWrapperPusher: {0}", this.Error);
                    return;
                }
            }
            this.audioSource = gO.AddComponent<AudioSource>();
            this.audioOutCapture = this.audioSource.gameObject.AddComponent<AudioOutCapture>();
            this.StartMic(device, frequency);
        }
        catch (Exception e)
        {
            this.SetErrorFromException(e);
        }
    }

    // Validates microphone availability and the project audio output settings, resolves
    // an invalid device name to the default device and clamps the capture frequency to
    // the device capabilities. Returns false (with Error set) when setup cannot proceed.
    private bool InitInput(ref string device, int suggestedFrequency, out int frequency)
    {
        frequency = suggestedFrequency;
        if (UnityMicrophone.devices.Length < 1)
        {
            this.Error = "No microphones found (Microphone.devices is empty)";
            this.logger.LogError("[PV] MicWrapperPusher: {0}", this.Error);
            return false;
        }
        if (!string.IsNullOrEmpty(device) && !UnityMicrophone.devices.Contains(device))
        {
            this.logger.LogError("[PV] MicWrapperPusher: \"{0}\" is not a valid Unity microphone device, falling back to default one", device);
            device = UnityMicrophone.devices[0];
        }
        // output format follows the project audio settings, not the mic native format
        this.sampleRate = AudioSettings.outputSampleRate;
        switch (AudioSettings.speakerMode)
        {
            case AudioSpeakerMode.Mono: this.channels = 1; break;
            case AudioSpeakerMode.Stereo: this.channels = 2; break;
            default:
                this.Error = string.Concat("Only Mono and Stereo project speaker mode supported. Current mode is ", AudioSettings.speakerMode);
                this.logger.LogError("[PV] MicWrapperPusher: {0}", this.Error);
                return false;
        }
        int minFreq;
        int maxFreq;
        this.logger.LogInfo("[PV] MicWrapperPusher: initializing microphone '{0}', suggested frequency = {1}).", device, suggestedFrequency);
        UnityMicrophone.GetDeviceCaps(device, out minFreq, out maxFreq);
        // minFreq = maxFreq = 44100; // test like android client
        if (suggestedFrequency < minFreq || maxFreq != 0 && suggestedFrequency > maxFreq)
        {
            this.logger.LogWarning("[PV] MicWrapperPusher does not support suggested frequency {0} (min: {1}, max: {2}). Setting to {2}",
                suggestedFrequency, minFreq, maxFreq);
            frequency = maxFreq;
        }
        return true;
    }

    // Ensures this.audioSource and its GameObject are enabled/active and that an enabled
    // AudioOutCapture component is attached. Returns false (with Error set) when the
    // GameObject cannot become active in the hierarchy.
    private bool PrepareAudioSource()
    {
        if (!this.audioSource.enabled)
        {
            this.logger.LogWarning("[PV] MicWrapperPusher: AudioSource component disabled, enabling it.");
            this.audioSource.enabled = true;
        }
        if (!this.audioSource.gameObject.activeSelf)
        {
            this.logger.LogWarning("[PV] MicWrapperPusher: AudioSource GameObject inactive, activating it.");
            this.audioSource.gameObject.SetActive(true);
        }
        if (!this.audioSource.gameObject.activeInHierarchy)
        {
            this.Error = "AudioSource GameObject is not active in hierarchy, audio input can't work.";
            this.logger.LogError("[PV] MicWrapperPusher: {0}", this.Error);
            return false;
        }
        this.audioOutCapture = this.audioSource.gameObject.GetComponent<AudioOutCapture>();
        if (ReferenceEquals(null, this.audioOutCapture) || !this.audioOutCapture)
        {
            this.audioOutCapture = this.audioSource.gameObject.AddComponent<AudioOutCapture>();
        }
        if (!this.audioOutCapture.enabled)
        {
            this.logger.LogWarning("[PV] MicWrapperPusher: AudioOutCapture component disabled, enabling it.");
            this.audioOutCapture.enabled = true;
        }
        return true;
    }

    // Starts Unity microphone capture into a looped clip played back (muted) through
    // the AudioSource so that OnAudioFilterRead receives the mic signal.
    private void StartMic(string device, int frequency)
    {
        this.mic = UnityMicrophone.Start(device, true, 1, frequency);
        this.audioSource.mute = true;
        this.audioSource.volume = 0f;
        this.audioSource.clip = this.mic;
        this.audioSource.loop = true;
        this.audioSource.Play();
        this.logger.LogInfo("[PV] MicWrapperPusher: microphone '{0}' initialized, frequency = in:{1}|out:{2}, channels = in:{3}|out:{4}.", device, this.mic.frequency, this.SamplingRate, this.mic.channels, this.Channels);
    }

    // Stores exception info in Error (used as the validity flag) and logs it.
    private void SetErrorFromException(Exception e)
    {
        this.Error = e.ToString();
        if (this.Error == null) // should never happen but since Error used as validity flag, make sure that it's not null
        {
            this.Error = "Exception in MicWrapperPusher constructor";
        }
        this.logger.LogError("[PV] MicWrapperPusher: {0}", this.Error);
    }

    private float[] frame2 = new float[0];

    // Audio-thread callback: copy the captured frame, hand the copy to Photon Voice,
    // and silence the output buffer so the mic signal is not audible locally.
    private void AudioOutCaptureOnOnAudioFrame(float[] frame, int channelsNumber)
    {
        if (channelsNumber != this.Channels)
        {
            this.logger.LogWarning("[PV] MicWrapperPusher: channels number mismatch; expected:{0} got:{1}.", this.Channels, channelsNumber);
        }
        if (this.frame2.Length != frame.Length)
        {
            this.frame2 = new float[frame.Length];
        }
        Array.Copy(frame, this.frame2, frame.Length);
        // BUGFIX: push the copy (frame2), not the original buffer — the original is
        // zeroed below to mute local playback, which would wipe the consumer's data.
        this.pushCallback(this.frame2);
        Array.Clear(frame, 0, frame.Length);
    }

    private Action<float[]> pushCallback;

    // Supposed to be called once at voice initialization: subscribes to captured frames.
    public void SetCallback(Action<float[]> callback, ObjectFactory<float[], int> bufferFactory)
    {
        this.pushCallback = callback;
        this.audioOutCapture.OnAudioFrame += this.AudioOutCaptureOnOnAudioFrame;
    }

    public void Dispose()
    {
        if (this.pushCallback != null && this.audioOutCapture != null)
        {
            this.audioOutCapture.OnAudioFrame -= this.AudioOutCaptureOnOnAudioFrame;
        }
        UnityMicrophone.End(this.device);
        if (this.destroyGameObjectOnStop && this.audioSource != null)
        {
            UnityEngine.Object.Destroy(this.audioSource.gameObject);
        }
    }

    // Output format of the pushed frames; 0 while in error state.
    public int SamplingRate { get { return this.Error == null ? this.sampleRate : 0; } }
    public int Channels { get { return this.Error == null ? this.channels : 0; } }
    public string Error { get; private set; }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4d6ac92ecc68f5844986af1f12ba3b60
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,52 @@
using UnityEngine;
using System.Collections.Generic;
using System;
using System.Runtime.InteropServices;
namespace Photon.Voice.Unity
{
// Plays back input audio via Unity AudioSource
// May consume audio packets in thread other than Unity's main thread
// Plays back input audio via Unity AudioSource
// May consume audio packets in thread other than Unity's main thread
public class UnityAudioOut : AudioOutDelayControl<float>
{
    protected readonly AudioSource source;
    protected AudioClip clip;

    public UnityAudioOut(AudioSource audioSource, PlayDelayConfig playDelayConfig, ILogger logger, string logPrefix, bool debugInfo)
        : base(true, playDelayConfig, logger, "[PV] [Unity] AudioOut" + (logPrefix == "" ? "" : " " + logPrefix), debugInfo)
    {
        this.source = audioSource;
    }

    // Current playback position (in samples) within the looped clip.
    override public int OutPos { get { return source.timeSamples; } }

    // Creates the looped playback clip with the negotiated format.
    override public void OutCreate(int frequency, int channels, int bufferSamples)
    {
        this.source.loop = true;
        // using streaming clip leads to too long delays
        this.clip = AudioClip.Create("UnityAudioOut", bufferSamples, channels, frequency, false);
        this.source.clip = clip;
    }

    override public void OutStart()
    {
        this.source.Play();
    }

    // Writes a chunk of samples into the playback ring buffer (the clip).
    override public void OutWrite(float[] data, int offsetSamples)
    {
        clip.SetData(data, offsetSamples);
    }

    override public void Stop()
    {
        base.Stop();
        // BUGFIX: guard all AudioSource access; the original called source.Stop()
        // before the null (destroyed object) check, defeating the guard below.
        if (this.source != null)
        {
            this.source.Stop();
            this.source.clip = null;
        }
        clip = null;
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: c7cbffe9dfdd4b547a75479a23097ff4
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,72 @@
namespace Photon.Voice.Unity
{
#if UNITY_WEBGL
using System;
#endif
using UnityEngine;
/// <summary>A wrapper around UnityEngine.Microphone to be able to safely use Microphone and compile for WebGL.</summary>
public static class UnityMicrophone
{
#if UNITY_WEBGL
    // WebGL has no microphone API; report an empty device list.
    private static readonly string[] _devices = new string[0];
#endif
    /// <summary>Names of connected microphone devices (always empty on WebGL).</summary>
    public static string[] devices
    {
        get
        {
#if UNITY_WEBGL
            return _devices;
#else
            return Microphone.devices;
#endif
        }
    }
    /// <summary>Stops recording on the given device.</summary>
    public static void End(string deviceName)
    {
#if UNITY_WEBGL
        throw new NotImplementedException("Unity Microphone not supported on WebGL");
#else
        Microphone.End(deviceName);
#endif
    }
    /// <summary>Gets the supported frequency range of the device (0/0 = any frequency).</summary>
    public static void GetDeviceCaps(string deviceName, out int minFreq, out int maxFreq)
    {
#if UNITY_WEBGL
        throw new NotImplementedException("Unity Microphone not supported on WebGL");
#else
        Microphone.GetDeviceCaps(deviceName, out minFreq, out maxFreq);
#endif
    }
    /// <summary>Current write position (in samples) of the device's recording clip.</summary>
    public static int GetPosition(string deviceName)
    {
#if UNITY_WEBGL
        throw new NotImplementedException("Unity Microphone not supported on WebGL");
#else
        return Microphone.GetPosition(deviceName);
#endif
    }
    /// <summary>Whether the device is currently recording (always false on WebGL).</summary>
    public static bool IsRecording(string deviceName)
    {
#if UNITY_WEBGL
        return false;
#else
        return Microphone.IsRecording(deviceName);
#endif
    }
    /// <summary>Starts recording on the device into a (possibly looped) AudioClip.</summary>
    public static AudioClip Start(string deviceName, bool loop, int lengthSec, int frequency)
    {
#if UNITY_WEBGL
        throw new NotImplementedException("Unity Microphone not supported on WebGL");
#else
        return Microphone.Start(deviceName, loop, lengthSec, frequency);
#endif
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 0999de601f88ae74bb58440b4898d84a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: b47f8a0f90ad4c3478cf3e2481fd8b21
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,70 @@
#if PHOTON_VOICE_WINDOWS || UNITY_STANDALONE_WIN || UNITY_EDITOR_WIN
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace Photon.Voice.Windows
{
/// <summary>Enumerates microphones available on device.
/// </summary>
// Enumerates microphones through the native AudioIn library.
public class AudioInEnumerator : DeviceEnumeratorBase
{
    const string lib_name = "AudioIn";
    [DllImport(lib_name)]
    private static extern IntPtr Photon_Audio_In_CreateMicEnumerator();
    [DllImport(lib_name)]
    private static extern void Photon_Audio_In_DestroyMicEnumerator(IntPtr handle);
    [DllImport(lib_name)]
    private static extern int Photon_Audio_In_MicEnumerator_Count(IntPtr handle);
    [DllImport(lib_name)]
    private static extern IntPtr Photon_Audio_In_MicEnumerator_NameAtIndex(IntPtr handle, int idx);
    [DllImport(lib_name)]
    private static extern int Photon_Audio_In_MicEnumerator_IDAtIndex(IntPtr handle, int idx);

    // Native enumerator handle; IntPtr.Zero when not allocated.
    IntPtr handle;

    public AudioInEnumerator(ILogger logger) : base(logger)
    {
        Refresh();
    }

    /// <summary>Refreshes the microphones list.
    /// On failure, Error is set (used as the validity flag).
    /// </summary>
    public override void Refresh()
    {
        Dispose();
        try
        {
            handle = Photon_Audio_In_CreateMicEnumerator();
            var count = Photon_Audio_In_MicEnumerator_Count(handle);
            devices = new List<DeviceInfo>();
            for (int i = 0; i < count; i++)
            {
                devices.Add(new DeviceInfo(Photon_Audio_In_MicEnumerator_IDAtIndex(handle, i), Marshal.PtrToStringAuto(Photon_Audio_In_MicEnumerator_NameAtIndex(handle, i))));
            }
            Error = null;
        }
        catch (Exception e)
        {
            Error = e.ToString();
            if (Error == null) // should never happen but since Error used as validity flag, make sure that it's not null
            {
                Error = "Exception in AudioInEnumerator.Refresh()";
            }
        }
    }

    /// <summary>Disposes enumerator.
    /// Call it to free native resources.
    /// </summary>
    public override void Dispose()
    {
        // BUGFIX: free the native enumerator whenever a handle exists; the original
        // skipped destruction when Error was set, leaking a handle that was created
        // before a later failure in Refresh().
        if (handle != IntPtr.Zero)
        {
            Photon_Audio_In_DestroyMicEnumerator(handle);
            handle = IntPtr.Zero;
        }
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 81007f7b708822448a7f5940c71ac788
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,124 @@
#if PHOTON_VOICE_WINDOWS || UNITY_STANDALONE_WIN || UNITY_EDITOR_WIN
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace Photon.Voice.Windows
{
#pragma warning disable 0414
/// <summary>Marks a static method used as a native callback with the delegate type it is
/// marshaled through, for AOT/IL2CPP compilation. Mirrors Unity's attribute of the same name.</summary>
public class MonoPInvokeCallbackAttribute : System.Attribute
{
    private Type type;

    public MonoPInvokeCallbackAttribute(Type t)
    {
        this.type = t;
    }
}
#pragma warning restore 0414
/// <summary>Captures microphone input through the native Windows 'AudioIn' plugin
/// (Voice Capture DSP with acoustic echo cancellation) and pushes 16-bit mono PCM
/// frames to the callback registered via <see cref="SetCallback"/>.</summary>
public class WindowsAudioInPusher : IAudioPusher<short>
{
    // Processing modes of the underlying Voice Capture DSP (values match AudioInAec.cpp).
    enum SystemMode
    {
        SINGLE_CHANNEL_AEC = 0,
        OPTIBEAM_ARRAY_ONLY = 2,
        OPTIBEAM_ARRAY_AND_AEC = 4,
        SINGLE_CHANNEL_NSAGC = 5,
    }

    [DllImport("AudioIn")]
    private static extern IntPtr Photon_Audio_In_Create(int instanceID, SystemMode systemMode, int micDevIdx, int spkDevIdx, Action<int, IntPtr, int> callback, bool featrModeOn, bool noiseSup, bool agc, bool cntrClip);

    [DllImport("AudioIn")]
    private static extern void Photon_Audio_In_Destroy(IntPtr handler);

    private delegate void CallbackDelegate(int instanceID, IntPtr buf, int len);

    IntPtr handle;              // native capture object; IntPtr.Zero when not created or already destroyed
    int instanceID;             // key into instancePerHandle for routing the static native callback
    Action<short[]> pushCallback;
    ObjectFactory<short[], int> bufferFactory;

    /// <summary>Creates the native capture object for the given input device.
    /// On failure, Error is set non-null (it is used as the validity flag).</summary>
    /// <param name="deviceID">Native microphone device index.</param>
    /// <param name="logger">Logger used to report construction errors.</param>
    public WindowsAudioInPusher(int deviceID, ILogger logger)
    {
        try
        {
            lock (instancePerHandle)
            {
                // use default playback device (spkDevIdx = -1)
                handle = Photon_Audio_In_Create(instanceCnt, SystemMode.SINGLE_CHANNEL_AEC, deviceID, -1, pinnedPushCallback, true, true, true, true); // defaults in original ms sample: false, true, false, false
                this.instanceID = instanceCnt;
                instancePerHandle.Add(instanceCnt++, this);
            }
        }
        catch (Exception e)
        {
            Error = e.ToString();
            if (Error == null) // should never happen but since Error used as validity flag, make sure that it's not null
            {
                Error = "Exception in WindowsAudioInPusher constructor";
            }
            logger.LogError("[PV] WindowsAudioInPusher: " + Error);
        }
    }

    // IL2CPP does not support marshaling delegates that point to instance methods to native code.
    // Using static method and per instance table.
    static int instanceCnt;
    private static Dictionary<int, WindowsAudioInPusher> instancePerHandle = new Dictionary<int, WindowsAudioInPusher>();

    // Keep a strong reference to the delegate handed to native code. The native side stores
    // the marshaled function pointer and invokes it asynchronously later, so the delegate
    // must stay alive for the process lifetime. Passing the method group 'nativePushCallback'
    // directly to Photon_Audio_In_Create created a temporary delegate with no managed
    // reference, which the GC could collect and crash the native callback.
    private static readonly Action<int, IntPtr, int> pinnedPushCallback = nativePushCallback;

    [MonoPInvokeCallbackAttribute(typeof(CallbackDelegate))]
    private static void nativePushCallback(int instanceID, IntPtr buf, int len)
    {
        WindowsAudioInPusher instance;
        bool ok;
        lock (instancePerHandle)
        {
            ok = instancePerHandle.TryGetValue(instanceID, out instance);
        }
        if (ok)
        {
            instance.push(buf, len);
        }
    }

    // Supposed to be called once at voice initialization.
    // Otherwise recreate native object (instead of adding 'set callback' method to native interface)
    public void SetCallback(Action<short[]> callback, ObjectFactory<short[], int> bufferFactory)
    {
        this.bufferFactory = bufferFactory;
        this.pushCallback = callback;
    }

    // Copies one native frame into a pooled managed buffer and forwards it to pushCallback.
    private void push(IntPtr buf, int lenBytes)
    {
        if (pushCallback != null)
        {
            var len = lenBytes / sizeof(short);
            var bufManaged = this.bufferFactory.New(len);
            Marshal.Copy(buf, bufManaged, 0, len);
            pushCallback(bufManaged);
        }
    }

    public int Channels { get { return 1; } }

    // Hardcoded in AudioInAec.cpp
    // Supported sample rates: 8000, 11025, 16000, 22050
    // https://docs.microsoft.com/en-us/windows/win32/medfound/voicecapturedmo?redirectedfrom=MSDN (Voice Capture DSP)
    public int SamplingRate { get { return 16000; } }

    /// <summary>Non-null if construction failed; used as the validity flag for this instance.</summary>
    public string Error { get; private set; }

    /// <summary>Unregisters the instance from the callback routing table and destroys
    /// the native capture object. Safe to call multiple times.</summary>
    public void Dispose()
    {
        lock (instancePerHandle)
        {
            instancePerHandle.Remove(instanceID);
        }
        if (handle != IntPtr.Zero)
        {
            Photon_Audio_In_Destroy(handle);
            handle = IntPtr.Zero;
        }
    }
}
}
#endif

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: f2c69afb30794f445b1c5cebc5e6a3fe
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: