non-vr lobby, version fix

This commit is contained in:
joonasp
2022-06-29 14:45:17 +03:00
parent 5774be9822
commit 04baadfad1
1774 changed files with 573069 additions and 1533 deletions

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 00a273964bddafc47ba8e7e50c23662c
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,151 @@
using System;
using UnityEngine;
namespace Photon.Voice.Unity
{
// Toggles for the audio preprocessing features offered by the Android capture
// helper (consumed by AndroidAudioInAEC below). All default to off.
public class AndroidAudioInParameters
{
    // Acoustic Echo Cancellation
    public bool EnableAEC = false;
    // Automatic Gain Control
    public bool EnableAGC = false;
    // Noise Suppression
    public bool EnableNS = false;
}
// depends on Unity's AndroidJavaProxy
// Android microphone capture routed through a Java helper object
// (com.exitgames.photon.audioinaec.AudioInAEC) so the platform AEC/AGC/NS
// effects can be applied. Audio frames are pushed back into C# via a JNI proxy.
// depends on Unity's AndroidJavaProxy
public class AndroidAudioInAEC : Voice.IAudioPusher<short>, IResettable
{
    // JNI proxy implementing the Java-side DataCallback interface; the Java
    // recorder fills javaBuf and then invokes OnData()/OnStop() on this object.
    class DataCallback : AndroidJavaProxy
    {
        Action<short[]> callback;   // consumer of captured frames (set via SetCallback)
        IntPtr javaBuf;             // global ref to the Java short[] the recorder writes into
        public DataCallback() : base("com.exitgames.photon.audioinaec.AudioInAEC$DataCallback") { }
        // Wires the managed consumer and the shared Java buffer; must be called
        // before any OnData() arrives for frames to be delivered.
        public void SetCallback(Action<short[]> callback, IntPtr javaBuf)
        {
            this.callback = callback;
            this.javaBuf = javaBuf;
        }
        // Called from Java each time a full buffer of samples is available.
        // NOTE(review): likely invoked on a non-Unity thread — confirm before
        // touching Unity APIs from the callback.
        public void OnData()
        {
            if (callback != null)
            {
                //TODO: copy to LocalVoiceFramed.PushDataBufferPool element instead
                // Marshals the Java short[] into a fresh managed array (allocates per frame).
                var buf = AndroidJNI.FromShortArray(javaBuf);
                cntFrame++;
                cntShort += buf.Length;
                this.callback(buf);
            }
        }
        // Called from Java when capture stops; releases the global ref created in
        // AndroidAudioInAEC.SetCallback so the Java array can be collected.
        public void OnStop()
        {
            AndroidJNI.DeleteGlobalRef(javaBuf);
        }
        // Simple counters for debugging/statistics; not read anywhere in this file.
        int cntFrame;
        int cntShort;
    }
    AndroidJavaObject audioIn;   // the Java recorder instance
    IntPtr javaBuf;              // global ref to the shared Java sample buffer
    Voice.ILogger logger;
    int audioInSampleRate = 0;   // actual rate reported by Java after Start succeeds
    // Creates the Java recorder and starts capture. On any failure, Error is set
    // (non-null) and the instance is unusable; callers use Error as validity flag.
    public AndroidAudioInAEC(Voice.ILogger logger, bool enableAEC = false, bool enableAGC = false, bool enableNS = false)
    {
        // true means to use a route-dependent value which is usually the sample rate of the source
        // otherwise, 44100 Hz requested
        // On Android 4.4.4 (probably on all < 6.0), auto does not work: java.lang.IllegalArgumentException: 0Hz is not a supported sample rate.
        const bool SAMPLE_RATE_AUTO = false;
        // 44100Hz is currently the only rate that is guaranteed to work on all devices
        // used for GetMinBufferSize call even if SAMPLE_RATE_AUTO = true
        const int SAMPLE_RATE_44100 = 44100;
        const int SAMPLE_RATE_UNSPECIFIED = 0;
        const int SAMPLE_RATE_REQUEST = SAMPLE_RATE_AUTO ? SAMPLE_RATE_UNSPECIFIED : SAMPLE_RATE_44100;
        this.logger = logger;
        try
        {
            this.callback = new DataCallback();
            audioIn = new AndroidJavaObject("com.exitgames.photon.audioinaec.AudioInAEC");
            //bool aecAvailable = audioIn.Call<bool>("AECIsAvailable");
            int minBufSize = audioIn.Call<int>("GetMinBufferSize", SAMPLE_RATE_44100, Channels);
            logger.LogInfo("[PV] AndroidAudioInAEC: AndroidJavaObject created: aec: {0}/{1}, agc: {2}/{3}, ns: {4}/{5} minBufSize: {6}",
                enableAEC, audioIn.Call<bool>("AECIsAvailable"),
                enableAGC, audioIn.Call<bool>("AGCIsAvailable"),
                enableNS, audioIn.Call<bool>("NSIsAvailable"),
                minBufSize);
            // Java Start() needs the current Activity to access the audio system.
            AndroidJavaClass app = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
            AndroidJavaObject activity = app.GetStatic<AndroidJavaObject>("currentActivity");
            // Set buffer IntPtr reference separately via pure jni call, pass other values and start capture via AndroidJavaObject helper
            // Record buffer is 4x the reported minimum.
            var ok = audioIn.Call<bool>("Start", activity, this.callback, SAMPLE_RATE_REQUEST, Channels, minBufSize * 4, enableAEC, enableAGC, enableNS);
            if (ok)
            {
                audioInSampleRate = audioIn.Call<int>("GetSampleRate");
                logger.LogInfo("[PV] AndroidAudioInAEC: AndroidJavaObject started: {0}, sampling rate: {1}, channels: {2}, record buffer size: {3}", ok, SamplingRate, Channels, minBufSize * 4);
            }
            else
            {
                Error = "[PV] AndroidAudioInAEC constructor: calling Start java method failure";
                logger.LogError("[PV] AndroidAudioInAEC: {0}", Error);
            }
        }
        catch (Exception e)
        {
            Error = e.ToString();
            if (Error == null) // should never happen but since Error used as validity flag, make sure that it's not null
            {
                Error = "Exception in AndroidAudioInAEC constructor";
            }
            logger.LogError("[PV] AndroidAudioInAEC: {0}", Error);
        }
    }
    // Supposed to be called once at voice initialization.
    // Otherwise recreate native object (instead of adding 'set callback' method to java interface)
    // Allocates the shared Java buffer sized to the voice frame and hands it to
    // the Java recorder via raw JNI ("SetBuffer" signature ([S)Z: short[] -> bool).
    public void SetCallback(Action<short[]> callback, ObjectFactory<short[], int> bufferFactory)
    {
        if (Error == null)
        {
            var voiceFrameSize = bufferFactory.Info;
            // setting to voice FrameSize lets to avoid framing procedure
            // Global ref keeps the Java array alive; released in DataCallback.OnStop().
            javaBuf = AndroidJNI.NewGlobalRef(AndroidJNI.NewShortArray(voiceFrameSize));
            this.callback.SetCallback(callback, javaBuf);
            var meth = AndroidJNI.GetMethodID(audioIn.GetRawClass(), "SetBuffer", "([S)Z");
            bool ok = AndroidJNI.CallBooleanMethod(audioIn.GetRawObject(), meth, new jvalue[] { new jvalue() { l = javaBuf } });
            if (!ok)
            {
                Error = "AndroidAudioInAEC.SetCallback(): calling SetBuffer java method failure";
            }
        }
        if (Error != null)
        {
            logger.LogError("[PV] AndroidAudioInAEC: {0}", Error);
        }
    }
    DataCallback callback;
    // Capture is always mono.
    public int Channels { get { return 1; } }
    // Rate reported by the Java recorder; 0 until Start succeeds.
    public int SamplingRate { get { return audioInSampleRate; } }
    // Non-null indicates the instance is unusable (used as validity flag).
    public string Error { get; private set; }
    public void Reset()
    {
        if (audioIn != null)
        {
            audioIn.Call("Reset");
        }
    }
    // Stops Java-side capture; OnStop() then releases the buffer global ref.
    public void Dispose()
    {
        if (audioIn != null)
        {
            audioIn.Call<bool>("Stop");
        }
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4b148f821e0a63242b85815126cf5c6a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,63 @@
using UnityEngine;
namespace Photon.Voice.Unity
{
// Wraps UnityEngine.AudioClip with Voice.IAudioStream interface.
// Used for playing back audio clips via Photon Voice.
// Presents a UnityEngine.AudioClip as an IAudioReader<float> stream, pacing
// reads against wall-clock time so the clip is consumed in real time.
public class AudioClipWrapper : IAudioReader<float>
{
    private AudioClip clip;
    private int cursor;          // next sample index to hand out
    private float playbackStart; // Time.time when playback (re)started
    private bool active = true;  // false once a non-looping clip is fully read

    /// <summary>If true, reading restarts from the beginning when the clip ends.</summary>
    public bool Loop { get; set; }

    public AudioClipWrapper(AudioClip audioClip)
    {
        clip = audioClip;
        playbackStart = Time.time;
    }

    // Fills the buffer with the next chunk of clip data. Returns false when no
    // data is due yet (real-time pacing) or the clip has finished (non-looping).
    public bool Read(float[] buffer)
    {
        if (!active)
        {
            return false;
        }
        int clockPos = (int)((Time.time - playbackStart) * clip.frequency);
        int samplesWanted = buffer.Length / clip.channels;
        if (clockPos <= cursor + samplesWanted)
        {
            // Not enough wall-clock time has elapsed for another full buffer.
            return false;
        }
        clip.GetData(buffer, cursor);
        cursor += samplesWanted;
        if (cursor >= clip.samples)
        {
            if (Loop)
            {
                cursor = 0;
                playbackStart = Time.time;
            }
            else
            {
                active = false;
            }
        }
        return true;
    }

    public int SamplingRate => clip.frequency;
    public int Channels => clip.channels;
    public string Error { get; private set; }

    public void Dispose()
    {
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 634203506e5489f44a94fb127aca71bb
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1 @@
// placeholder for deleted file

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: e98313bf43e36d241a837456f38e4077
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,18 @@
using UnityEngine;
using System;
namespace Photon.Voice.Unity
{
// Exposes Unity's audio filter callback as a C# event so other components can
// tap the sample stream flowing through this GameObject's audio pipeline.
public class AudioOutCapture : MonoBehaviour
{
    /// <summary>Raised per audio buffer with the interleaved samples and the channel count.</summary>
    public event Action<float[], int> OnAudioFrame;

    // Unity message: invoked for each audio buffer passing through this object.
    void OnAudioFilterRead(float[] frame, int channels)
    {
        OnAudioFrame?.Invoke(frame, channels);
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4d8a5b2f1284e0e49b5df5c6300295d0
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,9 @@

namespace Photon.Voice.Unity
{
// Payload for the "voice created" notification: bundles the newly created
// outgoing voice with the description of the audio input feeding it.
public class PhotonVoiceCreatedParams
{
    // The local (outgoing) voice that was just created.
    public Voice.LocalVoice Voice { get; set; }
    // Describes the audio input of the voice.
    public Voice.IAudioDesc AudioDesc { get; set; }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 64814f7cb6abfbf40b6d4db1fa05642e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,66 @@
using UnityEngine;
using System;
using System.Linq;
using System.Collections.Generic;
using System.Collections;
namespace Photon.Voice.Unity
{
// Enumerates microphone devices reported by UnityEngine.Microphone.
public class AudioInEnumerator : DeviceEnumeratorBase
{
    public AudioInEnumerator(ILogger logger) : base(logger)
    {
        Refresh();
    }

    // Re-queries Unity for the current microphone list.
    public override void Refresh()
    {
        devices = UnityMicrophone.devices
            .Select(name => new DeviceInfo(name))
            .ToList();
    }

#if UNITY_WEBGL
    public override bool IsSupported => false;
    public override string Error { get { return "Current platform " + Application.platform + " is not supported by AudioInEnumerator."; } }
#else
    public override string Error => null;
#endif

    public override void Dispose()
    {
    }
}
#if PHOTON_VOICE_VIDEO_ENABLE
// Enumerates webcams via UnityEngine.WebCamTexture.
public class VideoInEnumerator : DeviceEnumeratorBase
{
    public VideoInEnumerator(ILogger logger) : base(logger)
    {
        Refresh();
    }

    // Re-queries Unity for the current webcam list.
    public override void Refresh()
    {
        devices = UnityEngine.WebCamTexture.devices
            .Select(cam => new DeviceInfo(cam.name))
            .ToList();
    }

    public override string Error => null;

    public override void Dispose()
    {
    }
}
#endif
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 0a7107a0602d95947b9dba4a3d31b1a1
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,12 @@
using UnityEngine;
namespace Photon.Voice.Unity
{
/// <summary>Routes Photon Voice log calls to the Unity console.</summary>
public class Logger : ILogger
{
    public void LogError(string fmt, params object[] args) => Debug.LogErrorFormat(fmt, args);
    public void LogWarning(string fmt, params object[] args) => Debug.LogWarningFormat(fmt, args);
    // Unity has no dedicated debug level, so info and debug share Debug.LogFormat.
    public void LogInfo(string fmt, params object[] args) => Debug.LogFormat(fmt, args);
    public void LogDebug(string fmt, params object[] args) => Debug.LogFormat(fmt, args);
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b2fbbad5642e37e45b778dfa7336b55c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,107 @@
using UnityEngine;
using System;
using System.Linq;
namespace Photon.Voice.Unity
{
// Wraps UnityEngine.Microphone with Voice.IAudioStream interface.
public class MicWrapper : IAudioReader<float>
{
    private AudioClip mic;      // looping 1-second capture clip Unity records into
    private string device;      // device name used for Start/GetPosition/End (null = default)
    ILogger logger;

    // Starts Unity microphone capture on the given device (null/empty = default).
    // On failure, Error is set non-null and the instance is unusable.
    public MicWrapper(string device, int suggestedFrequency, ILogger logger)
    {
        try
        {
            this.device = device;
            this.logger = logger;
            if (UnityMicrophone.devices.Length < 1)
            {
                Error = "No microphones found (UnityMicrophone.devices is empty)";
                logger.LogError("[PV] MicWrapper: " + Error);
                return;
            }
            if (!string.IsNullOrEmpty(device) && !UnityMicrophone.devices.Contains(device))
            {
                logger.LogError(string.Format("[PV] MicWrapper: \"{0}\" is not a valid Unity microphone device, falling back to default one", device));
                // Fix: keep the field in sync with the local. Start() below uses the
                // local while GetPosition()/End() use the field; previously the field
                // kept the invalid name, so reads and teardown addressed a different
                // device than the one actually started.
                this.device = device = null;
            }
            int minFreq;
            int maxFreq;
            logger.LogInfo("[PV] MicWrapper: initializing microphone '{0}', suggested frequency = {1}).", device, suggestedFrequency);
            UnityMicrophone.GetDeviceCaps(device, out minFreq, out maxFreq);
            var frequency = suggestedFrequency;
            // minFreq = maxFreq = 44100; // test like android client
            if (suggestedFrequency < minFreq || maxFreq != 0 && suggestedFrequency > maxFreq)
            {
                logger.LogWarning("[PV] MicWrapper does not support suggested frequency {0} (min: {1}, max: {2}). Setting to {2}",
                    suggestedFrequency, minFreq, maxFreq);
                frequency = maxFreq;
            }
            // Looping 1-second clip: Unity keeps overwriting it; Read() chases the
            // write position below.
            this.mic = UnityMicrophone.Start(device, true, 1, frequency);
            logger.LogInfo("[PV] MicWrapper: microphone '{0}' initialized, frequency = {1}, channels = {2}.", device, this.mic.frequency, this.mic.channels);
        }
        catch (Exception e)
        {
            Error = e.ToString();
            if (Error == null) // should never happen but since Error used as validity flag, make sure that it's not null
            {
                Error = "Exception in MicWrapper constructor";
            }
            logger.LogError("[PV] MicWrapper: " + Error);
        }
    }

    public int SamplingRate { get { return Error == null ? this.mic.frequency : 0; } }
    public int Channels { get { return Error == null ? this.mic.channels : 0; } }
    // Non-null indicates the instance is unusable (used as validity flag).
    public string Error { get; private set; }

    public void Dispose()
    {
        UnityMicrophone.End(this.device);
    }

    private int micPrevPos;   // mic position at the previous Read(), for wrap detection
    private int micLoopCnt;   // number of times the looping clip has wrapped
    private int readAbsPos;   // absolute (unwrapped) sample position we have consumed up to

    // Copies the next chunk of captured samples into buffer. Returns false when
    // not enough new data has been recorded yet (or on error).
    public bool Read(float[] buffer)
    {
        if (Error != null)
        {
            return false;
        }
        int micPos = UnityMicrophone.GetPosition(this.device);
        // loop detection: position moving backwards means the 1-second clip wrapped
        if (micPos < micPrevPos)
        {
            micLoopCnt++;
        }
        micPrevPos = micPos;
        var micAbsPos = micLoopCnt * this.mic.samples + micPos;
        if (mic.channels == 0)
        {
            Error = "Number of channels is 0 in Read()";
            logger.LogError("[PV] MicWrapper: " + Error);
            return false;
        }
        var bufferSamplesCount = buffer.Length / mic.channels;
        var nextReadPos = this.readAbsPos + bufferSamplesCount;
        // Only read once a full buffer of new samples is available behind the writer.
        if (nextReadPos < micAbsPos)
        {
            this.mic.GetData(buffer, this.readAbsPos % this.mic.samples);
            this.readAbsPos = nextReadPos;
            return true;
        }
        else
        {
            return false;
        }
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 5d3c3acd76d02834cab32fd59144e27b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,343 @@
using UnityEngine;
using System;
using System.Linq;
namespace Photon.Voice.Unity
{
// Captures microphone audio by playing the mic clip (muted) through an
// AudioSource and intercepting the samples with an AudioOutCapture component,
// pushing each frame to the voice pipeline via SetCallback.
public class MicWrapperPusher : IAudioPusher<float>
{
    private AudioSource audioSource;
    private AudioClip mic;                 // looping 1-second Unity capture clip
    private string device;                 // device name used by UnityMicrophone.End (null/empty = default)
    private ILogger logger;
    private AudioOutCapture audioOutCapture;
    private int sampleRate;                // project audio output sample rate
    private int channels;                  // project audio output channel count (1 or 2)
    private bool destroyGameObjectOnStop;  // destroy the AudioSource GameObject in Dispose()

    // Validates mic availability and the requested device name, falling back to
    // the default device. Sets Error and returns false when no mic exists.
    private bool SelectInputDevice(ref string device)
    {
        if (UnityMicrophone.devices.Length < 1)
        {
            this.Error = "No microphones found (Microphone.devices is empty)";
            this.logger.LogError("[PV] MicWrapperPusher: {0}", this.Error);
            return false;
        }
        if (!string.IsNullOrEmpty(device) && !UnityMicrophone.devices.Contains(device))
        {
            this.logger.LogError("[PV] MicWrapperPusher: \"{0}\" is not a valid Unity microphone device, falling back to default one", device);
            device = UnityMicrophone.devices[0];
            // Fix: keep the field in sync with the local — UnityMicrophone.End(this.device)
            // in Dispose() must name the device actually started, not the invalid one.
            this.device = device;
        }
        return true;
    }

    // Reads the project output format into sampleRate/channels. Only Mono and
    // Stereo are supported; otherwise sets Error and returns false.
    private bool DetectOutputFormat()
    {
        this.sampleRate = AudioSettings.outputSampleRate;
        switch (AudioSettings.speakerMode)
        {
            case AudioSpeakerMode.Mono: this.channels = 1; return true;
            case AudioSpeakerMode.Stereo: this.channels = 2; return true;
            default:
                this.Error = string.Concat("Only Mono and Stereo project speaker mode supported. Current mode is ", AudioSettings.speakerMode);
                this.logger.LogError("[PV] MicWrapperPusher: {0}", this.Error);
                return false;
        }
    }

    // Clamps the suggested capture frequency to the device capabilities
    // (maxFreq == 0 means any frequency is supported).
    private int ClampFrequency(string device, int suggestedFrequency)
    {
        int minFreq;
        int maxFreq;
        this.logger.LogInfo("[PV] MicWrapperPusher: initializing microphone '{0}', suggested frequency = {1}).", device, suggestedFrequency);
        UnityMicrophone.GetDeviceCaps(device, out minFreq, out maxFreq);
        // minFreq = maxFreq = 44100; // test like android client
        if (suggestedFrequency < minFreq || maxFreq != 0 && suggestedFrequency > maxFreq)
        {
            this.logger.LogWarning("[PV] MicWrapperPusher does not support suggested frequency {0} (min: {1}, max: {2}). Setting to {2}",
                suggestedFrequency, minFreq, maxFreq);
            return maxFreq;
        }
        return suggestedFrequency;
    }

    // Ensures an enabled AudioOutCapture component on the AudioSource's GameObject.
    private void EnsureCaptureComponent()
    {
        this.audioOutCapture = this.audioSource.gameObject.GetComponent<AudioOutCapture>();
        if (ReferenceEquals(null, this.audioOutCapture) || !this.audioOutCapture)
        {
            this.audioOutCapture = this.audioSource.gameObject.AddComponent<AudioOutCapture>();
        }
        if (!this.audioOutCapture.enabled)
        {
            this.logger.LogWarning("[PV] MicWrapperPusher: AudioOutCapture component disabled, enabling it.");
            this.audioOutCapture.enabled = true;
        }
    }

    // Starts Unity microphone capture and routes it (muted) through the AudioSource
    // so OnAudioFilterRead sees the samples without audible playback.
    private void StartCapture(string device, int frequency)
    {
        this.mic = UnityMicrophone.Start(device, true, 1, frequency);
        this.audioSource.mute = true;
        this.audioSource.volume = 0f;
        this.audioSource.clip = this.mic;
        this.audioSource.loop = true;
        this.audioSource.Play();
        this.logger.LogInfo("[PV] MicWrapperPusher: microphone '{0}' initialized, frequency = in:{1}|out:{2}, channels = in:{3}|out:{4}.", device, this.mic.frequency, this.SamplingRate, this.mic.channels, this.Channels);
    }

    // Records a constructor exception in Error (kept non-null, it doubles as
    // the validity flag) and logs it.
    private void HandleCtorException(Exception e)
    {
        this.Error = e.ToString();
        if (this.Error == null) // should never happen but since Error used as validity flag, make sure that it's not null
        {
            this.Error = "Exception in MicWrapperPusher constructor";
        }
        this.logger.LogError("[PV] MicWrapperPusher: {0}", this.Error);
    }

    // Uses the given AudioSource (enabling/activating it as needed).
    public MicWrapperPusher(string device, AudioSource aS, int suggestedFrequency, ILogger lg, bool destroyOnStop = true)
    {
        try
        {
            this.logger = lg;
            this.device = device;
            this.audioSource = aS;
            this.destroyGameObjectOnStop = destroyOnStop;
            if (!this.SelectInputDevice(ref device)) return;
            if (!this.DetectOutputFormat()) return;
            int frequency = this.ClampFrequency(device, suggestedFrequency);
            if (!this.audioSource.enabled)
            {
                this.logger.LogWarning("[PV] MicWrapperPusher: AudioSource component disabled, enabling it.");
                this.audioSource.enabled = true;
            }
            if (!this.audioSource.gameObject.activeSelf)
            {
                this.logger.LogWarning("[PV] MicWrapperPusher: AudioSource GameObject inactive, activating it.");
                this.audioSource.gameObject.SetActive(true);
            }
            if (!this.audioSource.gameObject.activeInHierarchy)
            {
                this.Error = "AudioSource GameObject is not active in hierarchy, audio input can't work.";
                this.logger.LogError("[PV] MicWrapperPusher: {0}", this.Error);
                return;
            }
            this.EnsureCaptureComponent();
            this.StartCapture(device, frequency);
        }
        catch (Exception e)
        {
            this.HandleCtorException(e);
        }
    }

    // Uses (or creates) an AudioSource on the given GameObject; creates a fresh
    // GameObject when gO is null or destroyed.
    public MicWrapperPusher(string device, GameObject gO, int suggestedFrequency, ILogger lg, bool destroyOnStop = true)
    {
        try
        {
            this.logger = lg;
            this.device = device;
            this.destroyGameObjectOnStop = destroyOnStop;
            if (!this.SelectInputDevice(ref device)) return;
            if (!this.DetectOutputFormat()) return;
            int frequency = this.ClampFrequency(device, suggestedFrequency);
            if (!gO || gO == null)
            {
                this.logger.LogWarning("[PV] MicWrapperPusher: AudioSource GameObject is destroyed or null. Creating a new one.");
                gO = new GameObject("[PV] MicWrapperPusher: AudioSource + AudioOutCapture");
                this.audioSource = gO.AddComponent<AudioSource>();
                this.audioOutCapture = this.audioSource.gameObject.AddComponent<AudioOutCapture>();
            }
            else
            {
                if (!gO.activeSelf)
                {
                    this.logger.LogWarning("[PV] MicWrapperPusher: AudioSource GameObject inactive, activating it.");
                    gO.SetActive(true);
                }
                if (!gO.activeInHierarchy)
                {
                    this.Error = "AudioSource GameObject is not active in hierarchy, audio input can't work.";
                    this.logger.LogError("[PV] MicWrapperPusher: {0}", this.Error);
                    return;
                }
                this.audioSource = gO.GetComponent<AudioSource>();
                if (ReferenceEquals(null, this.audioSource) || !this.audioSource)
                {
                    this.audioSource = gO.AddComponent<AudioSource>();
                }
                if (!this.audioSource.enabled)
                {
                    this.logger.LogWarning("[PV] MicWrapperPusher: AudioSource component disabled, enabling it.");
                    this.audioSource.enabled = true;
                }
                this.EnsureCaptureComponent();
            }
            this.StartCapture(device, frequency);
        }
        catch (Exception e)
        {
            this.HandleCtorException(e);
        }
    }

    // Creates a new AudioSource GameObject under the given parent transform
    // (or at scene root when the parent is null/destroyed).
    public MicWrapperPusher(string device, Transform parentTransform, int suggestedFrequency, ILogger lg, bool destroyOnStop = true)
    {
        try
        {
            this.logger = lg;
            this.device = device;
            this.destroyGameObjectOnStop = destroyOnStop;
            if (!this.SelectInputDevice(ref device)) return;
            if (!this.DetectOutputFormat()) return;
            int frequency = this.ClampFrequency(device, suggestedFrequency);
            GameObject gO = new GameObject("[PV] MicWrapperPusher: AudioSource + AudioOutCapture");
            if (ReferenceEquals(null, parentTransform) || !parentTransform)
            {
                this.logger.LogWarning("[PV] MicWrapperPusher: Parent transform passed is destroyed or null. Creating AudioSource GameObject at root.");
            }
            else
            {
                gO.transform.SetParent(parentTransform, false);
                if (!gO.activeSelf)
                {
                    this.logger.LogWarning("[PV] MicWrapperPusher: AudioSource GameObject inactive, activating it.");
                    gO.gameObject.SetActive(true);
                }
                if (!gO.activeInHierarchy)
                {
                    this.Error = "AudioSource GameObject is not active in hierarchy, audio input can't work.";
                    this.logger.LogError("[PV] MicWrapperPusher: {0}", this.Error);
                    return;
                }
            }
            this.audioSource = gO.AddComponent<AudioSource>();
            this.audioOutCapture = this.audioSource.gameObject.AddComponent<AudioOutCapture>();
            this.StartCapture(device, frequency);
        }
        catch (Exception e)
        {
            this.HandleCtorException(e);
        }
    }

    private float[] frame2 = new float[0];

    // Receives each audio buffer from AudioOutCapture, forwards a private copy
    // downstream and silences the playback buffer.
    private void AudioOutCaptureOnOnAudioFrame(float[] frame, int channelsNumber)
    {
        if (channelsNumber != this.Channels)
        {
            this.logger.LogWarning("[PV] MicWrapperPusher: channels number mismatch; expected:{0} got:{1}.", this.Channels, channelsNumber);
        }
        if (this.frame2.Length != frame.Length)
        {
            this.frame2 = new float[frame.Length];
        }
        Array.Copy(frame, this.frame2, frame.Length);
        // Fix: push the private copy, not Unity's buffer. The original pushed
        // 'frame' (leaving the frame2 copy dead) and then zeroed it, handing
        // downstream a buffer that is immediately cleared and reused by Unity.
        this.pushCallback(this.frame2);
        // Zero Unity's buffer so the captured mic audio is not played back.
        Array.Clear(frame, 0, frame.Length);
    }

    private Action<float[]> pushCallback;

    // Registers the consumer of captured frames and starts forwarding audio.
    public void SetCallback(Action<float[]> callback, ObjectFactory<float[], int> bufferFactory)
    {
        this.pushCallback = callback;
        this.audioOutCapture.OnAudioFrame += this.AudioOutCaptureOnOnAudioFrame;
    }

    // Unsubscribes, stops the microphone, and optionally destroys the GameObject.
    public void Dispose()
    {
        if (this.pushCallback != null && this.audioOutCapture != null)
        {
            this.audioOutCapture.OnAudioFrame -= this.AudioOutCaptureOnOnAudioFrame;
        }
        UnityMicrophone.End(this.device);
        if (this.destroyGameObjectOnStop && this.audioSource != null)
        {
            UnityEngine.Object.Destroy(this.audioSource.gameObject);
        }
    }

    public int SamplingRate { get { return this.Error == null ? this.sampleRate : 0; } }
    public int Channels { get { return this.Error == null ? this.channels : 0; } }
    // Non-null indicates the instance is unusable (used as validity flag).
    public string Error { get; private set; }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4d6ac92ecc68f5844986af1f12ba3b60
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,52 @@
using UnityEngine;
using System.Collections.Generic;
using System;
using System.Runtime.InteropServices;
namespace Photon.Voice.Unity
{
// Plays back input audio via Unity AudioSource
// May consume audio packets in thread other than Unity's main thread
public class UnityAudioOut : AudioOutDelayControl<float>
{
    protected readonly AudioSource source;
    protected AudioClip clip;   // looping buffer clip the base class writes into

    public UnityAudioOut(AudioSource audioSource, PlayDelayConfig playDelayConfig, ILogger logger, string logPrefix, bool debugInfo)
        : base(true, playDelayConfig, logger, "[PV] [Unity] AudioOut" + (logPrefix == "" ? "" : " " + logPrefix), debugInfo)
    {
        this.source = audioSource;
    }

    // Current playback position of the AudioSource, in samples.
    override public int OutPos { get { return source.timeSamples; } }

    // Creates the looping buffer clip with the given format.
    override public void OutCreate(int frequency, int channels, int bufferSamples)
    {
        this.source.loop = true;
        // using streaming clip leads to too long delays
        this.clip = AudioClip.Create("UnityAudioOut", bufferSamples, channels, frequency, false);
        this.source.clip = clip;
    }

    override public void OutStart()
    {
        this.source.Play();
    }

    // Writes a chunk of samples into the buffer clip at the given offset.
    override public void OutWrite(float[] data, int offsetSamples)
    {
        clip.SetData(data, offsetSamples);
    }

    override public void Stop()
    {
        base.Stop();
        // Fix: the AudioSource may be null or already destroyed with its GameObject
        // by the time Stop() runs; the original called source.Stop() before the
        // null check and would throw. Guard all AudioSource access.
        if (this.source != null)
        {
            this.source.Stop();
            this.source.clip = null;
            clip = null;
        }
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: c7cbffe9dfdd4b547a75479a23097ff4
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,72 @@
namespace Photon.Voice.Unity
{
#if UNITY_WEBGL
using System;
#endif
using UnityEngine;
/// <summary>A wrapper around UnityEngine.Microphone to be able to safely use Microphone and compile for WebGL.</summary>
public static class UnityMicrophone
{
#if UNITY_WEBGL
    // WebGL builds: UnityEngine.Microphone does not exist, so every member
    // either returns an inert value or throws.
    private static readonly string[] _devices = new string[0];
    public static string[] devices { get { return _devices; } }
    public static bool IsRecording(string deviceName) { return false; }
    public static void End(string deviceName)
    {
        throw new NotImplementedException("Unity Microphone not supported on WebGL");
    }
    public static void GetDeviceCaps(string deviceName, out int minFreq, out int maxFreq)
    {
        throw new NotImplementedException("Unity Microphone not supported on WebGL");
    }
    public static int GetPosition(string deviceName)
    {
        throw new NotImplementedException("Unity Microphone not supported on WebGL");
    }
    public static AudioClip Start(string deviceName, bool loop, int lengthSec, int frequency)
    {
        throw new NotImplementedException("Unity Microphone not supported on WebGL");
    }
#else
    // All other platforms: straight pass-throughs to UnityEngine.Microphone.
    public static string[] devices { get { return Microphone.devices; } }
    public static bool IsRecording(string deviceName) { return Microphone.IsRecording(deviceName); }
    public static void End(string deviceName) { Microphone.End(deviceName); }
    public static void GetDeviceCaps(string deviceName, out int minFreq, out int maxFreq) { Microphone.GetDeviceCaps(deviceName, out minFreq, out maxFreq); }
    public static int GetPosition(string deviceName) { return Microphone.GetPosition(deviceName); }
    public static AudioClip Start(string deviceName, bool loop, int lengthSec, int frequency) { return Microphone.Start(deviceName, loop, lengthSec, frequency); }
#endif
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 0999de601f88ae74bb58440b4898d84a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: