non-vr lobby, version fix

This commit is contained in:
joonasp
2022-06-29 14:45:17 +03:00
parent 5774be9822
commit 04baadfad1
1774 changed files with 573069 additions and 1533 deletions

View File

@@ -0,0 +1,10 @@
fileFormatVersion: 2
guid: 6ae3234cc02f946ac8295e5e46e886d3
folderAsset: yes
timeCreated: 1534868080
licenseType: Store
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,287 @@
#if UNITY_STANDALONE_OSX || UNITY_EDITOR_OSX || UNITY_SWITCH || UNITY_IOS
#define PHOTON_AUDIO_CHANGE_IN_NOTIFIER
#endif
namespace Photon.Voice.Unity {
    using Voice;
    using UnityEngine;

    /// <summary>
    /// This component is useful to handle audio device and config changes.
    /// </summary>
    [RequireComponent(typeof(Recorder))]
    public class AudioChangesHandler : VoiceComponent {
        // Native-plugin change notifier; non-null only while successfully subscribed (see SubscribeToSystemChanges).
        private IAudioInChangeNotifier photonMicChangeNotifier;
        // Snapshot of Unity's audio configuration, compared against in OnAudioConfigChanged to detect what changed.
        private AudioConfiguration audioConfiguration;
        private Recorder recorder;

        /// <summary>
        /// Try to start recording when we get devices change notification and recording is not started.
        /// </summary>
        /// <remarks>
        /// On some platforms we can't make sure that a device change notification could mean that at least a microphone device is now available.
        /// Besides, the auto start of the recording might not happen if other necessary conditions set in Recorder are not met:
        /// e.g. <see cref="Recorder.RecordOnlyWhenEnabled"/> or <see cref="Recorder.RecordOnlyWhenJoined"/> etc.
        /// or if the Recorder has been stopped explicitly via <see cref="Recorder.StopRecording"/> call or <see cref="Recorder.IsRecording"/> set to false.
        /// </remarks>
        [Tooltip("Try to start recording when we get devices change notification and recording is not started.")]
        public bool StartWhenDeviceChange = true;

        /// <summary>
        /// Try to react to device change notification when Recorder is started.
        /// </summary>
        /// <remarks>
        /// This requires <see cref="UseNativePluginChangeNotifier"/> or <see cref="UseOnAudioConfigurationChanged"/> to be true.
        /// </remarks>
        [Tooltip("Try to react to device change notification when Recorder is started.")]
        public bool HandleDeviceChange = true;

        /// <summary>
        /// Try to react to audio config change notification when Recorder is started.
        /// </summary>
        /// <remarks>
        /// This requires <see cref="UseOnAudioConfigurationChanged"/> to be true.
        /// </remarks>
        [Tooltip("Try to react to audio config change notification when Recorder is started.")]
        public bool HandleConfigChange = true;

        /// <summary>
        /// Whether or not to make use of Photon's AudioInChangeNotifier native plugin.
        /// </summary>
        /// <remarks>
        /// This may disable <see cref="HandleDeviceChange"/> if this and <see cref="UseOnAudioConfigurationChanged"/> are both false.
        /// </remarks>
        [Tooltip("Whether or not to make use of Photon's AudioInChangeNotifier native plugin.")]
        public bool UseNativePluginChangeNotifier = true;

        /// <summary>
        /// Whether or not to make use of Unity's OnAudioConfigurationChanged.
        /// </summary>
        /// <remarks>
        /// This is needed for <see cref="HandleConfigChange"/> and may also disable
        /// <see cref="HandleDeviceChange"/> if this and <see cref="UseNativePluginChangeNotifier"/> are both false.
        /// </remarks>
        [Tooltip("Whether or not to make use of Unity's OnAudioConfigurationChanged.")]
        public bool UseOnAudioConfigurationChanged = true;

#if UNITY_EDITOR || UNITY_ANDROID
        /// <summary>
        /// If the recorder is set to use microphone as source with type Photon, audio device changes are handled within the native plugin by default.
        /// If you set this to true, it will also be handled via this component logic.
        /// </summary>
        [Tooltip("If the recorder is set to use microphone as source with type Photon, audio device changes are handled within the native plugin by default. " +
                 "If you set this to true, it will also be handled via this component logic.")]
        public bool Android_AlwaysHandleDeviceChange;
#endif
#if UNITY_EDITOR || UNITY_IOS
        /// <summary>
        /// If the recorder is set to use microphone as source with type Photon, audio device changes are handled within the native plugin by default.
        /// If you set this to true, it will also be handled via this component logic.
        /// </summary>
        [Tooltip("If the recorder is set to use microphone as source with type Photon, audio device changes are handled within the native plugin by default. " +
                 "If you set this to true, it will also be handled via this component logic.")]
        public bool iOS_AlwaysHandleDeviceChange;
#endif

        private bool subscribedToSystemChangesPhoton, subscribedToSystemChangesUnity;

        /// <summary>
        /// Caches the attached Recorder, takes over change handling from it and subscribes to system notifications.
        /// </summary>
        protected override void Awake() {
            base.Awake();
            this.recorder = this.GetComponent<Recorder>();
            // This component takes over change handling, so disable the Recorder's own built-in reaction.
            this.recorder.ReactOnSystemChanges = false;
            this.audioConfiguration = AudioSettings.GetConfiguration();
            this.SubscribeToSystemChanges();
        }

        private void OnDestroy() {
            this.UnsubscribeFromSystemChanges();
        }

#if PHOTON_AUDIO_CHANGE_IN_NOTIFIER
        // Callback invoked by Photon's native AudioInChangeNotifier plugin when input devices change.
        private void PhotonMicrophoneChangeDetected() {
            if (this.Logger.IsDebugEnabled) {
                this.Logger.LogDebug("Microphones change detected by Photon native plugin.");
            }
            // Skip if a change is already pending or this notification source is disabled.
            if (!this.recorder.MicrophoneDeviceChangeDetected && this.UseNativePluginChangeNotifier) {
                this.OnDeviceChange();
            }
        }
#endif

        // Common device-change reaction for both Unity and Photon notification sources.
        private void OnDeviceChange() {
            if (!this.recorder.IsRecording) {
                if (this.StartWhenDeviceChange) {
                    this.recorder.MicrophoneDeviceChangeDetected = true;
                    if (this.Logger.IsInfoEnabled) {
                        this.Logger.LogInfo("An attempt to auto start recording should follow shortly.");
                    }
                } else if (this.Logger.IsInfoEnabled) {
                    this.Logger.LogInfo("Device change detected but will not try to start recording as StartWhenDeviceChange is false.");
                }
            } else if (this.HandleDeviceChange) {
#if !UNITY_EDITOR && (UNITY_ANDROID || UNITY_IOS)
                // On mobile, the Photon microphone type already handles device changes inside the native plugin;
                // skip unless the user explicitly opted in via the platform-specific override flag.
                if (this.recorder.SourceType == Recorder.InputSourceType.Microphone && this.recorder.MicrophoneType == Recorder.MicType.Photon) {
#if UNITY_ANDROID
                    if (!this.Android_AlwaysHandleDeviceChange) {
#elif UNITY_IOS
                    if (!this.iOS_AlwaysHandleDeviceChange) {
#endif
                        if (this.Logger.IsInfoEnabled) {
                            this.Logger.LogInfo("Device change notification ignored when using Photon microphone type as this is handled internally for iOS and Android via native plugins.");
                        }
                        return;
                    }
                }
#endif
                this.recorder.MicrophoneDeviceChangeDetected = true;
            } else if (this.Logger.IsInfoEnabled) {
                this.Logger.LogInfo("Device change detected but will not try to handle this as HandleDeviceChange is false.");
            }
        }

        // Subscribes to Photon's native notifier (on supported platforms) and Unity's OnAudioConfigurationChanged.
        private void SubscribeToSystemChanges() {
            if (this.Logger.IsDebugEnabled) {
                this.Logger.LogDebug("Subscribing to system (audio) changes.");
            }
#if PHOTON_AUDIO_CHANGE_IN_NOTIFIER
            if (this.subscribedToSystemChangesPhoton) {
                if (this.Logger.IsWarningEnabled) {
                    this.Logger.LogWarning("Already subscribed to audio changes via Photon.");
                }
            } else {
                this.photonMicChangeNotifier = Platform.CreateAudioInChangeNotifier(this.PhotonMicrophoneChangeDetected, this.Logger);
                if (this.photonMicChangeNotifier.IsSupported) {
                    if (this.photonMicChangeNotifier.Error == null) {
                        this.subscribedToSystemChangesPhoton = true;
                        if (this.Logger.IsInfoEnabled) {
                            this.Logger.LogInfo("Subscribed to audio in change notifications via Photon plugin.");
                        }
                    } else if (this.Logger.IsErrorEnabled) {
                        // FIX: this error was previously logged unconditionally, i.e. also on success with a null Error.
                        this.Logger.LogError("Error creating instance of photonMicChangeNotifier: {0}", this.photonMicChangeNotifier.Error);
                    }
                } else if (this.Logger.IsErrorEnabled) {
                    this.Logger.LogError("Unexpected: Photon's AudioInChangeNotifier not supported on current platform: {0}", CurrentPlatform);
                }
                // Release the notifier if subscription did not succeed.
                if (!this.subscribedToSystemChangesPhoton) {
                    this.photonMicChangeNotifier.Dispose();
                    this.photonMicChangeNotifier = null;
                }
            }
#else
            if (this.Logger.IsInfoEnabled) {
                this.Logger.LogInfo("Skipped subscribing to audio change notifications via Photon's AudioInChangeNotifier as not supported on current platform: {0}", CurrentPlatform);
            }
            if (this.subscribedToSystemChangesPhoton) {
                if (this.Logger.IsErrorEnabled) {
                    this.Logger.LogError("Unexpected: subscribedToSystemChangesPhoton is set to true while platform is not supported!.");
                }
            }
#endif
            if (this.subscribedToSystemChangesUnity) {
                if (this.Logger.IsWarningEnabled) {
                    this.Logger.LogWarning("Already subscribed to audio changes via Unity OnAudioConfigurationChanged callback.");
                }
            } else {
                AudioSettings.OnAudioConfigurationChanged += this.OnAudioConfigChanged;
                this.subscribedToSystemChangesUnity = true;
                if (this.Logger.IsInfoEnabled) {
                    this.Logger.LogInfo("Subscribed to audio configuration changes via Unity OnAudioConfigurationChanged callback.");
                }
            }
        }

        // Unity callback: fired when an audio device changes (deviceWasChanged == true) or AudioSettings.Reset was called.
        private void OnAudioConfigChanged(bool deviceWasChanged) {
            if (this.Logger.IsInfoEnabled) {
                this.Logger.LogInfo("OnAudioConfigurationChanged: {0}", deviceWasChanged ? "Device was changed." : "AudioSettings.Reset was called.");
            }
            // Diff the new configuration against the cached snapshot field by field.
            AudioConfiguration config = AudioSettings.GetConfiguration();
            bool audioConfigChanged = false;
            if (config.dspBufferSize != this.audioConfiguration.dspBufferSize) {
                if (this.Logger.IsInfoEnabled) {
                    this.Logger.LogInfo("OnAudioConfigurationChanged: dspBufferSize old={0} new={1}", this.audioConfiguration.dspBufferSize, config.dspBufferSize);
                }
                audioConfigChanged = true;
            }
            if (config.numRealVoices != this.audioConfiguration.numRealVoices) {
                if (this.Logger.IsInfoEnabled) {
                    this.Logger.LogInfo("OnAudioConfigurationChanged: numRealVoices old={0} new={1}", this.audioConfiguration.numRealVoices, config.numRealVoices);
                }
                audioConfigChanged = true;
            }
            if (config.numVirtualVoices != this.audioConfiguration.numVirtualVoices) {
                if (this.Logger.IsInfoEnabled) {
                    this.Logger.LogInfo("OnAudioConfigurationChanged: numVirtualVoices old={0} new={1}", this.audioConfiguration.numVirtualVoices, config.numVirtualVoices);
                }
                audioConfigChanged = true;
            }
            if (config.sampleRate != this.audioConfiguration.sampleRate) {
                if (this.Logger.IsInfoEnabled) {
                    this.Logger.LogInfo("OnAudioConfigurationChanged: sampleRate old={0} new={1}", this.audioConfiguration.sampleRate, config.sampleRate);
                }
                audioConfigChanged = true;
            }
            if (config.speakerMode != this.audioConfiguration.speakerMode) {
                if (this.Logger.IsInfoEnabled) {
                    this.Logger.LogInfo("OnAudioConfigurationChanged: speakerMode old={0} new={1}", this.audioConfiguration.speakerMode, config.speakerMode);
                }
                audioConfigChanged = true;
            }
            if (audioConfigChanged) {
                this.audioConfiguration = config;
            }
            // Only react if a change is not already pending on the Recorder.
            if (!this.recorder.MicrophoneDeviceChangeDetected) {
                if (audioConfigChanged) {
                    if (this.recorder.IsRecording) {
                        if (this.HandleConfigChange) {
                            if (this.Logger.IsInfoEnabled) {
                                this.Logger.LogInfo("Config change detected; an attempt to auto start recording should follow shortly.");
                            }
                            this.recorder.MicrophoneDeviceChangeDetected = true;
                        } else if (this.Logger.IsInfoEnabled) {
                            this.Logger.LogInfo("Config change detected but will not try to handle this as HandleConfigChange is false.");
                        }
                    } else if (this.Logger.IsInfoEnabled) {
                        this.Logger.LogInfo("Config change detected but ignored as recording not started.");
                    }
                } else if (deviceWasChanged) {
                    if (this.UseOnAudioConfigurationChanged) {
                        this.OnDeviceChange();
                    } else if (this.Logger.IsInfoEnabled) {
                        this.Logger.LogInfo("Device change detected but will not try to handle this as UseOnAudioConfigurationChanged is false.");
                    }
                }
            }
        }

        // Detaches from Unity's callback and disposes the Photon notifier if it was created.
        private void UnsubscribeFromSystemChanges() {
            if (this.subscribedToSystemChangesUnity) {
                AudioSettings.OnAudioConfigurationChanged -= this.OnAudioConfigChanged;
                this.subscribedToSystemChangesUnity = false;
                if (this.Logger.IsInfoEnabled) {
                    this.Logger.LogInfo("Unsubscribed from audio changes via Unity OnAudioConfigurationChanged callback.");
                }
            }
            if (this.subscribedToSystemChangesPhoton) {
                if (this.photonMicChangeNotifier == null) {
                    if (this.Logger.IsErrorEnabled) {
                        this.Logger.LogError("Unexpected: photonMicChangeNotifier is null while subscribedToSystemChangesPhoton is true.");
                    }
                } else {
                    this.photonMicChangeNotifier.Dispose();
                    this.photonMicChangeNotifier = null;
                }
                this.subscribedToSystemChangesPhoton = false;
                if (this.Logger.IsInfoEnabled) {
                    this.Logger.LogInfo("Unsubscribed from audio in change notifications via Photon plugin.");
                }
            }
        }
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b7f95aa3c06eaa84bba4eafdedf12758
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,28 @@
namespace Photon.Voice.Unity
{
    using System.Linq;

    /// <summary>
    /// Extension helpers over <see cref="IDeviceEnumerator"/>; this was added for backwards compatibility.
    /// </summary>
    public static class AudioInEnumeratorEx
    {
        /// <summary>Returns true if any enumerated device has the given integer id.</summary>
        public static bool IDIsValid(this IDeviceEnumerator en, int id)
        {
            return en.Any(d => d.IDInt == id);
        }

        /// <summary>
        /// Returns the name of the device at the given position.
        /// For an out-of-range index this yields the default device value's Name (presumably null — verify against DeviceInfo).
        /// </summary>
        public static string NameAtIndex(this IDeviceEnumerator en, int index)
        {
            return en.ElementAtOrDefault(index).Name;
        }

        /// <summary>Returns the id of the device at the given position, or -1 when the index is out of range.</summary>
        public static int IDAtIndex(this IDeviceEnumerator en, int index)
        {
            // Single pass over the enumerator; the previous Count() + ElementAt() enumerated the devices twice.
            if (index >= 0)
            {
                int i = 0;
                foreach (var d in en)
                {
                    if (i++ == index)
                    {
                        return d.IDInt;
                    }
                }
            }
            return -1;
        }
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7bb061d9fcd0e3e4ba4c36f5a7792e9c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 56896f24dbcb4e947838adee94571a94
folderAsset: yes
timeCreated: 1525440684
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,42 @@
#if UNITY_STANDALONE_OSX || UNITY_EDITOR_OSX || UNITY_SWITCH || UNITY_IOS
#define PHOTON_AUDIO_CHANGE_IN_NOTIFIER
#endif
namespace Photon.Voice.Unity.Editor {
    using Unity;
    using UnityEditor;

    /// <summary>
    /// Custom inspector for <see cref="AudioChangesHandler"/>: hides dependent options
    /// when the notification source they rely on is disabled.
    /// </summary>
    [CustomEditor(typeof(AudioChangesHandler))]
    public class AudioChangesHandlerEditor : Editor {
        public override void OnInspectorGUI() {
            this.serializedObject.UpdateIfRequiredOrScript();
            VoiceLogger.ExposeLogLevel(this.serializedObject, this.target as ILoggable);
            EditorGUI.BeginChangeCheck();
            this.DrawSerializedProperty("StartWhenDeviceChange");
            // HandleConfigChange only has an effect when Unity's callback is in use.
            bool anyNotifierEnabled = this.DrawSerializedProperty("UseOnAudioConfigurationChanged").boolValue;
            if (anyNotifierEnabled) {
                this.DrawSerializedProperty("HandleConfigChange");
            }
#if PHOTON_AUDIO_CHANGE_IN_NOTIFIER
            anyNotifierEnabled |= this.DrawSerializedProperty("UseNativePluginChangeNotifier").boolValue;
#endif
            // Device-change options are only relevant when at least one notification source is active;
            // HandleDeviceChange is drawn (and read) only in that case, matching the original nesting.
            if (anyNotifierEnabled && this.DrawSerializedProperty("HandleDeviceChange").boolValue) {
                this.DrawSerializedProperty("Android_AlwaysHandleDeviceChange");
                this.DrawSerializedProperty("iOS_AlwaysHandleDeviceChange");
            }
            if (EditorGUI.EndChangeCheck()) {
                this.serializedObject.ApplyModifiedProperties();
            }
        }

        // Finds the named property, draws it and hands it back so callers can inspect its value.
        private SerializedProperty DrawSerializedProperty(string propertyName) {
            SerializedProperty property = this.serializedObject.FindProperty(propertyName);
            EditorGUILayout.PropertyField(property);
            return property;
        }
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 9a6ec05c84a196c429f5e48d3a770c22
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,14 @@
{
"name": "PhotonVoice.Editor",
"references": [
"PhotonRealtime",
"PhotonVoice",
"PhotonVoice.API"
],
"optionalUnityReferences": [],
"includePlatforms": [
"Editor"
],
"excludePlatforms": [],
"allowUnsafeCode": false
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: ae8024269c736ee49ba1179cb00214e5
timeCreated: 1538045250
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,321 @@
using System;
using System.IO;
using System.Linq;
using UnityEditor;
using UnityEngine;
using System.Collections.Generic;
namespace Photon.Voice.Unity.Editor
{
    [InitializeOnLoad] // calls static constructor when script is recompiled
    public static class PhotonVoiceEditorUtils
    {
        public const string PHOTON_VIDEO_DEFINE_SYMBOL = "PHOTON_VOICE_VIDEO_ENABLE";
        public const string PHOTON_VIDEO_AVAILABLE_DEFINE_SYMBOL = "PHOTON_VOICE_VIDEO_AVAILABLE";

        // Keeps the PHOTON_VOICE_VIDEO_AVAILABLE define symbol in sync with the presence of the Video folder.
        static PhotonVoiceEditorUtils()
        {
            if (HasVideo)
            {
#if !PHOTON_VOICE_VIDEO_AVAILABLE
                Debug.Log("Photon Video is available");
                Realtime.PhotonEditorUtils.AddScriptingDefineSymbolToAllBuildTargetGroups(PHOTON_VIDEO_AVAILABLE_DEFINE_SYMBOL);
                TriggerRecompile();
#endif
            }
            else
            {
#if PHOTON_VOICE_VIDEO_AVAILABLE
                RemoveScriptingDefineSymbolToAllBuildTargetGroups(PHOTON_VIDEO_AVAILABLE_DEFINE_SYMBOL);
                TriggerRecompile();
#endif
            }
        }

        // triggers this class recompilation after define symbols change
        private static void TriggerRecompile()
        {
            AssetDatabase.ImportAsset("Assets/Photon/PhotonVoice/Code/Editor/PhotonVoiceEditorUtils.cs");
        }

        /// <summary>True if the ChatClient of the Photon Chat API is available. If so, the editor may (e.g.) show additional options in settings.</summary>
        public static bool HasChat
        {
            get
            {
                return Type.GetType("Photon.Chat.ChatClient, Assembly-CSharp") != null || Type.GetType("Photon.Chat.ChatClient, Assembly-CSharp-firstpass") != null || Type.GetType("Photon.Chat.ChatClient, PhotonChat") != null;
            }
        }

        /// <summary>True if the PhotonNetwork of the PUN is available. If so, the editor may (e.g.) show additional options in settings.</summary>
        public static bool HasPun
        {
            get
            {
                return Type.GetType("Photon.Pun.PhotonNetwork, Assembly-CSharp") != null || Type.GetType("Photon.Pun.PhotonNetwork, Assembly-CSharp-firstpass") != null || Type.GetType("Photon.Pun.PhotonNetwork, PhotonUnityNetworking") != null;
            }
        }

        /// <summary>True if the Photon Video sources are present in the project.</summary>
        public static bool HasVideo
        {
            get
            {
                return Directory.Exists("Assets/Photon/PhotonVoice/PhotonVoiceApi/Core/Video");
            }
        }

        [MenuItem("Window/Photon Voice/Remove PUN", true, 1)]
        private static bool RemovePunValidate()
        {
#if PHOTON_UNITY_NETWORKING
            return true;
#else
            return HasPun;
#endif
        }

        // Deletes PUN folders and demos and strips PUN define symbols from the project.
        [MenuItem("Window/Photon Voice/Remove PUN", false, 1)]
        private static void RemovePun()
        {
            DeleteDirectory("Assets/Photon/PhotonVoice/Demos/DemoProximityVoiceChat");
            DeleteDirectory("Assets/Photon/PhotonVoice/Demos/DemoVoicePun");
            DeleteDirectory("Assets/Photon/PhotonVoice/Code/PUN");
            DeleteDirectory("Assets/Photon/PhotonUnityNetworking");
            CleanUpPunDefineSymbols();
        }

        [MenuItem("Window/Photon Voice/Remove Photon Chat", true, 2)]
        private static bool RemovePhotonChatValidate()
        {
            return HasChat;
        }

        [MenuItem("Window/Photon Voice/Remove Photon Chat", false, 2)]
        private static void RemovePhotonChat()
        {
            DeleteDirectory("Assets/Photon/PhotonChat");
        }

        [MenuItem("Window/Photon Voice/Leave a review", false, 3)]
        private static void OpenAssetStorePage()
        {
            Application.OpenURL("https://assetstore.unity.com/packages/tools/audio/photon-voice-2-130518");
        }

#if PHOTON_VOICE_VIDEO_AVAILABLE
#if PHOTON_VOICE_VIDEO_ENABLE
        [MenuItem("Window/Photon Voice/Disable Video", false, 4)]
        private static void DisableVideo()
        {
            RemoveScriptingDefineSymbolToAllBuildTargetGroups(PHOTON_VIDEO_DEFINE_SYMBOL);
            TriggerRecompile();
        }
#else
        [MenuItem("Window/Photon Voice/Enable Video", false, 4)]
        private static void EnableVideo()
        {
            Realtime.PhotonEditorUtils.AddScriptingDefineSymbolToAllBuildTargetGroups(PHOTON_VIDEO_DEFINE_SYMBOL);
            TriggerRecompile();
        }
#endif
#endif

        /// <summary>Deletes a directory and its .meta file, logging a warning if it is missing or not deletable.</summary>
        public static void DeleteDirectory(string path)
        {
            if (Directory.Exists(path))
            {
                if (!FileUtil.DeleteFileOrDirectory(path))
                {
                    Debug.LogWarningFormat("Directory \"{0}\" not deleted.", path);
                }
                DeleteFile(string.Concat(path, ".meta"));
            }
            else
            {
                Debug.LogWarningFormat("Directory \"{0}\" does not exist.", path);
            }
        }

        /// <summary>Deletes a single file, logging a warning if it is missing or not deletable.</summary>
        public static void DeleteFile(string path)
        {
            if (File.Exists(path))
            {
                if (!FileUtil.DeleteFileOrDirectory(path))
                {
                    Debug.LogWarningFormat("File \"{0}\" not deleted.", path);
                }
            }
            else
            {
                Debug.LogWarningFormat("File \"{0}\" does not exist.", path);
            }
        }

        /// <summary>True when the GameObject is a scene instance while the editor is in play mode (not a prefab asset).</summary>
        public static bool IsInTheSceneInPlayMode(GameObject go)
        {
            return Application.isPlaying && !IsPrefab(go);
        }

        /// <summary>
        /// Parses changes-voice.txt for the Photon Voice, PUN2 and Voice API versions.
        /// Any value that cannot be found is returned as null and an error is logged.
        /// </summary>
        public static void GetPhotonVoiceVersionsFromChangeLog(out string photonVoiceVersion, out string punChangelogVersion, out string photonVoiceApiVersion)
        {
            photonVoiceVersion = null;
            punChangelogVersion = null;
            photonVoiceApiVersion = null;
            string filePath = Path.Combine("Assets", "Photon", "PhotonVoice", "changes-voice.txt");
            const string guid = "63aaf8df43de62247af0bbdc549b31b5";
            if (!File.Exists(filePath))
            {
                // Fall back to the GUID lookup in case the file was moved inside the project.
                filePath = AssetDatabase.GUIDToAssetPath(guid);
                Debug.LogFormat("Photon Voice 2's change log file was moved to this path \"{0}\"", filePath);
            }
            if (File.Exists(filePath))
            {
                try
                {
                    using (StreamReader file = new StreamReader(filePath))
                    {
                        // Stop as soon as all three versions are found or the file ends.
                        while (!file.EndOfStream && (string.IsNullOrEmpty(photonVoiceVersion) || string.IsNullOrEmpty(punChangelogVersion) || string.IsNullOrEmpty(photonVoiceApiVersion)))
                        {
                            string line = file.ReadLine();
                            if (!string.IsNullOrWhiteSpace(line))
                            {
                                line = line.Trim();
                                if (line.StartsWith("v2."))
                                {
                                    // A second "v2." header means we left the newest release section: stop.
                                    if (!string.IsNullOrEmpty(photonVoiceVersion))
                                    {
                                        break;
                                    }
                                    photonVoiceVersion = line.TrimStart('v');
                                    continue;
                                }
                                string[] parts = line.Split(null);
                                if (line.StartsWith("PUN2: ") && parts.Length > 1)
                                {
                                    punChangelogVersion = parts[1];
                                    continue;
                                }
                                if (line.StartsWith("PhotonVoiceApi: ") && parts.Length > 2)
                                {
                                    photonVoiceApiVersion = string.Format("rev. {0}", parts[2]);
                                }
                            }
                        }
                    }
                }
                catch (IOException e)
                {
                    Debug.LogErrorFormat("There was an error reading the file \"{0}\": ", filePath);
                    Debug.LogError(e.Message);
                }
            }
            else
            {
                Debug.LogErrorFormat("Photon Voice 2's change log file not found (moved or deleted or not imported? or meta file changed) \"{0}\": ", filePath);
            }
            if (string.IsNullOrEmpty(photonVoiceVersion))
            {
                Debug.LogError("There was an error retrieving Photon Voice version from changelog.");
            }
            if (string.IsNullOrEmpty(punChangelogVersion))
            {
                Debug.LogError("There was an error retrieving PUN2 version from changelog.");
            }
            if (string.IsNullOrEmpty(photonVoiceApiVersion))
            {
                Debug.LogError("There was an error retrieving Photon Voice API version from changelog.");
            }
        }

        /// <summary>
        /// Removes a given scripting define symbol to all build target groups
        /// You can see all scripting define symbols ( not the internal ones, only the one for this project), in the PlayerSettings inspector
        /// </summary>
        /// <param name="defineSymbol">Define symbol.</param>
        public static void RemoveScriptingDefineSymbolToAllBuildTargetGroups(string defineSymbol)
        {
            foreach (BuildTarget target in Enum.GetValues(typeof(BuildTarget)))
            {
                BuildTargetGroup group = BuildPipeline.GetBuildTargetGroup(target);
                if (group == BuildTargetGroup.Unknown)
                {
                    continue;
                }
                var defineSymbols = PlayerSettings.GetScriptingDefineSymbolsForGroup(group).Split(';').Select(d => d.Trim()).ToList();
                // List<T>.Remove already reports whether the symbol was present; no need for a prior Contains check.
                if (defineSymbols.Remove(defineSymbol))
                {
                    try
                    {
                        PlayerSettings.SetScriptingDefineSymbolsForGroup(group, string.Join(";", defineSymbols.ToArray()));
                    }
                    catch (Exception e)
                    {
                        Debug.LogErrorFormat("Could not remove \"{0}\" Scripting Define Symbol for build target: {1} group: {2} {3}", defineSymbol, target, group, e);
                    }
                }
            }
        }

        /// <summary>
        /// Check if a GameObject is a prefab asset or part of a prefab asset, as opposed to an instance in the scene hierarchy
        /// </summary>
        /// <returns><c>true</c>, if a prefab asset or part of it, <c>false</c> otherwise.</returns>
        /// <param name="go">The GameObject to check</param>
        public static bool IsPrefab(GameObject go)
        {
#if UNITY_2021_2_OR_NEWER
            return UnityEditor.SceneManagement.PrefabStageUtility.GetPrefabStage(go) != null || EditorUtility.IsPersistent(go);
#elif UNITY_2018_3_OR_NEWER
            return UnityEditor.Experimental.SceneManagement.PrefabStageUtility.GetPrefabStage(go) != null || EditorUtility.IsPersistent(go);
#else
            return EditorUtility.IsPersistent(go);
#endif
        }

        /// <summary>
        /// Removes PUN2's Script Define Symbols from project
        /// </summary>
        public static void CleanUpPunDefineSymbols()
        {
            foreach (BuildTarget target in Enum.GetValues(typeof(BuildTarget)))
            {
                BuildTargetGroup group = BuildPipeline.GetBuildTargetGroup(target);
                if (group == BuildTargetGroup.Unknown)
                {
                    continue;
                }
                var defineSymbols = PlayerSettings.GetScriptingDefineSymbolsForGroup(group)
                    .Split(';')
                    .Select(d => d.Trim())
                    .ToList();
                List<string> newDefineSymbols = defineSymbols
                    .Where(symbol => !"PHOTON_UNITY_NETWORKING".Equals(symbol) && !symbol.StartsWith("PUN_2_"))
                    .ToList();
                if (newDefineSymbols.Count == defineSymbols.Count)
                {
                    // Nothing to strip for this group; skip the write to avoid dirtying ProjectSettings.
                    continue;
                }
                try
                {
                    PlayerSettings.SetScriptingDefineSymbolsForGroup(group, string.Join(";", newDefineSymbols.ToArray()));
                }
                catch (Exception e)
                {
                    Debug.LogErrorFormat("Could not set clean up PUN2's define symbols for build target: {0} group: {1}, {2}", target, group, e);
                }
            }
        }
    }
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 4e837e30d97d9fd4a984a83cc6db75a7
timeCreated: 1550755478
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,741 @@
#if UNITY_EDITOR_OSX || UNITY_EDITOR_WIN
#define PHOTON_MICROPHONE_ENUMERATOR
#endif
#if UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_ANDROID
#define WEBRTC_AUDIO_DSP_SUPPORTED_PLATFORMS
#endif
namespace Photon.Voice.Unity.Editor
{
using POpusCodec.Enums;
using System;
using System.Linq;
using Unity;
using UnityEditor;
using UnityEngine;
#if UNITY_IOS
using IOS;
#endif
[CustomEditor(typeof(Recorder))]
public class RecorderEditor : Editor
{
private Recorder recorder;
private int unityMicrophoneDeviceIndex;
#if PHOTON_MICROPHONE_ENUMERATOR
private string[] photonDeviceNames;
private int[] photonDeviceIDs;
private int photonDeviceIndex;
#endif
private int calibrationTime = 200;
private SerializedProperty voiceDetectionSp;
private SerializedProperty voiceDetectionThresholdSp;
private SerializedProperty voiceDetectionDelayMsSp;
private SerializedProperty photonMicrophoneDeviceIdSp;
private SerializedProperty interestGroupSp;
private SerializedProperty debugEchoModeSp;
private SerializedProperty reliableModeSp;
private SerializedProperty encryptSp;
private SerializedProperty transmitEnabledSp;
private SerializedProperty samplingRateSp;
private SerializedProperty frameDurationSp;
private SerializedProperty bitrateSp;
private SerializedProperty sourceTypeSp;
private SerializedProperty microphoneTypeSp;
private SerializedProperty audioClipSp;
private SerializedProperty loopAudioClipSp;
private SerializedProperty autoStartSp;
private SerializedProperty recordOnlyWhenEnabledSp;
private SerializedProperty stopRecordingWhenPausedSp;
private SerializedProperty useMicrophoneTypeFallbackSp;
private SerializedProperty recordOnlyWhenJoinedSp;
#if UNITY_IOS
private SerializedProperty useCustomAudioSessionParametersSp;
private SerializedProperty audioSessionParametersSp;
private SerializedProperty audioSessionPresetIndexSp;
private SerializedProperty audioSessionParametersCategorySp;
private SerializedProperty audioSessionParametersModeSp;
private SerializedProperty audioSessionParametersCategoryOptionsSp;
private string[] iOSAudioSessionPresetsNames = {"Game", "VoIP"};
private AudioSessionParameters[] iOSAudioSessionPresetsValues =
{AudioSessionParametersPresets.Game, AudioSessionParametersPresets.VoIP};
#elif UNITY_ANDROID
private SerializedProperty nativeAndroidMicrophoneSettingsSp;
#else
private SerializedProperty reactOnSystemChangesSp;
private SerializedProperty skipDeviceChecksSp;
#endif
private void OnEnable()
{
    // Cache the inspected Recorder and refresh the device lists whenever the audio
    // configuration changes while the inspector is open (e.g. mic plugged/unplugged).
    this.recorder = this.target as Recorder;
    AudioSettings.OnAudioConfigurationChanged += this.OnAudioConfigChanged;
    this.RefreshMicrophones();
    // Cache serialized properties once here; they are drawn on every OnInspectorGUI call.
    this.voiceDetectionSp = this.serializedObject.FindProperty("voiceDetection");
    this.voiceDetectionThresholdSp = this.serializedObject.FindProperty("voiceDetectionThreshold");
    this.voiceDetectionDelayMsSp = this.serializedObject.FindProperty("voiceDetectionDelayMs");
    this.photonMicrophoneDeviceIdSp = this.serializedObject.FindProperty("photonMicrophoneDeviceId");
    this.interestGroupSp = this.serializedObject.FindProperty("interestGroup");
    this.debugEchoModeSp = this.serializedObject.FindProperty("debugEchoMode");
    this.reliableModeSp = this.serializedObject.FindProperty("reliableMode");
    this.encryptSp = this.serializedObject.FindProperty("encrypt");
    this.transmitEnabledSp = this.serializedObject.FindProperty("transmitEnabled");
    this.samplingRateSp = this.serializedObject.FindProperty("samplingRate");
    this.frameDurationSp = this.serializedObject.FindProperty("frameDuration");
    this.bitrateSp = this.serializedObject.FindProperty("bitrate");
    this.sourceTypeSp = this.serializedObject.FindProperty("sourceType");
    this.microphoneTypeSp = this.serializedObject.FindProperty("microphoneType");
    this.audioClipSp = this.serializedObject.FindProperty("audioClip");
    this.loopAudioClipSp = this.serializedObject.FindProperty("loopAudioClip");
    this.autoStartSp = this.serializedObject.FindProperty("autoStart");
    this.recordOnlyWhenEnabledSp = this.serializedObject.FindProperty("recordOnlyWhenEnabled");
    this.stopRecordingWhenPausedSp = this.serializedObject.FindProperty("stopRecordingWhenPaused");
    this.useMicrophoneTypeFallbackSp = this.serializedObject.FindProperty("useMicrophoneTypeFallback");
    this.recordOnlyWhenJoinedSp = this.serializedObject.FindProperty("recordOnlyWhenJoined");
    // Platform-specific properties: which branch compiles depends on the editor's
    // active build target (UNITY_IOS / UNITY_ANDROID are set per build target in the editor).
#if UNITY_IOS
    this.useCustomAudioSessionParametersSp = this.serializedObject.FindProperty("useCustomAudioSessionParameters");
    this.audioSessionPresetIndexSp = this.serializedObject.FindProperty("audioSessionPresetIndex");
    this.audioSessionParametersSp = this.serializedObject.FindProperty("audioSessionParameters");
    this.audioSessionParametersCategorySp = this.audioSessionParametersSp.FindPropertyRelative("Category");
    this.audioSessionParametersModeSp = this.audioSessionParametersSp.FindPropertyRelative("Mode");
    this.audioSessionParametersCategoryOptionsSp = this.audioSessionParametersSp.FindPropertyRelative("CategoryOptions");
#elif UNITY_ANDROID
    this.nativeAndroidMicrophoneSettingsSp = this.serializedObject.FindProperty("nativeAndroidMicrophoneSettings");
#else
    // Desktop/other targets: system-change reaction options.
    // NOTE(review): field is named skipDeviceChecksSp but backs "skipDeviceChangeChecks" — intentional, just a shorter name.
    this.reactOnSystemChangesSp = this.serializedObject.FindProperty("reactOnSystemChanges");
    this.skipDeviceChecksSp = this.serializedObject.FindProperty("skipDeviceChangeChecks");
#endif
}
// Stop listening for audio configuration changes once this inspector goes away.
private void OnDisable() => AudioSettings.OnAudioConfigurationChanged -= this.OnAudioConfigChanged;
/// <summary>Repaint the inspector every editor update so live recorder state stays current.</summary>
public override bool RequiresConstantRepaint() => true;
/// <summary>
/// Draws the Recorder inspector. Two distinct code paths:
/// play mode on a scene object — reads/writes the live Recorder properties directly
/// so changes take effect immediately; otherwise — edits the serialized properties
/// so changes are persisted to the asset/prefab. Wrapped in a
/// BeginChangeCheck/EndChangeCheck pair; ApplyModifiedProperties runs only when
/// something actually changed.
/// </summary>
public override void OnInspectorGUI()
{
    this.serializedObject.UpdateIfRequiredOrScript();
    //serializedObject.Update();
    // Optional sibling components that alter which controls are shown.
    WebRtcAudioDsp webRtcAudioDsp = this.recorder.GetComponent<WebRtcAudioDsp>();
    bool webRtcAudioDspAttached = webRtcAudioDsp && webRtcAudioDsp != null && webRtcAudioDsp.enabled;
    AudioChangesHandler audioChangesHandler = this.recorder.GetComponent<AudioChangesHandler>();
    bool audioChangesHandlerAttached = !ReferenceEquals(null, audioChangesHandler) && audioChangesHandler;
    // Play-mode-only status banners: prompt for restart/initialization when needed.
    if (PhotonVoiceEditorUtils.IsInTheSceneInPlayMode(this.recorder.gameObject))
    {
        if (this.recorder.RequiresRestart)
        {
            EditorGUILayout.HelpBox("Recorder requires restart. Call Recorder.RestartRecording().", MessageType.Warning);
            if (GUILayout.Button("RestartRecording"))
            {
                this.recorder.RestartRecording();
            }
        }
        else if (!this.recorder.IsInitialized)
        {
            EditorGUILayout.HelpBox("Recorder requires initialization. Call Recorder.Init or VoiceConnection.InitRecorder.", MessageType.Warning);
        }
    }
    VoiceLogger.ExposeLogLevel(this.serializedObject, this.recorder);
    EditorGUI.BeginChangeCheck();
    if (PhotonVoiceEditorUtils.IsInTheSceneInPlayMode(this.recorder.gameObject))
    {
        // ---- PLAY MODE: drive the live Recorder instance directly. ----
        if (!audioChangesHandlerAttached)
        {
#if !UNITY_ANDROID && !UNITY_IOS
            this.recorder.ReactOnSystemChanges = EditorGUILayout.Toggle(new GUIContent("React On System Changes", "If true, recording is restarted when Unity detects Audio Config. changes."), this.recorder.ReactOnSystemChanges);
            if (this.recorder.ReactOnSystemChanges)
            {
                EditorGUI.indentLevel++;
                EditorGUILayout.PropertyField(this.skipDeviceChecksSp, new GUIContent("Skip Device Checks", "If true, restarts recording without checking if audio config/device changes affected recording."));
                EditorGUI.indentLevel--;
                EditorGUILayout.HelpBox("ReactOnSystemChanges will be deprecated. AudioChangesHandler component is now the preferred solution to handle audio changes.", MessageType.Warning);
            }
            else
            {
                EditorGUILayout.HelpBox("ReactOnSystemChanges will be deprecated. AudioChangesHandler component is now the preferred solution to handle audio changes.", MessageType.Info);
            }
#endif
            if (GUILayout.Button("Add AudioChangesHandler component"))
            {
                this.recorder.gameObject.AddComponent<AudioChangesHandler>();
            }
        }
        this.recorder.RecordOnlyWhenJoined = EditorGUILayout.Toggle(new GUIContent("Record Only When Joined", "If true, recording can start only when client is joined to a room. Auto start is also delayed until client is joined to a room."),
            this.recorder.RecordOnlyWhenJoined);
        this.recorder.RecordOnlyWhenEnabled = EditorGUILayout.Toggle(new GUIContent("Record Only When Enabled", "If true, component will work only when enabled and active in hierarchy. Auto start is also delayed until these conditions are met."),
            this.recorder.RecordOnlyWhenEnabled);
        EditorGUILayout.PropertyField(this.stopRecordingWhenPausedSp,
            new GUIContent("Stop Recording When Paused",
                "If true, stop recording when paused resume/restart when un-paused."));
        this.recorder.TransmitEnabled = EditorGUILayout.Toggle(new GUIContent("Transmit Enabled", "If true, audio transmission is enabled."), this.recorder.TransmitEnabled);
        // Once initialized the IsRecording toggle controls the live session;
        // before that only AutoStart is meaningful.
        if (this.recorder.IsInitialized)
        {
            this.recorder.IsRecording = EditorGUILayout.Toggle(new GUIContent("IsRecording", "If true, audio recording is on."), this.recorder.IsRecording);
        }
        else
        {
            this.recorder.AutoStart = EditorGUILayout.Toggle(new GUIContent("Auto Start", "If true, recording is started when Recorder is initialized."), this.recorder.AutoStart);
        }
        // Live input level meter (zero while not transmitting).
        if (this.recorder.IsRecording && this.recorder.TransmitEnabled)
        {
            float amplitude = 0f;
            if (this.recorder.IsCurrentlyTransmitting)
            {
                amplitude = this.recorder.LevelMeter.CurrentPeakAmp;
            }
            EditorGUILayout.Slider("Level", amplitude, 0, 1);
        }
        this.recorder.Encrypt = EditorGUILayout.Toggle(new GUIContent("Encrypt", "If true, voice stream is sent encrypted."), this.recorder.Encrypt);
        this.recorder.InterestGroup = (byte)EditorGUILayout.IntField(new GUIContent("Interest Group", "Target interest group that will receive transmitted audio."), this.recorder.InterestGroup);
        // Debug echo is only offered for interest group 0 (see the serialized branch
        // below, which actively disables it for other groups).
        if (this.recorder.InterestGroup == 0)
        {
            this.recorder.DebugEchoMode = EditorGUILayout.Toggle(new GUIContent("Debug Echo", "If true, outgoing stream routed back to client via server same way as for remote client's streams."), this.recorder.DebugEchoMode);
        }
        this.recorder.ReliableMode = EditorGUILayout.Toggle(new GUIContent("Reliable Mode", "If true, stream data sent in reliable mode."), this.recorder.ReliableMode);
        EditorGUILayout.LabelField("Codec Parameters", EditorStyles.boldLabel);
        OpusCodec.FrameDuration frameDuration = (OpusCodec.FrameDuration)EditorGUILayout.EnumPopup(new GUIContent("Frame Duration", "Outgoing audio stream encoder delay."), this.recorder.FrameDuration);
        SamplingRate samplingRate = (SamplingRate)EditorGUILayout.EnumPopup(new GUIContent("Sampling Rate", "Outgoing audio stream sampling rate."), this.recorder.SamplingRate);
        // WebRTC DSP constrains codec settings: clamp unsupported sampling rates to
        // 48kHz and sub-10ms frame durations to 10ms, warning about the change.
        if (webRtcAudioDspAttached)
        {
            int samplingRateInt = (int) samplingRate;
            if (Array.IndexOf(WebRTCAudioProcessor.SupportedSamplingRates, samplingRateInt) < 0)
            {
                if (this.recorder.SamplingRate == SamplingRate.Sampling48000)
                {
                    Debug.LogWarningFormat("Sampling rate requested ({0}Hz) is not supported by WebRTC Audio DSP. Ignoring change.", samplingRateInt);
                }
                else
                {
                    Debug.LogWarningFormat("Sampling rate requested ({0}Hz) is not supported by WebRTC Audio DSP, switching to the closest supported value: {1}Hz.", samplingRateInt, "48k");
                }
                samplingRate = SamplingRate.Sampling48000;
            }
            switch (frameDuration)
            {
                case OpusCodec.FrameDuration.Frame2dot5ms:
                case OpusCodec.FrameDuration.Frame5ms:
                    if (this.recorder.FrameDuration == OpusCodec.FrameDuration.Frame10ms)
                    {
                        Debug.LogWarningFormat("Frame duration requested ({0}ms) is not supported by WebRTC Audio DSP. Ignoring change.", frameDuration);
                    }
                    else
                    {
                        // (int)frameDuration / 1000: enum values appear to be in
                        // microseconds — TODO confirm against OpusCodec.FrameDuration.
                        Debug.LogWarningFormat("Frame duration requested ({0}ms) is not supported by WebRTC Audio DSP (it needs to be N x 10ms), switching to the closest supported value: {1}ms.", (int)frameDuration / 1000, 10);
                    }
                    frameDuration = OpusCodec.FrameDuration.Frame10ms;
                    break;
            }
        }
        this.recorder.SamplingRate = samplingRate;
        this.recorder.FrameDuration = frameDuration;
        this.recorder.Bitrate = EditorGUILayout.IntSlider(new GUIContent("Bitrate", "Outgoing audio stream bitrate."), this.recorder.Bitrate, Recorder.MIN_OPUS_BITRATE, Recorder.MAX_OPUS_BITRATE);
        EditorGUILayout.LabelField("Audio Source Settings", EditorStyles.boldLabel);
        this.recorder.SourceType = (Recorder.InputSourceType) EditorGUILayout.EnumPopup(new GUIContent("Input Source Type", "Input audio data source type"), this.recorder.SourceType);
        switch (this.recorder.SourceType)
        {
            case Recorder.InputSourceType.Microphone:
                this.recorder.MicrophoneType = (Recorder.MicType) EditorGUILayout.EnumPopup(
                    new GUIContent("Microphone Type",
                        "Which microphone API to use when the Source is set to UnityMicrophone."),
                    this.recorder.MicrophoneType);
                EditorGUILayout.PropertyField(this.useMicrophoneTypeFallbackSp, new GUIContent("Use Fallback", "If true, if recording fails to start with Unity microphone type, Photon microphone type is used -if available- as a fallback and vice versa."));
                switch (this.recorder.MicrophoneType)
                {
                    case Recorder.MicType.Unity:
                        if (UnityMicrophone.devices.Length == 0)
                        {
                            EditorGUILayout.HelpBox("No microphone device found", MessageType.Error);
                        }
                        else
                        {
                            EditorGUILayout.HelpBox("Devices list and current selection is valid in Unity Editor only. In build, you need to set it via code preferably at runtime.", MessageType.Info);
                            this.unityMicrophoneDeviceIndex = EditorGUILayout.Popup("Microphone Device", this.GetUnityMicrophoneDeviceIndex(), UnityMicrophone.devices);
                            this.recorder.UnityMicrophoneDevice = UnityMicrophone.devices[this.unityMicrophoneDeviceIndex];
                            int minFreq, maxFreq;
                            UnityMicrophone.GetDeviceCaps(UnityMicrophone.devices[this.unityMicrophoneDeviceIndex], out minFreq, out maxFreq);
                            EditorGUILayout.LabelField("Microphone Device Caps", string.Format("{0}..{1} Hz", minFreq, maxFreq));
                        }
                        break;
                    case Recorder.MicType.Photon:
#if PHOTON_MICROPHONE_ENUMERATOR
                        if (this.recorder.MicrophonesEnumerator.IsSupported)
                        {
                            if (!this.recorder.MicrophonesEnumerator.Any())
                            {
                                EditorGUILayout.HelpBox("No microphone device found", MessageType.Error);
                            }
                            else
                            {
                                EditorGUILayout.HelpBox("Devices list and current selection is valid in Unity Editor only. In build, you need to set it via code preferably at runtime.", MessageType.Info);
                                EditorGUILayout.BeginHorizontal();
                                // Cached popup data can go stale after a device change;
                                // rebuild before indexing out of range.
                                if (this.photonDeviceIndex >= this.photonDeviceNames.Length)
                                {
                                    Debug.LogWarningFormat("Unexpected photonDeviceIndex = {0} >= photonDeviceNames.Length = {1}, forcing refresh.", this.photonDeviceIndex, this.photonDeviceNames.Length);
                                    this.RefreshMicrophones();
                                }
                                this.photonDeviceIndex = EditorGUILayout.Popup("Microphone Device", this.photonDeviceIndex, this.photonDeviceNames);
                                this.recorder.PhotonMicrophoneDeviceId = this.photonDeviceIDs[this.photonDeviceIndex];
                                if (GUILayout.Button("Refresh", EditorStyles.miniButton, GUILayout.Width(70)))
                                {
                                    this.RefreshPhotonMicrophoneDevices();
                                }
                                EditorGUILayout.EndHorizontal();
                            }
                        }
                        else
                        {
                            this.recorder.PhotonMicrophoneDeviceId = -1;
                            EditorGUILayout.HelpBox("PhotonMicrophoneEnumerator Not Supported", MessageType.Error);
                        }
#endif
#if UNITY_IOS
                        // iOS: either fully custom audio session parameters or one of
                        // the bundled presets (preset 0/1 also seed CategoryOptions).
                        EditorGUILayout.LabelField("iOS Audio Session Parameters", EditorStyles.boldLabel);
                        EditorGUI.indentLevel++;
                        EditorGUILayout.PropertyField(this.useCustomAudioSessionParametersSp, new GUIContent("Use Custom"));
                        if (this.useCustomAudioSessionParametersSp.boolValue)
                        {
                            EditorGUILayout.PropertyField(this.audioSessionParametersCategorySp);
                            EditorGUILayout.PropertyField(this.audioSessionParametersModeSp);
                            EditorGUILayout.PropertyField(this.audioSessionParametersCategoryOptionsSp, true);
                        }
                        else
                        {
                            int index = EditorGUILayout.Popup("Preset", this.audioSessionPresetIndexSp.intValue, this.iOSAudioSessionPresetsNames);
                            if (index != this.audioSessionPresetIndexSp.intValue)
                            {
                                this.audioSessionPresetIndexSp.intValue = index;
                                AudioSessionParameters parameters = this.iOSAudioSessionPresetsValues[index];
                                this.SetEnumIndex(this.audioSessionParametersCategorySp,
                                    typeof(AudioSessionCategory), parameters.Category);
                                this.SetEnumIndex(this.audioSessionParametersModeSp,
                                    typeof(AudioSessionMode), parameters.Mode);
                                if (parameters.CategoryOptions != null)
                                {
                                    this.audioSessionParametersCategoryOptionsSp.ClearArray();
                                    this.audioSessionParametersCategoryOptionsSp.arraySize =
                                        parameters.CategoryOptions.Length;
                                    if (index == 0)
                                    {
                                        this.SetEnumIndex(this.audioSessionParametersCategoryOptionsSp
                                            .GetArrayElementAtIndex(0), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.DefaultToSpeaker);
                                        this.SetEnumIndex(this.audioSessionParametersCategoryOptionsSp
                                            .GetArrayElementAtIndex(1), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.AllowBluetooth);
                                    }
                                    else if (index == 1)
                                    {
                                        this.SetEnumIndex(this.audioSessionParametersCategoryOptionsSp
                                            .GetArrayElementAtIndex(0), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.AllowBluetooth);
                                    }
                                }
                            }
                        }
                        EditorGUI.indentLevel--;
#elif UNITY_ANDROID
                        EditorGUILayout.LabelField("Android Native Microphone Settings", EditorStyles.boldLabel);
                        EditorGUI.indentLevel++;
                        EditorGUILayout.PropertyField(this.nativeAndroidMicrophoneSettingsSp.FindPropertyRelative("AcousticEchoCancellation"));
                        EditorGUILayout.PropertyField(this.nativeAndroidMicrophoneSettingsSp.FindPropertyRelative("AutomaticGainControl"));
                        EditorGUILayout.PropertyField(this.nativeAndroidMicrophoneSettingsSp.FindPropertyRelative("NoiseSuppression"));
                        EditorGUI.indentLevel--;
#endif
                        break;
                    default:
                        throw new ArgumentOutOfRangeException();
                }
                break;
            case Recorder.InputSourceType.AudioClip:
                this.recorder.AudioClip = EditorGUILayout.ObjectField(new GUIContent("Audio Clip", "Source audio clip."), this.recorder.AudioClip, typeof(AudioClip), false) as AudioClip;
                this.recorder.LoopAudioClip =
                    EditorGUILayout.Toggle(new GUIContent("Loop", "Loop playback for audio clip sources."),
                        this.recorder.LoopAudioClip);
                break;
            case Recorder.InputSourceType.Factory:
                EditorGUILayout.HelpBox("Add a custom InputFactory method in code.", MessageType.Info);
                break;
            default:
                throw new ArgumentOutOfRangeException();
        }
        EditorGUILayout.LabelField("Voice Activity Detection (VAD)", EditorStyles.boldLabel);
        if (webRtcAudioDspAttached)
        {
            if (webRtcAudioDsp.VAD)
            {
                EditorGUILayout.HelpBox("WebRtcAudioDsp.VAD is already enabled no need to use the built-in Recorder VAD", MessageType.Info);
            }
            else
            {
                EditorGUILayout.HelpBox("It's recommended to use VAD from WebRtcAudioDsp instead of built-in Recorder VAD", MessageType.Info);
            }
        }
        this.recorder.VoiceDetection = EditorGUILayout.Toggle(new GUIContent("Detect", "If true, voice detection enabled."), this.recorder.VoiceDetection);
        if (this.recorder.VoiceDetection)
        {
            // Convenience switch from the built-in VAD to the WebRTC DSP one.
            if (webRtcAudioDspAttached && !webRtcAudioDsp.VAD && GUILayout.Button("Use WebRtcAudioDsp.VAD instead"))
            {
                this.recorder.VoiceDetection = false;
                webRtcAudioDsp.VAD = true;
            }
            this.recorder.VoiceDetectionThreshold =
                EditorGUILayout.Slider(
                    new GUIContent("Threshold", "Voice detection threshold (0..1, where 1 is full amplitude)."),
                    this.recorder.VoiceDetectionThreshold, 0f, 1f);
            this.recorder.VoiceDetectionDelayMs =
                EditorGUILayout.IntField(new GUIContent("Delay (ms)", "Keep detected state during this time after signal level dropped below threshold. Default is 500ms"), this.recorder.VoiceDetectionDelayMs);
            EditorGUILayout.HelpBox("Do not speak and stay in a silent environment when calibrating.", MessageType.Info);
            // Calibration: show countdown while running, otherwise offer the button
            // (only meaningful while actually recording and transmitting).
            if (this.recorder.VoiceDetectorCalibrating)
            {
                EditorGUILayout.LabelField(string.Format("Calibrating {0} ms", this.calibrationTime));
            }
            else
            {
                this.calibrationTime = EditorGUILayout.IntField("Calibration Time (ms)", this.calibrationTime);
                if (this.recorder.IsRecording && this.recorder.TransmitEnabled)
                {
                    if (GUILayout.Button("Calibrate"))
                    {
                        this.recorder.VoiceDetectorCalibrate(this.calibrationTime);
                    }
                }
            }
        }
    }
    else
    {
        // ---- EDIT MODE (or not in scene): edit serialized properties instead. ----
        if (!audioChangesHandlerAttached)
        {
#if !UNITY_ANDROID && !UNITY_IOS
            EditorGUILayout.PropertyField(this.reactOnSystemChangesSp,
                new GUIContent("React On System Changes",
                    "If true, recording is restarted when Unity detects Audio Config. changes."));
            if (this.reactOnSystemChangesSp.boolValue)
            {
                EditorGUI.indentLevel++;
                EditorGUILayout.PropertyField(this.skipDeviceChecksSp, new GUIContent("Skip Device Checks", "If true, restarts recording without checking if audio config/device changes affected recording."));
                EditorGUI.indentLevel--;
                EditorGUILayout.HelpBox("ReactOnSystemChanges will be deprecated. AudioChangesHandler component is now the preferred solution to handle audio changes.", MessageType.Warning);
            }
            else
            {
                EditorGUILayout.HelpBox("ReactOnSystemChanges will be deprecated. AudioChangesHandler component is now the preferred solution to handle audio changes.", MessageType.Info);
            }
#endif
            if (GUILayout.Button("Add AudioChangesHandler component"))
            {
                this.recorder.gameObject.AddComponent<AudioChangesHandler>();
            }
        }
        EditorGUILayout.PropertyField(this.recordOnlyWhenEnabledSp,
            new GUIContent("Record Only When Enabled",
                "If true, component will work only when enabled and active in hierarchy."));
        EditorGUILayout.PropertyField(this.recordOnlyWhenJoinedSp,
            new GUIContent("Record Only When Joined",
                "If true, recording can start only when client is joined to a room. Auto start is also delayed until client is joined to a room."));
        EditorGUILayout.PropertyField(this.stopRecordingWhenPausedSp,
            new GUIContent("Stop Recording When Paused",
                "If true, stop recording when paused resume/restart when un-paused."));
        EditorGUILayout.PropertyField(this.transmitEnabledSp,
            new GUIContent("Transmit Enabled", "If true, audio transmission is enabled."));
        EditorGUILayout.PropertyField(this.autoStartSp,
            new GUIContent("Auto Start", "If true, recording is started when Recorder is initialized."));
        EditorGUILayout.PropertyField(this.encryptSp,
            new GUIContent("Encrypt", "If true, voice stream is sent encrypted."));
        EditorGUILayout.PropertyField(this.interestGroupSp,
            new GUIContent("Interest Group", "Target interest group that will receive transmitted audio."));
        // Debug echo only works with interest group 0; force it off otherwise.
        if (this.interestGroupSp.intValue == 0)
        {
            EditorGUILayout.PropertyField(this.debugEchoModeSp,
                new GUIContent("Debug Echo",
                    "If true, outgoing stream routed back to client via server same way as for remote client's streams."));
        }
        else if (this.debugEchoModeSp.boolValue)
        {
            Debug.LogWarningFormat("DebugEchoMode disabled because InterestGroup changed to {0}. DebugEchoMode works only with Interest Group 0.", this.interestGroupSp.intValue);
            this.debugEchoModeSp.boolValue = false;
        }
        EditorGUILayout.PropertyField(this.reliableModeSp, new GUIContent("Reliable Mode",
            "If true, stream data sent in reliable mode."));
        EditorGUILayout.LabelField("Codec Parameters", EditorStyles.boldLabel);
        EditorGUILayout.PropertyField(this.frameDurationSp,
            new GUIContent("Frame Duration", "Outgoing audio stream encoder delay."));
        EditorGUILayout.PropertyField(this.samplingRateSp,
            new GUIContent("Sampling Rate", "Outgoing audio stream sampling rate."));
        EditorGUILayout.PropertyField(this.bitrateSp,
            new GUIContent("Bitrate", "Outgoing audio stream bitrate."));
        //if (webRtcAudioDspAttached)
        //{
        //    SamplingRate samplingRate = (SamplingRate)Enum.GetValues(typeof(SamplingRate)).GetValue(this.samplingRateSp.enumValueIndex);
        //    if (Array.IndexOf(WebRTCAudioProcessor.SupportedSamplingRates, samplingRate) < 0)
        //    {
        //        Debug.LogWarningFormat("Sampling rate requested ({0}Hz) is not supported by WebRTC Audio DSP, switching to the closest supported value: {1}ms.", samplingRate, "48k");
        //        this.samplingRateSp.enumValueIndex = 4;
        //    }
        //    OpusCodec.FrameDuration frameDuration = (OpusCodec.FrameDuration)Enum.GetValues(typeof(OpusCodec.FrameDuration)).GetValue(this.frameDurationSp.enumValueIndex);
        //    switch (frameDuration)
        //    {
        //        case OpusCodec.FrameDuration.Frame2dot5ms:
        //        case OpusCodec.FrameDuration.Frame5ms:
        //            Debug.LogWarningFormat("Frame duration requested ({0}ms) is not supported by WebRTC Audio DSP (it needs to be N x 10ms), switching to the closest supported value: {1}Hz.", (int)frameDuration / 1000, 10);
        //            this.frameDurationSp.enumValueIndex = 2;
        //            break;
        //    }
        //}
        EditorGUILayout.LabelField("Audio Source Settings", EditorStyles.boldLabel);
        EditorGUILayout.PropertyField(this.sourceTypeSp,
            new GUIContent("Input Source Type", "Input audio data source type"));
        switch ((Recorder.InputSourceType)this.sourceTypeSp.enumValueIndex)
        {
            case Recorder.InputSourceType.Microphone:
                EditorGUILayout.PropertyField(this.microphoneTypeSp, new GUIContent("Microphone Type",
                    "Which microphone API to use when the Source is set to UnityMicrophone."));
                EditorGUILayout.PropertyField(this.useMicrophoneTypeFallbackSp, new GUIContent("Use Fallback", "If true, if recording fails to start with Unity microphone type, Photon microphone type is used -if available- as a fallback and vice versa."));
                // NOTE(review): this switches on the live recorder.MicrophoneType while
                // the field above edits microphoneTypeSp — confirm they cannot diverge here.
                switch (this.recorder.MicrophoneType)
                {
                    case Recorder.MicType.Unity:
                        break;
                    case Recorder.MicType.Photon:
#if PHOTON_MICROPHONE_ENUMERATOR
                        if (this.recorder.MicrophonesEnumerator.IsSupported)
                        {
                            if (!this.recorder.MicrophonesEnumerator.Any())
                            {
                                EditorGUILayout.HelpBox("No microphone device found", MessageType.Error);
                            }
                            else
                            {
                                EditorGUILayout.BeginHorizontal();
                                if (this.photonDeviceIndex >= this.photonDeviceNames.Length)
                                {
                                    Debug.LogWarningFormat("Unexpected photonDeviceIndex = {0} >= photonDeviceNames.Length = {1}, forcing refresh.", this.photonDeviceIndex, this.photonDeviceNames.Length);
                                    this.RefreshMicrophones();
                                }
                                this.photonDeviceIndex = EditorGUILayout.Popup("Microphone Device", this.photonDeviceIndex, this.photonDeviceNames);
                                this.photonMicrophoneDeviceIdSp.intValue = this.photonDeviceIDs[this.photonDeviceIndex];
                                if (GUILayout.Button("Refresh", EditorStyles.miniButton, GUILayout.Width(70)))
                                {
                                    this.RefreshPhotonMicrophoneDevices();
                                }
                                EditorGUILayout.EndHorizontal();
                            }
                        }
                        else
                        {
                            this.photonMicrophoneDeviceIdSp.intValue = -1;
                            EditorGUILayout.HelpBox("PhotonMicrophoneEnumerator Not Supported", MessageType.Error);
                        }
#endif
#if UNITY_IOS
                        // Same iOS session parameter UI as the play-mode branch above.
                        EditorGUILayout.LabelField("iOS Audio Session Parameters", EditorStyles.boldLabel);
                        EditorGUI.indentLevel++;
                        EditorGUILayout.PropertyField(this.useCustomAudioSessionParametersSp, new GUIContent("Use Custom"));
                        if (this.useCustomAudioSessionParametersSp.boolValue)
                        {
                            EditorGUILayout.PropertyField(this.audioSessionParametersCategorySp);
                            EditorGUILayout.PropertyField(this.audioSessionParametersModeSp);
                            EditorGUILayout.PropertyField(this.audioSessionParametersCategoryOptionsSp, true);
                        }
                        else
                        {
                            int index = EditorGUILayout.Popup("Preset", this.audioSessionPresetIndexSp.intValue, this.iOSAudioSessionPresetsNames);
                            if (index != this.audioSessionPresetIndexSp.intValue)
                            {
                                this.audioSessionPresetIndexSp.intValue = index;
                                AudioSessionParameters parameters = this.iOSAudioSessionPresetsValues[index];
                                this.SetEnumIndex(this.audioSessionParametersCategorySp,
                                    typeof(AudioSessionCategory), parameters.Category);
                                this.SetEnumIndex(this.audioSessionParametersModeSp,
                                    typeof(AudioSessionMode), parameters.Mode);
                                if (parameters.CategoryOptions != null)
                                {
                                    this.audioSessionParametersCategoryOptionsSp.ClearArray();
                                    this.audioSessionParametersCategoryOptionsSp.arraySize =
                                        parameters.CategoryOptions.Length;
                                    if (index == 0)
                                    {
                                        this.SetEnumIndex(this.audioSessionParametersCategoryOptionsSp
                                            .GetArrayElementAtIndex(0), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.DefaultToSpeaker);
                                        this.SetEnumIndex(this.audioSessionParametersCategoryOptionsSp
                                            .GetArrayElementAtIndex(1), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.AllowBluetooth);
                                    }
                                    else if (index == 1)
                                    {
                                        this.SetEnumIndex(this.audioSessionParametersCategoryOptionsSp
                                            .GetArrayElementAtIndex(0), typeof(AudioSessionCategoryOption), AudioSessionCategoryOption.AllowBluetooth);
                                    }
                                }
                            }
                        }
                        EditorGUI.indentLevel--;
#elif UNITY_ANDROID
                        EditorGUILayout.LabelField("Android Native Microphone Settings", EditorStyles.boldLabel);
                        EditorGUI.indentLevel++;
                        EditorGUILayout.PropertyField(this.nativeAndroidMicrophoneSettingsSp.FindPropertyRelative("AcousticEchoCancellation"));
                        EditorGUILayout.PropertyField(this.nativeAndroidMicrophoneSettingsSp.FindPropertyRelative("AutomaticGainControl"));
                        EditorGUILayout.PropertyField(this.nativeAndroidMicrophoneSettingsSp.FindPropertyRelative("NoiseSuppression"));
                        EditorGUI.indentLevel--;
#endif
                        break;
                    default:
                        throw new ArgumentOutOfRangeException();
                }
                break;
            case Recorder.InputSourceType.AudioClip:
                EditorGUILayout.PropertyField(this.audioClipSp,
                    new GUIContent("Audio Clip", "Source audio clip."));
                EditorGUILayout.PropertyField(this.loopAudioClipSp,
                    new GUIContent("Loop", "Loop playback for audio clip sources."));
                break;
            case Recorder.InputSourceType.Factory:
                EditorGUILayout.HelpBox("Add a custom InputFactory method in code.", MessageType.Info);
                break;
            default:
                throw new ArgumentOutOfRangeException();
        }
        EditorGUILayout.LabelField("Voice Activity Detection (VAD)", EditorStyles.boldLabel);
        if (webRtcAudioDspAttached)
        {
            if (webRtcAudioDsp.VAD)
            {
                EditorGUILayout.HelpBox("WebRtcAudioDsp.VAD is already enabled no need to use the built-in Recorder VAD", MessageType.Info);
            }
            else
            {
                EditorGUILayout.HelpBox("It's recommended to use VAD from WebRtcAudioDsp instead of built-in Recorder VAD", MessageType.Info);
            }
        }
        EditorGUILayout.PropertyField(this.voiceDetectionSp,
            new GUIContent("Detect", "If true, voice detection enabled."));
        if (this.voiceDetectionSp.boolValue)
        {
            // NOTE(review): this writes the live recorder.VoiceDetection, not
            // voiceDetectionSp, even outside play mode — confirm intended.
            if (webRtcAudioDspAttached && !webRtcAudioDsp.VAD && GUILayout.Button("Use WebRtcAudioDsp.VAD instead"))
            {
                this.recorder.VoiceDetection = false;
                webRtcAudioDsp.VAD = true;
            }
            this.voiceDetectionThresholdSp.floatValue = EditorGUILayout.Slider(
                new GUIContent("Threshold", "Voice detection threshold (0..1, where 1 is full amplitude)."),
                this.voiceDetectionThresholdSp.floatValue, 0f, 1f);
            this.voiceDetectionDelayMsSp.intValue =
                EditorGUILayout.IntField(new GUIContent("Delay (ms)", "Keep detected state during this time after signal level dropped below threshold. Default is 500ms"), this.voiceDetectionDelayMsSp.intValue);
        }
    }
#if WEBRTC_AUDIO_DSP_SUPPORTED_PLATFORMS
    // Offer to add the DSP component on platforms that support it.
    if (!webRtcAudioDspAttached)
    {
        if (GUILayout.Button("Add WebRtcAudioDsp component"))
        {
            this.recorder.gameObject.AddComponent<WebRtcAudioDsp>();
        }
    }
#endif
    if (EditorGUI.EndChangeCheck())
    {
        this.serializedObject.ApplyModifiedProperties();
    }
}
// Handler for AudioSettings.OnAudioConfigurationChanged: when the notification was
// triggered by an audio device change (as opposed to a settings change), rebuild the
// cached microphone lists so the inspector popups stay valid.
private void OnAudioConfigChanged(bool deviceWasChanged)
{
    if (!deviceWasChanged)
    {
        return;
    }
    this.RefreshMicrophones();
}
// Recompute the popup index for the Recorder's selected Unity microphone, clamping
// into the current device list (0 when no devices are present), and — where the
// Photon enumerator is compiled in — rebuild the Photon device list as well.
private void RefreshMicrophones()
{
    string[] unityDevices = UnityMicrophone.devices;
    this.unityMicrophoneDeviceIndex = unityDevices.Length == 0
        ? 0
        : Mathf.Clamp(ArrayUtility.IndexOf(unityDevices, this.recorder.UnityMicrophoneDevice), 0, unityDevices.Length - 1);
#if PHOTON_MICROPHONE_ENUMERATOR
    this.RefreshPhotonMicrophoneDevices();
#endif
}
#if PHOTON_MICROPHONE_ENUMERATOR
// Rebuild the cached Photon microphone name/id arrays from the Recorder's device
// enumerator and re-select the previously chosen device id, clamped into range.
// When enumeration is unsupported or yields nothing, fall back to device id -1.
private void RefreshPhotonMicrophoneDevices()
{
    if (!this.recorder.MicrophonesEnumerator.IsSupported)
    {
        this.recorder.PhotonMicrophoneDeviceId = -1;
        return;
    }
    this.recorder.MicrophonesEnumerator.Refresh();
    int deviceCount = this.recorder.MicrophonesEnumerator.Count();
    if (deviceCount == 0)
    {
        // Nothing available: clear the cached popup data entirely.
        this.recorder.PhotonMicrophoneDeviceId = -1;
        this.photonDeviceNames = null;
        this.photonDeviceIDs = null;
        this.photonDeviceIndex = 0;
        return;
    }
    this.photonDeviceNames = new string[deviceCount];
    this.photonDeviceIDs = new int[deviceCount];
    int slot = 0;
    foreach (DeviceInfo deviceInfo in this.recorder.MicrophonesEnumerator)
    {
        this.photonDeviceIDs[slot] = deviceInfo.IDInt;
        // '/' is replaced, presumably because it would create submenus in the
        // editor popup — confirm.
        this.photonDeviceNames[slot] = string.Format("{0} - {1} [{2}]", slot, deviceInfo.Name.Replace('/', '_'), deviceInfo.IDInt);
        slot++;
    }
    this.photonDeviceIndex = Mathf.Clamp(Array.IndexOf(this.photonDeviceIDs, this.recorder.PhotonMicrophoneDeviceId), 0, deviceCount - 1);
    this.recorder.PhotonMicrophoneDeviceId = this.photonDeviceIDs[this.photonDeviceIndex];
}
#endif
#if UNITY_IOS
// Select an enum value on a SerializedProperty by matching the enum member's name
// against the property's enumNames list. If the name is not found the property is
// left unchanged.
private void SetEnumIndex(SerializedProperty property, Type enumType, object enumValue)
{
    int matchedIndex = Array.IndexOf(property.enumNames, Enum.GetName(enumType, enumValue));
    if (matchedIndex < 0)
    {
        return;
    }
    property.enumValueIndex = matchedIndex;
}
#endif
// Return the popup index for the Recorder's currently selected Unity microphone.
// If the cached index no longer points at the selected device (e.g. the device
// list changed), try to locate the device again; when it cannot be found, keep
// the cached index as-is.
private int GetUnityMicrophoneDeviceIndex()
{
    int cachedIndex = this.unityMicrophoneDeviceIndex;
    bool stale;
    if (cachedIndex == 0)
    {
        // Slot 0 appears to represent the default microphone — stale when the
        // selected device is not the default one.
        stale = !Recorder.IsDefaultUnityMic(this.recorder.UnityMicrophoneDevice);
    }
    else
    {
        stale = cachedIndex > 0
            && cachedIndex < UnityMicrophone.devices.Length
            && !Recorder.CompareUnityMicNames(UnityMicrophone.devices[cachedIndex], this.recorder.UnityMicrophoneDevice);
    }
    if (stale)
    {
        int locatedIndex = Array.IndexOf(UnityMicrophone.devices, this.recorder.UnityMicrophoneDevice);
        if (locatedIndex >= 0)
        {
            return locatedIndex;
        }
    }
    return cachedIndex;
}
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: ded9902b49048f04d8d9788d6a740eb7
timeCreated: 1525441327
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,79 @@
namespace Photon.Voice.Unity.Editor
{
    using UnityEngine;
    using UnityEditor;
    using Unity;

    /// <summary>
    /// Custom inspector for the Speaker component. Exposes the playback delay
    /// settings (live-applied in play mode, serialized otherwise) and, while a
    /// remote stream is playing, shows the current buffer lag and a live
    /// frequency-spectrum curve of the attached AudioSource.
    /// </summary>
    [CustomEditor(typeof(Speaker))]
    public class SpeakerEditor : Editor
    {
        private Speaker speaker; // inspected target
        private SerializedProperty playbackDelaySettingsSp;
        private SerializedProperty playbackOnlyWhenEnabledSp;
        #region AnimationCurve
        private AudioSource audioSource; // AudioSource on the same GameObject as the Speaker
        private FFTWindow window = FFTWindow.Hanning;
        private float[] samples = new float[512]; // spectrum sample buffer (power-of-two size)
        private AnimationCurve curve;
        // Sample the AudioSource spectrum and render it as a read-only curve field.
        // Rebuilds the AnimationCurve every call; combined with
        // RequiresConstantRepaint this yields a continuously updating display.
        private void DrawAnimationCurve()
        {
            this.audioSource.GetSpectrumData(this.samples, 0, this.window);
            this.curve = new AnimationCurve();
            for (var i = 0; i < this.samples.Length; i++)
            {
                // x: normalized bin position 0..1; y: amplitude scaled up for visibility.
                this.curve.AddKey(1.0f / this.samples.Length * i, this.samples[i] * 100);
            }
            EditorGUILayout.CurveField(this.curve, Color.green, new Rect(0, 0, 1.0f, 0.1f), GUILayout.Height(64));
        }
        #endregion
        // Cache the target, its AudioSource and the serialized properties edited below.
        private void OnEnable()
        {
            this.speaker = this.target as Speaker;
            this.audioSource = this.speaker.GetComponent<AudioSource>();
            this.playbackDelaySettingsSp = this.serializedObject.FindProperty("playbackDelaySettings");
            this.playbackOnlyWhenEnabledSp = this.serializedObject.FindProperty("playbackOnlyWhenEnabled");
        }
        // Repaint continuously so the lag label and spectrum curve stay live.
        public override bool RequiresConstantRepaint()
        {
            return true;
        }
        public override void OnInspectorGUI()
        {
            this.serializedObject.UpdateIfRequiredOrScript();
            VoiceLogger.ExposeLogLevel(this.serializedObject, this.speaker);
            EditorGUI.BeginChangeCheck();
            EditorGUILayout.PropertyField(this.playbackDelaySettingsSp, new GUIContent("Playback Delay Settings", "Remote audio stream playback delay to compensate packets latency variations."), true);
            // Play mode: push the edited delay values into the live Speaker so they
            // take effect immediately; otherwise just edit the serialized property.
            if (PhotonVoiceEditorUtils.IsInTheSceneInPlayMode(this.speaker.gameObject))
            {
                this.speaker.SetPlaybackDelaySettings(this.playbackDelaySettingsSp.FindPropertyRelative("MinDelaySoft").intValue, this.playbackDelaySettingsSp.FindPropertyRelative("MaxDelaySoft").intValue, this.playbackDelaySettingsSp.FindPropertyRelative("MaxDelayHard").intValue);
                this.speaker.PlaybackOnlyWhenEnabled = EditorGUILayout.Toggle(new GUIContent("Playback Only When Enabled", "If true, component will work only when enabled and active in hierarchy."),
                    this.speaker.PlaybackOnlyWhenEnabled);
            }
            else
            {
                EditorGUILayout.PropertyField(this.playbackOnlyWhenEnabledSp, new GUIContent("Playback Only When Enabled", "If true, component will work only when enabled and active in hierarchy."));
            }
            if (EditorGUI.EndChangeCheck())
            {
                this.serializedObject.ApplyModifiedProperties();
            }
            // Live diagnostics while a stream is playing.
            if (this.speaker.IsPlaying)
            {
                EditorGUILayout.LabelField(string.Format("Current Buffer Lag: {0}", this.speaker.Lag));
                this.DrawAnimationCurve();
            }
        }
    }
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 10964642c52ad634da4de24866140e2d
timeCreated: 1525867684
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,520 @@
//#define DEBUG_DISCARD
namespace Photon.Voice.Unity.Editor
{
using ExitGames.Client.Photon;
using System.Collections.Generic;
using UnityEngine;
using UnityEditor;
using Unity;
using Realtime;
[CustomEditor(typeof(VoiceConnection))]
public class VoiceConnectionEditor : Editor
{
private VoiceConnection connection;
private SerializedProperty updateIntervalSp;
private SerializedProperty enableSupportLoggerSp;
private SerializedProperty settingsSp;
#if !UNITY_ANDROID && !UNITY_IOS
private SerializedProperty runInBackground;
#endif
#if !UNITY_IOS
private SerializedProperty keepAliveInBackgroundSp;
#endif
private SerializedProperty applyDontDestroyOnLoadSp;
private SerializedProperty statsResetInterval;
private SerializedProperty primaryRecorderSp;
private SerializedProperty speakerPrefabSp;
private SerializedProperty autoCreateSpeakerIfNotFoundSp;
private SerializedProperty globalRecordersLogLevelSp;
private SerializedProperty globalSpeakersLogLevelSp;
private SerializedProperty globalPlayDelaySettingsSp;
private const string notAvailable = "N/A?";
protected string photonLibraryVersion;
protected string photonVoiceVersion;
protected string punChangelogVersion;
protected string photonVoiceApiVersion;
protected bool versionFoldout;
// Cache the inspected VoiceConnection, look up all serialized properties edited by
// this inspector, and resolve version strings shown in the version foldout.
// Virtual so derived editors (e.g. for PUN integration) can extend the setup.
protected virtual void OnEnable()
{
    this.connection = this.target as VoiceConnection;
    this.updateIntervalSp = this.serializedObject.FindProperty("updateInterval");
    this.enableSupportLoggerSp = this.serializedObject.FindProperty("enableSupportLogger");
    this.settingsSp = this.serializedObject.FindProperty("Settings");
#if !UNITY_ANDROID && !UNITY_IOS
    this.runInBackground = this.serializedObject.FindProperty("runInBackground");
#endif
#if !UNITY_IOS
    this.keepAliveInBackgroundSp = this.serializedObject.FindProperty("KeepAliveInBackground");
#endif
    this.applyDontDestroyOnLoadSp = this.serializedObject.FindProperty("ApplyDontDestroyOnLoad");
    this.statsResetInterval = this.serializedObject.FindProperty("statsResetInterval");
    this.primaryRecorderSp = this.serializedObject.FindProperty("primaryRecorder");
    // Fall back to the old serialized name for assets saved before the rename.
    if (this.primaryRecorderSp == null) // [FormerlySerializedAs("PrimaryRecorder")]
    {
        this.primaryRecorderSp = this.serializedObject.FindProperty("PrimaryRecorder");
    }
    this.speakerPrefabSp = this.serializedObject.FindProperty("speakerPrefab");
    this.autoCreateSpeakerIfNotFoundSp = this.serializedObject.FindProperty("AutoCreateSpeakerIfNotFound");
    this.globalRecordersLogLevelSp = this.serializedObject.FindProperty("globalRecordersLogLevel");
    this.globalSpeakersLogLevelSp = this.serializedObject.FindProperty("globalSpeakersLogLevel");
    this.globalPlayDelaySettingsSp = this.serializedObject.FindProperty("globalPlaybackDelaySettings");
    // Version info: voice/PUN versions come from the changelog files, the library
    // version from the loaded Photon assembly.
    PhotonVoiceEditorUtils.GetPhotonVoiceVersionsFromChangeLog(out this.photonVoiceVersion, out this.punChangelogVersion, out this.photonVoiceApiVersion);
    this.photonLibraryVersion = System.Reflection.Assembly.GetAssembly(typeof(ExitGames.Client.Photon.PhotonPeer)).GetName().Version.ToString();
}
/// <summary>
/// Draws the VoiceConnection inspector: header/version info, log levels, connection
/// settings and - in play mode - live stats. Branches on play mode: live objects are
/// edited through the component's properties, otherwise serialized properties are used.
/// </summary>
public override void OnInspectorGUI()
{
this.ShowHeader();
this.serializedObject.UpdateIfRequiredOrScript();
VoiceLogger.ExposeLogLevel(this.serializedObject, this.connection);
EditorGUI.BeginChangeCheck();
this.connection.GlobalRecordersLogLevel = VoiceLogger.ExposeLogLevel(this.globalRecordersLogLevelSp);
this.connection.GlobalSpeakersLogLevel = VoiceLogger.ExposeLogLevel(this.globalSpeakersLogLevelSp);
// Fixed tooltip typo: "could be foun" -> "could be found".
EditorGUILayout.PropertyField(this.autoCreateSpeakerIfNotFoundSp, new GUIContent("Create Speaker If Not Found", "Auto instantiate a GameObject and attach a Speaker component to link to a remote audio stream if no candidate could be found"));
EditorGUILayout.PropertyField(this.updateIntervalSp, new GUIContent("Update Interval (ms)", "time [ms] between consecutive SendOutgoingCommands calls"));
if (PhotonVoiceEditorUtils.IsInTheSceneInPlayMode(this.connection.gameObject))
{
// Play mode: write directly to the live component so changes take effect immediately.
this.connection.PrimaryRecorder = EditorGUILayout.ObjectField(
new GUIContent("Primary Recorder", "Main Recorder to be used for transmission by default"),
this.connection.PrimaryRecorder, typeof(Recorder), true) as Recorder;
if (this.connection.SpeakerPrefab == null)
{
EditorGUILayout.HelpBox("Speaker prefab needs to have a Speaker component in the hierarchy.", MessageType.Info);
}
this.connection.SpeakerPrefab = EditorGUILayout.ObjectField(new GUIContent("Speaker Prefab",
"Prefab that contains Speaker component to be instantiated when receiving a new remote audio source info"), this.connection.SpeakerPrefab,
typeof(GameObject), false) as GameObject;
EditorGUILayout.PropertyField(this.globalPlayDelaySettingsSp, new GUIContent("Global Playback Delay Configuration", "Remote audio stream playback delay to compensate packets latency variations."), true);
// Propagate the edited delay values to the live connection right away.
this.connection.SetGlobalPlaybackDelaySettings(
this.globalPlayDelaySettingsSp.FindPropertyRelative("MinDelaySoft").intValue,
this.globalPlayDelaySettingsSp.FindPropertyRelative("MaxDelaySoft").intValue,
this.globalPlayDelaySettingsSp.FindPropertyRelative("MaxDelayHard").intValue);
}
else
{
// Edit mode (or prefab): operate on serialized properties only.
EditorGUILayout.PropertyField(this.enableSupportLoggerSp, new GUIContent("Support Logger", "Logs additional info for debugging.\nUse this when you submit bugs to the Photon Team."));
#if !UNITY_ANDROID && !UNITY_IOS
EditorGUILayout.PropertyField(this.runInBackground, new GUIContent("Run In Background", "Sets Unity's Application.runInBackground: Should the application keep running when the application is in the background?"));
#endif
#if !UNITY_IOS
EditorGUILayout.PropertyField(this.keepAliveInBackgroundSp, new GUIContent("Background Timeout (ms)", "Defines for how long the Fallback Thread should keep the connection, before it may time out as usual."));
#endif
EditorGUILayout.PropertyField(this.applyDontDestroyOnLoadSp, new GUIContent("Don't Destroy On Load", "Persists the GameObject across scenes using Unity's GameObject.DontDestroyOnLoad"));
if (this.applyDontDestroyOnLoadSp.boolValue && !PhotonVoiceEditorUtils.IsPrefab(this.connection.gameObject))
{
// DontDestroyOnLoad requires a root GameObject; offer a one-click fix.
if (this.connection.transform.parent != null)
{
EditorGUILayout.HelpBox("DontDestroyOnLoad only works for root GameObjects or components on root GameObjects.", MessageType.Warning);
if (GUILayout.Button("Detach"))
{
this.connection.transform.parent = null;
}
}
}
EditorGUILayout.PropertyField(this.primaryRecorderSp,
new GUIContent("Primary Recorder", "Main Recorder to be used for transmission by default"));
GameObject prefab = this.speakerPrefabSp.objectReferenceValue as GameObject;
if (prefab == null)
{
EditorGUILayout.HelpBox("Speaker prefab needs to have a Speaker component in the hierarchy.", MessageType.Info);
}
prefab = EditorGUILayout.ObjectField(new GUIContent("Speaker Prefab",
"Prefab that contains Speaker component to be instantiated when receiving a new remote audio source info"), prefab,
typeof(GameObject), false) as GameObject;
// Only accept the assignment when the prefab is cleared or actually contains a Speaker.
if (prefab == null || prefab.GetComponentInChildren<Speaker>() != null)
{
this.speakerPrefabSp.objectReferenceValue = prefab;
}
else
{
Debug.LogError("SpeakerPrefab must have a component of type Speaker in its hierarchy.", this);
}
EditorGUILayout.PropertyField(this.globalPlayDelaySettingsSp, new GUIContent("Global Playback Delay Settings", "Remote audio stream playback delay to compensate packets latency variations."), true);
}
if (!this.connection.Client.IsConnected)
{
this.DisplayAppSettings();
}
EditorGUILayout.PropertyField(this.statsResetInterval, new GUIContent("Stats Reset Interval (ms)", "time [ms] between statistics calculations"));
if (EditorGUI.EndChangeCheck())
{
this.serializedObject.ApplyModifiedProperties();
}
if (PhotonVoiceEditorUtils.IsInTheSceneInPlayMode(this.connection.gameObject))
{
// Live diagnostics are only meaningful while running in the scene.
this.DisplayVoiceStats();
this.DisplayDebugInfo(this.connection.Client);
this.DisplayCachedVoiceInfo();
this.DisplayTrafficStats(this.connection.Client.LoadBalancingPeer);
}
}
// Expansion state of the diagnostic foldouts drawn in play mode.
private bool showVoiceStats;
private bool showPlayersList;
private bool showDebugInfo;
private bool showCachedVoices;
private bool showTrafficStats;
/// <summary>Draws the "Voice Frames Stats" foldout: per-second received/lost frame counters from the connection.</summary>
protected virtual void DisplayVoiceStats()
{
this.showVoiceStats =
EditorGUILayout.Foldout(this.showVoiceStats, new GUIContent("Voice Frames Stats", "Show frames stats"));
if (this.showVoiceStats)
{
this.DrawLabel("Frames Received /s", this.connection.FramesReceivedPerSecond.ToString());
this.DrawLabel("Frames Lost /s", this.connection.FramesLostPerSecond.ToString());
this.DrawLabel("Frames Lost %", this.connection.FramesLostPercent.ToString());
}
}
/// <summary>
/// Draws the "Client Debug Info" foldout: client state, app/region/server info and,
/// when joined to a room, an expandable list of the room's players.
/// </summary>
/// <param name="client">The LoadBalancing client whose state is displayed.</param>
protected virtual void DisplayDebugInfo(LoadBalancingClient client)
{
this.showDebugInfo = EditorGUILayout.Foldout(this.showDebugInfo, new GUIContent("Client Debug Info", "Debug info for Photon client"));
if (this.showDebugInfo)
{
EditorGUI.indentLevel++;
this.DrawLabel("Client State", client.State.ToString());
// Optional fields: only drawn when the client actually has a value for them.
if (!string.IsNullOrEmpty(client.AppId))
{
this.DrawLabel("AppId", client.AppId);
}
if (!string.IsNullOrEmpty(client.AppVersion))
{
this.DrawLabel("AppVersion", client.AppVersion);
}
if (!string.IsNullOrEmpty(client.CloudRegion))
{
this.DrawLabel("Current Cloud Region", client.CloudRegion);
}
if (client.IsConnected)
{
this.DrawLabel("Current Server Address", client.CurrentServerAddress);
}
if (client.InRoom)
{
this.DrawLabel("Room Name", client.CurrentRoom.Name);
this.showPlayersList = EditorGUILayout.Foldout(this.showPlayersList, new GUIContent("Players List", "List of players joined to the room"));
if (this.showPlayersList)
{
EditorGUI.indentLevel++;
foreach (Player player in client.CurrentRoom.Players.Values)
{
this.DisplayPlayerDebugInfo(player);
// Horizontal separator between players.
EditorGUILayout.LabelField(string.Empty, GUI.skin.horizontalSlider);
}
EditorGUI.indentLevel--;
}
}
EditorGUI.indentLevel--;
}
}
/// <summary>Draws one player's debug rows: actor number, optional IDs, and role/state flags.</summary>
/// <param name="player">The room player to describe.</param>
protected virtual void DisplayPlayerDebugInfo(Player player)
{
this.DrawLabel("Actor Number", player.ActorNumber.ToString());
if (!string.IsNullOrEmpty(player.UserId))
{
this.DrawLabel("UserId", player.UserId);
}
if (!string.IsNullOrEmpty(player.NickName))
{
this.DrawLabel("NickName", player.NickName);
}
// Flag rows are only drawn when set.
if (player.IsMasterClient)
{
EditorGUILayout.LabelField("Master Client");
}
if (player.IsLocal)
{
EditorGUILayout.LabelField("Local");
}
if (player.IsInactive)
{
EditorGUILayout.LabelField("Inactive");
}
}
/// <summary>
/// Draws the "Cached Remote Voices' Info" foldout: one entry per cached remote voice,
/// cross-referenced against all Speaker components in the scene to show which speaker
/// (if any) is linked to that voice.
/// </summary>
protected virtual void DisplayCachedVoiceInfo()
{
this.showCachedVoices =
EditorGUILayout.Foldout(this.showCachedVoices, new GUIContent("Cached Remote Voices' Info", "Show remote voices info cached by local client"));
if (this.showCachedVoices)
{
List<RemoteVoiceLink> cachedVoices = this.connection.CachedRemoteVoices;
// NOTE(review): FindObjectsOfType runs every repaint; acceptable for an inspector but not free.
Speaker[] speakers = FindObjectsOfType<Speaker>();
for (int i = 0; i < cachedVoices.Count; i++)
{
//VoiceInfo info = cachedVoices[i].Info;
EditorGUI.indentLevel++;
this.DrawLabel("Voice #", cachedVoices[i].VoiceId.ToString());
this.DrawLabel("Player #", cachedVoices[i].PlayerId.ToString());
this.DrawLabel("Channel #", cachedVoices[i].ChannelId.ToString());
if (cachedVoices[i].Info.UserData != null)
{
this.DrawLabel("UserData: ", cachedVoices[i].Info.UserData.ToString());
}
// A speaker is considered linked when both PlayerId and VoiceId match.
bool linked = false;
for (int j = 0; j < speakers.Length; j++)
{
Speaker speaker = speakers[j];
if (speaker.IsLinked && speaker.RemoteVoiceLink.PlayerId == cachedVoices[i].PlayerId &&
speaker.RemoteVoiceLink.VoiceId == cachedVoices[i].VoiceId)
{
linked = true;
EditorGUILayout.ObjectField(new GUIContent("Linked Speaker"), speaker, typeof(Speaker), false);
break;
}
}
if (!linked)
{
EditorGUILayout.LabelField("Not Linked");
}
// Horizontal separator between voices.
EditorGUILayout.LabelField(string.Empty, GUI.skin.horizontalSlider);
EditorGUI.indentLevel--;
}
}
}
#if DEBUG_DISCARD
// Peak values tracked across repaints for the DEBUG_DISCARD diagnostics line.
private int maxDeltaUnreliableNumber;
private int maxCountDiscarded;
#endif
// Inspired by PhotonVoiceStatsGui.TrafficStatsWindow.
/// <summary>
/// Draws the "Traffic Stats" foldout: RTT/resend summary plus, when the peer's
/// TrafficStatsEnabled toggle is on, detailed per-direction command/packet statistics.
/// </summary>
/// <param name="peer">The Photon peer whose traffic statistics are displayed.</param>
protected virtual void DisplayTrafficStats(LoadBalancingPeer peer)
{
this.showTrafficStats = EditorGUILayout.Foldout(this.showTrafficStats, new GUIContent("Traffic Stats", "Traffic Statistics for Photon Client"));
if (this.showTrafficStats)
{
#if DEBUG_DISCARD
if (peer.DeltaUnreliableNumber > this.maxDeltaUnreliableNumber) this.maxDeltaUnreliableNumber = peer.DeltaUnreliableNumber;
if (peer.CountDiscarded > this.maxCountDiscarded) this.maxCountDiscarded = peer.CountDiscarded;
GUILayout.Label(string.Format("Discarded: {0} (max: {1}) UnreliableDelta: {2} (max: {3})",peer.CountDiscarded, this.maxCountDiscarded, peer.DeltaUnreliableNumber, maxDeltaUnreliableNumber));
#endif
GUILayout.Label(string.Format("RTT (ping): {0}[+/-{1}]ms, last={2}ms", peer.RoundTripTime, peer.RoundTripTimeVariance, peer.LastRoundTripTime));
//GUILayout.Label(string.Format("{0}ms since last ACK sent, {1}ms since last sent, {2}ms since last received", peer.ConnectionTime - peer.LastSendAckTime, peer.ConnectionTime - peer.LastSendOutgoingTime, peer.ConnectionTime - peer.TimestampOfLastSocketReceive)); //add
GUILayout.Label(string.Format("Reliable Commands Resent: {0}", peer.ResentReliableCommands));
//GUILayout.Label(string.Format("last operation={0}B current dispatch:{1}B", peer.ByteCountLastOperation, peer.ByteCountCurrentDispatch));
//GUILayout.Label(string.Format("Packets Lost: by challenge={0} by CRC={1}", peer.PacketLossByChallenge, peer.PacketLossByCrc));
//GUILayout.Label(string.Format("Total Traffic: In={0} - {1} Out={2} - {3}", this.FormatSize(peer.BytesIn, ti:string.Empty), this.FormatSize(this.connection.BytesReceivedPerSecond), this.FormatSize(peer.BytesOut, ti:string.Empty), this.FormatSize(this.connection.BytesSentPerSecond)));
GUILayout.Label(string.Format("Total Traffic: In={0} Out={1}", this.FormatSize(peer.BytesIn, ti:string.Empty), this.FormatSize(peer.BytesOut, ti:string.Empty)));
// The toggle writes straight back to the peer; advanced stats have a runtime cost.
peer.TrafficStatsEnabled = EditorGUILayout.Toggle(new GUIContent("Advanced", "Enable or disable traffic Statistics for Photon Peer"), peer.TrafficStatsEnabled);
if (peer.TrafficStatsEnabled)
{
// Clamp to 1s to avoid division by zero in the per-second averages below.
long elapsedSeconds = peer.TrafficStatsElapsedMs / 1000;
if (elapsedSeconds == 0)
{
elapsedSeconds = 1;
}
GUILayout.Label(string.Format("Time elapsed: {0} seconds", elapsedSeconds));
this.DisplayTrafficStatsGameLevel(peer.TrafficStatsGameLevel, elapsedSeconds);
TrafficStats trafficStats = peer.TrafficStatsIncoming;
GUILayout.Label(string.Format("Protocol: {0} Package Header Size={1}B", peer.TransportProtocol, trafficStats.PackageHeaderSize));
EditorGUILayout.LabelField("Commands/Packets Incoming", EditorStyles.boldLabel);
this.DisplayTrafficStats(/*peer, */trafficStats, elapsedSeconds);
EditorGUILayout.LabelField("Commands/Packets Outgoing", EditorStyles.boldLabel);
trafficStats = peer.TrafficStatsOutgoing;
this.DisplayTrafficStats(/*peer, */trafficStats, elapsedSeconds);
if (GUILayout.Button("Reset"))
{
peer.TrafficStatsReset();
}
}
}
}
/// <summary>
/// Draws per-command-type statistics for one traffic direction:
/// #=total count, a#=count/s, s=total size, as=size/s.
/// </summary>
/// <param name="trafficStats">Incoming or outgoing traffic counters.</param>
/// <param name="elapsedSeconds">Elapsed measurement time, already clamped to >= 1 by the caller.</param>
private void DisplayTrafficStats(/*PhotonPeer peer, */TrafficStats trafficStats, long elapsedSeconds)
{
GUILayout.Label(string.Format("\tControl Commands: #={0} a#={1}/s s={2} as={3}", trafficStats.ControlCommandCount, trafficStats.ControlCommandCount/elapsedSeconds, this.FormatSize(trafficStats.ControlCommandBytes, ti:string.Empty), this.FormatSize(trafficStats.ControlCommandBytes/elapsedSeconds)));
GUILayout.Label(string.Format("\tFragment Commands: #={0} a#={1}/s s={2} as={3}", trafficStats.FragmentCommandCount, trafficStats.FragmentCommandCount/elapsedSeconds, this.FormatSize(trafficStats.FragmentCommandBytes, ti:string.Empty), this.FormatSize(trafficStats.FragmentCommandBytes/elapsedSeconds)));
// Fixed: the "as=" column previously averaged ReliableCommandCount instead of ReliableCommandBytes (copy/paste bug).
GUILayout.Label(string.Format("\tReliable Commands: #={0} a#={1}/s s={2} as={3}", trafficStats.ReliableCommandCount, trafficStats.ReliableCommandCount/elapsedSeconds, this.FormatSize(trafficStats.ReliableCommandBytes, ti:string.Empty), this.FormatSize(trafficStats.ReliableCommandBytes/elapsedSeconds)));
GUILayout.Label(string.Format("\tUnreliable Commands: #={0} a#={1}/s s={2} as={3}", trafficStats.UnreliableCommandCount, trafficStats.UnreliableCommandCount/elapsedSeconds, this.FormatSize(trafficStats.UnreliableCommandBytes, ti:string.Empty), this.FormatSize(trafficStats.UnreliableCommandBytes/elapsedSeconds)));
GUILayout.Label(string.Format("\tTotal Commands: #={0} a#={1}/s s={2} as={3}", trafficStats.TotalCommandCount, trafficStats.TotalCommandCount/elapsedSeconds, this.FormatSize(trafficStats.TotalCommandBytes, ti:string.Empty), this.FormatSize(trafficStats.TotalCommandBytes/elapsedSeconds)));
GUILayout.Label(string.Format("\tTotal Packets: #={0} a#={1}/s s={2} as={3}", trafficStats.TotalPacketCount, trafficStats.TotalPacketCount/elapsedSeconds, this.FormatSize(trafficStats.TotalPacketBytes, ti:string.Empty), this.FormatSize(trafficStats.TotalPacketBytes/elapsedSeconds)));
GUILayout.Label(string.Format("\tTotal Commands in Packets: {0}", trafficStats.TotalCommandsInPackets));
//GUILayout.Label(string.Format("\t{0}ms since last ACK", peer.ConnectionTime - trafficStats.TimestampOfLastAck));
//GUILayout.Label(string.Format("\t{0} ms since last reliable Command", peer.ConnectionTime - trafficStats.TimestampOfLastReliableCommand));
}
/// <summary>
/// Draws game-level traffic statistics: worst-case send/dispatch gaps, longest
/// event/operation callbacks, and total/average message counts.
/// </summary>
/// <param name="gls">Game-level statistics snapshot from the peer.</param>
/// <param name="elapsedSeconds">Elapsed measurement time, already clamped to >= 1 by the caller.</param>
private void DisplayTrafficStatsGameLevel(TrafficStatsGameLevel gls, long elapsedSeconds)
{
GUILayout.Label("In Game", EditorStyles.boldLabel);
GUILayout.Label(string.Format("\tmax. delta between\n\t\tsend: {0,4}ms \n\t\tdispatch: {1,4}ms \n\tlongest dispatch for: \n\t\tev({3}):{2,3}ms \n\t\top({5}):{4,3}ms",
gls.LongestDeltaBetweenSending,
gls.LongestDeltaBetweenDispatching,
gls.LongestEventCallback,
gls.LongestEventCallbackCode,
gls.LongestOpResponseCallback,
gls.LongestOpResponseCallbackOpCode));
GUILayout.Label("\tMessages", EditorStyles.boldLabel);
GUILayout.Label(string.Format("\t\tTotal: Out {0,4}msg | In {1,4}msg | Sum {2,4}msg",
gls.TotalOutgoingMessageCount,
gls.TotalIncomingMessageCount,
gls.TotalMessageCount));
GUILayout.Label(string.Format("\t\tAverage: Out {0,4}msg/s | In {1,4}msg/s | Sum {2,4}msg/s",
gls.TotalOutgoingMessageCount / elapsedSeconds,
gls.TotalIncomingMessageCount / elapsedSeconds,
gls.TotalMessageCount / elapsedSeconds));
}
/// <summary>Draws a two-column "prefix: value" row using the inspector's prefix-label layout.</summary>
/// <param name="prefix">Left-hand label.</param>
/// <param name="text">Right-hand value text.</param>
private void DrawLabel(string prefix, string text)
{
EditorGUILayout.BeginHorizontal();
EditorGUILayout.PrefixLabel(prefix);
EditorGUILayout.LabelField(text);
EditorGUILayout.EndHorizontal();
}
/// <summary>
/// Draws the "App Settings" foldout: Voice AppId (with a Dashboard shortcut), server and
/// protocol options, plus a box showing the persisted "best region" preference.
/// </summary>
protected virtual void DisplayAppSettings()
{
this.connection.ShowSettings = EditorGUILayout.Foldout(this.connection.ShowSettings, new GUIContent("App Settings", "Settings to be used by this voice connection"));
if (this.connection.ShowSettings)
{
EditorGUI.indentLevel++;
EditorGUILayout.BeginHorizontal();
SerializedProperty sP = this.settingsSp.FindPropertyRelative("AppIdVoice");
EditorGUILayout.PropertyField(sP);
string appId = sP.stringValue;
// Link to the app-specific dashboard page when an AppId is set, otherwise to the generic one.
string url = "https://dashboard.photonengine.com/en-US/PublicCloud";
if (!string.IsNullOrEmpty(appId))
{
url = string.Concat("https://dashboard.photonengine.com/en-US/App/Manage/", appId);
}
if (GUILayout.Button("Dashboard", EditorStyles.miniButton, GUILayout.Width(70)))
{
Application.OpenURL(url);
}
EditorGUILayout.EndHorizontal();
EditorGUILayout.PropertyField(this.settingsSp.FindPropertyRelative("AppVersion"));
EditorGUILayout.PropertyField(this.settingsSp.FindPropertyRelative("UseNameServer"), new GUIContent("Use Name Server", "Photon Cloud requires this checked.\nUncheck for Photon Server SDK (OnPremises)."));
EditorGUILayout.PropertyField(this.settingsSp.FindPropertyRelative("FixedRegion"), new GUIContent("Fixed Region", "Photon Cloud setting, needs a Name Server.\nDefine one region to always connect to.\nLeave empty to use the best region from a server-side region list."));
EditorGUILayout.PropertyField(this.settingsSp.FindPropertyRelative("Server"), new GUIContent("Server", "Typically empty for Photon Cloud.\nFor Photon Server, enter your host name or IP. Also uncheck \"Use Name Server\" for older Photon Server versions."));
EditorGUILayout.PropertyField(this.settingsSp.FindPropertyRelative("Port"), new GUIContent("Port", "Use 0 for Photon Cloud.\nOnPremise uses 5055 for UDP and 4530 for TCP."));
EditorGUILayout.PropertyField(this.settingsSp.FindPropertyRelative("ProxyServer"), new GUIContent("Proxy Server", "HTTP Proxy Server for WebSocket connection. See LoadBalancingClient.ProxyServerAddress for options."));
EditorGUILayout.PropertyField(this.settingsSp.FindPropertyRelative("Protocol"), new GUIContent("Protocol", "Use UDP where possible.\nWSS works on WebGL and Xbox exports.\nDefine WEBSOCKET for use on other platforms."));
EditorGUILayout.PropertyField(this.settingsSp.FindPropertyRelative("EnableProtocolFallback"), new GUIContent("Protocol Fallback", "Automatically try another network protocol, if initial connect fails.\nWill use default Name Server ports."));
EditorGUILayout.PropertyField(this.settingsSp.FindPropertyRelative("EnableLobbyStatistics"), new GUIContent("Lobby Statistics", "When using multiple room lists (lobbies), the server can send info about their usage."));
EditorGUILayout.PropertyField(this.settingsSp.FindPropertyRelative("NetworkLogging"), new GUIContent("Network Logging", "Log level for the Photon libraries."));
EditorGUI.indentLevel--;
#region Best Region Box
GUIStyle verticalBoxStyle = new GUIStyle("HelpBox") { padding = new RectOffset(6, 6, 6, 6) };
EditorGUILayout.BeginVertical(verticalBoxStyle);
string prefLabel;
const string notAvailableLabel = "n/a";
// The preference is stored as "region;ping;..." - show "n/a" when it is absent or malformed.
string bestRegionSummaryInPrefs = this.connection.BestRegionSummaryInPreferences;
if (!string.IsNullOrEmpty(bestRegionSummaryInPrefs))
{
string[] regionsPrefsList = bestRegionSummaryInPrefs.Split(';');
if (regionsPrefsList.Length < 2 || string.IsNullOrEmpty(regionsPrefsList[0]) || string.IsNullOrEmpty(regionsPrefsList[1]))
{
prefLabel = notAvailableLabel;
}
else
{
prefLabel = string.Format("'{0}' ping:{1}ms ", regionsPrefsList[0], regionsPrefsList[1]);
}
}
else
{
prefLabel = notAvailableLabel;
}
EditorGUILayout.LabelField(new GUIContent(string.Concat("Best Region Preference: ", prefLabel), "Best region is used if Fixed Region is empty."));
EditorGUILayout.BeginHorizontal();
Rect resetRect = EditorGUILayout.GetControlRect(GUILayout.MinWidth(64));
Rect editRect = EditorGUILayout.GetControlRect(GUILayout.MinWidth(64));
if (GUI.Button(resetRect, "Reset", EditorStyles.miniButton))
{
this.connection.BestRegionSummaryInPreferences = null;
}
if (!string.IsNullOrEmpty(appId) && GUI.Button(editRect, "Edit Regions WhiteList", EditorStyles.miniButton))
{
url = string.Concat("https://dashboard.photonengine.com/en-US/App/RegionsWhitelistEdit/", appId);
Application.OpenURL(url);
}
EditorGUILayout.EndHorizontal();
EditorGUILayout.EndVertical();
#endregion Best Region Box
}
}
/// <summary>Draws the inspector header; the base implementation shows only the version foldout.</summary>
protected virtual void ShowHeader() => this.ShowAssetVersionsFoldout();
/// <summary>Draws one label per resolved version string (Voice, Voice API, Realtime/Unity library).</summary>
protected virtual void ShowAssetVersions()
{
EditorGUILayout.LabelField(string.Format("Photon Voice: {0}", this.GetVersionString(this.photonVoiceVersion)));
EditorGUILayout.LabelField(string.Format("Photon Voice API: {0}", this.GetVersionString(this.photonVoiceApiVersion)));
EditorGUILayout.LabelField(string.Format("Photon Realtime and Unity Library: {0}", this.GetVersionString(this.photonLibraryVersion)));
}
/// <summary>Wraps <see cref="ShowAssetVersions"/> in the collapsible "Asset Version Info" foldout.</summary>
private void ShowAssetVersionsFoldout()
{
EditorGUI.indentLevel++;
this.versionFoldout = EditorGUILayout.Foldout(this.versionFoldout, "Asset Version Info");
if (this.versionFoldout)
{
EditorGUI.indentLevel++;
EditorGUILayout.BeginVertical();
this.ShowAssetVersions();
EditorGUILayout.EndVertical();
EditorGUI.indentLevel--;
}
EditorGUI.indentLevel--;
}
/// <summary>Returns the given version string, or the "not available" placeholder when it is null or empty.</summary>
/// <param name="versionString">Raw version string; may be null or empty.</param>
/// <returns>A non-empty string suitable for display.</returns>
protected string GetVersionString(string versionString)
{
if (string.IsNullOrEmpty(versionString))
{
return notAvailable;
}
return versionString;
}
/// <summary>
/// Formats a byte count with one decimal and the largest fitting binary unit (KB..TB).
/// Values below 1024 keep the caller-supplied unit <paramref name="u"/>.
/// </summary>
/// <param name="bytes">Raw byte count (or bytes-per-second when <paramref name="ti"/> is "/s").</param>
/// <param name="u">Unit suffix used when no binary prefix applies; defaults to "B".</param>
/// <param name="ti">Time-interval suffix appended after the unit; defaults to "/s".</param>
/// <returns>For example "1.5MB/s" or "512.0B".</returns>
private string FormatSize(float bytes, string u = "B", string ti = "/s")
{
const long kb = 1024;
// Walk up the binary prefixes while the value still reaches the next threshold.
string[] prefixes = { "KB", "MB", "GB", "TB" };
long divisor = 1;
string unit = u;
long threshold = kb;
for (int i = 0; i < prefixes.Length && bytes >= threshold; i++)
{
divisor = threshold;
unit = prefixes[i];
threshold *= 1024;
}
return string.Format("{0:0.0}{1}{2}", bytes / divisor, unit, ti);
}
}
}

View File

@@ -0,0 +1,13 @@
fileFormatVersion: 2
guid: 9496980b6639f4cb68abaed8fdda658e
timeCreated: 1537180258
licenseType: Store
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,192 @@
#if UNITY_STANDALONE_WIN || UNITY_STANDALONE_OSX || UNITY_IOS || UNITY_ANDROID || UNITY_WSA
#define WEBRTC_AUDIO_DSP_SUPPORTED_PLATFORMS
#endif
#if UNITY_EDITOR_WIN || UNITY_EDITOR_OSX
#define WEBRTC_AUDIO_DSP_SUPPORTED_EDITOR
#endif
using UnityEngine;
namespace Photon.Voice.Unity.Editor
{
using UnityEditor;
using Unity;
/// <summary>
/// Custom inspector for <see cref="WebRtcAudioDsp"/>. Warns about unsupported target
/// platforms and conflicting DSP settings (duplicate AEC/AGC/NS/VAD between this
/// component, the Recorder, and the native Android microphone plugin), and exposes
/// the DSP toggles either live (play mode) or via serialized properties (edit mode).
/// </summary>
[CustomEditor(typeof(WebRtcAudioDsp))]
public class WebRtcAudioDspEditor : Editor
{
// Inspected component and the Recorder expected on the same GameObject.
// NOTE(review): this.recorder is null-checked once (SourceType warning) but later
// dereferenced unconditionally in the AEC branches - presumably WebRtcAudioDsp
// guarantees a Recorder is present; confirm against the component's requirements.
private WebRtcAudioDsp processor;
private Recorder recorder;
private SerializedProperty aecSp;
private SerializedProperty aecHighPassSp;
private SerializedProperty agcSp;
private SerializedProperty agcCompressionGainSp;
private SerializedProperty vadSp;
private SerializedProperty highPassSp;
private SerializedProperty bypassSp;
private SerializedProperty noiseSuppressionSp;
private SerializedProperty reverseStreamDelayMsSp;
// Caches the target component, its sibling Recorder, and all serialized properties.
private void OnEnable()
{
this.processor = this.target as WebRtcAudioDsp;
this.recorder = this.processor.GetComponent<Recorder>();
this.aecSp = this.serializedObject.FindProperty("aec");
this.aecHighPassSp = this.serializedObject.FindProperty("aecHighPass");
this.agcSp = this.serializedObject.FindProperty("agc");
this.agcCompressionGainSp = this.serializedObject.FindProperty("agcCompressionGain");
this.vadSp = this.serializedObject.FindProperty("vad");
this.highPassSp = this.serializedObject.FindProperty("highPass");
this.bypassSp = this.serializedObject.FindProperty("bypass");
this.noiseSuppressionSp = this.serializedObject.FindProperty("noiseSuppression");
this.reverseStreamDelayMsSp = this.serializedObject.FindProperty("reverseStreamDelayMs");
}
public override void OnInspectorGUI()
{
this.serializedObject.UpdateIfRequiredOrScript();
// Platform support warnings (skipped for prefabs, which have no active build context).
if (!PhotonVoiceEditorUtils.IsPrefab(this.processor.gameObject))
{
#if WEBRTC_AUDIO_DSP_SUPPORTED_PLATFORMS
#elif WEBRTC_AUDIO_DSP_SUPPORTED_EDITOR
string message = string.Format("WebRtcAudioDsp is not supported on this target platform {0}. The component will be disabled in build.", EditorUserBuildSettings.activeBuildTarget);
EditorGUILayout.HelpBox(message, MessageType.Warning);
#else
string message = string.Format("WebRtcAudioDsp is not supported on this target platform {0}. This component is disabled.", EditorUserBuildSettings.activeBuildTarget);
EditorGUILayout.HelpBox(message, MessageType.Warning);
#endif
}
if (!this.processor.isActiveAndEnabled && this.processor.AecOnlyWhenEnabled && this.aecSp.boolValue)
{
EditorGUILayout.HelpBox("WebRtcAudioDsp is not enabled, AEC will not be used.", MessageType.Warning);
}
if (this.recorder != null && this.recorder.SourceType != Recorder.InputSourceType.Microphone)
{
EditorGUILayout.HelpBox("WebRtcAudioDsp is better suited to be used with Microphone as Recorder Input Source Type.", MessageType.Warning);
}
VoiceLogger.ExposeLogLevel(this.serializedObject, this.processor);
bool bypassed;
EditorGUI.BeginChangeCheck();
// Play mode edits the live component; edit mode edits serialized properties.
bool isInSceneInPlayMode = PhotonVoiceEditorUtils.IsInTheSceneInPlayMode(this.processor.gameObject);
if (isInSceneInPlayMode)
{
this.processor.Bypass = EditorGUILayout.Toggle(new GUIContent("Bypass", "Bypass WebRTC Audio DSP"), this.processor.Bypass);
bypassed = this.processor.Bypass;
}
else
{
EditorGUILayout.PropertyField(this.bypassSp, new GUIContent("Bypass", "Bypass WebRTC Audio DSP"));
bypassed = this.bypassSp.boolValue;
}
#if UNITY_ANDROID
// On Android, inspect the Recorder's native mic settings to warn about duplicate DSP stages.
SerializedObject serializedObject = new SerializedObject(this.recorder);
SerializedProperty serializedProperty = serializedObject.FindProperty("nativeAndroidMicrophoneSettings");
#endif
if (!bypassed)
{
if (isInSceneInPlayMode)
{
this.processor.AEC = EditorGUILayout.Toggle(new GUIContent("AEC", "Acoustic Echo Cancellation"), this.processor.AEC);
if (this.processor.AEC)
{
if (this.recorder.SourceType == Recorder.InputSourceType.Microphone && this.recorder.MicrophoneType == Recorder.MicType.Photon)
{
#if UNITY_ANDROID
if (serializedProperty.FindPropertyRelative("AcousticEchoCancellation").boolValue)
{
EditorGUILayout.HelpBox("You have enabled AEC here and are using a Photon Mic as input on the Recorder, which might add its own echo cancellation. Please use only one AEC algorithm.", MessageType.Warning);
}
#else
EditorGUILayout.HelpBox("You have enabled AEC here and are using a Photon Mic as input on the Recorder, which might add its own echo cancellation. Please use only one AEC algorithm.", MessageType.Warning);
#endif
}
this.processor.ReverseStreamDelayMs = EditorGUILayout.IntField(new GUIContent("ReverseStreamDelayMs", "Reverse stream delay (hint for AEC) in Milliseconds"), this.processor.ReverseStreamDelayMs);
this.processor.AecHighPass = EditorGUILayout.Toggle(new GUIContent("AEC High Pass"), this.processor.AecHighPass);
}
this.processor.AGC = EditorGUILayout.Toggle(new GUIContent("AGC", "Automatic Gain Control"), this.processor.AGC);
if (this.processor.AGC)
{
#if UNITY_ANDROID
if (serializedProperty.FindPropertyRelative("AutomaticGainControl").boolValue)
{
EditorGUILayout.HelpBox("You have enabled AGC here and are using a AGC from native plugin (Photon microphone type). Please use only one AGC algorithm.", MessageType.Warning);
}
#endif
this.processor.AgcCompressionGain = EditorGUILayout.IntField(new GUIContent("AGC Compression Gain"), this.processor.AgcCompressionGain);
}
if (this.processor.VAD && this.recorder.VoiceDetection)
{
EditorGUILayout.HelpBox("You have enabled VAD here and in the associated Recorder. Please use only one Voice Detection algorithm.", MessageType.Warning);
}
this.processor.VAD = EditorGUILayout.Toggle(new GUIContent("VAD", "Voice Activity Detection"), this.processor.VAD);
this.processor.HighPass = EditorGUILayout.Toggle(new GUIContent("HighPass", "High Pass Filter"), this.processor.HighPass);
this.processor.NoiseSuppression = EditorGUILayout.Toggle(new GUIContent("NoiseSuppression", "Noise Suppression"), this.processor.NoiseSuppression);
if (this.processor.NoiseSuppression)
{
#if UNITY_ANDROID
if (serializedProperty.FindPropertyRelative("NoiseSuppression").boolValue)
{
EditorGUILayout.HelpBox("You have enabled NS here and are using a NS from native plugin (Photon microphone type). Please use only one NS algorithm.", MessageType.Warning);
}
#endif
}
}
else
{
// Edit-mode mirror of the play-mode branch above, using serialized properties.
EditorGUILayout.PropertyField(this.aecSp, new GUIContent("AEC", "Acoustic Echo Cancellation"));
if (this.aecSp.boolValue)
{
if (this.recorder.SourceType == Recorder.InputSourceType.Microphone && this.recorder.MicrophoneType == Recorder.MicType.Photon)
{
#if UNITY_ANDROID
if (serializedProperty.FindPropertyRelative("AcousticEchoCancellation").boolValue)
{
EditorGUILayout.HelpBox("You have enabled AEC here and are using AEC from native plugin (Photon microphone type). Please use only one AEC algorithm.", MessageType.Warning);
}
#else
EditorGUILayout.HelpBox("You have enabled AEC here and are using a Photon Mic as input on the Recorder, which might add its own echo cancellation. Please use only one AEC algorithm.", MessageType.Warning);
#endif
}
EditorGUILayout.PropertyField(this.reverseStreamDelayMsSp,
new GUIContent("ReverseStreamDelayMs", "Reverse stream delay (hint for AEC) in Milliseconds"));
EditorGUILayout.PropertyField(this.aecHighPassSp, new GUIContent("AEC High Pass"));
}
EditorGUILayout.PropertyField(this.agcSp, new GUIContent("AGC", "Automatic Gain Control"));
if (this.agcSp.boolValue)
{
#if UNITY_ANDROID
if (serializedProperty.FindPropertyRelative("AutomaticGainControl").boolValue)
{
EditorGUILayout.HelpBox("You have enabled AGC here and are using a AGC from native plugin (Photon microphone type). Please use only one AGC algorithm.", MessageType.Warning);
}
#endif
EditorGUILayout.PropertyField(this.agcCompressionGainSp, new GUIContent("AGC Compression Gain"));
}
if (this.vadSp.boolValue && this.recorder.VoiceDetection)
{
EditorGUILayout.HelpBox("You have enabled VAD here and in the associated Recorder. Please use only one Voice Detection algorithm.", MessageType.Warning);
}
EditorGUILayout.PropertyField(this.vadSp, new GUIContent("VAD", "Voice Activity Detection"));
EditorGUILayout.PropertyField(this.highPassSp, new GUIContent("HighPass", "High Pass Filter"));
EditorGUILayout.PropertyField(this.noiseSuppressionSp, new GUIContent("NoiseSuppression", "Noise Suppression"));
if (this.noiseSuppressionSp.boolValue)
{
#if UNITY_ANDROID
if (serializedProperty.FindPropertyRelative("NoiseSuppression").boolValue)
{
EditorGUILayout.HelpBox("You have enabled NS here and are using a NS from native plugin (Photon microphone type). Please use only one NS algorithm.", MessageType.Warning);
}
#endif
}
}
}
if (EditorGUI.EndChangeCheck())
{
this.serializedObject.ApplyModifiedProperties();
}
}
}
}

View File

@@ -0,0 +1,13 @@
fileFormatVersion: 2
guid: 82948294db79e4c318c9b8815781f0d3
timeCreated: 1538475591
licenseType: Store
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,15 @@
namespace Photon.Voice.Unity
{
using ExitGames.Client.Photon;
/// <summary>Implemented by components that expose a configurable Photon Voice logger.</summary>
public interface ILoggable
{
/// <summary>Log level used by this component's logger.</summary>
DebugLevel LogLevel { get; set; }
/// <summary>Logger instance used by this component.</summary>
VoiceLogger Logger { get; }
}
/// <summary>A loggable whose level normally follows a global setting but can opt out of it.</summary>
public interface ILoggableDependent : ILoggable
{
/// <summary>When true, this component keeps its own LogLevel instead of the global one.</summary>
bool IgnoreGlobalLogLevel { get; set; }
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: ae6bdefe0d01f8741bb6558cd1fd9142
timeCreated: 1540904390
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: dd940d333aeeaa048842e87b9b259188, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,10 @@
namespace Photon.Voice.Unity
{
/// <summary>
/// Serializable toggles for the native Android microphone plugin's built-in audio
/// processing stages. Read by editor code via the Recorder's
/// "nativeAndroidMicrophoneSettings" serialized field.
/// </summary>
[System.Serializable]
public struct NativeAndroidMicrophoneSettings
{
public bool AcousticEchoCancellation;
public bool AutomaticGainControl;
public bool NoiseSuppression;
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: d5f2e4792d8cf0344bc658a7fe562eb4
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: dd940d333aeeaa048842e87b9b259188, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: d2da83cfaa45e4cafa6a70339d5e140d
folderAsset: yes
timeCreated: 1531141214
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: ecb985625034aef4681a3a80b40cc5ed
folderAsset: yes
timeCreated: 1537958667
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,14 @@
{
"name": "PhotonVoice.PUN.Editor",
"references": [
"PhotonVoice.PUN",
"PhotonVoice.Editor",
"PhotonVoice",
"PhotonRealtime",
"PhotonUnityNetworking"
],
"includePlatforms": [
"Editor"
],
"excludePlatforms": []
}

View File

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: bc45562f375e105438d27eded7d73e12
AssemblyDefinitionImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,73 @@
using System;

namespace Photon.Voice.PUN.Editor
{
    using Unity.Editor;
    using UnityEditor;
    using UnityEngine;
    using Pun;

    /// <summary>
    /// Custom inspector for <see cref="PhotonVoiceNetwork"/>: extends the base
    /// VoiceConnection inspector with PUN-specific toggles (auto connect/join,
    /// auto leave/disconnect, offline mode, PUN app settings and auth values).
    /// </summary>
    [CustomEditor(typeof(PhotonVoiceNetwork))]
    public class PhotonVoiceNetworkEditor : VoiceConnectionEditor
    {
        // Serialized properties mirrored from PhotonVoiceNetwork fields.
        private SerializedProperty spAutoConnectAndJoin;
        private SerializedProperty spAutoLeaveAndDisconnect;
        private SerializedProperty spUsePunAppSettings;
        private SerializedProperty spUsePunAuthValues;
        private SerializedProperty spWorkInOfflineMode;

        protected override void OnEnable()
        {
            base.OnEnable();
            // Property names must match the field names declared on PhotonVoiceNetwork.
            this.spAutoConnectAndJoin = this.serializedObject.FindProperty("AutoConnectAndJoin");
            this.spAutoLeaveAndDisconnect = this.serializedObject.FindProperty("AutoLeaveAndDisconnect");
            this.spUsePunAppSettings = this.serializedObject.FindProperty("usePunAppSettings");
            this.spUsePunAuthValues = this.serializedObject.FindProperty("usePunAuthValues");
            this.spWorkInOfflineMode = this.serializedObject.FindProperty("WorkInOfflineMode");
        }

        protected override void DisplayAppSettings()
        {
            EditorGUILayout.BeginHorizontal();
            EditorGUILayout.PropertyField(this.spUsePunAppSettings, new GUIContent("Use PUN's App Settings", "Use App Settings From PUN's PhotonServerSettings"));
            // Shortcut button to select & ping the PhotonServerSettings asset.
            if (GUILayout.Button("PhotonServerSettings", EditorStyles.miniButton, GUILayout.Width(120)))
            {
                Selection.objects = new Object[] { PhotonNetwork.PhotonServerSettings };
                EditorGUIUtility.PingObject(PhotonNetwork.PhotonServerSettings);
            }
            EditorGUILayout.EndHorizontal();
            // Only show the standalone app settings editor when not reusing PUN's.
            if (!this.spUsePunAppSettings.boolValue)
            {
                base.DisplayAppSettings();
            }
            EditorGUILayout.PropertyField(this.spUsePunAuthValues, new GUIContent("Use PUN's Auth Values", "Use the same Authentication Values From PUN client"));
        }

        protected override void ShowHeader()
        {
            base.ShowHeader();
            EditorGUI.BeginChangeCheck();
            EditorGUILayout.PropertyField(this.spAutoConnectAndJoin, new GUIContent("Auto Connect And Join", "Auto connect voice client and join a voice room when PUN client is joined to a PUN room"));
            EditorGUILayout.PropertyField(this.spAutoLeaveAndDisconnect, new GUIContent("Auto Leave And Disconnect", "Auto disconnect voice client when PUN client is not joined to a PUN room"));
            EditorGUILayout.PropertyField(this.spWorkInOfflineMode, new GUIContent("Work In Offline Mode", "Whether or not Photon Voice client should follow PUN client if the latter is in offline mode."));
            if (EditorGUI.EndChangeCheck())
            {
                this.serializedObject.ApplyModifiedProperties();
            }
        }

        protected override void ShowAssetVersions()
        {
            base.ShowAssetVersions();
            // Compare the PUN version bundled with Voice against the one actually imported.
            string bundledPunVersion = this.GetVersionString(this.punChangelogVersion).TrimStart('v');
            bool versionsDiffer = !PhotonNetwork.PunVersion.Equals(bundledPunVersion, StringComparison.OrdinalIgnoreCase);
            string label = versionsDiffer
                ? string.Format("PUN2, Inside Voice: {0} != Imported Separately: {1}", bundledPunVersion, PhotonNetwork.PunVersion)
                : string.Format("PUN2: {0}", bundledPunVersion);
            EditorGUILayout.LabelField(label);
        }
    }
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: fa70a22b498b27f41a0bb50f9a287764
timeCreated: 1537357720
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,11 @@
{
"name": "PhotonVoice.PUN",
"references": [
"PhotonVoice",
"PhotonUnityNetworking",
"PhotonRealtime",
"PhotonVoice.API"
],
"includePlatforms": [],
"excludePlatforms": []
}

View File

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: d62e4537d6f8ca64c8d2affe565a5cd1
AssemblyDefinitionImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,598 @@
// ----------------------------------------------------------------------------
// <copyright file="PhotonVoiceNetwork.cs" company="Exit Games GmbH">
// Photon Voice - Copyright (C) 2018 Exit Games GmbH
// </copyright>
// <summary>
// This class can be used to automatically join/leave Voice rooms when
// Photon Unity Networking (PUN) joins or leaves its rooms. The Voice room
// will use the same name as PUN, but with a "_voice_" postfix.
// It also sets a custom PUN Speaker factory to find the Speaker
// component for a character's voice. For this to work, the voice's UserData
// must be set to the character's PhotonView ID.
// (see "PhotonVoiceView.cs")
// </summary>
// <author>developer@photonengine.com</author>
// ----------------------------------------------------------------------------

using UnityEngine;
using Photon.Pun;
using Photon.Realtime;
using Photon.Voice.Unity;

namespace Photon.Voice.PUN
{
    /// <summary>
    /// This class can be used to automatically sync client states between PUN and Voice.
    /// It also sets a custom PUN Speaker factory to find the Speaker component for a character's voice.
    /// For this to work attach a <see cref="PhotonVoiceView"/> next to the <see cref="PhotonView"/> of your player's prefab.
    /// </summary>
    [DisallowMultipleComponent]
    [AddComponentMenu("Photon Voice/Photon Voice Network")]
    [HelpURL("https://doc.photonengine.com/en-us/voice/v2/getting-started/voice-for-pun")]
    public class PhotonVoiceNetwork : VoiceConnection
    {
        #region Public Fields

        /// <summary> Suffix for voice room names appended to PUN room names. </summary>
        public const string VoiceRoomNameSuffix = "_voice_";
        /// <summary> Auto connect voice client and join a voice room when PUN client is joined to a PUN room </summary>
        public bool AutoConnectAndJoin = true;
        /// <summary> Auto disconnect voice client when PUN client is not joined to a PUN room </summary>
        public bool AutoLeaveAndDisconnect = true;
        /// <summary> Whether or not Photon Voice client should follow PUN client if the latter is in offline mode. </summary>
        public bool WorkInOfflineMode = true;

        #endregion

        #region Private Fields

        // Voice rooms are hidden from the lobby; only the room name varies per join.
        private EnterRoomParams voiceRoomParams = new EnterRoomParams
        {
            RoomOptions = new RoomOptions { IsVisible = false }
        };

        // Flags distinguishing user-initiated connect/disconnect calls from the
        // automatic ones issued internally while following the PUN client state.
        private bool clientCalledConnectAndJoin;
        private bool clientCalledDisconnect;
        private bool clientCalledConnectOnly;
        private bool internalDisconnect;
        private bool internalConnect;

        private static object instanceLock = new object();
        private static PhotonVoiceNetwork instance;
        private static bool instantiated;

        [SerializeField]
        private bool usePunAppSettings = true;

        [SerializeField]
        private bool usePunAuthValues = true;

        #endregion

        #region Properties

        /// <summary>
        /// Singleton instance for PhotonVoiceNetwork
        /// </summary>
        public static PhotonVoiceNetwork Instance
        {
            get
            {
                lock (instanceLock)
                {
                    if (AppQuits)
                    {
                        // FIX: guard against null instance here; during application quit
                        // the instance may already be destroyed/reset, and dereferencing
                        // it for logging caused a NullReferenceException.
                        if (instance != null && instance.Logger.IsWarningEnabled)
                        {
                            instance.Logger.LogWarning("PhotonVoiceNetwork Instance already destroyed on application quit. Won't create again - returning null.");
                        }
                        return null;
                    }
                    if (!instantiated)
                    {
                        PhotonVoiceNetwork[] objects = FindObjectsOfType<PhotonVoiceNetwork>();
                        if (objects == null || objects.Length < 1)
                        {
                            // No instance in the scene: create one on the fly.
                            GameObject singleton = new GameObject();
                            singleton.name = "PhotonVoiceNetwork singleton";
                            instance = singleton.AddComponent<PhotonVoiceNetwork>();
                            if (instance.Logger.IsInfoEnabled)
                            {
                                instance.Logger.LogInfo("An instance of PhotonVoiceNetwork was automatically created in the scene.");
                            }
                        }
                        else if (objects.Length >= 1)
                        {
                            // Keep the first instance found, destroy any duplicates.
                            instance = objects[0];
                            if (objects.Length > 1)
                            {
                                if (instance.Logger.IsErrorEnabled)
                                {
                                    instance.Logger.LogError("{0} PhotonVoiceNetwork instances found. Using first one only and destroying all the other extra instances.", objects.Length);
                                }
                                for (int i = 1; i < objects.Length; i++)
                                {
                                    Destroy(objects[i]);
                                }
                            }
                        }
                        instantiated = true;
                        if (instance.Logger.IsDebugEnabled)
                        {
                            instance.Logger.LogDebug("PhotonVoiceNetwork singleton instance is now set.");
                        }
                    }
                    return instance;
                }
            }
            set
            {
                lock (instanceLock)
                {
                    if (value == null)
                    {
                        if (instantiated)
                        {
                            if (instance.Logger.IsErrorEnabled)
                            {
                                instance.Logger.LogError("Cannot set PhotonVoiceNetwork.Instance to null.");
                            }
                        }
                        else
                        {
                            Debug.LogError("Cannot set PhotonVoiceNetwork.Instance to null.");
                        }
                        return;
                    }
                    if (instantiated)
                    {
                        // An instance is already registered; reject and destroy the extra one.
                        if (instance.GetInstanceID() != value.GetInstanceID())
                        {
                            if (instance.Logger.IsErrorEnabled)
                            {
                                instance.Logger.LogError("An instance of PhotonVoiceNetwork is already set. Destroying extra instance.");
                            }
                            Destroy(value);
                        }
                        return;
                    }
                    instance = value;
                    instantiated = true;
                    if (instance.Logger.IsDebugEnabled)
                    {
                        instance.Logger.LogDebug("PhotonVoiceNetwork singleton instance is now set.");
                    }
                }
            }
        }

        /// <summary>
        /// Whether or not to use the same PhotonNetwork.AuthValues in PhotonVoiceNetwork.Instance.Client.AuthValues.
        /// This means that the same UserID will be used in both clients.
        /// If custom authentication is used and setup in PUN app, the same configuration should be done for the Voice app.
        /// </summary>
        public bool UsePunAuthValues
        {
            get
            {
                return this.usePunAuthValues;
            }
            set
            {
                this.usePunAuthValues = value;
            }
        }

        #endregion

        #region Public Methods

        /// <summary>
        /// Connect voice client to Photon servers and join a Voice room
        /// </summary>
        /// <returns>If true, connection command send from client</returns>
        public bool ConnectAndJoinRoom()
        {
            if (!PhotonNetwork.InRoom)
            {
                if (this.Logger.IsErrorEnabled)
                {
                    this.Logger.LogError("Cannot connect and join if PUN is not joined.");
                }
                return false;
            }
            if (this.Connect())
            {
                this.clientCalledConnectAndJoin = true;
                this.clientCalledDisconnect = false;
                return true;
            }
            if (this.Logger.IsErrorEnabled)
            {
                this.Logger.LogError("Connecting to server failed.");
            }
            return false;
        }

        /// <summary>
        /// Disconnect voice client from all Photon servers
        /// </summary>
        public void Disconnect()
        {
            if (!this.Client.IsConnected)
            {
                if (this.Logger.IsErrorEnabled)
                {
                    this.Logger.LogError("Cannot Disconnect if not connected.");
                }
                return;
            }
            this.clientCalledDisconnect = true;
            this.clientCalledConnectAndJoin = false;
            this.clientCalledConnectOnly = false;
            this.Client.Disconnect();
        }

        #endregion

        #region Private Methods

        protected override void Awake()
        {
            // Register as singleton; only proceed with base initialization when
            // this component actually became the singleton instance.
            Instance = this;
            lock (instanceLock)
            {
                if (instantiated && instance.GetInstanceID() == this.GetInstanceID())
                {
                    base.Awake();
                }
            }
        }

        private void OnEnable()
        {
            PhotonNetwork.NetworkingClient.StateChanged += this.OnPunStateChanged;
            this.FollowPun(); // in case this is enabled or activated late
            // Reset call-origin flags: a re-enabled component starts fresh.
            this.clientCalledConnectAndJoin = false;
            this.clientCalledConnectOnly = false;
            this.clientCalledDisconnect = false;
            this.internalDisconnect = false;
        }

        protected override void OnDisable()
        {
            base.OnDisable();
            PhotonNetwork.NetworkingClient.StateChanged -= this.OnPunStateChanged;
        }

        protected override void OnDestroy()
        {
            base.OnDestroy();
            lock (instanceLock)
            {
                // Reset the singleton bookkeeping only if the destroyed component is the registered instance.
                if (instantiated && instance.GetInstanceID() == this.GetInstanceID())
                {
                    instantiated = false;
                    if (instance.Logger.IsDebugEnabled)
                    {
                        instance.Logger.LogDebug("PhotonVoiceNetwork singleton instance is being reset because destroyed.");
                    }
                    instance = null;
                }
            }
        }

        // Follow PUN client state transitions.
        private void OnPunStateChanged(ClientState fromState, ClientState toState)
        {
            if (this.Logger.IsDebugEnabled)
            {
                this.Logger.LogDebug("OnPunStateChanged from {0} to {1}", fromState, toState);
            }
            this.FollowPun(toState);
        }

        protected override void OnVoiceStateChanged(ClientState fromState, ClientState toState)
        {
            base.OnVoiceStateChanged(fromState, toState);
            if (toState == ClientState.Disconnected)
            {
                if (this.internalDisconnect)
                {
                    this.internalDisconnect = false;
                }
                else if (!this.clientCalledDisconnect)
                {
                    // A disconnect by client logic that we did not trigger internally
                    // is treated as a user-initiated disconnect.
                    this.clientCalledDisconnect = this.Client.DisconnectedCause == DisconnectCause.DisconnectByClientLogic;
                }
            }
            else if (toState == ClientState.ConnectedToMasterServer)
            {
                if (this.internalConnect)
                {
                    this.internalConnect = false;
                }
                else if (!this.clientCalledConnectOnly && !this.clientCalledConnectAndJoin)
                {
                    this.clientCalledConnectOnly = true;
                    this.clientCalledDisconnect = false;
                }
            }
            this.FollowPun(toState);
        }

        // React only to the "stable" states; transitional states are ignored.
        private void FollowPun(ClientState toState)
        {
            switch (toState)
            {
                case ClientState.Joined:
                case ClientState.Disconnected:
                case ClientState.ConnectedToMasterServer:
                    this.FollowPun();
                    break;
            }
        }

        // Finds the Speaker for a remote voice via the PhotonView ID carried in UserData.
        protected override Speaker SimpleSpeakerFactory(int playerId, byte voiceId, object userData)
        {
            if (!(userData is int))
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("UserData ({0}) does not contain PhotonViewId. Remote voice {1}/{2} not linked. Do you have a Recorder not used with a PhotonVoiceView? is this expected?",
                        userData == null ? "null" : userData.ToString(), playerId, voiceId);
                }
                return null;
            }
            int photonViewId = (int)userData;
            PhotonView photonView = PhotonView.Find(photonViewId);
            if (photonView == null)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("No PhotonView with ID {0} found. Remote voice {1}/{2} not linked.", userData, playerId, voiceId);
                }
                return null;
            }
            PhotonVoiceView photonVoiceView = photonView.GetComponent<PhotonVoiceView>();
            if (photonVoiceView == null)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("No PhotonVoiceView attached to the PhotonView with ID {0}. Remote voice {1}/{2} not linked.", userData, playerId, voiceId);
                }
                return null;
            }
            // Propagate the global log level unless the view opted out.
            if (!photonVoiceView.IgnoreGlobalLogLevel)
            {
                photonVoiceView.LogLevel = this.LogLevel;
            }
            if (!photonVoiceView.IsSpeaker)
            {
                photonVoiceView.SetupSpeakerInUse();
            }
            return photonVoiceView.SpeakerInUse;
        }

        // Voice room name = PUN room name + suffix; null while not joined to a PUN room.
        internal static string GetVoiceRoomName()
        {
            if (PhotonNetwork.InRoom)
            {
                return string.Format("{0}{1}", PhotonNetwork.CurrentRoom.Name, VoiceRoomNameSuffix);
            }
            return null;
        }

        // Advance the voice client one step towards being joined to the voice room.
        private void ConnectOrJoin()
        {
            switch (this.ClientState)
            {
                case ClientState.PeerCreated:
                case ClientState.Disconnected:
                    if (this.Logger.IsInfoEnabled)
                    {
                        this.Logger.LogInfo("PUN joined room, now connecting Voice client");
                    }
                    if (!this.Connect())
                    {
                        if (this.Logger.IsErrorEnabled)
                        {
                            this.Logger.LogError("Connecting to server failed.");
                        }
                    }
                    else
                    {
                        this.internalConnect = this.AutoConnectAndJoin && !this.clientCalledConnectOnly && !this.clientCalledConnectAndJoin;
                    }
                    break;
                case ClientState.ConnectedToMasterServer:
                    if (this.Logger.IsInfoEnabled)
                    {
                        this.Logger.LogInfo("PUN joined room, now joining Voice room");
                    }
                    if (!this.JoinRoom(GetVoiceRoomName()))
                    {
                        if (this.Logger.IsErrorEnabled)
                        {
                            this.Logger.LogError("Joining a voice room failed.");
                        }
                    }
                    break;
                default:
                    if (this.Logger.IsWarningEnabled)
                    {
                        this.Logger.LogWarning("PUN joined room, Voice client is busy ({0}). Is this expected?", this.ClientState);
                    }
                    break;
            }
        }

        // Connect the voice client, optionally reusing PUN's app settings and auth values.
        private bool Connect()
        {
            AppSettings settings = null;
            if (this.usePunAppSettings)
            {
                // FIX: removed redundant intermediate assignment; CopyTo creates the
                // independent copy directly (we need to modify it slightly).
                settings = PhotonNetwork.PhotonServerSettings.AppSettings.CopyTo(new AppSettings());
                if (!string.IsNullOrEmpty(PhotonNetwork.CloudRegion))
                {
                    settings.FixedRegion = PhotonNetwork.CloudRegion; // makes sure the voice connection follows into the same cloud region (as PUN uses now).
                }
                this.Client.SerializationProtocol = PhotonNetwork.NetworkingClient.SerializationProtocol;
            }
            // use the same user, authentication, auth-mode and encryption as PUN
            if (this.UsePunAuthValues)
            {
                if (PhotonNetwork.AuthValues != null)
                {
                    if (this.Client.AuthValues == null)
                    {
                        this.Client.AuthValues = new AuthenticationValues();
                    }
                    this.Client.AuthValues = PhotonNetwork.AuthValues.CopyTo(this.Client.AuthValues);
                }
                this.Client.AuthMode = PhotonNetwork.NetworkingClient.AuthMode;
                this.Client.EncryptionMode = PhotonNetwork.NetworkingClient.EncryptionMode;
            }
            return this.ConnectUsingSettings(settings);
        }

        private bool JoinRoom(string voiceRoomName)
        {
            if (string.IsNullOrEmpty(voiceRoomName))
            {
                if (this.Logger.IsErrorEnabled)
                {
                    this.Logger.LogError("Voice room name is null or empty.");
                }
                return false;
            }
            this.voiceRoomParams.RoomName = voiceRoomName;
            return this.Client.OpJoinOrCreateRoom(this.voiceRoomParams);
        }

        // Follow PUN client state
        // In case Voice client disconnects unexpectedly try to reconnect to the same room
        // In case Voice client is connected to the wrong room switch to the correct one
        private void FollowPun()
        {
            if (AppQuits)
            {
                return;
            }
            if (PhotonNetwork.OfflineMode && !this.WorkInOfflineMode)
            {
                return;
            }
            if (PhotonNetwork.NetworkClientState == this.ClientState)
            {
                if (PhotonNetwork.InRoom && this.AutoConnectAndJoin)
                {
                    // Both clients joined: verify the voice room matches the PUN room.
                    string expectedRoomName = GetVoiceRoomName();
                    string currentRoomName = this.Client.CurrentRoom.Name;
                    if (!currentRoomName.Equals(expectedRoomName))
                    {
                        if (this.Logger.IsWarningEnabled)
                        {
                            this.Logger.LogWarning(
                                "Voice room mismatch: Expected:\"{0}\" Current:\"{1}\", leaving the second to join the first.",
                                expectedRoomName, currentRoomName);
                        }
                        if (!this.Client.OpLeaveRoom(false))
                        {
                            if (this.Logger.IsErrorEnabled)
                            {
                                this.Logger.LogError("Leaving the current voice room failed.");
                            }
                        }
                    }
                }
                else if (this.ClientState == ClientState.ConnectedToMasterServer && this.AutoLeaveAndDisconnect && !this.clientCalledConnectAndJoin && !this.clientCalledConnectOnly)
                {
                    if (this.Logger.IsWarningEnabled)
                    {
                        this.Logger.LogWarning("Unexpected: PUN and Voice clients have the same client state: ConnectedToMasterServer, Disconnecting Voice client.");
                    }
                    this.internalDisconnect = true;
                    this.Client.Disconnect();
                }
                return;
            }
            if (PhotonNetwork.InRoom)
            {
                // FIX: explicit parentheses; the original relied on && binding
                // tighter than || which is correct but easy to misread.
                if (this.clientCalledConnectAndJoin || (this.AutoConnectAndJoin && !this.clientCalledDisconnect))
                {
                    this.ConnectOrJoin();
                }
            }
            else if (this.Client.InRoom && this.AutoLeaveAndDisconnect && !this.clientCalledConnectAndJoin && !this.clientCalledConnectOnly)
            {
                if (this.Logger.IsInfoEnabled)
                {
                    this.Logger.LogInfo("PUN left room, disconnecting Voice");
                }
                this.internalDisconnect = true;
                this.Client.Disconnect();
            }
        }

        // Link a Speaker to an already-cached remote voice whose UserData carries the given ViewID.
        internal void CheckLateLinking(Speaker speaker, int viewId)
        {
            // Unity's overloaded null check also covers destroyed components.
            if (speaker == null)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Cannot check late linking for null Speaker");
                }
                return;
            }
            if (viewId <= 0)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Cannot check late linking for ViewID = {0} (<= 0)", viewId);
                }
                return;
            }
            if (!this.Client.InRoom)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Cannot check late linking while not joined to a voice room, client state: {0}", System.Enum.GetName(typeof(ClientState), this.ClientState));
                }
                return;
            }
            for (int i = 0; i < this.cachedRemoteVoices.Count; i++)
            {
                RemoteVoiceLink remoteVoice = this.cachedRemoteVoices[i];
                if (remoteVoice.Info.UserData is int)
                {
                    int photonViewId = (int)remoteVoice.Info.UserData;
                    if (viewId == photonViewId)
                    {
                        if (this.Logger.IsInfoEnabled)
                        {
                            this.Logger.LogInfo("Speaker 'late-linking' for the PhotonView with ID {0} with remote voice {1}/{2}.", viewId, remoteVoice.PlayerId, remoteVoice.VoiceId);
                        }
                        this.LinkSpeaker(speaker, remoteVoice);
                        break;
                    }
                }
                else if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("VoiceInfo.UserData should be int/ViewId, received: {0}, do you have a Recorder not used with a PhotonVoiceView? is this expected?",
                        remoteVoice.Info.UserData == null ? "null" : string.Format("{0} ({1})", remoteVoice.Info.UserData, remoteVoice.Info.UserData.GetType()));
                    if (remoteVoice.PlayerId == viewId / PhotonNetwork.MAX_VIEW_IDS)
                    {
                        this.Logger.LogWarning("Player with ActorNumber {0} has started recording (voice # {1}) too early without setting a ViewId maybe? (before PhotonVoiceView setup)", remoteVoice.PlayerId, remoteVoice.VoiceId);
                    }
                }
            }
        }

        #endregion
    }
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 81d9debcbaf99472bb70ffc2f99cb23f
timeCreated: 1540904568
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: dd940d333aeeaa048842e87b9b259188, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,501 @@
// ----------------------------------------------------------------------------
// <copyright file="PhotonVoiceView.cs" company="Exit Games GmbH">
// Photon Voice - Copyright (C) 2018 Exit Games GmbH
// </copyright>
// <summary>
// Component that should be attached to a networked PUN prefab that has
// PhotonView. It will bind remote Recorder with local Speaker of the same
// networked prefab. This component makes automatic voice stream routing easy
// for players' characters/avatars.
// </summary>
// <author>developer@photonengine.com</author>
// ----------------------------------------------------------------------------

namespace Photon.Voice.PUN
{
    using Pun;
    using UnityEngine;
    using Unity;

    /// <summary>
    /// Component that should be attached to a networked PUN prefab that has <see cref="PhotonView"/>.
    /// It will bind remote <see cref="Recorder"/> with local <see cref="Speaker"/> of the same networked prefab.
    /// This component makes automatic voice stream routing easy for players' characters/avatars.
    /// </summary>
    [AddComponentMenu("Photon Voice/Photon Voice View")]
    [RequireComponent(typeof(PhotonView))]
    [HelpURL("https://doc.photonengine.com/en-us/voice/v2/getting-started/voice-for-pun")]
    public class PhotonVoiceView : VoiceComponent
    {
        #region Private Fields

        private PhotonView photonView;

        [SerializeField]
        private Recorder recorderInUse;

        [SerializeField]
        private Speaker speakerInUse;

        // Guards against running Init twice from the first OnEnable + Start pair.
        private bool onEnableCalledOnce;

        #endregion

        #region Public Fields

        /// <summary> If true, a Recorder component will be added to the same GameObject if not found already. </summary>
        public bool AutoCreateRecorderIfNotFound;
        /// <summary> If true, PhotonVoiceNetwork.PrimaryRecorder will be used by this PhotonVoiceView </summary>
        public bool UsePrimaryRecorder;
        /// <summary> If true, a Speaker component will be setup to be used for the DebugEcho mode </summary>
        public bool SetupDebugSpeaker;

        #endregion

        #region Properties

        /// <summary> The Recorder component currently used by this PhotonVoiceView </summary>
        public Recorder RecorderInUse
        {
            get
            {
                return this.recorderInUse;
            }
            set
            {
                if (value != this.recorderInUse)
                {
                    this.recorderInUse = value;
                    this.IsRecorder = false; // new recorder needs a fresh setup
                }
                if (this.RequiresRecorder)
                {
                    this.SetupRecorderInUse();
                }
                else if (this.IsPhotonViewReady)
                {
                    if (this.Logger.IsWarningEnabled)
                    {
                        this.Logger.LogWarning("No need to set Recorder as the PhotonView does not belong to local player");
                    }
                }
            }
        }

        /// <summary> The Speaker component currently used by this PhotonVoiceView </summary>
        public Speaker SpeakerInUse
        {
            get
            {
                return this.speakerInUse;
            }
            set
            {
                if (this.speakerInUse != value)
                {
                    this.speakerInUse = value;
                    this.IsSpeaker = false; // new speaker needs a fresh setup
                }
                if (this.RequiresSpeaker)
                {
                    this.SetupSpeakerInUse();
                }
                else if (this.IsPhotonViewReady)
                {
                    if (this.Logger.IsWarningEnabled)
                    {
                        this.Logger.LogWarning("Speaker not set because the PhotonView does not belong to a remote player or SetupDebugSpeaker is disabled");
                    }
                }
            }
        }

        /// <summary> If true, this PhotonVoiceView is setup and ready to be used </summary>
        public bool IsSetup
        {
            get { return this.IsPhotonViewReady && (!this.RequiresRecorder || this.IsRecorder) && (!this.RequiresSpeaker || this.IsSpeaker); }
        }

        /// <summary> If true, this PhotonVoiceView has a Speaker setup for playback of received audio frames from remote audio source </summary>
        public bool IsSpeaker { get; private set; }

        /// <summary> If true, this PhotonVoiceView has a Speaker that is currently playing received audio frames from remote audio source </summary>
        public bool IsSpeaking
        {
            get { return this.IsSpeaker && this.SpeakerInUse.IsPlaying; }
        }

        /// <summary> If true, this PhotonVoiceView has a Recorder setup for transmission of audio stream from local audio source </summary>
        public bool IsRecorder { get; private set; }

        /// <summary> If true, this PhotonVoiceView has a Recorder that is currently transmitting audio stream from local audio source </summary>
        public bool IsRecording
        {
            get { return this.IsRecorder && this.RecorderInUse.IsCurrentlyTransmitting; }
        }

        /// <summary> If true, the SpeakerInUse is linked to the remote voice stream </summary>
        public bool IsSpeakerLinked
        {
            get { return this.IsSpeaker && this.SpeakerInUse.IsLinked; }
        }

        /// <summary> If true, the PhotonView attached to the same GameObject has a valid ViewID > 0 </summary>
        public bool IsPhotonViewReady
        {
            // FIX: removed redundant duplicate null check; Unity's overloaded
            // null comparison already covers destroyed components.
            get { return this.photonView != null && this.photonView.ViewID > 0; }
        }

        // A Speaker is required for remote players' views, or locally when debug echo is wanted.
        internal bool RequiresSpeaker
        {
            get { return this.SetupDebugSpeaker || this.IsPhotonViewReady && !this.photonView.IsMine; }
        }

        // A Recorder is only required for the local player's own view.
        internal bool RequiresRecorder
        {
            get { return this.IsPhotonViewReady && this.photonView.IsMine; }
        }

        #endregion

        #region Private Methods

        protected override void Awake()
        {
            base.Awake();
            this.photonView = this.GetComponent<PhotonView>();
            this.Init();
        }

        private void OnEnable()
        {
            // Skip the very first OnEnable: Awake/Start already initialize.
            if (this.onEnableCalledOnce)
            {
                this.Init();
            }
            else
            {
                this.onEnableCalledOnce = true;
            }
        }

        private void Start()
        {
            this.Init();
        }

        // If a Speaker is set up but not yet linked, ask the network singleton to
        // link it against any cached remote voice carrying this ViewID.
        private void CheckLateLinking()
        {
            if (PhotonVoiceNetwork.Instance.Client.InRoom)
            {
                if (this.IsSpeaker)
                {
                    if (!this.IsSpeakerLinked)
                    {
                        PhotonVoiceNetwork.Instance.CheckLateLinking(this.SpeakerInUse, this.photonView.ViewID);
                    }
                    else if (this.Logger.IsDebugEnabled)
                    {
                        this.Logger.LogDebug("Speaker already linked");
                    }
                }
                else if (this.Logger.IsDebugEnabled)
                {
                    this.Logger.LogDebug("PhotonVoiceView does not have a Speaker and may not need late linking check");
                }
            }
            else if (this.Logger.IsDebugEnabled)
            {
                this.Logger.LogDebug("Voice client is still not in a room, skipping late linking check");
            }
        }

        internal void Setup()
        {
            if (this.IsSetup)
            {
                if (this.Logger.IsDebugEnabled)
                {
                    this.Logger.LogDebug("PhotonVoiceView already setup");
                }
                return;
            }
            this.SetupRecorderInUse();
            this.SetupSpeakerInUse();
        }

        // Resolve which Recorder to use (primary, attached, or auto-created), then set it up.
        private bool SetupRecorder()
        {
            if (this.recorderInUse == null) // not manually assigned by user
            {
                if (this.UsePrimaryRecorder)
                {
                    if (PhotonVoiceNetwork.Instance.PrimaryRecorder != null)
                    {
                        this.recorderInUse = PhotonVoiceNetwork.Instance.PrimaryRecorder;
                        return this.SetupRecorder(this.recorderInUse);
                    }
                    if (this.Logger.IsErrorEnabled)
                    {
                        this.Logger.LogError("PrimaryRecorder is not set.");
                    }
                }
                Recorder[] recorders = this.GetComponentsInChildren<Recorder>();
                if (recorders.Length > 0)
                {
                    this.recorderInUse = recorders[0];
                    if (recorders.Length > 1 && this.Logger.IsWarningEnabled)
                    {
                        this.Logger.LogWarning("Multiple Recorder components found attached to the GameObject or its children.");
                    }
                    return this.SetupRecorder(this.recorderInUse);
                }
                if (!this.AutoCreateRecorderIfNotFound)
                {
                    if (this.Logger.IsWarningEnabled)
                    {
                        this.Logger.LogWarning("No Recorder found to be setup.");
                    }
                    return false;
                }
                this.recorderInUse = this.gameObject.AddComponent<Recorder>();
            }
            return this.SetupRecorder(this.recorderInUse);
        }

        // Bind the given Recorder to this view: UserData = ViewID, init, restart if needed.
        private bool SetupRecorder(Recorder recorder)
        {
            if (recorder == null)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Cannot setup a null Recorder.");
                }
                return false;
            }
            if (!this.IsPhotonViewReady)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Recorder setup cannot be done before assigning a valid ViewID to the PhotonView attached to the same GameObject as the PhotonVoiceView.");
                }
                return false;
            }
            recorder.UserData = this.photonView.ViewID;
            if (!recorder.IsInitialized)
            {
                // FIX: initialize the method's own parameter instead of the
                // RecorderInUse field; the original mixed the two, which would
                // init the wrong instance if they ever diverged.
                recorder.Init(PhotonVoiceNetwork.Instance);
            }
            if (recorder.RequiresRestart)
            {
                recorder.RestartRecording();
            }
            return recorder.IsInitialized && recorder.UserData is int && this.photonView.ViewID == (int) recorder.UserData;
        }

        // Resolve which Speaker to use (attached, instantiated from prefab, or auto-created), then set it up.
        private bool SetupSpeaker()
        {
            if (this.speakerInUse == null) // not manually assigned by user
            {
                Speaker[] speakers = this.GetComponentsInChildren<Speaker>(true);
                if (speakers.Length > 0)
                {
                    this.speakerInUse = speakers[0];
                    if (speakers.Length > 1 && this.Logger.IsWarningEnabled)
                    {
                        this.Logger.LogWarning("Multiple Speaker components found attached to the GameObject or its children. Using the first one we found.");
                    }
                }
                if (this.speakerInUse == null)
                {
                    if (!PhotonVoiceNetwork.Instance.AutoCreateSpeakerIfNotFound)
                    {
                        return false;
                    }
                    if (PhotonVoiceNetwork.Instance.SpeakerPrefab != null)
                    {
                        GameObject go = Instantiate(PhotonVoiceNetwork.Instance.SpeakerPrefab, this.transform, false);
                        speakers = go.GetComponentsInChildren<Speaker>(true);
                        if (speakers.Length > 0)
                        {
                            this.speakerInUse = speakers[0];
                            if (speakers.Length > 1 && this.Logger.IsWarningEnabled)
                            {
                                this.Logger.LogWarning("Multiple Speaker components found attached to the GameObject (PhotonVoiceNetwork.SpeakerPrefab) or its children. Using the first one we found.");
                            }
                        }
                        if (this.speakerInUse == null)
                        {
                            if (this.Logger.IsErrorEnabled)
                            {
                                this.Logger.LogError("SpeakerPrefab does not have a component of type Speaker in its hierarchy.");
                            }
                            Destroy(go); // clean up the useless instantiated prefab
                            return false;
                        }
                    }
                    else
                    {
                        this.speakerInUse = this.gameObject.AddComponent<Speaker>();
                    }
                }
            }
            return this.SetupSpeaker(this.speakerInUse);
        }

        // Validate the given Speaker's AudioSource; warnings only, except missing AudioSource which fails.
        private bool SetupSpeaker(Speaker speaker)
        {
            if (speaker == null)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Cannot setup a null Speaker");
                }
                return false;
            }
            AudioSource audioSource = speaker.GetComponent<AudioSource>();
            if (audioSource == null)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Unexpected: no AudioSource found attached to the same GameObject as the Speaker component");
                }
                return false;
            }
            if (audioSource.mute)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("audioSource.mute is true, playback may not work properly");
                }
            }
            if (audioSource.volume <= 0f)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("audioSource.volume is zero, playback may not work properly");
                }
            }
            if (!audioSource.enabled)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("audioSource.enabled is false, playback may not work properly");
                }
            }
            return true;
        }

        internal void SetupRecorderInUse()
        {
            if (this.IsRecorder)
            {
                if (this.Logger.IsInfoEnabled)
                {
                    this.Logger.LogInfo("Recorder already setup");
                }
                return;
            }
            if (!this.RequiresRecorder)
            {
                if (this.IsPhotonViewReady)
                {
                    if (this.Logger.IsInfoEnabled)
                    {
                        this.Logger.LogInfo("Recorder not needed");
                    }
                }
                return;
            }
            this.IsRecorder = this.SetupRecorder();
            if (!this.IsRecorder)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Recorder not setup for PhotonVoiceView: playback may not work properly.");
                }
            }
            else
            {
                // Surface common misconfigurations that silently prevent transmission.
                if (!this.RecorderInUse.IsRecording && !this.RecorderInUse.AutoStart)
                {
                    if (this.Logger.IsWarningEnabled)
                    {
                        this.Logger.LogWarning("PhotonVoiceView.RecorderInUse.AutoStart is false, don't forget to start recording manually using recorder.StartRecording() or recorder.IsRecording = true.");
                    }
                }
                if (!this.RecorderInUse.TransmitEnabled)
                {
                    if (this.Logger.IsWarningEnabled)
                    {
                        this.Logger.LogWarning("PhotonVoiceView.RecorderInUse.TransmitEnabled is false, don't forget to set it to true to enable transmission.");
                    }
                }
                if (!this.RecorderInUse.isActiveAndEnabled && this.RecorderInUse.RecordOnlyWhenEnabled)
                {
                    if (this.Logger.IsWarningEnabled)
                    {
                        this.Logger.LogWarning("PhotonVoiceView.RecorderInUse may not work properly as RecordOnlyWhenEnabled is set to true and recorder is disabled or attached to an inactive GameObject.");
                    }
                }
            }
        }

        internal void SetupSpeakerInUse()
        {
            if (this.IsSpeaker)
            {
                if (this.Logger.IsInfoEnabled)
                {
                    this.Logger.LogInfo("Speaker already setup");
                }
                return;
            }
            if (!this.RequiresSpeaker)
            {
                if (this.IsPhotonViewReady)
                {
                    if (this.Logger.IsInfoEnabled)
                    {
                        this.Logger.LogInfo("Speaker not needed");
                    }
                }
                return;
            }
            this.IsSpeaker = this.SetupSpeaker();
            if (!this.IsSpeaker)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Speaker not setup for PhotonVoiceView: voice chat will not work.");
                }
            }
            else
            {
                this.CheckLateLinking();
            }
        }

        #endregion

        #region Public Methods

        /// <summary>
        /// Initializes this PhotonVoiceView for Voice usage based on the PhotonView, Recorder and Speaker components.
        /// </summary>
        /// <remarks>
        /// The initialization should happen automatically.
        /// Call this method explicitly if this does not succeed.
        /// The initialization is a two steps operation: step one is the setup of Recorder and Speaker to be used.
        /// Step two is the late-linking -if needed- of the SpeakerInUse and corresponding remote voice info -if any- via ViewID.
        /// </remarks>
        public void Init()
        {
            if (this.IsPhotonViewReady)
            {
                this.Setup();
                this.CheckLateLinking();
            }
            else if (this.Logger.IsDebugEnabled)
            {
                this.Logger.LogDebug("Tried to initialize PhotonVoiceView but PhotonView does not have a valid allocated ViewID yet.");
            }
        }

        #endregion
    }
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 126494ae2a10a34499272ed97a6c9f5d
timeCreated: 1540904580
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: dd940d333aeeaa048842e87b9b259188, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 22faa279621bc10479cc398c2447f370
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,242 @@
namespace Photon.Voice.PUN.UtilityScripts
{
    using Pun;
    using Unity;
    using Realtime;
    using UnityEngine;
    using ExitGames.Client.Photon;

    /// <summary> Utility script to be attached next to PhotonVoiceView & PhotonView on the player prefab to be network instantiated.
    /// Call voiceDebugScript.CantHearYou() on the networked object of the remote (or local) player if you can't hear the corresponding player. </summary>
    [RequireComponent(typeof(PhotonVoiceView))]
    public class VoiceDebugScript : MonoBehaviourPun
    {
        private PhotonVoiceView photonVoiceView;

        /// <summary> Make sure recorder.TransmitEnabled and recorder.IsRecording are true. </summary>
        public bool ForceRecordingAndTransmission;
        /// <summary> Audio file to be broadcast when TestUsingAudioClip is enabled. </summary>
        public AudioClip TestAudioClip;
        /// <summary> Broadcast Audio file to make sure transmission over network works if microphone (audio input device/hardware) is not reliable. Requires setting AudioClip in TestAudioClip. </summary>
        public bool TestUsingAudioClip;
        /// <summary> Disable recorder.VoiceDetection for easier testing. </summary>
        public bool DisableVad;
        /// <summary> Set main voice component's log level to ALL (max). </summary>
        public bool IncreaseLogLevels;
        /// <summary> Debug DebugEcho mode (Can't Hear My Self?!). </summary>
        public bool LocalDebug;

        private void Awake()
        {
            // Guaranteed present by [RequireComponent].
            this.photonVoiceView = this.GetComponent<PhotonVoiceView>();
        }

        // Applies the debug toggles each frame while this object owns the Recorder.
        private void Update()
        {
            this.MaxLogs();
            if (this.photonVoiceView.IsRecorder)
            {
                if (this.TestUsingAudioClip)
                {
                    // UnityEngine.Object's overloaded '==' already treats destroyed objects as null,
                    // so a single null comparison covers both "unset" and "destroyed".
                    if (this.TestAudioClip == null)
                    {
                        Debug.LogError("Set an AudioClip first");
                    }
                    else
                    {
                        // Switch the Recorder to broadcast the test clip in a loop instead of the microphone.
                        this.photonVoiceView.RecorderInUse.SourceType = Recorder.InputSourceType.AudioClip;
                        this.photonVoiceView.RecorderInUse.AudioClip = this.TestAudioClip;
                        this.photonVoiceView.RecorderInUse.LoopAudioClip = true;
                        if (this.photonVoiceView.RecorderInUse.RequiresRestart)
                        {
                            this.photonVoiceView.RecorderInUse.RestartRecording();
                        }
                        else
                        {
                            this.photonVoiceView.RecorderInUse.StartRecording();
                        }
                        this.photonVoiceView.RecorderInUse.TransmitEnabled = true;
                    }
                }
                if (this.ForceRecordingAndTransmission)
                {
                    this.photonVoiceView.RecorderInUse.IsRecording = true;
                    this.photonVoiceView.RecorderInUse.TransmitEnabled = true;
                }
                if (this.DisableVad)
                {
                    this.photonVoiceView.RecorderInUse.VoiceDetection = false;
                }
            }
        }

        /// <summary> Local-side entry point: diagnoses why this player's voice cannot be heard and,
        /// if nothing is obviously wrong locally, asks the owning player (via RPC) to check its side. </summary>
        [ContextMenu("CantHearYou")]
        public void CantHearYou()
        {
            if (!PhotonVoiceNetwork.Instance.Client.InRoom)
            {
                Debug.LogError("local voice client is not joined to a voice room");
            }
            else if (!this.photonVoiceView.IsPhotonViewReady)
            {
                Debug.LogError("PhotonView is not ready yet; maybe PUN client is not joined to a room yet or this PhotonView is not valid");
            }
            else if (!this.photonVoiceView.IsSpeaker)
            {
                if (this.photonView.IsMine && !this.photonVoiceView.SetupDebugSpeaker)
                {
                    Debug.LogError("local object does not have SetupDebugSpeaker enabled");
                    if (this.LocalDebug)
                    {
                        Debug.Log("setup debug speaker not enabled, enabling it now (1)");
                        this.photonVoiceView.SetupDebugSpeaker = true;
                        this.photonVoiceView.Setup();
                    }
                }
                else
                {
                    Debug.LogError("locally not speaker (yet?) (1)");
                    this.photonVoiceView.Setup();
                }
            }
            else
            {
                if (!this.photonVoiceView.IsSpeakerLinked)
                {
                    Debug.LogError("locally speaker not linked, trying late linking & asking anyway");
                    // late linking maybe
                    PhotonVoiceNetwork.Instance.CheckLateLinking(this.photonVoiceView.SpeakerInUse, this.photonView.ViewID);
                }
                // Send room name, server IP and AppVersion so the remote side can detect mismatched connections.
                this.photonView.RPC("CantHearYou", this.photonView.Owner, PhotonVoiceNetwork.Instance.Client.CurrentRoom.Name, PhotonVoiceNetwork.Instance.Client.LoadBalancingPeer.ServerIpAddress, PhotonVoiceNetwork.Instance.Client.AppVersion);
            }
        }

        /// <summary> RPC handler on the owning (recording) side: finds the first plausible reason the
        /// sender cannot hear us, fixes it where possible, and replies with the diagnosis. </summary>
        [PunRPC]
        private void CantHearYou(string roomName, string serverIp, string appVersion, PhotonMessageInfo photonMessageInfo)
        {
            string why;
            if (!PhotonVoiceNetwork.Instance.Client.InRoom)
            {
                why = "voice client not in a room";
            }
            else if (!PhotonVoiceNetwork.Instance.Client.CurrentRoom.Name.Equals(roomName))
            {
                why = string.Format("voice client is on another room {0} != {1}",
                    PhotonVoiceNetwork.Instance.Client.CurrentRoom.Name, roomName);
            }
            else if (!PhotonVoiceNetwork.Instance.Client.LoadBalancingPeer.ServerIpAddress.Equals(serverIp))
            {
                why = string.Format("voice client is on another server {0} != {1}, maybe different Photon Cloud regions",
                    PhotonVoiceNetwork.Instance.Client.LoadBalancingPeer.ServerIpAddress, serverIp);
            }
            else if (!PhotonVoiceNetwork.Instance.Client.AppVersion.Equals(appVersion))
            {
                why = string.Format("voice client uses different AppVersion {0} != {1}",
                    PhotonVoiceNetwork.Instance.Client.AppVersion, appVersion);
            }
            else if (!this.photonVoiceView.IsRecorder)
            {
                why = "recorder not setup (yet?)";
                this.photonVoiceView.Setup();
            }
            else if (!this.photonVoiceView.RecorderInUse.IsRecording)
            {
                why = "recorder is not recording";
                this.photonVoiceView.RecorderInUse.IsRecording = true;
            }
            else if (!this.photonVoiceView.RecorderInUse.TransmitEnabled)
            {
                why = "recorder is not transmitting";
                this.photonVoiceView.RecorderInUse.TransmitEnabled = true;
            }
            else if (this.photonVoiceView.RecorderInUse.InterestGroup != 0)
            {
                why = "recorder.InterestGroup is not zero? is this on purpose? switching it back to zero";
                this.photonVoiceView.RecorderInUse.InterestGroup = 0;
            }
            else if (!(this.photonVoiceView.RecorderInUse.UserData is int) || (int)this.photonVoiceView.RecorderInUse.UserData != this.photonView.ViewID)
            {
                // UserData carries the ViewID used for Speaker linking on the remote side; restart so the fix is broadcast.
                why = string.Format("recorder.UserData ({0}) != photonView.ViewID ({1}), fixing it now", this.photonVoiceView.RecorderInUse.UserData, this.photonView.ViewID);
                this.photonVoiceView.RecorderInUse.UserData = this.photonView.ViewID;
                this.photonVoiceView.RecorderInUse.RestartRecording();
            }
            else if (this.photonVoiceView.RecorderInUse.VoiceDetection && this.DisableVad) // todo: check WebRtcAudioDsp.VAD
            {
                why = "recorder vad is enabled, disable it for testing";
                this.photonVoiceView.RecorderInUse.VoiceDetection = false;
            }
            else if (this.photonView.OwnerActorNr == photonMessageInfo.Sender.ActorNumber)
            {
                // The asker owns this object: this is the DebugEcho ("can't hear myself") case.
                if (this.LocalDebug)
                {
                    if (this.photonVoiceView.IsSpeaker)
                    {
                        why = "no idea why!, should be working (1)";
                        this.photonVoiceView.RecorderInUse.RestartRecording(true);
                    }
                    else if (!this.photonVoiceView.SetupDebugSpeaker) // unreachable probably
                    {
                        why = "setup debug speaker not enabled, enabling it now (2)";
                        this.photonVoiceView.SetupDebugSpeaker = true;
                        this.photonVoiceView.Setup();
                    }
                    else if (!this.photonVoiceView.RecorderInUse.DebugEchoMode)
                    {
                        why = "recorder debug echo mode not enabled, enabling it now";
                        this.photonVoiceView.RecorderInUse.DebugEchoMode = true;
                    }
                    else
                    {
                        why = "locally not speaker (yet?) (2)";
                        this.photonVoiceView.Setup();
                    }
                }
                else
                {
                    why = "local object, are you trying to hear yourself? (feedback DebugEcho), LocalDebug is disabled, enable it if you want to diagnose this";
                }
            }
            else
            {
                // Everything checks out: force a full restart as a last resort.
                why = "no idea why!, should be working (2)";
                this.photonVoiceView.RecorderInUse.RestartRecording(true);
            }
            this.Reply(why, photonMessageInfo.Sender);
        }

        // Sends the diagnosis string back to the player that asked.
        private void Reply(string why, Player player)
        {
            this.photonView.RPC("HeresWhy", player, why);
        }

        /// <summary> RPC handler on the asking side: logs the diagnosis received from the owner. </summary>
        [PunRPC]
        private void HeresWhy(string why, PhotonMessageInfo photonMessageInfo)
        {
            Debug.LogErrorFormat("Player {0} replied to my CantHearYou message with {1}", photonMessageInfo.Sender, why);
        }

        // Raises all voice log levels to ALL when IncreaseLogLevels is set (called every frame from Update).
        private void MaxLogs()
        {
            if (this.IncreaseLogLevels)
            {
                this.photonVoiceView.LogLevel = DebugLevel.ALL;
                PhotonVoiceNetwork.Instance.LogLevel = DebugLevel.ALL;
                PhotonVoiceNetwork.Instance.GlobalRecordersLogLevel = DebugLevel.ALL;
                PhotonVoiceNetwork.Instance.GlobalSpeakersLogLevel = DebugLevel.ALL;
                if (this.photonVoiceView.IsRecorder)
                {
                    this.photonVoiceView.RecorderInUse.LogLevel = DebugLevel.ALL;
                }
                if (this.photonVoiceView.IsSpeaker)
                {
                    this.photonVoiceView.SpeakerInUse.LogLevel = DebugLevel.ALL;
                }
            }
        }
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 83be82b116045a747bf0141620a4a02f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,11 @@
{
"name": "PhotonVoice",
"references": [
"PhotonRealtime",
"PhotonVoice.API"
],
"optionalUnityReferences": [],
"includePlatforms": [],
"excludePlatforms": [],
"allowUnsafeCode": false
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: b59fae9c7ebc1574e8690f2570b29d6f
timeCreated: 1538045250
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,23 @@
namespace Photon.Voice.Unity
{
    /// <summary> Playback delay configuration container. </summary>
    [System.Serializable]
    public struct PlaybackDelaySettings
    {
        public const int DEFAULT_LOW = 200;
        public const int DEFAULT_HIGH = 400;
        public const int DEFAULT_MAX = 1000;

        /// <summary> ms: Audio player tries to keep the delay above this value. </summary>
        public int MinDelaySoft;
        /// <summary> ms: Audio player tries to keep the delay below this value. </summary>
        public int MaxDelaySoft;
        /// <summary> ms: Audio player guarantees that the delay never exceeds this value. </summary>
        public int MaxDelayHard;

        /// <summary> Human-readable summary of the three delay bounds, e.g. "[low=200ms,high=400ms,max=1000ms]". </summary>
        public override string ToString()
        {
            return "[low=" + this.MinDelaySoft + "ms,high=" + this.MaxDelaySoft + "ms,max=" + this.MaxDelayHard + "ms]";
        }
    }
}
View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 1577aa029672fe448b6abdaec64e7d01
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: dd940d333aeeaa048842e87b9b259188, type: 3}
userData:
assetBundleName:
assetBundleVariant:

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 91175dcc15224463780e01a8a98b1b60
timeCreated: 1540904404
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: dd940d333aeeaa048842e87b9b259188, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,63 @@
namespace Photon.Voice.Unity
{
    using Voice;
    using System;

    /// <summary> Wraps a remote incoming voice stream (identified by player, voice and channel numbers)
    /// and re-exposes its decoded-frame and removal callbacks as C# events. </summary>
    public class RemoteVoiceLink : IEquatable<RemoteVoiceLink>
    {
        public readonly VoiceInfo Info;
        public readonly int PlayerId;
        public readonly int VoiceId;
        public readonly int ChannelId;

        /// <summary> Raised for every decoded audio frame of this stream. </summary>
        public event Action<FrameOut<float>> FloatFrameDecoded;
        /// <summary> Raised once when the remote voice is removed. </summary>
        public event Action RemoteVoiceRemoved;

        public RemoteVoiceLink(VoiceInfo info, int playerId, int voiceId, int channelId)
        {
            this.Info = info;
            this.PlayerId = playerId;
            this.VoiceId = voiceId;
            this.ChannelId = channelId;
        }

        /// <summary> Wires this link's handlers into the voice client's per-stream options. </summary>
        public void Init(ref RemoteVoiceOptions options)
        {
            options.SetOutput(this.OnDecodedFrameFloatAction);
            options.OnRemoteVoiceRemoveAction = this.OnRemoteVoiceRemoveAction;
        }

        private void OnRemoteVoiceRemoveAction()
        {
            // Copy to a local so a concurrent unsubscribe between the null check and the call cannot NRE.
            Action handler = this.RemoteVoiceRemoved;
            if (handler != null)
            {
                handler();
            }
        }

        private void OnDecodedFrameFloatAction(FrameOut<float> floats)
        {
            Action<FrameOut<float>> handler = this.FloatFrameDecoded;
            if (handler != null)
            {
                handler(floats);
            }
        }

        // Lazily built ToString; safe to cache since all identifying fields are readonly.
        private string cached;

        public override string ToString()
        {
            if (string.IsNullOrEmpty(this.cached))
            {
                this.cached = string.Format("[p#:{0},v#:{1},c#:{2},i:{{{3}}}]", this.PlayerId, this.VoiceId, this.ChannelId, this.Info);
            }
            return this.cached;
        }

        /// <summary> Two links are equal when they refer to the same (player, voice) stream,
        /// or when they carry the same user data (e.g. the ViewID used for late linking). </summary>
        public bool Equals(RemoteVoiceLink other)
        {
            if (ReferenceEquals(null, other)) return false;
            if (ReferenceEquals(this, other)) return true;
            // object.Equals instead of '==' so boxed value types (e.g. int ViewIDs stored in UserData)
            // compare by value rather than by reference.
            // NOTE(review): two links whose UserData are both null still compare equal here (as in the
            // original reference comparison) — confirm this is intended for streams without user data.
            return (this.PlayerId == other.PlayerId && this.VoiceId == other.VoiceId)
                || object.Equals(this.Info.UserData, other.Info.UserData);
        }
    }
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: ce49702397a11ee4db63a71d56646ed4
timeCreated: 1544460899
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: dd940d333aeeaa048842e87b9b259188, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,599 @@
// ----------------------------------------------------------------------------
// <copyright file="Speaker.cs" company="Exit Games GmbH">
// Photon Voice for Unity - Copyright (C) 2018 Exit Games GmbH
// </copyright>
// <summary>
// Component representing remote audio stream in local scene.
// </summary>
// <author>developer@photonengine.com</author>
// ----------------------------------------------------------------------------
//#define USE_ONAUDIOFILTERREAD
using System;
using UnityEngine;
namespace Photon.Voice.Unity
{
    /// <summary> Component representing remote audio stream in local scene. </summary>
    [RequireComponent(typeof(AudioSource))]
    [AddComponentMenu("Photon Voice/Speaker")]
    [DisallowMultipleComponent]
    public class Speaker : VoiceComponent
    {
        #region Private Fields

        // Playback backend; created lazily in Initialize() (null until then, see IsInitialized).
        private IAudioOut<float> audioOutput;

        // Remote stream this Speaker plays; null while unlinked (see IsLinked).
        private RemoteVoiceLink remoteVoiceLink;

        [SerializeField]
        private bool playbackOnlyWhenEnabled;

#if USE_ONAUDIOFILTERREAD
        private AudioSyncBuffer<float> outBuffer;
        private int outputSampleRate;
#endif

#pragma warning disable 414
        // Legacy serialized value; kept only so old scenes/prefabs migrate into
        // playbackDelaySettings via OnValidate (set to -1 once migrated).
        [SerializeField]
        [HideInInspector]
        private int playDelayMs = 200;
#pragma warning restore 414

        [SerializeField]
        private PlaybackDelaySettings playbackDelaySettings = new PlaybackDelaySettings
        {
            MinDelaySoft = PlaybackDelaySettings.DEFAULT_LOW,
            MaxDelaySoft = PlaybackDelaySettings.DEFAULT_HIGH,
            MaxDelayHard = PlaybackDelaySettings.DEFAULT_MAX
        };

        // Set by StopPlayback(); suppresses automatic restarts (e.g. from OnEnable or the
        // PlaybackOnlyWhenEnabled setter) until StartPlayback() clears it again.
        private bool playbackExplicitlyStopped;

        #endregion

        #region Public Fields

        ///<summary>Remote audio stream playback delay to compensate packets latency variations. Try 100 - 200 if sound is choppy.</summary>
        [Obsolete("Use SetPlaybackDelaySettings methods instead")]
        public int PlayDelayMs
        {
            get
            {
                return this.playbackDelaySettings.MinDelaySoft;
            }
            set
            {
                // Only accept values that keep the invariant 0 <= low < high.
                if (value >= 0 && value < this.playbackDelaySettings.MaxDelaySoft)
                {
                    this.playbackDelaySettings.MinDelaySoft = value;
                }
            }
        }

#if UNITY_PS4 || UNITY_SHARLIN
        /// <summary>Set the PlayStation User ID to determine on which users headphones to play audio.</summary>
        /// <remarks>
        /// Note: at the moment, only the first Speaker can successfully set the User ID.
        /// Subsequently initialized Speakers will play their audio on the headphones that have been set with the first Speaker initialized.
        /// </remarks>
        public int PlayStationUserID = 0;
#endif

        /// <summary>
        /// A custom factory method to return <see cref="IAudioOut&lt;float&gt;"/> implementation used for the playback.
        /// </summary>
        public Func<IAudioOut<float>> CustomAudioOutFactory;

        #endregion

        #region Properties

        /// <summary>Is the speaker playing right now.</summary>
        public bool IsPlaying
        {
            get { return this.IsInitialized && this.audioOutput.IsPlaying; }
        }

        /// <summary>Smoothed difference between (jittering) stream and (clock-driven) audioOutput.</summary>
        public int Lag
        {
            // -1 when not playing (no meaningful lag value available).
            get { return this.IsPlaying ? this.audioOutput.Lag : -1; }
        }

        /// <summary>
        /// Register a method to be called when remote voice removed.
        /// </summary>
        public Action<Speaker> OnRemoteVoiceRemoveAction { get; set; }

        /// <summary>Per room, the connected users/players are represented with a Realtime.Player, also known as Actor.</summary>
        /// <remarks>Photon Voice calls this Actor, to avoid a name-clash with the Player class in Voice.</remarks>
        public Realtime.Player Actor { get; protected internal set; }

        /// <summary>
        /// Whether or not this Speaker has been linked to a remote voice stream.
        /// </summary>
        public bool IsLinked
        {
            get { return this.remoteVoiceLink != null; }
        }

#if UNITY_EDITOR
        /// <summary>
        /// USE IN EDITOR ONLY
        /// </summary>
        public RemoteVoiceLink RemoteVoiceLink
        {
            get { return this.remoteVoiceLink; }
        }
#else
        internal RemoteVoiceLink RemoteVoiceLink
        {
            get { return this.remoteVoiceLink; }
        }
#endif

        /// <summary> If true, component will work only when enabled and active in hierarchy. </summary>
        public bool PlaybackOnlyWhenEnabled
        {
            get { return this.playbackOnlyWhenEnabled; }
            set
            {
                if (this.playbackOnlyWhenEnabled != value)
                {
                    this.playbackOnlyWhenEnabled = value;
                    // Already-linked speakers must immediately reflect the new policy:
                    if (this.IsLinked)
                    {
                        if (this.playbackOnlyWhenEnabled)
                        {
                            // Playback state must now follow the enabled/active state.
                            if (this.isActiveAndEnabled != this.PlaybackStarted)
                            {
                                if (this.isActiveAndEnabled)
                                {
                                    // Do not auto-restart if the user explicitly stopped playback.
                                    if (!this.playbackExplicitlyStopped)
                                    {
                                        this.StartPlaying();
                                    }
                                }
                                else
                                {
                                    this.StopPlaying();
                                }
                            }
                        }
                        else if (!this.PlaybackStarted && !this.playbackExplicitlyStopped)
                        {
                            // Policy lifted: start playback regardless of enabled state.
                            this.StartPlaying();
                        }
                    }
                }
            }
        }

        /// <summary> Returns if the playback is on. </summary>
        public bool PlaybackStarted { get; private set; }

        /// <summary>Gets the value in ms above which the audio player tries to keep the delay.</summary>
        public int PlaybackDelayMinSoft
        {
            get
            {
                return this.playbackDelaySettings.MinDelaySoft;
            }
        }

        /// <summary>Gets the value in ms below which the audio player tries to keep the delay.</summary>
        public int PlaybackDelayMaxSoft
        {
            get
            {
                return this.playbackDelaySettings.MaxDelaySoft;
            }
        }

        /// <summary>Gets the value in ms that audio play delay will not exceed.</summary>
        public int PlaybackDelayMaxHard
        {
            get
            {
                return this.playbackDelaySettings.MaxDelayHard;
            }
        }

        // True once Initialize() has created the audio output backend.
        internal bool IsInitialized
        {
            get { return this.audioOutput != null; }
        }

        #endregion

        #region Private Methods

        private void OnEnable()
        {
            // Resume playback for a linked speaker, unless it was explicitly stopped.
            if (this.IsLinked && !this.PlaybackStarted && !this.playbackExplicitlyStopped)
            {
                this.StartPlaying();
            }
        }

        private void OnDisable()
        {
            // Only the PlaybackOnlyWhenEnabled policy ties playback to the enabled state.
            if (this.PlaybackOnlyWhenEnabled && this.PlaybackStarted)
            {
                this.StopPlaying();
            }
        }

        // Creates the audio output backend (custom factory if provided, default otherwise).
        private void Initialize()
        {
            if (this.IsInitialized)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Already initialized.");
                }
                return;
            }
            if (this.Logger.IsDebugEnabled)
            {
                this.Logger.LogDebug("Initializing.");
            }
            Func<IAudioOut<float>> factory;
            if (this.CustomAudioOutFactory != null)
            {
                factory = this.CustomAudioOutFactory;
            }
            else
            {
                factory = this.GetDefaultAudioOutFactory();
            }
#if !UNITY_EDITOR && (UNITY_PS4 || UNITY_SHARLIN)
            // On PlayStation the factory output is wrapped to route audio to the given user's headphones.
            this.audioOutput = new Photon.Voice.PlayStation.PlayStationAudioOut(this.PlayStationUserID, factory);
#else
            this.audioOutput = factory();
#endif
            if (this.Logger.IsDebugEnabled)
            {
                this.Logger.LogDebug("Initialized.");
            }
        }

        // Builds the default playback factory from the current playbackDelaySettings.
        internal Func<IAudioOut<float>> GetDefaultAudioOutFactory()
        {
#if USE_ONAUDIOFILTERREAD
            this.outBuffer = new AudioSyncBuffer<float>(this.playbackDelaySettings.MinDelaySoft, this.Logger, string.Empty, this.Logger.IsDebugEnabled);
            this.outputSampleRate = AudioSettings.outputSampleRate;
            Func<IAudioOut<float>> factory = () => this.outBuffer;
#else
            var pdc = new AudioOutDelayControl.PlayDelayConfig
            {
                Low = this.playbackDelaySettings.MinDelaySoft,
                High = this.playbackDelaySettings.MaxDelaySoft,
                Max = this.playbackDelaySettings.MaxDelayHard
            };
            Func<IAudioOut<float>> factory = () => new UnityAudioOut(this.GetComponent<AudioSource>(), pdc, this.Logger, string.Empty, this.Logger.IsDebugEnabled);
#endif
            return factory;
        }

        // Links this Speaker to a remote stream and (policy permitting) starts playback.
        // Returns true when linked AND playback started (or legitimately deferred).
        internal bool OnRemoteVoiceInfo(RemoteVoiceLink stream)
        {
            if (stream == null)
            {
                if (this.Logger.IsErrorEnabled)
                {
                    this.Logger.LogError("RemoteVoiceLink is null, cancelled linking");
                }
                return false;
            }
            if (!this.IsInitialized)
            {
                this.Initialize();
            }
            if (this.Logger.IsDebugEnabled)
            {
                this.Logger.LogDebug("OnRemoteVoiceInfo {0}", stream);
            }
            // One stream per Speaker: refuse re-linking while already linked.
            if (this.IsLinked)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Speaker already linked to {0}, cancelled linking to {1}", this.remoteVoiceLink, stream);
                }
                return false;
            }
            if (stream.Info.Channels <= 0) // early avoid possible crash due to ArgumentException in AudioClip.Create inside UnityAudioOut.Start
            {
                if (this.Logger.IsErrorEnabled)
                {
                    this.Logger.LogError("Received voice info channels is not expected (<= 0), cancelled linking to {0}", stream);
                }
                return false;
            }
            this.remoteVoiceLink = stream;
            this.remoteVoiceLink.RemoteVoiceRemoved += this.OnRemoteVoiceRemove;
            // Re-check IsInitialized: Initialize() above may have failed to create the output.
            if (this.IsInitialized)
            {
                if (!this.PlaybackOnlyWhenEnabled || this.isActiveAndEnabled)
                {
                    return this.StartPlayback();
                }
                // Linked but playback deferred until the component is enabled.
                return true;
            }
            return false;
        }

        // Called when the remote stream goes away: stop playback, notify, then unlink.
        internal void OnRemoteVoiceRemove()
        {
            if (this.Logger.IsDebugEnabled)
            {
                this.Logger.LogDebug("OnRemoteVoiceRemove {0}", this.remoteVoiceLink);
            }
            this.StopPlaying();
            if (this.OnRemoteVoiceRemoveAction != null) { this.OnRemoteVoiceRemoveAction(this); }
            this.CleanUp();
        }

        // Per-frame audio callback: push decoded samples to the output, flush at end of stream.
        internal void OnAudioFrame(FrameOut<float> frame)
        {
            this.audioOutput.Push(frame.Buf);
            if (frame.EndOfStream)
            {
                this.audioOutput.Flush();
            }
        }

        private bool StartPlaying()
        {
            if (!this.IsLinked)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Cannot start playback because speaker is not linked");
                }
                return false;
            }
            if (this.PlaybackStarted)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Playback is already started");
                }
                return false;
            }
            if (!this.IsInitialized)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Cannot start playback because not initialized yet");
                }
                return false;
            }
            if (!this.isActiveAndEnabled && this.PlaybackOnlyWhenEnabled)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Cannot start playback because PlaybackOnlyWhenEnabled is true and Speaker is not enabled or its GameObject is not active in the hierarchy.");
                }
                return false;
            }
            VoiceInfo voiceInfo = this.remoteVoiceLink.Info;
            if (voiceInfo.Channels == 0)
            {
                if (this.Logger.IsErrorEnabled)
                {
                    this.Logger.LogError("Cannot start playback because Channels == 0, stream {0}", this.remoteVoiceLink);
                }
                return false;
            }
            if (this.Logger.IsInfoEnabled)
            {
                this.Logger.LogInfo("Speaker about to start playback stream {0}, delay {1}", this.remoteVoiceLink, this.playbackDelaySettings);
            }
            // Start the output before subscribing so no frame arrives on a stopped output.
            this.audioOutput.Start(voiceInfo.SamplingRate, voiceInfo.Channels, voiceInfo.FrameDurationSamples);
            this.remoteVoiceLink.FloatFrameDecoded += this.OnAudioFrame;
            this.PlaybackStarted = true;
            this.playbackExplicitlyStopped = false;
            return true;
        }

        private void OnDestroy()
        {
            if (this.Logger.IsDebugEnabled)
            {
                this.Logger.LogDebug("OnDestroy");
            }
            // force: clean up regardless of the current playback state.
            this.StopPlaying(true);
            this.CleanUp();
        }

        // Stops playback; with force == true, state-mismatch warnings are suppressed and
        // teardown proceeds unconditionally (used from OnDestroy).
        private bool StopPlaying(bool force = false)
        {
            if (this.Logger.IsDebugEnabled)
            {
                this.Logger.LogDebug("StopPlaying");
            }
            if (!force && !this.PlaybackStarted)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Cannot stop playback because it's not started");
                }
                return false;
            }
            if (this.IsLinked)
            {
                this.remoteVoiceLink.FloatFrameDecoded -= this.OnAudioFrame;
            }
            else if (!force && this.Logger.IsWarningEnabled)
            {
                this.Logger.LogWarning("Speaker not linked while stopping playback");
            }
            if (this.IsInitialized)
            {
                this.audioOutput.Stop();
            }
            else if (!force && this.Logger.IsWarningEnabled)
            {
                this.Logger.LogWarning("audioOutput is null while stopping playback");
            }
            this.PlaybackStarted = false;
            return true;
        }

        // Unlinks from the remote stream and clears the associated actor.
        private void CleanUp()
        {
            if (this.Logger.IsDebugEnabled)
            {
                this.Logger.LogDebug("CleanUp");
            }
            if (this.remoteVoiceLink != null)
            {
                this.remoteVoiceLink.RemoteVoiceRemoved -= this.OnRemoteVoiceRemove;
                this.remoteVoiceLink = null;
            }
            this.Actor = null;
        }

#if USE_ONAUDIOFILTERREAD
        // Unity audio thread callback: pulls buffered samples into the mixer.
        private void OnAudioFilterRead(float[] data, int channels)
        {
            this.outBuffer.Read(data, channels, this.outputSampleRate);
        }
#endif

#if UNITY_EDITOR
        // Migrates the obsolete serialized playDelayMs into playbackDelaySettings,
        // keeping low < high <= max consistent; playDelayMs = -1 marks migration done.
        private void OnValidate()
        {
            if (this.playDelayMs > 0)
            {
                if (this.playbackDelaySettings.MinDelaySoft != this.playDelayMs)
                {
                    this.playbackDelaySettings.MinDelaySoft = this.playDelayMs;
                    if (this.playbackDelaySettings.MaxDelaySoft <= this.playbackDelaySettings.MinDelaySoft)
                    {
                        this.playbackDelaySettings.MaxDelaySoft = 2 * this.playbackDelaySettings.MinDelaySoft;
                        if (this.playbackDelaySettings.MaxDelayHard < this.playbackDelaySettings.MaxDelaySoft)
                        {
                            this.playbackDelaySettings.MaxDelayHard = this.playbackDelaySettings.MaxDelaySoft + 1000;
                        }
                    }
                }
                this.playDelayMs = -1;
            }
        }
#endif

        // Called regularly (by the voice client) to let the output backend do per-frame work.
        internal void Service()
        {
            if (this.PlaybackStarted)
            {
                this.audioOutput.Service();
            }
        }

        #endregion

        #region Public Methods

        /// <summary>
        /// Starts the audio playback of the linked incoming remote audio stream via AudioSource component.
        /// </summary>
        /// <returns>True if playback is successfully started.</returns>
        public bool StartPlayback()
        {
            return this.StartPlaying();
        }

        /// <summary>
        /// Stops the audio playback of the linked incoming remote audio stream via AudioSource component.
        /// </summary>
        /// <returns>True if playback is successfully stopped.</returns>
        public bool StopPlayback()
        {
            if (this.playbackExplicitlyStopped)
            {
                if (this.Logger.IsWarningEnabled)
                {
                    this.Logger.LogWarning("Cannot stop playback because it was already been explicitly stopped.");
                }
                return false;
            }
            // Remember the explicit stop so automatic restarts (OnEnable etc.) are suppressed.
            this.playbackExplicitlyStopped = this.StopPlaying();
            return this.playbackExplicitlyStopped;
        }

        /// <summary>
        /// Restarts the audio playback of the linked incoming remote audio stream via AudioSource component.
        /// </summary>
        /// <param name="reinit">If true, player will be reinitialized.</param>
        /// <returns>True if playback is successfully restarted.</returns>
        public bool RestartPlayback(bool reinit = false)
        {
            if (!this.StopPlayback())
            {
                return false;
            }
            if (reinit)
            {
                // Drop the backend so Initialize() recreates it (e.g. after delay settings changed).
                this.audioOutput = null;
                this.Initialize();
            }
            return this.StartPlayback();
        }

        /// <summary>
        /// Sets the settings for the playback behaviour in case of delays.
        /// </summary>
        /// <param name="pdc">Playback delay configuration struct.</param>
        /// <returns>If a change has been made.</returns>
        public bool SetPlaybackDelaySettings(PlaybackDelaySettings pdc)
        {
            return this.SetPlaybackDelaySettings(pdc.MinDelaySoft, pdc.MaxDelaySoft, pdc.MaxDelayHard);
        }

        /// <summary>
        /// Sets the settings for the playback behaviour in case of delays.
        /// </summary>
        /// <param name="low">In milliseconds, audio player tries to keep the playback delay above this value.</param>
        /// <param name="high">In milliseconds, audio player tries to keep the playback below above this value.</param>
        /// <param name="max">In milliseconds, audio player guarantees that the playback delay never exceeds this value.</param>
        /// <returns>If a change has been made.</returns>
        public bool SetPlaybackDelaySettings(int low, int high, int max)
        {
            if (low >= 0 && low < high)
            {
                if (this.playbackDelaySettings.MaxDelaySoft != high ||
                    this.playbackDelaySettings.MinDelaySoft != low ||
                    this.playbackDelaySettings.MaxDelayHard != max)
                {
                    // Silently clamp max up to high so the hard limit never undercuts the soft one.
                    if (max < high)
                    {
                        max = high;
                    }
                    this.playbackDelaySettings.MaxDelaySoft = high;
                    this.playbackDelaySettings.MinDelaySoft = low;
                    this.playbackDelaySettings.MaxDelayHard = max;
                    // Rebuild the output backend so the new delay config takes effect.
                    if (this.IsPlaying)
                    {
                        this.RestartPlayback(true);
                    }
                    else if (this.IsInitialized)
                    {
                        this.audioOutput = null;
                        this.Initialize();
                    }
                    return true;
                }
            }
            else if (this.Logger.IsErrorEnabled)
            {
                this.Logger.LogError("Wrong playback delay config values, make sure 0 <= Low < High, low={0}, high={1}, max={2}", low, high, max);
            }
            return false;
        }

        #endregion
    }
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: dd7fea91fe63f4e1b884ef1e16a975c3
timeCreated: 1540904415
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: dd940d333aeeaa048842e87b9b259188, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 94fb63a21bf1c5343b47341817eee260
folderAsset: yes
timeCreated: 1531151987
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: c2a7832ead413484391836cfaa4f0a6b
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: e70c6b0e644598b458dbb42ace191228
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,546 @@
// ReSharper disable InconsistentNaming
namespace CSCore
{
//Copied from NAudio (including a few modifications)
/// <summary>
/// Defines all known encoding types. Primary used in the <see cref="WaveFormat" /> class. See
/// <see cref="WaveFormat.WaveFormatTag" />.
/// </summary>
public enum AudioEncoding : short
{
/// <summary>WAVE_FORMAT_UNKNOWN, Microsoft Corporation</summary>
Unknown = 0x0000,
/// <summary>WAVE_FORMAT_PCM Microsoft Corporation</summary>
Pcm = 0x0001,
/// <summary>WAVE_FORMAT_ADPCM Microsoft Corporation</summary>
Adpcm = 0x0002,
/// <summary>WAVE_FORMAT_IEEE_FLOAT Microsoft Corporation</summary>
IeeeFloat = 0x0003,
/// <summary>WAVE_FORMAT_VSELP Compaq Computer Corp.</summary>
Vselp = 0x0004,
/// <summary>WAVE_FORMAT_IBM_CVSD IBM Corporation</summary>
IbmCvsd = 0x0005,
/// <summary>WAVE_FORMAT_ALAW Microsoft Corporation</summary>
ALaw = 0x0006,
/// <summary>WAVE_FORMAT_MULAW Microsoft Corporation</summary>
MuLaw = 0x0007,
/// <summary>WAVE_FORMAT_DTS Microsoft Corporation</summary>
Dts = 0x0008,
/// <summary>WAVE_FORMAT_DRM Microsoft Corporation</summary>
Drm = 0x0009,
/// <summary>WAVE_FORMAT_WMAVOICE9 </summary>
WmaVoice9 = 0x000A,
/// <summary>WAVE_FORMAT_OKI_ADPCM OKI</summary>
OkiAdpcm = 0x0010,
/// <summary>WAVE_FORMAT_DVI_ADPCM Intel Corporation</summary>
DviAdpcm = 0x0011,
/// <summary>WAVE_FORMAT_IMA_ADPCM Intel Corporation</summary>
ImaAdpcm = DviAdpcm,
/// <summary>WAVE_FORMAT_MEDIASPACE_ADPCM Videologic</summary>
MediaspaceAdpcm = 0x0012,
/// <summary>WAVE_FORMAT_SIERRA_ADPCM Sierra Semiconductor Corp </summary>
SierraAdpcm = 0x0013,
/// <summary>WAVE_FORMAT_G723_ADPCM Antex Electronics Corporation </summary>
G723Adpcm = 0x0014,
/// <summary>WAVE_FORMAT_DIGISTD DSP Solutions, Inc.</summary>
DigiStd = 0x0015,
/// <summary>WAVE_FORMAT_DIGIFIX DSP Solutions, Inc.</summary>
DigiFix = 0x0016,
/// <summary>WAVE_FORMAT_DIALOGIC_OKI_ADPCM Dialogic Corporation</summary>
DialogicOkiAdpcm = 0x0017,
/// <summary>WAVE_FORMAT_MEDIAVISION_ADPCM Media Vision, Inc.</summary>
MediaVisionAdpcm = 0x0018,
/// <summary>WAVE_FORMAT_CU_CODEC Hewlett-Packard Company </summary>
CUCodec = 0x0019,
/// <summary>WAVE_FORMAT_YAMAHA_ADPCM Yamaha Corporation of America</summary>
YamahaAdpcm = 0x0020,
/// <summary>WAVE_FORMAT_SONARC Speech Compression</summary>
SonarC = 0x0021,
/// <summary>WAVE_FORMAT_DSPGROUP_TRUESPEECH DSP Group, Inc </summary>
DspGroupTrueSpeech = 0x0022,
/// <summary>WAVE_FORMAT_ECHOSC1 Echo Speech Corporation</summary>
EchoSpeechCorporation1 = 0x0023,
/// <summary>WAVE_FORMAT_AUDIOFILE_AF36, Virtual Music, Inc.</summary>
AudioFileAf36 = 0x0024,
/// <summary>WAVE_FORMAT_APTX Audio Processing Technology</summary>
Aptx = 0x0025,
/// <summary>WAVE_FORMAT_AUDIOFILE_AF10, Virtual Music, Inc.</summary>
AudioFileAf10 = 0x0026,
/// <summary>WAVE_FORMAT_PROSODY_1612, Aculab plc</summary>
Prosody1612 = 0x0027,
/// <summary>WAVE_FORMAT_LRC, Merging Technologies S.A. </summary>
Lrc = 0x0028,
/// <summary>WAVE_FORMAT_DOLBY_AC2, Dolby Laboratories</summary>
DolbyAc2 = 0x0030,
/// <summary>WAVE_FORMAT_GSM610, Microsoft Corporation</summary>
Gsm610 = 0x0031,
/// <summary>WAVE_FORMAT_MSNAUDIO, Microsoft Corporation</summary>
MsnAudio = 0x0032,
/// <summary>WAVE_FORMAT_ANTEX_ADPCME, Antex Electronics Corporation</summary>
AntexAdpcme = 0x0033,
/// <summary>WAVE_FORMAT_CONTROL_RES_VQLPC, Control Resources Limited </summary>
ControlResVqlpc = 0x0034,
/// <summary>WAVE_FORMAT_DIGIREAL, DSP Solutions, Inc. </summary>
DigiReal = 0x0035,
/// <summary>WAVE_FORMAT_DIGIADPCM, DSP Solutions, Inc.</summary>
DigiAdpcm = 0x0036,
/// <summary>WAVE_FORMAT_CONTROL_RES_CR10, Control Resources Limited</summary>
ControlResCr10 = 0x0037,
/// <summary>WAVE_FORMAT_NMS_VBXADPCM</summary>
WAVE_FORMAT_NMS_VBXADPCM = 0x0038, // Natural MicroSystems
/// <summary>WAVE_FORMAT_CS_IMAADPCM</summary>
WAVE_FORMAT_CS_IMAADPCM = 0x0039, // Crystal Semiconductor IMA ADPCM
/// <summary>WAVE_FORMAT_ECHOSC3</summary>
WAVE_FORMAT_ECHOSC3 = 0x003A, // Echo Speech Corporation
/// <summary>WAVE_FORMAT_ROCKWELL_ADPCM</summary>
WAVE_FORMAT_ROCKWELL_ADPCM = 0x003B, // Rockwell International
/// <summary>WAVE_FORMAT_ROCKWELL_DIGITALK</summary>
WAVE_FORMAT_ROCKWELL_DIGITALK = 0x003C, // Rockwell International
/// <summary>WAVE_FORMAT_XEBEC</summary>
WAVE_FORMAT_XEBEC = 0x003D, // Xebec Multimedia Solutions Limited
/// <summary>WAVE_FORMAT_G721_ADPCM</summary>
WAVE_FORMAT_G721_ADPCM = 0x0040, // Antex Electronics Corporation
/// <summary>WAVE_FORMAT_G728_CELP</summary>
WAVE_FORMAT_G728_CELP = 0x0041, // Antex Electronics Corporation
/// <summary>WAVE_FORMAT_MSG723</summary>
WAVE_FORMAT_MSG723 = 0x0042, // Microsoft Corporation
/// <summary>WAVE_FORMAT_MPEG, Microsoft Corporation </summary>
Mpeg = 0x0050,
/// <summary>WAVE_FORMAT_RT24</summary>
WAVE_FORMAT_RT24 = 0x0052, // InSoft, Inc.
/// <summary>WAVE_FORMAT_PAC</summary>
WAVE_FORMAT_PAC = 0x0053, // InSoft, Inc.
/// <summary>WAVE_FORMAT_MPEGLAYER3, ISO/MPEG Layer3 Format Tag </summary>
MpegLayer3 = 0x0055,
/// <summary>WAVE_FORMAT_LUCENT_G723</summary>
WAVE_FORMAT_LUCENT_G723 = 0x0059, // Lucent Technologies
/// <summary>WAVE_FORMAT_CIRRUS</summary>
WAVE_FORMAT_CIRRUS = 0x0060, // Cirrus Logic
/// <summary>WAVE_FORMAT_ESPCM</summary>
WAVE_FORMAT_ESPCM = 0x0061, // ESS Technology
/// <summary>WAVE_FORMAT_VOXWARE</summary>
WAVE_FORMAT_VOXWARE = 0x0062, // Voxware Inc
/// <summary>WAVE_FORMAT_CANOPUS_ATRAC</summary>
WAVE_FORMAT_CANOPUS_ATRAC = 0x0063, // Canopus, co., Ltd.
/// <summary>WAVE_FORMAT_G726_ADPCM</summary>
WAVE_FORMAT_G726_ADPCM = 0x0064, // APICOM
/// <summary>WAVE_FORMAT_G722_ADPCM</summary>
WAVE_FORMAT_G722_ADPCM = 0x0065, // APICOM
/// <summary>WAVE_FORMAT_DSAT_DISPLAY</summary>
WAVE_FORMAT_DSAT_DISPLAY = 0x0067, // Microsoft Corporation
/// <summary>WAVE_FORMAT_VOXWARE_BYTE_ALIGNED</summary>
WAVE_FORMAT_VOXWARE_BYTE_ALIGNED = 0x0069, // Voxware Inc
/// <summary>WAVE_FORMAT_VOXWARE_AC8</summary>
WAVE_FORMAT_VOXWARE_AC8 = 0x0070, // Voxware Inc
/// <summary>WAVE_FORMAT_VOXWARE_AC10</summary>
WAVE_FORMAT_VOXWARE_AC10 = 0x0071, // Voxware Inc
/// <summary>WAVE_FORMAT_VOXWARE_AC16</summary>
WAVE_FORMAT_VOXWARE_AC16 = 0x0072, // Voxware Inc
/// <summary>WAVE_FORMAT_VOXWARE_AC20</summary>
WAVE_FORMAT_VOXWARE_AC20 = 0x0073, // Voxware Inc
/// <summary>WAVE_FORMAT_VOXWARE_RT24</summary>
WAVE_FORMAT_VOXWARE_RT24 = 0x0074, // Voxware Inc
/// <summary>WAVE_FORMAT_VOXWARE_RT29</summary>
WAVE_FORMAT_VOXWARE_RT29 = 0x0075, // Voxware Inc
/// <summary>WAVE_FORMAT_VOXWARE_RT29HW</summary>
WAVE_FORMAT_VOXWARE_RT29HW = 0x0076, // Voxware Inc
/// <summary>WAVE_FORMAT_VOXWARE_VR12</summary>
WAVE_FORMAT_VOXWARE_VR12 = 0x0077, // Voxware Inc
/// <summary>WAVE_FORMAT_VOXWARE_VR18</summary>
WAVE_FORMAT_VOXWARE_VR18 = 0x0078, // Voxware Inc
/// <summary>WAVE_FORMAT_VOXWARE_TQ40</summary>
WAVE_FORMAT_VOXWARE_TQ40 = 0x0079, // Voxware Inc
/// <summary>WAVE_FORMAT_SOFTSOUND</summary>
WAVE_FORMAT_SOFTSOUND = 0x0080, // Softsound, Ltd.
/// <summary>WAVE_FORMAT_VOXWARE_TQ60</summary>
WAVE_FORMAT_VOXWARE_TQ60 = 0x0081, // Voxware Inc
/// <summary>WAVE_FORMAT_MSRT24</summary>
WAVE_FORMAT_MSRT24 = 0x0082, // Microsoft Corporation
/// <summary>WAVE_FORMAT_G729A</summary>
WAVE_FORMAT_G729A = 0x0083, // AT&T Labs, Inc.
/// <summary>WAVE_FORMAT_MVI_MVI2</summary>
WAVE_FORMAT_MVI_MVI2 = 0x0084, // Motion Pixels
/// <summary>WAVE_FORMAT_DF_G726</summary>
WAVE_FORMAT_DF_G726 = 0x0085, // DataFusion Systems (Pty) (Ltd)
/// <summary>WAVE_FORMAT_DF_GSM610</summary>
WAVE_FORMAT_DF_GSM610 = 0x0086, // DataFusion Systems (Pty) (Ltd)
/// <summary>WAVE_FORMAT_ISIAUDIO</summary>
WAVE_FORMAT_ISIAUDIO = 0x0088, // Iterated Systems, Inc.
/// <summary>WAVE_FORMAT_ONLIVE</summary>
WAVE_FORMAT_ONLIVE = 0x0089, // OnLive! Technologies, Inc.
/// <summary>WAVE_FORMAT_SBC24</summary>
WAVE_FORMAT_SBC24 = 0x0091, // Siemens Business Communications Sys
/// <summary>WAVE_FORMAT_DOLBY_AC3_SPDIF</summary>
WAVE_FORMAT_DOLBY_AC3_SPDIF = 0x0092, // Sonic Foundry
/// <summary>WAVE_FORMAT_MEDIASONIC_G723</summary>
WAVE_FORMAT_MEDIASONIC_G723 = 0x0093, // MediaSonic
/// <summary>WAVE_FORMAT_PROSODY_8KBPS</summary>
WAVE_FORMAT_PROSODY_8KBPS = 0x0094, // Aculab plc
/// <summary>WAVE_FORMAT_ZYXEL_ADPCM</summary>
WAVE_FORMAT_ZYXEL_ADPCM = 0x0097, // ZyXEL Communications, Inc.
/// <summary>WAVE_FORMAT_PHILIPS_LPCBB</summary>
WAVE_FORMAT_PHILIPS_LPCBB = 0x0098, // Philips Speech Processing
/// <summary>WAVE_FORMAT_PACKED</summary>
WAVE_FORMAT_PACKED = 0x0099, // Studer Professional Audio AG
/// <summary>WAVE_FORMAT_MALDEN_PHONYTALK</summary>
WAVE_FORMAT_MALDEN_PHONYTALK = 0x00A0, // Malden Electronics Ltd.
/// <summary>WAVE_FORMAT_GSM</summary>
Gsm = 0x00A1,
/// <summary>WAVE_FORMAT_G729</summary>
G729 = 0x00A2,
/// <summary>WAVE_FORMAT_G723</summary>
G723 = 0x00A3,
/// <summary>WAVE_FORMAT_ACELP</summary>
Acelp = 0x00A4,
/// <summary>
/// WAVE_FORMAT_RAW_AAC1
/// </summary>
RawAac = 0x00FF,
/// <summary>WAVE_FORMAT_RHETOREX_ADPCM</summary>
WAVE_FORMAT_RHETOREX_ADPCM = 0x0100, // Rhetorex Inc.
/// <summary>WAVE_FORMAT_IRAT</summary>
WAVE_FORMAT_IRAT = 0x0101, // BeCubed Software Inc.
/// <summary>WAVE_FORMAT_VIVO_G723</summary>
WAVE_FORMAT_VIVO_G723 = 0x0111, // Vivo Software
/// <summary>WAVE_FORMAT_VIVO_SIREN</summary>
WAVE_FORMAT_VIVO_SIREN = 0x0112, // Vivo Software
/// <summary>WAVE_FORMAT_DIGITAL_G723</summary>
WAVE_FORMAT_DIGITAL_G723 = 0x0123, // Digital Equipment Corporation
/// <summary>WAVE_FORMAT_SANYO_LD_ADPCM</summary>
WAVE_FORMAT_SANYO_LD_ADPCM = 0x0125, // Sanyo Electric Co., Ltd.
/// <summary>WAVE_FORMAT_SIPROLAB_ACEPLNET</summary>
WAVE_FORMAT_SIPROLAB_ACEPLNET = 0x0130, // Sipro Lab Telecom Inc.
/// <summary>WAVE_FORMAT_SIPROLAB_ACELP4800</summary>
WAVE_FORMAT_SIPROLAB_ACELP4800 = 0x0131, // Sipro Lab Telecom Inc.
/// <summary>WAVE_FORMAT_SIPROLAB_ACELP8V3</summary>
WAVE_FORMAT_SIPROLAB_ACELP8V3 = 0x0132, // Sipro Lab Telecom Inc.
/// <summary>WAVE_FORMAT_SIPROLAB_G729</summary>
WAVE_FORMAT_SIPROLAB_G729 = 0x0133, // Sipro Lab Telecom Inc.
/// <summary>WAVE_FORMAT_SIPROLAB_G729A</summary>
WAVE_FORMAT_SIPROLAB_G729A = 0x0134, // Sipro Lab Telecom Inc.
/// <summary>WAVE_FORMAT_SIPROLAB_KELVIN</summary>
WAVE_FORMAT_SIPROLAB_KELVIN = 0x0135, // Sipro Lab Telecom Inc.
/// <summary>WAVE_FORMAT_G726ADPCM</summary>
WAVE_FORMAT_G726ADPCM = 0x0140, // Dictaphone Corporation
/// <summary>WAVE_FORMAT_QUALCOMM_PUREVOICE</summary>
WAVE_FORMAT_QUALCOMM_PUREVOICE = 0x0150, // Qualcomm, Inc.
/// <summary>WAVE_FORMAT_QUALCOMM_HALFRATE</summary>
WAVE_FORMAT_QUALCOMM_HALFRATE = 0x0151, // Qualcomm, Inc.
/// <summary>WAVE_FORMAT_TUBGSM</summary>
WAVE_FORMAT_TUBGSM = 0x0155, // Ring Zero Systems, Inc.
/// <summary>WAVE_FORMAT_MSAUDIO1</summary>
WAVE_FORMAT_MSAUDIO1 = 0x0160, // Microsoft Corporation
/// <summary>
/// Windows Media Audio, WAVE_FORMAT_WMAUDIO2, Microsoft Corporation
/// </summary>
WindowsMediaAudio = 0x0161,
/// <summary>
/// Windows Media Audio Professional WAVE_FORMAT_WMAUDIO3, Microsoft Corporation
/// </summary>
WindowsMediaAudioProfessional = 0x0162,
/// <summary>
/// Windows Media Audio Lossless, WAVE_FORMAT_WMAUDIO_LOSSLESS
/// </summary>
WindowsMediaAudioLosseless = 0x0163,
/// <summary>
/// Windows Media Audio Professional over SPDIF WAVE_FORMAT_WMASPDIF (0x0164)
/// </summary>
WindowsMediaAudioSpdif = 0x0164,
/// <summary>WAVE_FORMAT_UNISYS_NAP_ADPCM</summary>
WAVE_FORMAT_UNISYS_NAP_ADPCM = 0x0170, // Unisys Corp.
/// <summary>WAVE_FORMAT_UNISYS_NAP_ULAW</summary>
WAVE_FORMAT_UNISYS_NAP_ULAW = 0x0171, // Unisys Corp.
/// <summary>WAVE_FORMAT_UNISYS_NAP_ALAW</summary>
WAVE_FORMAT_UNISYS_NAP_ALAW = 0x0172, // Unisys Corp.
/// <summary>WAVE_FORMAT_UNISYS_NAP_16K</summary>
WAVE_FORMAT_UNISYS_NAP_16K = 0x0173, // Unisys Corp.
/// <summary>WAVE_FORMAT_CREATIVE_ADPCM</summary>
WAVE_FORMAT_CREATIVE_ADPCM = 0x0200, // Creative Labs, Inc
/// <summary>WAVE_FORMAT_CREATIVE_FASTSPEECH8</summary>
WAVE_FORMAT_CREATIVE_FASTSPEECH8 = 0x0202, // Creative Labs, Inc
/// <summary>WAVE_FORMAT_CREATIVE_FASTSPEECH10</summary>
WAVE_FORMAT_CREATIVE_FASTSPEECH10 = 0x0203, // Creative Labs, Inc
/// <summary>WAVE_FORMAT_UHER_ADPCM</summary>
WAVE_FORMAT_UHER_ADPCM = 0x0210, // UHER informatic GmbH
/// <summary>WAVE_FORMAT_QUARTERDECK</summary>
WAVE_FORMAT_QUARTERDECK = 0x0220, // Quarterdeck Corporation
/// <summary>WAVE_FORMAT_ILINK_VC</summary>
WAVE_FORMAT_ILINK_VC = 0x0230, // I-link Worldwide
/// <summary>WAVE_FORMAT_RAW_SPORT</summary>
WAVE_FORMAT_RAW_SPORT = 0x0240, // Aureal Semiconductor
/// <summary>WAVE_FORMAT_ESST_AC3</summary>
WAVE_FORMAT_ESST_AC3 = 0x0241, // ESS Technology, Inc.
/// <summary>WAVE_FORMAT_IPI_HSX</summary>
WAVE_FORMAT_IPI_HSX = 0x0250, // Interactive Products, Inc.
/// <summary>WAVE_FORMAT_IPI_RPELP</summary>
WAVE_FORMAT_IPI_RPELP = 0x0251, // Interactive Products, Inc.
/// <summary>WAVE_FORMAT_CS2</summary>
WAVE_FORMAT_CS2 = 0x0260, // Consistent Software
/// <summary>WAVE_FORMAT_SONY_SCX</summary>
WAVE_FORMAT_SONY_SCX = 0x0270, // Sony Corp.
/// <summary>WAVE_FORMAT_FM_TOWNS_SND</summary>
WAVE_FORMAT_FM_TOWNS_SND = 0x0300, // Fujitsu Corp.
/// <summary>WAVE_FORMAT_BTV_DIGITAL</summary>
WAVE_FORMAT_BTV_DIGITAL = 0x0400, // Brooktree Corporation
/// <summary>WAVE_FORMAT_QDESIGN_MUSIC</summary>
WAVE_FORMAT_QDESIGN_MUSIC = 0x0450, // QDesign Corporation
/// <summary>WAVE_FORMAT_VME_VMPCM</summary>
WAVE_FORMAT_VME_VMPCM = 0x0680, // AT&T Labs, Inc.
/// <summary>WAVE_FORMAT_TPC</summary>
WAVE_FORMAT_TPC = 0x0681, // AT&T Labs, Inc.
/// <summary>WAVE_FORMAT_OLIGSM</summary>
WAVE_FORMAT_OLIGSM = 0x1000, // Ing C. Olivetti & C., S.p.A.
/// <summary>WAVE_FORMAT_OLIADPCM</summary>
WAVE_FORMAT_OLIADPCM = 0x1001, // Ing C. Olivetti & C., S.p.A.
/// <summary>WAVE_FORMAT_OLICELP</summary>
WAVE_FORMAT_OLICELP = 0x1002, // Ing C. Olivetti & C., S.p.A.
/// <summary>WAVE_FORMAT_OLISBC</summary>
WAVE_FORMAT_OLISBC = 0x1003, // Ing C. Olivetti & C., S.p.A.
/// <summary>WAVE_FORMAT_OLIOPR</summary>
WAVE_FORMAT_OLIOPR = 0x1004, // Ing C. Olivetti & C., S.p.A.
/// <summary>WAVE_FORMAT_LH_CODEC</summary>
WAVE_FORMAT_LH_CODEC = 0x1100, // Lernout & Hauspie
/// <summary>WAVE_FORMAT_NORRIS</summary>
WAVE_FORMAT_NORRIS = 0x1400, // Norris Communications, Inc.
/// <summary>WAVE_FORMAT_SOUNDSPACE_MUSICOMPRESS</summary>
WAVE_FORMAT_SOUNDSPACE_MUSICOMPRESS = 0x1500, // AT&T Labs, Inc.
/// <summary>
/// Advanced Audio Coding (AAC) audio in Audio Data Transport Stream (ADTS) format.
/// The format block is a WAVEFORMATEX structure with wFormatTag equal to WAVE_FORMAT_MPEG_ADTS_AAC.
/// </summary>
/// <remarks>
/// The WAVEFORMATEX structure specifies the core AAC-LC sample rate and number of channels,
/// prior to applying spectral band replication (SBR) or parametric stereo (PS) tools, if present.
/// No additional data is required after the WAVEFORMATEX structure.
/// </remarks>
/// <see>http://msdn.microsoft.com/en-us/library/dd317599%28VS.85%29.aspx</see>
MPEG_ADTS_AAC = 0x1600,
/// <summary>MPEG_RAW_AAC</summary>
/// <remarks>Source wmCodec.h</remarks>
MPEG_RAW_AAC = 0x1601,
/// <summary>
/// MPEG-4 audio transport stream with a synchronization layer (LOAS) and a multiplex layer (LATM).
/// The format block is a WAVEFORMATEX structure with wFormatTag equal to WAVE_FORMAT_MPEG_LOAS.
/// See <see href="http://msdn.microsoft.com/en-us/library/dd317599%28VS.85%29.aspx"/>.
/// </summary>
/// <remarks>
/// The WAVEFORMATEX structure specifies the core AAC-LC sample rate and number of channels,
/// prior to applying spectral SBR or PS tools, if present.
/// No additional data is required after the WAVEFORMATEX structure.
/// </remarks>
MPEG_LOAS = 0x1602,
/// <summary>NOKIA_MPEG_ADTS_AAC</summary>
/// <remarks>Source wmCodec.h</remarks>
NOKIA_MPEG_ADTS_AAC = 0x1608,
/// <summary>NOKIA_MPEG_RAW_AAC</summary>
/// <remarks>Source wmCodec.h</remarks>
NOKIA_MPEG_RAW_AAC = 0x1609,
/// <summary>VODAFONE_MPEG_ADTS_AAC</summary>
/// <remarks>Source wmCodec.h</remarks>
VODAFONE_MPEG_ADTS_AAC = 0x160A,
/// <summary>VODAFONE_MPEG_RAW_AAC</summary>
/// <remarks>Source wmCodec.h</remarks>
VODAFONE_MPEG_RAW_AAC = 0x160B,
/// <summary>
/// High-Efficiency Advanced Audio Coding (HE-AAC) stream.
/// The format block is an HEAACWAVEFORMAT structure. See <see href="http://msdn.microsoft.com/en-us/library/dd317599%28VS.85%29.aspx"/>.
/// </summary>
MPEG_HEAAC = 0x1610,
/// <summary>WAVE_FORMAT_DVM</summary>
WAVE_FORMAT_DVM = 0x2000, // FAST Multimedia AG
// others - not from MS headers
/// <summary>WAVE_FORMAT_VORBIS1 "Og" Original stream compatible</summary>
Vorbis1 = 0x674f,
/// <summary>WAVE_FORMAT_VORBIS2 "Pg" Have independent header</summary>
Vorbis2 = 0x6750,
/// <summary>WAVE_FORMAT_VORBIS3 "Qg" Have no codebook header</summary>
Vorbis3 = 0x6751,
/// <summary>WAVE_FORMAT_VORBIS1P "og" Original stream compatible</summary>
Vorbis1P = 0x676f,
/// <summary>WAVE_FORMAT_VORBIS2P "pg" Have independent header</summary>
Vorbis2P = 0x6770,
/// <summary>WAVE_FORMAT_VORBIS3P "qg" Have no codebook header</summary>
Vorbis3P = 0x6771,
/// <summary>
/// Raw AAC1
/// </summary>
WAVE_FORMAT_RAW_AAC1 = 0x00FF,
/// <summary>
/// Windows Media Audio Voice (WMA Voice)
/// </summary>
WAVE_FORMAT_WMAVOICE9 = 0x000A,
/// <summary>Extensible</summary>
Extensible = unchecked((short)0xFFFE), // Microsoft
/// <summary>WAVE_FORMAT_DEVELOPMENT</summary>
WAVE_FORMAT_DEVELOPMENT = unchecked((short)0xFFFF),
/// <summary>
/// FLAC
/// </summary>
WAVE_FORMAT_FLAC = unchecked((short)0xF1AC)
}
}
// ReSharper restore InconsistentNaming

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 14a588271ffedcb44968c878818b7871
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,49 @@
using System;
namespace CSCore
{
    /// <summary>
    /// Defines <see cref="AudioSubTypes"/> and provides methods to convert between <see cref="AudioEncoding"/>-values and <see cref="AudioSubTypes"/>-values.
    /// </summary>
    /// <remarks><see cref="AudioSubTypes"/> are used by the <see cref="WaveFormatExtensible"/>, the <see cref="MFMediaType"/> and the <see cref="MediaType"/> class.</remarks>
    public static partial class AudioSubTypes
    {
        /// <summary>
        /// Converts a <see cref="AudioSubTypes"/>-value to a <see cref="AudioEncoding"/>-value.
        /// </summary>
        /// <param name="audioSubType">The <see cref="AudioSubTypes"/>-value to convert to the equivalent <see cref="AudioEncoding"/>-value.</param>
        /// <returns>The <see cref="AudioEncoding"/> which belongs to the specified <paramref name="audioSubType"/>.</returns>
        /// <exception cref="ArgumentException">The format tag encoded in the first four bytes of <paramref name="audioSubType"/> does not match any defined <see cref="AudioEncoding"/>-value.</exception>
        public static AudioEncoding EncodingFromSubType(Guid audioSubType)
        {
            // Audio subtype GUIDs carry the wave-format tag in their first four bytes
            // (little endian); the remainder is the fixed media-subtype base GUID.
            var bytes = audioSubType.ToByteArray();
            int value = BitConverter.ToInt32(bytes, 0);
            // AudioEncoding is a 16-bit enumeration: truncate explicitly so that tags
            // above 0x7FFF (e.g. 0xFFFE "Extensible") map onto their negative members.
            short encodingValue = unchecked((short)value);
            if (Enum.IsDefined(typeof(AudioEncoding), encodingValue))
                return (AudioEncoding)encodingValue;
            throw new ArgumentException("Invalid audioSubType.", nameof(audioSubType));
        }
        /// <summary>
        /// Converts a <see cref="AudioEncoding"/> value to a <see cref="AudioSubTypes"/>-value.
        /// </summary>
        /// <param name="audioEncoding">The <see cref="AudioEncoding"/> to convert to the equivalent <see cref="AudioSubTypes"/>-value.</param>
        /// <returns>The <see cref="AudioSubTypes"/>-value which belongs to the specified <paramref name="audioEncoding"/>.</returns>
        /// <exception cref="ArgumentException"><paramref name="audioEncoding"/> is not a defined <see cref="AudioEncoding"/>-value.</exception>
        public static Guid SubTypeFromEncoding(AudioEncoding audioEncoding)
        {
            if (Enum.IsDefined(typeof(AudioEncoding), (short)audioEncoding))
                // Mask to 16 bits: negative enum members (e.g. Extensible = 0xFFFE) would
                // otherwise sign-extend to 0xFFFFFFFE and yield a wrong subtype GUID.
                // This keeps the result consistent with the precomputed Guid fields of
                // this partial class, which apply the same & 0x0000FFFF mask.
                return new Guid((int)audioEncoding & 0xFFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
            throw new ArgumentException("Invalid encoding.", nameof(audioEncoding));
        }
        /// <summary>
        /// The Major Type for <c>Audio</c> media types.
        /// </summary>
        public static readonly Guid MediaTypeAudio = new Guid("73647561-0000-0010-8000-00AA00389B71");
        ///// <summary>
        ///// FLAC
        ///// </summary>
        //public static readonly Guid WAVE_FORMAT_FLAC = new Guid("0000f1ac-0000-0010-8000-00aa00389b71");
    }
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 03a5e54186895d548a426573dcd38b22
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,381 @@
using System;
namespace CSCore
{
public static partial class AudioSubTypes
{
// ReSharper disable InconsistentNaming
/// <summary>WAVE_FORMAT_UNKNOWN, Microsoft Corporation</summary>
public static readonly Guid Unknown = new Guid((short)AudioEncoding.Unknown & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_PCM Microsoft Corporation</summary>
public static readonly Guid Pcm = new Guid((short)AudioEncoding.Pcm & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_ADPCM Microsoft Corporation</summary>
public static readonly Guid Adpcm = new Guid((short)AudioEncoding.Adpcm & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_IEEE_FLOAT Microsoft Corporation</summary>
public static readonly Guid IeeeFloat = new Guid((short)AudioEncoding.IeeeFloat & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VSELP Compaq Computer Corp.</summary>
public static readonly Guid Vselp = new Guid((short)AudioEncoding.Vselp & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_IBM_CVSD IBM Corporation</summary>
public static readonly Guid IbmCvsd = new Guid((short)AudioEncoding.IbmCvsd & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_ALAW Microsoft Corporation</summary>
public static readonly Guid ALaw = new Guid((short)AudioEncoding.ALaw & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_MULAW Microsoft Corporation</summary>
public static readonly Guid MuLaw = new Guid((short)AudioEncoding.MuLaw & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DTS Microsoft Corporation</summary>
public static readonly Guid Dts = new Guid((short)AudioEncoding.Dts & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DRM Microsoft Corporation</summary>
public static readonly Guid Drm = new Guid((short)AudioEncoding.Drm & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_WMAVOICE9 </summary>
public static readonly Guid WmaVoice9 = new Guid((short)AudioEncoding.WmaVoice9 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_OKI_ADPCM OKI</summary>
public static readonly Guid OkiAdpcm = new Guid((short)AudioEncoding.OkiAdpcm & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DVI_ADPCM Intel Corporation</summary>
public static readonly Guid DviAdpcm = new Guid((short)AudioEncoding.DviAdpcm & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_IMA_ADPCM Intel Corporation</summary>
public static readonly Guid ImaAdpcm = new Guid((short)AudioEncoding.ImaAdpcm & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_MEDIASPACE_ADPCM Videologic</summary>
public static readonly Guid MediaspaceAdpcm = new Guid((short)AudioEncoding.MediaspaceAdpcm & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_SIERRA_ADPCM Sierra Semiconductor Corp </summary>
public static readonly Guid SierraAdpcm = new Guid((short)AudioEncoding.SierraAdpcm & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_G723_ADPCM Antex Electronics Corporation </summary>
public static readonly Guid G723Adpcm = new Guid((short)AudioEncoding.G723Adpcm & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DIGISTD DSP Solutions, Inc.</summary>
public static readonly Guid DigiStd = new Guid((short)AudioEncoding.DigiStd & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DIGIFIX DSP Solutions, Inc.</summary>
public static readonly Guid DigiFix = new Guid((short)AudioEncoding.DigiFix & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DIALOGIC_OKI_ADPCM Dialogic Corporation</summary>
public static readonly Guid DialogicOkiAdpcm = new Guid((short)AudioEncoding.DialogicOkiAdpcm & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_MEDIAVISION_ADPCM Media Vision, Inc.</summary>
public static readonly Guid MediaVisionAdpcm = new Guid((short)AudioEncoding.MediaVisionAdpcm & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_CU_CODEC Hewlett-Packard Company </summary>
public static readonly Guid CUCodec = new Guid((short)AudioEncoding.CUCodec & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_YAMAHA_ADPCM Yamaha Corporation of America</summary>
public static readonly Guid YamahaAdpcm = new Guid((short)AudioEncoding.YamahaAdpcm & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_SONARC Speech Compression</summary>
public static readonly Guid SonarC = new Guid((short)AudioEncoding.SonarC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DSPGROUP_TRUESPEECH DSP Group, Inc </summary>
public static readonly Guid DspGroupTrueSpeech = new Guid((short)AudioEncoding.DspGroupTrueSpeech & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_ECHOSC1 Echo Speech Corporation</summary>
public static readonly Guid EchoSpeechCorporation1 = new Guid((short)AudioEncoding.EchoSpeechCorporation1 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_AUDIOFILE_AF36, Virtual Music, Inc.</summary>
public static readonly Guid AudioFileAf36 = new Guid((short)AudioEncoding.AudioFileAf36 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_APTX Audio Processing Technology</summary>
public static readonly Guid Aptx = new Guid((short)AudioEncoding.Aptx & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_AUDIOFILE_AF10, Virtual Music, Inc.</summary>
public static readonly Guid AudioFileAf10 = new Guid((short)AudioEncoding.AudioFileAf10 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_PROSODY_1612, Aculab plc</summary>
public static readonly Guid Prosody1612 = new Guid((short)AudioEncoding.Prosody1612 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_LRC, Merging Technologies S.A. </summary>
public static readonly Guid Lrc = new Guid((short)AudioEncoding.Lrc & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DOLBY_AC2, Dolby Laboratories</summary>
public static readonly Guid DolbyAc2 = new Guid((short)AudioEncoding.DolbyAc2 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_GSM610, Microsoft Corporation</summary>
public static readonly Guid Gsm610 = new Guid((short)AudioEncoding.Gsm610 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_MSNAUDIO, Microsoft Corporation</summary>
public static readonly Guid MsnAudio = new Guid((short)AudioEncoding.MsnAudio & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_ANTEX_ADPCME, Antex Electronics Corporation</summary>
public static readonly Guid AntexAdpcme = new Guid((short)AudioEncoding.AntexAdpcme & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_CONTROL_RES_VQLPC, Control Resources Limited </summary>
public static readonly Guid ControlResVqlpc = new Guid((short)AudioEncoding.ControlResVqlpc & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DIGIREAL, DSP Solutions, Inc. </summary>
public static readonly Guid DigiReal = new Guid((short)AudioEncoding.DigiReal & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DIGIADPCM, DSP Solutions, Inc.</summary>
public static readonly Guid DigiAdpcm = new Guid((short)AudioEncoding.DigiAdpcm & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_CONTROL_RES_CR10, Control Resources Limited</summary>
public static readonly Guid ControlResCr10 = new Guid((short)AudioEncoding.ControlResCr10 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_NMS_VBXADPCM</summary>
public static readonly Guid WAVE_FORMAT_NMS_VBXADPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_NMS_VBXADPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_CS_IMAADPCM</summary>
public static readonly Guid WAVE_FORMAT_CS_IMAADPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_CS_IMAADPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_ECHOSC3</summary>
public static readonly Guid WAVE_FORMAT_ECHOSC3 = new Guid((short)AudioEncoding.WAVE_FORMAT_ECHOSC3 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_ROCKWELL_ADPCM</summary>
public static readonly Guid WAVE_FORMAT_ROCKWELL_ADPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_ROCKWELL_ADPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_ROCKWELL_DIGITALK</summary>
public static readonly Guid WAVE_FORMAT_ROCKWELL_DIGITALK = new Guid((short)AudioEncoding.WAVE_FORMAT_ROCKWELL_DIGITALK & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_XEBEC</summary>
public static readonly Guid WAVE_FORMAT_XEBEC = new Guid((short)AudioEncoding.WAVE_FORMAT_XEBEC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_G721_ADPCM</summary>
public static readonly Guid WAVE_FORMAT_G721_ADPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_G721_ADPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_G728_CELP</summary>
public static readonly Guid WAVE_FORMAT_G728_CELP = new Guid((short)AudioEncoding.WAVE_FORMAT_G728_CELP & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_MSG723</summary>
public static readonly Guid WAVE_FORMAT_MSG723 = new Guid((short)AudioEncoding.WAVE_FORMAT_MSG723 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_MPEG, Microsoft Corporation </summary>
public static readonly Guid Mpeg = new Guid((short)AudioEncoding.Mpeg & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_RT24</summary>
public static readonly Guid WAVE_FORMAT_RT24 = new Guid((short)AudioEncoding.WAVE_FORMAT_RT24 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_PAC</summary>
public static readonly Guid WAVE_FORMAT_PAC = new Guid((short)AudioEncoding.WAVE_FORMAT_PAC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_MPEGLAYER3, ISO/MPEG Layer3 Format Tag </summary>
public static readonly Guid MpegLayer3 = new Guid((short)AudioEncoding.MpegLayer3 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_LUCENT_G723</summary>
public static readonly Guid WAVE_FORMAT_LUCENT_G723 = new Guid((short)AudioEncoding.WAVE_FORMAT_LUCENT_G723 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_CIRRUS</summary>
public static readonly Guid WAVE_FORMAT_CIRRUS = new Guid((short)AudioEncoding.WAVE_FORMAT_CIRRUS & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_ESPCM</summary>
public static readonly Guid WAVE_FORMAT_ESPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_ESPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VOXWARE</summary>
public static readonly Guid WAVE_FORMAT_VOXWARE = new Guid((short)AudioEncoding.WAVE_FORMAT_VOXWARE & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_CANOPUS_ATRAC</summary>
public static readonly Guid WAVE_FORMAT_CANOPUS_ATRAC = new Guid((short)AudioEncoding.WAVE_FORMAT_CANOPUS_ATRAC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_G726_ADPCM</summary>
public static readonly Guid WAVE_FORMAT_G726_ADPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_G726_ADPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_G722_ADPCM</summary>
public static readonly Guid WAVE_FORMAT_G722_ADPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_G722_ADPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DSAT_DISPLAY</summary>
public static readonly Guid WAVE_FORMAT_DSAT_DISPLAY = new Guid((short)AudioEncoding.WAVE_FORMAT_DSAT_DISPLAY & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VOXWARE_BYTE_ALIGNED</summary>
public static readonly Guid WAVE_FORMAT_VOXWARE_BYTE_ALIGNED = new Guid((short)AudioEncoding.WAVE_FORMAT_VOXWARE_BYTE_ALIGNED & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VOXWARE_AC8</summary>
public static readonly Guid WAVE_FORMAT_VOXWARE_AC8 = new Guid((short)AudioEncoding.WAVE_FORMAT_VOXWARE_AC8 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VOXWARE_AC10</summary>
public static readonly Guid WAVE_FORMAT_VOXWARE_AC10 = new Guid((short)AudioEncoding.WAVE_FORMAT_VOXWARE_AC10 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VOXWARE_AC16</summary>
public static readonly Guid WAVE_FORMAT_VOXWARE_AC16 = new Guid((short)AudioEncoding.WAVE_FORMAT_VOXWARE_AC16 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VOXWARE_AC20</summary>
public static readonly Guid WAVE_FORMAT_VOXWARE_AC20 = new Guid((short)AudioEncoding.WAVE_FORMAT_VOXWARE_AC20 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VOXWARE_RT24</summary>
public static readonly Guid WAVE_FORMAT_VOXWARE_RT24 = new Guid((short)AudioEncoding.WAVE_FORMAT_VOXWARE_RT24 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VOXWARE_RT29</summary>
public static readonly Guid WAVE_FORMAT_VOXWARE_RT29 = new Guid((short)AudioEncoding.WAVE_FORMAT_VOXWARE_RT29 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VOXWARE_RT29HW</summary>
public static readonly Guid WAVE_FORMAT_VOXWARE_RT29HW = new Guid((short)AudioEncoding.WAVE_FORMAT_VOXWARE_RT29HW & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VOXWARE_VR12</summary>
public static readonly Guid WAVE_FORMAT_VOXWARE_VR12 = new Guid((short)AudioEncoding.WAVE_FORMAT_VOXWARE_VR12 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VOXWARE_VR18</summary>
public static readonly Guid WAVE_FORMAT_VOXWARE_VR18 = new Guid((short)AudioEncoding.WAVE_FORMAT_VOXWARE_VR18 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VOXWARE_TQ40</summary>
public static readonly Guid WAVE_FORMAT_VOXWARE_TQ40 = new Guid((short)AudioEncoding.WAVE_FORMAT_VOXWARE_TQ40 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_SOFTSOUND</summary>
public static readonly Guid WAVE_FORMAT_SOFTSOUND = new Guid((short)AudioEncoding.WAVE_FORMAT_SOFTSOUND & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VOXWARE_TQ60</summary>
public static readonly Guid WAVE_FORMAT_VOXWARE_TQ60 = new Guid((short)AudioEncoding.WAVE_FORMAT_VOXWARE_TQ60 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_MSRT24</summary>
public static readonly Guid WAVE_FORMAT_MSRT24 = new Guid((short)AudioEncoding.WAVE_FORMAT_MSRT24 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_G729A</summary>
public static readonly Guid WAVE_FORMAT_G729A = new Guid((short)AudioEncoding.WAVE_FORMAT_G729A & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_MVI_MVI2</summary>
public static readonly Guid WAVE_FORMAT_MVI_MVI2 = new Guid((short)AudioEncoding.WAVE_FORMAT_MVI_MVI2 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DF_G726</summary>
public static readonly Guid WAVE_FORMAT_DF_G726 = new Guid((short)AudioEncoding.WAVE_FORMAT_DF_G726 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DF_GSM610</summary>
public static readonly Guid WAVE_FORMAT_DF_GSM610 = new Guid((short)AudioEncoding.WAVE_FORMAT_DF_GSM610 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_ISIAUDIO</summary>
public static readonly Guid WAVE_FORMAT_ISIAUDIO = new Guid((short)AudioEncoding.WAVE_FORMAT_ISIAUDIO & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_ONLIVE</summary>
public static readonly Guid WAVE_FORMAT_ONLIVE = new Guid((short)AudioEncoding.WAVE_FORMAT_ONLIVE & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_SBC24</summary>
public static readonly Guid WAVE_FORMAT_SBC24 = new Guid((short)AudioEncoding.WAVE_FORMAT_SBC24 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DOLBY_AC3_SPDIF</summary>
public static readonly Guid WAVE_FORMAT_DOLBY_AC3_SPDIF = new Guid((short)AudioEncoding.WAVE_FORMAT_DOLBY_AC3_SPDIF & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_MEDIASONIC_G723</summary>
public static readonly Guid WAVE_FORMAT_MEDIASONIC_G723 = new Guid((short)AudioEncoding.WAVE_FORMAT_MEDIASONIC_G723 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_PROSODY_8KBPS</summary>
public static readonly Guid WAVE_FORMAT_PROSODY_8KBPS = new Guid((short)AudioEncoding.WAVE_FORMAT_PROSODY_8KBPS & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_ZYXEL_ADPCM</summary>
public static readonly Guid WAVE_FORMAT_ZYXEL_ADPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_ZYXEL_ADPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_PHILIPS_LPCBB</summary>
public static readonly Guid WAVE_FORMAT_PHILIPS_LPCBB = new Guid((short)AudioEncoding.WAVE_FORMAT_PHILIPS_LPCBB & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_PACKED</summary>
public static readonly Guid WAVE_FORMAT_PACKED = new Guid((short)AudioEncoding.WAVE_FORMAT_PACKED & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_MALDEN_PHONYTALK</summary>
public static readonly Guid WAVE_FORMAT_MALDEN_PHONYTALK = new Guid((short)AudioEncoding.WAVE_FORMAT_MALDEN_PHONYTALK & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_GSM</summary>
public static readonly Guid Gsm = new Guid((short)AudioEncoding.Gsm & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_G729</summary>
public static readonly Guid G729 = new Guid((short)AudioEncoding.G729 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_G723</summary>
public static readonly Guid G723 = new Guid((short)AudioEncoding.G723 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_ACELP</summary>
public static readonly Guid Acelp = new Guid((short)AudioEncoding.Acelp & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>
/// WAVE_FORMAT_RAW_AAC1
/// </summary>
public static readonly Guid RawAac = new Guid((short)AudioEncoding.RawAac & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_RHETOREX_ADPCM</summary>
public static readonly Guid WAVE_FORMAT_RHETOREX_ADPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_RHETOREX_ADPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_IRAT</summary>
public static readonly Guid WAVE_FORMAT_IRAT = new Guid((short)AudioEncoding.WAVE_FORMAT_IRAT & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VIVO_G723</summary>
public static readonly Guid WAVE_FORMAT_VIVO_G723 = new Guid((short)AudioEncoding.WAVE_FORMAT_VIVO_G723 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VIVO_SIREN</summary>
public static readonly Guid WAVE_FORMAT_VIVO_SIREN = new Guid((short)AudioEncoding.WAVE_FORMAT_VIVO_SIREN & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DIGITAL_G723</summary>
public static readonly Guid WAVE_FORMAT_DIGITAL_G723 = new Guid((short)AudioEncoding.WAVE_FORMAT_DIGITAL_G723 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_SANYO_LD_ADPCM</summary>
public static readonly Guid WAVE_FORMAT_SANYO_LD_ADPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_SANYO_LD_ADPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_SIPROLAB_ACEPLNET</summary>
public static readonly Guid WAVE_FORMAT_SIPROLAB_ACEPLNET = new Guid((short)AudioEncoding.WAVE_FORMAT_SIPROLAB_ACEPLNET & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_SIPROLAB_ACELP4800</summary>
public static readonly Guid WAVE_FORMAT_SIPROLAB_ACELP4800 = new Guid((short)AudioEncoding.WAVE_FORMAT_SIPROLAB_ACELP4800 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_SIPROLAB_ACELP8V3</summary>
public static readonly Guid WAVE_FORMAT_SIPROLAB_ACELP8V3 = new Guid((short)AudioEncoding.WAVE_FORMAT_SIPROLAB_ACELP8V3 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_SIPROLAB_G729</summary>
public static readonly Guid WAVE_FORMAT_SIPROLAB_G729 = new Guid((short)AudioEncoding.WAVE_FORMAT_SIPROLAB_G729 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_SIPROLAB_G729A</summary>
public static readonly Guid WAVE_FORMAT_SIPROLAB_G729A = new Guid((short)AudioEncoding.WAVE_FORMAT_SIPROLAB_G729A & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_SIPROLAB_KELVIN</summary>
public static readonly Guid WAVE_FORMAT_SIPROLAB_KELVIN = new Guid((short)AudioEncoding.WAVE_FORMAT_SIPROLAB_KELVIN & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_G726ADPCM</summary>
public static readonly Guid WAVE_FORMAT_G726ADPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_G726ADPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_QUALCOMM_PUREVOICE</summary>
public static readonly Guid WAVE_FORMAT_QUALCOMM_PUREVOICE = new Guid((short)AudioEncoding.WAVE_FORMAT_QUALCOMM_PUREVOICE & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_QUALCOMM_HALFRATE</summary>
public static readonly Guid WAVE_FORMAT_QUALCOMM_HALFRATE = new Guid((short)AudioEncoding.WAVE_FORMAT_QUALCOMM_HALFRATE & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_TUBGSM</summary>
public static readonly Guid WAVE_FORMAT_TUBGSM = new Guid((short)AudioEncoding.WAVE_FORMAT_TUBGSM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_MSAUDIO1</summary>
public static readonly Guid WAVE_FORMAT_MSAUDIO1 = new Guid((short)AudioEncoding.WAVE_FORMAT_MSAUDIO1 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>
/// Windows Media Audio, WAVE_FORMAT_WMAUDIO2, Microsoft Corporation
/// </summary>
public static readonly Guid WindowsMediaAudio = new Guid((short)AudioEncoding.WindowsMediaAudio & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>
/// Windows Media Audio Professional WAVE_FORMAT_WMAUDIO3, Microsoft Corporation
/// </summary>
public static readonly Guid WindowsMediaAudioProfessional = new Guid((short)AudioEncoding.WindowsMediaAudioProfessional & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>
/// Windows Media Audio Lossless, WAVE_FORMAT_WMAUDIO_LOSSLESS
/// </summary>
public static readonly Guid WindowsMediaAudioLosseless = new Guid((short)AudioEncoding.WindowsMediaAudioLosseless & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>
/// Windows Media Audio Professional over SPDIF WAVE_FORMAT_WMASPDIF (0x0164)
/// </summary>
public static readonly Guid WindowsMediaAudioSpdif = new Guid((short)AudioEncoding.WindowsMediaAudioSpdif & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_UNISYS_NAP_ADPCM</summary>
public static readonly Guid WAVE_FORMAT_UNISYS_NAP_ADPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_UNISYS_NAP_ADPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_UNISYS_NAP_ULAW</summary>
public static readonly Guid WAVE_FORMAT_UNISYS_NAP_ULAW = new Guid((short)AudioEncoding.WAVE_FORMAT_UNISYS_NAP_ULAW & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_UNISYS_NAP_ALAW</summary>
public static readonly Guid WAVE_FORMAT_UNISYS_NAP_ALAW = new Guid((short)AudioEncoding.WAVE_FORMAT_UNISYS_NAP_ALAW & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_UNISYS_NAP_16K</summary>
public static readonly Guid WAVE_FORMAT_UNISYS_NAP_16K = new Guid((short)AudioEncoding.WAVE_FORMAT_UNISYS_NAP_16K & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_CREATIVE_ADPCM</summary>
public static readonly Guid WAVE_FORMAT_CREATIVE_ADPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_CREATIVE_ADPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_CREATIVE_FASTSPEECH8</summary>
public static readonly Guid WAVE_FORMAT_CREATIVE_FASTSPEECH8 = new Guid((short)AudioEncoding.WAVE_FORMAT_CREATIVE_FASTSPEECH8 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_CREATIVE_FASTSPEECH10</summary>
public static readonly Guid WAVE_FORMAT_CREATIVE_FASTSPEECH10 = new Guid((short)AudioEncoding.WAVE_FORMAT_CREATIVE_FASTSPEECH10 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_UHER_ADPCM</summary>
public static readonly Guid WAVE_FORMAT_UHER_ADPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_UHER_ADPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_QUARTERDECK</summary>
public static readonly Guid WAVE_FORMAT_QUARTERDECK = new Guid((short)AudioEncoding.WAVE_FORMAT_QUARTERDECK & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_ILINK_VC</summary>
public static readonly Guid WAVE_FORMAT_ILINK_VC = new Guid((short)AudioEncoding.WAVE_FORMAT_ILINK_VC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_RAW_SPORT</summary>
public static readonly Guid WAVE_FORMAT_RAW_SPORT = new Guid((short)AudioEncoding.WAVE_FORMAT_RAW_SPORT & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_ESST_AC3</summary>
public static readonly Guid WAVE_FORMAT_ESST_AC3 = new Guid((short)AudioEncoding.WAVE_FORMAT_ESST_AC3 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_IPI_HSX</summary>
public static readonly Guid WAVE_FORMAT_IPI_HSX = new Guid((short)AudioEncoding.WAVE_FORMAT_IPI_HSX & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_IPI_RPELP</summary>
public static readonly Guid WAVE_FORMAT_IPI_RPELP = new Guid((short)AudioEncoding.WAVE_FORMAT_IPI_RPELP & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_CS2</summary>
public static readonly Guid WAVE_FORMAT_CS2 = new Guid((short)AudioEncoding.WAVE_FORMAT_CS2 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_SONY_SCX</summary>
public static readonly Guid WAVE_FORMAT_SONY_SCX = new Guid((short)AudioEncoding.WAVE_FORMAT_SONY_SCX & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_FM_TOWNS_SND</summary>
public static readonly Guid WAVE_FORMAT_FM_TOWNS_SND = new Guid((short)AudioEncoding.WAVE_FORMAT_FM_TOWNS_SND & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_BTV_DIGITAL</summary>
public static readonly Guid WAVE_FORMAT_BTV_DIGITAL = new Guid((short)AudioEncoding.WAVE_FORMAT_BTV_DIGITAL & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_QDESIGN_MUSIC</summary>
public static readonly Guid WAVE_FORMAT_QDESIGN_MUSIC = new Guid((short)AudioEncoding.WAVE_FORMAT_QDESIGN_MUSIC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VME_VMPCM</summary>
public static readonly Guid WAVE_FORMAT_VME_VMPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_VME_VMPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_TPC</summary>
public static readonly Guid WAVE_FORMAT_TPC = new Guid((short)AudioEncoding.WAVE_FORMAT_TPC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_OLIGSM</summary>
public static readonly Guid WAVE_FORMAT_OLIGSM = new Guid((short)AudioEncoding.WAVE_FORMAT_OLIGSM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_OLIADPCM</summary>
public static readonly Guid WAVE_FORMAT_OLIADPCM = new Guid((short)AudioEncoding.WAVE_FORMAT_OLIADPCM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_OLICELP</summary>
public static readonly Guid WAVE_FORMAT_OLICELP = new Guid((short)AudioEncoding.WAVE_FORMAT_OLICELP & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_OLISBC</summary>
public static readonly Guid WAVE_FORMAT_OLISBC = new Guid((short)AudioEncoding.WAVE_FORMAT_OLISBC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_OLIOPR</summary>
public static readonly Guid WAVE_FORMAT_OLIOPR = new Guid((short)AudioEncoding.WAVE_FORMAT_OLIOPR & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_LH_CODEC</summary>
public static readonly Guid WAVE_FORMAT_LH_CODEC = new Guid((short)AudioEncoding.WAVE_FORMAT_LH_CODEC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_NORRIS</summary>
public static readonly Guid WAVE_FORMAT_NORRIS = new Guid((short)AudioEncoding.WAVE_FORMAT_NORRIS & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_SOUNDSPACE_MUSICOMPRESS</summary>
public static readonly Guid WAVE_FORMAT_SOUNDSPACE_MUSICOMPRESS = new Guid((short)AudioEncoding.WAVE_FORMAT_SOUNDSPACE_MUSICOMPRESS & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>
/// Advanced Audio Coding (AAC) audio in Audio Data Transport Stream (ADTS) format.
/// The format block is a WAVEFORMATEX structure with wFormatTag equal to WAVE_FORMAT_MPEG_ADTS_AAC.
/// </summary>
/// <remarks>
/// The WAVEFORMATEX structure specifies the core AAC-LC sample rate and number of channels,
/// prior to applying spectral band replication (SBR) or parametric stereo (PS) tools, if present.
/// No additional data is required after the WAVEFORMATEX structure.
/// </remarks>
/// <see>http://msdn.microsoft.com/en-us/library/dd317599%28VS.85%29.aspx</see>
public static readonly Guid MPEG_ADTS_AAC = new Guid((short)AudioEncoding.MPEG_ADTS_AAC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>MPEG_RAW_AAC</summary>
/// <remarks>Source wmCodec.h</remarks>
public static readonly Guid MPEG_RAW_AAC = new Guid((short)AudioEncoding.MPEG_RAW_AAC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>
/// MPEG-4 audio transport stream with a synchronization layer (LOAS) and a multiplex layer (LATM).
/// The format block is a WAVEFORMATEX structure with wFormatTag equal to WAVE_FORMAT_MPEG_LOAS.
/// See <see href="http://msdn.microsoft.com/en-us/library/dd317599%28VS.85%29.aspx"/>.
/// </summary>
/// <remarks>
/// The WAVEFORMATEX structure specifies the core AAC-LC sample rate and number of channels,
/// prior to applying spectral SBR or PS tools, if present.
/// No additional data is required after the WAVEFORMATEX structure.
/// </remarks>
public static readonly Guid MPEG_LOAS = new Guid((short)AudioEncoding.MPEG_LOAS & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>NOKIA_MPEG_ADTS_AAC</summary>
/// <remarks>Source wmCodec.h</remarks>
public static readonly Guid NOKIA_MPEG_ADTS_AAC = new Guid((short)AudioEncoding.NOKIA_MPEG_ADTS_AAC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>NOKIA_MPEG_RAW_AAC</summary>
/// <remarks>Source wmCodec.h</remarks>
public static readonly Guid NOKIA_MPEG_RAW_AAC = new Guid((short)AudioEncoding.NOKIA_MPEG_RAW_AAC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>VODAFONE_MPEG_ADTS_AAC</summary>
/// <remarks>Source wmCodec.h</remarks>
public static readonly Guid VODAFONE_MPEG_ADTS_AAC = new Guid((short)AudioEncoding.VODAFONE_MPEG_ADTS_AAC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>VODAFONE_MPEG_RAW_AAC</summary>
/// <remarks>Source wmCodec.h</remarks>
public static readonly Guid VODAFONE_MPEG_RAW_AAC = new Guid((short)AudioEncoding.VODAFONE_MPEG_RAW_AAC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>
/// High-Efficiency Advanced Audio Coding (HE-AAC) stream.
/// The format block is an HEAACWAVEFORMAT structure. See <see href="http://msdn.microsoft.com/en-us/library/dd317599%28VS.85%29.aspx"/>.
/// </summary>
public static readonly Guid MPEG_HEAAC = new Guid((short)AudioEncoding.MPEG_HEAAC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DVM</summary>
public static readonly Guid WAVE_FORMAT_DVM = new Guid((short)AudioEncoding.WAVE_FORMAT_DVM & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
// others - not from MS headers
/// <summary>WAVE_FORMAT_VORBIS1 "Og" Original stream compatible</summary>
public static readonly Guid Vorbis1 = new Guid((short)AudioEncoding.Vorbis1 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VORBIS2 "Pg" Have independent header</summary>
public static readonly Guid Vorbis2 = new Guid((short)AudioEncoding.Vorbis2 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VORBIS3 "Qg" Have no codebook header</summary>
public static readonly Guid Vorbis3 = new Guid((short)AudioEncoding.Vorbis3 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VORBIS1P "og" Original stream compatible</summary>
public static readonly Guid Vorbis1P = new Guid((short)AudioEncoding.Vorbis1P & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VORBIS2P "pg" Have independent header</summary>
public static readonly Guid Vorbis2P = new Guid((short)AudioEncoding.Vorbis2P & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_VORBIS3P "qg" Have no codebook header</summary>
public static readonly Guid Vorbis3P = new Guid((short)AudioEncoding.Vorbis3P & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>
/// Raw AAC1
/// </summary>
public static readonly Guid WAVE_FORMAT_RAW_AAC1 = new Guid((short)AudioEncoding.WAVE_FORMAT_RAW_AAC1 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>
/// Windows Media Audio Voice (WMA Voice)
/// </summary>
public static readonly Guid WAVE_FORMAT_WMAVOICE9 = new Guid((short)AudioEncoding.WAVE_FORMAT_WMAVOICE9 & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>Extensible</summary>
public static readonly Guid Extensible = new Guid((short)AudioEncoding.Extensible & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>WAVE_FORMAT_DEVELOPMENT</summary>
public static readonly Guid WAVE_FORMAT_DEVELOPMENT = new Guid((short)AudioEncoding.WAVE_FORMAT_DEVELOPMENT & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
/// <summary>
/// FLAC
/// </summary>
public static readonly Guid WAVE_FORMAT_FLAC = new Guid((short)AudioEncoding.WAVE_FORMAT_FLAC & 0x0000FFFF, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
// ReSharper restore InconsistentNaming
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: d6a03d1238626a94cbad8f83782f22eb
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,102 @@
using System;
namespace CSCore
{
/// <summary>
/// Speaker-position flags used by <see cref="WaveFormatExtensible" />. Each flag mirrors one
/// bit of the native dwChannelMask field; for details see
/// http://msdn.microsoft.com/en-us/library/windows/desktop/dd757714(v=vs.85).aspx
/// </summary>
[Flags]
public enum ChannelMask
{
    /// <summary>Front left speaker.</summary>
    SpeakerFrontLeft = 1 << 0,
    /// <summary>Front right speaker.</summary>
    SpeakerFrontRight = 1 << 1,
    /// <summary>Front center speaker.</summary>
    SpeakerFrontCenter = 1 << 2,
    /// <summary>Low frequency speaker.</summary>
    SpeakerLowFrequency = 1 << 3,
    /// <summary>Back left speaker.</summary>
    SpeakerBackLeft = 1 << 4,
    /// <summary>Back right speaker.</summary>
    SpeakerBackRight = 1 << 5,
    /// <summary>Front left of center speaker.</summary>
    SpeakerFrontLeftOfCenter = 1 << 6,
    /// <summary>Front right of center speaker.</summary>
    SpeakerFrontRightOfCenter = 1 << 7,
    /// <summary>Back center speaker.</summary>
    SpeakerBackCenter = 1 << 8,
    /// <summary>Side left speaker.</summary>
    SpeakerSideLeft = 1 << 9,
    /// <summary>Side right speaker.</summary>
    SpeakerSideRight = 1 << 10,
    /// <summary>Top center speaker.</summary>
    SpeakerTopCenter = 1 << 11,
    /// <summary>Top front left speaker.</summary>
    SpeakerTopFrontLeft = 1 << 12,
    /// <summary>Top front center speaker.</summary>
    SpeakerTopFrontCenter = 1 << 13,
    /// <summary>Top front right speaker.</summary>
    SpeakerTopFrontRight = 1 << 14,
    /// <summary>Top back left speaker.</summary>
    SpeakerTopBackLeft = 1 << 15,
    /// <summary>Top back center speaker.</summary>
    SpeakerTopBackCenter = 1 << 16,
    /// <summary>Top back right speaker.</summary>
    SpeakerTopBackRight = 1 << 17
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4903a9a437074914ebd63a1298f684e7
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,30 @@
using System;
namespace CSCore
{
/// <summary>
/// Provides a few basic extensions.
/// </summary>
public static class Extensions
{
    // ReSharper disable once InconsistentNaming
    internal static bool IsPCM(this WaveFormat waveFormat)
    {
        if (waveFormat == null)
        {
            throw new ArgumentNullException("waveFormat");
        }

        // WaveFormatExtensible carries the encoding in its SubFormat guid;
        // a plain WaveFormat exposes it through WaveFormatTag.
        var extensible = waveFormat as WaveFormatExtensible;
        return extensible != null
            ? extensible.SubFormat == AudioSubTypes.Pcm
            : waveFormat.WaveFormatTag == AudioEncoding.Pcm;
    }

    internal static bool IsIeeeFloat(this WaveFormat waveFormat)
    {
        if (waveFormat == null)
        {
            throw new ArgumentNullException("waveFormat");
        }

        var extensible = waveFormat as WaveFormatExtensible;
        return extensible != null
            ? extensible.SubFormat == AudioSubTypes.IeeeFloat
            : waveFormat.WaveFormatTag == AudioEncoding.IeeeFloat;
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 3cd34287d9d0abb43816d791896fe275
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,32 @@
using System;
namespace CSCore
{
/// <summary>
/// Defines the base for all audio streams.
/// </summary>
/// <remarks>
/// Inherits <see cref="IDisposable" />, so every audio source must release its underlying
/// resources when disposed.
/// </remarks>
public interface IAudioSource : IDisposable
{
/// <summary>
/// Gets a value indicating whether the <see cref="IAudioSource" /> supports seeking.
/// When this returns false, setting <see cref="Position" /> may not be supported.
/// </summary>
bool CanSeek { get; }
/// <summary>
/// Gets the <see cref="WaveFormat" /> of the waveform-audio data provided by this source.
/// </summary>
WaveFormat WaveFormat { get; }
/// <summary>
/// Gets or sets the current position. The unit of this property depends on the implementation of this interface. Some
/// implementations may not support this property.
/// </summary>
long Position { get; set; }
/// <summary>
/// Gets the length of the waveform-audio data. The unit of this property depends on the implementation of this
/// interface. Some implementations may not support this property.
/// </summary>
long Length { get; }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: b50967e9192a7cf4284b0ffad20718f2
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,27 @@
namespace CSCore
{
/// <summary>
/// Defines a generic base for all readable audio streams.
/// </summary>
/// <typeparam name="T">
/// The type of the provided audio data. Declared contravariant (<c>in</c> modifier).
/// </typeparam>
public interface IReadableAudioSource<in T> : IAudioSource
{
/// <summary>
/// Reads a sequence of elements from the <see cref="IReadableAudioSource{T}" /> and advances the position within the
/// stream by the
/// number of elements read.
/// </summary>
/// <param name="buffer">
/// An array of elements. When this method returns, the <paramref name="buffer" /> contains the specified
/// array of elements with the values between <paramref name="offset" /> and (<paramref name="offset" /> +
/// <paramref name="count" /> - 1) replaced by the elements read from the current source.
/// </param>
/// <param name="offset">
/// The zero-based offset in the <paramref name="buffer" /> at which to begin storing the data
/// read from the current stream.
/// </param>
/// <param name="count">The maximum number of elements to read from the current source.</param>
/// <returns>The total number of elements read into the buffer. May be less than <paramref name="count" />.</returns>
int Read(T[] buffer, int offset, int count);
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 783f914c3776f4041b78c8639efe832f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,31 @@
namespace CSCore
{
/// <summary>
/// Defines the base for all audio streams which provide raw byte data.
/// </summary>
/// <remarks>
/// Compared to the <see cref="ISampleSource" />, the <see cref="IWaveSource" /> provides raw bytes instead of samples.
/// That means that the <see cref="IAudioSource.Position" /> and the <see cref="IAudioSource.Length" /> properties are
/// expressed in bytes.
/// Also the <see cref="IReadableAudioSource{T}.Read" /> method provides raw bytes instead of samples.
/// </remarks>
public interface IWaveSource : IReadableAudioSource<byte>
{
/*/// <summary>
/// Reads a sequence of bytes from the <see cref="IWaveSource" /> and advances the position within the stream by the
/// number of bytes read.
/// </summary>
/// <param name="buffer">
/// An array of bytes. When this method returns, the <paramref name="buffer" /> contains the specified
/// byte array with the values between <paramref name="offset" /> and (<paramref name="offset" /> +
/// <paramref name="count" /> - 1) replaced by the bytes read from the current source.
/// </param>
/// <param name="offset">
/// The zero-based byte offset in the <paramref name="buffer" /> at which to begin storing the data
/// read from the current stream.
/// </param>
/// <param name="count">The maximum number of bytes to read from the current source.</param>
/// <returns>The total number of bytes read into the buffer.</returns>
int Read(byte[] buffer, int offset, int count);*/
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 4bcb558a4ff125b4e8078864d6b39531
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,16 @@
namespace CSCore
{
/// <summary>
/// Provides the <see cref="Write" /> method for sinks that consume raw byte data.
/// </summary>
public interface IWriteable
{
/// <summary>
/// Used to write down raw byte data.
/// </summary>
/// <param name="buffer">Byte array which contains the data to write down.</param>
/// <param name="offset">Zero-based offset in the <paramref name="buffer" /> at which to start reading.</param>
/// <param name="count">Number of bytes to write.</param>
void Write(byte[] buffer, int offset, int count);
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 49b581aba0acb904897782786821c602
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,274 @@
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Text;
namespace CSCore
{
// ReSharper disable ConvertToAutoProperty
/// <summary>
/// Defines the format of waveform-audio data.
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 2)]
public class WaveFormat : ICloneable, IEquatable<WaveFormat>
{
// Backing fields. Several are declared as short (Int16) to match the sequential,
// Pack = 2 native WAVEFORMATEX layout declared by the StructLayout attribute above.
private AudioEncoding _encoding; // presumably backs WaveFormatTag — declaration is below this view
private short _channels; // backs Channels
private int _sampleRate; // backs SampleRate
private int _bytesPerSecond; // backs BytesPerSecond
private short _blockAlign; // backs BlockAlign
private short _bitsPerSample; // backs BitsPerSample
private short _extraSize; // backs ExtraSize
/// <summary>
/// Gets the number of channels in the waveform-audio data. Mono data uses one channel and stereo data uses two
/// channels.
/// </summary>
/// <remarks>
/// The setter truncates the value to Int16 and calls UpdateProperties() to recompute dependent values.
/// </remarks>
public virtual int Channels
{
get { return _channels; }
protected internal set
{
_channels = (short) value;
UpdateProperties();
}
}
/// <summary>
/// Gets the sample rate, in samples per second (hertz).
/// </summary>
/// <remarks>
/// The setter calls UpdateProperties() to recompute dependent values.
/// </remarks>
public virtual int SampleRate
{
get { return _sampleRate; }
protected internal set
{
_sampleRate = value;
UpdateProperties();
}
}
/// <summary>
/// Gets the required average data transfer rate, in bytes per second. For example, 16-bit stereo at 44.1 kHz has an
/// average data rate of 176,400 bytes per second (2 channels — 2 bytes per sample per channel — 44,100 samples per
/// second).
/// </summary>
public virtual int BytesPerSecond
{
get { return _bytesPerSecond; }
protected internal set { _bytesPerSecond = value; }
}
/// <summary>
/// Gets the block alignment, in bytes. The block alignment is the minimum atomic unit of data. For PCM data, the block
/// alignment is the number of bytes used by a single sample, including data for both channels if the data is stereo.
/// For example, the block alignment for 16-bit stereo PCM is 4 bytes (2 channels x 2 bytes per sample).
/// </summary>
public virtual int BlockAlign
{
get { return _blockAlign; }
protected internal set { _blockAlign = (short) value; }
}
/// <summary>
/// Gets the number of bits, used to store one sample.
/// </summary>
public virtual int BitsPerSample
{
get { return _bitsPerSample; }
protected internal set
{
_bitsPerSample = (short) value;
UpdateProperties();
}
}
/// <summary>
/// Gets the size (in bytes) of extra information. This value is mainly used for marshalling.
/// </summary>
public virtual int ExtraSize
{
get { return _extraSize; }
protected internal set { _extraSize = (short) value; }
}
/// <summary>
/// Gets the number of bytes, used to store one sample.
/// </summary>
public virtual int BytesPerSample
{
get { return BitsPerSample / 8; }
}
/// <summary>
/// Gets the number of bytes, used to store one block. This value equals <see cref="BytesPerSample" /> multiplied with
/// <see cref="Channels" />.
/// </summary>
public virtual int BytesPerBlock
{
get { return BytesPerSample * Channels; }
}
/// <summary>
/// Gets the waveform-audio format type.
/// </summary>
public virtual AudioEncoding WaveFormatTag
{
get { return _encoding; }
protected internal set { _encoding = value; }
}
/// <summary>
/// Initializes a new instance of the <see cref="WaveFormat" /> class with a sample rate of 44100 Hz, bits per sample
/// of 16 bit, 2 channels and PCM as the format type.
/// </summary>
public WaveFormat()
: this(44100, 16, 2)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="WaveFormat" /> class with PCM as the format type.
/// </summary>
/// <param name="sampleRate">Samples per second.</param>
/// <param name="bits">Number of bits, used to store one sample.</param>
/// <param name="channels">Number of channels in the waveform-audio data.</param>
public WaveFormat(int sampleRate, int bits, int channels)
: this(sampleRate, bits, channels, AudioEncoding.Pcm)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="WaveFormat" /> class.
/// </summary>
/// <param name="sampleRate">Samples per second.</param>
/// <param name="bits">Number of bits, used to store one sample.</param>
/// <param name="channels">Number of channels in the waveform-audio data.</param>
/// <param name="encoding">Format type or encoding of the wave format.</param>
public WaveFormat(int sampleRate, int bits, int channels, AudioEncoding encoding)
: this(sampleRate, bits, channels, encoding, 0)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="WaveFormat" /> class.
/// </summary>
/// <param name="sampleRate">Samples per second.</param>
/// <param name="bits">Number of bits, used to store one sample.</param>
/// <param name="channels">Number of channels in the waveform-audio data.</param>
/// <param name="encoding">Format type or encoding of the wave format.</param>
/// <param name="extraSize">Size (in bytes) of extra information. This value is mainly used for marshalling.</param>
public WaveFormat(int sampleRate, int bits, int channels, AudioEncoding encoding, int extraSize)
{
if (sampleRate < 1)
throw new ArgumentOutOfRangeException("sampleRate");
if (bits < 0)
throw new ArgumentOutOfRangeException("bits");
if (channels < 1)
throw new ArgumentOutOfRangeException("channels", "Number of channels has to be bigger than 0.");
_sampleRate = sampleRate;
_bitsPerSample = (short) bits;
_channels = (short) channels;
_encoding = encoding;
_extraSize = (short) extraSize;
// ReSharper disable once DoNotCallOverridableMethodsInConstructor
UpdateProperties();
}
/// <summary>
/// Converts a duration in milliseconds to a duration in bytes.
/// </summary>
/// <param name="milliseconds">Duration in millisecond to convert to a duration in bytes.</param>
/// <returns>Duration in bytes.</returns>
public long MillisecondsToBytes(double milliseconds)
{
var result = (long) ((BytesPerSecond / 1000.0) * milliseconds);
result -= result % BlockAlign;
return result;
}
/// <summary>
/// Converts a duration in bytes to a duration in milliseconds.
/// </summary>
/// <param name="bytes">Duration in bytes to convert to a duration in milliseconds.</param>
/// <returns>Duration in milliseconds.</returns>
public double BytesToMilliseconds(long bytes)
{
bytes -= bytes % BlockAlign;
var result = ((bytes / (double) BytesPerSecond) * 1000.0);
return result;
}
/// <summary>
/// Indicates whether the current object is equal to another object of the same type.
/// </summary>
/// <param name="other">The <see cref="WaveFormat"/> to compare with this <see cref="WaveFormat"/>.</param>
/// <returns>true if the current object is equal to the other parameter; otherwise, false.</returns>
public virtual bool Equals(WaveFormat other)
{
return Channels == other.Channels &&
SampleRate == other.SampleRate &&
BytesPerSecond == other.BytesPerSecond &&
BlockAlign == other.BlockAlign &&
BitsPerSample == other.BitsPerSample &&
ExtraSize == other.ExtraSize &&
WaveFormatTag == other.WaveFormatTag;
}
/// <summary>
/// Returns a string which describes the <see cref="WaveFormat" />.
/// </summary>
/// <returns>A string which describes the <see cref="WaveFormat" />.</returns>
public override string ToString()
{
return GetInformation().ToString();
}
/// <summary>
/// Creates a new <see cref="WaveFormat" /> object that is a copy of the current instance.
/// </summary>
/// <returns>A copy of the current instance.</returns>
public virtual object Clone()
{
return MemberwiseClone(); //since there are value types MemberWiseClone is enough.
}
internal virtual void SetWaveFormatTagInternal(AudioEncoding waveFormatTag)
{
WaveFormatTag = waveFormatTag;
}
internal virtual void SetBitsPerSampleAndFormatProperties(int bitsPerSample)
{
BitsPerSample = bitsPerSample;
UpdateProperties();
}
/// <summary>
/// Updates the <see cref="BlockAlign"/>- and the <see cref="BytesPerSecond"/>-property.
/// </summary>
internal protected virtual void UpdateProperties()
{
BlockAlign = (BitsPerSample / 8) * Channels;
BytesPerSecond = BlockAlign * SampleRate;
}
[DebuggerStepThrough]
private StringBuilder GetInformation()
{
var builder = new StringBuilder();
builder.Append("ChannelsAvailable: " + Channels);
builder.Append("|SampleRate: " + SampleRate);
builder.Append("|Bps: " + BytesPerSecond);
builder.Append("|BlockAlign: " + BlockAlign);
builder.Append("|BitsPerSample: " + BitsPerSample);
builder.Append("|Encoding: " + _encoding);
return builder;
}
}
// ReSharper restore ConvertToAutoProperty
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 5d3c9bf63affc0f4fb0921bad33ffb91
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,203 @@
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Text;
namespace CSCore
{
//http://msdn.microsoft.com/en-us/library/windows/hardware/ff536383(v=vs.85).aspx
//http://msdn.microsoft.com/en-us/library/windows/hardware/gg463006.aspx
/// <summary>
/// Defines the format of waveform-audio data for formats having more than two channels or higher sample resolutions
/// than allowed by <see cref="WaveFormat" />.
/// Can be used to define any format that can be defined by <see cref="WaveFormat" />.
/// For more information see <see href="http://msdn.microsoft.com/en-us/library/windows/hardware/gg463006.aspx" /> and
/// <see href="http://msdn.microsoft.com/en-us/library/windows/hardware/ff536383(v=vs.85).aspx" />.
/// </summary>
/// <remarks>
/// The sequential layout with Pack = 2 mirrors the native WAVEFORMATEXTENSIBLE memory layout
/// for marshalling — do not reorder the private fields.
/// </remarks>
[StructLayout(LayoutKind.Sequential, Pack = 2, CharSet = CharSet.Ansi)]
public class WaveFormatExtensible : WaveFormat
{
    internal const int WaveFormatExtensibleExtraSize = 22; //2(WORD) + 4(DWORD) + 16(GUID)

    // "Samples" union of WAVEFORMATEXTENSIBLE: interpreted either as ValidBitsPerSample
    // or as SamplesPerBlock depending on the format.
    private short _samplesUnion;
    private ChannelMask _channelMask;
    private Guid _subFormat;

    /// <summary>
    /// Returns the SubType-Guid of a <paramref name="waveFormat" />. If the specified <paramref name="waveFormat" /> does
    /// not contain a SubType-Guid, the <see cref="WaveFormat.WaveFormatTag" /> gets converted to the equal SubType-Guid
    /// using the <see cref="AudioSubTypes.SubTypeFromEncoding" /> method.
    /// </summary>
    /// <param name="waveFormat"><see cref="WaveFormat" /> which gets used to determine the SubType-Guid.</param>
    /// <returns>SubType-Guid of the specified <paramref name="waveFormat" />.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="waveFormat"/> is null.</exception>
    public static Guid SubTypeFromWaveFormat(WaveFormat waveFormat)
    {
        if (waveFormat == null)
            throw new ArgumentNullException("waveFormat");
        if (waveFormat is WaveFormatExtensible)
            return ((WaveFormatExtensible) waveFormat).SubFormat;
        return AudioSubTypes.SubTypeFromEncoding(waveFormat.WaveFormatTag);
    }

    /// <summary>
    /// Gets the number of bits of precision in the signal.
    /// Usually equal to <see cref="WaveFormat.BitsPerSample" />. However, <see cref="WaveFormat.BitsPerSample" /> is the
    /// container size and must be a multiple of 8, whereas <see cref="ValidBitsPerSample" /> can be any value not
    /// exceeding the container size. For example, if the format uses 20-bit samples,
    /// <see cref="WaveFormat.BitsPerSample" /> must be at least 24, but <see cref="ValidBitsPerSample" /> is 20.
    /// </summary>
    public int ValidBitsPerSample
    {
        get { return _samplesUnion; }
        internal protected set { _samplesUnion = (short) value; }
    }

    /// <summary>
    /// Gets the number of samples contained in one compressed block of audio data. This value is used in buffer
    /// estimation. This value is used with compressed formats that have a fixed number of samples within each block. This
    /// value can be set to 0 if a variable number of samples is contained in each block of compressed audio data. In this
    /// case, buffer estimation and position information needs to be obtained in other ways.
    /// </summary>
    public int SamplesPerBlock
    {
        get { return _samplesUnion; }
        internal protected set { _samplesUnion = (short) value; }
    }

    /// <summary>
    /// Gets a bitmask specifying the assignment of channels in the stream to speaker positions.
    /// </summary>
    public ChannelMask ChannelMask
    {
        get { return _channelMask; }
        internal protected set { _channelMask = value; }
    }

    /// <summary>
    /// Subformat of the data, such as <see cref="AudioSubTypes.Pcm" />. The subformat information is similar to
    /// that provided by the tag in the <see cref="WaveFormat" /> class's <see cref="WaveFormat.WaveFormatTag" /> member.
    /// </summary>
    public Guid SubFormat
    {
        get { return _subFormat; }
        internal protected set { _subFormat = value; }
    }

    internal WaveFormatExtensible()
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="WaveFormatExtensible" /> class.
    /// </summary>
    /// <param name="sampleRate">
    /// Samplerate of the waveform-audio. This value will get applied to the
    /// <see cref="WaveFormat.SampleRate" /> property.
    /// </param>
    /// <param name="bits">
    /// Bits per sample of the waveform-audio. This value will get applied to the
    /// <see cref="WaveFormat.BitsPerSample" /> property and the <see cref="ValidBitsPerSample" /> property.
    /// </param>
    /// <param name="channels">
    /// Number of channels of the waveform-audio. This value will get applied to the
    /// <see cref="WaveFormat.Channels" /> property.
    /// </param>
    /// <param name="subFormat">Subformat of the data. This value will get applied to the <see cref="SubFormat" /> property.</param>
    public WaveFormatExtensible(int sampleRate, int bits, int channels, Guid subFormat)
        : base(sampleRate, bits, channels, AudioEncoding.Extensible, WaveFormatExtensibleExtraSize)
    {
        _samplesUnion = (short) bits;
        // Fix: removed the dead store "_subFormat = SubTypeFromWaveFormat(this);" — it only
        // read back the still-default Guid and was immediately overwritten below.
        // Default channel mask: one speaker-position flag per channel, starting at bit 0.
        int cm = 0;
        for (int i = 0; i < channels; i++)
        {
            cm |= (1 << i);
        }
        _channelMask = (ChannelMask) cm;
        _subFormat = subFormat;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="WaveFormatExtensible" /> class.
    /// </summary>
    /// <param name="sampleRate">
    /// Samplerate of the waveform-audio. This value will get applied to the
    /// <see cref="WaveFormat.SampleRate" /> property.
    /// </param>
    /// <param name="bits">
    /// Bits per sample of the waveform-audio. This value will get applied to the
    /// <see cref="WaveFormat.BitsPerSample" /> property and the <see cref="ValidBitsPerSample" /> property.
    /// </param>
    /// <param name="channels">
    /// Number of channels of the waveform-audio. This value will get applied to the
    /// <see cref="WaveFormat.Channels" /> property.
    /// </param>
    /// <param name="subFormat">Subformat of the data. This value will get applied to the <see cref="SubFormat" /> property.</param>
    /// <param name="channelMask">
    /// Bitmask specifying the assignment of channels in the stream to speaker positions. This value
    /// will get applied to the <see cref="ChannelMask" /> property.
    /// </param>
    /// <exception cref="ArgumentException">
    /// The number of flags set in <paramref name="channelMask"/> does not equal <paramref name="channels"/>.
    /// </exception>
    public WaveFormatExtensible(int sampleRate, int bits, int channels, Guid subFormat, ChannelMask channelMask)
        : this(sampleRate, bits, channels, subFormat)
    {
        // Count how many defined ChannelMask flags are present in the supplied mask; it must
        // match the channel count so each channel maps to exactly one speaker position.
        Array totalChannelMaskValues = Enum.GetValues(typeof (ChannelMask));
        int valuesSet = 0;
        for (int i = 0; i < totalChannelMaskValues.Length; i++)
        {
            if ((channelMask & (ChannelMask) totalChannelMaskValues.GetValue(i)) ==
                (ChannelMask) totalChannelMaskValues.GetValue(i))
                valuesSet++;
        }

        if (channels != valuesSet)
            throw new ArgumentException("Channels has to equal the set flags in the channelmask.");

        _channelMask = channelMask;
    }

    /// <summary>
    /// Converts the <see cref="WaveFormatExtensible" /> instance to a raw <see cref="WaveFormat" /> instance by converting
    /// the <see cref="SubFormat" /> to the equal <see cref="WaveFormat.WaveFormatTag" />.
    /// </summary>
    /// <returns>A simple <see cref="WaveFormat"/> instance.</returns>
    public WaveFormat ToWaveFormat()
    {
        return new WaveFormat(SampleRate, BitsPerSample, Channels, AudioSubTypes.EncodingFromSubType(SubFormat));
    }

    /// <summary>
    /// Creates a new <see cref="WaveFormat" /> object that is a copy of the current instance.
    /// </summary>
    /// <returns>
    /// A copy of the current instance.
    /// </returns>
    public override object Clone()
    {
        //var waveFormat = new WaveFormatExtensible(SampleRate, BitsPerSample, Channels, SubFormat, ChannelMask);
        //waveFormat._samplesUnion = _samplesUnion;
        //return waveFormat;
        return MemberwiseClone();
    }

    internal override void SetWaveFormatTagInternal(AudioEncoding waveFormatTag)
    {
        SubFormat = AudioSubTypes.SubTypeFromEncoding(waveFormatTag);
    }

    /// <summary>
    /// Returns a string which describes the <see cref="WaveFormatExtensible" />.
    /// </summary>
    /// <returns>A string which describes the <see cref="WaveFormatExtensible" />.</returns>
    [DebuggerStepThrough]
    public override string ToString()
    {
        var stringBuilder = new StringBuilder(base.ToString());
        stringBuilder.Append("|SubFormat: " + SubFormat);
        stringBuilder.Append("|ChannelMask: " + ChannelMask);
        return stringBuilder.ToString();
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: bd1311d5300bf934aa43ba92b819427b
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,362 @@
using System;
using System.Diagnostics;
using System.IO;
using System.Text;
namespace CSCore.Codecs.WAV
{
/// <summary>
/// Encoder for wave (RIFF/WAVE) files. Reserves 44 bytes for the canonical header, appends
/// raw sample data and patches the header sizes when disposed.
/// </summary>
public class WaveWriter : IDisposable, IWriteable
{
    private readonly WaveFormat _waveFormat;
    // Stream position at which the RIFF header starts; allows writing into a shared stream.
    private readonly long _waveStartPosition;
    private int _dataLength;
    private bool _isDisposed;
    private Stream _stream;
    private BinaryWriter _writer;
    private bool _isDisposing;
    private readonly bool _closeStream;

    /// <summary>
    /// Signals if the object has already been disposed
    /// </summary>
    public bool IsDisposed
    {
        get
        {
            return _isDisposed;
        }
    }

    /// <summary>
    /// Signals if the object is in a disposing state
    /// </summary>
    public bool IsDisposing
    {
        get
        {
            return _isDisposing;
        }
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="WaveWriter" /> class.
    /// </summary>
    /// <param name="fileName">Filename of the destination file. This filename should typically end with the .wav extension.</param>
    /// <param name="waveFormat">
    /// Format of the waveform-audio data. Note that the <see cref="WaveWriter" /> won't convert any
    /// data.
    /// </param>
    public WaveWriter(string fileName, WaveFormat waveFormat)
        : this(new FileStream(fileName, FileMode.Create, FileAccess.Write), waveFormat)
    {
        // Fix: FileMode.Create truncates an existing file. The previous File.OpenWrite call
        // did not, which left stale trailing bytes when overwriting a larger file.
        _closeStream = true;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="WaveWriter" /> class.
    /// </summary>
    /// <param name="stream">Destination stream which should be used to store the encoded data.</param>
    /// <param name="waveFormat">
    /// Format of the waveform-audio data. Note that the <see cref="WaveWriter" /> won't convert any
    /// data.
    /// </param>
    /// <exception cref="ArgumentNullException"><paramref name="stream"/> is null.</exception>
    /// <exception cref="ArgumentException"><paramref name="stream"/> is not writeable or not seekable.</exception>
    public WaveWriter(Stream stream, WaveFormat waveFormat)
    {
        if (stream == null)
            throw new ArgumentNullException("stream");
        if (!stream.CanWrite)
            throw new ArgumentException("Stream not writeable.", "stream");
        if (!stream.CanSeek)
            throw new ArgumentException("Stream not seekable.", "stream");

        _isDisposing = false;
        _isDisposed = false;
        _stream = stream;
        _waveStartPosition = stream.Position;
        _writer = new BinaryWriter(stream);

        // Reserve 44 bytes for the canonical RIFF/fmt/data header; it gets patched by
        // WriteHeader below and again on Dispose once the final data length is known.
        for (int i = 0; i < 44; i++)
        {
            _writer.Write((byte) 0);
        }
        _waveFormat = waveFormat;
        WriteHeader();

        _closeStream = false;
    }

    /// <summary>
    /// Disposes the <see cref="WaveWriter" /> and writes down the wave header.
    /// </summary>
    public void Dispose()
    {
        Dispose(true);
        GC.SuppressFinalize(this);
    }

    /// <summary>
    /// Writes down all audio data of the <see cref="IWaveSource" /> to a file.
    /// </summary>
    /// <param name="filename">The filename.</param>
    /// <param name="source">The source to write down to the file.</param>
    /// <param name="deleteFileIfAlreadyExists">if set to <c>true</c> the file will be overwritten if it already exists.</param>
    /// <param name="maxlength">The maximum number of bytes to write. Use -1 to write an infinite number of bytes.</param>
    /// <remarks>
    /// This method is obsolete. Use the <see cref="Extensions.WriteToWaveStream" /> extension instead.
    /// </remarks>
    [Obsolete("Use the Extensions.WriteToWaveStream extension instead.")]
    public static void WriteToFile(string filename, IWaveSource source, bool deleteFileIfAlreadyExists,
        int maxlength = -1)
    {
        if (deleteFileIfAlreadyExists && File.Exists(filename))
            File.Delete(filename);

        int r = 0;
        var buffer = new byte[source.WaveFormat.BytesPerSecond];
        using (var w = new WaveWriter(filename, source.WaveFormat))
        {
            int read;
            while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
            {
                w.Write(buffer, 0, read);
                r += read;
                if (maxlength != -1 && r > maxlength)
                    break;
            }
        }
    }

    /// <summary>
    /// Encodes a single sample.
    /// </summary>
    /// <param name="sample">The sample to encode; expected in the range [-1..1], clamped otherwise.</param>
    /// <exception cref="InvalidOperationException">The wave format is not supported.</exception>
    public void WriteSample(float sample)
    {
        CheckObjectDisposed();

        // Clamp out-of-range samples instead of letting them wrap after scaling.
        if (sample < -1 || sample > 1)
            sample = Math.Max(-1, Math.Min(1, sample));

        if (_waveFormat.IsPCM())
        {
            switch (_waveFormat.BitsPerSample)
            {
                case 8:
                    Write((byte) (byte.MaxValue * sample));
                    break;
                case 16:
                    Write((short) (short.MaxValue * sample));
                    break;
                case 24:
                    // 24-bit samples are stored as the three low-order bytes of a scaled int.
                    byte[] buffer = BitConverter.GetBytes((int)(0x7fffff * sample));
                    Write(new[] {buffer[0], buffer[1], buffer[2]}, 0, 3);
                    break;
                case 32:
                    Write((int) (int.MaxValue * sample));
                    break;
                default:
                    throw new InvalidOperationException("Invalid Waveformat",
                        new InvalidOperationException("Invalid BitsPerSample while using PCM encoding."));
            }
        }
        else if (_waveFormat.IsIeeeFloat())
            Write(sample);
        else if (_waveFormat.WaveFormatTag == AudioEncoding.Extensible && _waveFormat.BitsPerSample == 32)
        {
            // Fix: was "Write(UInt16.MaxValue * (int)sample);" — casting the [-1..1] sample
            // to int BEFORE scaling truncates it to -1, 0 or 1 and destroys the signal.
            // Scale to full 32-bit range first, matching the 32-bit PCM branch above.
            Write((int) (int.MaxValue * sample));
        }
        else
        {
            throw new InvalidOperationException(
                "Invalid Waveformat: Waveformat has to be PCM[8, 16, 24, 32] or IeeeFloat[32]");
        }
    }

    /// <summary>
    /// Encodes multiple samples.
    /// </summary>
    /// <param name="samples">Float array which contains the samples to encode.</param>
    /// <param name="offset">Zero-based offset in the <paramref name="samples" /> array.</param>
    /// <param name="count">Number of samples to encode.</param>
    public void WriteSamples(float[] samples, int offset, int count)
    {
        CheckObjectDisposed();

        for (int i = offset; i < offset + count; i++)
        {
            WriteSample(samples[i]);
        }
    }

    /// <summary>
    /// Encodes raw data in the form of a byte array.
    /// </summary>
    /// <param name="buffer">Byte array which contains the data to encode.</param>
    /// <param name="offset">Zero-based offset in the <paramref name="buffer" />.</param>
    /// <param name="count">Number of bytes to encode.</param>
    public void Write(byte[] buffer, int offset, int count)
    {
        CheckObjectDisposed();

        _stream.Write(buffer, offset, count);
        _dataLength += count;
    }

    /// <summary>
    /// Writes down a single byte.
    /// </summary>
    /// <param name="value">Byte to write down.</param>
    public void Write(byte value)
    {
        CheckObjectDisposed();

        _writer.Write(value);
        _dataLength++;
    }

    /// <summary>
    /// Writes down a single 16 bit integer value.
    /// </summary>
    /// <param name="value">Value to write down.</param>
    public void Write(short value)
    {
        CheckObjectDisposed();

        _writer.Write(value);
        _dataLength += 2;
    }

    /// <summary>
    /// Writes down a single 32 bit integer value.
    /// </summary>
    /// <param name="value">Value to write down.</param>
    public void Write(int value)
    {
        CheckObjectDisposed();

        _writer.Write(value);
        _dataLength += 4;
    }

    /// <summary>
    /// Writes down a single 32 bit float value.
    /// </summary>
    /// <param name="value">Value to write down.</param>
    public void Write(float value)
    {
        CheckObjectDisposed();

        _writer.Write(value);
        _dataLength += 4;
    }

    // Seeks back to the reserved header area, writes the RIFF/fmt/data chunks with the
    // current sizes and restores the stream position.
    private void WriteHeader()
    {
        _writer.Flush();

        long currentPosition = _stream.Position;
        _stream.Position = _waveStartPosition;

        WriteRiffHeader();
        WriteFmtChunk();
        WriteDataChunk();

        _writer.Flush();
        _stream.Position = currentPosition;
    }

    private void WriteRiffHeader()
    {
        _writer.Write(Encoding.UTF8.GetBytes("RIFF"));
        // RIFF chunk size = file length minus the 8-byte "RIFF"+size prefix.
        _writer.Write((int) (_stream.Length - 8));
        _writer.Write(Encoding.UTF8.GetBytes("WAVE"));
    }

    private void WriteFmtChunk()
    {
        AudioEncoding tag = _waveFormat.WaveFormatTag;
        // A plain 16-byte fmt chunk is written, so map Extensible back to its sub-encoding.
        if (tag == AudioEncoding.Extensible && _waveFormat is WaveFormatExtensible)
            tag = AudioSubTypes.EncodingFromSubType((_waveFormat as WaveFormatExtensible).SubFormat);

        _writer.Write(Encoding.UTF8.GetBytes("fmt "));
        _writer.Write((int)16);
        _writer.Write((short) tag);
        _writer.Write((short)_waveFormat.Channels);
        _writer.Write((int)_waveFormat.SampleRate);
        _writer.Write((int)_waveFormat.BytesPerSecond);
        _writer.Write((short) _waveFormat.BlockAlign);
        _writer.Write((short)_waveFormat.BitsPerSample);
    }

    private void WriteDataChunk()
    {
        _writer.Write(Encoding.UTF8.GetBytes("data"));
        _writer.Write(_dataLength);
    }

    private void CheckObjectDisposed()
    {
        if (_isDisposed)
            throw new ObjectDisposedException("WaveWriter");
    }

    /// <summary>
    /// Disposes the <see cref="WaveWriter" /> and writes down the wave header.
    /// </summary>
    /// <param name="disposing">
    /// True to release both managed and unmanaged resources; false to release only unmanaged
    /// resources.
    /// </param>
    protected virtual void Dispose(bool disposing)
    {
        if (_isDisposed) return;
        // On the finalizer path the managed stream may already be gone, so the header is
        // only patched when disposing explicitly.
        if (!disposing) return;
        try
        {
            _isDisposing = true;
            WriteHeader();
        }
        catch (Exception ex)
        {
            // Best effort: a failed header update must not throw from Dispose.
            Debug.WriteLine("WaveWriter::Dispose: " + ex);
        }
        finally
        {
            if (_closeStream)
            {
                if (_writer != null)
                {
                    _writer.Close();
                    _writer = null;
                }
                if (_stream != null)
                {
                    _stream.Dispose();
                    _stream = null;
                }
            }
            _isDisposing = false;
        }
        _isDisposed = true;
    }

    /// <summary>
    /// Destructor of the <see cref="WaveWriter" /> which calls the <see cref="Dispose(bool)" /> method.
    /// </summary>
    ~WaveWriter()
    {
        Dispose(false);
    }
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 8f3be84641ff7b543b200b146b3e8d37
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,36 @@
## NOTE: ##
This license does not apply to the [CSCore.Ffmpeg](https://github.com/filoe/cscore/tree/master/CSCore.Ffmpeg) project!
## Microsoft Public License (Ms-PL) ##
Microsoft Public License (Ms-PL)
This license governs use of the accompanying software. If you use the software, you accept this license. If you do not accept the license, do not use the software.
#### 1. Definitions ####
The terms "reproduce," "reproduction," "derivative works," and "distribution" have the same meaning here as under U.S. copyright law.
A "contribution" is the original software, or any additions or changes to the software.
A "contributor" is any person that distributes its contribution under this license.
"Licensed patents" are a contributor's patent claims that read directly on its contribution.
#### 2. Grant of Rights ####
(A) Copyright Grant- Subject to the terms of this license, including the license conditions and limitations in section 3, each contributor grants you a non-exclusive, worldwide, royalty-free copyright license to reproduce its contribution, prepare derivative works of its contribution, and distribute its contribution or any derivative works that you create.
(B) Patent Grant- Subject to the terms of this license, including the license conditions and limitations in section 3, each contributor grants you a non-exclusive, worldwide, royalty-free license under its licensed patents to make, have made, use, sell, offer for sale, import, and/or otherwise dispose of its contribution in the software or derivative works of the contribution in the software.
#### 3. Conditions and Limitations ####
(A) No Trademark License- This license does not grant you rights to use any contributors' name, logo, or trademarks.
(B) If you bring a patent claim against any contributor over patents that you claim are infringed by the software, your patent license from such contributor to the software ends automatically.
(C) If you distribute any portion of the software, you must retain all copyright, patent, trademark, and attribution notices that are present in the software.
(D) If you distribute any portion of the software in source code form, you may do so only under this license by including a complete copy of this license with your distribution. If you distribute any portion of the software in compiled or object code form, you may only do so under a license that complies with this license.
(E) The software is licensed "as-is." You bear the risk of using it. The contributors give no express warranties, guarantees or conditions. You may have additional consumer rights under your local laws which this license cannot change. To the extent permitted under your local laws, the contributors exclude the implied warranties of merchantability, fitness for a particular purpose and non-infringement.

View File

@@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: 31539401d6075c44daf952351a39dafd
TextScriptImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,163 @@
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="ConnectAndJoin.cs" company="Exit Games GmbH">
// Part of: Photon Voice Utilities for Unity - Copyright (C) 2018 Exit Games GmbH
// </copyright>
// <summary>
// Simple component to call voiceConnection.ConnectUsingSettings() and get into a Voice room easily.
// </summary>
// <remarks>
// Requires a VoiceConnection component attached to the same GameObject.
// </remarks>
// <author>developer@exitgames.com</author>
// --------------------------------------------------------------------------------------------------------------------
using Photon.Realtime;
using UnityEngine;
using System.Collections.Generic;
namespace Photon.Voice.Unity.UtilityScripts
{
[RequireComponent(typeof(VoiceConnection))]
public class ConnectAndJoin : MonoBehaviour, IConnectionCallbacks, IMatchmakingCallbacks
{
    private VoiceConnection voiceConnection;

    /// <summary>Join any random room (creating one if needed) instead of a named one.</summary>
    public bool RandomRoom = true;

    [SerializeField]
    private bool autoConnect = true;

    [SerializeField]
    private bool autoTransmit = true;

    [SerializeField]
    private bool publishUserId = false;

    /// <summary>Room to join or create when <see cref="RandomRoom"/> is disabled.</summary>
    public string RoomName;

    // Reused for every join/create call; only RoomName and PublishUserId are updated.
    private readonly EnterRoomParams enterRoomParams = new EnterRoomParams
    {
        RoomOptions = new RoomOptions()
    };

    /// <summary>Whether the underlying voice client is currently connected.</summary>
    public bool IsConnected
    {
        get { return voiceConnection.Client.IsConnected; }
    }

    private void Awake()
    {
        voiceConnection = GetComponent<VoiceConnection>();
    }

    private void OnEnable()
    {
        voiceConnection.Client.AddCallbackTarget(this);
        if (!autoConnect)
        {
            return;
        }
        ConnectNow();
    }

    private void OnDisable()
    {
        voiceConnection.Client.RemoveCallbackTarget(this);
    }

    /// <summary>Connects using the settings configured on the attached VoiceConnection.</summary>
    public void ConnectNow()
    {
        voiceConnection.ConnectUsingSettings();
    }

    #region MatchmakingCallbacks

    public void OnCreatedRoom()
    {
    }

    public void OnCreateRoomFailed(short returnCode, string message)
    {
        Debug.LogErrorFormat("OnCreateRoomFailed errorCode={0} errorMessage={1}", returnCode, message);
    }

    public void OnFriendListUpdate(List<FriendInfo> friendList)
    {
    }

    public void OnJoinedRoom()
    {
        // Ensure a Recorder component is available for transmission.
        if (voiceConnection.PrimaryRecorder == null)
        {
            voiceConnection.PrimaryRecorder = gameObject.AddComponent<Recorder>();
        }
        if (autoTransmit)
        {
            voiceConnection.PrimaryRecorder.TransmitEnabled = autoTransmit;
        }
    }

    public void OnJoinRandomFailed(short returnCode, string message)
    {
        Debug.LogErrorFormat("OnJoinRandomFailed errorCode={0} errorMessage={1}", returnCode, message);
    }

    public void OnJoinRoomFailed(short returnCode, string message)
    {
        Debug.LogErrorFormat("OnJoinRoomFailed roomName={0} errorCode={1} errorMessage={2}", this.RoomName, returnCode, message);
    }

    public void OnLeftRoom()
    {
    }

    #endregion

    #region ConnectionCallbacks

    public void OnConnected()
    {
    }

    public void OnConnectedToMaster()
    {
        enterRoomParams.RoomOptions.PublishUserId = publishUserId;
        if (!RandomRoom)
        {
            enterRoomParams.RoomName = RoomName;
            voiceConnection.Client.OpJoinOrCreateRoom(enterRoomParams);
            return;
        }
        enterRoomParams.RoomName = null;
        voiceConnection.Client.OpJoinRandomOrCreateRoom(new OpJoinRandomRoomParams(), enterRoomParams);
    }

    public void OnDisconnected(DisconnectCause cause)
    {
        // Intentional disconnects are not errors.
        bool expected = cause == DisconnectCause.None || cause == DisconnectCause.DisconnectByClientLogic;
        if (!expected)
        {
            Debug.LogErrorFormat("OnDisconnected cause={0}", cause);
        }
    }

    public void OnRegionListReceived(RegionHandler regionHandler)
    {
    }

    public void OnCustomAuthenticationResponse(Dictionary<string, object> data)
    {
    }

    public void OnCustomAuthenticationFailed(string debugMessage)
    {
    }

    #endregion
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 65b99afb3758e9e4689c28a18224eb8f
timeCreated: 1540904523
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: dd940d333aeeaa048842e87b9b259188, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 5e696093a38c17043b5fffafad200f41
folderAsset: yes
timeCreated: 1537958675
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,50 @@
namespace Photon.Voice.Unity.UtilityScripts.Editor
{
    using UnityEditor;
    using UnityEngine;

    /// <summary>Custom inspector for <see cref="ConnectAndJoin"/> with a runtime Connect button.</summary>
    [CustomEditor(typeof(ConnectAndJoin))]
    public class ConnectAndJoinEditor : Editor
    {
        private ConnectAndJoin inspected;
        private SerializedProperty randomRoom;
        private SerializedProperty roomName;
        private SerializedProperty autoConnect;
        private SerializedProperty autoTransmit;
        private SerializedProperty publishUserId;

        private void OnEnable()
        {
            inspected = target as ConnectAndJoin;
            randomRoom = serializedObject.FindProperty("RandomRoom");
            roomName = serializedObject.FindProperty("RoomName");
            autoConnect = serializedObject.FindProperty("autoConnect");
            autoTransmit = serializedObject.FindProperty("autoTransmit");
            publishUserId = serializedObject.FindProperty("publishUserId");
        }

        public override void OnInspectorGUI()
        {
            EditorGUI.BeginChangeCheck();
            EditorGUILayout.PropertyField(autoConnect);
            EditorGUILayout.PropertyField(autoTransmit);
            EditorGUILayout.PropertyField(randomRoom);
            EditorGUILayout.PropertyField(publishUserId);
            // The room name only matters when a specific room is targeted.
            if (!randomRoom.boolValue)
            {
                EditorGUILayout.PropertyField(roomName);
            }
            // Offer a manual connect button while playing and not yet connected.
            if (Application.isPlaying && !inspected.IsConnected && GUILayout.Button("Connect"))
            {
                inspected.ConnectNow();
            }
            if (EditorGUI.EndChangeCheck())
            {
                serializedObject.ApplyModifiedProperties();
            }
        }
    }
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 814145524f648a04fa42d1ac5cbfb885
timeCreated: 1537888035
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,13 @@
{
"name": "PhotonVoice.Utilities.Editor",
"references": [
"PhotonRealtime",
"PhotonVoice"
],
"optionalUnityReferences": [],
"includePlatforms": [
"Editor"
],
"excludePlatforms": [],
"allowUnsafeCode": false
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: a4110ddd96589404bb35c918cc2fe96b
timeCreated: 1538045250
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 63d608879f3e21645b6d9e0cc8a1e6ed
folderAsset: yes
timeCreated: 1561049831
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,105 @@
using UnityEngine;
namespace Photon.Voice.Unity.UtilityScripts
{
/// <summary>
/// Adds a gain (multiplication) and boost (addition) post-processing step to the
/// audio recorded by the attached <see cref="Recorder"/>. Installs a float or
/// short processor depending on the local voice's sample type.
/// </summary>
[RequireComponent(typeof(Recorder))]
public class MicAmplifier : VoiceComponent
{
    [SerializeField]
    private float boostValue;
    [SerializeField]
    private float amplificationFactor = 1f;

    private MicAmplifierFloat floatProcessor;
    private MicAmplifierShort shortProcessor;

    /// <summary>Multiplicative gain applied to every recorded sample.</summary>
    public float AmplificationFactor
    {
        get { return this.amplificationFactor; }
        set
        {
            if (!this.amplificationFactor.Equals(value))
            {
                this.amplificationFactor = value;
                if (this.floatProcessor != null)
                {
                    this.floatProcessor.AmplificationFactor = value;
                }
                if (this.shortProcessor != null)
                {
                    // Short pipeline stores a truncated copy of the float value.
                    this.shortProcessor.AmplificationFactor = (short)value;
                }
            }
        }
    }

    /// <summary>Constant offset added to every recorded sample.</summary>
    public float BoostValue
    {
        get { return this.boostValue; }
        set
        {
            if (!this.boostValue.Equals(value))
            {
                this.boostValue = value;
                if (this.floatProcessor != null)
                {
                    this.floatProcessor.BoostValue = value;
                }
                if (this.shortProcessor != null)
                {
                    // Short pipeline stores a truncated copy of the float value.
                    this.shortProcessor.BoostValue = (short)value;
                }
            }
        }
    }

    private void OnEnable()
    {
        this.SetProcessorsDisabled(false);
    }

    private void OnDisable()
    {
        this.SetProcessorsDisabled(true);
    }

    // Enables or disables whichever processor instances currently exist.
    private void SetProcessorsDisabled(bool disabled)
    {
        if (this.floatProcessor != null)
        {
            this.floatProcessor.Disabled = disabled;
        }
        if (this.shortProcessor != null)
        {
            this.shortProcessor.Disabled = disabled;
        }
    }

    // Message sent by Recorder once the local voice is created; attaches the
    // matching post-processor for the voice's sample type.
    private void PhotonVoiceCreated(PhotonVoiceCreatedParams p)
    {
        if (p.Voice is LocalVoiceAudioFloat floatVoice)
        {
            this.floatProcessor = new MicAmplifierFloat(this.AmplificationFactor, this.BoostValue);
            floatVoice.AddPostProcessor(this.floatProcessor);
        }
        else if (p.Voice is LocalVoiceAudioShort shortVoice)
        {
            this.shortProcessor = new MicAmplifierShort((short)this.AmplificationFactor, (short)this.BoostValue);
            shortVoice.AddPostProcessor(this.shortProcessor);
        }
        else if (this.Logger.IsErrorEnabled)
        {
            this.Logger.LogError("LocalVoice object has unexpected value/type: {0}", p.Voice == null ? "null" : p.Voice.GetType().ToString());
        }
    }
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: a3986deb98168394684c41606a9495af
timeCreated: 1561048437
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: dd940d333aeeaa048842e87b9b259188, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,33 @@
#if UNITY_EDITOR
using UnityEngine;
using UnityEditor;
namespace Photon.Voice.Unity.UtilityScripts
{
/// <summary>
/// Custom inspector for <see cref="MicAmplifier"/>: exposes the amplification
/// factor and boost value with tooltips.
/// </summary>
[CustomEditor(typeof(MicAmplifier))]
public class MicAmplifierEditor : Editor
{
    private MicAmplifier simpleAmplifier;

    private void OnEnable()
    {
        this.simpleAmplifier = this.target as MicAmplifier;
    }

    public override void OnInspectorGUI()
    {
        EditorGUI.BeginChangeCheck();
        float amplificationFactor = EditorGUILayout.FloatField(
            new GUIContent("Amplification Factor", "Amplification Factor (Multiplication)"),
            this.simpleAmplifier.AmplificationFactor);
        float boostValue = EditorGUILayout.FloatField(
            new GUIContent("Boost Value", "Boost Value (Addition)"),
            this.simpleAmplifier.BoostValue);
        if (EditorGUI.EndChangeCheck())
        {
            // Fix: the target is modified directly (not through SerializedProperty),
            // so register an undo step and mark the object dirty explicitly;
            // otherwise edits were not undoable and could be lost on save/reload.
            Undo.RecordObject(this.simpleAmplifier, "Modify MicAmplifier");
            this.simpleAmplifier.AmplificationFactor = amplificationFactor;
            this.simpleAmplifier.BoostValue = boostValue;
            EditorUtility.SetDirty(this.simpleAmplifier);
        }
    }
}
}
#endif

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 2ca8fce417a134e4eb5444c7a9d071bb
timeCreated: 1561049980
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,48 @@
namespace Photon.Voice.Unity.UtilityScripts
{
/// <summary>
/// In-place post-processor for float audio frames: multiplies each sample by
/// <see cref="AmplificationFactor"/> and then adds <see cref="BoostValue"/>.
/// Also tracks the loudest sample seen before and after processing.
/// </summary>
public class MicAmplifierFloat : IProcessor<float>
{
    /// <summary>Multiplicative gain applied to each sample.</summary>
    public float AmplificationFactor { get; set; }
    /// <summary>Constant offset added to each sample after amplification.</summary>
    public float BoostValue { get; set; }
    /// <summary>Largest raw sample value observed so far.</summary>
    public float MaxBefore { get; private set; }
    /// <summary>Processed value of the sample recorded in <see cref="MaxBefore"/>.</summary>
    public float MaxAfter { get; private set; }
    /// <summary>When true, frames pass through untouched.</summary>
    public bool Disabled { get; set; }

    public MicAmplifierFloat(float amplificationFactor, float boostValue)
    {
        this.AmplificationFactor = amplificationFactor;
        this.BoostValue = boostValue;
    }

    /// <summary>Amplifies the frame in place and returns the same buffer.</summary>
    public float[] Process(float[] buf)
    {
        if (!this.Disabled)
        {
            for (int sample = 0; sample < buf.Length; sample++)
            {
                float original = buf[sample];
                buf[sample] = original * this.AmplificationFactor + this.BoostValue;
                if (original > this.MaxBefore)
                {
                    this.MaxBefore = original;
                    this.MaxAfter = buf[sample];
                }
            }
        }
        return buf;
    }

    public void Dispose()
    {
    }
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 54f7c01cbf9e64e42bde37b5a0bee411
timeCreated: 1561049960
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,49 @@
namespace Photon.Voice.Unity.UtilityScripts
{
/// <summary>
/// In-place post-processor for 16-bit audio frames: multiplies each sample by
/// <see cref="AmplificationFactor"/> and then adds <see cref="BoostValue"/>,
/// saturating at the short range. Also tracks the loudest sample seen.
/// </summary>
public class MicAmplifierShort : IProcessor<short>
{
    /// <summary>Multiplicative gain applied to each sample.</summary>
    public short AmplificationFactor { get; set; }
    /// <summary>Constant offset added to each sample after amplification.</summary>
    public short BoostValue { get; set; }
    /// <summary>Largest raw sample value observed so far.</summary>
    public short MaxBefore { get; private set; }
    /// <summary>Processed value of the sample recorded in <see cref="MaxBefore"/>.</summary>
    public short MaxAfter { get; private set; }
    /// <summary>When true, frames pass through untouched.</summary>
    public bool Disabled { get; set; }

    public MicAmplifierShort(short amplificationFactor, short boostValue)
    {
        this.AmplificationFactor = amplificationFactor;
        this.BoostValue = boostValue;
    }

    /// <summary>Amplifies the frame in place and returns the same buffer.</summary>
    public short[] Process(short[] buf)
    {
        if (this.Disabled)
        {
            return buf;
        }
        for (int i = 0; i < buf.Length; i++)
        {
            short before = buf[i];
            // Fix: amplify in 32-bit and clamp to the short range. The previous
            // compound assignments ("buf[i] *= ...; buf[i] += ...;") silently
            // truncated the intermediate int back to short, so loud input
            // wrapped around instead of saturating, causing severe distortion.
            int amplified = before * this.AmplificationFactor + this.BoostValue;
            if (amplified > short.MaxValue)
            {
                amplified = short.MaxValue;
            }
            else if (amplified < short.MinValue)
            {
                amplified = short.MinValue;
            }
            buf[i] = (short)amplified;
            if (this.MaxBefore < before)
            {
                this.MaxBefore = before;
                this.MaxAfter = buf[i];
            }
        }
        return buf;
    }

    public void Dispose()
    {
    }
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: fa8d3b254d2d2e6488f58f4d71ee2363
timeCreated: 1561049971
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,148 @@
#if UNITY_ANDROID && !UNITY_EDITOR
using UnityEngine.Android;
#endif
#if UNITY_IOS && !UNITY_EDITOR
using System.Collections;
using UnityEngine.iOS;
#endif
using System;
using UnityEngine;
namespace Photon.Voice.Unity.UtilityScripts
{
/// <summary>
/// Helper to request Microphone permission on Android or iOS.
/// </summary>
[RequireComponent(typeof(Recorder))]
public class MicrophonePermission : VoiceComponent
{
    // Recorder on the same GameObject; its AutoStart is gated on permission.
    private Recorder recorder;
#if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
    // True while a native permission dialog is pending (Android/iOS only).
    private bool isRequesting;
#endif
    // Cached permission state; exposed through HasPermission.
    private bool hasPermission;
    /// <summary>Raised every time the permission state is evaluated (also when unchanged), with the granted flag.</summary>
    public static event Action<bool> MicrophonePermissionCallback;
    // Value to restore Recorder.AutoStart to once permission is granted.
    [SerializeField]
    private bool autoStart = true;
    /// <summary>
    /// Whether microphone permission is currently granted.
    /// Setting it logs the state, fires <see cref="MicrophonePermissionCallback"/>,
    /// and re-enables Recorder.AutoStart on a transition to granted.
    /// </summary>
    public bool HasPermission
    {
        get
        {
            return this.hasPermission;
        }
        private set
        {
            this.Logger.LogInfo("Microphone Permission Granted: {0}", value);
            // Note: callback fires even when the value did not change.
            MicrophonePermissionCallback?.Invoke(value);
            if (this.hasPermission != value)
            {
                this.hasPermission = value;
                if (this.hasPermission)
                {
                    // Permission just granted: restore the configured auto start.
                    this.recorder.AutoStart = this.autoStart;
                }
            }
        }
    }
    protected override void Awake()
    {
        base.Awake();
        this.recorder = this.GetComponent<Recorder>();
        // Block auto start until permission is confirmed below.
        this.recorder.AutoStart = false;
        this.InitVoice();
    }
#if UNITY_IOS && !UNITY_EDITOR
    // iOS: asynchronous permission request; resolves HasPermission when done.
    IEnumerator PermissionCheck()
    {
        this.isRequesting = true;
        this.Logger.LogInfo("iOS Microphone Permission Request");
        yield return Application.RequestUserAuthorization(UserAuthorization.Microphone);
        this.isRequesting = false;
        if (Application.HasUserAuthorization(UserAuthorization.Microphone))
        {
            this.HasPermission = true;
        }
        else
        {
            this.HasPermission = false;
        }
    }
#endif
    /// <summary>
    /// Checks (and if needed requests) microphone permission for the current platform.
    /// On platforms without a permission dialog, permission is assumed granted.
    /// </summary>
    public void InitVoice()
    {
#if UNITY_ANDROID && !UNITY_EDITOR
        if (Permission.HasUserAuthorizedPermission(Permission.Microphone))
        {
            this.HasPermission = true;
        }
        else
        {
            this.Logger.LogInfo("Android Microphone Permission Request");
#if UNITY_2020_2_OR_NEWER
            // Unity 2020.2+: use callback-based request, no focus polling needed.
            var callbacks = new PermissionCallbacks();
            callbacks.PermissionDenied += PermissionCallbacks_PermissionDenied;
            callbacks.PermissionGranted += PermissionCallbacks_PermissionGranted;
            callbacks.PermissionDeniedAndDontAskAgain += PermissionCallbacks_PermissionDeniedAndDontAskAgain;
            Permission.RequestUserPermission(Permission.Microphone, callbacks);
#else
            // Older Unity: fire-and-forget request; result read in OnApplicationFocus.
            Permission.RequestUserPermission(Permission.Microphone);
#endif
            this.isRequesting = true;
        }
#elif UNITY_IOS && !UNITY_EDITOR
        this.StartCoroutine(this.PermissionCheck());
#else
        // Editor and other platforms: no runtime permission concept.
        this.HasPermission = true;
#endif
    }
#if UNITY_ANDROID && !UNITY_EDITOR
#if UNITY_2020_2_OR_NEWER
    internal void PermissionCallbacks_PermissionDeniedAndDontAskAgain(string permissionName)
    {
        this.isRequesting = false;
        this.HasPermission = false;
        this.Logger.LogInfo($"{permissionName} PermissionDeniedAndDontAskAgain");
    }
    internal void PermissionCallbacks_PermissionGranted(string permissionName)
    {
        this.isRequesting = false;
        this.HasPermission = true;
        this.Logger.LogInfo($"{permissionName} PermissionGranted");
    }
    internal void PermissionCallbacks_PermissionDenied(string permissionName)
    {
        this.isRequesting = false;
        this.HasPermission = false;
        this.Logger.LogInfo($"{permissionName} PermissionDenied");
    }
#else
    // Pre-2020.2 Android: the permission dialog steals focus, so the result is
    // read when focus returns to the app.
    private void OnApplicationFocus(bool focus)
    {
        if (focus && this.isRequesting)
        {
            if (Permission.HasUserAuthorizedPermission(Permission.Microphone))
            {
                this.HasPermission = true;
            }
            else
            {
                this.HasPermission = false;
            }
            this.isRequesting = false;
        }
    }
#endif
#endif
}
}

View File

@@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 98fbb947c07407d4db937da17af98cbf
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,130 @@
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="PhotonVoiceLagSimulationGui.cs" company="Exit Games GmbH">
// Part of: Photon Voice Utilities for Unity - Copyright (C) 2018 Exit Games GmbH
// </copyright>
// <summary>
// This MonoBehaviour is a basic GUI for the Photon Voice client's network-simulation feature.
// It can modify lag (fixed delay), jitter (random lag), packet loss and audio frames percentage loss.
// </summary>
// <author>developer@exitgames.com</author>
// --------------------------------------------------------------------------------------------------------------------
using UnityEngine;
using ExitGames.Client.Photon;
namespace Photon.Voice.Unity.UtilityScripts
{
/// <summary>
/// IMGUI window to tweak the Photon Voice network-simulation settings at runtime:
/// lag (fixed delay), jitter (random lag), packet loss, and simulated audio
/// frame loss percentage.
/// </summary>
[RequireComponent(typeof(VoiceConnection))]
public class PhotonVoiceLagSimulationGui : MonoBehaviour
{
    // Connection whose peer and voice client are being manipulated.
    private VoiceConnection voiceConnection;
    /// <summary>Positioning rect for window.</summary>
    private Rect windowRect = new Rect(0, 100, 200, 100);
    /// <summary>Unity GUI Window ID (must be unique or will cause issues).</summary>
    private int windowId = 201;
    /// <summary>Shows or hides GUI (does not affect settings).</summary>
    private bool visible = true;
    /// <summary>The peer currently in use (to set the network simulation).</summary>
    private PhotonPeer peer;
    // Slider-backed cache of the simulated audio frame loss percentage.
    private float debugLostPercent;

    public void OnEnable()
    {
        VoiceConnection[] voiceConnections = this.GetComponents<VoiceConnection>();
        if (voiceConnections == null || voiceConnections.Length == 0)
        {
            // Fix: the message previously named the wrong component ("PhotonVoiceStatsGui").
            Debug.LogError("No VoiceConnection component found, PhotonVoiceLagSimulationGui disabled", this);
            this.enabled = false;
            return;
        }
        if (voiceConnections.Length > 1)
        {
            Debug.LogWarningFormat(this, "Multiple VoiceConnection components found, using first occurrence attached to GameObject {0}", voiceConnections[0].name);
        }
        this.voiceConnection = voiceConnections[0];
        this.peer = this.voiceConnection.Client.LoadBalancingPeer;
        this.debugLostPercent = this.voiceConnection.VoiceClient.DebugLostPercent;
    }

    private void OnGUI()
    {
        if (!this.visible)
        {
            return;
        }
        if (this.peer == null)
        {
            this.windowRect = GUILayout.Window(this.windowId, this.windowRect, this.NetSimHasNoPeerWindow,
                "Voice Network Simulation");
        }
        else
        {
            this.windowRect = GUILayout.Window(this.windowId, this.windowRect, this.NetSimWindow, "Voice Network Simulation");
        }
    }

    // Window body shown while there is no peer to configure.
    // Parameter renamed from "windowId" to avoid shadowing the field above.
    private void NetSimHasNoPeerWindow(int id)
    {
        GUILayout.Label("No voice peer to communicate with. ");
    }

    // Window body with the actual simulation controls.
    private void NetSimWindow(int id)
    {
        GUILayout.Label(string.Format("Rtt:{0,4} +/-{1,3}", this.peer.RoundTripTime,
            this.peer.RoundTripTimeVariance));
        bool simEnabled = this.peer.IsSimulationEnabled;
        bool newSimEnabled = GUILayout.Toggle(simEnabled, "Simulate");
        if (newSimEnabled != simEnabled)
        {
            this.peer.IsSimulationEnabled = newSimEnabled;
        }
        // Lag, jitter and loss are applied symmetrically (incoming == outgoing).
        float inOutLag = this.peer.NetworkSimulationSettings.IncomingLag;
        GUILayout.Label(string.Format("Lag {0}", inOutLag));
        inOutLag = GUILayout.HorizontalSlider(inOutLag, 0, 500);
        this.peer.NetworkSimulationSettings.IncomingLag = (int)inOutLag;
        this.peer.NetworkSimulationSettings.OutgoingLag = (int)inOutLag;
        float inOutJitter = this.peer.NetworkSimulationSettings.IncomingJitter;
        GUILayout.Label(string.Format("Jit {0}", inOutJitter));
        inOutJitter = GUILayout.HorizontalSlider(inOutJitter, 0, 100);
        this.peer.NetworkSimulationSettings.IncomingJitter = (int)inOutJitter;
        this.peer.NetworkSimulationSettings.OutgoingJitter = (int)inOutJitter;
        float loss = this.peer.NetworkSimulationSettings.IncomingLossPercentage;
        GUILayout.Label(string.Format("Loss {0}", loss));
        loss = GUILayout.HorizontalSlider(loss, 0, 10);
        this.peer.NetworkSimulationSettings.IncomingLossPercentage = (int)loss;
        this.peer.NetworkSimulationSettings.OutgoingLossPercentage = (int)loss;
        GUILayout.Label(string.Format("Lost Audio Frames {0}%", (int)this.debugLostPercent));
        this.debugLostPercent = GUILayout.HorizontalSlider(this.debugLostPercent, 0, 100);
        // Frame loss simulation only applies while simulation is enabled.
        if (newSimEnabled)
        {
            this.voiceConnection.VoiceClient.DebugLostPercent = (int)this.debugLostPercent;
        }
        else
        {
            this.voiceConnection.VoiceClient.DebugLostPercent = 0;
        }
        // if anything was clicked, the height of this window is likely changed. reduce it to be layouted again next frame
        if (GUI.changed)
        {
            this.windowRect.height = 100;
        }
        GUI.DragWindow();
    }
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 28c5a759338295941af2080959c92312
timeCreated: 1539349756
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: dd940d333aeeaa048842e87b9b259188, type: 3}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,206 @@
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="PhotonVoiceStatsGui.cs" company="Exit Games GmbH">
// Part of: Photon Voice Utilities for Unity - Copyright (C) 2018 Exit Games GmbH
// </copyright>
// <summary>
// This MonoBehaviour is a basic GUI for the Photon Voice client's network statistics.
// </summary>
// <author>developer@exitgames.com</author>
// --------------------------------------------------------------------------------------------------------------------
using ExitGames.Client.Photon;
using UnityEngine;
namespace Photon.Voice.Unity.UtilityScripts
{
/// <summary>
/// Basic GUI to show traffic and health statistics of the connection to Photon,
/// toggled by shift+tab.
/// </summary>
/// <remarks>
/// The shown health values can help identify problems with connection losses or performance.
/// Example:
/// If the time delta between two consecutive SendOutgoingCommands calls is a second or more,
/// chances rise for a disconnect being caused by this (because acknowledgments to the server
/// need to be sent in due time).
/// </remarks>
public class PhotonVoiceStatsGui : MonoBehaviour
{
    /// <summary>Shows or hides GUI (does not affect if stats are collected).</summary>
    private bool statsWindowOn = true;
    /// <summary>Option to turn collecting stats on or off (used in Update()).</summary>
    private bool statsOn;
    /// <summary>Shows additional "health" values of connection.</summary>
    private bool healthStatsVisible;
    /// <summary>Shows additional "lower level" traffic stats.</summary>
    private bool trafficStatsOn;
    /// <summary>Show buttons to control stats and reset them.</summary>
    private bool buttonsOn;
    /// <summary>Shows voice frame statistics (received/lost/sent).</summary>
    private bool voiceStatsOn = true;
    /// <summary>Positioning rect for window.</summary>
    private Rect statsRect = new Rect(0, 100, 300, 50);
    /// <summary>Unity GUI Window ID (must be unique or will cause issues).</summary>
    private int windowId = 200;
    /// <summary>The peer currently in use (to set the network simulation).</summary>
    private PhotonPeer peer;
    // Connection this GUI reads from (first VoiceConnection on this GameObject).
    private VoiceConnection voiceConnection;
    // Voice client of the connection above; source of the voice frame counters.
    private VoiceClient voiceClient;

    // Resolves the VoiceConnection/peer to observe; disables itself if none found.
    private void OnEnable()
    {
        VoiceConnection[] voiceConnections = this.GetComponents<VoiceConnection>();
        if (voiceConnections == null || voiceConnections.Length == 0)
        {
            Debug.LogError("No VoiceConnection component found, PhotonVoiceStatsGui disabled", this);
            this.enabled = false;
            return;
        }
        if (voiceConnections.Length > 1)
        {
            Debug.LogWarningFormat(this, "Multiple VoiceConnection components found, using first occurrence attached to GameObject {0}", voiceConnections[0].name);
        }
        this.voiceConnection = voiceConnections[0];
        this.voiceClient = this.voiceConnection.VoiceClient;
        this.peer = this.voiceConnection.Client.LoadBalancingPeer;
        // Default position: dock the window to the right edge of the screen.
        if (this.statsRect.x <= 0)
        {
            this.statsRect.x = Screen.width - this.statsRect.width;
        }
    }

    /// <summary>Checks for shift+tab input combination (to toggle statsOn).</summary>
    private void Update()
    {
        if (Input.GetKeyDown(KeyCode.Tab) && Input.GetKey(KeyCode.LeftShift))
        {
            this.statsWindowOn = !this.statsWindowOn;
            this.statsOn = true; // enable stats when showing the window
        }
    }

    private void OnGUI()
    {
        // Keep the peer's traffic-stat collection in sync with the local toggle.
        if (this.peer.TrafficStatsEnabled != this.statsOn)
        {
            this.peer.TrafficStatsEnabled = this.statsOn;
        }
        if (!this.statsWindowOn)
        {
            return;
        }
        this.statsRect = GUILayout.Window(this.windowId, this.statsRect, this.TrafficStatsWindow, "Voice Client Messages (shift+tab)");
    }

    // Window body: message totals plus optional buttons/health/traffic/voice sections.
    // Note: the parameter shadows the windowId field of the same name.
    private void TrafficStatsWindow(int windowId)
    {
        bool statsToLog = false;
        TrafficStatsGameLevel gls = this.peer.TrafficStatsGameLevel;
        // Despite the name, this is elapsed time in seconds (source value is ms);
        // clamped to 1 to avoid division by zero in the averages below.
        long elapsedMs = this.peer.TrafficStatsElapsedMs / 1000;
        if (elapsedMs == 0)
        {
            elapsedMs = 1;
        }
        GUILayout.BeginHorizontal();
        this.buttonsOn = GUILayout.Toggle(this.buttonsOn, "buttons");
        this.healthStatsVisible = GUILayout.Toggle(this.healthStatsVisible, "health");
        this.trafficStatsOn = GUILayout.Toggle(this.trafficStatsOn, "traffic");
        this.voiceStatsOn = GUILayout.Toggle(this.voiceStatsOn, "voice stats");
        GUILayout.EndHorizontal();
        string total = string.Format("Out {0,4} | In {1,4} | Sum {2,4}", gls.TotalOutgoingMessageCount, gls.TotalIncomingMessageCount, gls.TotalMessageCount);
        string elapsedTime = string.Format("{0}sec average:", elapsedMs);
        string average = string.Format("Out {0,4} | In {1,4} | Sum {2,4}", gls.TotalOutgoingMessageCount / elapsedMs, gls.TotalIncomingMessageCount / elapsedMs, gls.TotalMessageCount / elapsedMs);
        GUILayout.Label(total);
        GUILayout.Label(elapsedTime);
        GUILayout.Label(average);
        if (this.buttonsOn)
        {
            GUILayout.BeginHorizontal();
            this.statsOn = GUILayout.Toggle(this.statsOn, "stats on");
            if (GUILayout.Button("Reset"))
            {
                this.peer.TrafficStatsReset();
                this.peer.TrafficStatsEnabled = true;
            }
            statsToLog = GUILayout.Button("To Log");
            GUILayout.EndHorizontal();
        }
        string trafficStatsIn = string.Empty;
        string trafficStatsOut = string.Empty;
        if (this.trafficStatsOn)
        {
            GUILayout.Box("Voice Client Traffic Stats");
            trafficStatsIn = string.Concat("Incoming: \n", this.peer.TrafficStatsIncoming);
            trafficStatsOut = string.Concat("Outgoing: \n", this.peer.TrafficStatsOutgoing);
            GUILayout.Label(trafficStatsIn);
            GUILayout.Label(trafficStatsOut);
        }
        string healthStats = string.Empty;
        if (this.healthStatsVisible)
        {
            GUILayout.Box("Voice Client Health Stats");
            // Format indices 6-8 come from the peer, 9-10 from the voice client.
            healthStats = string.Format(
                "ping: {6}|{9}[+/-{7}|{10}]ms resent:{8} \n\nmax ms between\nsend: {0,4} \ndispatch: {1,4} \n\nlongest dispatch for: \nev({3}):{2,3}ms \nop({5}):{4,3}ms",
                gls.LongestDeltaBetweenSending,
                gls.LongestDeltaBetweenDispatching,
                gls.LongestEventCallback,
                gls.LongestEventCallbackCode,
                gls.LongestOpResponseCallback,
                gls.LongestOpResponseCallbackOpCode,
                this.peer.RoundTripTime,
                this.peer.RoundTripTimeVariance,
                this.peer.ResentReliableCommands,
                this.voiceClient.RoundTripTime,
                this.voiceClient.RoundTripTimeVariance);
            GUILayout.Label(healthStats);
        }
        string voiceStats = string.Empty;
        if (this.voiceStatsOn)
        {
            GUILayout.Box("Voice Frames Stats");
            voiceStats = string.Format("received: {0}, {1:F2}/s \n\nlost: {2}, {3:F2}/s ({4:F2}%) \n\nsent: {5} ({6} bytes)",
                this.voiceClient.FramesReceived,
                this.voiceConnection.FramesReceivedPerSecond,
                this.voiceClient.FramesLost,
                this.voiceConnection.FramesLostPerSecond,
                this.voiceConnection.FramesLostPercent,
                this.voiceClient.FramesSent,
                this.voiceClient.FramesSentBytes);
            GUILayout.Label(voiceStats);
        }
        if (statsToLog)
        {
            // Note: the logged summary does not include the voice frame stats section.
            string complete = string.Format("{0}\n{1}\n{2}\n{3}\n{4}\n{5}", total, elapsedTime, average, trafficStatsIn, trafficStatsOut, healthStats);
            Debug.Log(complete);
        }
        // if anything was clicked, the height of this window is likely changed. reduce it to be layouted again next frame
        if (GUI.changed)
        {
            this.statsRect.height = 100;
        }
        GUI.DragWindow();
    }
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 0d7b4b6b64f1b474d802f50cb9c720a2
timeCreated: 1549624795
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {fileID: 2800000, guid: dd940d333aeeaa048842e87b9b259188, type: 3}
userData:
assetBundleName:
assetBundleVariant:

Some files were not shown because too many files have changed in this diff Show More