Decompiled source of AudioBridge v2.0.1

plugins/AudioBridge/AudioBridge.dll

Decompiled 2 months ago
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.IO.MemoryMappedFiles;
using System.Linq;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Versioning;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using BepInEx;
using BepInEx.Configuration;
using BepInEx.NET.Common;
using BepInExResoniteShim;
using CSCore;
using CSCore.CoreAudioAPI;
using CSCore.SoundOut;
using CSCore.Win32;
using Elements.Core;
using FrooxEngine;
using HarmonyLib;

[assembly: CompilationRelaxations(8)]
[assembly: RuntimeCompatibility(WrapNonExceptionThrows = true)]
[assembly: Debuggable(/*Could not decode attribute arguments.*/)]
[assembly: TargetFramework(".NETCoreApp,Version=v9.0", FrameworkDisplayName = ".NET 9.0")]
[assembly: AssemblyCompany("Knackrack615")]
[assembly: AssemblyConfiguration("Debug")]
[assembly: AssemblyFileVersion("2.0.0.0")]
[assembly: AssemblyInformationalVersion("2.0.0")]
[assembly: AssemblyProduct("AudioBridge")]
[assembly: AssemblyTitle("AudioBridge")]
[assembly: AssemblyMetadata("RepositoryUrl", "https://github.com/knackrack615/AudioBridge")]
[assembly: AssemblyVersion("2.0.0.0")]
[module: RefSafetyRules(11)]
namespace AudioBridge;

/// <summary>
/// Selects which process has its audio output muted so the shared stream is not
/// heard twice (see the "MuteTarget" config entry: "Which process to mute
/// (prevents double audio)?").
/// </summary>
public enum MuteTarget
{
	/// <summary>Neither process is muted.</summary>
	None,
	/// <summary>Mute the host process (the one running this plugin/writer side).</summary>
	Host,
	/// <summary>Mute the renderer process (the reader side of the shared-memory bus).</summary>
	Renderer
}
[ResonitePlugin("knackrack615.audiobridge", "AudioBridge", "2.0.0", "Knackrack615", "https://github.com/knackrack615/AudioBridge")]
[BepInDependency(/*Could not decode attribute arguments.*/)]
public class AudioBridge : BasePlugin
{
	// NOTE(review): Roslyn's compiler-generated lambda container ("<>c"), recovered by
	// the decompiler. The nested "<<...>d" types are the async state machines for the
	// lambdas launched from OnEnabledChanged; the "<>"-prefixed identifiers are not
	// legal C#, so this block documents decompiled IL rather than recompilable code.
	[Serializable]
	[CompilerGenerated]
	private sealed class <>c
	{
		// State machine for the enable-path lambda (<OnEnabledChanged>b__9_0):
		// await Task.Delay(100), then initialize ShadowBus as writer, reset the ring
		// indices, publish enabled=true, reset the writer patch state and — when the
		// host is the current mute target — start the b__9_2 mute-retry loop.
		private sealed class <<OnEnabledChanged>b__9_0>d : IAsyncStateMachine
		{
			public int <>1__state;

			public AsyncTaskMethodBuilder <>t__builder;

			public <>c <>4__this;

			private TaskAwaiter <>u__1;

			private void MoveNext()
			{
				//IL_004d: Unknown result type (might be due to invalid IL or missing references)
				//IL_0052: Unknown result type (might be due to invalid IL or missing references)
				//IL_0059: Unknown result type (might be due to invalid IL or missing references)
				//IL_0016: Unknown result type (might be due to invalid IL or missing references)
				//IL_001b: Unknown result type (might be due to invalid IL or missing references)
				//IL_002f: Unknown result type (might be due to invalid IL or missing references)
				//IL_0030: Unknown result type (might be due to invalid IL or missing references)
				int num = <>1__state;
				try
				{
					TaskAwaiter awaiter;
					if (num != 0)
					{
						// First entry: schedule the 100 ms delay before touching the bus.
						awaiter = global::System.Threading.Tasks.Task.Delay(100).GetAwaiter();
						if (!((TaskAwaiter)(ref awaiter)).IsCompleted)
						{
							num = (<>1__state = 0);
							<>u__1 = awaiter;
							<<OnEnabledChanged>b__9_0>d <<OnEnabledChanged>b__9_0>d = this;
							((AsyncTaskMethodBuilder)(ref <>t__builder)).AwaitUnsafeOnCompleted<TaskAwaiter, <<OnEnabledChanged>b__9_0>d>(ref awaiter, ref <<OnEnabledChanged>b__9_0>d);
							return;
						}
					}
					else
					{
						// Resumed after the delay completed asynchronously.
						awaiter = <>u__1;
						<>u__1 = default(TaskAwaiter);
						num = (<>1__state = -1);
					}
					((TaskAwaiter)(ref awaiter)).GetResult();
					if (ShadowBus.EnsureInit(writer: true))
					{
						UniLog.Log("[AudioBridge] Audio sharing enabled successfully", false);
						ShadowBus.ResetBufferIndices();
						ShadowBus.PublishEnabled(enabled: true);
						ShadowWriterPatch.ResetState();
						if (_currentMuteTarget == MuteTarget.Host)
						{
							// Fire-and-forget the b__9_2 retry loop that mutes the host.
							global::System.Threading.Tasks.Task.Run((Func<global::System.Threading.Tasks.Task>)([AsyncStateMachine(typeof(<<OnEnabledChanged>b__9_2>d))] [DebuggerStepThrough] () =>
							{
								//IL_0007: Unknown result type (might be due to invalid IL or missing references)
								//IL_000c: Unknown result type (might be due to invalid IL or missing references)
								<<OnEnabledChanged>b__9_2>d <<OnEnabledChanged>b__9_2>d = new <<OnEnabledChanged>b__9_2>d
								{
									<>t__builder = AsyncTaskMethodBuilder.Create(),
									<>4__this = <>9,
									<>1__state = -1
								};
								((AsyncTaskMethodBuilder)(ref <<OnEnabledChanged>b__9_2>d.<>t__builder)).Start<<<OnEnabledChanged>b__9_2>d>(ref <<OnEnabledChanged>b__9_2>d);
								return ((AsyncTaskMethodBuilder)(ref <<OnEnabledChanged>b__9_2>d.<>t__builder)).Task;
							}));
						}
					}
					else
					{
						UniLog.Error("[AudioBridge] Failed to enable audio sharing", true);
					}
				}
				catch (global::System.Exception exception)
				{
					<>1__state = -2;
					((AsyncTaskMethodBuilder)(ref <>t__builder)).SetException(exception);
					return;
				}
				<>1__state = -2;
				((AsyncTaskMethodBuilder)(ref <>t__builder)).SetResult();
			}

			[DebuggerHidden]
			private void SetStateMachine(IAsyncStateMachine stateMachine)
			{
			}
		}

		// State machine for the disable-path lambda (<OnEnabledChanged>b__9_1):
		// await Task.Delay(500) (grace period for the reader), then shut down the
		// shared-memory bus and reset the writer patch state.
		private sealed class <<OnEnabledChanged>b__9_1>d : IAsyncStateMachine
		{
			public int <>1__state;

			public AsyncTaskMethodBuilder <>t__builder;

			public <>c <>4__this;

			private TaskAwaiter <>u__1;

			private void MoveNext()
			{
				//IL_004d: Unknown result type (might be due to invalid IL or missing references)
				//IL_0052: Unknown result type (might be due to invalid IL or missing references)
				//IL_0059: Unknown result type (might be due to invalid IL or missing references)
				//IL_0019: Unknown result type (might be due to invalid IL or missing references)
				//IL_001e: Unknown result type (might be due to invalid IL or missing references)
				//IL_0032: Unknown result type (might be due to invalid IL or missing references)
				//IL_0033: Unknown result type (might be due to invalid IL or missing references)
				int num = <>1__state;
				try
				{
					TaskAwaiter awaiter;
					if (num != 0)
					{
						awaiter = global::System.Threading.Tasks.Task.Delay(500).GetAwaiter();
						if (!((TaskAwaiter)(ref awaiter)).IsCompleted)
						{
							num = (<>1__state = 0);
							<>u__1 = awaiter;
							<<OnEnabledChanged>b__9_1>d <<OnEnabledChanged>b__9_1>d = this;
							((AsyncTaskMethodBuilder)(ref <>t__builder)).AwaitUnsafeOnCompleted<TaskAwaiter, <<OnEnabledChanged>b__9_1>d>(ref awaiter, ref <<OnEnabledChanged>b__9_1>d);
							return;
						}
					}
					else
					{
						awaiter = <>u__1;
						<>u__1 = default(TaskAwaiter);
						num = (<>1__state = -1);
					}
					((TaskAwaiter)(ref awaiter)).GetResult();
					ShadowBus.Shutdown();
					ShadowWriterPatch.ResetState();
					UniLog.Log("[AudioBridge] Audio sharing disabled", false);
				}
				catch (global::System.Exception exception)
				{
					<>1__state = -2;
					((AsyncTaskMethodBuilder)(ref <>t__builder)).SetException(exception);
					return;
				}
				<>1__state = -2;
				((AsyncTaskMethodBuilder)(ref <>t__builder)).SetResult();
			}

			[DebuggerHidden]
			private void SetStateMachine(IAsyncStateMachine stateMachine)
			{
			}
		}

		// State machine for the mute-retry lambda (<OnEnabledChanged>b__9_2):
		// calls ShadowWriterPatch.TryApplyMuteConfiguration(shouldMute: true) up to
		// 10 times, awaiting Task.Delay(500) between failed attempts (<i>5__1 is the
		// decompiled loop counter).
		private sealed class <<OnEnabledChanged>b__9_2>d : IAsyncStateMachine
		{
			public int <>1__state;

			public AsyncTaskMethodBuilder <>t__builder;

			public <>c <>4__this;

			private int <i>5__1;

			private TaskAwaiter <>u__1;

			private void MoveNext()
			{
				//IL_006a: Unknown result type (might be due to invalid IL or missing references)
				//IL_006f: Unknown result type (might be due to invalid IL or missing references)
				//IL_0076: Unknown result type (might be due to invalid IL or missing references)
				//IL_0036: Unknown result type (might be due to invalid IL or missing references)
				//IL_003b: Unknown result type (might be due to invalid IL or missing references)
				//IL_004f: Unknown result type (might be due to invalid IL or missing references)
				//IL_0050: Unknown result type (might be due to invalid IL or missing references)
				int num = <>1__state;
				try
				{
					if (num != 0)
					{
						<i>5__1 = 0;
						goto IL_00a0;
					}
					TaskAwaiter awaiter = <>u__1;
					<>u__1 = default(TaskAwaiter);
					num = (<>1__state = -1);
					goto IL_0085;
					IL_0085:
					((TaskAwaiter)(ref awaiter)).GetResult();
					<i>5__1++;
					goto IL_00a0;
					IL_00a0:
					// Loop head: stop after 10 attempts or once the mute sticks.
					if (<i>5__1 < 10 && !ShadowWriterPatch.TryApplyMuteConfiguration(shouldMute: true))
					{
						awaiter = global::System.Threading.Tasks.Task.Delay(500).GetAwaiter();
						if (!((TaskAwaiter)(ref awaiter)).IsCompleted)
						{
							num = (<>1__state = 0);
							<>u__1 = awaiter;
							<<OnEnabledChanged>b__9_2>d <<OnEnabledChanged>b__9_2>d = this;
							((AsyncTaskMethodBuilder)(ref <>t__builder)).AwaitUnsafeOnCompleted<TaskAwaiter, <<OnEnabledChanged>b__9_2>d>(ref awaiter, ref <<OnEnabledChanged>b__9_2>d);
							return;
						}
						goto IL_0085;
					}
				}
				catch (global::System.Exception exception)
				{
					<>1__state = -2;
					((AsyncTaskMethodBuilder)(ref <>t__builder)).SetException(exception);
					return;
				}
				<>1__state = -2;
				((AsyncTaskMethodBuilder)(ref <>t__builder)).SetResult();
			}

			[DebuggerHidden]
			private void SetStateMachine(IAsyncStateMachine stateMachine)
			{
			}
		}

		// Singleton instance used as the target for all cached non-capturing lambdas.
		public static readonly <>c <>9 = new <>c();

		// Cached delegate slots (one per lambda site) so delegates are allocated once.
		public static EventHandler <>9__6_2;

		public static Func<ParameterInfo, string> <>9__6_3;

		public static Func<ParameterInfo, string> <>9__6_4;

		public static Func<global::System.Threading.Tasks.Task?> <>9__9_2;

		public static Func<global::System.Threading.Tasks.Task?> <>9__9_0;

		public static Func<global::System.Threading.Tasks.Task?> <>9__9_1;

		// DebugLogging.SettingChanged handler: refresh the cached debug flag.
		internal void <Load>b__6_2(object? sender, EventArgs args)
		{
			_debugLogging = DEBUG_LOGGING.Value;
		}

		// Formats a parameter as "TypeName name" for the method-discovery log lines.
		internal string <Load>b__6_3(ParameterInfo p)
		{
			return ((MemberInfo)p.ParameterType).Name + " " + p.Name;
		}

		internal string <Load>b__6_4(ParameterInfo p)
		{
			return ((MemberInfo)p.ParameterType).Name + " " + p.Name;
		}

		// Kick-off stubs: create, start and return the matching state machine's Task.
		[AsyncStateMachine(typeof(<<OnEnabledChanged>b__9_0>d))]
		[DebuggerStepThrough]
		internal global::System.Threading.Tasks.Task? <OnEnabledChanged>b__9_0()
		{
			//IL_0007: Unknown result type (might be due to invalid IL or missing references)
			//IL_000c: Unknown result type (might be due to invalid IL or missing references)
			<<OnEnabledChanged>b__9_0>d <<OnEnabledChanged>b__9_0>d = new <<OnEnabledChanged>b__9_0>d
			{
				<>t__builder = AsyncTaskMethodBuilder.Create(),
				<>4__this = this,
				<>1__state = -1
			};
			((AsyncTaskMethodBuilder)(ref <<OnEnabledChanged>b__9_0>d.<>t__builder)).Start<<<OnEnabledChanged>b__9_0>d>(ref <<OnEnabledChanged>b__9_0>d);
			return ((AsyncTaskMethodBuilder)(ref <<OnEnabledChanged>b__9_0>d.<>t__builder)).Task;
		}

		[AsyncStateMachine(typeof(<<OnEnabledChanged>b__9_2>d))]
		[DebuggerStepThrough]
		internal global::System.Threading.Tasks.Task? <OnEnabledChanged>b__9_2()
		{
			//IL_0007: Unknown result type (might be due to invalid IL or missing references)
			//IL_000c: Unknown result type (might be due to invalid IL or missing references)
			<<OnEnabledChanged>b__9_2>d <<OnEnabledChanged>b__9_2>d = new <<OnEnabledChanged>b__9_2>d
			{
				<>t__builder = AsyncTaskMethodBuilder.Create(),
				<>4__this = this,
				<>1__state = -1
			};
			((AsyncTaskMethodBuilder)(ref <<OnEnabledChanged>b__9_2>d.<>t__builder)).Start<<<OnEnabledChanged>b__9_2>d>(ref <<OnEnabledChanged>b__9_2>d);
			return ((AsyncTaskMethodBuilder)(ref <<OnEnabledChanged>b__9_2>d.<>t__builder)).Task;
		}

		[AsyncStateMachine(typeof(<<OnEnabledChanged>b__9_1>d))]
		[DebuggerStepThrough]
		internal global::System.Threading.Tasks.Task? <OnEnabledChanged>b__9_1()
		{
			//IL_0007: Unknown result type (might be due to invalid IL or missing references)
			//IL_000c: Unknown result type (might be due to invalid IL or missing references)
			<<OnEnabledChanged>b__9_1>d <<OnEnabledChanged>b__9_1>d = new <<OnEnabledChanged>b__9_1>d
			{
				<>t__builder = AsyncTaskMethodBuilder.Create(),
				<>4__this = this,
				<>1__state = -1
			};
			((AsyncTaskMethodBuilder)(ref <<OnEnabledChanged>b__9_1>d.<>t__builder)).Start<<<OnEnabledChanged>b__9_1>d>(ref <<OnEnabledChanged>b__9_1>d);
			return ((AsyncTaskMethodBuilder)(ref <<OnEnabledChanged>b__9_1>d.<>t__builder)).Task;
		}
	}

	// Config entry: master switch for sharing audio with the renderer process.
	private static ConfigEntry<bool> ENABLED;

	// Config entry: which process gets muted to prevent double audio.
	private static ConfigEntry<MuteTarget> MUTE_TARGET;

	// Config entry: verbose logging toggle.
	private static ConfigEntry<bool> DEBUG_LOGGING;

	// Cached copies of the config values, refreshed by the SettingChanged handlers
	// and exposed via the static accessors so hot paths avoid the config system.
	private static MuteTarget _currentMuteTarget = MuteTarget.Host;

	private static bool _isEnabled = false;

	private static bool _debugLogging = false;

	/// <summary>
	/// Plugin entry point: binds the config entries, wires their change handlers,
	/// Harmony-postfix-patches the CSCore audio driver's Read/ReadAuto methods and
	/// the base driver's Start method with ShadowWriterPatch hooks, and — when
	/// enabled — initializes the shared-memory bus as the writer side.
	/// </summary>
	public override void Load()
	{
		//IL_00a7: Unknown result type (might be due to invalid IL or missing references)
		//IL_00b1: Expected O, but got Unknown
		//IL_00be: Unknown result type (might be due to invalid IL or missing references)
		//IL_00c8: Expected O, but got Unknown
		//IL_00e2: Unknown result type (might be due to invalid IL or missing references)
		//IL_00e7: Unknown result type (might be due to invalid IL or missing references)
		//IL_00ed: Expected O, but got Unknown
		//IL_05ae: Unknown result type (might be due to invalid IL or missing references)
		//IL_05bb: Expected O, but got Unknown
		//IL_0371: Unknown result type (might be due to invalid IL or missing references)
		//IL_037e: Expected O, but got Unknown
		try
		{
			// Bind configuration (defaults: sharing on, host muted, debug off).
			ENABLED = ((BasePlugin)this).Config.Bind<bool>("General", "Enabled", true, "Enable audio sharing to renderer process?");
			MUTE_TARGET = ((BasePlugin)this).Config.Bind<MuteTarget>("General", "MuteTarget", MuteTarget.Host, "Which process to mute (prevents double audio)?");
			DEBUG_LOGGING = ((BasePlugin)this).Config.Bind<bool>("General", "DebugLogging", false, "Enable debug/verbose logging?");
			UniLog.Log("[AudioBridge] Initializing audio sharing module", false);
			// Snapshot config into the static caches used by the accessors below.
			_currentMuteTarget = MUTE_TARGET.Value;
			_isEnabled = ENABLED.Value;
			_debugLogging = DEBUG_LOGGING.Value;
			ENABLED.SettingChanged += (EventHandler)([CompilerGenerated] (object? sender, EventArgs args) =>
			{
				OnEnabledChanged();
			});
			MUTE_TARGET.SettingChanged += (EventHandler)([CompilerGenerated] (object? sender, EventArgs args) =>
			{
				OnMuteTargetChanged();
			});
			// Decompiled cached-lambda pattern: allocate the DebugLogging handler
			// once and reuse it from the <>c.<>9__6_2 slot on later calls.
			ConfigEntry<bool> dEBUG_LOGGING = DEBUG_LOGGING;
			object obj = <>c.<>9__6_2;
			if (obj == null)
			{
				EventHandler val = delegate
				{
					_debugLogging = DEBUG_LOGGING.Value;
				};
				<>c.<>9__6_2 = val;
				obj = (object)val;
			}
			dEBUG_LOGGING.SettingChanged += (EventHandler)obj;
			UniLog.Log($"[AudioBridge] Mute target set to: {_currentMuteTarget}", false);
			UniLog.Log("[AudioBridge] Applying audio driver patches", false);
			Harmony harmonyInstance = ((BasePlugin)this).HarmonyInstance;
			try
			{
				global::System.Type typeFromHandle = typeof(CSCoreAudioOutputDriver);
				global::System.Type typeFromHandle2 = typeof(AudioOutputDriver);
				// (BindingFlags)54 = Instance | Static | Public | NonPublic.
				MethodInfo[] methods = typeFromHandle.GetMethods((BindingFlags)54);
				int num = 0;
				if (_debugLogging)
				{
					UniLog.Log($"[AudioBridge] Found {methods.Length} audio driver methods", false);
				}
				// Postfix-patch the driver's own Read(float[])/Read(byte[])/ReadAuto
				// overloads so ShadowWriterPatch sees every rendered audio buffer.
				MethodInfo[] array = methods;
				foreach (MethodInfo val2 in array)
				{
					if (!((MemberInfo)val2).Name.Contains("Read") && !((MemberInfo)val2).Name.Contains("read"))
					{
						continue;
					}
					ParameterInfo[] parameters = ((MethodBase)val2).GetParameters();
					string text = string.Join(", ", Enumerable.Select<ParameterInfo, string>((global::System.Collections.Generic.IEnumerable<ParameterInfo>)parameters, (Func<ParameterInfo, string>)((ParameterInfo p) => ((MemberInfo)p.ParameterType).Name + " " + p.Name)));
					if (_debugLogging)
					{
						UniLog.Log($"[AudioBridge] Discovered audio method: {((MemberInfo)val2).Name}({text})", false);
					}
					// Only patch methods declared directly on the CSCore driver,
					// not inherited ones (those are handled via the base type below).
					if (!(((MemberInfo)val2).DeclaringType == typeFromHandle))
					{
						continue;
					}
					try
					{
						// Choose the ShadowWriterPatch postfix matching the overload.
						string text2 = null;
						if (((MemberInfo)val2).Name == "Read" && parameters.Length == 3)
						{
							if (parameters[0].ParameterType == typeof(float[]))
							{
								text2 = "Read_Float_Postfix";
							}
							else if (parameters[0].ParameterType == typeof(byte[]))
							{
								text2 = "Read_Byte_Postfix";
							}
						}
						else if (((MemberInfo)val2).Name == "ReadAuto")
						{
							text2 = "ReadAuto_Span_Postfix";
						}
						if (text2 == null)
						{
							continue;
						}
						// (BindingFlags)40 = Static | NonPublic.
						MethodInfo method = typeof(ShadowWriterPatch).GetMethod(text2, (BindingFlags)40);
						if (method != (MethodInfo)null)
						{
							harmonyInstance.Patch((MethodBase)(object)val2, (HarmonyMethod)null, new HarmonyMethod(method), (HarmonyMethod)null, (HarmonyMethod)null, (HarmonyMethod)null);
							if (_debugLogging)
							{
								UniLog.Log("[AudioBridge] Successfully patched " + ((MemberInfo)val2).Name, false);
							}
							num++;
						}
						else if (_debugLogging)
						{
							UniLog.Log("[AudioBridge] Patch method " + text2 + " not found", false);
						}
					}
					catch (global::System.Exception ex)
					{
						if (_debugLogging)
						{
							UniLog.Log("[AudioBridge] Failed to patch " + ((MemberInfo)val2).Name + ": " + ex.Message, false);
						}
					}
				}
				// Second pass: postfix AudioOutputDriver.Start with Start_Base_Postfix.
				MethodInfo[] methods2 = typeFromHandle2.GetMethods((BindingFlags)52);
				if (_debugLogging)
				{
					UniLog.Log($"[AudioBridge] Base driver has {methods2.Length} methods", false);
				}
				MethodInfo[] array2 = methods2;
				foreach (MethodInfo val3 in array2)
				{
					if (!((MemberInfo)val3).Name.Contains("Read") && !((MemberInfo)val3).Name.Contains("read") && !(((MemberInfo)val3).Name == "Start"))
					{
						continue;
					}
					ParameterInfo[] parameters2 = ((MethodBase)val3).GetParameters();
					string text3 = string.Join(", ", Enumerable.Select<ParameterInfo, string>((global::System.Collections.Generic.IEnumerable<ParameterInfo>)parameters2, (Func<ParameterInfo, string>)((ParameterInfo p) => ((MemberInfo)p.ParameterType).Name + " " + p.Name)));
					if (_debugLogging)
					{
						UniLog.Log($"[AudioBridge] Found base method: {((MemberInfo)val3).Name}({text3})", false);
					}
					if (((MemberInfo)val3).Name == "Start" && ((MemberInfo)val3).DeclaringType == typeFromHandle2)
					{
						try
						{
							MethodInfo method2 = typeof(ShadowWriterPatch).GetMethod("Start_Base_Postfix", (BindingFlags)40);
							harmonyInstance.Patch((MethodBase)(object)val3, (HarmonyMethod)null, new HarmonyMethod(method2), (HarmonyMethod)null, (HarmonyMethod)null, (HarmonyMethod)null);
							UniLog.Log("[AudioBridge] Patched base Start method", false);
						}
						catch (global::System.Exception ex2)
						{
							UniLog.Log("[AudioBridge] Failed to patch base Start: " + ex2.Message, false);
						}
					}
				}
				if (_debugLogging)
				{
					UniLog.Log($"[AudioBridge] Successfully patched {num} audio methods", false);
				}
			}
			catch (global::System.Exception ex3)
			{
				UniLog.Error("[AudioBridge] Patching failed: " + ex3.Message, true);
			}
			UniLog.Log("[AudioBridge] Audio driver patching completed", false);
			UniLog.Log($"[AudioBridge] Audio sharing enabled: {_isEnabled}", false);
			if (_isEnabled)
			{
				// Writer-side startup: create/attach the shared ring and publish state.
				UniLog.Log("[AudioBridge] Initializing audio writer for host process", false);
				UniLog.Log("[AudioBridge] Initializing shared memory audio buffer", false);
				if (ShadowBus.EnsureInit(writer: true))
				{
					UniLog.Log("[AudioBridge] Shared memory audio buffer initialized", false);
					ShadowBus.ResetBufferIndices();
					ShadowBus.PublishEnabled(enabled: true);
					ShadowBus.PublishMuteTarget(_currentMuteTarget);
				}
				else
				{
					UniLog.Error("[AudioBridge] Failed to initialize shared memory buffer", true);
				}
			}
			else
			{
				UniLog.Log("[AudioBridge] Audio sharing is disabled", false);
			}
		}
		catch (global::System.Exception ex4)
		{
			UniLog.Error("[AudioBridge] Initialization failed: " + ex4.Message, true);
			UniLog.Error("[AudioBridge] Stack trace: " + ex4.StackTrace, true);
		}
	}

	/// <summary>Writes an informational line to the game log with the plugin prefix.</summary>
	/// <param name="s">Message text, logged after the "[AudioBridge] " prefix.</param>
	internal static void Msg(string s) => UniLog.Log($"[AudioBridge] {s}", false);

	/// <summary>Writes an error line to the game log with the plugin prefix.</summary>
	/// <param name="s">Message text, logged after the "[AudioBridge] " prefix.</param>
	internal static void Err(string s)
	{
		// Consistency fix: every other UniLog.Error call in this plugin passes true
		// as the second argument; this helper was the lone outlier passing false.
		UniLog.Error("[AudioBridge] " + s, true);
	}

	/// <summary>
	/// Handles changes to the "Enabled" config entry. On false→true, runs the
	/// decompiled async lambda b__9_0 (100 ms delay, then writer-side bus init,
	/// index reset, enabled publish and optional host-mute retry loop). On
	/// true→false, unmutes the host if it was the mute target, publishes
	/// enabled=false, then runs b__9_1 (500 ms delay, then bus shutdown).
	/// </summary>
	private void OnEnabledChanged()
	{
		bool isEnabled = _isEnabled;
		_isEnabled = ENABLED.Value;
		UniLog.Log($"[AudioBridge] Audio sharing enabled changed from {isEnabled} to {_isEnabled}", false);
		if (_isEnabled && !isEnabled)
		{
			UniLog.Log("[AudioBridge] Enabling audio sharing...", false);
			// Fire-and-forget the enable state machine on the thread pool.
			global::System.Threading.Tasks.Task.Run((Func<global::System.Threading.Tasks.Task>)([AsyncStateMachine(typeof(<>c.<<OnEnabledChanged>b__9_0>d))] [DebuggerStepThrough] () =>
			{
				//IL_0007: Unknown result type (might be due to invalid IL or missing references)
				//IL_000c: Unknown result type (might be due to invalid IL or missing references)
				<>c.<<OnEnabledChanged>b__9_0>d <<OnEnabledChanged>b__9_0>d = new <>c.<<OnEnabledChanged>b__9_0>d
				{
					<>t__builder = AsyncTaskMethodBuilder.Create(),
					<>4__this = <>c.<>9,
					<>1__state = -1
				};
				((AsyncTaskMethodBuilder)(ref <<OnEnabledChanged>b__9_0>d.<>t__builder)).Start<<>c.<<OnEnabledChanged>b__9_0>d>(ref <<OnEnabledChanged>b__9_0>d);
				return ((AsyncTaskMethodBuilder)(ref <<OnEnabledChanged>b__9_0>d.<>t__builder)).Task;
			}));
		}
		else if (!_isEnabled && isEnabled)
		{
			UniLog.Log("[AudioBridge] Disabling audio sharing...", false);
			// Restore host audio before tearing the bus down.
			if (_currentMuteTarget == MuteTarget.Host)
			{
				ShadowWriterPatch.ApplyMuteConfiguration(shouldMute: false);
			}
			ShadowBus.PublishEnabled(enabled: false);
			global::System.Threading.Tasks.Task.Run((Func<global::System.Threading.Tasks.Task>)([AsyncStateMachine(typeof(<>c.<<OnEnabledChanged>b__9_1>d))] [DebuggerStepThrough] () =>
			{
				//IL_0007: Unknown result type (might be due to invalid IL or missing references)
				//IL_000c: Unknown result type (might be due to invalid IL or missing references)
				<>c.<<OnEnabledChanged>b__9_1>d <<OnEnabledChanged>b__9_1>d = new <>c.<<OnEnabledChanged>b__9_1>d
				{
					<>t__builder = AsyncTaskMethodBuilder.Create(),
					<>4__this = <>c.<>9,
					<>1__state = -1
				};
				((AsyncTaskMethodBuilder)(ref <<OnEnabledChanged>b__9_1>d.<>t__builder)).Start<<>c.<<OnEnabledChanged>b__9_1>d>(ref <<OnEnabledChanged>b__9_1>d);
				return ((AsyncTaskMethodBuilder)(ref <<OnEnabledChanged>b__9_1>d.<>t__builder)).Task;
			}));
		}
	}

	/// <summary>
	/// Handles changes to the "MuteTarget" config entry: caches the new value,
	/// publishes it over the shared-memory bus and re-applies the host mute state
	/// when the host is entering or leaving the muted role.
	/// </summary>
	private void OnMuteTargetChanged()
	{
		MuteTarget previous = _currentMuteTarget;
		_currentMuteTarget = MUTE_TARGET.Value;
		UniLog.Log($"[AudioBridge] Mute target changed from {previous} to {_currentMuteTarget}", false);
		if (!_isEnabled)
		{
			return;
		}
		ShadowBus.PublishMuteTarget(_currentMuteTarget);
		// Only touch host muting when the host was or becomes the muted process.
		bool hostInvolved = previous == MuteTarget.Host || _currentMuteTarget == MuteTarget.Host;
		if (hostInvolved)
		{
			ShadowWriterPatch.ApplyMuteConfiguration(_currentMuteTarget == MuteTarget.Host);
		}
	}

	/// <summary>The mute target as last cached from config.</summary>
	internal static MuteTarget GetCurrentMuteTarget() => _currentMuteTarget;

	/// <summary>Whether audio sharing is currently enabled (cached config value).</summary>
	internal static bool IsEnabled() => _isEnabled;

	/// <summary>Whether verbose/debug logging is enabled (cached config value).</summary>
	internal static bool IsDebugLogging() => _debugLogging;
}
internal static class ShadowBus
{
	// Shared-memory header layout (64 bytes; offsets grounded in the accesses below):
	//   0   uint     write index into the ring
	//   4   uint     read index into the ring
	//   8   int      sample rate (Hz)
	//   12  int      channel count
	//   16  int      MuteTarget value
	//   20  int      enabled flag (1 = enabled)
	//   24  byte[36] ASCII session id, zero-padded
	// The 2 MiB audio ring buffer follows the header.
	private const string MMF_NAME = "AudioBridge_SharedMemory";

	private const string MUTEX_NAME = "AudioBridge_SharedMemory_Mutex";

	private const int HEADER_BYTES = 64;

	private const int RING_BYTES = 2097152;

	// Total mapping size = HEADER_BYTES + RING_BYTES.
	private const int MMF_BYTES = 2097216;

	private static MemoryMappedFile _mmf;

	private static MemoryMappedViewAccessor _view;

	// Cross-process mutex guarding all header reads/writes.
	private static Mutex _mtx;

	// volatile: checked without holding the mutex from every public entry point.
	private static volatile bool _inited;

	/// <summary>
	/// Idempotently attaches this process to the shared-memory bus. As writer it
	/// creates (or reopens) the mapping and mutex, sanity-checks the ring indices
	/// and publishes the initial format/mute/enabled/session header fields. As
	/// reader it retries opening the existing mapping up to 10 times, 500 ms apart.
	/// Returns true on success; on failure cleans up any partially created handles
	/// and returns false.
	/// </summary>
	/// <param name="writer">True for the host/writer side, false for the reader.</param>
	/// <param name="sampleRate">Initial sample rate published by the writer.</param>
	/// <param name="channels">Initial channel count published by the writer.</param>
	/// <param name="sessionId">Optional session id written into the header.</param>
	public static bool EnsureInit(bool writer, int sampleRate = 48000, int channels = 2, string sessionId = null)
	{
		//IL_00a9: Unknown result type (might be due to invalid IL or missing references)
		//IL_00b3: Expected O, but got Unknown
		//IL_03b7: Unknown result type (might be due to invalid IL or missing references)
		if (_inited)
		{
			return true;
		}
		if (AudioBridge.IsDebugLogging())
		{
			UniLog.Log("[AudioBridge] Initializing shared memory as " + (writer ? "writer" : "reader"), false);
		}
		try
		{
			if (writer)
			{
				// Writer path: create the mapping, mutex and accessor.
				if (AudioBridge.IsDebugLogging())
				{
					UniLog.Log("[AudioBridge] Creating shared memory: AudioBridge_SharedMemory", false);
				}
				_mmf = MemoryMappedFile.CreateOrOpen("AudioBridge_SharedMemory", 2097216L, (MemoryMappedFileAccess)0);
				if (AudioBridge.IsDebugLogging())
				{
					UniLog.Log("[AudioBridge] Shared memory created", false);
				}
				if (AudioBridge.IsDebugLogging())
				{
					UniLog.Log("[AudioBridge] Creating synchronization mutex: AudioBridge_SharedMemory_Mutex", false);
				}
				_mtx = new Mutex(false, "AudioBridge_SharedMemory_Mutex");
				if (AudioBridge.IsDebugLogging())
				{
					UniLog.Log("[AudioBridge] Mutex created", false);
				}
				_view = _mmf.CreateViewAccessor(0L, 2097216L, (MemoryMappedFileAccess)0);
				if (AudioBridge.IsDebugLogging())
				{
					UniLog.Log("[AudioBridge] Memory accessor created", false);
				}
				((WaitHandle)_mtx).WaitOne();
				try
				{
					// Ring indices past the ring size mean stale/corrupt state from
					// a previous run (CreateOrOpen may have reopened it) — reset.
					uint num = ((UnmanagedMemoryAccessor)_view).ReadUInt32(0L);
					uint num2 = ((UnmanagedMemoryAccessor)_view).ReadUInt32(4L);
					if (AudioBridge.IsDebugLogging())
					{
						UniLog.Log($"[AudioBridge] Buffer indices: write={num}, read={num2}", false);
					}
					if (num > 2097152 || num2 > 2097152)
					{
						UniLog.Log("[AudioBridge] Resetting buffer indices", false);
						((UnmanagedMemoryAccessor)_view).Write(0L, 0u);
						((UnmanagedMemoryAccessor)_view).Write(4L, 0u);
					}
					// Publish the initial header fields (see layout comment above).
					((UnmanagedMemoryAccessor)_view).Write(8L, sampleRate);
					((UnmanagedMemoryAccessor)_view).Write(12L, channels);
					((UnmanagedMemoryAccessor)_view).Write(16L, (int)AudioBridge.GetCurrentMuteTarget());
					((UnmanagedMemoryAccessor)_view).Write(20L, AudioBridge.IsEnabled() ? 1 : 0);
					if (!string.IsNullOrEmpty(sessionId))
					{
						WriteSessionId(sessionId);
					}
					if (AudioBridge.IsDebugLogging())
					{
						UniLog.Log($"[AudioBridge] Audio format: {sampleRate}Hz, {channels} channels, SessionID: {sessionId ?? "none"}", false);
					}
				}
				finally
				{
					_mtx.ReleaseMutex();
				}
			}
			else
			{
				// Reader path: the writer creates the mapping, so retry opening it.
				UniLog.Log("[AudioBridge] Opening shared memory: AudioBridge_SharedMemory", false);
				bool flag = false;
				global::System.Exception ex = null;
				for (int i = 0; i < 10; i++)
				{
					try
					{
						_mmf = MemoryMappedFile.OpenExisting("AudioBridge_SharedMemory", (MemoryMappedFileRights)6);
						flag = true;
						UniLog.Log($"[AudioBridge] Shared memory opened on attempt {i + 1}", false);
					}
					catch (global::System.Exception ex2)
					{
						ex = ex2;
						if (i < 9)
						{
							if (AudioBridge.IsDebugLogging())
							{
								UniLog.Log($"[AudioBridge] Waiting for shared memory (attempt {i + 1}/10)", false);
							}
							Thread.Sleep(500);
						}
						continue;
					}
					break;
				}
				if (!flag)
				{
					UniLog.Error("[AudioBridge] Failed to open shared memory: " + ex?.Message, true);
					throw new InvalidOperationException("MMF AudioBridge_SharedMemory not found", ex);
				}
				UniLog.Log("[AudioBridge] Opening synchronization mutex", false);
				_mtx = Mutex.OpenExisting("AudioBridge_SharedMemory_Mutex");
				UniLog.Log("[AudioBridge] Mutex opened", false);
				_view = _mmf.CreateViewAccessor(0L, 2097216L, (MemoryMappedFileAccess)0);
				UniLog.Log("[AudioBridge] Memory accessor created", false);
			}
			_inited = true;
			UniLog.Log("[AudioBridge] Shared memory initialization complete", false);
			return true;
		}
		catch (global::System.Exception ex3)
		{
			// Best-effort teardown of whatever was created before the failure.
			UniLog.Error("[AudioBridge] Shared memory initialization failed: " + ex3.Message, true);
			UniLog.Error("[AudioBridge] Stack trace: " + ex3.StackTrace, true);
			try
			{
				MemoryMappedViewAccessor view = _view;
				if (view != null)
				{
					((UnmanagedMemoryAccessor)view).Dispose();
				}
			}
			catch
			{
			}
			try
			{
				Mutex mtx = _mtx;
				if (mtx != null)
				{
					((WaitHandle)mtx).Dispose();
				}
			}
			catch
			{
			}
			try
			{
				MemoryMappedFile mmf = _mmf;
				if (mmf != null)
				{
					mmf.Dispose();
				}
			}
			catch
			{
			}
			_view = null;
			_mtx = null;
			_mmf = null;
			return false;
		}
	}

	/// <summary>
	/// Stores <paramref name="sessionId"/> as ASCII in the 36-byte header slot at
	/// offset 24, zero-filling the slot first and truncating longer ids. No-op when
	/// the bus is not initialized or the id is null/empty.
	/// </summary>
	public static void WriteSessionId(string sessionId)
	{
		if (!_inited || string.IsNullOrEmpty(sessionId))
		{
			return;
		}
		_mtx.WaitOne();
		try
		{
			// Clear the slot so a shorter id is not contaminated by a previous longer one.
			_view.WriteArray(24L, new byte[36], 0, 36);
			byte[] ascii = Encoding.ASCII.GetBytes(sessionId);
			_view.WriteArray(24L, ascii, 0, Math.Min(ascii.Length, 36));
			if (AudioBridge.IsDebugLogging())
			{
				UniLog.Log("[AudioBridge] Written SessionID to shared memory: " + sessionId, false);
			}
		}
		finally
		{
			_mtx.ReleaseMutex();
		}
	}

	/// <summary>
	/// Reads the ASCII session id from the 36-byte header slot at offset 24.
	/// Returns null before initialization or when the slot is empty.
	/// </summary>
	public static string ReadSessionId()
	{
		if (!_inited)
		{
			return null;
		}
		_mtx.WaitOne();
		try
		{
			byte[] slot = new byte[36];
			_view.ReadArray(24L, slot, 0, 36);
			// The id is zero-padded; its length is the index of the first NUL.
			int length = global::System.Array.IndexOf(slot, (byte)0);
			if (length < 0)
			{
				length = slot.Length;
			}
			return length == 0 ? null : Encoding.ASCII.GetString(slot, 0, length);
		}
		finally
		{
			_mtx.ReleaseMutex();
		}
	}

	/// <summary>
	/// Publishes the writer's audio format (header offsets 8/12), refreshes the
	/// mute target (offset 16) and optionally the session id. No-op before init.
	/// </summary>
	public static void PublishFormat(int sampleRate, int channels, string sessionId = null)
	{
		if (!_inited)
		{
			return;
		}
		_mtx.WaitOne();
		try
		{
			if (AudioBridge.IsDebugLogging())
			{
				UniLog.Log($"[AudioBridge] Publishing audio format: {sampleRate}Hz, {channels}ch, SessionID: {sessionId ?? "none"}", false);
			}
			_view.Write(8L, sampleRate);
			_view.Write(12L, channels);
			_view.Write(16L, (int)AudioBridge.GetCurrentMuteTarget());
			if (!string.IsNullOrEmpty(sessionId))
			{
				// Named mutexes are reentrant for the owning thread, so the nested
				// WaitOne inside WriteSessionId is safe here.
				WriteSessionId(sessionId);
			}
		}
		finally
		{
			_mtx.ReleaseMutex();
		}
	}

	/// <summary>Writes the given mute target into the header (offset 16). No-op before init.</summary>
	public static void PublishMuteTarget(MuteTarget target)
	{
		if (!_inited)
		{
			return;
		}
		_mtx.WaitOne();
		try
		{
			_view.Write(16L, (int)target);
			if (AudioBridge.IsDebugLogging())
			{
				UniLog.Log($"[AudioBridge] Published mute target: {target}", false);
			}
		}
		finally
		{
			_mtx.ReleaseMutex();
		}
	}

	/// <summary>
	/// Reads (sampleRate, channels) from header offsets 8/12, falling back to
	/// (48000, 2) before initialization or when either stored value is non-positive.
	/// </summary>
	public static ValueTuple<int, int> ReadFormat()
	{
		//IL_0015: Unknown result type (might be due to invalid IL or missing references)
		//IL_001a: Unknown result type (might be due to invalid IL or missing references)
		if (!_inited)
		{
			return (48000, 2);
		}
		_mtx.WaitOne();
		try
		{
			int rate = _view.ReadInt32(8L);
			int channels = _view.ReadInt32(12L);
			if (rate <= 0 || channels <= 0)
			{
				return (48000, 2);
			}
			return (rate, channels);
		}
		finally
		{
			_mtx.ReleaseMutex();
		}
	}

	/// <summary>
	/// Reads the mute target from header offset 16, defaulting to Renderer before
	/// initialization or when the stored value is outside the enum's 0..2 range.
	/// </summary>
	public static MuteTarget ReadMuteTarget()
	{
		if (!_inited)
		{
			return MuteTarget.Renderer;
		}
		_mtx.WaitOne();
		try
		{
			int raw = _view.ReadInt32(16L);
			if (raw >= 0 && raw <= 2)
			{
				return (MuteTarget)raw;
			}
			return MuteTarget.Renderer;
		}
		finally
		{
			_mtx.ReleaseMutex();
		}
	}

	/// <summary>
	/// Writes the enabled flag into header offset 20. Disabling also rewinds both
	/// ring indices so a later re-enable starts from an empty buffer. No-op before init.
	/// </summary>
	public static void PublishEnabled(bool enabled)
	{
		if (!_inited)
		{
			return;
		}
		_mtx.WaitOne();
		try
		{
			_view.Write(20L, enabled ? 1 : 0);
			if (!enabled)
			{
				_view.Write(0L, 0u);
				_view.Write(4L, 0u);
				if (AudioBridge.IsDebugLogging())
				{
					UniLog.Log("[AudioBridge] Reset buffer indices on disable", false);
				}
			}
			if (AudioBridge.IsDebugLogging())
			{
				UniLog.Log($"[AudioBridge] Published enabled state: {enabled}", false);
			}
		}
		finally
		{
			_mtx.ReleaseMutex();
		}
	}

	// Reads the enabled flag (offset 20) from the shared-memory header.
	// Returns false when the bus has not been initialized.
	public static bool ReadEnabled()
	{
		if (!_inited)
			return false;
		_mtx.WaitOne();
		try
		{
			int flag = _view.ReadInt32(20L);
			return flag == 1;
		}
		finally
		{
			_mtx.ReleaseMutex();
		}
	}

	// Zeroes both ring-buffer indices (write at offset 0, read at offset 4),
	// effectively discarding any buffered-but-unread audio.
	public static void ResetBufferIndices()
	{
		if (!_inited)
			return;
		_mtx.WaitOne();
		try
		{
			_view.Write(0L, 0u);
			_view.Write(4L, 0u);
			if (AudioBridge.IsDebugLogging())
				UniLog.Log("[AudioBridge] Buffer indices reset to 0", false);
		}
		finally
		{
			_mtx.ReleaseMutex();
		}
	}

	// Tears down the shared-memory bus: flips _inited first so concurrent callers
	// bail out, then disposes the view, mutex and mapped file. Each handle is
	// disposed independently with its own swallow-all catch, so a failure on one
	// never prevents the others from being released.
	public static void Shutdown()
	{
		if (!_inited)
			return;
		UniLog.Log("[AudioBridge] Shutting down shared memory", false);
		_inited = false;
		try
		{
			_view?.Dispose();
		}
		catch
		{
		}
		try
		{
			_mtx?.Dispose();
		}
		catch
		{
		}
		try
		{
			_mmf?.Dispose();
		}
		catch
		{
		}
		_view = null;
		_mtx = null;
		_mmf = null;
	}

	// Appends interleaved float samples to the shared ring buffer.
	// Header layout used here: write index at offset 0, read index at offset 4;
	// ring data starts at byte 64 and spans 2,097,152 bytes. Samples that do not
	// fit (reader too slow) are silently dropped.
	public static void WriteFloats(global::System.ReadOnlySpan<float> src)
	{
		if (!_inited || src.IsEmpty)
		{
			return;
		}
		// Reinterpret the float samples as raw bytes for the byte-oriented ring.
		global::System.ReadOnlySpan<byte> readOnlySpan = MemoryMarshal.AsBytes<float>(src);
		((WaitHandle)_mtx).WaitOne();
		try
		{
			uint num = ((UnmanagedMemoryAccessor)_view).ReadUInt32(0L); // write index
			uint num2 = ((UnmanagedMemoryAccessor)_view).ReadUInt32(4L); // read index
			// Free space, keeping one byte unused so "full" is distinguishable from "empty".
			int num3 = (int)((2097152 + num2 - num - 1) % 2097152);
			int num4 = Math.Min(num3, readOnlySpan.Length);
			if (num4 > 0)
			{
				int num5 = (int)(64 + num);
				// First segment: up to the physical end of the ring.
				int num6 = Math.Min(num4, (int)(2097152 - num));
				((UnmanagedMemoryAccessor)_view).WriteArray<byte>((long)num5, readOnlySpan.Slice(0, num6).ToArray(), 0, num6);
				if (num4 > num6)
				{
					// Second segment: wrap around to the start of the ring data area.
					MemoryMappedViewAccessor view = _view;
					long num7 = 64L;
					int num8 = num6;
					((UnmanagedMemoryAccessor)view).WriteArray<byte>(num7, readOnlySpan.Slice(num8, num4 - num8).ToArray(), 0, num4 - num6);
				}
				// Publish the advanced write index only after the payload is in place.
				num = (uint)((num + num4) % 2097152);
				((UnmanagedMemoryAccessor)_view).Write(0L, num);
			}
		}
		finally
		{
			_mtx.ReleaseMutex();
		}
	}

	// Drains up to dst.Length float samples from the shared ring buffer.
	// Returns the number of floats actually copied (0 when the ring is empty or
	// the bus is not initialized). Advances the read index (header offset 4).
	public static int ReadFloats(global::System.Span<float> dst)
	{
		if (!_inited || dst.IsEmpty)
		{
			return 0;
		}
		// View the destination as raw bytes; the ring stores 4 bytes per sample.
		global::System.Span<byte> span = MemoryMarshal.AsBytes<float>(dst);
		int num = 0;
		((WaitHandle)_mtx).WaitOne();
		try
		{
			uint num2 = ((UnmanagedMemoryAccessor)_view).ReadUInt32(0L); // write index
			uint num3 = ((UnmanagedMemoryAccessor)_view).ReadUInt32(4L); // read index
			// Bytes currently available in the ring.
			int num4 = (int)((2097152 + num2 - num3) % 2097152);
			if (num4 <= 0)
			{
				return 0;
			}
			int num5 = Math.Min(num4, span.Length);
			int num6 = (int)(64 + num3);
			// First segment: up to the physical end of the ring.
			int num7 = Math.Min(num5, (int)(2097152 - num3));
			byte[] array = new byte[num7];
			((UnmanagedMemoryAccessor)_view).ReadArray<byte>((long)num6, array, 0, num7);
			MemoryExtensions.CopyTo<byte>(array, span);
			if (num5 > num7)
			{
				// Second segment: wrap around to the start of the ring data area.
				int num8 = num5 - num7;
				byte[] array2 = new byte[num8];
				((UnmanagedMemoryAccessor)_view).ReadArray<byte>(64L, array2, 0, num8);
				int num9 = num7;
				MemoryExtensions.CopyTo<byte>(array2, span.Slice(num9, span.Length - num9));
			}
			num3 = (uint)((num3 + num5) % 2097152);
			((UnmanagedMemoryAccessor)_view).Write(4L, num3);
			num = num5;
		}
		finally
		{
			_mtx.ReleaseMutex();
		}
		// num is a byte count; 4 bytes per 32-bit float sample.
		return num / 4;
	}
}
// Harmony patch container: taps FrooxEngine's CSCore audio output driver,
// mirrors every rendered buffer into the shared-memory ring (ShadowBus), and
// optionally silences the host's own playback when the mute target is Host.
internal static class ShadowWriterPatch
{
	// Accessor for CSCoreAudioOutputDriver's private "_out" WasapiOut field.
	private static readonly FieldRef<CSCoreAudioOutputDriver, WasapiOut> _fOut = AccessTools.FieldRefAccess<CSCoreAudioOutputDriver, WasapiOut>("_out");

	// Set once the shared-memory bus has been created and the format published.
	private static bool _busInitialized = false;

	// Last WasapiOut observed in Start_Base_Postfix; used for session-level muting.
	private static WasapiOut _currentAudioOutput = null;

	private static bool _isMuted = false;

	// Engine.UniqueSessionID captured via reflection, forwarded to the renderer.
	private static string _capturedSessionId = null;

	// When true, buffers are zeroed after mirroring so the host stays silent.
	private static bool _shouldMuteHostAudio = false;

	// One-shot logging flags so each diagnostic message is emitted only once.
	private static bool _loggedUnsupported = false;

	private static bool _loggedByte = false;

	private static bool _loggedAuto = false;

	private static bool _loggedFormat = false;

	private static int _writeCounter = 0;

	// Postfix for the float-based driver Read: mirrors the rendered samples into
	// the shared ring, then optionally zeroes the host's buffer (Host mute).
	// NOTE(review): __result appears to be a byte count here (it is divided by 4
	// to obtain the float sample count) — confirm against the driver's contract.
	private static void Read_Float_Postfix(CSCoreAudioOutputDriver __instance, float[] buffer, int offset, int count, ref int __result)
	{
		try
		{
			if (!AudioBridge.IsEnabled())
			{
				return;
			}
			WasapiOut val = _fOut.Invoke(__instance);
			WaveFormat val2 = ((val != null) ? val.ActualOutputFormat : null);
			if (!_loggedFormat && val2 != null)
			{
				if (AudioBridge.IsDebugLogging())
				{
					UniLog.Log($"[AudioBridge] Audio output detected: {val2.SampleRate}Hz, {val2.Channels}ch, {val2.BitsPerSample}bit", false);
				}
				_loggedFormat = true;
			}
			// This path only handles 32-bit output; other depths go through Read_Byte_Postfix.
			if (val2 == null || val2.BitsPerSample != 32)
			{
				return;
			}
			int num = Math.Max(0, __result / 4);
			if (num == 0)
			{
				return;
			}
			// Lazily create the shared-memory bus the first time audio flows.
			if (!_busInitialized)
			{
				if (!ShadowBus.EnsureInit(writer: true, val2.SampleRate, val2.Channels, _capturedSessionId))
				{
					return;
				}
				ShadowBus.PublishFormat(val2.SampleRate, val2.Channels, _capturedSessionId);
				_busInitialized = true;
				UniLog.Log("[AudioBridge] Shared memory initialized for audio streaming", false);
			}
			ShadowBus.WriteFloats(global::System.Span<float>.op_Implicit(MemoryExtensions.AsSpan<float>(buffer, offset, num)));
			// Zero the host buffer AFTER mirroring so the renderer still gets the audio.
			if (_shouldMuteHostAudio && AudioBridge.GetCurrentMuteTarget() == MuteTarget.Host)
			{
				global::System.Array.Clear((global::System.Array)buffer, offset, num);
			}
			_writeCounter++;
			if (_writeCounter % 1000 == 0 && AudioBridge.IsDebugLogging())
			{
				UniLog.Log($"[AudioBridge] Processed {_writeCounter} audio chunks", false);
			}
		}
		catch (global::System.Exception ex)
		{
			AudioBridge.Err("Audio float processing error: " + ex.Message);
		}
	}

	// Postfix for the byte-based driver Read: converts 32/24/16-bit PCM to float
	// samples, mirrors them into the shared ring, and optionally mutes the host.
	private static void Read_Byte_Postfix(CSCoreAudioOutputDriver __instance, byte[] buffer, int offset, int count, ref int __result)
	{
		try
		{
			if (!AudioBridge.IsEnabled() || __result <= 0)
			{
				return;
			}
			WasapiOut val = _fOut.Invoke(__instance);
			WaveFormat val2 = ((val != null) ? val.ActualOutputFormat : null);
			if (val2 == null)
			{
				return;
			}
			if (!_loggedByte)
			{
				if (AudioBridge.IsDebugLogging())
				{
					UniLog.Log($"[AudioBridge] Audio output detected (byte mode): {val2.SampleRate}Hz, {val2.Channels}ch, {val2.BitsPerSample}bit", false);
				}
				_loggedByte = true;
			}
			int num = __result;
			int sampleRate = val2.SampleRate;
			int channels = val2.Channels;
			int bitsPerSample = val2.BitsPerSample;
			// Lazily create the shared-memory bus the first time audio flows.
			if (!_busInitialized)
			{
				if (!ShadowBus.EnsureInit(writer: true, sampleRate, channels, _capturedSessionId))
				{
					return;
				}
				ShadowBus.PublishFormat(sampleRate, channels, _capturedSessionId);
				_busInitialized = true;
				UniLog.Log("[AudioBridge] Shared memory initialized for audio streaming", false);
			}
			switch (bitsPerSample)
			{
			case 32:
			{
				// Bytes are already IEEE floats; reinterpret via block copy.
				float[] array2 = new float[num / 4];
				Buffer.BlockCopy((global::System.Array)buffer, offset, (global::System.Array)array2, 0, num);
				ShadowBus.WriteFloats(global::System.Span<float>.op_Implicit(MemoryExtensions.AsSpan<float>(array2)));
				if (_shouldMuteHostAudio && AudioBridge.GetCurrentMuteTarget() == MuteTarget.Host)
				{
					global::System.Array.Clear((global::System.Array)buffer, offset, num);
				}
				_writeCounter++;
				if (_writeCounter % 1000 == 0 && AudioBridge.IsDebugLogging())
				{
					UniLog.Log($"[AudioBridge] Processed {_writeCounter} audio chunks (32-bit float)", false);
				}
				break;
			}
			case 16:
			{
				// 16-bit signed PCM -> float in [-1, 1): divide by 32768.
				int num2 = num / 2;
				float[] array = new float[num2];
				for (int i = 0; i < num2; i++)
				{
					short num3 = BitConverter.ToInt16(buffer, offset + i * 2);
					array[i] = (float)num3 / 32768f;
				}
				ShadowBus.WriteFloats(global::System.Span<float>.op_Implicit(MemoryExtensions.AsSpan<float>(array)));
				if (_shouldMuteHostAudio && AudioBridge.GetCurrentMuteTarget() == MuteTarget.Host)
				{
					global::System.Array.Clear((global::System.Array)buffer, offset, num);
				}
				_writeCounter++;
				if (_writeCounter % 1000 == 0 && AudioBridge.IsDebugLogging())
				{
					UniLog.Log($"[AudioBridge] Processed {_writeCounter} audio chunks (16-bit PCM)", false);
				}
				break;
			}
			case 24:
			{
				// 24-bit little-endian PCM: pack the 3 bytes into the top of an int,
				// arithmetic-shift right to sign-extend, then scale by 2^23.
				int num4 = num / 3;
				float[] array3 = new float[num4];
				for (int j = 0; j < num4; j++)
				{
					int num5 = (buffer[offset + j * 3] << 8) | (buffer[offset + j * 3 + 1] << 16) | (buffer[offset + j * 3 + 2] << 24);
					num5 >>= 8;
					array3[j] = (float)num5 / 8388608f;
				}
				ShadowBus.WriteFloats(global::System.Span<float>.op_Implicit(MemoryExtensions.AsSpan<float>(array3)));
				if (_shouldMuteHostAudio && AudioBridge.GetCurrentMuteTarget() == MuteTarget.Host)
				{
					global::System.Array.Clear((global::System.Array)buffer, offset, num);
				}
				_writeCounter++;
				if (_writeCounter % 1000 == 0 && AudioBridge.IsDebugLogging())
				{
					UniLog.Log($"[AudioBridge] Processed {_writeCounter} audio chunks (24-bit PCM)", false);
				}
				break;
			}
			default:
				if (!_loggedUnsupported && AudioBridge.IsDebugLogging())
				{
					UniLog.Log($"[AudioBridge] Unsupported audio format: {bitsPerSample}-bit", false);
					_loggedUnsupported = true;
				}
				break;
			}
		}
		catch (global::System.Exception ex)
		{
			AudioBridge.Err("Audio byte processing error: " + ex.Message);
		}
	}

	// Diagnostic-only postfix: logs (once) that the span-based auto-read path ran.
	private static void ReadAuto_Span_Postfix(CSCoreAudioOutputDriver __instance, ref int __result)
	{
		try
		{
			if (!_loggedAuto && AudioBridge.IsDebugLogging())
			{
				UniLog.Log("[AudioBridge] Auto-read method detected", false);
				_loggedAuto = true;
			}
		}
		catch (global::System.Exception ex)
		{
			AudioBridge.Err("Auto-read processing error: " + ex.Message);
		}
	}

	// Postfix for driver start-up: walks Driver.System -> Engine -> UniqueSessionID
	// via reflection (field names vary by build, hence the ?? chains), records the
	// WasapiOut for muting, and re-applies the Host mute if it was configured.
	private static void Start_Base_Postfix(AudioOutputDriver __instance, string context)
	{
		try
		{
			if (AudioBridge.IsDebugLogging())
			{
				UniLog.Log("[AudioBridge] Audio driver started with context: " + context, false);
			}
			try
			{
				FieldInfo val = AccessTools.Field(typeof(AudioOutputDriver), "System") ?? AccessTools.Field(typeof(AudioOutputDriver), "system") ?? AccessTools.Field(typeof(AudioOutputDriver), "_system");
				if (val != (FieldInfo)null)
				{
					object value = val.GetValue((object)__instance);
					if (value != null)
					{
						FieldInfo val2 = AccessTools.Field(value.GetType(), "Engine") ?? AccessTools.Field(value.GetType(), "engine") ?? AccessTools.Field(value.GetType(), "_engine");
						if (val2 != (FieldInfo)null)
						{
							object value2 = val2.GetValue(value);
							if (value2 != null)
							{
								PropertyInfo val3 = AccessTools.Property(value2.GetType(), "UniqueSessionID");
								if (val3 != (PropertyInfo)null)
								{
									object value3 = val3.GetValue(value2);
									if (value3 != null)
									{
										_capturedSessionId = value3.ToString();
										if (AudioBridge.IsDebugLogging())
										{
											UniLog.Log("[AudioBridge] Captured Engine SessionID: " + _capturedSessionId, false);
										}
										// If the bus already exists, push the late-arriving session id.
										if (_busInitialized && _capturedSessionId != null)
										{
											ShadowBus.WriteSessionId(_capturedSessionId);
										}
									}
								}
							}
						}
					}
				}
			}
			catch (global::System.Exception ex)
			{
				if (AudioBridge.IsDebugLogging())
				{
					UniLog.Log("[AudioBridge] Could not capture SessionID: " + ex.Message, false);
				}
			}
			CSCoreAudioOutputDriver val4 = (CSCoreAudioOutputDriver)(object)((__instance is CSCoreAudioOutputDriver) ? __instance : null);
			if (val4 == null)
			{
				return;
			}
			WasapiOut val5 = _fOut.Invoke(val4);
			if (val5 == null)
			{
				return;
			}
			WaveFormat actualOutputFormat = val5.ActualOutputFormat;
			if (actualOutputFormat != null && AudioBridge.IsDebugLogging())
			{
				UniLog.Log($"[AudioBridge] Audio device format: {actualOutputFormat.SampleRate}Hz, {actualOutputFormat.Channels}ch, {actualOutputFormat.BitsPerSample}bit", false);
			}
			MMDevice device = val5.Device;
			if (device != (MMDevice)null)
			{
				if (AudioBridge.IsDebugLogging())
				{
					UniLog.Log("[AudioBridge] Audio device: " + device.FriendlyName, false);
				}
				_currentAudioOutput = val5;
				if (AudioBridge.IsEnabled() && AudioBridge.GetCurrentMuteTarget() == MuteTarget.Host)
				{
					UniLog.Log("[AudioBridge] Applying Host mute configuration on audio start", false);
					ApplyMuteConfiguration(shouldMute: true);
				}
			}
		}
		catch (global::System.Exception ex2)
		{
			AudioBridge.Err("Audio start error: " + ex2.Message);
		}
	}

	// Applies the mute configuration only if a device is already available;
	// returns false so the caller can retry later when no device exists yet.
	internal static bool TryApplyMuteConfiguration(bool shouldMute)
	{
		if (_currentAudioOutput == null || _currentAudioOutput.Device == (MMDevice)null)
		{
			UniLog.Log("[AudioBridge] No audio device available to mute/unmute yet", false);
			return false;
		}
		ApplyMuteConfiguration(shouldMute);
		return true;
	}

	// Two-level host muting: always sets the buffer-zeroing flag, then additionally
	// tries to set this process's WASAPI audio session volume to 0 (or back to 1).
	// Session-level failures are logged but non-fatal: buffer muting still works.
	internal static void ApplyMuteConfiguration(bool shouldMute)
	{
		_shouldMuteHostAudio = shouldMute;
		UniLog.Log("[AudioBridge] Host audio buffer muting " + (shouldMute ? "enabled" : "disabled"), false);
		if (_currentAudioOutput == null || _currentAudioOutput.Device == (MMDevice)null)
		{
			UniLog.Log("[AudioBridge] No audio device available for session muting (buffer muting will still work)", false);
			return;
		}
		try
		{
			AudioSessionManager2 val = AudioSessionManager2.FromMMDevice(_currentAudioOutput.Device);
			try
			{
				AudioSessionEnumerator sessionEnumerator = val.GetSessionEnumerator();
				try
				{
					// Find the audio session belonging to this process by PID.
					uint id = (uint)Process.GetCurrentProcess().Id;
					bool flag = false;
					global::System.Collections.Generic.IEnumerator<AudioSessionControl> enumerator = sessionEnumerator.GetEnumerator();
					try
					{
						while (((global::System.Collections.IEnumerator)enumerator).MoveNext())
						{
							AudioSessionControl current = enumerator.Current;
							AudioSessionControl2 val2 = ((ComObject)current).QueryInterface<AudioSessionControl2>();
							try
							{
								if (val2.ProcessID != id)
								{
									continue;
								}
								flag = true;
								SimpleAudioVolume val3 = ((ComObject)current).QueryInterface<SimpleAudioVolume>();
								try
								{
									val3.MasterVolume = (shouldMute ? 0f : 1f);
									_isMuted = shouldMute;
									if (AudioBridge.IsDebugLogging())
									{
										UniLog.Log($"[AudioBridge] Host audio session found for PID {id}, {(shouldMute ? "muted" : "unmuted")} at session level", false);
									}
									if (AudioBridge.IsDebugLogging())
									{
										try
										{
											string sessionIdentifier = val2.SessionIdentifier;
											string displayName = ((AudioSessionControl)val2).DisplayName;
											UniLog.Log("[AudioBridge] Session details - ID: " + sessionIdentifier + ", Name: " + displayName, false);
										}
										catch
										{
										}
									}
								}
								finally
								{
									((global::System.IDisposable)val3)?.Dispose();
								}
								break;
							}
							finally
							{
								((global::System.IDisposable)val2)?.Dispose();
							}
						}
					}
					finally
					{
						((global::System.IDisposable)enumerator)?.Dispose();
					}
					if (!flag && AudioBridge.IsDebugLogging())
					{
						UniLog.Log($"[AudioBridge] No audio session found for PID {id}, using buffer-level muting only", false);
					}
				}
				finally
				{
					((global::System.IDisposable)sessionEnumerator)?.Dispose();
				}
			}
			finally
			{
				((global::System.IDisposable)val)?.Dispose();
			}
		}
		catch (global::System.Exception ex)
		{
			AudioBridge.Err("Session muting failed (buffer muting still active): " + ex.Message);
		}
	}

	// Clears per-run state (bus init, mute, one-shot log flags, chunk counter) so
	// a restarted audio driver re-initializes cleanly.
	internal static void ResetState()
	{
		UniLog.Log("[AudioBridge] Resetting audio writer state", false);
		_busInitialized = false;
		_isMuted = false;
		_loggedFormat = false;
		_loggedByte = false;
		_loggedAuto = false;
		_writeCounter = 0;
	}
}
// Central plugin identity constants, shared by the [ResonitePlugin] attribute values.
public static class PluginMetadata
{
	// BepInEx plugin GUID; must stay stable across versions.
	public const string GUID = "knackrack615.audiobridge";

	public const string NAME = "AudioBridge";

	public const string VERSION = "2.0.0";

	public const string AUTHORS = "Knackrack615";

	public const string REPOSITORY_URL = "https://github.com/knackrack615/AudioBridge";
}

Renderer/BepInEx/plugins/AudioBridgeRenderer/AudioBridgeRenderer.dll

Decompiled 2 months ago
using System;
using System.Diagnostics;
using System.IO.MemoryMappedFiles;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.Versioning;
using System.Text;
using System.Threading;
using BepInEx;
using CSCore;
using CSCore.CoreAudioAPI;
using CSCore.SoundOut;
using CSCore.Win32;
using UnityEngine;

[assembly: CompilationRelaxations(8)]
[assembly: RuntimeCompatibility(WrapNonExceptionThrows = true)]
[assembly: Debuggable(DebuggableAttribute.DebuggingModes.Default | DebuggableAttribute.DebuggingModes.DisableOptimizations | DebuggableAttribute.DebuggingModes.IgnoreSymbolStoreSequencePoints | DebuggableAttribute.DebuggingModes.EnableEditAndContinue)]
[assembly: TargetFramework(".NETFramework,Version=v4.7.2", FrameworkDisplayName = ".NET Framework 4.7.2")]
[assembly: AssemblyVersion("0.0.0.0")]
namespace AudioBridge.Renderer;

// Which side of the bridge should silence its local playback. The value is read
// from the shared-memory header (offset 16) via ShadowBusReader.GetMuteTarget;
// the numeric values must match the host-side enum of the same name.
public enum MuteTarget
{
	// Neither process mutes itself.
	None,
	// The host process mutes its own output.
	Host,
	// The renderer process mutes its own output (also the fallback for invalid values).
	Renderer
}
[BepInPlugin("com.knackrack615.AudioBridgeRenderer", "AudioBridge Renderer", "1.0.0")]
public class AudioBridgeRendererPlugin : BaseUnityPlugin
{
	private ShadowAudioPlayer _audioPlayer;

	private bool _initialized = false;

	// Unity lifecycle hook: start the shared-memory audio reader on plugin load.
	private void Awake()
	{
		Logger.LogInfo((object)"[AudioBridge.Renderer] Initializing audio renderer plugin");
		try
		{
			InitializeAudio();
		}
		catch (Exception arg)
		{
			Logger.LogError((object)$"Failed in Awake: {arg}");
		}
	}

	// Creates and starts the ShadowAudioPlayer exactly once; later calls are no-ops.
	private void InitializeAudio()
	{
		if (_initialized)
		{
			return;
		}
		_initialized = true;
		Logger.LogInfo((object)"[AudioBridge.Renderer] Starting shared memory audio reader");
		try
		{
			ShadowAudioPlayer player = new ShadowAudioPlayer();
			player.Start();
			_audioPlayer = player;
			Logger.LogInfo((object)"[AudioBridge.Renderer] Audio reader started successfully");
		}
		catch (Exception arg)
		{
			Logger.LogError((object)$"[AudioBridge.Renderer] Failed to start audio reader: {arg}");
		}
	}

	// Unity lifecycle hook: stop playback and release shared-memory handles.
	private void OnDestroy()
	{
		if (_audioPlayer != null)
		{
			_audioPlayer.Stop();
		}
	}
}
// Renderer-side playback engine: a background thread connects to the host's
// shared-memory ring, plays its samples through a dedicated WasapiOut, and
// keeps polling the header for enable/mute changes, reconnecting as needed.
public class ShadowAudioPlayer
{
	private WasapiOut _audioOut;

	private ShadowBusReader _busReader;

	private Thread _audioThread;

	// Loop-control flag; cleared by Stop() to end the thread.
	private bool _running;

	// Spawns the background audio thread (does not block the caller).
	public void Start()
	{
		_audioThread = new Thread(AudioThreadMain)
		{
			IsBackground = true
		};
		_audioThread.Start();
	}

	// Main loop. Outer while: (re)connect to shared memory, wait for sharing to be
	// enabled, start playback. Inner while: poll every 5 s for disable, mute-target
	// changes, and stalled playback. The finally block always tears down the
	// current WasapiOut/reader before retrying. The IL_0412 label is a decompiler
	// artifact for the shared "after cleanup" continuation.
	private void AudioThreadMain()
	{
		Debug.Log((object)"[AudioBridge.Renderer] Audio thread starting - will monitor for audio sharing");
		_running = true;
		bool flag = false;
		while (_running)
		{
			try
			{
				_busReader = new ShadowBusReader();
				bool flag2 = false;
				// Up to 10 connection attempts, 500 ms apart.
				for (int i = 0; i < 10; i++)
				{
					if (flag2)
					{
						break;
					}
					if (_busReader.TryConnect())
					{
						flag2 = true;
						break;
					}
					Thread.Sleep(500);
				}
				if (!flag2)
				{
					Debug.Log((object)"[AudioBridge.Renderer] Shared memory not available, will retry...");
					_busReader.Dispose();
					_busReader = null;
					Thread.Sleep(2000);
					continue;
				}
				if (!_busReader.IsEnabled())
				{
					Debug.Log((object)"[AudioBridge.Renderer] Audio sharing is disabled, waiting for it to be enabled...");
					_busReader.Dispose();
					_busReader = null;
					Thread.Sleep(2000);
					continue;
				}
				Debug.Log((object)"[AudioBridge.Renderer] Connected and audio sharing is enabled!");
				// Skip any stale samples by jumping the read index to the write index.
				_busReader.SyncWithWriter();
				var (num, num2) = _busReader.GetFormat();
				Debug.Log((object)$"[AudioBridge.Renderer] Audio format: {num}Hz, {num2} channels");
				string sessionId = _busReader.GetSessionId();
				Debug.Log((object)("[AudioBridge.Renderer] Using SessionID: " + (sessionId ?? "none")));
				ShadowAudioSource shadowAudioSource = new ShadowAudioSource(_busReader, num, num2);
				// Reuse the host's session GUID when one was published; otherwise legacy mode.
				if (!string.IsNullOrEmpty(sessionId) && Guid.TryParse(sessionId, out var result))
				{
					_audioOut = new WasapiOut(false, (AudioClientShareMode)0, 20, result, false);
					Debug.Log((object)$"[AudioBridge.Renderer] Created WasapiOut with SessionID: {result} (crossProcess: false for independent muting)");
				}
				else
				{
					_audioOut = new WasapiOut(false, (AudioClientShareMode)0, 20, default(Guid), false);
					Debug.Log((object)"[AudioBridge.Renderer] Created WasapiOut without SessionID (legacy mode)");
				}
				_audioOut.Initialize(FluentExtensions.ToWaveSource((ISampleSource)(object)shadowAudioSource));
				_audioOut.Play();
				Debug.Log((object)"[AudioBridge.Renderer] Audio playback started");
				MuteTarget muteTarget = _busReader.GetMuteTarget();
				Debug.Log((object)$"[AudioBridge.Renderer] Mute target from host: {muteTarget}");
				if (muteTarget == MuteTarget.Renderer)
				{
					MuteCurrentProcessAudio(_audioOut.Device);
				}
				_running = true;
				MuteTarget muteTarget2 = muteTarget;
				// Monitoring loop: runs until Stop() or the host disables sharing.
				while (_running)
				{
					Thread.Sleep(5000);
					if (!_busReader.IsEnabled())
					{
						Debug.Log((object)"[AudioBridge.Renderer] Audio sharing disabled by host, stopping playback");
						flag = true;
						break;
					}
					// React to host-side mute-target changes.
					MuteTarget muteTarget3 = _busReader.GetMuteTarget();
					if (muteTarget3 != muteTarget2)
					{
						Debug.Log((object)$"[AudioBridge.Renderer] Mute target changed to: {muteTarget3}");
						if (muteTarget3 == MuteTarget.Renderer)
						{
							MuteCurrentProcessAudio(_audioOut.Device);
						}
						else if (muteTarget2 == MuteTarget.Renderer)
						{
							UnmuteCurrentProcessAudio(_audioOut.Device);
						}
						muteTarget2 = muteTarget3;
					}
					// Restart playback if WASAPI stopped on its own.
					WasapiOut audioOut = _audioOut;
					PlaybackState val = (PlaybackState)((audioOut != null) ? ((int)audioOut.PlaybackState) : 0);
					if ((int)val != 0 || !_running)
					{
						continue;
					}
					Debug.LogWarning((object)"[AudioBridge.Renderer] Audio playback stopped, attempting restart");
					try
					{
						WasapiOut audioOut2 = _audioOut;
						if (audioOut2 != null)
						{
							audioOut2.Play();
						}
					}
					catch (Exception ex)
					{
						Debug.LogError((object)("[AudioBridge.Renderer] Failed to restart playback: " + ex.Message));
					}
				}
				goto IL_0412;
			}
			catch (Exception arg)
			{
				Debug.LogError((object)$"[AudioBridge.Renderer] Audio thread error: {arg}");
				goto IL_0412;
			}
			finally
			{
				// Always release the output device and shared-memory handles before
				// looping, regardless of how the iteration ended.
				if (_audioOut != null)
				{
					Debug.Log((object)"[AudioBridge.Renderer] Cleaning up audio output...");
					try
					{
						_audioOut.Stop();
					}
					catch
					{
					}
					try
					{
						_audioOut.Dispose();
					}
					catch
					{
					}
					_audioOut = null;
				}
				if (_busReader != null)
				{
					try
					{
						_busReader.Dispose();
					}
					catch
					{
					}
					_busReader = null;
				}
			}
			IL_0412:
			if (!_running)
			{
				Debug.Log((object)"[AudioBridge.Renderer] Audio thread exiting (Stop() was called)");
				break;
			}
			if (flag)
			{
				Debug.Log((object)"[AudioBridge.Renderer] Audio sharing was disabled, waiting for re-enable...");
				flag = false;
			}
			else
			{
				Debug.Log((object)"[AudioBridge.Renderer] Will retry connection in 2 seconds...");
			}
			Thread.Sleep(2000);
		}
	}

	// Signals the thread to exit, stops/disposes the output, and waits up to 1 s
	// for the thread to finish.
	// NOTE(review): this runs concurrently with the audio thread's own finally-block
	// cleanup of the same _audioOut/_busReader — looks racy; confirm intended.
	public void Stop()
	{
		_running = false;
		WasapiOut audioOut = _audioOut;
		if (audioOut != null)
		{
			audioOut.Stop();
		}
		WasapiOut audioOut2 = _audioOut;
		if (audioOut2 != null)
		{
			audioOut2.Dispose();
		}
		_busReader?.Dispose();
		_audioThread?.Join(1000);
	}

	// Sets this process's WASAPI session volume to 0 on the given device, so local
	// playback is silent while the stream remains available for capture.
	private void MuteCurrentProcessAudio(MMDevice device)
	{
		try
		{
			Debug.Log((object)"[AudioBridge.Renderer] Muting local audio playback (audio still available for recording)");
			AudioSessionManager2 val = AudioSessionManager2.FromMMDevice(device);
			try
			{
				AudioSessionEnumerator sessionEnumerator = val.GetSessionEnumerator();
				try
				{
					// Locate this process's session by PID.
					uint id = (uint)Process.GetCurrentProcess().Id;
					foreach (AudioSessionControl item in sessionEnumerator)
					{
						AudioSessionControl2 val2 = ((ComObject)item).QueryInterface<AudioSessionControl2>();
						try
						{
							if (val2.ProcessID == id)
							{
								SimpleAudioVolume val3 = ((ComObject)item).QueryInterface<SimpleAudioVolume>();
								try
								{
									val3.MasterVolume = 0f;
									Debug.Log((object)$"[AudioBridge.Renderer] Successfully muted audio session for process {id}");
									break;
								}
								finally
								{
									((IDisposable)val3)?.Dispose();
								}
							}
						}
						finally
						{
							((IDisposable)val2)?.Dispose();
						}
					}
				}
				finally
				{
					((IDisposable)sessionEnumerator)?.Dispose();
				}
			}
			finally
			{
				((IDisposable)val)?.Dispose();
			}
		}
		catch (Exception ex)
		{
			Debug.LogWarning((object)("[AudioBridge.Renderer] Failed to mute audio session: " + ex.Message));
			Debug.LogWarning((object)"[AudioBridge.Renderer] Audio will play normally (not muted)");
		}
	}

	// Restores this process's WASAPI session volume to 1 on the given device.
	private void UnmuteCurrentProcessAudio(MMDevice device)
	{
		try
		{
			Debug.Log((object)"[AudioBridge.Renderer] Unmuting local audio playback");
			AudioSessionManager2 val = AudioSessionManager2.FromMMDevice(device);
			try
			{
				AudioSessionEnumerator sessionEnumerator = val.GetSessionEnumerator();
				try
				{
					uint id = (uint)Process.GetCurrentProcess().Id;
					foreach (AudioSessionControl item in sessionEnumerator)
					{
						AudioSessionControl2 val2 = ((ComObject)item).QueryInterface<AudioSessionControl2>();
						try
						{
							if (val2.ProcessID == id)
							{
								SimpleAudioVolume val3 = ((ComObject)item).QueryInterface<SimpleAudioVolume>();
								try
								{
									val3.MasterVolume = 1f;
									Debug.Log((object)$"[AudioBridge.Renderer] Successfully unmuted audio session for process {id}");
									break;
								}
								finally
								{
									((IDisposable)val3)?.Dispose();
								}
							}
						}
						finally
						{
							((IDisposable)val2)?.Dispose();
						}
					}
				}
				finally
				{
					((IDisposable)sessionEnumerator)?.Dispose();
				}
			}
			finally
			{
				((IDisposable)val)?.Dispose();
			}
		}
		catch (Exception ex)
		{
			Debug.LogWarning((object)("[AudioBridge.Renderer] Failed to unmute audio session: " + ex.Message));
		}
	}
}
// Consumer side of the cross-process audio bus. Opens the memory-mapped file and
// mutex created by the host and reads its layout:
//   offset 0:  write index (uint)      offset 4:  read index (uint)
//   offset 8:  sample rate (int)       offset 12: channel count (int)
//   offset 16: mute target (int)       offset 20: enabled flag (int, 1 = on)
//   offset 24: session id (36 ASCII bytes, NUL-padded)
//   offset 64: ring data, 2,097,152 bytes of interleaved 32-bit float samples.
public class ShadowBusReader : IDisposable
{
	private const string MMF_NAME = "AudioBridge_SharedMemory";

	private const string MUTEX_NAME = "AudioBridge_SharedMemory_Mutex";

	private const int HEADER_BYTES = 64;

	private const int RING_BYTES = 2097152;

	private MemoryMappedFile _mmf;

	private MemoryMappedViewAccessor _view;

	private Mutex _mutex;

	private long _totalSamplesRead;

	// Attempts to open the host's existing mapping and mutex. Returns false (and
	// cleans up any partial handles) when the host has not created them yet.
	public bool TryConnect()
	{
		try
		{
			_mmf = MemoryMappedFile.OpenExisting("AudioBridge_SharedMemory", MemoryMappedFileRights.ReadWrite);
			_mutex = Mutex.OpenExisting("AudioBridge_SharedMemory_Mutex");
			_view = _mmf.CreateViewAccessor(0L, 2097216L, MemoryMappedFileAccess.ReadWrite);
			return true;
		}
		catch
		{
			Dispose();
			return false;
		}
	}

	// Reads the published (sampleRate, channels); 48 kHz stereo fallback when
	// unconnected or the stored values are non-positive.
	public (int sampleRate, int channels) GetFormat()
	{
		if (_mutex == null || _view == null)
		{
			return (48000, 2);
		}
		_mutex.WaitOne();
		try
		{
			int num = _view.ReadInt32(8L);
			int num2 = _view.ReadInt32(12L);
			return ((num > 0) ? num : 48000, (num2 > 0) ? num2 : 2);
		}
		finally
		{
			_mutex.ReleaseMutex();
		}
	}

	// Reads the NUL-terminated ASCII session id from offset 24; null when
	// unconnected or the field is empty.
	public string GetSessionId()
	{
		if (_mutex == null || _view == null)
		{
			return null;
		}
		_mutex.WaitOne();
		try
		{
			byte[] array = new byte[36];
			_view.ReadArray(24L, array, 0, 36);
			int num = Array.IndexOf(array, (byte)0);
			if (num == -1)
			{
				num = 36;
			}
			if (num == 0)
			{
				return null;
			}
			return Encoding.ASCII.GetString(array, 0, num);
		}
		finally
		{
			_mutex.ReleaseMutex();
		}
	}

	// Reads the mute target from offset 16; out-of-range or unconnected values
	// default to MuteTarget.Renderer.
	public MuteTarget GetMuteTarget()
	{
		if (_mutex == null || _view == null)
		{
			return MuteTarget.Renderer;
		}
		_mutex.WaitOne();
		try
		{
			int num = _view.ReadInt32(16L);
			if (num < 0 || num > 2)
			{
				return MuteTarget.Renderer;
			}
			return (MuteTarget)num;
		}
		finally
		{
			_mutex.ReleaseMutex();
		}
	}

	// True when the host has set the enabled flag (offset 20) to 1.
	public bool IsEnabled()
	{
		if (_mutex == null || _view == null)
		{
			return false;
		}
		_mutex.WaitOne();
		try
		{
			return _view.ReadInt32(20L) == 1;
		}
		finally
		{
			_mutex.ReleaseMutex();
		}
	}

	// Jumps the read index to the current write index, discarding any samples
	// buffered before this reader connected.
	public void SyncWithWriter()
	{
		if (_mutex == null || _view == null)
		{
			return;
		}
		_mutex.WaitOne();
		try
		{
			uint num = _view.ReadUInt32(0L);
			_view.Write(4L, num);
			Debug.Log((object)$"[AudioBridge.Renderer] Synced read index to write index: {num}");
		}
		finally
		{
			_mutex.ReleaseMutex();
		}
	}

	// Drains up to `count` floats from the ring into buffer[offset..]; returns the
	// number of floats copied (0 when empty). Handles ring wrap-around by reading
	// two segments, and advances the shared read index under the mutex.
	public int ReadFloats(float[] buffer, int offset, int count)
	{
		if (_view == null)
		{
			return 0;
		}
		int num = count * 4; // requested byte count (4 bytes per float)
		byte[] array = new byte[num];
		int num2 = 0;
		_mutex.WaitOne();
		try
		{
			uint num3 = _view.ReadUInt32(0L); // write index
			uint num4 = _view.ReadUInt32(4L); // read index
			// Bytes currently available in the ring.
			int num5 = (int)((2097152 + num3 - num4) % 2097152);
			if (num5 <= 0)
			{
				return 0;
			}
			int num6 = Math.Min(num5, num);
			int num7 = (int)(64 + num4);
			// First segment: up to the physical end of the ring.
			int num8 = Math.Min(num6, (int)(2097152 - num4));
			_view.ReadArray(num7, array, 0, num8);
			if (num6 > num8)
			{
				// Second segment: wrap around to the start of the ring data area.
				int count2 = num6 - num8;
				_view.ReadArray(64L, array, num8, count2);
			}
			num4 = (uint)((num4 + num6) % 2097152);
			_view.Write(4L, num4);
			num2 = num6;
		}
		finally
		{
			_mutex.ReleaseMutex();
		}
		// Convert the raw bytes into the caller's float buffer outside the lock.
		int num9 = num2 / 4;
		Buffer.BlockCopy(array, 0, buffer, offset, num2);
		_totalSamplesRead += num9;
		return num9;
	}

	// Releases the view, mutex and mapping; safe to call repeatedly.
	public void Dispose()
	{
		_view?.Dispose();
		_mutex?.Dispose();
		_mmf?.Dispose();
		_view = null;
		_mutex = null;
		_mmf = null;
	}
}
// CSCore sample source that feeds WasapiOut from the shared-memory ring buffer.
// Presents an endless 32-bit float stream; underruns are padded with silence so
// playback never stalls.
public class ShadowAudioSource : ISampleSource, IReadableAudioSource<float>, IAudioSource, IDisposable
{
	private readonly ShadowBusReader _reader;

	private readonly WaveFormat _format;

	// Format advertised to CSCore: 32-bit IEEE float at the negotiated rate/channels.
	public WaveFormat WaveFormat => _format;

	// Live stream semantics: no seeking, no meaningful position or length.
	public bool CanSeek => false;

	public long Position
	{
		get => 0L;
		set
		{
		}
	}

	public long Length => 0L;

	public ShadowAudioSource(ShadowBusReader reader, int sampleRate, int channels)
	{
		_reader = reader;
		_format = new WaveFormat(sampleRate, 32, channels, (AudioEncoding)3);
	}

	// Pulls up to `count` samples from the shared ring; any shortfall (or a reader
	// failure) is filled with zeros. Always reports `count` samples delivered.
	public int Read(float[] buffer, int offset, int count)
	{
		try
		{
			int produced = _reader.ReadFloats(buffer, offset, count);
			if (produced < count)
			{
				Array.Clear(buffer, offset + produced, count - produced);
			}
		}
		catch
		{
			Array.Clear(buffer, offset, count);
		}
		return count;
	}

	public void Dispose()
	{
	}
}

Renderer/BepInEx/plugins/AudioBridgeRenderer/CSCore.dll

Decompiled 2 months ago
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Drawing;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.InteropServices.ComTypes;
using System.Runtime.Serialization;
using System.Runtime.Versioning;
using System.Security;
using System.Security.Permissions;
using System.Text;
using System.Threading;
using CSCore.Codecs.AIFF;
using CSCore.Codecs.FLAC;
using CSCore.Codecs.FLAC.Metadata;
using CSCore.Codecs.OGG;
using CSCore.Codecs.WAV;
using CSCore.CoreAudioAPI;
using CSCore.DMO;
using CSCore.DMO.Effects;
using CSCore.DSP;
using CSCore.DirectSound;
using CSCore.SoundIn;
using CSCore.SoundOut;
using CSCore.SoundOut.MMInterop;
using CSCore.Streams;
using CSCore.Streams.SampleConverter;
using CSCore.Tags.ID3;
using CSCore.Tags.ID3.Frames;
using CSCore.Utils;
using CSCore.Utils.Buffer;
using CSCore.Win32;
using NVorbis;

[assembly: AssemblyDescription(".NET Sound Library")]
[assembly: CLSCompliant(true)]
[assembly: CompilationRelaxations(8)]
[assembly: RuntimeCompatibility(WrapNonExceptionThrows = true)]
[assembly: AssemblyTitle("CSCore")]
[assembly: InternalsVisibleTo("CSCore.Test, PublicKey=0024000004800000940000000602000000240000525341310004000001000100237314800493cdf9aabec35955e8928e3d5416ad1d223e8914e0e025ff9095b21bbb696235b9d3886b0edec26107ca0af49c3170fc08d117e8e9265ab371b157f2c2b27843d97c1d312850d10d1272c1d46d18f02ac56f46676cbe7946049b1b344db7154d35788fee27b3d581bd7d43e41813b10fd360a3fbfab9199d9e86a4")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("CSCore")]
[assembly: AssemblyCopyright("Florian R.")]
[assembly: AssemblyTrademark("")]
[assembly: ComVisible(false)]
[assembly: Guid("7939b0f4-fed9-4dcf-bb59-a17505864c55")]
[assembly: TargetFramework(".NETFramework,Version=v4.6.2", FrameworkDisplayName = ".NET Framework 4.6.2")]
[assembly: AssemblyFileVersion("1.2.1.2")]
[assembly: SecurityPermission(SecurityAction.RequestMinimum, SkipVerification = true)]
[assembly: AssemblyVersion("1.2.1.2")]
[module: UnverifiableCode]
namespace CSCore
{
	/// <summary>
	/// Media-subtype GUIDs for audio formats. Every entry except
	/// <see cref="MediaTypeAudio"/> follows the standard WAVE base GUID pattern
	/// {XXXXXXXX-0000-0010-8000-00AA00389B71}, where XXXXXXXX is the 16-bit
	/// WAVE format tag (see <see cref="AudioEncoding"/>) in the Guid's first field.
	/// </summary>
	public static class AudioSubTypes
	{
		public static readonly Guid Unknown = new Guid(0, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Pcm = new Guid(1, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Adpcm = new Guid(2, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid IeeeFloat = new Guid(3, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Vselp = new Guid(4, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid IbmCvsd = new Guid(5, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid ALaw = new Guid(6, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid MuLaw = new Guid(7, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Dts = new Guid(8, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Drm = new Guid(9, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WmaVoice9 = new Guid(10, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid OkiAdpcm = new Guid(16, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		// DviAdpcm and ImaAdpcm intentionally share format tag 17 (same codec,
		// two historical names).
		public static readonly Guid DviAdpcm = new Guid(17, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid ImaAdpcm = new Guid(17, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid MediaspaceAdpcm = new Guid(18, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid SierraAdpcm = new Guid(19, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid G723Adpcm = new Guid(20, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid DigiStd = new Guid(21, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid DigiFix = new Guid(22, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid DialogicOkiAdpcm = new Guid(23, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid MediaVisionAdpcm = new Guid(24, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid CUCodec = new Guid(25, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid YamahaAdpcm = new Guid(32, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid SonarC = new Guid(33, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid DspGroupTrueSpeech = new Guid(34, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid EchoSpeechCorporation1 = new Guid(35, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid AudioFileAf36 = new Guid(36, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Aptx = new Guid(37, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid AudioFileAf10 = new Guid(38, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Prosody1612 = new Guid(39, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Lrc = new Guid(40, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid DolbyAc2 = new Guid(48, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Gsm610 = new Guid(49, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid MsnAudio = new Guid(50, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid AntexAdpcme = new Guid(51, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid ControlResVqlpc = new Guid(52, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid DigiReal = new Guid(53, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid DigiAdpcm = new Guid(54, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid ControlResCr10 = new Guid(55, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_NMS_VBXADPCM = new Guid(56, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_CS_IMAADPCM = new Guid(57, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_ECHOSC3 = new Guid(58, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_ROCKWELL_ADPCM = new Guid(59, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_ROCKWELL_DIGITALK = new Guid(60, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_XEBEC = new Guid(61, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_G721_ADPCM = new Guid(64, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_G728_CELP = new Guid(65, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_MSG723 = new Guid(66, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Mpeg = new Guid(80, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_RT24 = new Guid(82, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_PAC = new Guid(83, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid MpegLayer3 = new Guid(85, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_LUCENT_G723 = new Guid(89, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_CIRRUS = new Guid(96, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_ESPCM = new Guid(97, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VOXWARE = new Guid(98, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_CANOPUS_ATRAC = new Guid(99, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_G726_ADPCM = new Guid(100, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_G722_ADPCM = new Guid(101, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_DSAT_DISPLAY = new Guid(103, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VOXWARE_BYTE_ALIGNED = new Guid(105, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VOXWARE_AC8 = new Guid(112, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VOXWARE_AC10 = new Guid(113, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VOXWARE_AC16 = new Guid(114, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VOXWARE_AC20 = new Guid(115, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VOXWARE_RT24 = new Guid(116, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VOXWARE_RT29 = new Guid(117, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VOXWARE_RT29HW = new Guid(118, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VOXWARE_VR12 = new Guid(119, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VOXWARE_VR18 = new Guid(120, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VOXWARE_TQ40 = new Guid(121, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_SOFTSOUND = new Guid(128, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VOXWARE_TQ60 = new Guid(129, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_MSRT24 = new Guid(130, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_G729A = new Guid(131, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_MVI_MVI2 = new Guid(132, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_DF_G726 = new Guid(133, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_DF_GSM610 = new Guid(134, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_ISIAUDIO = new Guid(136, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_ONLIVE = new Guid(137, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_SBC24 = new Guid(145, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_DOLBY_AC3_SPDIF = new Guid(146, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_MEDIASONIC_G723 = new Guid(147, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_PROSODY_8KBPS = new Guid(148, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_ZYXEL_ADPCM = new Guid(151, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_PHILIPS_LPCBB = new Guid(152, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_PACKED = new Guid(153, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_MALDEN_PHONYTALK = new Guid(160, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Gsm = new Guid(161, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid G729 = new Guid(162, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid G723 = new Guid(163, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Acelp = new Guid(164, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid RawAac = new Guid(255, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_RHETOREX_ADPCM = new Guid(256, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_IRAT = new Guid(257, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VIVO_G723 = new Guid(273, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VIVO_SIREN = new Guid(274, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_DIGITAL_G723 = new Guid(291, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_SANYO_LD_ADPCM = new Guid(293, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_SIPROLAB_ACEPLNET = new Guid(304, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_SIPROLAB_ACELP4800 = new Guid(305, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_SIPROLAB_ACELP8V3 = new Guid(306, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_SIPROLAB_G729 = new Guid(307, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_SIPROLAB_G729A = new Guid(308, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_SIPROLAB_KELVIN = new Guid(309, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_G726ADPCM = new Guid(320, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_QUALCOMM_PUREVOICE = new Guid(336, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_QUALCOMM_HALFRATE = new Guid(337, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_TUBGSM = new Guid(341, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_MSAUDIO1 = new Guid(352, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WindowsMediaAudio = new Guid(353, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WindowsMediaAudioProfessional = new Guid(354, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WindowsMediaAudioLosseless = new Guid(355, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WindowsMediaAudioSpdif = new Guid(356, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_UNISYS_NAP_ADPCM = new Guid(368, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_UNISYS_NAP_ULAW = new Guid(369, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_UNISYS_NAP_ALAW = new Guid(370, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_UNISYS_NAP_16K = new Guid(371, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_CREATIVE_ADPCM = new Guid(512, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_CREATIVE_FASTSPEECH8 = new Guid(514, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_CREATIVE_FASTSPEECH10 = new Guid(515, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_UHER_ADPCM = new Guid(528, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_QUARTERDECK = new Guid(544, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_ILINK_VC = new Guid(560, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_RAW_SPORT = new Guid(576, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_ESST_AC3 = new Guid(577, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_IPI_HSX = new Guid(592, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_IPI_RPELP = new Guid(593, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_CS2 = new Guid(608, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_SONY_SCX = new Guid(624, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_FM_TOWNS_SND = new Guid(768, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_BTV_DIGITAL = new Guid(1024, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_QDESIGN_MUSIC = new Guid(1104, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_VME_VMPCM = new Guid(1664, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_TPC = new Guid(1665, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_OLIGSM = new Guid(4096, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_OLIADPCM = new Guid(4097, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_OLICELP = new Guid(4098, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_OLISBC = new Guid(4099, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_OLIOPR = new Guid(4100, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_LH_CODEC = new Guid(4352, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_NORRIS = new Guid(5120, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_SOUNDSPACE_MUSICOMPRESS = new Guid(5376, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid MPEG_ADTS_AAC = new Guid(5632, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid MPEG_RAW_AAC = new Guid(5633, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid MPEG_LOAS = new Guid(5634, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid NOKIA_MPEG_ADTS_AAC = new Guid(5640, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid NOKIA_MPEG_RAW_AAC = new Guid(5641, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid VODAFONE_MPEG_ADTS_AAC = new Guid(5642, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid VODAFONE_MPEG_RAW_AAC = new Guid(5643, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid MPEG_HEAAC = new Guid(5648, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_DVM = new Guid(8192, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Vorbis1 = new Guid(26447, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Vorbis2 = new Guid(26448, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Vorbis3 = new Guid(26449, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Vorbis1P = new Guid(26479, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Vorbis2P = new Guid(26480, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Vorbis3P = new Guid(26481, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		// Alias of RawAac (same tag 255).
		public static readonly Guid WAVE_FORMAT_RAW_AAC1 = new Guid(255, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		// Alias of WmaVoice9 (same tag 10).
		public static readonly Guid WAVE_FORMAT_WMAVOICE9 = new Guid(10, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid Extensible = new Guid(65534, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_DEVELOPMENT = new Guid(65535, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		public static readonly Guid WAVE_FORMAT_FLAC = new Guid(61868, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);

		// MEDIATYPE_Audio major-type GUID; the only entry not derived from a
		// WAVE format tag.
		public static readonly Guid MediaTypeAudio = new Guid("73647561-0000-0010-8000-00AA00389B71");

		/// <summary>
		/// Extracts the WAVE format tag from the first 32 bits of
		/// <paramref name="audioSubType"/> and maps it to an
		/// <see cref="AudioEncoding"/> value.
		/// </summary>
		/// <param name="audioSubType">A subtype GUID following the WAVE base-GUID pattern.</param>
		/// <returns>The matching <see cref="AudioEncoding"/> value.</returns>
		/// <exception cref="ArgumentException">The tag is not a defined <see cref="AudioEncoding"/> member.</exception>
		public static AudioEncoding EncodingFromSubType(Guid audioSubType)
		{
			byte[] value = audioSubType.ToByteArray();
			// The tag lives in the GUID's little-endian first field; the short
			// cast matches the enum's 16-bit underlying type (tags > 0x7FFF wrap
			// to their negative enum values, e.g. FLAC 0xF1AC -> -3668).
			int num = BitConverter.ToInt32(value, 0);
			if (Enum.IsDefined(typeof(AudioEncoding), (short)num))
			{
				return (AudioEncoding)num;
			}
			throw new ArgumentException("Invalid audioSubType.", "audioSubType");
		}

		/// <summary>
		/// Builds the subtype GUID for a WAVE format tag by inserting the tag
		/// into the standard WAVE base GUID.
		/// </summary>
		/// <param name="audioEncoding">A defined <see cref="AudioEncoding"/> member.</param>
		/// <returns>The corresponding subtype GUID.</returns>
		/// <exception cref="ArgumentException">The value is not a defined <see cref="AudioEncoding"/> member.</exception>
		public static Guid SubTypeFromEncoding(AudioEncoding audioEncoding)
		{
			if (Enum.IsDefined(typeof(AudioEncoding), (short)audioEncoding))
			{
				return new Guid((int)audioEncoding, 0, 16, 128, 0, 0, 170, 0, 56, 155, 113);
			}
			throw new ArgumentException("Invalid encoding.", "audioEncoding");
		}
	}
	/// <summary>
	/// 16-bit WAVE format tags (WAVEFORMATEX.wFormatTag, mmreg.h). Values must
	/// not be changed: they are a wire/file format. Several names intentionally
	/// share a value (e.g. DviAdpcm/ImaAdpcm = 17, RawAac/WAVE_FORMAT_RAW_AAC1 = 255);
	/// tags above 0x7FFF appear as negative values because the underlying type
	/// is <c>short</c> (Extensible = 0xFFFE, FLAC = 0xF1AC).
	/// </summary>
	public enum AudioEncoding : short
	{
		Unknown = 0,
		Pcm = 1,
		Adpcm = 2,
		IeeeFloat = 3,
		Vselp = 4,
		IbmCvsd = 5,
		ALaw = 6,
		MuLaw = 7,
		Dts = 8,
		Drm = 9,
		WmaVoice9 = 10,
		OkiAdpcm = 16,
		DviAdpcm = 17,
		ImaAdpcm = 17,
		MediaspaceAdpcm = 18,
		SierraAdpcm = 19,
		G723Adpcm = 20,
		DigiStd = 21,
		DigiFix = 22,
		DialogicOkiAdpcm = 23,
		MediaVisionAdpcm = 24,
		CUCodec = 25,
		YamahaAdpcm = 32,
		SonarC = 33,
		DspGroupTrueSpeech = 34,
		EchoSpeechCorporation1 = 35,
		AudioFileAf36 = 36,
		Aptx = 37,
		AudioFileAf10 = 38,
		Prosody1612 = 39,
		Lrc = 40,
		DolbyAc2 = 48,
		Gsm610 = 49,
		MsnAudio = 50,
		AntexAdpcme = 51,
		ControlResVqlpc = 52,
		DigiReal = 53,
		DigiAdpcm = 54,
		ControlResCr10 = 55,
		WAVE_FORMAT_NMS_VBXADPCM = 56,
		WAVE_FORMAT_CS_IMAADPCM = 57,
		WAVE_FORMAT_ECHOSC3 = 58,
		WAVE_FORMAT_ROCKWELL_ADPCM = 59,
		WAVE_FORMAT_ROCKWELL_DIGITALK = 60,
		WAVE_FORMAT_XEBEC = 61,
		WAVE_FORMAT_G721_ADPCM = 64,
		WAVE_FORMAT_G728_CELP = 65,
		WAVE_FORMAT_MSG723 = 66,
		Mpeg = 80,
		WAVE_FORMAT_RT24 = 82,
		WAVE_FORMAT_PAC = 83,
		MpegLayer3 = 85,
		WAVE_FORMAT_LUCENT_G723 = 89,
		WAVE_FORMAT_CIRRUS = 96,
		WAVE_FORMAT_ESPCM = 97,
		WAVE_FORMAT_VOXWARE = 98,
		WAVE_FORMAT_CANOPUS_ATRAC = 99,
		WAVE_FORMAT_G726_ADPCM = 100,
		WAVE_FORMAT_G722_ADPCM = 101,
		WAVE_FORMAT_DSAT_DISPLAY = 103,
		WAVE_FORMAT_VOXWARE_BYTE_ALIGNED = 105,
		WAVE_FORMAT_VOXWARE_AC8 = 112,
		WAVE_FORMAT_VOXWARE_AC10 = 113,
		WAVE_FORMAT_VOXWARE_AC16 = 114,
		WAVE_FORMAT_VOXWARE_AC20 = 115,
		WAVE_FORMAT_VOXWARE_RT24 = 116,
		WAVE_FORMAT_VOXWARE_RT29 = 117,
		WAVE_FORMAT_VOXWARE_RT29HW = 118,
		WAVE_FORMAT_VOXWARE_VR12 = 119,
		WAVE_FORMAT_VOXWARE_VR18 = 120,
		WAVE_FORMAT_VOXWARE_TQ40 = 121,
		WAVE_FORMAT_SOFTSOUND = 128,
		WAVE_FORMAT_VOXWARE_TQ60 = 129,
		WAVE_FORMAT_MSRT24 = 130,
		WAVE_FORMAT_G729A = 131,
		WAVE_FORMAT_MVI_MVI2 = 132,
		WAVE_FORMAT_DF_G726 = 133,
		WAVE_FORMAT_DF_GSM610 = 134,
		WAVE_FORMAT_ISIAUDIO = 136,
		WAVE_FORMAT_ONLIVE = 137,
		WAVE_FORMAT_SBC24 = 145,
		WAVE_FORMAT_DOLBY_AC3_SPDIF = 146,
		WAVE_FORMAT_MEDIASONIC_G723 = 147,
		WAVE_FORMAT_PROSODY_8KBPS = 148,
		WAVE_FORMAT_ZYXEL_ADPCM = 151,
		WAVE_FORMAT_PHILIPS_LPCBB = 152,
		WAVE_FORMAT_PACKED = 153,
		WAVE_FORMAT_MALDEN_PHONYTALK = 160,
		Gsm = 161,
		G729 = 162,
		G723 = 163,
		Acelp = 164,
		RawAac = 255,
		WAVE_FORMAT_RHETOREX_ADPCM = 256,
		WAVE_FORMAT_IRAT = 257,
		WAVE_FORMAT_VIVO_G723 = 273,
		WAVE_FORMAT_VIVO_SIREN = 274,
		WAVE_FORMAT_DIGITAL_G723 = 291,
		WAVE_FORMAT_SANYO_LD_ADPCM = 293,
		WAVE_FORMAT_SIPROLAB_ACEPLNET = 304,
		WAVE_FORMAT_SIPROLAB_ACELP4800 = 305,
		WAVE_FORMAT_SIPROLAB_ACELP8V3 = 306,
		WAVE_FORMAT_SIPROLAB_G729 = 307,
		WAVE_FORMAT_SIPROLAB_G729A = 308,
		WAVE_FORMAT_SIPROLAB_KELVIN = 309,
		WAVE_FORMAT_G726ADPCM = 320,
		WAVE_FORMAT_QUALCOMM_PUREVOICE = 336,
		WAVE_FORMAT_QUALCOMM_HALFRATE = 337,
		WAVE_FORMAT_TUBGSM = 341,
		WAVE_FORMAT_MSAUDIO1 = 352,
		WindowsMediaAudio = 353,
		WindowsMediaAudioProfessional = 354,
		WindowsMediaAudioLosseless = 355,
		WindowsMediaAudioSpdif = 356,
		WAVE_FORMAT_UNISYS_NAP_ADPCM = 368,
		WAVE_FORMAT_UNISYS_NAP_ULAW = 369,
		WAVE_FORMAT_UNISYS_NAP_ALAW = 370,
		WAVE_FORMAT_UNISYS_NAP_16K = 371,
		WAVE_FORMAT_CREATIVE_ADPCM = 512,
		WAVE_FORMAT_CREATIVE_FASTSPEECH8 = 514,
		WAVE_FORMAT_CREATIVE_FASTSPEECH10 = 515,
		WAVE_FORMAT_UHER_ADPCM = 528,
		WAVE_FORMAT_QUARTERDECK = 544,
		WAVE_FORMAT_ILINK_VC = 560,
		WAVE_FORMAT_RAW_SPORT = 576,
		WAVE_FORMAT_ESST_AC3 = 577,
		WAVE_FORMAT_IPI_HSX = 592,
		WAVE_FORMAT_IPI_RPELP = 593,
		WAVE_FORMAT_CS2 = 608,
		WAVE_FORMAT_SONY_SCX = 624,
		WAVE_FORMAT_FM_TOWNS_SND = 768,
		WAVE_FORMAT_BTV_DIGITAL = 1024,
		WAVE_FORMAT_QDESIGN_MUSIC = 1104,
		WAVE_FORMAT_VME_VMPCM = 1664,
		WAVE_FORMAT_TPC = 1665,
		WAVE_FORMAT_OLIGSM = 4096,
		WAVE_FORMAT_OLIADPCM = 4097,
		WAVE_FORMAT_OLICELP = 4098,
		WAVE_FORMAT_OLISBC = 4099,
		WAVE_FORMAT_OLIOPR = 4100,
		WAVE_FORMAT_LH_CODEC = 4352,
		WAVE_FORMAT_NORRIS = 5120,
		WAVE_FORMAT_SOUNDSPACE_MUSICOMPRESS = 5376,
		MPEG_ADTS_AAC = 5632,
		MPEG_RAW_AAC = 5633,
		MPEG_LOAS = 5634,
		NOKIA_MPEG_ADTS_AAC = 5640,
		NOKIA_MPEG_RAW_AAC = 5641,
		VODAFONE_MPEG_ADTS_AAC = 5642,
		VODAFONE_MPEG_RAW_AAC = 5643,
		MPEG_HEAAC = 5648,
		WAVE_FORMAT_DVM = 8192,
		Vorbis1 = 26447,
		Vorbis2 = 26448,
		Vorbis3 = 26449,
		Vorbis1P = 26479,
		Vorbis2P = 26480,
		Vorbis3P = 26481,
		WAVE_FORMAT_RAW_AAC1 = 255,
		WAVE_FORMAT_WMAVOICE9 = 10,
		Extensible = -2,
		WAVE_FORMAT_DEVELOPMENT = -1,
		WAVE_FORMAT_FLAC = -3668
	}
	/// <summary>
	/// Speaker-position flags (dwChannelMask of WAVEFORMATEXTENSIBLE). Each bit
	/// marks one physical speaker; combine with bitwise OR.
	/// </summary>
	[Flags]
	public enum ChannelMask
	{
		SpeakerFrontLeft = 1,
		SpeakerFrontRight = 2,
		SpeakerFrontCenter = 4,
		SpeakerLowFrequency = 8,
		SpeakerBackLeft = 0x10,
		SpeakerBackRight = 0x20,
		SpeakerFrontLeftOfCenter = 0x40,
		SpeakerFrontRightOfCenter = 0x80,
		SpeakerBackCenter = 0x100,
		SpeakerSideLeft = 0x200,
		SpeakerSideRight = 0x400,
		SpeakerTopCenter = 0x800,
		SpeakerTopFrontLeft = 0x1000,
		SpeakerTopFrontCenter = 0x2000,
		SpeakerTopFrontRight = 0x4000,
		SpeakerTopBackLeft = 0x8000,
		SpeakerTopBackCenter = 0x10000,
		SpeakerTopBackRight = 0x20000
	}
	/// <summary>
	/// Ready-made <see cref="ChannelMask"/> combinations for common speaker
	/// layouts (mono, stereo, quad, 5.1 rear/side, 7.1).
	/// </summary>
	public static class ChannelMasks
	{
		public const ChannelMask MonoMask = ChannelMask.SpeakerFrontCenter;

		public const ChannelMask StereoMask = ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight;

		public const ChannelMask QuadraphonicMask = ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight;

		public const ChannelMask FiveDotOneWithRearMask = ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight;

		public const ChannelMask FiveDotOneWithSideMask = ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight;

		public const ChannelMask SevenDotOneMask = ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight;
	}
	public static class FluentExtensions
	{
		private class EofTrackingWaveSource : WaveAggregatorBase
		{
			private readonly Action<IWaveSource> _action;

			private bool _eofReached;

			public EofTrackingWaveSource(IWaveSource source, Action<IWaveSource> action)
				: base(source)
			{
				_action = action ?? throw new ArgumentNullException("action");
			}

			public override int Read(byte[] buffer, int offset, int count)
			{
				int num = base.Read(buffer, offset, count);
				if (num <= 0 && count - offset > WaveFormat.BlockAlign)
				{
					if (!_eofReached)
					{
						_eofReached = true;
						_action(BaseSource);
					}
				}
				else
				{
					_eofReached = false;
				}
				return num;
			}
		}

		private class EofTrackingSampleSource : SampleAggregatorBase
		{
			private readonly Action<ISampleSource> _action;

			private bool _eofReached;

			public EofTrackingSampleSource(ISampleSource source, Action<ISampleSource> action)
				: base(source)
			{
				_action = action ?? throw new ArgumentNullException("action");
			}

			public override int Read(float[] buffer, int offset, int count)
			{
				int num = base.Read(buffer, offset, count);
				if (num <= 0 && offset - count > WaveFormat.Channels)
				{
					if (!_eofReached)
					{
						_eofReached = true;
						_action(BaseSource);
					}
				}
				else
				{
					_eofReached = false;
				}
				return num;
			}
		}

		public static TResult AppendSource<TInput, TResult>(this TInput input, Func<TInput, TResult> func) where TInput : IAudioSource
		{
			return func(input);
		}

		public static TResult AppendSource<TInput, TResult>(this TInput input, Func<TInput, TResult> func, out TResult outputSource) where TInput : IAudioSource
		{
			outputSource = func(input);
			return outputSource;
		}

		public static IWaveSource ChangeSampleRate(this IWaveSource input, int destinationSampleRate)
		{
			if (input == null)
			{
				throw new ArgumentNullException("input");
			}
			if (destinationSampleRate <= 0)
			{
				throw new ArgumentOutOfRangeException("destinationSampleRate");
			}
			if (input.WaveFormat.SampleRate == destinationSampleRate)
			{
				return input;
			}
			return new DmoResampler(input, destinationSampleRate);
		}

		public static ISampleSource ChangeSampleRate(this ISampleSource input, int destinationSampleRate)
		{
			if (input == null)
			{
				throw new ArgumentNullException("input");
			}
			if (destinationSampleRate <= 0)
			{
				throw new ArgumentOutOfRangeException("destinationSampleRate");
			}
			if (input.WaveFormat.SampleRate == destinationSampleRate)
			{
				return input;
			}
			return new DmoResampler(input.ToWaveSource(), destinationSampleRate).ToSampleSource();
		}

		public static IWaveSource ToStereo(this IWaveSource input)
		{
			if (input == null)
			{
				throw new ArgumentNullException("input");
			}
			if (input.WaveFormat.Channels == 2)
			{
				return input;
			}
			if (input.WaveFormat.Channels == 1)
			{
				return new MonoToStereoSource(input.ToSampleSource()).ToWaveSource(input.WaveFormat.BitsPerSample);
			}
			if (input.WaveFormat is WaveFormatExtensible waveFormatExtensible)
			{
				ChannelMask channelMask = waveFormatExtensible.ChannelMask;
				ChannelMatrix matrix = ChannelMatrix.GetMatrix(channelMask, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight);
				return new DmoChannelResampler(input, matrix);
			}
			WaveFormat waveFormat = (WaveFormat)input.WaveFormat.Clone();
			waveFormat.Channels = 2;
			return new DmoResampler(input, waveFormat);
		}

		public static ISampleSource ToStereo(this ISampleSource input)
		{
			if (input == null)
			{
				throw new ArgumentNullException("input");
			}
			if (input.WaveFormat.Channels == 2)
			{
				return input;
			}
			if (input.WaveFormat.Channels == 1)
			{
				return new MonoToStereoSource(input);
			}
			return input.ToWaveSource().ToStereo().ToSampleSource();
		}

		public static IWaveSource ToMono(this IWaveSource input)
		{
			if (input == null)
			{
				throw new ArgumentNullException("input");
			}
			if (input.WaveFormat.Channels == 1)
			{
				return input;
			}
			if (input.WaveFormat.Channels == 2)
			{
				return new StereoToMonoSource(input.ToSampleSource()).ToWaveSource(input.WaveFormat.BitsPerSample);
			}
			if (input.WaveFormat is WaveFormatExtensible waveFormatExtensible)
			{
				ChannelMask channelMask = waveFormatExtensible.ChannelMask;
				ChannelMatrix matrix = ChannelMatrix.GetMatrix(channelMask, ChannelMask.SpeakerFrontCenter);
				return new DmoChannelResampler(input, matrix);
			}
			WaveFormat waveFormat = (WaveFormat)input.WaveFormat.Clone();
			waveFormat.Channels = 1;
			return new DmoResampler(input, waveFormat);
		}

		public static ISampleSource ToMono(this ISampleSource input)
		{
			if (input == null)
			{
				throw new ArgumentNullException("input");
			}
			if (input.WaveFormat.Channels == 1)
			{
				return input;
			}
			if (input.WaveFormat.Channels == 2)
			{
				return new StereoToMonoSource(input);
			}
			return input.ToWaveSource().ToMono().ToSampleSource();
		}

		public static IWaveSource Loop(this IWaveSource input)
		{
			return new LoopStream(input)
			{
				EnableLoop = true
			};
		}

		/// <summary>Converts a sample source to a wave source with the given bit depth.</summary>
		/// <param name="sampleSource">The source to convert. Must not be null.</param>
		/// <param name="bits">Target bits per sample; must be 8, 16, 24 or 32 (32 = IEEE float).</param>
		/// <exception cref="ArgumentOutOfRangeException">Thrown for any other bit depth.</exception>
		public static IWaveSource ToWaveSource(this ISampleSource sampleSource, int bits)
		{
			if (sampleSource == null)
			{
				throw new ArgumentNullException("sampleSource");
			}
			if (bits == 8)
			{
				return new SampleToPcm8(sampleSource);
			}
			if (bits == 16)
			{
				return new SampleToPcm16(sampleSource);
			}
			if (bits == 24)
			{
				return new SampleToPcm24(sampleSource);
			}
			if (bits == 32)
			{
				return new SampleToIeeeFloat32(sampleSource);
			}
			throw new ArgumentOutOfRangeException("bits", "Must be 8, 16, 24 or 32 bits.");
		}

		/// <summary>Converts a sample source to a 32-bit IEEE-float wave source.</summary>
		/// <param name="sampleSource">The source to convert. Must not be null.</param>
		public static IWaveSource ToWaveSource(this ISampleSource sampleSource)
		{
			return sampleSource == null
				? throw new ArgumentNullException("sampleSource")
				: new SampleToIeeeFloat32(sampleSource);
		}

		/// <summary>Converts a wave source to a sample source.</summary>
		/// <param name="waveSource">The source to convert. Must not be null.</param>
		/// <returns>
		/// The original inner sample source when the wave source is itself a
		/// sample-to-PCM adapter (unwrapping instead of stacking converters);
		/// otherwise a newly created converter.
		/// </returns>
		public static ISampleSource ToSampleSource(this IWaveSource waveSource)
		{
			if (waveSource == null)
			{
				throw new ArgumentNullException("waveSource");
			}
			return waveSource switch
			{
				SampleToIeeeFloat32 ieeeAdapter => ieeeAdapter.Source,
				SampleToPcm8 pcm8Adapter => pcm8Adapter.Source,
				SampleToPcm16 pcm16Adapter => pcm16Adapter.Source,
				SampleToPcm24 pcm24Adapter => pcm24Adapter.Source,
				_ => WaveToSampleBase.CreateConverter(waveSource),
			};
		}

		/// <summary>Wraps an audio source in a thread-synchronizing wrapper.</summary>
		/// <param name="audioSource">The source to wrap. Must not be null.</param>
		public static SynchronizedWaveSource<TAudioSource, T> Synchronized<TAudioSource, T>(this TAudioSource audioSource) where TAudioSource : class, IReadableAudioSource<T>
		{
			return audioSource == null
				? throw new ArgumentNullException("audioSource")
				: new SynchronizedWaveSource<TAudioSource, T>(audioSource);
		}

		/// <summary>Invokes <paramref name="action"/> when the wave source reaches its end.</summary>
		public static IWaveSource OnEndOfStream(this IWaveSource waveSource, Action<IWaveSource> action) => new EofTrackingWaveSource(waveSource, action);

		/// <summary>Invokes <paramref name="action"/> when the sample source reaches its end.</summary>
		public static ISampleSource OnEndOfStream(this ISampleSource sampleSource, Action<ISampleSource> action) => new EofTrackingSampleSource(sampleSource, action);
	}
	/// <summary>
	/// Base class for wave sources that wrap another <see cref="IWaveSource"/> and
	/// delegate format, positioning and reads to it. Derived classes override
	/// individual members to transform the data or the positioning behavior.
	/// </summary>
	public abstract class WaveAggregatorBase : IWaveAggregator, IWaveSource, IReadableAudioSource<byte>, IAudioSource, IDisposable, IAggregator<byte, IWaveSource>
	{
		private IWaveSource _baseSource;

		// Guards the public Dispose() path against running twice.
		private bool _disposed;

		/// <summary>
		/// Whether disposing this aggregator also disposes the wrapped source.
		/// Set to true by the constructor.
		/// </summary>
		public bool DisposeBaseSource { get; set; }

		/// <summary>The wrapped source all members delegate to. Must not be set to null.</summary>
		public virtual IWaveSource BaseSource
		{
			get
			{
				return _baseSource;
			}
			set
			{
				if (value == null)
				{
					throw new ArgumentNullException("value", "BaseSource must not be null.");
				}
				_baseSource = value;
			}
		}

		/// <summary>Format of the wrapped source.</summary>
		public virtual WaveFormat WaveFormat => BaseSource.WaveFormat;

		/// <summary>Position in bytes; reads as 0 when the wrapped source is not seekable.</summary>
		public virtual long Position
		{
			get
			{
				if (!CanSeek)
				{
					return 0L;
				}
				return BaseSource.Position;
			}
			set
			{
				if (CanSeek)
				{
					// Snap down to a block (frame) boundary before seeking.
					value -= value % WaveFormat.BlockAlign;
					BaseSource.Position = value;
					return;
				}
				throw new InvalidOperationException();
			}
		}

		/// <summary>Length in bytes; 0 when the wrapped source is not seekable.</summary>
		public virtual long Length
		{
			get
			{
				if (!CanSeek)
				{
					return 0L;
				}
				return BaseSource.Length;
			}
		}

		/// <summary>Whether the wrapped source supports seeking.</summary>
		public virtual bool CanSeek => BaseSource.CanSeek;

		protected WaveAggregatorBase()
		{
			DisposeBaseSource = true;
		}

		protected WaveAggregatorBase(IWaveSource baseSource)
			: this()
		{
			if (baseSource == null)
			{
				throw new ArgumentNullException("baseSource");
			}
			_baseSource = baseSource;
		}

		/// <summary>
		/// Reads from the wrapped source. Offset and count are aligned down to a
		/// multiple of the channel count before the read is forwarded.
		/// NOTE(review): for a byte-based source one would expect alignment to
		/// WaveFormat.BlockAlign (bytes per frame) rather than Channels — confirm
		/// against the upstream CSCore implementation before changing.
		/// </summary>
		public virtual int Read(byte[] buffer, int offset, int count)
		{
			if (offset % WaveFormat.Channels != 0)
			{
				offset -= offset % WaveFormat.Channels;
			}
			if (count % WaveFormat.Channels != 0)
			{
				count -= count % WaveFormat.Channels;
			}
			return BaseSource.Read(buffer, offset, count);
		}

		public void Dispose()
		{
			if (!_disposed)
			{
				_disposed = true;
				Dispose(disposing: true);
				GC.SuppressFinalize(this);
			}
		}

		/// <summary>
		/// Disposes the wrapped source and clears the reference when
		/// <see cref="DisposeBaseSource"/> is set.
		/// </summary>
		protected virtual void Dispose(bool disposing)
		{
			if (DisposeBaseSource)
			{
				if (BaseSource != null)
				{
					BaseSource.Dispose();
				}
				_baseSource = null;
			}
		}

		~WaveAggregatorBase()
		{
			Dispose(disposing: false);
		}
	}
	/// <summary>An audio source that yields raw wave data as bytes.</summary>
	public interface IWaveSource : IReadableAudioSource<byte>, IAudioSource, IDisposable
	{
	}
	/// <summary>
	/// Describes the format of wave audio data (encoding, channels, sample rate,
	/// bit depth). Sequential layout with Pack = 2 mirrors the native WAVEFORMATEX
	/// structure, so the field order below must not be changed.
	/// </summary>
	[StructLayout(LayoutKind.Sequential, Pack = 2)]
	public class WaveFormat : ICloneable, IEquatable<WaveFormat>
	{
		private AudioEncoding _encoding;

		private short _channels;

		private int _sampleRate;

		private int _bytesPerSecond;

		private short _blockAlign;

		private short _bitsPerSample;

		private short _extraSize;

		/// <summary>Number of channels; setting recomputes the derived properties.</summary>
		public virtual int Channels
		{
			get
			{
				return _channels;
			}
			protected internal set
			{
				_channels = (short)value;
				UpdateProperties();
			}
		}

		/// <summary>Sample rate in Hz; setting recomputes the derived properties.</summary>
		public virtual int SampleRate
		{
			get
			{
				return _sampleRate;
			}
			protected internal set
			{
				_sampleRate = value;
				UpdateProperties();
			}
		}

		/// <summary>Average bytes per second (BlockAlign * SampleRate).</summary>
		public virtual int BytesPerSecond
		{
			get
			{
				return _bytesPerSecond;
			}
			protected internal set
			{
				_bytesPerSecond = value;
			}
		}

		/// <summary>Bytes per sample frame across all channels.</summary>
		public virtual int BlockAlign
		{
			get
			{
				return _blockAlign;
			}
			protected internal set
			{
				_blockAlign = (short)value;
			}
		}

		/// <summary>Bits per single-channel sample; setting recomputes the derived properties.</summary>
		public virtual int BitsPerSample
		{
			get
			{
				return _bitsPerSample;
			}
			protected internal set
			{
				_bitsPerSample = (short)value;
				UpdateProperties();
			}
		}

		/// <summary>Size in bytes of format-specific extra data following the structure.</summary>
		public virtual int ExtraSize
		{
			get
			{
				return _extraSize;
			}
			protected internal set
			{
				_extraSize = (short)value;
			}
		}

		/// <summary>Bytes per single-channel sample.</summary>
		public virtual int BytesPerSample => BitsPerSample / 8;

		/// <summary>Bytes per sample frame (BytesPerSample * Channels).</summary>
		public virtual int BytesPerBlock => BytesPerSample * Channels;

		/// <summary>The encoding (format tag) of the data.</summary>
		public virtual AudioEncoding WaveFormatTag
		{
			get
			{
				return _encoding;
			}
			protected internal set
			{
				_encoding = value;
			}
		}

		/// <summary>Creates a 44.1 kHz, 16-bit, stereo PCM format.</summary>
		public WaveFormat()
			: this(44100, 16, 2)
		{
		}

		public WaveFormat(int sampleRate, int bits, int channels)
			: this(sampleRate, bits, channels, AudioEncoding.Pcm)
		{
		}

		public WaveFormat(int sampleRate, int bits, int channels, AudioEncoding encoding)
			: this(sampleRate, bits, channels, encoding, 0)
		{
		}

		/// <exception cref="ArgumentOutOfRangeException">
		/// Thrown when sampleRate &lt; 1, bits &lt; 0 or channels &lt; 1.
		/// </exception>
		public WaveFormat(int sampleRate, int bits, int channels, AudioEncoding encoding, int extraSize)
		{
			if (sampleRate < 1)
			{
				throw new ArgumentOutOfRangeException("sampleRate");
			}
			if (bits < 0)
			{
				throw new ArgumentOutOfRangeException("bits");
			}
			if (channels < 1)
			{
				throw new ArgumentOutOfRangeException("channels", "Number of channels has to be bigger than 0.");
			}
			_sampleRate = sampleRate;
			_bitsPerSample = (short)bits;
			_channels = (short)channels;
			_encoding = encoding;
			_extraSize = (short)extraSize;
			UpdateProperties();
		}

		/// <summary>Converts a duration to a byte count, aligned down to a block boundary.</summary>
		public long MillisecondsToBytes(double milliseconds)
		{
			long num = (long)((double)BytesPerSecond / 1000.0 * milliseconds);
			return num - num % BlockAlign;
		}

		/// <summary>Converts a byte count (aligned down to a block boundary) to a duration.</summary>
		public double BytesToMilliseconds(long bytes)
		{
			bytes -= bytes % BlockAlign;
			return (double)bytes / (double)BytesPerSecond * 1000.0;
		}

		/// <summary>Structural equality over all format properties.</summary>
		public virtual bool Equals(WaveFormat other)
		{
			// Fix: IEquatable<T>.Equals must return false for a null argument
			// instead of throwing a NullReferenceException.
			if (other is null)
			{
				return false;
			}
			if (Channels == other.Channels && SampleRate == other.SampleRate && BytesPerSecond == other.BytesPerSecond && BlockAlign == other.BlockAlign && BitsPerSample == other.BitsPerSample && ExtraSize == other.ExtraSize)
			{
				return WaveFormatTag == other.WaveFormatTag;
			}
			return false;
		}

		public override string ToString()
		{
			return GetInformation().ToString();
		}

		/// <summary>Shallow copy; sufficient because all fields are value types.</summary>
		public virtual object Clone()
		{
			return MemberwiseClone();
		}

		internal virtual void SetWaveFormatTagInternal(AudioEncoding waveFormatTag)
		{
			WaveFormatTag = waveFormatTag;
		}

		internal virtual void SetBitsPerSampleAndFormatProperties(int bitsPerSample)
		{
			BitsPerSample = bitsPerSample;
			UpdateProperties();
		}

		/// <summary>Recomputes BlockAlign and BytesPerSecond from the primary fields.</summary>
		protected internal virtual void UpdateProperties()
		{
			BlockAlign = BitsPerSample / 8 * Channels;
			BytesPerSecond = BlockAlign * SampleRate;
		}

		[DebuggerStepThrough]
		private StringBuilder GetInformation()
		{
			StringBuilder stringBuilder = new StringBuilder();
			stringBuilder.Append("ChannelsAvailable: " + Channels);
			stringBuilder.Append("|SampleRate: " + SampleRate);
			stringBuilder.Append("|Bps: " + BytesPerSecond);
			stringBuilder.Append("|BlockAlign: " + BlockAlign);
			stringBuilder.Append("|BitsPerSample: " + BitsPerSample);
			stringBuilder.Append("|Encoding: " + _encoding);
			return stringBuilder;
		}
	}
	/// <summary>
	/// Base class for sample sources that wrap another <see cref="ISampleSource"/>
	/// and delegate format, positioning and reads to it. Float-sample counterpart
	/// of <see cref="WaveAggregatorBase"/>.
	/// </summary>
	public class SampleAggregatorBase : ISampleAggregator, ISampleSource, IReadableAudioSource<float>, IAudioSource, IDisposable, IAggregator<float, ISampleSource>
	{
		// Guards the public Dispose() path against running twice.
		private bool _disposed;

		private ISampleSource _baseSource;

		/// <summary>Format of the wrapped source.</summary>
		public virtual WaveFormat WaveFormat => BaseSource.WaveFormat;

		/// <summary>Position in samples; reads as 0 when the wrapped source is not seekable.</summary>
		public virtual long Position
		{
			get
			{
				if (!CanSeek)
				{
					return 0L;
				}
				return BaseSource.Position;
			}
			set
			{
				if (CanSeek)
				{
					// Snap down to a block boundary before seeking.
					value -= value % WaveFormat.BlockAlign;
					BaseSource.Position = value;
					return;
				}
				throw new InvalidOperationException("Underlying BaseSource is not readable.");
			}
		}

		/// <summary>Length in samples; 0 when the wrapped source is not seekable.</summary>
		public virtual long Length
		{
			get
			{
				if (!CanSeek)
				{
					return 0L;
				}
				return BaseSource.Length;
			}
		}

		/// <summary>Whether the wrapped source supports seeking.</summary>
		public bool CanSeek => BaseSource.CanSeek;

		/// <summary>The wrapped source all members delegate to. Must not be set to null.</summary>
		public virtual ISampleSource BaseSource
		{
			get
			{
				return _baseSource;
			}
			set
			{
				if (value == null)
				{
					throw new ArgumentNullException("value");
				}
				_baseSource = value;
			}
		}

		/// <summary>
		/// Whether disposing this aggregator also disposes the wrapped source.
		/// Set to true by the constructor.
		/// </summary>
		public bool DisposeBaseSource { get; set; }

		public SampleAggregatorBase(ISampleSource source)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
			_baseSource = source;
			DisposeBaseSource = true;
		}

		/// <summary>
		/// Reads from the wrapped source. Offset and count are aligned down to a
		/// multiple of the channel count (one float per channel per frame) before
		/// the read is forwarded.
		/// </summary>
		public virtual int Read(float[] buffer, int offset, int count)
		{
			if (offset % WaveFormat.Channels != 0)
			{
				offset -= offset % WaveFormat.Channels;
			}
			if (count % WaveFormat.Channels != 0)
			{
				count -= count % WaveFormat.Channels;
			}
			return BaseSource.Read(buffer, offset, count);
		}

		public void Dispose()
		{
			if (!_disposed)
			{
				_disposed = true;
				Dispose(disposing: true);
				GC.SuppressFinalize(this);
			}
		}

		/// <summary>
		/// Disposes the wrapped source and clears the reference when
		/// <see cref="DisposeBaseSource"/> is set.
		/// </summary>
		protected virtual void Dispose(bool disposing)
		{
			if (DisposeBaseSource && BaseSource != null)
			{
				BaseSource.Dispose();
				_baseSource = null;
			}
		}

		~SampleAggregatorBase()
		{
			Dispose(disposing: false);
		}
	}
	/// <summary>An audio source that yields normalized samples as floats.</summary>
	public interface ISampleSource : IReadableAudioSource<float>, IAudioSource, IDisposable
	{
	}
	/// <summary>Common contract for all audio sources: format, length and seekable position.</summary>
	public interface IAudioSource : IDisposable
	{
		// Whether Position may be set and Length is meaningful.
		bool CanSeek { get; }

		// Format of the data this source produces.
		WaveFormat WaveFormat { get; }

		// Current position, in the source's native units (bytes or samples).
		long Position { get; set; }

		// Total length, in the source's native units.
		long Length { get; }
	}
}
namespace CSCore.DSP
{
	/// <summary>
	/// Resamples a wrapped <see cref="IWaveSource"/> to a different output format
	/// using the Windows Media Resampler DMO (via <see cref="WMResampler"/>).
	/// </summary>
	public class DmoResampler : WaveAggregatorBase
	{
		internal MediaBuffer InputBuffer;

		// Serializes all access to the DMO and its buffers.
		internal object LockObj = new object();

		internal DmoOutputDataBuffer OutputBuffer;

		// The target format produced by Read.
		internal WaveFormat Outputformat;

		// When set, Position is tracked locally from bytes handed out by Read
		// instead of being derived from the base stream's position.
		private readonly bool _ignoreBaseStreamPosition;

		// Output bytes per input byte (ratio of BytesPerSecond values).
		internal decimal Ratio;

		internal WMResampler Resampler;

		private bool _disposed;

		private int _quality = 30;

		// Scratch buffer reused between Read calls to avoid reallocation.
		private byte[] _readBuffer;

		private long _position;

		/// <summary>The resampled output format.</summary>
		public override WaveFormat WaveFormat => Outputformat;

		/// <summary>Position expressed in output-format bytes.</summary>
		public override long Position
		{
			get
			{
				if (_ignoreBaseStreamPosition)
				{
					return _position;
				}
				return InputToOutput(base.Position);
			}
			set
			{
				base.Position = OutputToInput(value);
				if (_ignoreBaseStreamPosition)
				{
					// Re-derive the local position from where the base stream landed.
					_position = InputToOutput(base.Position);
				}
			}
		}

		/// <summary>Length expressed in output-format bytes.</summary>
		public override long Length => InputToOutput(base.Length);

		/// <summary>
		/// Resampler quality (half filter length passed to the DMO); valid range 1..60.
		/// </summary>
		public int Quality
		{
			get
			{
				return _quality;
			}
			set
			{
				if (value < 1 || value > 60)
				{
					throw new ArgumentOutOfRangeException("value");
				}
				_quality = value;
				using (Resampler.MediaObject.Lock())
				{
					Resampler.ResamplerProps.SetHalfFilterLength(value);
				}
			}
		}

		// Clones the source's format with only the sample rate replaced.
		private static WaveFormat GetWaveFormatWithChangedSampleRate(IWaveSource source, int destSampleRate)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
			WaveFormat waveFormat = (WaveFormat)source.WaveFormat.Clone();
			waveFormat.SampleRate = destSampleRate;
			return waveFormat;
		}

		public DmoResampler(IWaveSource source, int destinationSampleRate)
			: this(source, GetWaveFormatWithChangedSampleRate(source, destinationSampleRate))
		{
		}

		public DmoResampler(IWaveSource source, WaveFormat outputFormat)
			: this(source, outputFormat, ignoreBaseStreamPosition: true)
		{
		}

		/// <exception cref="ArgumentNullException">source or outputFormat is null.</exception>
		public DmoResampler(IWaveSource source, WaveFormat outputFormat, bool ignoreBaseStreamPosition)
			: base(source)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
			if (outputFormat == null)
			{
				throw new ArgumentNullException("outputFormat");
			}
			Initialize(source.WaveFormat, outputFormat);
			Outputformat = outputFormat;
			_ignoreBaseStreamPosition = ignoreBaseStreamPosition;
		}

		/// <summary>
		/// Creates the DMO, validates both formats against it and allocates the
		/// input/output media buffers (sized to half a second of data each).
		/// </summary>
		/// <exception cref="NotSupportedException">Either format is rejected by the DMO.</exception>
		internal void Initialize(WaveFormat inputformat, WaveFormat outputformat)
		{
			Ratio = (decimal)outputformat.BytesPerSecond / (decimal)inputformat.BytesPerSecond;
			lock (LockObj)
			{
				Resampler = new WMResampler();
				MediaObject mediaObject = Resampler.MediaObject;
				if (!mediaObject.SupportsInputFormat(0, inputformat))
				{
					throw new NotSupportedException("Inputformat not supported.");
				}
				mediaObject.SetInputType(0, inputformat);
				if (!mediaObject.SupportsOutputFormat(0, outputformat))
				{
					throw new NotSupportedException("Outputformat not supported.");
				}
				mediaObject.SetOutputType(0, outputformat);
				InputBuffer = new MediaBuffer(inputformat.BytesPerSecond / 2);
				OutputBuffer = new DmoOutputDataBuffer(outputformat.BytesPerSecond / 2);
			}
		}

		/// <summary>
		/// Pumps data from the base source through the DMO until <paramref name="count"/>
		/// output bytes are produced or the base source is exhausted.
		/// </summary>
		/// <returns>Number of output bytes written into <paramref name="buffer"/>.</returns>
		public override int Read(byte[] buffer, int offset, int count)
		{
			lock (LockObj)
			{
				int num = 0;
				while (num < count)
				{
					MediaObject mediaObject = Resampler.MediaObject;
					if (mediaObject.IsReadyForInput(0))
					{
						// Estimate the input bytes needed for the remaining output,
						// capped at twice the output format's bytes-per-second and
						// aligned down to the input block size.
						int val = (int)OutputToInput(count - num);
						val = Math.Min(WaveFormat.BytesPerSecond * 2, val);
						val -= val % BaseSource.WaveFormat.BlockAlign;
						_readBuffer = _readBuffer.CheckBuffer(val);
						int num2 = base.Read(_readBuffer, 0, val);
						if (num2 <= 0 || _disposed)
						{
							// Base source exhausted (or this instance disposed mid-read).
							break;
						}
						// Grow the DMO input buffer if the read exceeds its capacity.
						if (InputBuffer.MaxLength < num2)
						{
							InputBuffer.Dispose();
							InputBuffer = new MediaBuffer(num2);
						}
						InputBuffer.Write(_readBuffer, 0, num2);
						mediaObject.ProcessInput(0, InputBuffer);
						OutputBuffer.Reset();
						// Grow the DMO output buffer if the request exceeds its capacity.
						MediaBuffer mediaBuffer = (MediaBuffer)OutputBuffer.Buffer;
						if (mediaBuffer.MaxLength < count)
						{
							mediaBuffer.Dispose();
							OutputBuffer.Buffer = new MediaBuffer(count);
						}
						OutputBuffer.Buffer.SetLength(0);
						mediaObject.ProcessOutput(ProcessOutputFlags.None, new DmoOutputDataBuffer[1] { OutputBuffer }, 1);
						// Copy whatever the resampler produced into the caller's buffer.
						if (OutputBuffer.Length > 0)
						{
							OutputBuffer.Read(buffer, offset + num);
							num += OutputBuffer.Length;
						}
					}
				}
				if (_ignoreBaseStreamPosition)
				{
					_position += num;
				}
				return num;
			}
		}

		// Converts an input-format byte position to the equivalent output-format
		// position, aligned down to the output block size.
		internal long InputToOutput(long position)
		{
			long num = (long)((decimal)position * Ratio);
			return num - num % Outputformat.BlockAlign;
		}

		// Converts an output-format byte position to the equivalent input-format
		// position, aligned down to the input block size.
		internal long OutputToInput(long position)
		{
			long num = (long)((decimal)position / Ratio);
			return num - num % BaseSource.WaveFormat.BlockAlign;
		}

		/// <summary>
		/// Disposes the DMO and its buffers while leaving the base source alive,
		/// restoring the previous DisposeBaseSource setting afterwards.
		/// </summary>
		public void DisposeResamplerOnly()
		{
			bool disposeBaseSource = base.DisposeBaseSource;
			base.DisposeBaseSource = false;
			Dispose();
			base.DisposeBaseSource = disposeBaseSource;
		}

		protected override void Dispose(bool disposing)
		{
			if (!disposing)
			{
				// On finalization, never touch the (possibly already finalized) base source.
				base.DisposeBaseSource = false;
			}
			base.Dispose(disposing);
			DisposeAndReset(ref Resampler);
			OutputBuffer.Dispose();
			DisposeAndReset(ref InputBuffer);
			_readBuffer = null;
			_disposed = true;
		}

		// Disposes obj (tolerating double-dispose) and nulls the reference.
		private void DisposeAndReset<T>(ref T obj) where T : class, IDisposable
		{
			if (obj != null)
			{
				try
				{
					obj.Dispose();
				}
				catch (ObjectDisposedException)
				{
				}
				obj = null;
			}
		}
	}
}
namespace CSCore
{
	/// <summary>
	/// Extended wave format carrying a channel mask and a subformat GUID. Mirrors
	/// the native WAVEFORMATEXTENSIBLE structure (Sequential, Pack = 2) — the
	/// field order below must not be changed.
	/// </summary>
	[StructLayout(LayoutKind.Sequential, Pack = 2)]
	public class WaveFormatExtensible : WaveFormat
	{
		internal const int WaveFormatExtensibleExtraSize = 22;

		// Native union: holds ValidBitsPerSample or SamplesPerBlock depending on format.
		private short _samplesUnion;

		private ChannelMask _channelMask;

		private Guid _subFormat;

		/// <summary>Valid bits per sample (shares storage with <see cref="SamplesPerBlock"/>).</summary>
		public int ValidBitsPerSample
		{
			get
			{
				return _samplesUnion;
			}
			protected internal set
			{
				_samplesUnion = (short)value;
			}
		}

		/// <summary>Samples per block (shares storage with <see cref="ValidBitsPerSample"/>).</summary>
		public int SamplesPerBlock
		{
			get
			{
				return _samplesUnion;
			}
			protected internal set
			{
				_samplesUnion = (short)value;
			}
		}

		/// <summary>Speaker-position mask describing the channel layout.</summary>
		public ChannelMask ChannelMask
		{
			get
			{
				return _channelMask;
			}
			protected internal set
			{
				_channelMask = value;
			}
		}

		/// <summary>GUID identifying the actual data format (e.g. PCM, IEEE float).</summary>
		public Guid SubFormat
		{
			get
			{
				return _subFormat;
			}
			protected internal set
			{
				_subFormat = value;
			}
		}

		/// <summary>
		/// Returns the subformat GUID for any wave format: the stored SubFormat for
		/// extensible formats, otherwise the GUID mapped from the encoding tag.
		/// </summary>
		public static Guid SubTypeFromWaveFormat(WaveFormat waveFormat)
		{
			if (waveFormat == null)
			{
				throw new ArgumentNullException("waveFormat");
			}
			if (waveFormat is WaveFormatExtensible)
			{
				return ((WaveFormatExtensible)waveFormat).SubFormat;
			}
			return AudioSubTypes.SubTypeFromEncoding(waveFormat.WaveFormatTag);
		}

		internal WaveFormatExtensible()
		{
		}

		public WaveFormatExtensible(int sampleRate, int bits, int channels, Guid subFormat)
			: base(sampleRate, bits, channels, AudioEncoding.Extensible, 22)
		{
			_samplesUnion = (short)bits;
			// Default channel mask: one speaker bit per channel starting at the
			// lowest bit; the channelMask-taking overload may replace it.
			int num = 0;
			for (int i = 0; i < channels; i++)
			{
				num |= 1 << i;
			}
			_channelMask = (ChannelMask)num;
			// Fix: removed the dead store "_subFormat = SubTypeFromWaveFormat(this)"
			// the decompiler surfaced here — it read back the still-empty _subFormat
			// and was immediately overwritten by the assignment below.
			_subFormat = subFormat;
		}

		/// <exception cref="ArgumentException">
		/// Thrown when the number of set bits in channelMask does not equal channels.
		/// </exception>
		public WaveFormatExtensible(int sampleRate, int bits, int channels, Guid subFormat, ChannelMask channelMask)
			: this(sampleRate, bits, channels, subFormat)
		{
			Array values = Enum.GetValues(typeof(ChannelMask));
			int num = 0;
			for (int i = 0; i < values.Length; i++)
			{
				if ((channelMask & (ChannelMask)values.GetValue(i)) == (ChannelMask)values.GetValue(i))
				{
					num++;
				}
			}
			if (channels != num)
			{
				throw new ArgumentException("Channels has to equal the set flags in the channelmask.");
			}
			_channelMask = channelMask;
		}

		/// <summary>Converts back to a plain <see cref="WaveFormat"/>, mapping SubFormat to an encoding tag.</summary>
		public WaveFormat ToWaveFormat()
		{
			return new WaveFormat(SampleRate, BitsPerSample, Channels, AudioSubTypes.EncodingFromSubType(SubFormat));
		}

		/// <summary>Shallow copy; sufficient because all fields are value types.</summary>
		public override object Clone()
		{
			return MemberwiseClone();
		}

		internal override void SetWaveFormatTagInternal(AudioEncoding waveFormatTag)
		{
			// Extensible formats encode the tag through the subformat GUID instead.
			SubFormat = AudioSubTypes.SubTypeFromEncoding(waveFormatTag);
		}

		[DebuggerStepThrough]
		public override string ToString()
		{
			StringBuilder stringBuilder = new StringBuilder(base.ToString());
			stringBuilder.Append("|SubFormat: " + SubFormat);
			stringBuilder.Append("|ChannelMask: " + ChannelMask);
			return stringBuilder.ToString();
		}
	}
}
namespace CSCore.DSP
{
	public class ChannelMatrix
	{
		private static class Factory
		{
			/// <summary>Associates an input/output channel-mask pair with its conversion matrix.</summary>
			private class FactoryEntry
			{
				public ChannelMask Input { get; set; }

				public ChannelMask Output { get; set; }

				public ChannelMatrix Matrix { get; set; }

				public FactoryEntry(ChannelMask input, ChannelMask output, ChannelMatrix matrix)
				{
					Input = input;
					Output = output;
					Matrix = matrix;
				}
			}

			private static readonly FactoryEntry[] FactoryEntries = new FactoryEntry[20]
			{
				new FactoryEntry(ChannelMask.SpeakerFrontCenter, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight, MonoToStereoMatrix),
				new FactoryEntry(ChannelMask.SpeakerFrontCenter, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight, MonoToFiveDotOneSurroundWithRear),
				new FactoryEntry(ChannelMask.SpeakerFrontCenter, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight, MonoToFiveDotOneSurroundWithSide),
				new FactoryEntry(ChannelMask.SpeakerFrontCenter, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight, MonoToSevenDotOneSurround),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight, ChannelMask.SpeakerFrontCenter, StereoToMonoMatrix),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight, StereoToFiveDotOneSurroundWithRear),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight, StereoToFiveDotOneSurroundWithSide),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight, StereoToSevenDotOneSurround),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight, ChannelMask.SpeakerFrontCenter, FiveDotOneSurroundWithRearToMono),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight, FiveDotOneSurroundWithRearToStereo),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight, FiveDotOneSurroundWithRearToSevenDotOne),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight, ChannelMask.SpeakerFrontCenter, FiveDotOneSurroundWithSideToMono),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight, FiveDotOneSurroundWithSideToStereo),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight, FiveDotOneSurroundWithSideToSevenDotOne),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight, ChannelMask.SpeakerFrontCenter, SevenDotOneSurroundToMono),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight, SevenDotOneSurroundToStereo),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight, SevenDotOneSurroundToFiveDotOneSurroundWithRear),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight, SevenDotOneSurroundToFiveDotOneSurroundWithSide),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight, QuadraphonicToStereo),
				new FactoryEntry(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight, StereoToQuadraphonic)
			};

			/// <summary>Looks up the predefined conversion matrix for a channel-mask pair.</summary>
			/// <exception cref="ArgumentException">from equals to (no conversion needed).</exception>
			/// <exception cref="KeyNotFoundException">No predefined matrix exists for the pair.</exception>
			public static ChannelMatrix GetMatrix(ChannelMask from, ChannelMask to)
			{
				if (from == to)
				{
					throw new ArgumentException("from must not equal to.");
				}
				foreach (FactoryEntry entry in FactoryEntries)
				{
					if (entry.Input == from && entry.Output == to)
					{
						return entry.Matrix;
					}
				}
				throw new KeyNotFoundException("Could not find a channel matrix for specified channelmasks.");
			}
		}

		public static readonly ChannelMatrix StereoToFiveDotOneSurroundWithRear;

		public static readonly ChannelMatrix FiveDotOneSurroundWithRearToStereo;

		public static readonly ChannelMatrix StereoToFiveDotOneSurroundWithSide;

		public static readonly ChannelMatrix FiveDotOneSurroundWithSideToStereo;

		public static readonly ChannelMatrix StereoToSevenDotOneSurround;

		public static readonly ChannelMatrix SevenDotOneSurroundToStereo;

		public static readonly ChannelMatrix MonoToFiveDotOneSurroundWithRear;

		public static readonly ChannelMatrix FiveDotOneSurroundWithRearToMono;

		public static readonly ChannelMatrix MonoToFiveDotOneSurroundWithSide;

		public static readonly ChannelMatrix FiveDotOneSurroundWithSideToMono;

		public static readonly ChannelMatrix MonoToSevenDotOneSurround;

		public static readonly ChannelMatrix SevenDotOneSurroundToMono;

		public static readonly ChannelMatrix StereoToMonoMatrix;

		public static readonly ChannelMatrix MonoToStereoMatrix;

		public static readonly ChannelMatrix FiveDotOneSurroundWithRearToSevenDotOne;

		public static readonly ChannelMatrix SevenDotOneSurroundToFiveDotOneSurroundWithRear;

		public static readonly ChannelMatrix FiveDotOneSurroundWithSideToSevenDotOne;

		public static readonly ChannelMatrix SevenDotOneSurroundToFiveDotOneSurroundWithSide;

		public static readonly ChannelMatrix QuadraphonicToStereo;

		public static readonly ChannelMatrix StereoToQuadraphonic;

		private readonly ChannelMask _inputMask;

		private readonly ChannelMatrixElement[,] _matrix;

		private readonly ChannelMask _outputMask;

		/// <summary>Channel mask describing the input speaker layout.</summary>
		public ChannelMask InputMask => _inputMask;

		/// <summary>Channel mask describing the output speaker layout.</summary>
		public ChannelMask OutputMask => _outputMask;

		/// <summary>Number of matrix rows (first array dimension).</summary>
		public int Height => _matrix.GetLength(0);

		/// <summary>Number of matrix columns (second array dimension).</summary>
		public int Width => _matrix.GetLength(1);

		/// <summary>Input channel count — one row per input channel.</summary>
		public int InputChannelCount => Height;

		/// <summary>Output channel count — one column per output channel.</summary>
		public int OutputChannelCount => Width;

		/// <summary>Gets or sets the matrix element mapping an input channel to an output channel.</summary>
		public ChannelMatrixElement this[int input, int output]
		{
			get
			{
				return _matrix[input, output];
			}
			set
			{
				_matrix[input, output] = value;
			}
		}

		/// <summary>Looks up the predefined conversion matrix for a channel-mask pair.</summary>
		public static ChannelMatrix GetMatrix(ChannelMask from, ChannelMask to) => Factory.GetMatrix(from, to);

		/// <summary>
		/// Looks up the conversion matrix for two wave formats, deriving a channel
		/// mask for each. Returns null when no mask can be derived for either format.
		/// </summary>
		public static ChannelMatrix GetMatrix(WaveFormat from, WaveFormat to)
		{
			if (from == null)
			{
				throw new ArgumentNullException("from");
			}
			if (to == null)
			{
				throw new ArgumentNullException("to");
			}
			if (!TryExtractChannelMask(from, out ChannelMask inputMask) || !TryExtractChannelMask(to, out ChannelMask outputMask))
			{
				// One of the formats has no derivable channel layout.
				return null;
			}
			return GetMatrix(inputMask, outputMask);
		}

		/// <summary>
		/// Derives a channel mask from a wave format: the explicit mask for
		/// extensible formats, front-center for mono, front-left/right for stereo.
		/// Returns false (mask 0) for any other layout.
		/// </summary>
		private static bool TryExtractChannelMask(WaveFormat waveFormat, out ChannelMask channelMask)
		{
			if (waveFormat is WaveFormatExtensible extensibleFormat)
			{
				channelMask = extensibleFormat.ChannelMask;
				return true;
			}
			if (waveFormat.Channels == 1)
			{
				channelMask = ChannelMask.SpeakerFrontCenter;
				return true;
			}
			if (waveFormat.Channels == 2)
			{
				channelMask = ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight;
				return true;
			}
			channelMask = (ChannelMask)0;
			return false;
		}

		/// <summary>
		/// Builds the extensible output format this matrix produces for the given
		/// source: same sample rate, bit depth and subtype, with this matrix's
		/// output channel count and mask.
		/// </summary>
		/// <exception cref="ArgumentNullException">audioSource is null.</exception>
		internal WaveFormat BuildOutputWaveFormat(IAudioSource audioSource)
		{
			if (audioSource == null)
			{
				// Fix: the original reported the wrong parameter name ("source").
				throw new ArgumentNullException(nameof(audioSource));
			}
			return new WaveFormatExtensible(audioSource.WaveFormat.SampleRate, audioSource.WaveFormat.BitsPerSample, OutputChannelCount, WaveFormatExtensible.SubTypeFromWaveFormat(audioSource.WaveFormat), OutputMask);
		}

		static ChannelMatrix()
		{
			StereoToFiveDotOneSurroundWithRear = new ChannelMatrix(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight);
			StereoToFiveDotOneSurroundWithRear.SetMatrix(new float[2, 6]
			{
				{ 0.314f, 0f, 0.222f, 0.031f, 0.268f, 0.164f },
				{ 0f, 0.314f, 0.222f, 0.031f, 0.164f, 0.268f }
			});
			FiveDotOneSurroundWithRearToStereo = StereoToFiveDotOneSurroundWithRear.Flip();
			StereoToFiveDotOneSurroundWithSide = new ChannelMatrix(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight);
			StereoToFiveDotOneSurroundWithSide.SetMatrix(new float[2, 6]
			{
				{ 0.32f, 0f, 0.226f, 0.032f, 0.292f, 0.13f },
				{ 0f, 0.32f, 0.226f, 0.032f, 0.13f, 0.292f }
			});
			FiveDotOneSurroundWithSideToStereo = StereoToFiveDotOneSurroundWithSide.Flip();
			StereoToSevenDotOneSurround = new ChannelMatrix(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight);
			StereoToSevenDotOneSurround.SetMatrix(new float[2, 8]
			{
				{ 0.222f, 0f, 0.157f, 0.022f, 0.189f, 0.116f, 0.203f, 0.09f },
				{ 0f, 0.222f, 0.157f, 0.022f, 0.116f, 0.189f, 0.09f, 0.203f }
			});
			SevenDotOneSurroundToStereo = StereoToSevenDotOneSurround.Flip();
			MonoToFiveDotOneSurroundWithRear = new ChannelMatrix(ChannelMask.SpeakerFrontCenter, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight);
			MonoToFiveDotOneSurroundWithRear.SetMatrix(new float[1, 6] { { 0.192f, 0.192f, 0.192f, 0.038f, 0.192f, 0.192f } });
			FiveDotOneSurroundWithRearToMono = MonoToFiveDotOneSurroundWithRear.Flip();
			MonoToFiveDotOneSurroundWithSide = new ChannelMatrix(ChannelMask.SpeakerFrontCenter, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight);
			MonoToFiveDotOneSurroundWithSide.SetMatrix(new float[1, 6] { { 0.192f, 0.192f, 0.192f, 0.038f, 0.192f, 0.192f } });
			FiveDotOneSurroundWithSideToMono = MonoToFiveDotOneSurroundWithSide.Flip();
			MonoToSevenDotOneSurround = new ChannelMatrix(ChannelMask.SpeakerFrontCenter, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight);
			MonoToSevenDotOneSurround.SetMatrix(new float[1, 8] { { 0.139f, 0.139f, 0.139f, 0.028f, 0.139f, 0.139f, 0.139f, 0.139f } });
			SevenDotOneSurroundToMono = MonoToSevenDotOneSurround.Flip();
			FiveDotOneSurroundWithRearToSevenDotOne = new ChannelMatrix(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight);
			FiveDotOneSurroundWithRearToSevenDotOne.SetMatrix(new float[6, 8]
			{
				{ 0.518f, 0f, 0f, 0f, 0f, 0f, 0.189f, 0f },
				{ 0f, 0.518f, 0f, 0f, 0f, 0f, 0f, 0.189f },
				{ 0f, 0f, 0.518f, 0f, 0f, 0f, 0f, 0f },
				{ 0f, 0f, 0f, 0.518f, 0f, 0f, 0f, 0f },
				{ 0f, 0f, 0f, 0f, 0.518f, 0f, 0.482f, 0f },
				{ 0f, 0f, 0f, 0f, 0f, 0.518f, 0f, 0.482f }
			});
			SevenDotOneSurroundToFiveDotOneSurroundWithRear = FiveDotOneSurroundWithRearToSevenDotOne.Flip();
			FiveDotOneSurroundWithSideToSevenDotOne = new ChannelMatrix(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerFrontCenter | ChannelMask.SpeakerLowFrequency | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight | ChannelMask.SpeakerSideLeft | ChannelMask.SpeakerSideRight);
			FiveDotOneSurroundWithSideToSevenDotOne.SetMatrix(new float[6, 8]
			{
				{ 0.447f, 0f, 0f, 0f, 0f, 0f, 0f, 0f },
				{ 0f, 0.447f, 0f, 0f, 0f, 0f, 0f, 0f },
				{ 0f, 0f, 0.447f, 0f, 0f, 0f, 0f, 0f },
				{ 0f, 0f, 0f, 0.447f, 0f, 0f, 0f, 0f },
				{ 0f, 0f, 0f, 0f, 0.429f, 0.124f, 0.447f, 0f },
				{ 0f, 0f, 0f, 0f, 0.124f, 0.429f, 0f, 0.447f }
			});
			SevenDotOneSurroundToFiveDotOneSurroundWithSide = FiveDotOneSurroundWithSideToSevenDotOne.Flip();
			StereoToMonoMatrix = new ChannelMatrix(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight, ChannelMask.SpeakerFrontCenter);
			StereoToMonoMatrix.SetMatrix(new float[2, 1]
			{
				{ 0.5f },
				{ 0.5f }
			});
			MonoToStereoMatrix = new ChannelMatrix(ChannelMask.SpeakerFrontCenter, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight);
			MonoToStereoMatrix.SetMatrix(new float[1, 2] { { 1f, 1f } });
			StereoToQuadraphonic = new ChannelMatrix(ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight, ChannelMask.SpeakerFrontLeft | ChannelMask.SpeakerFrontRight | ChannelMask.SpeakerBackLeft | ChannelMask.SpeakerBackRight);
			StereoToQuadraphonic.SetMatrix(new float[2, 4]
			{
				{ 0.5f, 0f, 0.5f, 0f },
				{ 0f, 0.5f, 0f, 0.5f }
			});
			QuadraphonicToStereo = StereoToQuadraphonic.Flip();
		}

		/// <summary>
		/// Builds a mixing matrix between the channels selected by <paramref name="inputMask"/>
		/// and those selected by <paramref name="outputMask"/>. All coefficients start at zero.
		/// </summary>
		/// <exception cref="ArgumentException">Either mask is zero/negative.</exception>
		public ChannelMatrix(ChannelMask inputMask, ChannelMask outputMask)
		{
			_inputMask = inputMask;
			_outputMask = outputMask;
			if (inputMask <= (ChannelMask)0)
			{
				throw new ArgumentException("Invalid inputMask");
			}
			if (outputMask <= (ChannelMask)0)
			{
				throw new ArgumentException("Invalid outputMask");
			}
			// PERF: hoist the enum scans. The original re-ran GetValuesOfChannelMask
			// (a reflection-based walk over the ChannelMask enum) on every single
			// loop iteration; the decomposed masks are loop-invariant.
			ChannelMask[] inputChannels = GetValuesOfChannelMask(inputMask);
			ChannelMask[] outputChannels = GetValuesOfChannelMask(outputMask);
			_matrix = new ChannelMatrixElement[inputChannels.Length, outputChannels.Length];
			for (int i = 0; i < Width; i++)
			{
				for (int j = 0; j < Height; j++)
				{
					_matrix[j, i] = new ChannelMatrixElement(inputChannels[j], outputChannels[i]);
				}
			}
		}

		/// <summary>
		/// Copies the given coefficients into this matrix. The array is indexed
		/// [input channel (height), output channel (width)].
		/// </summary>
		/// <exception cref="ArgumentNullException"><paramref name="matrix"/> is null.</exception>
		/// <exception cref="ArgumentException">The array dimensions do not match this matrix.</exception>
		public void SetMatrix(float[,] matrix)
		{
			if (matrix == null)
			{
				// BUGFIX: was ArgumentException; a null argument warrants ArgumentNullException
				// (consistent with the other guard clauses in this class).
				throw new ArgumentNullException("matrix");
			}
			if (matrix.GetLength(1) != Width)
			{
				throw new ArgumentException("Matrix has to have a width of " + Width);
			}
			if (matrix.GetLength(0) != Height)
			{
				throw new ArgumentException("Matrix has to have a height of " + Height);
			}
			for (int i = 0; i < Width; i++)
			{
				for (int j = 0; j < Height; j++)
				{
					this[j, i].Value = matrix[j, i];
				}
			}
		}

		/// <summary>
		/// Flattens the matrix coefficients into a single array, column-major
		/// (all input-channel weights for output channel 0 first, and so on).
		/// </summary>
		public float[] GetOneDimensionalMatrix()
		{
			float[] flattened = new float[Width * Height];
			int index = 0;
			for (int column = 0; column < Width; column++)
			{
				for (int row = 0; row < Height; row++)
				{
					flattened[index++] = this[row, column].Value;
				}
			}
			return flattened;
		}

		/// <summary>
		/// Returns the transposed matrix: inputs become outputs and vice versa,
		/// with each element's coefficient preserved.
		/// </summary>
		public ChannelMatrix Flip()
		{
			ChannelMatrix flipped = new ChannelMatrix(OutputMask, InputMask);
			for (int column = 0; column < OutputChannelCount; column++)
			{
				for (int row = 0; row < InputChannelCount; row++)
				{
					ChannelMatrixElement element = this[row, column];
					flipped[column, row] = new ChannelMatrixElement(element.OutputChannel, element.InputChannel)
					{
						Value = element.Value
					};
				}
			}
			return flipped;
		}

		/// <summary>
		/// Decomposes a combined channel mask into the individual ChannelMask members
		/// it contains, in enum declaration order.
		/// </summary>
		private static ChannelMask[] GetValuesOfChannelMask(ChannelMask channelMask)
		{
			List<ChannelMask> list = new List<ChannelMask>();
			// PERF/idiom: typed foreach instead of three boxing Array.GetValue calls
			// per iteration in the original index-based loop.
			foreach (ChannelMask value in (ChannelMask[])Enum.GetValues(typeof(ChannelMask)))
			{
				if ((channelMask & value) == value)
				{
					list.Add(value);
				}
			}
			return list.ToArray();
		}
	}
}
namespace CSCore.Streams
{
	/// <summary>
	/// Wraps a mono <see cref="ISampleSource"/> and exposes it as stereo by
	/// duplicating every sample into both channels.
	/// </summary>
	public sealed class MonoToStereoSource : SampleAggregatorBase
	{
		private float[] _buffer;

		private readonly WaveFormat _waveFormat;

		// Position/Length are expressed in output (stereo) samples — twice the mono count.
		public override long Position
		{
			get => base.Position * 2;
			set
			{
				value -= value % WaveFormat.BlockAlign;
				base.Position = value / 2;
			}
		}

		public override long Length => base.Length * 2;

		public override WaveFormat WaveFormat => _waveFormat;

		/// <summary>Requires a one-channel source; output is 32-bit IEEE float stereo at the source rate.</summary>
		public MonoToStereoSource(ISampleSource source)
			: base(source)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
			if (source.WaveFormat.Channels != 1)
			{
				throw new ArgumentException("The WaveFormat of the source has be a mono format (one channel).", "source");
			}
			_waveFormat = new WaveFormat(source.WaveFormat.SampleRate, 32, 2, AudioEncoding.IeeeFloat);
		}

		/// <summary>Reads count/2 mono samples and writes each of them twice (L then R).</summary>
		public override int Read(float[] buffer, int offset, int count)
		{
			int monoCount = count / 2;
			_buffer = _buffer.CheckBuffer(monoCount);
			int monoRead = base.Read(_buffer, 0, monoCount);
			int writeIndex = offset;
			for (int i = 0; i < monoRead; i++)
			{
				float sample = _buffer[i];
				buffer[writeIndex++] = sample;
				buffer[writeIndex++] = sample;
			}
			return monoRead * 2;
		}

		protected override void Dispose(bool disposing)
		{
			base.Dispose(disposing);
			_buffer = null;
		}
	}
}
namespace CSCore.DSP
{
	/// <summary>
	/// DMO-based resampler that additionally remixes channels through a
	/// <see cref="ChannelMatrix"/> (e.g. stereo to 5.1).
	/// </summary>
	public class DmoChannelResampler : DmoResampler
	{
		private readonly ChannelMatrix _channelMatrix;

		/// <summary>Gets the matrix used to mix input channels into output channels.</summary>
		public ChannelMatrix ChannelMatrix => _channelMatrix;

		/// <summary>Remaps channels only, keeping the source's sample rate.</summary>
		public DmoChannelResampler(IWaveSource source, ChannelMatrix channelMatrix)
			: this(source, channelMatrix, source.WaveFormat.SampleRate)
		{
		}

		/// <summary>
		/// Converts <paramref name="source"/> to <paramref name="outputFormat"/> while mixing
		/// channels according to <paramref name="channelMatrix"/>. The source channel count
		/// must match the matrix's input channel count.
		/// </summary>
		public DmoChannelResampler(IWaveSource source, ChannelMatrix channelMatrix, WaveFormat outputFormat)
			: base(source, outputFormat)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
			if (channelMatrix == null)
			{
				throw new ArgumentNullException("channelMatrix");
			}
			if (outputFormat == null)
			{
				throw new ArgumentNullException("outputFormat");
			}
			if (source.WaveFormat.Channels != channelMatrix.InputChannelCount)
			{
				throw new ArgumentException("The number of channels of the source has to be equal to the number of input channels specified by the channelMatrix.");
			}
			// Rebuild both formats as WAVEFORMATEXTENSIBLE so the channel masks of the
			// matrix are carried into the DMO.
			WaveFormatExtensible inputformat = new WaveFormatExtensible(source.WaveFormat.SampleRate, source.WaveFormat.BitsPerSample, source.WaveFormat.Channels, WaveFormatExtensible.SubTypeFromWaveFormat(source.WaveFormat), channelMatrix.InputMask);
			Outputformat = new WaveFormatExtensible(outputFormat.SampleRate, outputFormat.BitsPerSample, outputFormat.Channels, WaveFormatExtensible.SubTypeFromWaveFormat(outputFormat), channelMatrix.OutputMask);
			// NOTE: Initialize runs before the matrix coefficients are pushed to the
			// resampler below; this ordering appears intentional, as Commit talks to
			// the initialized MediaObject.
			Initialize(inputformat, Outputformat);
			_channelMatrix = channelMatrix;
			CommitChannelMatrixChanges();
		}

		/// <summary>Remaps channels and resamples to <paramref name="destinationSampleRate"/>.</summary>
		public DmoChannelResampler(IWaveSource source, ChannelMatrix channelMatrix, int destinationSampleRate)
			: this(source, channelMatrix, GetOutputWaveFormat(source, destinationSampleRate, channelMatrix))
		{
		}

		// Derives the output format from the matrix's output mask, then overrides the rate.
		private static WaveFormat GetOutputWaveFormat(IWaveSource source, int sampleRate, ChannelMatrix channelMatrix)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
			if (channelMatrix == null)
			{
				throw new ArgumentNullException("channelMatrix");
			}
			WaveFormat waveFormat = channelMatrix.BuildOutputWaveFormat(source);
			waveFormat.SampleRate = sampleRate;
			return waveFormat;
		}

		/// <summary>
		/// Pushes the current matrix coefficients into the underlying DMO resampler.
		/// Call after changing coefficients on <see cref="ChannelMatrix"/>.
		/// </summary>
		public void CommitChannelMatrixChanges()
		{
			using (Resampler.MediaObject.Lock())
			{
				// Re-set the current output type before applying the user channel matrix.
				Resampler.MediaObject.SetOutputType(0, Resampler.MediaObject.GetOutputCurrentType(0), SetTypeFlags.None);
				Resampler.ResamplerProps.SetUserChannelMtx(_channelMatrix.GetOneDimensionalMatrix());
			}
		}
	}
}
namespace CSCore.Streams
{
	/// <summary>
	/// Wraps a stereo <see cref="ISampleSource"/> and exposes it as mono by
	/// averaging the left and right channel of each frame.
	/// </summary>
	public class StereoToMonoSource : SampleAggregatorBase
	{
		private readonly WaveFormat _waveFormat;

		// Scratch buffer for the interleaved stereo samples read from the source.
		private float[] _buffer;

		// Position/Length are expressed in output (mono) samples — half the stereo count.
		public override long Position
		{
			get
			{
				return BaseSource.Position / 2;
			}
			set
			{
				// NOTE(review): value is aligned to this (mono) format's BlockAlign
				// before being scaled to the stereo source — confirm the alignment is
				// meant to happen pre-scale.
				value -= value % WaveFormat.BlockAlign;
				BaseSource.Position = value * 2;
			}
		}

		public override long Length => BaseSource.Length / 2;

		public override WaveFormat WaveFormat => _waveFormat;

		/// <summary>Requires a two-channel source; output is 32-bit IEEE float mono at the source rate.</summary>
		public StereoToMonoSource(ISampleSource source)
			: base(source)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
			if (source.WaveFormat.Channels != 2)
			{
				throw new ArgumentException("The WaveFormat of the source has be a stereo format (two channels).", "source");
			}
			_waveFormat = new WaveFormat(source.WaveFormat.SampleRate, 32, 1, AudioEncoding.IeeeFloat);
		}

		/// <summary>
		/// Reads count*2 stereo samples and writes the per-frame average (L+R)/2
		/// into <paramref name="buffer"/> at <paramref name="offset"/>.
		/// </summary>
		public unsafe override int Read(float[] buffer, int offset, int count)
		{
			_buffer = _buffer.CheckBuffer(count * 2);
			int num = BaseSource.Read(_buffer, 0, count * 2);
			fixed (float* ptr = buffer)
			{
				float* ptr2 = ptr + offset;
				// Step over interleaved L/R pairs; "num - 1" guards against an odd
				// sample count from the source.
				for (int i = 0; i < num - 1; i += 2)
				{
					*(ptr2++) = (_buffer[i] + _buffer[i + 1]) / 2f;
				}
			}
			return num / 2;
		}

		protected override void Dispose(bool disposing)
		{
			base.Dispose(disposing);
			_buffer = null;
		}
	}
	/// <summary>
	/// Wave source decorator that rewinds to the start whenever the underlying
	/// source runs out of data, so playback loops forever (while <see cref="EnableLoop"/> is set).
	/// </summary>
	public class LoopStream : WaveAggregatorBase
	{
		private bool _enableLoop = true;

		private bool _raisedStreamFinishedEvent;

		/// <summary>Gets or sets whether the stream restarts at position 0 when the source is exhausted.</summary>
		public bool EnableLoop
		{
			get => _enableLoop;
			set => _enableLoop = value;
		}

		/// <summary>Raised once each time the underlying source reaches its end.</summary>
		public event EventHandler StreamFinished;

		public LoopStream(IWaveSource source)
			: base(source)
		{
		}

		/// <summary>
		/// Reads up to <paramref name="count"/> bytes, wrapping around to the start of the
		/// source when it is exhausted (unless looping is disabled, in which case a short
		/// read is returned).
		/// </summary>
		public override int Read(byte[] buffer, int offset, int count)
		{
			int totalRead = base.Read(buffer, offset, count);
			while (totalRead < count)
			{
				int read = base.Read(buffer, offset + totalRead, count - totalRead);
				if (read == 0)
				{
					// Fire the event only once per wrap-around.
					EventHandler handler = this.StreamFinished;
					if (handler != null && !_raisedStreamFinishedEvent)
					{
						handler(this, EventArgs.Empty);
						_raisedStreamFinishedEvent = true;
					}
					if (!EnableLoop)
					{
						break;
					}
					Position = 0L;
				}
				else
				{
					_raisedStreamFinishedEvent = false;
				}
				totalRead += read;
			}
			return totalRead;
		}
	}
}
namespace CSCore.Streams.SampleConverter
{
	/// <summary>
	/// Converts 32-bit float samples ([-1, +1]) to unsigned 8-bit PCM (0..255, 128 = silence).
	/// </summary>
	public class SampleToPcm8 : SampleToWaveBase
	{
		public SampleToPcm8(ISampleSource source)
			: base(source, 8, AudioEncoding.Pcm)
		{
		}

		/// <summary>
		/// Reads up to <paramref name="count"/> samples and writes one byte per sample
		/// into <paramref name="buffer"/> starting at <paramref name="offset"/>.
		/// </summary>
		public override int Read(byte[] buffer, int offset, int count)
		{
			Buffer = Buffer.CheckBuffer(count);
			int read = Source.Read(Buffer, 0, count);
			for (int i = 0; i < read; i++)
			{
				// BUGFIX: the original started the loop at "offset" and wrote buffer[i],
				// which both skipped the first samples of Buffer and ignored the caller's
				// destination offset.
				float scaled = (Buffer[i] + 1f) * 128f;
				// Clamp: a full-scale +1.0 sample scales to 256, which would wrap to 0.
				if (scaled > 255f)
				{
					scaled = 255f;
				}
				else if (scaled < 0f)
				{
					scaled = 0f;
				}
				buffer[offset + i] = (byte)scaled;
			}
			return read;
		}
	}
	/// <summary>
	/// Converts 32-bit float samples to signed 16-bit little-endian PCM.
	/// </summary>
	public class SampleToPcm16 : SampleToWaveBase
	{
		public SampleToPcm16(ISampleSource source)
			: base(source, 16, AudioEncoding.Pcm)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
		}

		/// <summary>
		/// Reads count/2 samples and writes two bytes per sample (low byte first)
		/// into <paramref name="buffer"/> starting at <paramref name="offset"/>.
		/// </summary>
		public override int Read(byte[] buffer, int offset, int count)
		{
			Buffer = Buffer.CheckBuffer(count / 2);
			int read = Source.Read(Buffer, 0, count / 2);
			int outIndex = offset;
			for (int i = 0; i < read; i++)
			{
				short sample = (short)(Buffer[i] * 32767f);
				// PERF: emit the little-endian bytes directly instead of allocating a
				// 2-byte array per sample via BitConverter.GetBytes — this runs on the
				// hot audio path. Byte values are identical to the BitConverter output.
				buffer[outIndex++] = (byte)sample;
				buffer[outIndex++] = (byte)(sample >> 8);
			}
			return read * 2;
		}
	}
	/// <summary>
	/// Converts 32-bit float samples to signed 24-bit little-endian PCM.
	/// </summary>
	public class SampleToPcm24 : SampleToWaveBase
	{
		public SampleToPcm24(ISampleSource source)
			: base(source, 24, AudioEncoding.Pcm)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
		}

		/// <summary>
		/// Reads count/3 samples and writes three bytes per sample (low byte first)
		/// into <paramref name="buffer"/> starting at <paramref name="offset"/>.
		/// </summary>
		public override int Read(byte[] buffer, int offset, int count)
		{
			int sampleCount = count / 3;
			Buffer = Buffer.CheckBuffer(sampleCount);
			int read = Source.Read(Buffer, 0, sampleCount);
			int outIndex = offset;
			for (int i = 0; i < read; i++)
			{
				// BUGFIX: the original cast the scaled float directly to uint. For
				// negative samples (half of any real waveform) an out-of-range
				// float-to-uint conversion is saturating/unspecified, corrupting the
				// output. Casting via int preserves the two's-complement bit pattern,
				// whose low three bytes are the correct 24-bit PCM value.
				int sample = (int)(Buffer[i] * 8388608f);
				buffer[outIndex++] = (byte)sample;
				buffer[outIndex++] = (byte)(sample >> 8);
				buffer[outIndex++] = (byte)(sample >> 16);
			}
			return read * 3;
		}
	}
	/// <summary>
	/// Exposes an <see cref="ISampleSource"/> as raw 32-bit IEEE float bytes
	/// (a straight memcpy of the sample data).
	/// </summary>
	public class SampleToIeeeFloat32 : SampleToWaveBase
	{
		public SampleToIeeeFloat32(ISampleSource source)
			: base(source, 32, AudioEncoding.IeeeFloat)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
		}

		/// <summary>
		/// Reads count/4 samples and block-copies them as count bytes into
		/// <paramref name="buffer"/> starting at <paramref name="offset"/>.
		/// </summary>
		public override int Read(byte[] buffer, int offset, int count)
		{
			Buffer = Buffer.CheckBuffer(count / 4);
			// BUGFIX: the original read into Buffer at index offset/4 but block-copied
			// starting from Buffer index 0, so any call with a non-zero offset copied
			// stale/uninitialized data. Read and copy must use the same origin.
			int read = Source.Read(Buffer, 0, count / 4);
			System.Buffer.BlockCopy(Buffer, 0, buffer, offset, read * 4);
			return read * 4;
		}
	}
	/// <summary>
	/// Base class converting an <see cref="ISampleSource"/> (32-bit float samples)
	/// into an <see cref="IWaveSource"/> (raw bytes) of a given bit depth/encoding.
	/// Subclasses implement the actual per-sample conversion in <see cref="Read"/>.
	/// </summary>
	public abstract class SampleToWaveBase : IWaveSource, IReadableAudioSource<byte>, IAudioSource, IDisposable
	{
		private readonly WaveFormat _waveFormat;

		// The wrapped float-sample source subclasses read from.
		protected internal ISampleSource Source;

		// Scratch sample buffer reused across Read calls by subclasses.
		protected internal float[] Buffer;

		// 32 / bits: scale factor between source-sample positions and output-byte positions.
		private readonly double _ratio;

		private bool _disposed;

		public WaveFormat WaveFormat => _waveFormat;

		// Position/Length are expressed in output bytes; the source counts samples,
		// hence the BytesPerSample scaling in both directions.
		public long Position
		{
			get
			{
				if (!CanSeek)
				{
					return 0L;
				}
				return Source.Position * WaveFormat.BytesPerSample;
			}
			set
			{
				if (CanSeek)
				{
					// Align to whole frames before scaling down to source samples.
					value -= value % WaveFormat.BlockAlign;
					Source.Position = value / WaveFormat.BytesPerSample;
					return;
				}
				throw new InvalidOperationException();
			}
		}

		public long Length
		{
			get
			{
				if (!CanSeek)
				{
					return 0L;
				}
				return Source.Length * WaveFormat.BytesPerSample;
			}
		}

		public bool CanSeek => Source.CanSeek;

		/// <summary>
		/// Clones the source format and overrides bit depth and encoding tag.
		/// </summary>
		/// <exception cref="ArgumentNullException"><paramref name="source"/> is null.</exception>
		/// <exception cref="ArgumentOutOfRangeException"><paramref name="bits"/> is less than 1.</exception>
		protected SampleToWaveBase(ISampleSource source, int bits, AudioEncoding encoding)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
			if (bits < 1)
			{
				throw new ArgumentOutOfRangeException("bits");
			}
			_waveFormat = (WaveFormat)source.WaveFormat.Clone();
			_waveFormat.BitsPerSample = bits;
			_waveFormat.SetWaveFormatTagInternal(encoding);
			Source = source;
			_ratio = 32.0 / (double)bits;
		}

		public abstract int Read(byte[] buffer, int offset, int count);

		// Converts a source-side position to an output-side position, snapped to whole blocks.
		internal long InputToOutput(long position)
		{
			long num = (long)((double)position * _ratio);
			return num - num % _waveFormat.BlockAlign;
		}

		// Converts an output-side position back to a source-side position, snapped to whole blocks.
		internal long OutputToInput(long position)
		{
			long num = (long)((double)position / _ratio);
			return num - num % Source.WaveFormat.BlockAlign;
		}

		public void Dispose()
		{
			if (!_disposed)
			{
				_disposed = true;
				Dispose(disposing: true);
				GC.SuppressFinalize(this);
			}
		}

		// NOTE(review): this disposes the managed Source even when invoked from the
		// finalizer (disposing == false); the standard pattern would skip managed
		// members in that case — confirm this is intentional.
		protected virtual void Dispose(bool disposing)
		{
			Source.Dispose();
			Buffer = null;
		}

		~SampleToWaveBase()
		{
			Dispose(disposing: false);
		}
	}
	/// <summary>
	/// Base class converting an <see cref="IWaveSource"/> (raw bytes) into an
	/// <see cref="ISampleSource"/> (32-bit IEEE float samples). Subclasses implement
	/// the actual per-format decoding in <see cref="Read"/>.
	/// </summary>
	public abstract class WaveToSampleBase : ISampleSource, IReadableAudioSource<float>, IAudioSource, IDisposable
	{
		private readonly WaveFormat _waveFormat;

		// The wrapped byte source subclasses read from.
		protected internal IWaveSource Source;

		// Scratch byte buffer reused across Read calls by subclasses.
		protected internal byte[] Buffer;

		public WaveFormat WaveFormat => _waveFormat;

		// Position/Length are expressed in samples; the source counts bytes,
		// hence the BytesPerSample scaling in both directions.
		public long Position
		{
			get
			{
				if (!CanSeek)
				{
					return 0L;
				}
				return Source.Position / Source.WaveFormat.BytesPerSample;
			}
			set
			{
				if (CanSeek)
				{
					// Align to whole frames before scaling up to source bytes.
					value -= value % WaveFormat.BlockAlign;
					Source.Position = value * Source.WaveFormat.BytesPerSample;
					return;
				}
				throw new InvalidOperationException();
			}
		}

		public long Length
		{
			get
			{
				if (!CanSeek || Source.Length == 0L)
				{
					return 0L;
				}
				return Source.Length / Source.WaveFormat.BytesPerSample;
			}
		}

		public bool CanSeek => Source.CanSeek;

		/// <summary>
		/// Clones the source format as 32-bit IEEE float.
		/// </summary>
		/// <exception cref="ArgumentNullException"><paramref name="source"/> is null.</exception>
		protected WaveToSampleBase(IWaveSource source)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
			Source = source;
			_waveFormat = (WaveFormat)source.WaveFormat.Clone();
			_waveFormat.BitsPerSample = 32;
			_waveFormat.SetWaveFormatTagInternal(AudioEncoding.IeeeFloat);
		}

		public abstract int Read(float[] buffer, int offset, int count);

		public void Dispose()
		{
			Dispose(disposing: true);
			// BUGFIX/consistency: the original never suppressed finalization, so the
			// finalizer ran (and re-disposed) even after an explicit Dispose. This
			// matches the pattern used by SampleToWaveBase (CA1816).
			GC.SuppressFinalize(this);
		}

		protected virtual void Dispose(bool disposing)
		{
			// Null-guarded so a second call (e.g. from the finalizer) is a no-op.
			if (Source != null)
			{
				Source.Dispose();
				Source = null;
			}
		}

		~WaveToSampleBase()
		{
			Dispose(disposing: false);
		}

		/// <summary>
		/// Creates the matching byte-to-float converter for the source's format.
		/// </summary>
		/// <exception cref="NotSupportedException">The format is neither supported PCM nor 32-bit IEEE float.</exception>
		public static ISampleSource CreateConverter(IWaveSource source)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
			int bitsPerSample = source.WaveFormat.BitsPerSample;
			if (source.WaveFormat.IsPCM())
			{
				return bitsPerSample switch
				{
					8 => new Pcm8BitToSample(source), 
					16 => new Pcm16BitToSample(source), 
					24 => new Pcm24BitToSample(source), 
					32 => new Pcm32BitToSample(source), 
					_ => throw new NotSupportedException("Waveformat is not supported. Invalid BitsPerSample value."), 
				};
			}
			if (source.WaveFormat.IsIeeeFloat() && bitsPerSample == 32)
			{
				return new IeeeFloatToSample(source);
			}
			throw new NotSupportedException("Waveformat is not supported. Invalid WaveformatTag.");
		}
	}
}
namespace CSCore.Streams
{
	/// <summary>
	/// Thread-safe decorator: serializes every member access to the wrapped source
	/// behind a single private lock, so multiple threads can share one source.
	/// </summary>
	public class SynchronizedWaveSource<TBaseSource, T> : IAggregator<T, TBaseSource>, IReadableAudioSource<T>, IAudioSource, IDisposable where TBaseSource : class, IReadableAudioSource<T>
	{
		private readonly object _lockObj = new object();

		private TBaseSource _baseSource;

		private bool _disposed;

		public WaveFormat WaveFormat
		{
			get
			{
				lock (_lockObj)
				{
					return BaseSource.WaveFormat;
				}
			}
		}

		public long Position
		{
			get
			{
				lock (_lockObj)
				{
					return BaseSource.Position;
				}
			}
			set
			{
				lock (_lockObj)
				{
					// Align to whole frames before seeking. Note: WaveFormat here
					// re-enters the lock (safe — C# monitors are reentrant).
					value -= value % WaveFormat.BlockAlign;
					BaseSource.Position = value;
				}
			}
		}

		public long Length
		{
			get
			{
				lock (_lockObj)
				{
					return BaseSource.Length;
				}
			}
		}

		public bool CanSeek
		{
			get
			{
				lock (_lockObj)
				{
					return BaseSource.CanSeek;
				}
			}
		}

		/// <summary>The wrapped source; never null once assigned.</summary>
		public TBaseSource BaseSource
		{
			get
			{
				lock (_lockObj)
				{
					return _baseSource;
				}
			}
			set
			{
				lock (_lockObj)
				{
					if (value == null)
					{
						throw new ArgumentNullException("value");
					}
					_baseSource = value;
				}
			}
		}

		public SynchronizedWaveSource(TBaseSource baseWaveSource)
		{
			BaseSource = baseWaveSource;
		}

		/// <summary>Reads from the wrapped source while holding the lock.</summary>
		public int Read(T[] buffer, int offset, int count)
		{
			lock (_lockObj)
			{
				return BaseSource.Read(buffer, offset, count);
			}
		}

		/// <summary>Unwraps the underlying source (bypassing further synchronization).</summary>
		public static explicit operator TBaseSource(SynchronizedWaveSource<TBaseSource, T> synchronizedWaveSource)
		{
			if (synchronizedWaveSource == null)
			{
				throw new ArgumentNullException("synchronizedWaveSource");
			}
			return synchronizedWaveSource.BaseSource;
		}

		protected void Dispose(bool disposing)
		{
			lock (_lockObj)
			{
				if (BaseSource != null)
				{
					BaseSource.Dispose();
				}
				_baseSource = null;
			}
		}

		public void Dispose()
		{
			lock (_lockObj)
			{
				if (!_disposed)
				{
					_disposed = true;
					Dispose(disposing: true);
					GC.SuppressFinalize(this);
				}
			}
		}

		// NOTE(review): taking a lock inside a finalizer is risky (finalizer-thread
		// blocking) and the monitor object is itself managed — confirm this pattern
		// is intended before changing it.
		~SynchronizedWaveSource()
		{
			lock (_lockObj)
			{
				Dispose(disposing: false);
			}
		}
	}
}
namespace CSCore
{
	/// <summary>
	/// An audio source whose data can be read into a buffer of element type <typeparamref name="T"/>
	/// (bytes for wave sources, floats for sample sources).
	/// </summary>
	public interface IReadableAudioSource<in T> : IAudioSource, IDisposable
	{
		/// <summary>
		/// Reads up to <paramref name="count"/> elements into <paramref name="buffer"/>
		/// starting at <paramref name="offset"/>; returns the number of elements read.
		/// </summary>
		int Read(T[] buffer, int offset, int count);
	}
	/// <summary>
	/// Something raw bytes can be written to (e.g. a wave writer).
	/// </summary>
	public interface IWriteable
	{
		/// <summary>Writes <paramref name="count"/> bytes from <paramref name="buffer"/> starting at <paramref name="offset"/>.</summary>
		void Write(byte[] buffer, int offset, int count);
	}
	/// <summary>
	/// Windows multimedia (winmm) API result codes; see <see cref="MmException.Try"/>.
	/// </summary>
	public enum MmResult
	{
		NoError = 0,
		Error = 1,
		BadDevice = 2,
		NotEnabled = 3,
		Allocated = 4,
		InvalidHandle = 5,
		NoDriver = 6,
		NoMemory = 7,
		NotSupported = 8,
		BadErrorNumber = 9,
		InvalidFlag = 10,
		InvalidParameter = 11,
		HandleBusy = 12,
		InvalidAlias = 13,
		BadDatabase = 14,
		KeyNotFound = 15,
		ReadError = 16,
		WriteError = 17,
		DeleteError = 18,
		ValueNotFound = 19,
		NoDriverCallback = 20,
		MoreData = 21,
		// Values deliberately jump from 21 to 32 here; 32+ appear to mirror the
		// waveform-audio specific (WAVERR_*) native error codes.
		BadFormat = 32,
		StillPlaying = 33,
		Unprepared = 34,
		Synchronous = 35
	}
	/// <summary>
	/// Event payload reporting that playback/recording stopped, optionally
	/// carrying the exception that caused the stop.
	/// </summary>
	public class StoppedEventArgs : EventArgs
	{
		private readonly Exception _exception;

		/// <summary>True when the stop was caused by an error.</summary>
		public virtual bool HasError
		{
			get { return _exception != null; }
		}

		/// <summary>The causing exception, or null for a regular stop.</summary>
		public virtual Exception Exception
		{
			get { return _exception; }
		}

		/// <summary>Creates args for a regular (non-error) stop.</summary>
		public StoppedEventArgs()
			: this(null)
		{
		}

		/// <summary>Creates args for a stop caused by <paramref name="exception"/> (may be null).</summary>
		public StoppedEventArgs(Exception exception)
		{
			_exception = exception;
		}
	}
	/// <summary>
	/// Converts between raw element counts (bytes or samples) and time, based on a
	/// <see cref="WaveFormat"/>. Two built-in converters cover wave (byte-counted)
	/// and sample (float-counted) sources.
	/// </summary>
	public abstract class TimeConverter
	{
		// Wave sources count raw bytes.
		internal class _WaveSourceTimeConverter : TimeConverter
		{
			public override long ToRawElements(WaveFormat waveFormat, TimeSpan timeSpan) =>
				waveFormat.MillisecondsToBytes(timeSpan.TotalMilliseconds);

			public override TimeSpan ToTimeSpan(WaveFormat waveFormat, long rawElements) =>
				TimeSpan.FromMilliseconds(waveFormat.BytesToMilliseconds(rawElements));
		}

		// Sample sources count samples, i.e. bytes divided by BytesPerSample.
		internal class _SampleSourceTimeConverter : TimeConverter
		{
			public override long ToRawElements(WaveFormat waveFormat, TimeSpan timeSpan) =>
				waveFormat.MillisecondsToBytes(timeSpan.TotalMilliseconds) / waveFormat.BytesPerSample;

			public override TimeSpan ToTimeSpan(WaveFormat waveFormat, long rawElements) =>
				TimeSpan.FromMilliseconds(waveFormat.BytesToMilliseconds(rawElements * waveFormat.BytesPerSample));
		}

		/// <summary>Shared converter for <see cref="ISampleSource"/>-like sources.</summary>
		public static readonly TimeConverter SampleSourceTimeConverter = new _SampleSourceTimeConverter();

		/// <summary>Shared converter for <see cref="IWaveSource"/>-like sources.</summary>
		public static readonly TimeConverter WaveSourceTimeConverter = new _WaveSourceTimeConverter();

		/// <summary>Converts a time span to a raw element count for the given format.</summary>
		public abstract long ToRawElements(WaveFormat waveFormat, TimeSpan timeSpan);

		/// <summary>Converts a raw element count to a time span for the given format.</summary>
		public abstract TimeSpan ToTimeSpan(WaveFormat waveFormat, long rawElements);
	}
	/// <summary>
	/// Declares which <see cref="TimeConverter"/> an audio-source type should use;
	/// read by <see cref="TimeConverterFactory"/>.
	/// </summary>
	[AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct | AttributeTargets.Interface, AllowMultiple = false)]
	public sealed class TimeConverterAttribute : Attribute
	{
		/// <summary>The converter type to instantiate; always derives from <see cref="TimeConverter"/>.</summary>
		public Type TimeConverterType { get; private set; }

		/// <summary>Constructor arguments passed when instantiating the converter.</summary>
		public object[] Args { get; set; }

		/// <summary>When true, a fresh converter instance is created per lookup instead of being cached.</summary>
		public bool ForceNewInstance { get; set; }

		public TimeConverterAttribute(Type timeConverterType)
		{
			if (timeConverterType is null)
			{
				throw new ArgumentNullException("timeConverterType");
			}
			bool isConverter = typeof(TimeConverter).IsAssignableFrom(timeConverterType);
			if (!isConverter)
			{
				throw new ArgumentException("Specified type is no time converter.", "timeConverterType");
			}
			TimeConverterType = timeConverterType;
		}
	}
	/// <summary>
	/// Singleton factory resolving the <see cref="TimeConverter"/> for an audio-source type:
	/// either from a <see cref="TimeConverterAttribute"/> on the type, or from converters
	/// registered for one of its base types/interfaces. Results are cached per source type.
	/// </summary>
	public sealed class TimeConverterFactory
	{
		private class CacheItem
		{
			public TimeConverter TimeConverter { get; set; }

			public TimeConverterAttribute TimeConverterAttribute { get; set; }

			public bool CreateNewInstance { get; set; }

			// Returns the cached instance, or a fresh one when the attribute asked
			// for ForceNewInstance.
			public TimeConverter GetTimeConverter()
			{
				if (CreateNewInstance)
				{
					return (TimeConverter)Activator.CreateInstance(TimeConverterAttribute.TimeConverterType, TimeConverterAttribute.Args);
				}
				return TimeConverter;
			}
		}

		private static readonly TimeConverterFactory _instance = new TimeConverterFactory();

		// Converters registered per base type/interface.
		private readonly Dictionary<Type, TimeConverter> _timeConverters;

		// Resolution results memoized per concrete source type.
		private readonly Dictionary<Type, CacheItem> _cache;

		public static TimeConverterFactory Instance => _instance;

		private TimeConverterFactory()
		{
			_timeConverters = new Dictionary<Type, TimeConverter>();
			_cache = new Dictionary<Type, CacheItem>();
			RegisterTimeConverterForSourceType<IWaveSource>(TimeConverter.WaveSourceTimeConverter);
			RegisterTimeConverterForSourceType<ISampleSource>(TimeConverter.SampleSourceTimeConverter);
		}

		/// <summary>Registers a converter for the source type <typeparamref name="TSource"/>.</summary>
		/// <exception cref="ArgumentException">A converter is already registered for that type.</exception>
		public void RegisterTimeConverterForSourceType<TSource>(TimeConverter timeConverter) where TSource : IAudioSource
		{
			if (timeConverter == null)
			{
				throw new ArgumentNullException("timeConverter");
			}
			Type typeFromHandle = typeof(TSource);
			if (_timeConverters.ContainsKey(typeFromHandle))
			{
				throw new ArgumentException("A timeconverter for the same source type got already registered.");
			}
			_timeConverters.Add(typeFromHandle, timeConverter);
		}

		/// <summary>Removes the converter registered for <typeparamref name="TSource"/>.</summary>
		/// <exception cref="ArgumentException">No converter is registered for that type.</exception>
		public void UnregisterTimeConverter<TSource>() where TSource : IAudioSource
		{
			Type typeFromHandle = typeof(TSource);
			if (!_timeConverters.ContainsKey(typeFromHandle))
			{
				throw new ArgumentException("There is no timeconverter registered for the specified source type.");
			}
			_timeConverters.Remove(typeFromHandle);
		}

		/// <summary>Resolves the converter for the runtime type of <paramref name="source"/>.</summary>
		public TimeConverter GetTimeConverterForSource<TSource>(TSource source) where TSource : class, IAudioSource
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
			return GetTimeConverterForSourceType(source.GetType());
		}

		/// <summary>Resolves the converter for the compile-time type <typeparamref name="TSource"/>.</summary>
		public TimeConverter GetTimeConverterForSource<TSource>() where TSource : IAudioSource
		{
			return GetTimeConverterForSourceType(typeof(TSource));
		}

		/// <summary>
		/// Resolves (and caches) the converter for <paramref name="sourceType"/>.
		/// </summary>
		/// <exception cref="ArgumentException">
		/// The type is no audio source, no converter is registered for it, or multiple
		/// converters match and no attribute disambiguates.
		/// </exception>
		public TimeConverter GetTimeConverterForSourceType(Type sourceType)
		{
			if (sourceType == null)
			{
				throw new ArgumentNullException("sourceType");
			}
			if (!typeof(IAudioSource).IsAssignableFrom(sourceType))
			{
				throw new ArgumentException("Specified type is no AudioSource.", "sourceType");
			}
			// PERF: single TryGetValue instead of ContainsKey followed by the indexer
			// (the original performed a double dictionary lookup on the hot path).
			if (_cache.TryGetValue(sourceType, out CacheItem cachedItem))
			{
				return cachedItem.GetTimeConverter();
			}
			TimeConverterAttribute timeConverterAttribute = sourceType.GetCustomAttributes(typeof(TimeConverterAttribute), inherit: false).FirstOrDefault() as TimeConverterAttribute;
			TimeConverter timeConverter = null;
			try
			{
				if (timeConverterAttribute == null)
				{
					// No attribute: fall back to converters registered for any base
					// type/interface of the source type. The match must be unique.
					Type[] array = (from x in GetTypes(sourceType)
						where _timeConverters.ContainsKey(x)
						select x).ToArray();
					if (array.Length == 1)
					{
						timeConverter = _timeConverters[array.First()];
						return timeConverter;
					}
					if (array.Length == 0)
					{
						throw new ArgumentException("No registered time converter for the specified source type was found.");
					}
					throw new ArgumentException("Multiple possible time converters, for the specified source type, were found. Specify which time converter to use, through the TimeConverterAttribute.");
				}
				Type timeConverterType = timeConverterAttribute.TimeConverterType;
				timeConverter = (TimeConverter)Activator.CreateInstance(timeConverterType, timeConverterAttribute.Args);
				return timeConverter;
			}
			finally
			{
				// Memoize only successful resolutions (timeConverter stays null on throw).
				if (timeConverter != null)
				{
					CacheItem value = ((timeConverterAttribute != null) ? new CacheItem
					{
						CreateNewInstance = timeConverterAttribute.ForceNewInstance,
						TimeConverterAttribute = timeConverterAttribute,
						TimeConverter = (timeConverterAttribute.ForceNewInstance ? null : timeConverter)
					} : new CacheItem
					{
						CreateNewInstance = false,
						TimeConverter = timeConverter
					});
					_cache[sourceType] = value;
				}
			}
		}

		/// <summary>Drops all memoized resolutions.</summary>
		public void ClearCache()
		{
			_cache.Clear();
		}

		// Enumerates all base types and interfaces of a type (excluding object).
		private IEnumerable<Type> GetTypes(Type type)
		{
			if (!(type.BaseType == typeof(object)))
			{
				return Enumerable.Repeat(type.BaseType, 1).Concat<Type>(type.GetInterfaces()).Concat(GetTypes(type.BaseType))
					.Distinct();
			}
			return type.GetInterfaces();
		}
	}
	/// <summary>A sample source that wraps (aggregates) another <see cref="ISampleSource"/>.</summary>
	public interface ISampleAggregator : ISampleSource, IReadableAudioSource<float>, IAudioSource, IDisposable, IAggregator<float, ISampleSource>
	{
	}
	/// <summary>
	/// A readable source that decorates an underlying source of the same element type
	/// (the decorator pattern used by the stream classes in this library).
	/// </summary>
	public interface IAggregator<in T, out TAggregator> : IReadableAudioSource<T>, IAudioSource, IDisposable where TAggregator : IReadableAudioSource<T>
	{
		/// <summary>The wrapped underlying source.</summary>
		TAggregator BaseSource { get; }
	}
	/// <summary>A wave source that wraps (aggregates) another <see cref="IWaveSource"/>.</summary>
	public interface IWaveAggregator : IWaveSource, IReadableAudioSource<byte>, IAudioSource, IDisposable, IAggregator<byte, IWaveSource>
	{
	}
	/// <summary>
	/// Exception wrapping a Windows multimedia (winmm) result code; see <see cref="MmResult"/>.
	/// </summary>
	[Serializable]
	public class MmException : Exception
	{
		/// <summary>The multimedia result code that caused this exception.</summary>
		public MmResult Result { get; private set; }

		[Obsolete("Use the Function property instead.")]
		public string Target { get; private set; }

		/// <summary>Name of the native function that returned <see cref="Result"/>.</summary>
		public string Function => Target;

		/// <summary>
		/// Throws an <see cref="MmException"/> when <paramref name="result"/> is not
		/// <see cref="MmResult.NoError"/>.
		/// </summary>
		public static void Try(MmResult result, string function)
		{
			// Idiom: compare against the named enum member rather than the raw
			// literal 0 the original used.
			if (result != MmResult.NoError)
			{
				throw new MmException(result, function);
			}
		}

		public MmException(MmResult result, string function)
		{
			Result = result;
			Target = function;
		}

		/// <summary>Deserialization constructor; restores <see cref="Result"/> and <see cref="Target"/>.</summary>
		public MmException(SerializationInfo info, StreamingContext context)
			: base(info, context)
		{
			if (info == null)
			{
				throw new ArgumentNullException("info");
			}
			Target = info.GetString("Target");
			Result = (MmResult)info.GetInt32("Result");
		}

		public override void GetObjectData(SerializationInfo info, StreamingContext context)
		{
			base.GetObjectData(info, context);
			info.AddValue("Target", Target);
			info.AddValue("Result", (int)Result);
		}
	}
	public static class Extensions
	{
		/// <summary>Returns the total length of <paramref name="source"/> as a <see cref="TimeSpan"/>.</summary>
		public static TimeSpan GetLength(this IAudioSource source) => source.GetTime(source.Length);

		/// <summary>Returns the current position of <paramref name="source"/> as a <see cref="TimeSpan"/>.</summary>
		public static TimeSpan GetPosition(this IAudioSource source) => source.GetTime(source.Position);

		/// <summary>
		/// Seeks <paramref name="source"/> to the given non-negative time position.
		/// </summary>
		/// <exception cref="ArgumentNullException"><paramref name="source"/> is null.</exception>
		/// <exception cref="ArgumentOutOfRangeException"><paramref name="position"/> is negative.</exception>
		public static void SetPosition(this IAudioSource source, TimeSpan position)
		{
			if (source is null)
			{
				throw new ArgumentNullException("source");
			}
			if (position.TotalMilliseconds < 0.0)
			{
				throw new ArgumentOutOfRangeException("position");
			}
			source.Position = source.GetRawElements(position);
		}

		/// <summary>
		/// Converts <paramref name="elementCount"/> raw elements (bytes or samples,
		/// depending on the source type) into a <see cref="TimeSpan"/>.
		/// </summary>
		/// <exception cref="ArgumentNullException"><paramref name="source"/> is null.</exception>
		/// <exception cref="ArgumentOutOfRangeException"><paramref name="elementCount"/> is negative.</exception>
		public static TimeSpan GetTime(this IAudioSource source, long elementCount)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
			if (elementCount < 0)
			{
				// BUGFIX: was ArgumentNullException, which is the wrong exception type
				// for a negative numeric value (GetMilliseconds already does this correctly).
				throw new ArgumentOutOfRangeException("elementCount");
			}
			return TimeConverterFactory.Instance.GetTimeConverterForSource(source).ToTimeSpan(source.WaveFormat, elementCount);
		}

		/// <summary>
		/// Converts <paramref name="elementCount"/> raw elements of <paramref name="source"/>
		/// into whole milliseconds.
		/// </summary>
		/// <exception cref="ArgumentNullException"><paramref name="source"/> is null.</exception>
		/// <exception cref="ArgumentOutOfRangeException"><paramref name="elementCount"/> is negative.</exception>
		public static long GetMilliseconds(this IAudioSource source, long elementCount)
		{
			if (source is null)
			{
				throw new ArgumentNullException("source");
			}
			if (elementCount < 0)
			{
				throw new ArgumentOutOfRangeException("elementCount");
			}
			TimeSpan time = source.GetTime(elementCount);
			return (long)time.TotalMilliseconds;
		}

		/// <summary>
		/// Converts a <see cref="TimeSpan"/> into raw elements (bytes or samples,
		/// depending on the source type).
		/// </summary>
		/// <exception cref="ArgumentNullException"><paramref name="source"/> is null.</exception>
		public static long GetRawElements(this IAudioSource source, TimeSpan timespan)
		{
			if (source is null)
			{
				throw new ArgumentNullException("source");
			}
			TimeConverter converter = TimeConverterFactory.Instance.GetTimeConverterForSource(source);
			return converter.ToRawElements(source.WaveFormat, timespan);
		}

		/// <summary>
		/// Converts a non-negative millisecond count into raw elements.
		/// </summary>
		/// <exception cref="ArgumentNullException"><paramref name="source"/> is null.</exception>
		/// <exception cref="ArgumentOutOfRangeException"><paramref name="milliseconds"/> is negative.</exception>
		public static long GetRawElements(this IAudioSource source, long milliseconds)
		{
			if (source is null)
			{
				throw new ArgumentNullException("source");
			}
			if (milliseconds < 0)
			{
				throw new ArgumentOutOfRangeException("milliseconds");
			}
			return source.GetRawElements(TimeSpan.FromMilliseconds(milliseconds));
		}

		public static void WriteToFile(this IWaveSource source, string filename)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
			using FileStream stream = File.OpenWrite(filename);
			source.WriteToWaveStream(stream);
		}

		public static void WriteToWaveStream(this IWaveSource source, Stream stream)
		{
			if (source == null)
			{
				throw new ArgumentNullException("source");
			}
			if (stream == null)
			{
				throw new ArgumentNullException("stream");
			}
			if (!stream.CanWrite)
			{
				throw new ArgumentException("Stream is not writeable.", "stream");
			}
			using WaveWriter waveWriter = new WaveWriter(stream, source.WaveFormat);
			byte[] array = new byte[source.WaveFormat.BytesPerSecond];
			int count;
			while ((count = source.Read(array, 0, array.Length)) > 0)
			{
				waveWriter.Write(array, 0, count);
			}
		}

		public static void WriteToStream(this IWaveSource waveSource, Stream stream)
		{
			if (waveSource == null)
			{
				throw new ArgumentNullException("waveSource");
			}
			if (stream == null)
			{
				throw new ArgumentNullException("stream");
			}
			if (!stream.CanWrite)
			{
				throw new ArgumentException("Stream is not writeable.", "stream");
			}
			byte[] array = new byte[waveSource.WaveFormat.BytesPerSecond];
			int count;
			while ((count = waveSource.Read(array, 0, array.Length)) > 0)
			{
				stream.Write(array, 0, count);
			}
		}

		public static T[] CheckBuffer<T>(this T[] inst, long size, bool exactSize = false)
		{
			if (inst == null || (!exactSize && inst.Length < size) || (exactSize && inst.Length != size))
			{
				return new T[size];
			}
			return inst;
		}

		internal static byte[] ReadBytes(this IWaveSource waveSource, int count)
		{
			if (waveSource == null)
			{
				throw new ArgumentNullException("waveSource");
			}
			count -= count % waveSource.WaveFormat.BlockAlign;
			if (count <= 0)
			{
				throw new ArgumentOutOfRangeException("count");
			}
			byte[] array = new byte[count];
			int num = waveSource.Read(array, 0, array.Length);
			if (num < count)
			{
				Array.Resize(ref array, num);
			}
			return array;
		}

		internal static bool IsClosed(this Stream stream)
		{
			if (!stream.CanRead)
			{
				return !stream.CanWrite;
			}
			return false;
		}

		internal static bool IsEndOfStream(this Stream stream)
		{
			return stream.Position == stream.Length;
		}

		internal static int LowWord(this int number)
		{
			return number & 0xFFFF;
		}

		internal static int LowWord(this int number, int newValue)
		{
			return (int)((number & 0xFFFF0000u) + (newValue & 0xFFFF));
		}

		internal static int HighWord(this int number)
		{
			return (int)(number & 0xFFFF0000u);
		}

		internal static int HighWord(this int number, int newValue)
		{
			return (number & 0xFFFF) + (newValue << 16);
		}

		internal static uint LowWord(this uint number)
		{
			return number & 0xFFFFu;
		}

		internal static uint LowWord(this uint number, int newValue)
		{
			return (uint)((uint)((int)number & -65536) + (newValue & 0xFFFF));
		}

		internal static uint HighWord(this uint number)
		{
			return number & 0xFFFF0000u;
		}

		internal static uint HighWord(this uint number, int newValue)
		{
			return (uint)((number & 0xFFFF) + (newValue << 16));
		}

		internal static Guid GetGuid(this object obj)
		{
			return obj.GetType().GUID;
		}

		internal static void WaitForExit(this Thread thread)
		{
			if (thread != null)
			{
				if (thread == Thread.CurrentThread)
				{
					throw new InvalidOperationException("Deadlock detected.");
				}
				thread.Join();
			}
		}

		internal static bool WaitForExit(this Thread thread, int timeout)
		{
			if (thread == null)
			{
				return true;
			}
			if (thread == Thread.CurrentThread)
			{
				throw new InvalidOperationException("Deadlock detected.");
			}
			return thread.Join(timeout);
		}

		internal static bool IsPCM(this WaveFormat waveFormat)
		{
			if (waveFormat == null)
			{
				throw new ArgumentNullException("waveFormat");
			}
			if (waveFormat is WaveFormatExtensible)
			{
				return ((WaveFormatExtensible)waveFormat).SubFormat == AudioSubTypes.Pcm;
			}
			return waveFormat.WaveFormatTag == AudioEncoding.Pcm;
		}

		internal static bool IsIeeeFloat(this WaveFormat waveFormat)
		{
			if (waveFormat == null)
			{
				throw new ArgumentNullException("waveFormat");
			}
			if (waveFormat is WaveFormatExtensible)
			{
				return ((WaveFormatExtensible)waveFormat).SubFormat == AudioSubTypes.IeeeFloat;
			}
			return waveFormat.WaveFormatTag == AudioEncoding.IeeeFloat;
		}

		internal static AudioEncoding GetWaveFormatTag(this WaveFormat waveFormat)
		{
			if (waveFormat is WaveFormatExtensible)
			{
				return AudioSubTypes.EncodingFromSubType(((WaveFormatExtensible)waveFormat).SubFormat);
			}
			return waveFormat.WaveFormatTag;
		}

		public static bool WaitForStopped(this ISoundOut soundOut, int millisecondsTimeout)
		{
			if (soundOut == null)
			{
				throw new ArgumentNullException("soundOut");
			}
			if (millisecondsTimeout < -1)
			{
				throw new ArgumentOutOfRangeException("millisecondsTimeout");
			}
			if (soundOut.PlaybackState == PlaybackState.Stopped)
			{
				return true;
			}
			AutoResetEvent waitHandle = new AutoResetEvent(initialState: false);
			try
			{
				EventHandler<PlaybackStoppedEventArgs> value = delegate
				{
					waitHandle.Set();
				};
				soundOut.Stopped += value;
				bool result = waitHandle.WaitOne(millisecondsTimeout);
				soundOut.Stopped -= value;
				return result;
			}
			finally
			{
				if (waitHandle != null)
				{
					((IDisposable)waitHandle).Dispose();
				}
			}
		}

		public static void WaitForStopped(this ISoundOut soundOut)
		{
			soundOut.WaitForStopped(-1);
		}

		internal static void SetValueForValueType<T>(this FieldInfo field, ref T item, object value) where T : struct
		{
			field.SetValueDirect(__makeref(item), value);
		}
	}
}
namespace CSCore.SoundOut
{
	public class PlaybackStoppedEventArgs : StoppedEventArgs
	{
		public PlaybackStoppedEventArgs()
			: this(null)
		{
		}

		public PlaybackStoppedEventArgs(Exception exception)