using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Versioning;
using System.Security;
using System.Security.Permissions;
using System.Speech.Recognition;
using BepInEx;
using BepInEx.Logging;
using HarmonyLib;
using UnityEngine;
using VoiceRecognitionAPI.Patches;
[assembly: CompilationRelaxations(8)]
[assembly: RuntimeCompatibility(WrapNonExceptionThrows = true)]
[assembly: Debuggable(DebuggableAttribute.DebuggingModes.Default | DebuggableAttribute.DebuggingModes.DisableOptimizations | DebuggableAttribute.DebuggingModes.IgnoreSymbolStoreSequencePoints | DebuggableAttribute.DebuggingModes.EnableEditAndContinue)]
[assembly: TargetFramework(".NETStandard,Version=v2.1", FrameworkDisplayName = ".NET Standard 2.1")]
[assembly: IgnoresAccessChecksTo("")]
[assembly: AssemblyCompany("VoiceRecognitionAPI")]
[assembly: AssemblyConfiguration("Debug")]
[assembly: AssemblyFileVersion("1.0.0.0")]
[assembly: AssemblyInformationalVersion("1.0.0+200ea5b701b6b31c1d2c4ebe14e74e82882f2439")]
[assembly: AssemblyProduct("VoiceRecognitionAPI")]
[assembly: AssemblyTitle("VoiceRecognitionAPI")]
[assembly: SecurityPermission(SecurityAction.RequestMinimum, SkipVerification = true)]
[assembly: AssemblyVersion("1.0.0.0")]
[module: UnverifiableCode]
namespace VoiceRecognitionAPI
{
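// Thrown when a mod tries to register voice phrases after the speech recognition engine has already been started.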
public class VoiceRecognitionEngineAlreadyStarted : Exception
{
public VoiceRecognitionEngineAlreadyStarted()
{
}
public VoiceRecognitionEngineAlreadyStarted(string message)
: base(message)
{
}
}
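// Singleton wrapper around System.Speech's SpeechRecognitionEngine. Built once all mods have
// registered their phrases; recognized speech is forwarded to Voice.VoiceRecognition.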
internal class SpeechHandler
{
private static object recognition;
public static SpeechHandler instance { get; private set; }
internal SpeechHandler()
{
if (instance != null)
{
return;
}
instance = this;
if (Voice.phrases.Count == 0)
{
VoicePlugin.logger.LogWarning("This is awkward, no mods registered any voice phrases. Cancelling creation of the speech recognition engine!");
instance = null;
return;
}
VoicePlugin.logger.LogInfo("Setting up the recognition engine.");
SpeechRecognitionEngine engine = new SpeechRecognitionEngine();
recognition = engine;
try
{
engine.SetInputToDefaultAudioDevice();
}
catch (Exception ex) when (ex is PlatformNotSupportedException || ex is COMException)
{
VoicePlugin.logger.LogError("Failed to create recognition engine. This is most likely due to your language not supporting Microsoft's speech recognition!\n" + ex);
instance = null;
return;
}
foreach (string phrase in Voice.phrases)
{
VoicePlugin.logger.LogDebug("Registering phrase: " + phrase);
}
GrammarBuilder grammarBuilder = new GrammarBuilder(new Choices(Voice.phrases.ToArray()))
{
Culture = engine.RecognizerInfo.Culture
};
VoicePlugin.logger.LogInfo("Almost done setting up...");
engine.LoadGrammar(new Grammar(grammarBuilder));
// Single-shot recognition; RecognizeCompletedHandler re-arms the engine after each pass.
engine.RecognizeCompleted += RecognizeCompletedHandler;
engine.RecognizeAsync();
VoicePlugin.logger.LogInfo("Speech Recognition Engine is ready to go!");
}
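// Fired after each recognition pass: re-arms the engine, then forwards successful results to Voice.VoiceRecognition.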
private void RecognizeCompletedHandler(object sender, RecognizeCompletedEventArgs e)
{
VoicePlugin.logger.LogDebug("Speech Engine event fired.");
// Restart listening for the next phrase.
((SpeechRecognitionEngine)recognition).RecognizeAsync();
if (e.Error != null)
{
VoicePlugin.logger.LogError("An error occurred during recognition: " + e.Error);
}
else if (e.InitialSilenceTimeout || e.BabbleTimeout)
{
VoicePlugin.logger.LogDebug("Silence/babble timeout.");
}
else if (e.Result != null)
{
Voice.VoiceRecognition(e);
}
else
{
VoicePlugin.logger.LogDebug("No result.");
}
}
}
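// Public API for mods: register phrases and callbacks here from your plugin's Awake(), before the
// recognition engine starts. Minimal usage sketch (illustrative only; the phrase and logging call are hypothetical):
//   Voice.ListenForPhrase("hello", message => Debug.Log("Heard: " + message));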
public static class Voice
{
public class VoiceRecognitionEventArgs : EventArgs
{
public string Message;
public float Confidence;
}
public const float DEFAULT_MIN_CONFIDENCE = 0.2f;
internal static List<string> phrases;
public static bool RECOGNITION_SETUP { get; internal set; }
internal static event EventHandler<VoiceRecognitionEventArgs> VoiceRecognitionFinishedEvent;
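// Like ListenForPhrases, but hands the raw event args (message and confidence) to the callback,
// applies no confidence filtering, and does not guard against the engine already being started.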
public static EventHandler<VoiceRecognitionEventArgs> CustomListenForPhrases(string[] phrases, EventHandler<VoiceRecognitionEventArgs> callback)
{
Voice.phrases.AddRange(phrases);
EventHandler<VoiceRecognitionEventArgs> eventHandler = delegate(object __, VoiceRecognitionEventArgs args)
{
if (phrases.Contains(args.Message))
{
callback(__, args);
}
};
VoiceRecognitionFinishedEvent += eventHandler;
return eventHandler;
}
public static EventHandler<VoiceRecognitionEventArgs> ListenForPhrase(string phrase, Action<string> callback)
{
return ListenForPhrase(phrase, 0.2f, callback);
}
public static EventHandler<VoiceRecognitionEventArgs> ListenForPhrase(string phrase, float minConfidence, Action<string> callback)
{
return ListenForPhrases(new string[1] { phrase }, minConfidence, callback);
}
public static EventHandler<VoiceRecognitionEventArgs> ListenForPhrases(string[] phrases, Action<string> callback)
{
return ListenForPhrases(phrases, 0.2f, callback);
}
public static EventHandler<VoiceRecognitionEventArgs> ListenForPhrases(string[] phrases, float minConfidence, Action<string> callback)
{
if (RECOGNITION_SETUP)
{
throw new VoiceRecognitionEngineAlreadyStarted("The voice recognition engine was already started. If you are a developer, make sure to set up your voice recognition phrases in Awake().");
}
Voice.phrases.AddRange(phrases);
EventHandler<VoiceRecognitionEventArgs> eventHandler = delegate(object __, VoiceRecognitionEventArgs args)
{
if (phrases.Contains(args.Message) && args.Confidence >= minConfidence)
{
callback(args.Message);
}
};
VoiceRecognitionFinishedEvent += eventHandler;
return eventHandler;
}
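// Unsubscribes a handler previously returned by ListenForPhrase, ListenForPhrases or CustomListenForPhrases.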
public static void StopListeningForPhrase(EventHandler<VoiceRecognitionEventArgs> callback)
{
VoiceRecognitionFinishedEvent -= callback;
}
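// Called by SpeechHandler with the engine's result; converts it into VoiceRecognitionEventArgs
// and raises VoiceRecognitionFinishedEvent for all registered listeners.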
internal static void VoiceRecognition(RecognizeCompletedEventArgs e)
{
VoiceRecognitionEventArgs voiceRecognitionEventArgs = new VoiceRecognitionEventArgs();
voiceRecognitionEventArgs.Message = e.Result.Text;
voiceRecognitionEventArgs.Confidence = e.Result.Confidence;
try
{
Voice.VoiceRecognitionFinishedEvent(VoicePlugin.instance, voiceRecognitionEventArgs);
}
catch (Exception ex)
{
VoicePlugin.logger.LogError((object)("Something failed to do something " + ex.Message + "\n" + ex.StackTrace));
}
}
static Voice()
{
Voice.VoiceRecognitionFinishedEvent = delegate(object __, VoiceRecognitionEventArgs args)
{
VoicePlugin.logger.LogDebug("Recognized: \"" + args.Message + "\" with a confidence of " + args.Confidence);
};
phrases = new List<string>();
}
}
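// BepInEx entry point: sets up logging, installs an AssemblyResolve hook for dependency assemblies
// embedded as manifest resources, and applies the Harmony patch that creates the SpeechHandler.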
[BepInPlugin("me.loaforc.voicerecognitionapi", "VoiceRecognitionAPI", "1.2.1")]
public class VoicePlugin : BaseUnityPlugin
{
public const string modGUID = "me.loaforc.voicerecognitionapi";
public const string modName = "VoiceRecognitionAPI";
public const string modVersion = "1.2.1";
private static readonly Harmony harmony = new Harmony("me.loaforc.voicerecognitionapi");
internal static VoicePlugin instance;
internal static ManualLogSource logger;
private void Awake()
{
if (instance != null)
{
return;
}
instance = this;
logger = Logger.CreateLogSource("me.loaforc.voicerecognitionapi");
// Resolve dependency assemblies embedded as VoiceRecognitionAPI.Resources.*.dll manifest resources.
AppDomain.CurrentDomain.AssemblyResolve += delegate(object sender, ResolveEventArgs args)
{
logger.LogDebug("Importing " + args.Name);
string text = "VoiceRecognitionAPI.Resources." + new AssemblyName(args.Name).Name + ".dll";
logger.LogDebug("Located at: " + text);
if (Assembly.GetExecutingAssembly().GetManifestResourceInfo(text) != null)
{
Stream manifestResourceStream = Assembly.GetExecutingAssembly().GetManifestResourceStream(text);
logger.LogDebug("Found file! Length: " + manifestResourceStream.Length);
byte[] array = new byte[manifestResourceStream.Length];
manifestResourceStream.Read(array, 0, array.Length);
try
{
Assembly assembly = Assembly.Load(array);
logger.LogDebug("Loaded " + assembly.FullName);
return assembly;
}
catch (Exception ex)
{
logger.LogError("Failed to load assembly: \n" + ex);
return null;
}
}
return null;
};
logger.LogInfo("Applying Patches");
harmony.PatchAll(typeof(NetworkVoiceHandlerPatch));
logger.LogInfo("VoiceRecognitionAPI:1.2.1 has successfully loaded!");
}
}
}
namespace VoiceRecognitionAPI.Patches
{
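// Postfix on the game's NetworkVoiceHandler.Start: constructs the SpeechHandler once the game's voice
// handler has started, by which point mods should have registered their phrases in Awake().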
[HarmonyPatch(typeof(NetworkVoiceHandler))]
internal class NetworkVoiceHandlerPatch
{
[HarmonyPostfix]
[HarmonyPatch("Start")]
internal static void SetupRecognitionEngine()
{
new SpeechHandler();
}
}
}
namespace System.Runtime.CompilerServices
{
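// The [IgnoresAccessChecksTo] assembly attribute above only needs this type to exist by name,
// so an internal re-declaration suffices; this is a common pattern when compiling against publicized assemblies.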
[AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)]
internal sealed class IgnoresAccessChecksToAttribute : Attribute
{
public IgnoresAccessChecksToAttribute(string assemblyName)
{
}
}
}