using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Versioning;
using System.Security;
using System.Security.Permissions;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using BepInEx;
using BepInEx.Configuration;
using BepInEx.Logging;
using ESpeakWrapper;
using HarmonyLib;
using Microsoft.CodeAnalysis;
using Photon.Pun;
using Unity.VisualScripting;
using UnityEngine;
[assembly: CompilationRelaxations(8)]
[assembly: RuntimeCompatibility(WrapNonExceptionThrows = true)]
[assembly: Debuggable(DebuggableAttribute.DebuggingModes.Default | DebuggableAttribute.DebuggingModes.DisableOptimizations | DebuggableAttribute.DebuggingModes.IgnoreSymbolStoreSequencePoints | DebuggableAttribute.DebuggingModes.EnableEditAndContinue)]
[assembly: TargetFramework(".NETStandard,Version=v2.1", FrameworkDisplayName = ".NET Standard 2.1")]
[assembly: IgnoresAccessChecksTo("Assembly-CSharp-firstpass")]
[assembly: IgnoresAccessChecksTo("Assembly-CSharp")]
[assembly: IgnoresAccessChecksTo("Autodesk.Fbx")]
[assembly: IgnoresAccessChecksTo("Facepunch.Steamworks.Win64")]
[assembly: IgnoresAccessChecksTo("FbxBuildTestAssets")]
[assembly: IgnoresAccessChecksTo("Klattersynth")]
[assembly: IgnoresAccessChecksTo("Photon3Unity3D")]
[assembly: IgnoresAccessChecksTo("PhotonChat")]
[assembly: IgnoresAccessChecksTo("PhotonRealtime")]
[assembly: IgnoresAccessChecksTo("PhotonUnityNetworking")]
[assembly: IgnoresAccessChecksTo("PhotonUnityNetworking.Utilities")]
[assembly: IgnoresAccessChecksTo("PhotonVoice.API")]
[assembly: IgnoresAccessChecksTo("PhotonVoice")]
[assembly: IgnoresAccessChecksTo("PhotonVoice.PUN")]
[assembly: IgnoresAccessChecksTo("SingularityGroup.HotReload.Runtime")]
[assembly: IgnoresAccessChecksTo("SingularityGroup.HotReload.Runtime.Public")]
[assembly: IgnoresAccessChecksTo("Sirenix.OdinInspector.Attributes")]
[assembly: IgnoresAccessChecksTo("Sirenix.Serialization.Config")]
[assembly: IgnoresAccessChecksTo("Sirenix.Serialization")]
[assembly: IgnoresAccessChecksTo("Sirenix.Utilities")]
[assembly: IgnoresAccessChecksTo("Unity.AI.Navigation")]
[assembly: IgnoresAccessChecksTo("Unity.Formats.Fbx.Runtime")]
[assembly: IgnoresAccessChecksTo("Unity.InputSystem")]
[assembly: IgnoresAccessChecksTo("Unity.InputSystem.ForUI")]
[assembly: IgnoresAccessChecksTo("Unity.Postprocessing.Runtime")]
[assembly: IgnoresAccessChecksTo("Unity.RenderPipelines.Core.Runtime")]
[assembly: IgnoresAccessChecksTo("Unity.RenderPipelines.Core.ShaderLibrary")]
[assembly: IgnoresAccessChecksTo("Unity.RenderPipelines.ShaderGraph.ShaderGraphLibrary")]
[assembly: IgnoresAccessChecksTo("Unity.TextMeshPro")]
[assembly: IgnoresAccessChecksTo("Unity.Timeline")]
[assembly: IgnoresAccessChecksTo("Unity.VisualScripting.Antlr3.Runtime")]
[assembly: IgnoresAccessChecksTo("Unity.VisualScripting.Core")]
[assembly: IgnoresAccessChecksTo("Unity.VisualScripting.Flow")]
[assembly: IgnoresAccessChecksTo("Unity.VisualScripting.State")]
[assembly: IgnoresAccessChecksTo("UnityEngine.ARModule")]
[assembly: IgnoresAccessChecksTo("UnityEngine.NVIDIAModule")]
[assembly: IgnoresAccessChecksTo("UnityEngine.UI")]
[assembly: IgnoresAccessChecksTo("websocket-sharp")]
[assembly: AssemblyCompany("Lavighju")]
[assembly: AssemblyConfiguration("Debug")]
[assembly: AssemblyFileVersion("1.0.0.0")]
[assembly: AssemblyInformationalVersion("1.0.0")]
[assembly: AssemblyProduct("espeakTTS")]
[assembly: AssemblyTitle("espeakTTS")]
[assembly: SecurityPermission(SecurityAction.RequestMinimum, SkipVerification = true)]
[assembly: AssemblyVersion("1.0.0.0")]
[module: UnverifiableCode]
[module: RefSafetyRules(11)]
namespace Microsoft.CodeAnalysis
{
[CompilerGenerated]
[Microsoft.CodeAnalysis.Embedded]
internal sealed class EmbeddedAttribute : Attribute
{
}
}
namespace System.Runtime.CompilerServices
{
[CompilerGenerated]
[Microsoft.CodeAnalysis.Embedded]
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Property | AttributeTargets.Field | AttributeTargets.Event | AttributeTargets.Parameter | AttributeTargets.ReturnValue | AttributeTargets.GenericParameter, AllowMultiple = false, Inherited = false)]
internal sealed class NullableAttribute : Attribute
{
public readonly byte[] NullableFlags;
public NullableAttribute(byte P_0)
{
NullableFlags = new byte[1] { P_0 };
}
public NullableAttribute(byte[] P_0)
{
NullableFlags = P_0;
}
}
[CompilerGenerated]
[Microsoft.CodeAnalysis.Embedded]
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct | AttributeTargets.Method | AttributeTargets.Interface | AttributeTargets.Delegate, AllowMultiple = false, Inherited = false)]
internal sealed class NullableContextAttribute : Attribute
{
public readonly byte Flag;
public NullableContextAttribute(byte P_0)
{
Flag = P_0;
}
}
[CompilerGenerated]
[Microsoft.CodeAnalysis.Embedded]
[AttributeUsage(AttributeTargets.Module, AllowMultiple = false, Inherited = false)]
internal sealed class RefSafetyRulesAttribute : Attribute
{
public readonly int Version;
public RefSafetyRulesAttribute(int P_0)
{
Version = P_0;
}
}
}
namespace ESpeakWrapper
{
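// Thin managed wrapper around libespeak-ng.dll. Audio output is requested in "retrieval" mode,
// so synthesized samples are handed to EventHandler.Handle instead of being played by espeak itself.
// Typical flow (illustrative sketch; see espeakTTS.ThreadVoice for the real call sites):
//   Client.Initialize(dataPath);
//   Client.SetVoiceByName("en+m3");
//   Client.Speak("hello");
//   if (Client.VoiceFinished()) { byte[] wav = Client.PopVoice(); }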
public class Client
{
private enum AudioOutput
{
Playback,
Retrieval,
Synchronous,
SynchronousPlayback
}
private enum Error
{
EE_OK = 0,
EE_INTERNAL_ERROR = -1,
EE_BUFFER_FULL = 1,
EE_NOT_FOUND = 2
}
private enum PositionType
{
Character = 1,
Word,
Sentence
}
private enum Parameter
{
Rate = 1,
Volume = 2,
Pitch = 3,
Range = 4,
Punctuation = 5,
Capitals = 6,
WordGap = 7,
Intonation = 9
}
private enum ParameterType
{
Absolute,
Relative
}
[Flags]
private enum SpeechFlags
{
CharsUtf8 = 1,
SSML = 0x10
}
private static bool Initialized;
public static int sampleRate;
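// Initializes espeak-ng with the voice data at the given path and registers the synth callback.
// espeak_Initialize returns the sample rate in Hz on success, or -1 (EE_INTERNAL_ERROR) on failure.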
public static void Initialize(string path)
{
try
{
int num = espeak_Initialize(AudioOutput.Retrieval, 0, path, 0);
if (num == -1)
{
Debug.LogError((object)$"Could not initialize ESpeak. Maybe there is no espeak data at {path}?");
}
else
{
sampleRate = num;
}
}
catch (Exception ex)
{
Debug.LogError((object)ex);
}
espeak_SetSynthCallback(EventHandler.Handle);
Initialized = true;
}
public static bool SetRate(int rate)
{
if (rate < 80 || rate > 450)
{
Debug.LogError((object)"The rate must be between 80 and 450.");
}
Error result = espeak_SetParameter(Parameter.Rate, rate, ParameterType.Absolute);
return CheckResult(result);
}
public static bool SetWordgap(int wordgap)
{
if (wordgap < 0)
{
wordgap = 0;
}
Error result = espeak_SetParameter(Parameter.WordGap, wordgap, ParameterType.Absolute);
return CheckResult(result);
}
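// The synth callback queues one byte[] per finished utterance in EventHandler.audio_files;
// VoiceFinished and PopVoice let the scheduler thread poll and drain that queue under a mutex.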
public static bool VoiceFinished()
{
bool result = false;
EventHandler.audio_files_mutex.WaitOne();
if (EventHandler.audio_files.Count > 0)
{
result = true;
}
EventHandler.audio_files_mutex.ReleaseMutex();
return result;
}
public static byte[] PopVoice()
{
byte[] array = null;
EventHandler.audio_files_mutex.WaitOne();
if (EventHandler.audio_files.Count > 0)
{
array = EventHandler.audio_files[0];
EventHandler.audio_files.RemoveAt(0);
}
EventHandler.audio_files_mutex.ReleaseMutex();
return array;
}
public static bool SetPitch(int pitch)
{
if (pitch < 0)
{
pitch = 0;
}
if (pitch > 100)
{
pitch = 100;
}
Error result = espeak_SetParameter(Parameter.Pitch, pitch, ParameterType.Absolute);
return CheckResult(result);
}
private static bool CheckResult(Error result)
{
switch (result)
{
case Error.EE_OK:
return true;
case Error.EE_BUFFER_FULL:
return false;
case Error.EE_INTERNAL_ERROR:
Debug.LogError((object)"Internal error in ESpeak.");
return false;
default:
return false;
}
}
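// Per the espeak-ng API, espeak_Synth's size argument is the text size in bytes; the UTF-8 byte
// count (plus one for the terminator) is passed so that multi-byte languages are not truncated.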
public static bool Speak(string text)
{
Error result = espeak_Synth(text, Encoding.UTF8.GetByteCount(text) + 1, 0u, PositionType.Character, 0u, SpeechFlags.CharsUtf8, (UIntPtr)0u, (IntPtr)0);
return CheckResult(result);
}
public static bool SpeakSSML(string text)
{
Error result = espeak_Synth(text, Encoding.UTF8.GetByteCount(text) + 1, 0u, PositionType.Character, 0u, SpeechFlags.CharsUtf8 | SpeechFlags.SSML, (UIntPtr)0u, (IntPtr)0);
return CheckResult(result);
}
public static bool Stop()
{
Error result = espeak_Cancel();
return CheckResult(result);
}
public static bool SetVoiceByName(string name)
{
Error result = espeak_SetVoiceByName(name);
return CheckResult(result);
}
public static Voice GetCurrentVoice()
{
IntPtr ptr = espeak_GetCurrentVoice();
ESpeakVoice eSpeakVoice = (ESpeakVoice)Marshal.PtrToStructure(ptr, typeof(ESpeakVoice));
if (eSpeakVoice.Equals(default(ESpeakVoice)))
{
Debug.LogError((object)"eSpeak returned an empty voice object. Did you call one of the ESpeak.SetVoice*() functions?");
}
return new Voice
{
Name = eSpeakVoice.Name,
Languages = eSpeakVoice.Languages.Substring(1),
Priority = eSpeakVoice.Languages[0],
Identifier = eSpeakVoice.Identifier
};
}
[DllImport("libespeak-ng.dll", CharSet = CharSet.Auto)]
private static extern Error espeak_SetVoiceByName([MarshalAs(UnmanagedType.LPUTF8Str)] string name);
[DllImport("libespeak-ng.dll", CharSet = CharSet.Auto)]
private static extern Error espeak_SetParameter(Parameter parameter, int value, ParameterType type);
[DllImport("libespeak-ng.dll", CharSet = CharSet.Auto)]
private static extern IntPtr espeak_GetCurrentVoice();
[DllImport("libespeak-ng.dll", CharSet = CharSet.Auto)]
private static extern Error espeak_Synth([MarshalAs(UnmanagedType.LPUTF8Str)] string text, int size, uint startPosition = 0u, PositionType positionType = PositionType.Character, uint endPosition = 0u, SpeechFlags flags = SpeechFlags.CharsUtf8, UIntPtr uniqueIdentifier = default(UIntPtr), IntPtr userData = default(IntPtr));
[DllImport("libespeak-ng.dll", CharSet = CharSet.Auto)]
private static extern int espeak_Initialize(AudioOutput output, int bufferLength, string path, int options);
[DllImport("libespeak-ng.dll", CharSet = CharSet.Auto)]
private static extern Error espeak_Cancel();
[DllImport("libespeak-ng.dll", CharSet = CharSet.Auto)]
private static extern void espeak_SetSynthCallback(EventHandler.SynthCallback callback);
}
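// Managed mirror of espeak's espeak_VOICE structure. The Languages field begins with a one-byte
// priority followed by the language name, which GetCurrentVoice splits into Priority and Languages.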
internal struct ESpeakVoice
{
[MarshalAs(UnmanagedType.LPStr)]
public string Name;
[MarshalAs(UnmanagedType.LPStr)]
public string Languages;
[MarshalAs(UnmanagedType.LPStr)]
public string Identifier;
public char Gender;
public char Age;
public char Variant;
}
internal struct Event
{
public enum EventType
{
ListTerminated,
Word,
Sentence,
Mark,
Play,
End,
MessageTerminated,
Phoneme,
SetSampleRate
}
public EventType Type;
public uint UniqueIdentifier;
public int TextPosition;
public int Length;
public int AudioPosition;
public int Sample;
public IntPtr UserData;
public int Id;
}
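// Receives raw audio from espeak-ng. Each callback delivers a block of 16-bit samples; a
// bufferLength of 0 marks the end of an utterance, at which point the accumulated stream is
// pushed onto audio_files for the scheduler thread to pick up.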
internal class EventHandler
{
public delegate int SynthCallback(IntPtr wavePtr, int bufferLength, IntPtr eventsPtr);
private static MemoryStream Stream;
public static Mutex audio_files_mutex = new Mutex();
public static List<byte[]> audio_files = new List<byte[]>();
public static int Handle(IntPtr wavePtr, int bufferLength, IntPtr eventsPtr)
{
if (bufferLength == 0)
{
if (Stream != null)
{
Stream.Flush();
audio_files_mutex.WaitOne();
audio_files.Add(Stream.ToArray());
audio_files_mutex.ReleaseMutex();
Stream.Dispose();
Stream = null;
}
return 0;
}
WriteAudioToStream(wavePtr, bufferLength);
List<Event> list = MarshalEvents(eventsPtr);
foreach (Event item in list)
{
}
return 0;
}
private static List<Event> MarshalEvents(IntPtr eventsPtr)
{
List<Event> list = new List<Event>();
int num = Marshal.SizeOf(typeof(Event));
int num2 = 0;
while (true)
{
IntPtr ptr = new IntPtr(eventsPtr.ToInt64() + num * num2);
Event item = (Event)Marshal.PtrToStructure(ptr, typeof(Event));
if (item.Type == Event.EventType.ListTerminated)
{
break;
}
list.Add(item);
num2++;
}
return list;
}
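// bufferLength is a count of 16-bit samples, so bufferLength * 2 bytes are copied from the native buffer.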
private static int WriteAudioToStream(IntPtr wavePtr, int bufferLength)
{
if (wavePtr == IntPtr.Zero)
{
return 0;
}
if (Stream == null)
{
Stream = new MemoryStream();
InitializeStream();
}
byte[] array = new byte[bufferLength * 2];
Marshal.Copy(wavePtr, array, 0, array.Length);
Stream.Write(array, 0, array.Length);
return 0;
}
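// Writes a placeholder 44-byte WAV header (hard-coded 22050 Hz, mono, 16-bit) with zeroed chunk
// sizes. The consumer in espeakTTS.ThreadVoice treats the buffer as raw samples and silences the
// leading values, so the header itself is never audible.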
private static void InitializeStream()
{
Encoding aSCII = Encoding.ASCII;
Stream.Write(aSCII.GetBytes("RIFF"), 0, 4);
Stream.Write(BitConverter.GetBytes(0), 0, 4);
Stream.Write(aSCII.GetBytes("WAVEfmt "), 0, 8);
Stream.Write(BitConverter.GetBytes(16), 0, 4);
Stream.Write(BitConverter.GetBytes((short)1), 0, 2);
Stream.Write(BitConverter.GetBytes((short)1), 0, 2);
Stream.Write(BitConverter.GetBytes(22050), 0, 4);
Stream.Write(BitConverter.GetBytes(44100), 0, 4);
Stream.Write(BitConverter.GetBytes((short)2), 0, 2);
Stream.Write(BitConverter.GetBytes((short)16), 0, 2);
Stream.Write(aSCII.GetBytes("DATA"), 0, 4);
Stream.Write(BitConverter.GetBytes(0), 0, 4);
}
private static string PrintBytes(byte[] byteArray)
{
StringBuilder stringBuilder = new StringBuilder("new byte[] { ");
for (int i = 0; i < byteArray.Length; i++)
{
byte value = byteArray[i];
stringBuilder.Append(value);
if (i < byteArray.Length - 1)
{
stringBuilder.Append(", ");
}
}
stringBuilder.Append(" }");
return stringBuilder.ToString();
}
private static string ConvertHeadersToString(byte[] buffer)
{
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.AppendFormat("The stream length is {0}.\n", Stream.Length);
stringBuilder.Append(Encoding.ASCII.GetChars(buffer, 0, 4));
stringBuilder.Append(BitConverter.ToInt32(buffer, 4));
stringBuilder.Append(Encoding.ASCII.GetChars(buffer, 8, 8));
stringBuilder.Append(BitConverter.ToInt32(buffer, 16));
stringBuilder.Append(BitConverter.ToInt16(buffer, 20));
stringBuilder.Append(BitConverter.ToInt16(buffer, 22));
stringBuilder.Append(BitConverter.ToInt32(buffer, 24));
stringBuilder.Append(BitConverter.ToInt32(buffer, 28));
stringBuilder.Append(BitConverter.ToInt16(buffer, 32));
stringBuilder.Append(BitConverter.ToInt16(buffer, 34));
stringBuilder.Append(Encoding.ASCII.GetChars(buffer, 36, 4));
stringBuilder.Append(BitConverter.ToInt32(buffer, 40));
return stringBuilder.ToString();
}
}
public class Voice
{
public string Name;
public string Languages;
public int Priority;
public string Identifier;
public override string ToString()
{
return $"Name: {Name}, Languages: {Languages}, Identifier: {Identifier}, Priority: {Priority}";
}
}
}
namespace repo_espeak
{
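// Photon-aware component attached to every PlayerAvatar (see HarmonyPatches.AwakePost). It receives
// the mod's custom chat RPC and forwards the message, with the sender's voice settings, to espeakTTS.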
public class eSpeakPun : MonoBehaviour
{
public static eSpeakPun instance { get; private set; }
public PlayerAvatar playerAvatar { get; set; }
[PunRPC]
public void eSpeakChatMessageSendRPC(string _message, bool crouching, string lang, string variant, int speed, int pitch, bool natural, int gap)
{
if ((int)GameDirector.instance.currentState == 2)
{
espeakTTS.eSpeakTTSSpeakNow(_message, crouching, new eSpeakParameters(lang, variant, pitch, speed, gap), playerAvatar.voiceChat.ttsVoice, natural);
}
}
}
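// Per-message voice settings (language, variant, pitch, speed, word gap) sent along with each chat RPC.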
public struct eSpeakParameters
{
public string Language;
public string Variant;
public int Pitch;
public int Speed;
public int Gap;
public eSpeakParameters(string language, string variant, int pitch, int speed, int gap)
{
Language = language;
Variant = variant;
Pitch = pitch;
Speed = speed;
Gap = gap;
}
}
public class message
{
public string word;
public TTSVoice TTSInstance;
public eSpeakParameters parameters;
public float[] audio_buffer;
public bool whisper;
public message(string w, TTSVoice a, eSpeakParameters p, bool c = false, float[] ab = null)
{
word = w;
TTSInstance = a;
parameters = p;
audio_buffer = ab;
whisper = c;
}
}
public class PlayerTTS
{
public int player;
public long end_timestamp;
public PlayerTTS(int p, long e)
{
player = p;
end_timestamp = e;
}
}
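// Core TTS scheduler. schedule_play queues messages and spins up a background thread (ThreadVoice)
// that feeds them to espeak-ng and stores the synthesized samples; update(), called every frame from
// repo_espeak.Update(), turns finished buffers into AudioClips and plays them on each player's TTSVoice.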
public class espeakTTS
{
private static List<message> words = new List<message>();
private static List<PlayerTTS> player_tts_list = new List<PlayerTTS>();
private static Mutex words_mutex = new Mutex();
private static bool is_playing = false;
private static volatile bool is_tts_done = true;
public static Thread thread;
public static eSpeakParameters TTSParameters;
public static void init(string language, string variant, int pitch, int speed, int gap)
{
TTSParameters = new eSpeakParameters(language, variant, pitch, speed, gap);
try
{
Client.Initialize(Paths.PatcherPluginPath + "\\Lavighju-repo_espeak\\espeak-ng-data");
}
catch (Exception ex)
{
repo_espeak.Logger.LogError((object)ex);
}
}
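// Worker-thread loop: waits for the current utterance to finish, converts the returned 16-bit
// samples to floats (silencing the header and tail samples), then picks the next un-synthesized
// message, resets out-of-range parameters to defaults and asks espeak-ng to speak it. Exits once
// no queued message is left without an audio buffer.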
private static void ThreadVoice()
{
int num = -1;
while (!is_tts_done)
{
words_mutex.WaitOne();
try
{
if (is_playing)
{
if (Client.VoiceFinished())
{
byte[] array = Client.PopVoice();
float[] array2 = new float[array.Length / 2];
for (int j = 0; j < array2.Length; j++)
{
if (j < 20 || j > array2.Length - 11)
{
array2[j] = 0f;
}
else
{
array2[j] = (float)BitConverter.ToInt16(array, j * 2) / 32767f;
}
}
if (words.Count > 0 && words.Count > num && words[num] != null)
{
words[num].audio_buffer = array2;
}
is_playing = false;
}
}
else if (words.Count > 0)
{
num = words.FindIndex((message w) => w != null && w.audio_buffer == null);
if (num < 0)
{
is_tts_done = true;
}
else
{
is_playing = true;
if (words[num].parameters.Variant.ToLower() == "whisper")
{
words[num].parameters.Variant = "";
}
if (words[num].parameters.Pitch < 0 || words[num].parameters.Pitch > 99)
{
words[num].parameters.Pitch = 25;
}
if (words[num].parameters.Speed < 80 || words[num].parameters.Speed > 500)
{
words[num].parameters.Speed = 100;
}
if (!repo_espeak.natural_voice.Value || words[num].parameters.Gap < 10 || words[num].parameters.Gap > 1000)
{
words[num].parameters.Gap = 10;
}
string text = (words[num].whisper ? "whisper" : words[num].parameters.Variant);
Client.SetVoiceByName(words[num].parameters.Language + "+" + text);
Client.SetPitch(words[num].parameters.Pitch);
Client.SetRate(words[num].parameters.Speed);
Client.SetWordgap(words[num].parameters.Gap / 10);
Client.Speak(words[num].word);
}
}
else
{
is_tts_done = true;
}
words_mutex.ReleaseMutex();
}
catch (Exception ex)
{
words_mutex.ReleaseMutex();
resetTTS();
repo_espeak.Logger.LogError((object)"An error occured in the TTS Thread:");
repo_espeak.Logger.LogError((object)ex);
thread.Abort();
}
Thread.Sleep(8);
}
}
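// Runs on the Unity main thread (from repo_espeak.Update). For each message with a finished audio
// buffer it builds an AudioClip at espeak's sample rate, plays it as a one-shot on the owner's
// TTSVoice, and records a per-player end timestamp so consecutive clips do not overlap.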
public static void update()
{
try
{
words_mutex.WaitOne();
for (int i = 0; i < words.Count; i++)
{
if (words[i] != null)
{
bool flag = false;
long num = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds();
int player_id = ((Object)words[i].TTSInstance.playerAvatar).GetInstanceID();
int num2 = player_tts_list.FindIndex((PlayerTTS p) => p.player == player_id);
if (num2 < 0)
{
player_tts_list.Add(new PlayerTTS(player_id, 0L));
num2 = player_tts_list.Count - 1;
}
else if (num <= player_tts_list[num2].end_timestamp)
{
flag = true;
}
if (words[i].audio_buffer != null && !flag)
{
AudioClip val = AudioClip.Create("espeakTTS" + player_id, words[i].audio_buffer.Length, 1, Client.sampleRate, false);
val.SetData(words[i].audio_buffer, 0);
player_tts_list[num2].end_timestamp = num + (long)(val.length * 1000f);
words[i].TTSInstance.VoiceText(words[i].word, val.length);
words[i].TTSInstance.audioSource.PlayOneShot(val);
words[i] = null;
}
}
}
player_tts_list.RemoveAll((PlayerTTS p) => isPlayerEmpty(p.player));
if (player_tts_list.Count < 1 && words.Count > 0)
{
words.RemoveAll((message word) => word == null);
}
words_mutex.ReleaseMutex();
}
catch (Exception ex)
{
words_mutex.ReleaseMutex();
resetTTS();
repo_espeak.Logger.LogError((object)"An error occured during update.");
repo_espeak.Logger.LogError((object)ex);
}
}
private static bool isPlayerEmpty(int player_id)
{
for (int i = 0; i < words.Count; i++)
{
if (words[i] != null && ((Object)words[i].TTSInstance.playerAvatar).GetInstanceID() == player_id)
{
return false;
}
}
return true;
}
public static void stopSentence(PlayerAvatar adc)
{
PlayerAvatar adc2 = adc;
words_mutex.WaitOne();
words.RemoveAll((message word) => word != null && (Object)(object)word.TTSInstance.playerAvatar == (Object)(object)adc2);
PlayerTTS playerTTS = player_tts_list.Find((PlayerTTS p) => p.player == ((Object)adc2).GetInstanceID());
if (playerTTS != null)
{
playerTTS.end_timestamp = 0L;
}
words_mutex.ReleaseMutex();
}
private static void resetTTS()
{
words_mutex.WaitOne();
words = new List<message>();
player_tts_list = new List<PlayerTTS>();
is_tts_done = true;
words_mutex.ReleaseMutex();
}
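// Queues one message per entry in word_list (one per word, or a single full sentence in natural
// mode) and starts the synthesis thread if it is not already running.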
public static void schedule_play(List<string> word_list, TTSVoice TTSInstance, bool whisper = false, eSpeakParameters? parameters = null)
{
if (!parameters.HasValue)
{
parameters = TTSParameters;
}
try
{
words_mutex.WaitOne();
foreach (string item in word_list)
{
words.Add(new message(item, TTSInstance, parameters.Value, whisper));
}
if (is_tts_done)
{
is_tts_done = false;
thread = new Thread(ThreadVoice);
thread.Start();
}
words_mutex.ReleaseMutex();
}
catch (Exception ex)
{
words_mutex.ReleaseMutex();
resetTTS();
repo_espeak.Logger.LogError((object)"An error occured while scheduling word.");
repo_espeak.Logger.LogError((object)ex);
}
}
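// Entry point used by both the Harmony patches and the Photon RPC: cancels any speech in progress
// for this avatar, then splits the text word by word (or keeps it whole when 'natural' full-sentence
// mode is on) and schedules it for synthesis.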
public static void eSpeakTTSSpeakNow(string text, bool crouch, eSpeakParameters parameters, TTSVoice instance, bool natural)
{
instance.StopAndClearVoice();
stopSentence(instance.playerAvatar);
instance.setVoice(0);
if (!Object.op_Implicit((Object)(object)instance.activeVoice))
{
repo_espeak.Logger.LogError((object)"Active voice is not set.");
}
text = instance.TranslateSpecialLetters(text);
if (natural)
{
instance.words = new List<string> { text };
}
else
{
instance.words = new List<string>(text.Split(' '));
}
schedule_play(instance.words, instance, crouch, parameters);
}
}
[BepInPlugin("Lavighju.espeakTTS", "espeakTTS", "0.1.1")]
public class repo_espeak : BaseUnityPlugin
{
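// Harmony prefixes that reroute the game's chat pipeline through espeak: ChatMessageSend parses an
// optional [language] prefix, marks the message with a leading U+200E so it is not voiced twice, and
// broadcasts the mod's eSpeakChatMessageSendRPC alongside the vanilla RPC; ChatMessageSpeak then
// voices unmarked messages locally using the fallback settings for players without the mod installed.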
private class HarmonyPatches
{
[HarmonyPatch(typeof(PlayerAvatar), "ChatMessageSendRPC")]
[HarmonyPrefix]
private static bool ChatMessageSendRPCPrefix(ref string _message, bool crouching, PlayerAvatar __instance)
{
if ((int)GameDirector.instance.currentState == 2)
{
__instance.ChatMessageSpeak(_message, crouching);
}
return false;
}
[HarmonyPatch(typeof(PlayerAvatar), "ChatMessageSpeak")]
[HarmonyPrefix]
private static bool ChatMessageSpeakPrefix(ref string _message, bool crouching, PlayerAvatar __instance)
{
if (string.IsNullOrEmpty(_message) || _message[0] == '\u200e')
{
return false;
}
if (Object.op_Implicit((Object)(object)__instance.voiceChat) && Object.op_Implicit((Object)(object)__instance.voiceChat.ttsVoice))
{
espeakTTS.eSpeakTTSSpeakNow(_message, crouching, new eSpeakParameters(default_language.Value, default_variant.Value, default_pitch.Value, default_speed.Value, natural_gap.Value), __instance.voiceChat.ttsVoice, natural_voice.Value);
}
return false;
}
[HarmonyPatch(typeof(PlayerAvatar), "ChatMessageSend")]
[HarmonyPrefix]
private static bool ChatMessageSendPrefix(ref string _message, bool _debugMessage, PlayerAvatar __instance)
{
string value = language.Value;
string pattern = "^\\[(.*?)\\]\\s*";
Match match = Regex.Match(_message, pattern);
if (match.Success)
{
value = match.Groups[1].Value;
_message = Regex.Replace(_message, pattern, "");
}
_message = "\u200e" + _message;
if (!_debugMessage)
{
foreach (PlayerVoiceChat voiceChat in RunManager.instance.voiceChats)
{
if (!voiceChat.recordingEnabled)
{
return false;
}
}
}
bool flag = __instance.isCrouching;
SemiFunc.Command(_message);
if (!SemiFunc.IsMultiplayer())
{
__instance.ChatMessageSpeak(_message, flag);
return false;
}
if (__instance.isDisabled)
{
flag = true;
}
__instance.photonView.RPC("ChatMessageSendRPC", (RpcTarget)0, new object[2] { _message, flag });
__instance.photonView.RPC("eSpeakChatMessageSendRPC", (RpcTarget)0, new object[8] { _message, flag, value, variant.Value, speed.Value, pitch.Value, natural_voice.Value, natural_gap.Value });
return false;
}
[HarmonyPatch(typeof(PlayerAvatar), "Awake")]
[HarmonyPostfix]
private static void AwakePost(PlayerAvatar __instance)
{
eSpeakPun eSpeakPun2 = ComponentHolderProtocol.AddComponent<eSpeakPun>((Object)(object)__instance);
eSpeakPun2.playerAvatar = __instance;
}
}
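// Accepted values for the Language config entries; these are espeak-ng voice/language identifiers.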
public static readonly string[] language_list = new string[128]
{
"af", "sq", "am", "ar", "an", "hy", "hyw", "as", "az", "ba",
"cu", "eu", "be", "bn", "bpy", "bs", "bg", "my", "ca", "chr",
"yue", "hak", "haw", "cmn", "hr", "cs", "da", "nl", "en-us", "en",
"en-029", "en-gb-x-gbclan", "en-gb-x-rp", "en-gb-scotland", "en-gb-x-gbcwmd", "eo", "et", "fa", "fa-latn", "fi",
"fr-be", "fr", "fr-ch", "ga", "gd", "ka", "de", "grc", "el", "kl",
"gn", "gu", "ht", "he", "hi", "hu", "is", "id", "ia", "io",
"it", "ja", "kn", "kok", "ko", "ku", "kk", "ky", "la", "lb",
"ltg", "lv", "lfn", "lt", "jbo", "mi", "mk", "ms", "ml", "mt",
"mr", "nci", "ne", "nb", "nog", "or", "om", "pap", "py", "pl",
"pt-br", "qdb", "qu", "quc", "qya", "pt", "pa", "piqd", "ro", "ru",
"ru-lv", "uk", "sjn", "sr", "tn", "sd", "shn", "si", "sk", "sl",
"smj", "es", "es-419", "sw", "sv", "ta", "th", "tk", "tt", "te",
"tr", "ug", "ur", "uz", "vi-vn-x-central", "vi", "vi-vn-x-south", "cy"
};
public static readonly string[] variant_list = new string[14]
{
"", "m1", "m2", "m3", "m4", "m5", "m6", "m7", "f1", "f2",
"f3", "f4", "f5", "croak"
};
public static ConfigEntry<string> language;
public static ConfigEntry<string> variant;
public static ConfigEntry<int> pitch;
public static ConfigEntry<int> speed;
public static ConfigEntry<string> default_language;
public static ConfigEntry<string> default_variant;
public static ConfigEntry<int> default_pitch;
public static ConfigEntry<int> default_speed;
public static ConfigEntry<bool> natural_voice;
public static ConfigEntry<int> natural_gap;
public static eSpeakPun espeakpun;
internal static repo_espeak Instance { get; private set; } = null;
public static ManualLogSource Logger => Instance._logger;
private ManualLogSource _logger => ((BaseUnityPlugin)this).Logger;
internal Harmony? Harmony { get; set; }
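// Plugin entry point: binds the config entries, locates the bundled espeak-ng-data folder, points
// ESPEAK_DATA_PATH at it, applies the Harmony patches and initializes the TTS engine.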
private void Awake()
{
language = ((BaseUnityPlugin)this).Config.Bind<string>("General", "Language", "en", new ConfigDescription("Language set for the TTS. You can find the identifier for your language here: https://github.com/espeak-ng/espeak-ng/blob/59823f30e3edbd01e87002c04a49ebfd63edaaa7/docs/languages.md", (AcceptableValueBase)(object)new AcceptableValueList<string>(language_list), Array.Empty<object>()));
variant = ((BaseUnityPlugin)this).Config.Bind<string>("General", "Variant", "", new ConfigDescription("Variant of the voice. Working values are: m1, m2, m3, m4, m5, m6, m7, f1, f2, f3, f4, f5, croak. (whisper is applied automatically while crouching and cannot be selected here)", (AcceptableValueBase)(object)new AcceptableValueList<string>(variant_list), Array.Empty<object>()));
pitch = ((BaseUnityPlugin)this).Config.Bind<int>("General", "Pitch", 25, new ConfigDescription("Pitch of the voice, 0-99", (AcceptableValueBase)(object)new AcceptableValueRange<int>(0, 99), Array.Empty<object>()));
speed = ((BaseUnityPlugin)this).Config.Bind<int>("General", "Speed", 100, new ConfigDescription("Speed of the voice, 80-500", (AcceptableValueBase)(object)new AcceptableValueRange<int>(80, 500), Array.Empty<object>()));
default_language = ((BaseUnityPlugin)this).Config.Bind<string>("TTS for players without the mod installed", "Language", "en", new ConfigDescription("Language set for the TTS for players without the mod installed", (AcceptableValueBase)(object)new AcceptableValueList<string>(language_list), Array.Empty<object>()));
default_variant = ((BaseUnityPlugin)this).Config.Bind<string>("TTS for players without the mod installed", "Variant", "", new ConfigDescription("Variant of the voice", (AcceptableValueBase)(object)new AcceptableValueList<string>(variant_list), Array.Empty<object>()));
default_pitch = ((BaseUnityPlugin)this).Config.Bind<int>("TTS for players without the mod installed", "Pitch", 25, new ConfigDescription("Pitch of the voice, 0-99", (AcceptableValueBase)(object)new AcceptableValueRange<int>(0, 99), Array.Empty<object>()));
default_speed = ((BaseUnityPlugin)this).Config.Bind<int>("TTS for players without the mod installed", "Speed", 100, new ConfigDescription("Speed of the voice, 80-500", (AcceptableValueBase)(object)new AcceptableValueRange<int>(80, 500), Array.Empty<object>()));
natural_voice = ((BaseUnityPlugin)this).Config.Bind<bool>("Full sentences settings", "Enable full sentences", false, new ConfigDescription("Enable or disable full sentences (whole sentences spoken instead of word by word)", (AcceptableValueBase)null, Array.Empty<object>()));
natural_gap = ((BaseUnityPlugin)this).Config.Bind<int>("Full sentences settings", "Gap", 10, new ConfigDescription("Word gap in sentences, in milliseconds", (AcceptableValueBase)(object)new AcceptableValueRange<int>(10, 1000), Array.Empty<object>()));
Instance = this;
((Component)this).gameObject.transform.parent = null;
((Object)((Component)this).gameObject).hideFlags = (HideFlags)61;
string text = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
if (Directory.Exists(text + "\\plugins\\espeak-ng-data"))
{
text += "\\plugins";
}
else if (!Directory.Exists(text + "\\espeak-ng-data"))
{
Logger.LogError((object)("Could not find the folder espeak-ng-data. Are you sure it is present in the plugin's folder " + text + " ?"));
return;
}
Environment.SetEnvironmentVariable("ESPEAK_DATA_PATH", text);
Patch();
Logger.LogInfo((object)("Trying to initialize the dictionnary from this location: " + text));
try
{
espeakTTS.init(language.Value, variant.Value, pitch.Value, speed.Value, natural_gap.Value);
}
catch (Exception ex)
{
Logger.LogError((object)"An error occured during initialization:");
Logger.LogError((object)ex);
}
Logger.LogInfo((object)$"{((BaseUnityPlugin)this).Info.Metadata.GUID} v{((BaseUnityPlugin)this).Info.Metadata.Version} has successfully loaded");
}
internal void Patch()
{
if (Harmony == null)
{
Harmony = new Harmony(((BaseUnityPlugin)this).Info.Metadata.GUID);
}
Harmony.PatchAll(typeof(HarmonyPatches));
}
internal void Unpatch()
{
Harmony? harmony = Harmony;
if (harmony != null)
{
harmony.UnpatchSelf();
}
if (espeakTTS.thread != null)
{
espeakTTS.thread.Abort();
}
}
internal void Update()
{
espeakTTS.update();
}
}
}