First commit with progress so far

This commit is contained in:
Tom
2023-12-30 09:27:31 +00:00
commit 72be20594d
575 changed files with 23496 additions and 0 deletions

View File

@ -0,0 +1,90 @@
using NAudio.Wave;
using NAudio.Extras;
using NAudio.Wave.SampleProviders;
/// <summary>
/// Singleton playback engine that mixes any number of sample inputs into a
/// single always-running output device. Inputs are added to a
/// <see cref="MixingSampleProvider"/> and removed by NAudio when they finish.
/// </summary>
public class AudioPlaybackEngine : IDisposable
{
    // Shared engine instance: 22.05 kHz mono mix (the ctor defaults are bypassed here).
    public static readonly AudioPlaybackEngine Instance = new AudioPlaybackEngine(22050, 1);
    private readonly IWavePlayer outputDevice;
    private readonly MixingSampleProvider mixer;
    private AudioPlaybackEngine(int sampleRate = 44100, int channelCount = 2)
    {
        outputDevice = new WaveOutEvent();
        mixer = new MixingSampleProvider(WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channelCount));
        // ReadFully makes the mixer emit silence when it has no inputs, so the
        // output device keeps playing continuously instead of stopping.
        mixer.ReadFully = true;
        outputDevice.Init(mixer);
        outputDevice.Play();
    }
    /// <summary>
    /// Up-mixes mono input to stereo when the mixer is stereo; passes through
    /// when channel counts already match.
    /// </summary>
    /// <exception cref="NotImplementedException">Any other channel conversion.</exception>
    private ISampleProvider ConvertToRightChannelCount(ISampleProvider input)
    {
        if (input.WaveFormat.Channels == mixer.WaveFormat.Channels)
        {
            return input;
        }
        if (input.WaveFormat.Channels == 1 && mixer.WaveFormat.Channels == 2)
        {
            return new MonoToStereoSampleProvider(input);
        }
        throw new NotImplementedException("Not yet implemented this channel count conversion");
    }
    /// <summary>Plays an audio file; the reader disposes itself when playback completes.</summary>
    public void PlaySound(string fileName)
    {
        var input = new AudioFileReader(fileName);
        AddMixerInput(new AutoDisposeFileReader(input));
    }
    /// <summary>Plays a fully-buffered network sound.</summary>
    public void PlaySound(NetworkWavSound sound)
    {
        AddMixerInput(new CachedWavProvider(sound));
    }
    /// <summary>
    /// Wraps a raw wave provider in the sample provider matching its encoding
    /// and bit depth.
    /// </summary>
    /// <exception cref="ArgumentException">
    /// Unsupported encoding or PCM bit depth.
    /// </exception>
    public ISampleProvider ConvertSound(IWaveProvider provider) {
        var format = provider.WaveFormat;
        if (format.Encoding == WaveFormatEncoding.Pcm) {
            switch (format.BitsPerSample) {
                case 8:
                    return new Pcm8BitToSampleProvider(provider);
                case 16:
                    return new Pcm16BitToSampleProvider(provider);
                case 24:
                    return new Pcm24BitToSampleProvider(provider);
                case 32:
                    return new Pcm32BitToSampleProvider(provider);
                default:
                    // BUGFIX: previously fell through and returned null for any
                    // other bit depth, deferring the failure to an NRE at the caller.
                    throw new ArgumentException(
                        "Unsupported PCM bit depth (" + format.BitsPerSample + ") while adding to mixer.");
            }
        }
        if (format.Encoding == WaveFormatEncoding.IeeeFloat) {
            // 64-bit doubles need the dedicated converter; 32-bit floats are the default.
            return format.BitsPerSample == 64
                ? new WaveToSampleProvider64(provider)
                : (ISampleProvider)new WaveToSampleProvider(provider);
        }
        throw new ArgumentException("Unsupported source encoding while adding to mixer.");
    }
    public void AddMixerInput(ISampleProvider input)
    {
        mixer.AddMixerInput(input);
    }
    public void AddMixerInput(IWaveProvider input)
    {
        mixer.AddMixerInput(input);
    }
    public void RemoveMixerInput(ISampleProvider sound) {
        mixer.RemoveMixerInput(sound);
    }
    /// <summary>Subscribes to the mixer's input-ended event (fires when any input completes).</summary>
    public void AddOnMixerInputEnded(EventHandler<SampleProviderEventArgs> e) {
        mixer.MixerInputEnded += e;
    }
    public void Dispose() {
        outputDevice.Dispose();
    }
}

View File

@ -0,0 +1,45 @@
using NAudio.Wave;
using System;
/// <summary>
/// Fully decodes an audio resource (local path or URI, via Media Foundation)
/// into an in-memory byte buffer so it can be replayed without re-downloading.
/// </summary>
public class NetworkWavSound
{
    // Raw decoded audio bytes; the whole stream is buffered up-front in the ctor.
    public byte[] AudioData { get; private set; }
    public WaveFormat WaveFormat { get; private set; }
    public NetworkWavSound(string uri)
    {
        using (var mfr = new MediaFoundationReader(uri)) {
            WaveFormat = mfr.WaveFormat;
            using (var ms = new MemoryStream()) {
                // Stream.CopyTo replaces the previous hand-rolled 4096-byte
                // read/write loop with the same semantics.
                mfr.CopyTo(ms);
                AudioData = ms.ToArray();
            }
        }
    }
}
/// <summary>
/// IWaveProvider that streams a <see cref="NetworkWavSound"/>'s in-memory
/// audio bytes. Read returns 0 at end of data, which signals the mixer to
/// remove this input.
/// </summary>
public class CachedWavProvider : IWaveProvider
{
    private readonly NetworkWavSound sound;
    // Removed the unused `position` field from the original: it was never
    // read or written; RawSourceWaveStream tracks its own position.
    private readonly RawSourceWaveStream stream;
    public WaveFormat WaveFormat { get => sound.WaveFormat; }
    public CachedWavProvider(NetworkWavSound cachedSound)
    {
        sound = cachedSound;
        stream = new RawSourceWaveStream(new MemoryStream(sound.AudioData), sound.WaveFormat);
    }
    public int Read(byte[] buffer, int offset, int count)
    {
        return stream.Read(buffer, offset, count);
    }
}

View File

@ -0,0 +1,76 @@
using NAudio.Wave;
/// <summary>
/// Thread-safe two-stage queue for TTS messages. <see cref="Add"/> places a
/// message in the preparation buffer; <see cref="Ready"/> promotes it to the
/// ready-to-play queue. Both queues dequeue by lowest Priority value first
/// (PriorityQueue is a min-heap on the priority).
/// </summary>
public class TTSPlayer {
    private readonly PriorityQueue<TTSMessage, int> _messages; // ready to play
    private readonly PriorityQueue<TTSMessage, int> _buffer;   // awaiting preparation
    // In-process gates. The original used kernel Mutex objects, which are
    // heavier and only needed for cross-process synchronization; `lock`
    // gives the same mutual exclusion with exception-safe release.
    private readonly object _messagesGate = new object();
    private readonly object _bufferGate = new object();
    public TTSPlayer() {
        _messages = new PriorityQueue<TTSMessage, int>();
        _buffer = new PriorityQueue<TTSMessage, int>();
    }
    /// <summary>Queues a message into the preparation buffer.</summary>
    public void Add(TTSMessage message) {
        lock (_bufferGate) {
            _buffer.Enqueue(message, message.Priority);
        }
    }
    /// <summary>Dequeues the highest-priority ready message, or null when empty.</summary>
    public TTSMessage ReceiveReady() {
        lock (_messagesGate) {
            if (_messages.TryDequeue(out TTSMessage message, out int _)) {
                return message;
            }
            return null;
        }
    }
    /// <summary>Dequeues the highest-priority buffered message, or null when empty.</summary>
    public TTSMessage ReceiveBuffer() {
        lock (_bufferGate) {
            if (_buffer.TryDequeue(out TTSMessage message, out int _)) {
                return message;
            }
            return null;
        }
    }
    /// <summary>Promotes a prepared message into the ready-to-play queue.</summary>
    public void Ready(TTSMessage message) {
        lock (_messagesGate) {
            _messages.Enqueue(message, message.Priority);
        }
    }
    /// <summary>True when no ready messages are queued.</summary>
    public bool IsEmpty() {
        // BUGFIX: the original read _messages.Count without synchronization,
        // racing concurrent Ready/ReceiveReady calls on a non-thread-safe queue.
        lock (_messagesGate) {
            return _messages.Count == 0;
        }
    }
}
/// <summary>
/// Mutable DTO describing one text-to-speech request flowing through
/// <c>TTSPlayer</c>. <c>Priority</c> drives queue ordering (lower dequeues
/// first); <c>Audio</c> holds the synthesized samples once prepared.
/// </summary>
public class TTSMessage {
    public string Voice { get; set; }
    public string Channel { get; set; }
    public string Username { get; set; }
    public string Message { get; set; }
    public string File { get; set; }
    public DateTime Timestamp { get; set; }
    public bool Moderator { get; set; }
    public bool Bot { get; set; }
    // Presumably Twitch-style badge name/version pairs — TODO confirm against the producer.
    public IEnumerable<KeyValuePair<string, string>> Badges { get; set; }
    public int Bits { get; set; }
    // Used as the PriorityQueue priority in TTSPlayer.Add/Ready (min-heap: lower plays first).
    public int Priority { get; set; }
    // Synthesized audio ready for the mixer; null until prepared.
    public ISampleProvider Audio { get; set; }
}