Commit b72d79e: Added MonoAudioSoundEffectInstance

MineCake147E committed Dec 8, 2019
1 parent b83be38 · commit b72d79e
Showing 15 changed files with 376 additions and 19 deletions.
2 changes: 1 addition & 1 deletion MonoAudio.Android/AudioTrackOutput.cs
@@ -187,7 +187,7 @@ private static Encoding ConvertEncoding(IWaveFormat format)
{
switch (format.Encoding)
{
-case AudioEncoding.Pcm:
+case AudioEncoding.LinearPcm:
switch (format.BitDepth)
{
case 8:
2 changes: 1 addition & 1 deletion MonoAudio.IO.CSCore/Devices/CSCoreMMDevice.cs
@@ -53,7 +53,7 @@ public FormatSupportStatus CheckSupportStatus(IWaveFormat format, IOExclusivity
{
switch (format.Encoding)
{
-case AudioEncoding.Pcm:
+case AudioEncoding.LinearPcm:
case AudioEncoding.IeeeFloat:
break;
default:
25 changes: 25 additions & 0 deletions MonoAudio.IO.MonoGame/MonoAudio.IO.MonoGame.csproj
@@ -0,0 +1,25 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>netstandard2.0</TargetFramework>
</PropertyGroup>

<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
<DocumentationFile>MonoAudio.IO.MonoGame.xml</DocumentationFile>
<WarningsAsErrors>NU1605;CS1591</WarningsAsErrors>
</PropertyGroup>

<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<DocumentationFile>MonoAudio.IO.MonoGame.xml</DocumentationFile>
<WarningsAsErrors>NU1605;CS1591</WarningsAsErrors>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="MonoGame.Framework.Portable" Version="3.7.1.189" />
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\MonoAudio\MonoAudio.csproj" />
</ItemGroup>

</Project>
76 changes: 76 additions & 0 deletions MonoAudio.IO.MonoGame/MonoAudio.IO.MonoGame.xml

Some generated files are not rendered by default.

233 changes: 233 additions & 0 deletions MonoAudio.IO.MonoGame/MonoAudioSoundEffectInstance.cs
@@ -0,0 +1,233 @@
using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.Xna.Framework.Audio;
using MonoAudio.Conversion.Resampling.Sample;
using MonoAudio.Conversion.SampleToWaveConverters;
using MonoAudio.Conversion.WaveToSampleConverters;

namespace MonoAudio.IO.MonoGame
{
/// <summary>
/// An <see cref="ISoundOut"/> implementation that plays audio through MonoGame's <see cref="DynamicSoundEffectInstance"/>.
/// </summary>
/// <seealso cref="ISoundOut" />
public sealed class MonoAudioSoundEffectInstance : ISoundOut
{
private bool disposedValue = false;

private DynamicSoundEffectInstance soundEffectInstance;

/// <summary>
/// Gets the latency of the playback.
/// </summary>
/// <value>
/// The latency.
/// </value>
public TimeSpan Latency { get; }

private const int TargetBufferCount = 4;
private int currentBufferIndex = 0;

private byte[][] buffers;

/// <summary>
/// Gets the state of the playback.
/// </summary>
/// <value>
/// The state of the playback.
/// </value>
public PlaybackState PlaybackState
{
get
{
switch (soundEffectInstance.State)
{
case SoundState.Playing:
return PlaybackState.Playing;
case SoundState.Paused:
return PlaybackState.Paused;
case SoundState.Stopped:
return PlaybackState.Stopped;
default:
throw new InvalidOperationException();
}
}
}

private IWaveSource waveSource;

/// <summary>
/// Initializes the <see cref="ISoundOut" /> for playing a <paramref name="source" />.
/// </summary>
/// <param name="source">The source to play.</param>
/// <exception cref="NotSupportedException">
/// The specified number of channels is not supported!
/// or
/// The format \"{actualSource.Format.BitDepth}bit Linear PCM\" is not supported!
/// or
/// The format \"{actualSource.Format.BitDepth}bit IEEE 754 Floating Point PCM\" is not supported!
/// or
/// The format \"{actualSource.Format.BitDepth}bit {actualSource.Format.Encoding.ToString()}\" is not supported!
/// </exception>
public void Initialize(IWaveSource source)
{
AudioChannels channels;
switch (source.Format.Channels)
{
case 1:
channels = AudioChannels.Mono;
break;
case 2:
channels = AudioChannels.Stereo;
break;
default:
throw new NotSupportedException($"The specified number of channels is not supported!");
}
IWaveSource actualSource = source;
ISampleSource sampleSource;
//Needed to be 16bit PCM : https://github.com/MonoGame/MonoGame/blob/develop/MonoGame.Framework/Audio/DynamicSoundEffectInstance.cs#L230
switch (actualSource.Format.Encoding)
{
case AudioEncoding.LinearPcm:
switch (actualSource.Format.BitDepth)
{
case 16:
sampleSource = new Pcm16ToSampleConverter(actualSource);
break;
case 8:
sampleSource = new Pcm8ToSampleConverter(actualSource);
break;
case 24:
sampleSource = new Pcm24ToSampleConverter(actualSource);
break;
case 32:
sampleSource = new Pcm32ToSampleConverter(actualSource);
break;
default:
throw new NotSupportedException($"The format \"{actualSource.Format.BitDepth}bit Linear PCM\" is not supported!");
}
break;
case AudioEncoding.IeeeFloat:
switch (actualSource.Format.BitDepth)
{
case 32:
sampleSource = new Float32ToSampleConverter(actualSource);
break;
default:
throw new NotSupportedException($"The format \"{actualSource.Format.BitDepth}bit IEEE 754 Floating Point PCM\" is not supported!");
}
break;
default:
throw new NotSupportedException($"The format \"{actualSource.Format.BitDepth}bit {actualSource.Format.Encoding.ToString()}\" is not supported!");
}
//Sample rate is capped between 8k~48k : https://github.com/MonoGame/MonoGame/blob/develop/MonoGame.Framework/Audio/DynamicSoundEffectInstance.cs#L83
ValidateSampleRate(source, ref actualSource, ref sampleSource);
soundEffectInstance = new DynamicSoundEffectInstance(actualSource.Format.SampleRate, channels);
soundEffectInstance.BufferNeeded += SoundEffectInstance_BufferNeeded;
var l = soundEffectInstance.GetSampleSizeInBytes(Latency);
buffers = new byte[TargetBufferCount * 3][];
for (int i = 0; i < buffers.Length; i++)
{
buffers[i] = new byte[l];
buffers[i].AsSpan().FastFill(0);
}
}

private volatile bool isProcessing = false;

/// <summary>
/// Initializes a new instance of the <see cref="MonoAudioSoundEffectInstance"/> class.
/// </summary>
/// <param name="latency">The latency.</param>
public MonoAudioSoundEffectInstance(TimeSpan latency)
{
if (latency.TotalMilliseconds < 4) latency = TimeSpan.FromMilliseconds(4);
Latency = latency;
}

private void SoundEffectInstance_BufferNeeded(object sender, EventArgs e)
{
if (isProcessing) return;
isProcessing = true;
while (soundEffectInstance.PendingBufferCount < TargetBufferCount)
{
var buf = buffers[currentBufferIndex];
currentBufferIndex = (currentBufferIndex + 1) % buffers.Length;
var len = waveSource.Read(buf);
soundEffectInstance.SubmitBuffer(buf, 0, len);
}
isProcessing = false;
}

private void ValidateSampleRate(IWaveSource source, ref IWaveSource actualSource, ref ISampleSource sampleSource)
{
if (sampleSource.Format.SampleRate < 24000)
{
sampleSource = new SplineResampler(sampleSource, 24000);
}
else if (sampleSource.Format.SampleRate > 48000)
{
sampleSource = new SplineResampler(sampleSource, 48000);
}
else if (actualSource.Format.Encoding == AudioEncoding.LinearPcm && actualSource.Format.BitDepth == 16)
{
sampleSource?.Dispose();
sampleSource = null;
waveSource = source;
return;
}
actualSource = new SampleToPcm16Converter(sampleSource, true, Endianness.Little);
waveSource = actualSource;
}

/// <summary>
/// Pauses the audio playback.
/// </summary>
public void Pause() => soundEffectInstance?.Pause();

/// <summary>
/// Starts the audio playback.
/// </summary>
public void Play() => soundEffectInstance?.Play();

/// <summary>
/// Resumes the audio playback.
/// </summary>
public void Resume() => soundEffectInstance?.Resume();

/// <summary>
/// Stops the audio playback.
/// </summary>
public void Stop() => soundEffectInstance?.Stop();

#region IDisposable Support

private void Dispose(bool disposing)
{
if (!disposedValue)
{
if (disposing)
{
//
}
soundEffectInstance?.Dispose();
soundEffectInstance = null;
//

disposedValue = true;
}
}

/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}

#endregion IDisposable Support
}
}
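
For context, here is a minimal usage sketch of the new output class, not part of the commit. It assumes that IWaveSource and MonoAudioSoundEffectInstance are available via the MonoAudio and MonoAudio.IO.MonoGame namespaces and that the caller already has an IWaveSource instance from elsewhere (decoding is out of scope here).

using System;
using MonoAudio;
using MonoAudio.IO.MonoGame;

public static class PlaybackExample
{
    // "source" is any IWaveSource implementation obtained elsewhere (hypothetical for this sketch).
    public static MonoAudioSoundEffectInstance StartPlayback(IWaveSource source)
    {
        // Request roughly 20 ms of latency; the constructor clamps anything below 4 ms.
        var output = new MonoAudioSoundEffectInstance(TimeSpan.FromMilliseconds(20));

        // Initialize converts the source to 16-bit little-endian PCM (resampling when the
        // sample rate falls outside the range the class accepts) and pre-allocates buffers.
        output.Initialize(source);

        // DynamicSoundEffectInstance.BufferNeeded then keeps TargetBufferCount (4) buffers
        // queued by reading from the wrapped source until Stop() or Dispose() is called.
        output.Play();
        return output;
    }
}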
2 changes: 1 addition & 1 deletion MonoAudio.IO.OpenTK/ALDevice.cs
@@ -70,7 +70,7 @@ public FormatSupportStatus CheckSupportStatus(IWaveFormat format, IOExclusivity
if (format.Channels > 2) return FormatSupportStatus.NotSupported;
switch (format.Encoding)
{
-case AudioEncoding.Pcm:
+case AudioEncoding.LinearPcm:
break;
case AudioEncoding.IeeeFloat:
return FormatSupportStatus.SupportedBySoftware;
4 changes: 2 additions & 2 deletions MonoAudio.IO.OpenTK/ALOutput.cs
@@ -131,7 +131,7 @@ private static ALFormat GetALFormat(IWaveFormat wf)
{
if (wf.Channels == 1)
{
-if (wf.Encoding == AudioEncoding.Pcm)
+if (wf.Encoding == AudioEncoding.LinearPcm)
{
if (wf.BitDepth == 8) return ALFormat.Mono8;
else if (wf.BitDepth == 16) return ALFormat.Mono16;
Expand All @@ -143,7 +143,7 @@ private static ALFormat GetALFormat(IWaveFormat wf)
}
else if (wf.Channels == 2)
{
-if (wf.Encoding == AudioEncoding.Pcm)
+if (wf.Encoding == AudioEncoding.LinearPcm)
{
if (wf.BitDepth == 8) return ALFormat.Stereo8;
else if (wf.BitDepth == 16) return ALFormat.Stereo16;
The remaining changed files are not shown here.
