Clyde audio cleanup & fallback if audio device is not available (#2240)

Author: 20kdc
Date: 2021-11-17 13:18:04 +00:00
Committed by: GitHub
Parent: 2ec715b70e
Commit: 28e6cdc92f
16 changed files with 892 additions and 469 deletions

View File

@@ -5,6 +5,7 @@ using Robust.Client.Debugging;
using Robust.Client.GameObjects;
using Robust.Client.GameStates;
using Robust.Client.Graphics;
using Robust.Client.Graphics.Audio;
using Robust.Client.Graphics.Clyde;
using Robust.Client.Input;
using Robust.Client.Map;
@@ -82,8 +83,9 @@ namespace Robust.Client
case GameController.DisplayMode.Headless:
IoCManager.Register<IClyde, ClydeHeadless>();
IoCManager.Register<IClipboardManager, ClydeHeadless>();
IoCManager.Register<IClydeAudio, ClydeHeadless>();
IoCManager.Register<IClydeInternal, ClydeHeadless>();
IoCManager.Register<IClydeAudio, ClydeAudioHeadless>();
IoCManager.Register<IClydeAudioInternal, ClydeAudioHeadless>();
IoCManager.Register<IInputManager, InputManager>();
IoCManager.Register<IFileDialogManager, DummyFileDialogManager>();
IoCManager.Register<IUriOpener, UriOpenerDummy>();
@@ -91,8 +93,9 @@ namespace Robust.Client
case GameController.DisplayMode.Clyde:
IoCManager.Register<IClyde, Clyde>();
IoCManager.Register<IClipboardManager, Clyde>();
IoCManager.Register<IClydeAudio, Clyde>();
IoCManager.Register<IClydeInternal, Clyde>();
IoCManager.Register<IClydeAudio, FallbackProxyClydeAudio>();
IoCManager.Register<IClydeAudioInternal, FallbackProxyClydeAudio>();
IoCManager.Register<IInputManager, ClydeInputManager>();
IoCManager.Register<IFileDialogManager, FileDialogManager>();
IoCManager.Register<IUriOpener, UriOpener>();

View File

@@ -59,6 +59,7 @@ namespace Robust.Client
[Dependency] private readonly IViewVariablesManagerInternal _viewVariablesManager = default!;
[Dependency] private readonly IDiscordRichPresence _discord = default!;
[Dependency] private readonly IClydeInternal _clyde = default!;
[Dependency] private readonly IClydeAudioInternal _clydeAudio = default!;
[Dependency] private readonly IFontManagerInternal _fontManager = default!;
[Dependency] private readonly IModLoaderInternal _modLoader = default!;
[Dependency] private readonly IScriptClient _scriptClient = default!;
@@ -86,6 +87,7 @@ namespace Robust.Client
internal bool StartupContinue(DisplayMode displayMode)
{
_clyde.InitializePostWindowing();
_clydeAudio.InitializePostWindowing();
_clyde.SetWindowTitle(Options.DefaultWindowTitle);
_taskManager.Initialize();
@@ -439,6 +441,7 @@ namespace Robust.Client
private void Update(FrameEventArgs frameEventArgs)
{
_webViewHook?.Update();
_clydeAudio.FrameProcess(frameEventArgs);
_clyde.FrameProcess(frameEventArgs);
_modLoader.BroadcastUpdate(ModUpdateLevel.FramePreEngine, frameEventArgs);
_stateManager.FrameUpdate(frameEventArgs);
@@ -537,6 +540,7 @@ namespace Robust.Client
IoCManager.Resolve<IEntityLookup>().Shutdown();
_entityManager.Shutdown();
_clyde.Shutdown();
_clydeAudio.Shutdown();
}
private sealed record ResourceManifestData(string[] Modules);

View File

@@ -0,0 +1,74 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using OpenToolkit.Audio.OpenAL;
using OpenToolkit.Audio.OpenAL.Extensions.Creative.EFX;
using OpenToolkit.Mathematics;
using Robust.Client.Audio;
using Robust.Shared;
using Robust.Shared.Configuration;
using Robust.Shared.IoC;
using Robust.Shared.Audio;
using Robust.Shared.Log;
using Vector2 = Robust.Shared.Maths.Vector2;
namespace Robust.Client.Graphics.Audio
{
internal partial class ClydeAudio
{
// Used to track audio sources that were disposed in the finalizer thread,
// so we need to properly send them off in the main thread.
private readonly ConcurrentQueue<(int sourceHandle, int filterHandle)> _sourceDisposeQueue = new();
private readonly ConcurrentQueue<(int sourceHandle, int filterHandle)> _bufferedSourceDisposeQueue = new();
private readonly ConcurrentQueue<int> _bufferDisposeQueue = new();
private void _flushALDisposeQueues()
{
// Clear out finalized audio sources.
while (_sourceDisposeQueue.TryDequeue(out var handles))
{
_openALSawmill.Debug("Cleaning out source {0} which finalized in another thread.", handles.sourceHandle);
if (IsEfxSupported) RemoveEfx(handles);
AL.DeleteSource(handles.sourceHandle);
_checkAlError();
_audioSources.Remove(handles.sourceHandle);
}
// Clear out finalized buffered audio sources.
while (_bufferedSourceDisposeQueue.TryDequeue(out var handles))
{
_openALSawmill.Debug("Cleaning out buffered source {0} which finalized in another thread.", handles.sourceHandle);
if (IsEfxSupported) RemoveEfx(handles);
AL.DeleteSource(handles.sourceHandle);
_checkAlError();
_bufferedAudioSources.Remove(handles.sourceHandle);
}
// Clear out finalized audio buffers.
while (_bufferDisposeQueue.TryDequeue(out var handle))
{
AL.DeleteBuffer((int) handle);
_checkAlError();
}
}
private void DeleteSourceOnMainThread(int sourceHandle, int filterHandle)
{
_sourceDisposeQueue.Enqueue((sourceHandle, filterHandle));
}
private void DeleteBufferedSourceOnMainThread(int bufferedSourceHandle, int filterHandle)
{
_bufferedSourceDisposeQueue.Enqueue((bufferedSourceHandle, filterHandle));
}
private void DeleteAudioBufferOnMainThread(int bufferHandle)
{
_bufferDisposeQueue.Enqueue(bufferHandle);
}
}
}
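
A note on the pattern above: OpenAL handles must be deleted on the thread that owns the AL context, but C# finalizers can run on any thread, so the finalizer only enqueues the handle and the main thread drains the queues once per frame via _flushALDisposeQueues(). A minimal, hypothetical sketch of that deferred-dispose idea (all names here are made up for illustration, not part of the commit):

using System;
using System.Collections.Concurrent;

internal sealed class DeferredNativeDisposer
{
    // Finalizer threads only enqueue handles; no native calls happen off the main thread.
    private readonly ConcurrentQueue<int> _disposeQueue = new();

    public void QueueDispose(int handle) => _disposeQueue.Enqueue(handle);

    // Called once per frame from the thread that owns the native (e.g. OpenAL) context.
    public void Flush(Action<int> deleteNative)
    {
        while (_disposeQueue.TryDequeue(out var handle))
            deleteNative(handle);
    }
}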

View File

@@ -10,436 +10,20 @@ using OpenToolkit.Audio.OpenAL.Extensions.Creative.EFX;
using OpenToolkit.Mathematics;
using Robust.Client.Audio;
using Robust.Shared;
using Robust.Shared.Configuration;
using Robust.Shared.IoC;
using Robust.Shared.Audio;
using Robust.Shared.Log;
using Vector2 = Robust.Shared.Maths.Vector2;
namespace Robust.Client.Graphics.Clyde
namespace Robust.Client.Graphics.Audio
{
internal partial class Clyde
internal partial class ClydeAudio
{
private ALDevice _openALDevice;
private ALContext _openALContext;
private readonly List<LoadedAudioSample> _audioSampleBuffers = new();
private readonly Dictionary<int, WeakReference<AudioSource>> _audioSources =
new();
private readonly Dictionary<int, WeakReference<BufferedAudioSource>> _bufferedAudioSources =
new();
private readonly HashSet<string> _alcDeviceExtensions = new();
private readonly HashSet<string> _alContextExtensions = new();
// Used to track audio sources that were disposed in the finalizer thread,
// so we need to properly send them off in the main thread.
private readonly ConcurrentQueue<(int sourceHandle, int filterHandle)> _sourceDisposeQueue = new();
private readonly ConcurrentQueue<(int sourceHandle, int filterHandle)> _bufferedSourceDisposeQueue = new();
private readonly ConcurrentQueue<int> _bufferDisposeQueue = new();
// The base gain value for a listener, used to boost the default volume.
private const float _baseGain = 2f;
public bool HasAlDeviceExtension(string extension) => _alcDeviceExtensions.Contains(extension);
public bool HasAlContextExtension(string extension) => _alContextExtensions.Contains(extension);
internal bool IsEfxSupported;
private ISawmill _openALSawmill = default!;
private void _initializeAudio()
{
_openALSawmill = Logger.GetSawmill("clyde.oal");
_audioOpenDevice();
// Create OpenAL context.
_audioCreateContext();
IsEfxSupported = HasAlDeviceExtension("ALC_EXT_EFX");
_cfg.OnValueChanged(CVars.AudioMasterVolume, SetMasterVolume, true);
_cfg.OnValueChanged(CVars.AudioAttenuation, SetAudioAttenuation, true);
}
private void _audioCreateContext()
{
unsafe
{
_openALContext = ALC.CreateContext(_openALDevice, (int*) 0);
}
ALC.MakeContextCurrent(_openALContext);
_checkAlcError(_openALDevice);
_checkAlError();
// Load up AL context extensions.
var s = ALC.GetString(ALDevice.Null, AlcGetString.Extensions) ?? "";
foreach (var extension in s.Split(' '))
{
_alContextExtensions.Add(extension);
}
_openALSawmill.Debug("OpenAL Vendor: {0}", AL.Get(ALGetString.Vendor));
_openALSawmill.Debug("OpenAL Renderer: {0}", AL.Get(ALGetString.Renderer));
_openALSawmill.Debug("OpenAL Version: {0}", AL.Get(ALGetString.Version));
}
private void _audioOpenDevice()
{
var preferredDevice = _cfg.GetCVar(CVars.AudioDevice);
// Open device.
if (!string.IsNullOrEmpty(preferredDevice))
{
_openALDevice = ALC.OpenDevice(preferredDevice);
if (_openALDevice == IntPtr.Zero)
{
_openALSawmill.Warning("Unable to open preferred audio device '{0}': {1}. Falling back to default.",
preferredDevice, ALC.GetError(ALDevice.Null));
_openALDevice = ALC.OpenDevice(null);
}
}
else
{
_openALDevice = ALC.OpenDevice(null);
}
_checkAlcError(_openALDevice);
if (_openALDevice == IntPtr.Zero)
{
throw new InvalidOperationException($"Unable to open OpenAL device! {ALC.GetError(ALDevice.Null)}");
}
// Load up ALC extensions.
var s = ALC.GetString(_openALDevice, AlcGetString.Extensions) ?? "";
foreach (var extension in s.Split(' '))
{
_alcDeviceExtensions.Add(extension);
}
}
private void _shutdownAudio()
{
foreach (var source in _audioSources.Values.ToArray())
{
if (source.TryGetTarget(out var target))
{
target.Dispose();
}
}
foreach (var source in _bufferedAudioSources.Values.ToArray())
{
if (source.TryGetTarget(out var target))
{
target.Dispose();
}
}
if (_openALContext != ALContext.Null)
{
ALC.DestroyContext(_openALContext);
}
if (_openALDevice != IntPtr.Zero)
{
ALC.CloseDevice(_openALDevice);
}
}
private void _updateAudio()
{
var eye = _eyeManager.CurrentEye;
var (x, y) = eye.Position.Position;
AL.Listener(ALListener3f.Position, x, y, -5);
var rot2d = eye.Rotation.ToVec();
AL.Listener(ALListenerfv.Orientation, new []{0, 0, -1, rot2d.X, rot2d.Y, 0});
// Default orientation: at: (0, 0, -1) up: (0, 1, 0)
var (rotX, rotY) = eye.Rotation.ToVec();
var at = new Vector3(0f, 0f, -1f);
var up = new Vector3(rotY, rotX, 0f);
AL.Listener(ALListenerfv.Orientation, ref at, ref up);
// Clear out finalized audio sources.
while (_sourceDisposeQueue.TryDequeue(out var handles))
{
_openALSawmill.Debug("Cleaning out source {0} which finalized in another thread.", handles.sourceHandle);
if (IsEfxSupported) RemoveEfx(handles);
AL.DeleteSource(handles.sourceHandle);
_checkAlError();
_audioSources.Remove(handles.sourceHandle);
}
// Clear out finalized buffered audio sources.
while (_bufferedSourceDisposeQueue.TryDequeue(out var handles))
{
_openALSawmill.Debug("Cleaning out buffered source {0} which finalized in another thread.", handles.sourceHandle);
if (IsEfxSupported) RemoveEfx(handles);
AL.DeleteSource(handles.sourceHandle);
_checkAlError();
_bufferedAudioSources.Remove(handles.sourceHandle);
}
// Clear out finalized audio buffers.
while (_bufferDisposeQueue.TryDequeue(out var handle))
{
AL.DeleteBuffer((int) handle);
_checkAlError();
}
}
private static void RemoveEfx((int sourceHandle, int filterHandle) handles)
{
if (handles.filterHandle != 0) EFX.DeleteFilter(handles.filterHandle);
}
public void SetMasterVolume(float newVolume)
{
AL.Listener(ALListenerf.Gain, _baseGain * newVolume);
}
public void SetAudioAttenuation(int value)
{
var attenuation = (Attenuation) value;
switch (attenuation)
{
case Attenuation.NoAttenuation:
AL.DistanceModel(ALDistanceModel.None);
break;
case Attenuation.InverseDistance:
AL.DistanceModel(ALDistanceModel.InverseDistance);
break;
case Attenuation.Default:
case Attenuation.InverseDistanceClamped:
AL.DistanceModel(ALDistanceModel.InverseDistanceClamped);
break;
case Attenuation.LinearDistance:
AL.DistanceModel(ALDistanceModel.LinearDistance);
break;
case Attenuation.LinearDistanceClamped:
AL.DistanceModel(ALDistanceModel.LinearDistanceClamped);
break;
case Attenuation.ExponentDistance:
AL.DistanceModel(ALDistanceModel.ExponentDistance);
break;
case Attenuation.ExponentDistanceClamped:
AL.DistanceModel(ALDistanceModel.ExponentDistanceClamped);
break;
default:
throw new ArgumentOutOfRangeException($"No implementation to set {attenuation.ToString()} for DistanceModel!");
}
var attToString = attenuation == Attenuation.Default ? Attenuation.InverseDistanceClamped : attenuation;
_openALSawmill.Info($"Set audio attenuation to {attToString.ToString()}");
}
public IClydeAudioSource CreateAudioSource(AudioStream stream)
{
var source = AL.GenSource();
// ReSharper disable once PossibleInvalidOperationException
// TODO: This really shouldn't be indexing based on the ClydeHandle...
AL.Source(source, ALSourcei.Buffer, _audioSampleBuffers[(int) stream.ClydeHandle!.Value.Value].BufferHandle);
var audioSource = new AudioSource(this, source, stream);
_audioSources.Add(source, new WeakReference<AudioSource>(audioSource));
return audioSource;
}
public IClydeBufferedAudioSource CreateBufferedAudioSource(int buffers, bool floatAudio=false)
{
var source = AL.GenSource();
// ReSharper disable once PossibleInvalidOperationException
var audioSource = new BufferedAudioSource(this, source, AL.GenBuffers(buffers), floatAudio);
_bufferedAudioSources.Add(source, new WeakReference<BufferedAudioSource>(audioSource));
return audioSource;
}
private void _checkAlcError(ALDevice device,
[CallerMemberName] string callerMember = "",
[CallerLineNumber] int callerLineNumber = -1)
{
var error = ALC.GetError(device);
if (error != AlcError.NoError)
{
_openALSawmill.Error("[{0}:{1}] ALC error: {2}", callerMember, callerLineNumber, error);
}
}
private void _checkAlError([CallerMemberName] string callerMember = "",
[CallerLineNumber] int callerLineNumber = -1)
{
var error = AL.GetError();
if (error != ALError.NoError)
{
_openALSawmill.Error("[{0}:{1}] AL error: {2}", callerMember, callerLineNumber, error);
}
}
public AudioStream LoadAudioOggVorbis(Stream stream, string? name = null)
{
var vorbis = _readOggVorbis(stream);
var buffer = AL.GenBuffer();
ALFormat format;
// NVorbis only supports loading into floats.
// If this becomes a problem due to missing extension support (doubt it but ok),
// check the git history, I originally used libvorbisfile which worked and loaded 16 bit LPCM.
if (vorbis.Channels == 1)
{
format = ALFormat.MonoFloat32Ext;
}
else if (vorbis.Channels == 2)
{
format = ALFormat.StereoFloat32Ext;
}
else
{
throw new InvalidOperationException("Unable to load audio with more than 2 channels.");
}
unsafe
{
fixed (float* ptr = vorbis.Data.Span)
{
AL.BufferData(buffer, format, (IntPtr) ptr, vorbis.Data.Length * sizeof(float),
(int) vorbis.SampleRate);
}
}
_checkAlError();
var handle = new ClydeHandle(_audioSampleBuffers.Count);
_audioSampleBuffers.Add(new LoadedAudioSample(buffer));
var length = TimeSpan.FromSeconds(vorbis.TotalSamples / (double) vorbis.SampleRate);
return new AudioStream(handle, length, (int) vorbis.Channels, name);
}
public AudioStream LoadAudioWav(Stream stream, string? name = null)
{
var wav = _readWav(stream);
var buffer = AL.GenBuffer();
ALFormat format;
if (wav.BitsPerSample == 16)
{
if (wav.NumChannels == 1)
{
format = ALFormat.Mono16;
}
else if (wav.NumChannels == 2)
{
format = ALFormat.Stereo16;
}
else
{
throw new InvalidOperationException("Unable to load audio with more than 2 channels.");
}
}
else if (wav.BitsPerSample == 8)
{
if (wav.NumChannels == 1)
{
format = ALFormat.Mono8;
}
else if (wav.NumChannels == 2)
{
format = ALFormat.Stereo8;
}
else
{
throw new InvalidOperationException("Unable to load audio with more than 2 channels.");
}
}
else
{
throw new InvalidOperationException("Unable to load wav with bits per sample different from 8 or 16");
}
unsafe
{
fixed (byte* ptr = wav.Data.Span)
{
AL.BufferData(buffer, format, (IntPtr) ptr, wav.Data.Length, wav.SampleRate);
}
}
_checkAlError();
var handle = new ClydeHandle(_audioSampleBuffers.Count);
_audioSampleBuffers.Add(new LoadedAudioSample(buffer));
var length = TimeSpan.FromSeconds(wav.Data.Length / (double) wav.BlockAlign / wav.SampleRate);
return new AudioStream(handle, length, wav.NumChannels, name);
}
public AudioStream LoadAudioRaw(ReadOnlySpan<short> samples, int channels, int sampleRate, string? name = null)
{
var fmt = channels switch
{
1 => ALFormat.Mono16,
2 => ALFormat.Stereo16,
_ => throw new ArgumentOutOfRangeException(
nameof(channels), "Only stereo and mono is currently supported")
};
var buffer = AL.GenBuffer();
_checkAlError();
unsafe
{
fixed (short* ptr = samples)
{
AL.BufferData(buffer, fmt, (IntPtr) ptr, samples.Length * sizeof(short), sampleRate);
}
}
_checkAlError();
var handle = new ClydeHandle(_audioSampleBuffers.Count);
var length = TimeSpan.FromSeconds((double) samples.Length / channels / sampleRate);
_audioSampleBuffers.Add(new LoadedAudioSample(buffer));
return new AudioStream(handle, length, channels, name);
}
private sealed class LoadedAudioSample
{
public readonly int BufferHandle;
public LoadedAudioSample(int bufferHandle)
{
BufferHandle = bufferHandle;
}
}
private void DeleteSourceOnMainThread(int sourceHandle, int filterHandle)
{
_sourceDisposeQueue.Enqueue((sourceHandle, filterHandle));
}
private void DeleteBufferedSourceOnMainThread(int bufferedSourceHandle, int filterHandle)
{
_bufferedSourceDisposeQueue.Enqueue((bufferedSourceHandle, filterHandle));
}
private void DeleteAudioBufferOnMainThread(int bufferHandle)
{
_bufferDisposeQueue.Enqueue(bufferHandle);
}
private sealed class AudioSource : IClydeAudioSource
{
private int SourceHandle;
private readonly Clyde _master;
private readonly ClydeAudio _master;
private readonly AudioStream _sourceStream;
private int FilterHandle;
#if DEBUG
@@ -450,7 +34,7 @@ namespace Robust.Client.Graphics.Clyde
private bool IsEfxSupported => _master.IsEfxSupported;
public AudioSource(Clyde master, int sourceHandle, AudioStream sourceStream)
public AudioSource(ClydeAudio master, int sourceHandle, AudioStream sourceStream)
{
_master = master;
SourceHandle = sourceHandle;
@@ -700,7 +284,7 @@ namespace Robust.Client.Graphics.Clyde
private int? SourceHandle = null;
private int[] BufferHandles;
private Dictionary<int, int> BufferMap = new();
private readonly Clyde _master;
private readonly ClydeAudio _master;
private bool _mono = true;
private bool _float = false;
private int FilterHandle;
@@ -711,7 +295,7 @@ namespace Robust.Client.Graphics.Clyde
private bool IsEfxSupported => _master.IsEfxSupported;
public BufferedAudioSource(Clyde master, int sourceHandle, int[] bufferHandles, bool floatAudio = false)
public BufferedAudioSource(ClydeAudio master, int sourceHandle, int[] bufferHandles, bool floatAudio = false)
{
_master = master;
SourceHandle = sourceHandle;

View File

@@ -1,9 +1,9 @@
using System;
using System.IO;
namespace Robust.Client.Graphics.Clyde
namespace Robust.Client.Graphics.Audio
{
internal partial class Clyde
internal partial class ClydeAudio
{
private OggVorbisData _readOggVorbis(Stream stream)
{

View File

@@ -3,9 +3,9 @@ using System.IO;
using JetBrains.Annotations;
using Robust.Shared.Utility;
namespace Robust.Client.Graphics.Clyde
namespace Robust.Client.Graphics.Audio
{
internal partial class Clyde
internal partial class ClydeAudio
{
/// <summary>
/// Load up a WAVE file.

View File

@@ -0,0 +1,50 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using OpenToolkit.Audio.OpenAL;
using OpenToolkit.Audio.OpenAL.Extensions.Creative.EFX;
using OpenToolkit.Mathematics;
using Robust.Client.Audio;
using Robust.Shared;
using Robust.Shared.Configuration;
using Robust.Shared.IoC;
using Robust.Shared.Audio;
using Robust.Shared.Log;
using Robust.Shared.Timing;
using Vector2 = Robust.Shared.Maths.Vector2;
namespace Robust.Client.Graphics.Audio
{
internal partial class ClydeAudio
{
[Robust.Shared.IoC.Dependency] private readonly IConfigurationManager _cfg = default!;
[Robust.Shared.IoC.Dependency] private readonly IEyeManager _eyeManager = default!;
private Thread? _gameThread;
public bool InitializePostWindowing()
{
_gameThread = Thread.CurrentThread;
return _initializeAudio();
}
public void FrameProcess(FrameEventArgs eventArgs)
{
_updateAudio();
}
public void Shutdown()
{
_shutdownAudio();
}
private bool IsMainThread()
{
return Thread.CurrentThread == _gameThread;
}
}
}

View File

@@ -0,0 +1,399 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using OpenToolkit.Audio.OpenAL;
using OpenToolkit.Audio.OpenAL.Extensions.Creative.EFX;
using OpenToolkit.Mathematics;
using Robust.Client.Audio;
using Robust.Shared;
using Robust.Shared.Configuration;
using Robust.Shared.IoC;
using Robust.Shared.Audio;
using Robust.Shared.Log;
using Vector2 = Robust.Shared.Maths.Vector2;
namespace Robust.Client.Graphics.Audio
{
internal partial class ClydeAudio : IClydeAudio, IClydeAudioInternal
{
private ALDevice _openALDevice;
private ALContext _openALContext;
private readonly List<LoadedAudioSample> _audioSampleBuffers = new();
private readonly Dictionary<int, WeakReference<AudioSource>> _audioSources =
new();
private readonly Dictionary<int, WeakReference<BufferedAudioSource>> _bufferedAudioSources =
new();
private readonly HashSet<string> _alcDeviceExtensions = new();
private readonly HashSet<string> _alContextExtensions = new();
// The base gain value for a listener, used to boost the default volume.
private const float _baseGain = 2f;
public bool HasAlDeviceExtension(string extension) => _alcDeviceExtensions.Contains(extension);
public bool HasAlContextExtension(string extension) => _alContextExtensions.Contains(extension);
internal bool IsEfxSupported;
private ISawmill _openALSawmill = default!;
private bool _initializeAudio()
{
_openALSawmill = Logger.GetSawmill("clyde.oal");
if (!_audioOpenDevice())
return false;
// Create OpenAL context.
_audioCreateContext();
IsEfxSupported = HasAlDeviceExtension("ALC_EXT_EFX");
_cfg.OnValueChanged(CVars.AudioMasterVolume, SetMasterVolume, true);
_cfg.OnValueChanged(CVars.AudioAttenuation, SetAudioAttenuation, true);
return true;
}
private void _audioCreateContext()
{
unsafe
{
_openALContext = ALC.CreateContext(_openALDevice, (int*) 0);
}
ALC.MakeContextCurrent(_openALContext);
_checkAlcError(_openALDevice);
_checkAlError();
// Load up AL context extensions.
var s = ALC.GetString(ALDevice.Null, AlcGetString.Extensions) ?? "";
foreach (var extension in s.Split(' '))
{
_alContextExtensions.Add(extension);
}
_openALSawmill.Debug("OpenAL Vendor: {0}", AL.Get(ALGetString.Vendor));
_openALSawmill.Debug("OpenAL Renderer: {0}", AL.Get(ALGetString.Renderer));
_openALSawmill.Debug("OpenAL Version: {0}", AL.Get(ALGetString.Version));
}
private bool _audioOpenDevice()
{
var preferredDevice = _cfg.GetCVar(CVars.AudioDevice);
// Open device.
if (!string.IsNullOrEmpty(preferredDevice))
{
_openALDevice = ALC.OpenDevice(preferredDevice);
if (_openALDevice == IntPtr.Zero)
{
_openALSawmill.Warning("Unable to open preferred audio device '{0}': {1}. Falling back to default.",
preferredDevice, ALC.GetError(ALDevice.Null));
_openALDevice = ALC.OpenDevice(null);
}
}
else
{
_openALDevice = ALC.OpenDevice(null);
}
_checkAlcError(_openALDevice);
if (_openALDevice == IntPtr.Zero)
{
_openALSawmill.Error("Unable to open OpenAL device! {0}", ALC.GetError(ALDevice.Null));
return false;
}
// Load up ALC extensions.
var s = ALC.GetString(_openALDevice, AlcGetString.Extensions) ?? "";
foreach (var extension in s.Split(' '))
{
_alcDeviceExtensions.Add(extension);
}
return true;
}
private void _shutdownAudio()
{
foreach (var source in _audioSources.Values.ToArray())
{
if (source.TryGetTarget(out var target))
{
target.Dispose();
}
}
foreach (var source in _bufferedAudioSources.Values.ToArray())
{
if (source.TryGetTarget(out var target))
{
target.Dispose();
}
}
if (_openALContext != ALContext.Null)
{
ALC.DestroyContext(_openALContext);
}
if (_openALDevice != IntPtr.Zero)
{
ALC.CloseDevice(_openALDevice);
}
}
private void _updateAudio()
{
var eye = _eyeManager.CurrentEye;
var (x, y) = eye.Position.Position;
AL.Listener(ALListener3f.Position, x, y, -5);
var rot2d = eye.Rotation.ToVec();
AL.Listener(ALListenerfv.Orientation, new []{0, 0, -1, rot2d.X, rot2d.Y, 0});
// Default orientation: at: (0, 0, -1) up: (0, 1, 0)
var (rotX, rotY) = eye.Rotation.ToVec();
var at = new Vector3(0f, 0f, -1f);
var up = new Vector3(rotY, rotX, 0f);
AL.Listener(ALListenerfv.Orientation, ref at, ref up);
_flushALDisposeQueues();
}
private static void RemoveEfx((int sourceHandle, int filterHandle) handles)
{
if (handles.filterHandle != 0) EFX.DeleteFilter(handles.filterHandle);
}
public void SetMasterVolume(float newVolume)
{
AL.Listener(ALListenerf.Gain, _baseGain * newVolume);
}
public void SetAudioAttenuation(int value)
{
var attenuation = (Attenuation) value;
switch (attenuation)
{
case Attenuation.NoAttenuation:
AL.DistanceModel(ALDistanceModel.None);
break;
case Attenuation.InverseDistance:
AL.DistanceModel(ALDistanceModel.InverseDistance);
break;
case Attenuation.Default:
case Attenuation.InverseDistanceClamped:
AL.DistanceModel(ALDistanceModel.InverseDistanceClamped);
break;
case Attenuation.LinearDistance:
AL.DistanceModel(ALDistanceModel.LinearDistance);
break;
case Attenuation.LinearDistanceClamped:
AL.DistanceModel(ALDistanceModel.LinearDistanceClamped);
break;
case Attenuation.ExponentDistance:
AL.DistanceModel(ALDistanceModel.ExponentDistance);
break;
case Attenuation.ExponentDistanceClamped:
AL.DistanceModel(ALDistanceModel.ExponentDistanceClamped);
break;
default:
throw new ArgumentOutOfRangeException($"No implementation to set {attenuation.ToString()} for DistanceModel!");
}
var attToString = attenuation == Attenuation.Default ? Attenuation.InverseDistanceClamped : attenuation;
_openALSawmill.Info($"Set audio attenuation to {attToString.ToString()}");
}
public IClydeAudioSource CreateAudioSource(AudioStream stream)
{
var source = AL.GenSource();
// ReSharper disable once PossibleInvalidOperationException
// TODO: This really shouldn't be indexing based on the ClydeHandle...
AL.Source(source, ALSourcei.Buffer, _audioSampleBuffers[(int) stream.ClydeHandle!.Value.Value].BufferHandle);
var audioSource = new AudioSource(this, source, stream);
_audioSources.Add(source, new WeakReference<AudioSource>(audioSource));
return audioSource;
}
public IClydeBufferedAudioSource CreateBufferedAudioSource(int buffers, bool floatAudio=false)
{
var source = AL.GenSource();
// ReSharper disable once PossibleInvalidOperationException
var audioSource = new BufferedAudioSource(this, source, AL.GenBuffers(buffers), floatAudio);
_bufferedAudioSources.Add(source, new WeakReference<BufferedAudioSource>(audioSource));
return audioSource;
}
private void _checkAlcError(ALDevice device,
[CallerMemberName] string callerMember = "",
[CallerLineNumber] int callerLineNumber = -1)
{
var error = ALC.GetError(device);
if (error != AlcError.NoError)
{
_openALSawmill.Error("[{0}:{1}] ALC error: {2}", callerMember, callerLineNumber, error);
}
}
private void _checkAlError([CallerMemberName] string callerMember = "",
[CallerLineNumber] int callerLineNumber = -1)
{
var error = AL.GetError();
if (error != ALError.NoError)
{
_openALSawmill.Error("[{0}:{1}] AL error: {2}", callerMember, callerLineNumber, error);
}
}
public AudioStream LoadAudioOggVorbis(Stream stream, string? name = null)
{
var vorbis = _readOggVorbis(stream);
var buffer = AL.GenBuffer();
ALFormat format;
// NVorbis only supports loading into floats.
// If this becomes a problem due to missing extension support (doubt it but ok),
// check the git history, I originally used libvorbisfile which worked and loaded 16 bit LPCM.
if (vorbis.Channels == 1)
{
format = ALFormat.MonoFloat32Ext;
}
else if (vorbis.Channels == 2)
{
format = ALFormat.StereoFloat32Ext;
}
else
{
throw new InvalidOperationException("Unable to load audio with more than 2 channels.");
}
unsafe
{
fixed (float* ptr = vorbis.Data.Span)
{
AL.BufferData(buffer, format, (IntPtr) ptr, vorbis.Data.Length * sizeof(float),
(int) vorbis.SampleRate);
}
}
_checkAlError();
var handle = new ClydeHandle(_audioSampleBuffers.Count);
_audioSampleBuffers.Add(new LoadedAudioSample(buffer));
var length = TimeSpan.FromSeconds(vorbis.TotalSamples / (double) vorbis.SampleRate);
return new AudioStream(handle, length, (int) vorbis.Channels, name);
}
public AudioStream LoadAudioWav(Stream stream, string? name = null)
{
var wav = _readWav(stream);
var buffer = AL.GenBuffer();
ALFormat format;
if (wav.BitsPerSample == 16)
{
if (wav.NumChannels == 1)
{
format = ALFormat.Mono16;
}
else if (wav.NumChannels == 2)
{
format = ALFormat.Stereo16;
}
else
{
throw new InvalidOperationException("Unable to load audio with more than 2 channels.");
}
}
else if (wav.BitsPerSample == 8)
{
if (wav.NumChannels == 1)
{
format = ALFormat.Mono8;
}
else if (wav.NumChannels == 2)
{
format = ALFormat.Stereo8;
}
else
{
throw new InvalidOperationException("Unable to load audio with more than 2 channels.");
}
}
else
{
throw new InvalidOperationException("Unable to load wav with bits per sample different from 8 or 16");
}
unsafe
{
fixed (byte* ptr = wav.Data.Span)
{
AL.BufferData(buffer, format, (IntPtr) ptr, wav.Data.Length, wav.SampleRate);
}
}
_checkAlError();
var handle = new ClydeHandle(_audioSampleBuffers.Count);
_audioSampleBuffers.Add(new LoadedAudioSample(buffer));
var length = TimeSpan.FromSeconds(wav.Data.Length / (double) wav.BlockAlign / wav.SampleRate);
return new AudioStream(handle, length, wav.NumChannels, name);
}
public AudioStream LoadAudioRaw(ReadOnlySpan<short> samples, int channels, int sampleRate, string? name = null)
{
var fmt = channels switch
{
1 => ALFormat.Mono16,
2 => ALFormat.Stereo16,
_ => throw new ArgumentOutOfRangeException(
nameof(channels), "Only stereo and mono is currently supported")
};
var buffer = AL.GenBuffer();
_checkAlError();
unsafe
{
fixed (short* ptr = samples)
{
AL.BufferData(buffer, fmt, (IntPtr) ptr, samples.Length * sizeof(short), sampleRate);
}
}
_checkAlError();
var handle = new ClydeHandle(_audioSampleBuffers.Count);
var length = TimeSpan.FromSeconds((double) samples.Length / channels / sampleRate);
_audioSampleBuffers.Add(new LoadedAudioSample(buffer));
return new AudioStream(handle, length, channels, name);
}
private sealed class LoadedAudioSample
{
public readonly int BufferHandle;
public LoadedAudioSample(int bufferHandle)
{
BufferHandle = bufferHandle;
}
}
}
}

View File

@@ -0,0 +1,70 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using JetBrains.Annotations;
using Robust.Client.Audio;
using Robust.Client.Input;
using Robust.Client.UserInterface.CustomControls;
using Robust.Shared.Map;
using Robust.Shared.Maths;
using Robust.Shared.Timing;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.PixelFormats;
using Color = Robust.Shared.Maths.Color;
namespace Robust.Client.Graphics.Audio
{
/// <summary>
/// Hey look, it's ClydeAudio's evil twin brother!
/// </summary>
[UsedImplicitly]
internal sealed class ClydeAudioHeadless : IClydeAudio, IClydeAudioInternal
{
public bool InitializePostWindowing()
{
return true;
}
public void FrameProcess(FrameEventArgs eventArgs)
{
}
public void Shutdown()
{
}
public AudioStream LoadAudioOggVorbis(Stream stream, string? name = null)
{
// TODO: Might wanna actually load this so the length gets reported correctly.
return new(default, default, 1, name);
}
public AudioStream LoadAudioWav(Stream stream, string? name = null)
{
// TODO: Might wanna actually load this so the length gets reported correctly.
return new(default, default, 1, name);
}
public AudioStream LoadAudioRaw(ReadOnlySpan<short> samples, int channels, int sampleRate, string? name = null)
{
// TODO: Might wanna actually load this so the length gets reported correctly.
return new(default, default, channels, name);
}
public IClydeAudioSource CreateAudioSource(AudioStream stream)
{
return DummyAudioSource.Instance;
}
public IClydeBufferedAudioSource CreateBufferedAudioSource(int buffers, bool floatAudio = false)
{
return DummyBufferedAudioSource.Instance;
}
public void SetMasterVolume(float newVolume)
{
// Nada.
}
}
}

View File

@@ -0,0 +1,98 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using JetBrains.Annotations;
using Robust.Client.Audio;
using Robust.Client.Input;
using Robust.Client.UserInterface.CustomControls;
using Robust.Shared.Map;
using Robust.Shared.Maths;
using Robust.Shared.Timing;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.PixelFormats;
using Color = Robust.Shared.Maths.Color;
namespace Robust.Client.Graphics.Audio
{
/// <summary>
/// Hey look, it's ClydeAudio.AudioSource's evil twin brother!
/// </summary>
internal class DummyAudioSource : IClydeAudioSource
{
public static DummyAudioSource Instance { get; } = new();
public bool IsPlaying => default;
public bool IsLooping { get; set; }
public void Dispose()
{
// Nada.
}
public void StartPlaying()
{
// Nada.
}
public void StopPlaying()
{
// Nada.
}
public bool SetPosition(Vector2 position)
{
return true;
}
public void SetPitch(float pitch)
{
// Nada.
}
public void SetGlobal()
{
// Nada.
}
public void SetVolume(float decibels)
{
// Nada.
}
public void SetVolumeDirect(float scale)
{
// Nada.
}
public void SetMaxDistance(float maxDistance)
{
// Nada.
}
public void SetRolloffFactor(float rolloffFactor)
{
// Nada.
}
public void SetReferenceDistance(float refDistance)
{
// Nada.
}
public void SetOcclusion(float blocks)
{
// Nada.
}
public void SetPlaybackPosition(float seconds)
{
// Nada.
}
public void SetVelocity(Vector2 velocity)
{
// Nada.
}
}
}

View File

@@ -0,0 +1,56 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using JetBrains.Annotations;
using Robust.Client.Audio;
using Robust.Client.Input;
using Robust.Client.UserInterface.CustomControls;
using Robust.Shared.Map;
using Robust.Shared.Maths;
using Robust.Shared.Timing;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.PixelFormats;
using Color = Robust.Shared.Maths.Color;
namespace Robust.Client.Graphics.Audio
{
/// <summary>
/// Hey look, it's ClydeAudio.BufferedAudioSource's evil twin brother!
/// </summary>
internal sealed class DummyBufferedAudioSource : DummyAudioSource, IClydeBufferedAudioSource
{
public new static DummyBufferedAudioSource Instance { get; } = new();
public int SampleRate { get; set; } = 0;
public void WriteBuffer(int handle, ReadOnlySpan<ushort> data)
{
// Nada.
}
public void WriteBuffer(int handle, ReadOnlySpan<float> data)
{
// Nada.
}
public void QueueBuffers(ReadOnlySpan<int> handles)
{
// Nada.
}
public void EmptyBuffers()
{
// Nada.
}
public void GetBuffersProcessed(Span<int> handles)
{
// Nada.
}
public int GetNumberOfBuffersProcessed()
{
return 0;
}
}
}

View File

@@ -0,0 +1,34 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using JetBrains.Annotations;
using Robust.Client.Audio;
using Robust.Client.Input;
using Robust.Shared.Timing;
using Robust.Shared.IoC;
namespace Robust.Client.Graphics.Audio
{
/// <summary>
/// For "start ss14 with no audio devices" Smugleaf
/// </summary>
[UsedImplicitly]
internal sealed class FallbackProxyClydeAudio : ProxyClydeAudio
{
public override bool InitializePostWindowing()
{
// Deliberate lack of base call here (see base implementation for comments as to why there even is a base)
ActualImplementation = new ClydeAudio();
IoCManager.InjectDependencies(ActualImplementation);
if (ActualImplementation.InitializePostWindowing())
return true;
// If we get here, that failed, so use the fallback
ActualImplementation = new ClydeAudioHeadless();
IoCManager.InjectDependencies(ActualImplementation);
return ActualImplementation.InitializePostWindowing();
}
}
}
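
The fallback above works because the rest of the client only ever resolves the IClydeAudio / IClydeAudioInternal interfaces from IoC (see the registration hunk at the top of this commit); which backend ends up behind the proxy is decided once, at startup. A rough, hypothetical startup sketch under that assumption:

// Consumers never learn whether the real OpenAL backend or the headless stub won.
IoCManager.Register<IClydeAudio, FallbackProxyClydeAudio>();
IoCManager.Register<IClydeAudioInternal, FallbackProxyClydeAudio>();

var audio = IoCManager.Resolve<IClydeAudioInternal>();
// Tries ClydeAudio first; if no audio device can be opened, ClydeAudioHeadless takes over.
audio.InitializePostWindowing();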

View File

@@ -0,0 +1,72 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using JetBrains.Annotations;
using Robust.Client.Audio;
using Robust.Client.Input;
using Robust.Client.UserInterface.CustomControls;
using Robust.Shared.Map;
using Robust.Shared.Maths;
using Robust.Shared.Timing;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.PixelFormats;
using Color = Robust.Shared.Maths.Color;
namespace Robust.Client.Graphics.Audio
{
/// <summary>
/// For "start ss14 with no audio devices" Smugleaf
/// </summary>
[UsedImplicitly]
internal abstract class ProxyClydeAudio : IClydeAudio, IClydeAudioInternal
{
protected IClydeAudioInternal ActualImplementation = default!;
public virtual bool InitializePostWindowing()
{
// This particular implementation exists to be overridden because removing this method causes C# to complain
return ActualImplementation.InitializePostWindowing();
}
public void FrameProcess(FrameEventArgs eventArgs)
{
ActualImplementation.FrameProcess(eventArgs);
}
public void Shutdown()
{
ActualImplementation.Shutdown();
}
public AudioStream LoadAudioOggVorbis(Stream stream, string? name = null)
{
return ActualImplementation.LoadAudioOggVorbis(stream, name);
}
public AudioStream LoadAudioWav(Stream stream, string? name = null)
{
return ActualImplementation.LoadAudioWav(stream, name);
}
public AudioStream LoadAudioRaw(ReadOnlySpan<short> samples, int channels, int sampleRate, string? name = null)
{
return ActualImplementation.LoadAudioRaw(samples, channels, sampleRate, name);
}
public IClydeAudioSource CreateAudioSource(AudioStream stream)
{
return ActualImplementation.CreateAudioSource(stream);
}
public IClydeBufferedAudioSource CreateBufferedAudioSource(int buffers, bool floatAudio = false)
{
return ActualImplementation.CreateBufferedAudioSource(buffers, floatAudio);
}
public void SetMasterVolume(float newVolume)
{
ActualImplementation.SetMasterVolume(newVolume);
}
}
}

View File

@@ -22,7 +22,7 @@ namespace Robust.Client.Graphics.Clyde
/// <summary>
/// Responsible for most things rendering on OpenGL mode.
/// </summary>
internal sealed partial class Clyde : IClydeInternal, IClydeAudio, IPostInjectInit
internal sealed partial class Clyde : IClydeInternal, IPostInjectInit
{
[Dependency] private readonly IClydeTileDefinitionManager _tileDefinitionManager = default!;
[Dependency] private readonly IEyeManager _eyeManager = default!;
@@ -97,7 +97,6 @@ namespace Robust.Client.Graphics.Clyde
if (!InitMainWindowAndRenderer())
return false;
_initializeAudio();
return true;
}
@@ -115,8 +114,6 @@ namespace Robust.Client.Graphics.Clyde
public void FrameProcess(FrameEventArgs eventArgs)
{
_updateAudio();
_windowing?.FlushDispose();
FlushShaderInstanceDispose();
FlushRenderTargetDispose();
@@ -509,7 +506,6 @@ namespace Robust.Client.Graphics.Clyde
{
_glContext?.Shutdown();
ShutdownWindowing();
_shutdownAudio();
}
private bool IsMainThread()

View File

@@ -19,7 +19,7 @@ namespace Robust.Client.Graphics.Clyde
/// Hey look, it's Clyde's evil twin brother!
/// </summary>
[UsedImplicitly]
internal sealed class ClydeHeadless : IClydeInternal, IClydeAudio
internal sealed class ClydeHeadless : IClydeInternal
{
// Would it make sense to report a fake resolution like 720p here so code doesn't break? idk.
public IClydeWindow MainWindow { get; }
@@ -236,34 +236,6 @@ namespace Robust.Client.Graphics.Clyde
// Nada.
}
public AudioStream LoadAudioOggVorbis(Stream stream, string? name = null)
{
// TODO: Might wanna actually load this so the length gets reported correctly.
return new(default, default, 1, name);
}
public AudioStream LoadAudioWav(Stream stream, string? name = null)
{
// TODO: Might wanna actually load this so the length gets reported correctly.
return new(default, default, 1, name);
}
public AudioStream LoadAudioRaw(ReadOnlySpan<short> samples, int channels, int sampleRate, string? name = null)
{
// TODO: Might wanna actually load this so the length gets reported correctly.
return new(default, default, channels, name);
}
public IClydeAudioSource CreateAudioSource(AudioStream stream)
{
return DummyAudioSource.Instance;
}
public IClydeBufferedAudioSource CreateBufferedAudioSource(int buffers, bool floatAudio = false)
{
return DummyBufferedAudioSource.Instance;
}
public Task<string> GetText()
{
return Task.FromResult(string.Empty);
@@ -274,11 +246,6 @@ namespace Robust.Client.Graphics.Clyde
// Nada.
}
public void SetMasterVolume(float newVolume)
{
// Nada.
}
private class DummyCursor : ICursor
{
public void Dispose()

View File

@@ -0,0 +1,16 @@
using System;
using Robust.Client.Input;
using Robust.Client.UserInterface;
using Robust.Shared.Map;
using Robust.Shared.Maths;
using Robust.Shared.Timing;
namespace Robust.Client.Graphics
{
internal interface IClydeAudioInternal : IClydeAudio
{
bool InitializePostWindowing();
void FrameProcess(FrameEventArgs eventArgs);
void Shutdown();
}
}
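
GameController drives this surface directly (see the GameController hunks earlier in the commit): InitializePostWindowing() once after the windowing backend is up, FrameProcess() every frame, Shutdown() on exit. A minimal, hypothetical driver sketch showing that call order (everything except the interface methods is assumed for illustration):

using System;
using Robust.Shared.Timing;

internal static class AudioLifecycleSketch
{
    public static void Run(IClydeAudioInternal clydeAudio, Func<FrameEventArgs?> nextFrame)
    {
        clydeAudio.InitializePostWindowing();  // after Clyde has created the window
        while (nextFrame() is { } frame)
            clydeAudio.FrameProcess(frame);    // flush dispose queues, update the listener
        clydeAudio.Shutdown();                 // dispose sources, destroy context, close device
    }
}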