diff --git a/src/Ryujinx.Audio.Backends.Apple/AppleAudioBuffer.cs b/src/Ryujinx.Audio.Backends.Apple/AppleAudioBuffer.cs
new file mode 100644
index 000000000..995236889
--- /dev/null
+++ b/src/Ryujinx.Audio.Backends.Apple/AppleAudioBuffer.cs
@@ -0,0 +1,21 @@
+namespace Ryujinx.Audio.Backends.Apple
+{
+    /// <summary>
+    /// Tracks playback progress of one client-submitted audio buffer.
+    /// </summary>
+    class AppleAudioBuffer
+    {
+        public readonly ulong DriverIdentifier;
+        public readonly ulong SampleCount;
+
+        // Samples consumed so far; advanced atomically from the AudioQueue callback thread.
+        public ulong SamplePlayed;
+
+        public AppleAudioBuffer(ulong driverIdentifier, ulong sampleCount)
+        {
+            DriverIdentifier = driverIdentifier;
+            SampleCount = sampleCount;
+            SamplePlayed = 0;
+        }
+    }
+}
diff --git a/src/Ryujinx.Audio.Backends.Apple/AppleHardwareDeviceDriver.cs b/src/Ryujinx.Audio.Backends.Apple/AppleHardwareDeviceDriver.cs
new file mode 100644
index 000000000..62d81c6cc
--- /dev/null
+++ b/src/Ryujinx.Audio.Backends.Apple/AppleHardwareDeviceDriver.cs
@@ -0,0 +1,253 @@
+using Ryujinx.Audio.Common;
+using Ryujinx.Audio.Integration;
+using Ryujinx.Common.Logging;
+using Ryujinx.Memory;
+using System;
+using System.Collections.Concurrent;
+using System.Runtime.InteropServices;
+using System.Threading;
+using System.Runtime.Versioning;
+using Ryujinx.Audio.Backends.Apple.Native;
+using static Ryujinx.Audio.Backends.Apple.Native.AudioToolbox;
+using static Ryujinx.Audio.Integration.IHardwareDeviceDriver;
+
+namespace Ryujinx.Audio.Backends.Apple
+{
+    /// <summary>
+    /// Audio output driver backed by Apple's AudioToolbox (AudioQueue) API.
+    /// </summary>
+    [SupportedOSPlatform("macos")]
+    [SupportedOSPlatform("ios")]
+    public class AppleHardwareDeviceDriver : IHardwareDeviceDriver
+    {
+        private readonly ManualResetEvent _updateRequiredEvent;
+        private readonly ManualResetEvent _pauseEvent;
+        private readonly ConcurrentDictionary<AppleHardwareDeviceSession, byte> _sessions;
+        private readonly bool _supportSurroundConfiguration;
+
+        public float Volume { get; set; }
+
+        public AppleHardwareDeviceDriver()
+        {
+            _updateRequiredEvent = new ManualResetEvent(false);
+            _pauseEvent = new ManualResetEvent(true);
+            _sessions = new ConcurrentDictionary<AppleHardwareDeviceSession, byte>();
+
+            _supportSurroundConfiguration = TestSurroundSupport();
+
+            Volume = 1f;
+        }
+
+        // Probes whether a 5.1 (MPEG 5.1 A) channel layout can be applied to an output queue.
+        private bool TestSurroundSupport()
+        {
+            try
+            {
+                AudioStreamBasicDescription format = GetAudioFormat(SampleFormat.PcmFloat, Constants.TargetSampleRate, 6);
+
+                int result = AudioQueueNewOutput(
+                    ref format,
+                    IntPtr.Zero,
+                    IntPtr.Zero,
+                    IntPtr.Zero,
+                    IntPtr.Zero,
+                    0,
+                    out IntPtr testQueue);
+
+                if (result != 0)
+                {
+                    return false;
+                }
+
+                try
+                {
+                    AudioChannelLayout layout = new AudioChannelLayout
+                    {
+                        AudioChannelLayoutTag = kAudioChannelLayoutTag_MPEG_5_1_A,
+                        AudioChannelBitmap = 0,
+                        NumberChannelDescriptions = 0,
+                    };
+
+                    int layoutResult = AudioQueueSetProperty(
+                        testQueue,
+                        kAudioQueueProperty_ChannelLayout,
+                        ref layout,
+                        (uint)Marshal.SizeOf<AudioChannelLayout>());
+
+                    return layoutResult == 0;
+                }
+                finally
+                {
+                    // Always release the probe queue, whatever the property result was.
+                    AudioQueueDispose(testQueue, true);
+                }
+            }
+            catch
+            {
+                // Any native failure simply means surround is unavailable.
+                return false;
+            }
+        }
+
+        public static bool IsSupported => IsSupportedInternal();
+
+        // Opens and immediately disposes a stereo output queue to check AudioToolbox availability.
+        private static bool IsSupportedInternal()
+        {
+            try
+            {
+                AudioStreamBasicDescription format = GetAudioFormat(SampleFormat.PcmInt16, Constants.TargetSampleRate, 2);
+
+                int result = AudioQueueNewOutput(
+                    ref format,
+                    IntPtr.Zero,
+                    IntPtr.Zero,
+                    IntPtr.Zero,
+                    IntPtr.Zero,
+                    0,
+                    out IntPtr testQueue);
+
+                if (result == 0)
+                {
+                    AudioQueueDispose(testQueue, true);
+
+                    return true;
+                }
+
+                return false;
+            }
+            catch
+            {
+                return false;
+            }
+        }
+
+        public ManualResetEvent GetUpdateRequiredEvent()
+        {
+            return _updateRequiredEvent;
+        }
+
+        public ManualResetEvent GetPauseEvent()
+        {
+            return _pauseEvent;
+        }
+
+        public IHardwareDeviceSession OpenDeviceSession(Direction direction, IVirtualMemoryManager memoryManager, SampleFormat sampleFormat, uint sampleRate, uint channelCount)
+        {
+            if (channelCount == 0)
+            {
+                channelCount = 2;
+            }
+
+            if (sampleRate == 0)
+            {
+                sampleRate = Constants.TargetSampleRate;
+            }
+
+            if (direction != Direction.Output)
+            {
+                throw new NotImplementedException("Input direction is currently not implemented on Apple backend!");
+            }
+
+            AppleHardwareDeviceSession session = new(this, memoryManager, sampleFormat, sampleRate, channelCount);
+
+            _sessions.TryAdd(session, 0);
+
+            return session;
+        }
+
+        // Called by a session when it is disposed so the driver stops tracking it.
+        internal bool Unregister(AppleHardwareDeviceSession session)
+        {
+            return _sessions.TryRemove(session, out _);
+        }
+
+        internal static AudioStreamBasicDescription GetAudioFormat(SampleFormat sampleFormat, uint sampleRate, uint channelCount)
+        {
+            uint formatFlags;
+            uint bitsPerChannel;
+
+            switch (sampleFormat)
+            {
+                case SampleFormat.PcmInt8:
+                    formatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
+                    bitsPerChannel = 8;
+                    break;
+                case SampleFormat.PcmInt16:
+                    formatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
+                    bitsPerChannel = 16;
+                    break;
+                case SampleFormat.PcmInt32:
+                    formatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
+                    bitsPerChannel = 32;
+                    break;
+                case SampleFormat.PcmFloat:
+                    formatFlags = kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked;
+                    bitsPerChannel = 32;
+                    break;
+                default:
+                    throw new ArgumentException($"Unsupported sample format {sampleFormat}");
+            }
+
+            uint bytesPerFrame = (bitsPerChannel / 8) * channelCount;
+
+            return new AudioStreamBasicDescription
+            {
+                SampleRate = sampleRate,
+                FormatID = kAudioFormatLinearPCM,
+                FormatFlags = formatFlags,
+                BytesPerPacket = bytesPerFrame,
+                FramesPerPacket = 1,
+                BytesPerFrame = bytesPerFrame,
+                ChannelsPerFrame = channelCount,
+                BitsPerChannel = bitsPerChannel,
+                Reserved = 0,
+            };
+        }
+
+        public void Dispose()
+        {
+            GC.SuppressFinalize(this);
+            Dispose(true);
+        }
+
+        protected virtual void Dispose(bool disposing)
+        {
+            if (disposing)
+            {
+                // ConcurrentDictionary enumeration is safe while sessions remove themselves.
+                foreach (AppleHardwareDeviceSession session in _sessions.Keys)
+                {
+                    session.Dispose();
+                }
+
+                _pauseEvent.Dispose();
+            }
+        }
+
+        public bool SupportsSampleRate(uint sampleRate)
+        {
+            return true;
+        }
+
+        public bool SupportsSampleFormat(SampleFormat sampleFormat)
+        {
+            // 24-bit packed PCM is the only format GetAudioFormat cannot describe.
+            return sampleFormat != SampleFormat.PcmInt24;
+        }
+
+        public bool SupportsChannelCount(uint channelCount)
+        {
+            if (channelCount == 6)
+            {
+                return _supportSurroundConfiguration;
+            }
+
+            return true;
+        }
+
+        public bool SupportsDirection(Direction direction)
+        {
+            return direction != Direction.Input;
+        }
+    }
+}
diff --git a/src/Ryujinx.Audio.Backends.Apple/AppleHardwareDeviceSession.cs b/src/Ryujinx.Audio.Backends.Apple/AppleHardwareDeviceSession.cs
new file mode 100644
index 000000000..95826b4f4
--- /dev/null
+++ b/src/Ryujinx.Audio.Backends.Apple/AppleHardwareDeviceSession.cs
@@ -0,0 +1,330 @@
+using Ryujinx.Audio.Backends.Common;
+using Ryujinx.Audio.Common;
+using Ryujinx.Common.Logging;
+using Ryujinx.Memory;
+using System;
+using System.Collections.Concurrent;
+using System.Runtime.InteropServices;
+using System.Threading;
+using System.Runtime.Versioning;
+using Ryujinx.Audio.Backends.Apple.Native;
+using static Ryujinx.Audio.Backends.Apple.Native.AudioToolbox;
+using static Ryujinx.Audio.Backends.Apple.AppleHardwareDeviceDriver;
+
+namespace Ryujinx.Audio.Backends.Apple
+{
+    [SupportedOSPlatform("macos")]
+    [SupportedOSPlatform("ios")]
+    class AppleHardwareDeviceSession : HardwareDeviceSessionOutputBase
+    {
+        // Number of AudioQueue buffers kept in flight (triple buffering).
+        private const int NumBuffers = 3;
+
+        private readonly AppleHardwareDeviceDriver _driver;
+        private readonly ConcurrentQueue<AppleAudioBuffer> _queuedBuffers = new();
+        private readonly DynamicRingBuffer _ringBuffer = new();
+        private readonly ManualResetEvent _updateRequiredEvent;
+
+        private readonly AudioQueueOutputCallback _callbackDelegate;
+        private readonly GCHandle _gcHandle;
+
+        private IntPtr _audioQueue;
+        private readonly IntPtr[] _audioQueueBuffers = new IntPtr[NumBuffers];
+        private readonly int[] _bufferBytesFilled = new int[NumBuffers];
+
+        private readonly int _bytesPerFrame;
+
+        private ulong _playedSampleCount;
+        private bool _started;
+        private float _volume = 1f;
+
+        private readonly object _lock = new();
+
+        // Native AudioQueue output callback signature: (userData, queue, buffer).
+        [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+        private delegate void AudioQueueOutputCallback(
+            IntPtr userData,
+            IntPtr audioQueue,
+            IntPtr buffer);
+
+        public AppleHardwareDeviceSession(
+            AppleHardwareDeviceDriver driver,
+            IVirtualMemoryManager memoryManager,
+            SampleFormat requestedSampleFormat,
+            uint requestedSampleRate,
+            uint requestedChannelCount)
+            : base(memoryManager, requestedSampleFormat, requestedSampleRate, requestedChannelCount)
+        {
+            _driver = driver;
+            _updateRequiredEvent = driver.GetUpdateRequiredEvent();
+            _callbackDelegate = OutputCallback;
+            _bytesPerFrame = BackendHelper.GetSampleSize(requestedSampleFormat) * (int)requestedChannelCount;
+
+            // Keep a strong handle so this session stays referenced while native code may call back.
+            _gcHandle = GCHandle.Alloc(this, GCHandleType.Normal);
+
+            SetupAudioQueue();
+        }
+
+        private void SetupAudioQueue()
+        {
+            lock (_lock)
+            {
+                var format = AppleHardwareDeviceDriver.GetAudioFormat(
+                    RequestedSampleFormat,
+                    RequestedSampleRate,
+                    RequestedChannelCount);
+
+                IntPtr callbackPtr = Marshal.GetFunctionPointerForDelegate(_callbackDelegate);
+                IntPtr userData = GCHandle.ToIntPtr(_gcHandle);
+
+                int result = AudioQueueNewOutput(
+                    ref format,
+                    callbackPtr,
+                    userData,
+                    IntPtr.Zero,
+                    IntPtr.Zero,
+                    0,
+                    out _audioQueue);
+
+                if (result != 0)
+                {
+                    throw new InvalidOperationException($"AudioQueueNewOutput failed: {result}");
+                }
+
+                // 10 ms of audio per buffer.
+                uint framesPerBuffer = RequestedSampleRate / 100;
+                uint bufferSize = framesPerBuffer * (uint)_bytesPerFrame;
+
+                for (int i = 0; i < NumBuffers; i++)
+                {
+                    AudioQueueAllocateBuffer(_audioQueue, bufferSize, out _audioQueueBuffers[i]);
+                    _bufferBytesFilled[i] = 0;
+
+                    PrimeBuffer(_audioQueueBuffers[i], i);
+                }
+            }
+        }
+
+        // Fills a native buffer from the ring buffer (zero-padding any shortfall) and re-enqueues it.
+        private unsafe void PrimeBuffer(IntPtr bufferPtr, int bufferIndex)
+        {
+            AudioQueueBuffer* buffer = (AudioQueueBuffer*)bufferPtr;
+
+            int capacityBytes = (int)buffer->AudioDataBytesCapacity;
+            int framesPerBuffer = capacityBytes / _bytesPerFrame;
+
+            int availableFrames = _ringBuffer.Length / _bytesPerFrame;
+            int framesToRead = Math.Min(availableFrames, framesPerBuffer);
+            int bytesToRead = framesToRead * _bytesPerFrame;
+
+            Span<byte> dst = new((void*)buffer->AudioData, capacityBytes);
+            dst.Clear();
+
+            if (bytesToRead > 0)
+            {
+                Span<byte> audio = dst.Slice(0, bytesToRead);
+                _ringBuffer.Read(audio, 0, bytesToRead);
+                ApplyVolume(buffer->AudioData, bytesToRead);
+            }
+
+            // Enqueue the full capacity so the queue keeps a steady cadence even on underrun.
+            buffer->AudioDataByteSize = (uint)capacityBytes;
+            _bufferBytesFilled[bufferIndex] = bytesToRead;
+
+            AudioQueueEnqueueBuffer(_audioQueue, bufferPtr, 0, IntPtr.Zero);
+        }
+
+        private void OutputCallback(IntPtr userData, IntPtr audioQueue, IntPtr bufferPtr)
+        {
+            if (!_started || bufferPtr == IntPtr.Zero)
+            {
+                return;
+            }
+
+            int bufferIndex = Array.IndexOf(_audioQueueBuffers, bufferPtr);
+            if (bufferIndex < 0)
+            {
+                return;
+            }
+
+            int bytesPlayed = _bufferBytesFilled[bufferIndex];
+            if (bytesPlayed > 0)
+            {
+                ProcessPlayedSamples(bytesPlayed);
+            }
+
+            PrimeBuffer(bufferPtr, bufferIndex);
+        }
+
+        // Accounts played bytes against queued client buffers and signals completed ones.
+        private void ProcessPlayedSamples(int bytesPlayed)
+        {
+            ulong samplesPlayed = GetSampleCount(bytesPlayed);
+            ulong remaining = samplesPlayed;
+            bool needUpdate = false;
+
+            while (remaining > 0 && _queuedBuffers.TryPeek(out AppleAudioBuffer buffer))
+            {
+                ulong needed = buffer.SampleCount - Interlocked.Read(ref buffer.SamplePlayed);
+                ulong take = Math.Min(needed, remaining);
+
+                ulong played = Interlocked.Add(ref buffer.SamplePlayed, take);
+                remaining -= take;
+
+                if (played == buffer.SampleCount)
+                {
+                    _queuedBuffers.TryDequeue(out _);
+                    needUpdate = true;
+                }
+
+                Interlocked.Add(ref _playedSampleCount, take);
+            }
+
+            if (needUpdate)
+            {
+                _updateRequiredEvent.Set();
+            }
+        }
+
+        // Applies the combined session/driver volume in-place; near-unity gain is a no-op.
+        private unsafe void ApplyVolume(IntPtr dataPtr, int byteSize)
+        {
+            float volume = Math.Clamp(_volume * _driver.Volume, 0f, 1f);
+            if (volume >= 0.999f)
+            {
+                return;
+            }
+
+            int sampleCount = byteSize / BackendHelper.GetSampleSize(RequestedSampleFormat);
+
+            switch (RequestedSampleFormat)
+            {
+                case SampleFormat.PcmInt16:
+                    short* s16 = (short*)dataPtr;
+                    for (int i = 0; i < sampleCount; i++)
+                    {
+                        s16[i] = (short)(s16[i] * volume);
+                    }
+                    break;
+
+                case SampleFormat.PcmFloat:
+                    float* f32 = (float*)dataPtr;
+                    for (int i = 0; i < sampleCount; i++)
+                    {
+                        f32[i] *= volume;
+                    }
+                    break;
+
+                case SampleFormat.PcmInt32:
+                    int* s32 = (int*)dataPtr;
+                    for (int i = 0; i < sampleCount; i++)
+                    {
+                        s32[i] = (int)(s32[i] * volume);
+                    }
+                    break;
+
+                case SampleFormat.PcmInt8:
+                    sbyte* s8 = (sbyte*)dataPtr;
+                    for (int i = 0; i < sampleCount; i++)
+                    {
+                        s8[i] = (sbyte)(s8[i] * volume);
+                    }
+                    break;
+            }
+        }
+
+        public override void QueueBuffer(AudioBuffer buffer)
+        {
+            _ringBuffer.Write(buffer.Data, 0, buffer.Data.Length);
+            _queuedBuffers.Enqueue(new AppleAudioBuffer(buffer.HostTag, GetSampleCount(buffer)));
+        }
+
+        public override void Start()
+        {
+            lock (_lock)
+            {
+                if (_started)
+                {
+                    return;
+                }
+
+                _started = true;
+                AudioQueueStart(_audioQueue, IntPtr.Zero);
+            }
+        }
+
+        public override void Stop()
+        {
+            lock (_lock)
+            {
+                if (!_started)
+                {
+                    return;
+                }
+
+                _started = false;
+                AudioQueuePause(_audioQueue);
+            }
+        }
+
+        public override ulong GetPlayedSampleCount()
+        {
+            return Interlocked.Read(ref _playedSampleCount);
+        }
+
+        public override float GetVolume()
+        {
+            return _volume;
+        }
+
+        public override void SetVolume(float volume)
+        {
+            _volume = volume;
+        }
+
+        public override bool WasBufferFullyConsumed(AudioBuffer buffer)
+        {
+            if (!_queuedBuffers.TryPeek(out AppleAudioBuffer driverBuffer))
+            {
+                return true;
+            }
+
+            return driverBuffer.DriverIdentifier != buffer.HostTag;
+        }
+
+        public override void PrepareToClose() { }
+
+        public override void UnregisterBuffer(AudioBuffer buffer) { }
+
+        protected virtual void Dispose(bool disposing)
+        {
+            if (disposing)
+            {
+                // Detach from the driver first so its own Dispose does not
+                // re-dispose this session and it no longer tracks us.
+                _driver.Unregister(this);
+
+                Stop();
+
+                if (_audioQueue != IntPtr.Zero)
+                {
+                    AudioQueueStop(_audioQueue, true);
+                    AudioQueueDispose(_audioQueue, true);
+                    _audioQueue = IntPtr.Zero;
+                }
+
+                if (_gcHandle.IsAllocated)
+                {
+                    _gcHandle.Free();
+                }
+            }
+        }
+
+        public override void Dispose()
+        {
+            Dispose(true);
+            GC.SuppressFinalize(this);
+        }
+    }
+}
diff --git a/src/Ryujinx.Audio.Backends.Apple/Native/AudioToolbox.cs b/src/Ryujinx.Audio.Backends.Apple/Native/AudioToolbox.cs
new file mode 100644
index 000000000..9a6e8e189
--- /dev/null
+++ b/src/Ryujinx.Audio.Backends.Apple/Native/AudioToolbox.cs
@@ -0,0 +1,108 @@
+using Ryujinx.Common.Memory;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+namespace Ryujinx.Audio.Backends.Apple.Native
+{
+    /// <summary>
+    /// P/Invoke bindings for the subset of Apple's AudioToolbox AudioQueue API used by this backend.
+    /// </summary>
+    public static partial class AudioToolbox
+    {
+        [StructLayout(LayoutKind.Sequential)]
+        internal struct AudioStreamBasicDescription
+        {
+            public double SampleRate;
+            public uint FormatID;
+            public uint FormatFlags;
+            public uint BytesPerPacket;
+            public uint FramesPerPacket;
+            public uint BytesPerFrame;
+            public uint ChannelsPerFrame;
+            public uint BitsPerChannel;
+            public uint Reserved;
+        }
+
+        [StructLayout(LayoutKind.Sequential)]
+        internal struct AudioChannelLayout
+        {
+            public uint AudioChannelLayoutTag;
+            public uint AudioChannelBitmap;
+            public uint NumberChannelDescriptions;
+        }
+
+        // Constants mirror the CoreAudio headers ('lpcm', 'aqcl' FourCC codes, format flags).
+        internal const uint kAudioFormatLinearPCM = 0x6C70636D;
+        internal const uint kAudioQueueProperty_ChannelLayout = 0x6171636c;
+        internal const uint kAudioChannelLayoutTag_MPEG_5_1_A = 0x650006;
+        internal const uint kAudioFormatFlagIsFloat = (1 << 0);
+        internal const uint kAudioFormatFlagIsSignedInteger = (1 << 2);
+        internal const uint kAudioFormatFlagIsPacked = (1 << 3);
+        internal const uint kAudioFormatFlagIsBigEndian = (1 << 1);
+        internal const uint kAudioFormatFlagIsAlignedHigh = (1 << 4);
+        internal const uint kAudioFormatFlagIsNonInterleaved = (1 << 5);
+
+        [LibraryImport("/System/Library/Frameworks/AudioToolbox.framework/AudioToolbox")]
+        internal static partial int AudioQueueNewOutput(
+            ref AudioStreamBasicDescription format,
+            nint callback,
+            nint userData,
+            nint callbackRunLoop,
+            nint callbackRunLoopMode,
+            uint flags,
+            out nint audioQueue);
+
+        [LibraryImport("/System/Library/Frameworks/AudioToolbox.framework/AudioToolbox")]
+        internal static partial int AudioQueueSetProperty(
+            nint audioQueue,
+            uint propertyID,
+            ref AudioChannelLayout layout,
+            uint layoutSize);
+
+        [LibraryImport("/System/Library/Frameworks/AudioToolbox.framework/AudioToolbox")]
+        internal static partial int AudioQueueDispose(nint audioQueue, [MarshalAs(UnmanagedType.I1)] bool immediate);
+
+        [LibraryImport("/System/Library/Frameworks/AudioToolbox.framework/AudioToolbox")]
+        internal static partial int AudioQueueAllocateBuffer(
+            nint audioQueue,
+            uint bufferByteSize,
+            out nint buffer);
+
+        [LibraryImport("/System/Library/Frameworks/AudioToolbox.framework/AudioToolbox")]
+        internal static partial int AudioQueueStart(nint audioQueue, nint startTime);
+
+        [LibraryImport("/System/Library/Frameworks/AudioToolbox.framework/AudioToolbox")]
+        internal static partial int AudioQueuePause(nint audioQueue);
+
+        [LibraryImport("/System/Library/Frameworks/AudioToolbox.framework/AudioToolbox")]
+        internal static partial int AudioQueueStop(nint audioQueue, [MarshalAs(UnmanagedType.I1)] bool immediate);
+
+        [LibraryImport("/System/Library/Frameworks/AudioToolbox.framework/AudioToolbox")]
+        internal static partial int AudioQueueSetParameter(
+            nint audioQueue,
+            uint parameterID,
+            float value);
+
+        [LibraryImport("/System/Library/Frameworks/AudioToolbox.framework/AudioToolbox")]
+        internal static partial int AudioQueueEnqueueBuffer(
+            nint audioQueue,
+            nint buffer,
+            uint numPacketDescs,
+            nint packetDescs);
+
+        // Mirrors the native AudioQueueBuffer header layout; fields are accessed via pointer.
+        [StructLayout(LayoutKind.Sequential)]
+        internal struct AudioQueueBuffer
+        {
+            public uint AudioDataBytesCapacity;
+            public nint AudioData;
+            public uint AudioDataByteSize;
+            public nint UserData;
+            public uint PacketDescriptionCapacity;
+            public nint PacketDescriptions;
+            public uint PacketDescriptionCount;
+        }
+
+        internal const uint kAudioQueueParam_Volume = 1;
+    }
+}
diff --git a/src/Ryujinx.Audio.Backends.Apple/Ryujinx.Audio.Backends.Apple.csproj b/src/Ryujinx.Audio.Backends.Apple/Ryujinx.Audio.Backends.Apple.csproj
new file mode 100644
index 000000000..b7e1b6d84
--- /dev/null
+++ b/src/Ryujinx.Audio.Backends.Apple/Ryujinx.Audio.Backends.Apple.csproj
@@ -0,0 +1,13 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFramework>net8.0</TargetFramework>
+    <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <ProjectReference Include="..\Ryujinx.Audio\Ryujinx.Audio.csproj" />
+    <ProjectReference Include="..\Ryujinx.Common\Ryujinx.Common.csproj" />
+    <ProjectReference Include="..\Ryujinx.Memory\Ryujinx.Memory.csproj" />
+  </ItemGroup>
+</Project>