Beispiel #1
0
        /// <summary>
        /// Wraps a native audio client instance and caches the engine mix format:
        /// frame size, bytes per sample, and the managed format descriptor.
        /// </summary>
        /// <param name="inst">Native audio client instance; the pointer is retained.</param>
        /// <exception cref="InvalidOperationException">GetMixFormat returned a non-zero status.</exception>
        /// <exception cref="NotSupportedException">The device mix format has no managed mapping.</exception>
        public unsafe AudioClient(AudioClientInst *inst)
        {
            this.inst = inst;

            WAVEFORMATEX *mixFormat = null;
            try
            {
                uint hr = inst->Vtbl->GetMixFormat(inst, &mixFormat);
                if (hr != 0)
                {
                    throw new InvalidOperationException();
                }

                this.FrameSize  = mixFormat->nBlockAlign;
                this.SampleSize = mixFormat->wBitsPerSample / 8u;

                AudioFormat? parsed = GetAudioFormat(mixFormat);
                this.MixFormat = parsed ?? throw new NotSupportedException("Mix format not supported");
            }
            finally
            {
                // The format block handed out by GetMixFormat is CoTaskMem-allocated
                // (it is released the same way elsewhere in this class); free it here
                // even when construction fails part-way.
                if (mixFormat != null)
                {
                    Marshal.FreeCoTaskMem(new IntPtr(mixFormat));
                }
            }
        }
Beispiel #2
0
        /// <summary>
        /// Maps a native <c>WAVEFORMATEX</c> to a managed <c>AudioFormat</c>, or returns
        /// <c>null</c> when the tag / subformat / bit-depth combination is not supported.
        /// Supported: plain PCM at 8 or 16 bits, and WAVE_FORMAT_EXTENSIBLE with the
        /// PCM subformat (8/16 bits) or the IEEE-float subformat (32 bits).
        /// </summary>
        private static unsafe AudioFormat? GetAudioFormat(WAVEFORMATEX *wfx)
        {
            SampleFormat? sample = null;

            if (wfx->wFormatTag == WAVE_FORMAT_PCM)
            {
                if (wfx->wBitsPerSample == 8)
                {
                    sample = SampleFormat.UnsignedPcm8;
                }
                else if (wfx->wBitsPerSample == 16)
                {
                    sample = SampleFormat.SignedPcm16;
                }
            }
            else if (wfx->wFormatTag == WAVE_FORMAT_EXTENSIBLE)
            {
                // The extensible header carries the real format in SubFormat.
                var ext = (WAVEFORMATEXTENSIBLE *)wfx;

                if (ext->SubFormat == Guids.KSDATAFORMAT_SUBTYPE_PCM)
                {
                    if (wfx->wBitsPerSample == 8)
                    {
                        sample = SampleFormat.UnsignedPcm8;
                    }
                    else if (wfx->wBitsPerSample == 16)
                    {
                        sample = SampleFormat.SignedPcm16;
                    }
                }
                else if (ext->SubFormat == Guids.KSDATAFORMAT_SUBTYPE_IEEE_FLOAT)
                {
                    if (wfx->wBitsPerSample == 32)
                    {
                        sample = SampleFormat.IeeeFloat32;
                    }
                }
            }

            // Any combination not matched above is unsupported.
            if (sample == null)
            {
                return null;
            }

            return new((int)wfx->nSamplesPerSec, wfx->nChannels, sample.Value);
        }
Beispiel #3
0
        /// <summary>
        /// Releases every native DirectSound buffer and the cached wave format,
        /// then chains to the base dispose logic. Logs a fatal diagnostic if any
        /// buffer is still outstanding (the free list must account for all buffers).
        /// </summary>
        unsafe protected override void OnDispose()
        {
            DirectSoundWorld.criticalSection.Enter();
            try
            {
                // Invariant at dispose time: every created buffer has been returned
                // to the free list.
                if (soundBuffers.Count != freeSoundBuffers.Count)
                {
                    Log.Fatal("DirectSound.OnDispose: soundBuffers.Count == freeSoundBuffers.Count");
                }

                // Release in reverse creation order.
                for (int n = soundBuffers.Count - 1; n >= 0; n--)
                {
                    IDirectSoundBuffer *soundBuffer = (IDirectSoundBuffer *)soundBuffers[n].ToPointer();
                    IDirectSoundBuffer.Release(soundBuffer);
                }
                soundBuffers.Clear();
                freeSoundBuffers.Clear();

                if (waveFormat != null)
                {
                    NativeUtils.Free((IntPtr)waveFormat);
                    waveFormat = null;
                }
            }
            finally
            {
                // BUG FIX: the critical section was previously not released when a
                // buffer release (or Log.Fatal) threw, leaving it held forever.
                DirectSoundWorld.criticalSection.Leave();
            }

            base.OnDispose();
        }
Beispiel #4
0
 /// <summary>
 /// Native entry point that starts an APE compression session (the DllImport
 /// attribute is outside this excerpt). Parameters follow the APE SDK naming:
 /// compressor handle, native I/O handle, input wave format, maximum audio byte
 /// count, compression level, and an optional header block with its byte size.
 /// Returns a native status code — presumably 0 on success; confirm against the SDK.
 /// </summary>
 internal static extern int c_APECompress_StartEx(
     IntPtr hAPECompress,
     IntPtr hCIO,
     WAVEFORMATEX *pwfeInput,
     int nMaxAudioBytes,
     int nCompressionLevel,
     void *pHeaderData,
     int nHeaderBytes);
Beispiel #5
0
 /// <summary>
 /// Creates an XAudio2 source voice for <paramref name="pSourceFormat"/> by
 /// invoking vtable slot 5 through a Stdcall unmanaged function pointer.
 /// Optional callback, send list and effect chain default to null; the new
 /// voice is returned through <paramref name="ppSourceVoice"/>.
 /// </summary>
 /// <returns>HRESULT from the native call.</returns>
 public HRESULT CreateSourceVoice(
     IXAudio2SourceVoice **ppSourceVoice,
     WAVEFORMATEX *pSourceFormat,
     uint Flags = 0,
     float MaxFrequencyRatio            = XAudio2Constants.XAUDIO2_DEFAULT_FREQ_RATIO,
     IXAudio2VoiceCallback *pCallback   = null,
     XAUDIO2_VOICE_SENDS *pSendList     = null,
     XAUDIO2_EFFECT_CHAIN *pEffectChain = null
     )
 // Casts slot 5 of the vtable to the matching unmanaged signature and calls it
 // with "this" as the COM interface pointer.
 => ((delegate * unmanaged[Stdcall] < void *, IXAudio2SourceVoice **, WAVEFORMATEX *, uint, float, IXAudio2VoiceCallback *, XAUDIO2_VOICE_SENDS *, XAUDIO2_EFFECT_CHAIN *, HRESULT >)_vtbl[5])(Unsafe.AsPointer(ref this), ppSourceVoice, pSourceFormat, Flags, MaxFrequencyRatio, pCallback, pSendList, pEffectChain);
        /// <summary>
        /// Reads a RIFF "fmt " chunk payload and caches the basic WAVEFORMATEX fields
        /// (channels, average bytes/sec, bits/sample, samples/sec).
        /// </summary>
        /// <param name="rp">Parser positioned at the start of the chunk payload.</param>
        /// <param name="length">Chunk payload size in bytes.</param>
        /// <exception cref="InvalidOperationException">The chunk is too small to hold the fields read here.</exception>
        private unsafe void DecodeWave(RiffParser rp, int length)
        {
            byte[] ba = new byte[length];
            rp.ReadData(ba, 0, length);

            // BUG FIX: the fields dereferenced below extend to byte offset 16
            // (wBitsPerSample sits at offset 14). The previous code reinterpreted
            // the buffer without any size check, so a short (or empty) chunk read
            // past the end of the managed array through the unchecked pointer.
            if (length < 16)
            {
                throw new InvalidOperationException("RIFF fmt chunk too small: " + length + " bytes");
            }

            fixed(byte *bp = &ba[0])
            {
                WAVEFORMATEX *wave = (WAVEFORMATEX *)bp;

                m_numChannels   = wave->nChannels;
                // NOTE(review): nAvgBytesPerSec is bytes/sec but the field name says
                // bits — looks intentional in callers, but worth confirming.
                m_bitsPerSec    = wave->nAvgBytesPerSec;
                m_bitsPerSample = wave->wBitsPerSample;
                m_samplesPerSec = wave->nSamplesPerSec;
            }
        }
Beispiel #7
0
        /// <summary>
        /// Asks the underlying audio client whether <paramref name="format"/> is usable.
        /// When the device suggests an alternative, it is reported through
        /// <paramref name="closestMatch"/> (null when no suggestion was produced or
        /// the suggestion could not be mapped).
        /// </summary>
        /// <returns><c>true</c> when the native call reports the format as supported.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="format"/> is null.</exception>
        /// <exception cref="ObjectDisposedException">This client has been disposed.</exception>
        public bool IsFormatSupported(AudioFormat format, out AudioFormat? closestMatch)
        {
            if (format == null)
            {
                throw new ArgumentNullException(nameof(format));
            }
            if (this.disposed)
            {
                throw new ObjectDisposedException(nameof(AudioClient));
            }

            closestMatch = null;

            // A format we cannot express as WAVEFORMATEX is by definition unsupported.
            if (!TryGetWaveFormat(format, out var wfx))
            {
                return false;
            }

            unsafe
            {
                WAVEFORMATEX *suggested = null;
                try
                {
                    uint hr = this.inst->Vtbl->IsFormatSupported(this.inst, 0, (WAVEFORMATEX *)&wfx, &suggested);

                    // The device may hand back a closest match even on failure.
                    if (suggested != null)
                    {
                        closestMatch = GetAudioFormat(suggested);
                    }

                    return hr == 0;
                }
                finally
                {
                    // The suggestion buffer is CoTaskMem-allocated by the native side.
                    if (suggested != null)
                    {
                        Marshal.FreeCoTaskMem(new IntPtr(suggested));
                    }
                }
            }
        }
 /// <summary>
 /// Generated interop thunk: forwards to vtable slot 7 (IAudioClient2::IsFormatSupported)
 /// via an unmanaged function pointer. Returns the raw HRESULT as <c>int</c>.
 /// </summary>
 public int IsFormatSupported(AUDCLNT_SHAREMODE ShareMode, [NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *pFormat, [NativeTypeName("WAVEFORMATEX **")] WAVEFORMATEX **ppClosestMatch)
 {
     return(((delegate * unmanaged <IAudioClient2 *, AUDCLNT_SHAREMODE, WAVEFORMATEX *, WAVEFORMATEX **, int>)(lpVtbl[7]))((IAudioClient2 *)Unsafe.AsPointer(ref this), ShareMode, pFormat, ppClosestMatch));
 }
 /// <summary>
 /// Generated interop thunk: forwards to vtable slot 3 (IAudioClient2::Initialize)
 /// via an unmanaged function pointer. Returns the raw HRESULT as <c>int</c>.
 /// </summary>
 public int Initialize(AUDCLNT_SHAREMODE ShareMode, [NativeTypeName("DWORD")] uint StreamFlags, [NativeTypeName("REFERENCE_TIME")] long hnsBufferDuration, [NativeTypeName("REFERENCE_TIME")] long hnsPeriodicity, [NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *pFormat, [NativeTypeName("LPCGUID")] Guid *AudioSessionGuid)
 {
     return(((delegate * unmanaged <IAudioClient2 *, AUDCLNT_SHAREMODE, uint, long, long, WAVEFORMATEX *, Guid *, int>)(lpVtbl[3]))((IAudioClient2 *)Unsafe.AsPointer(ref this), ShareMode, StreamFlags, hnsBufferDuration, hnsPeriodicity, pFormat, AudioSessionGuid));
 }
 /// <summary>
 /// Generated interop thunk: forwards to vtable slot 17 (IAudioClient2::GetBufferSizeLimits)
 /// via an unmanaged function pointer. Returns the raw HRESULT as <c>int</c>.
 /// </summary>
 public int GetBufferSizeLimits([NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *pFormat, [NativeTypeName("BOOL")] int bEventDriven, [NativeTypeName("REFERENCE_TIME *")] long *phnsMinBufferDuration, [NativeTypeName("REFERENCE_TIME *")] long *phnsMaxBufferDuration)
 {
     return(((delegate * unmanaged <IAudioClient2 *, WAVEFORMATEX *, int, long *, long *, int>)(lpVtbl[17]))((IAudioClient2 *)Unsafe.AsPointer(ref this), pFormat, bEventDriven, phnsMinBufferDuration, phnsMaxBufferDuration));
 }
Beispiel #11
0
 /// <summary>
 /// Generated interop thunk: forwards to vtable slot 12 (ISpRecoResult::ScaleAudio)
 /// via an unmanaged function pointer; the native <c>int</c> converts to HRESULT.
 /// </summary>
 public HRESULT ScaleAudio([NativeTypeName("const GUID *")] Guid *pAudioFormatId, [NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *pWaveFormatEx)
 {
     return(((delegate * unmanaged <ISpRecoResult *, Guid *, WAVEFORMATEX *, int>)(lpVtbl[12]))((ISpRecoResult *)Unsafe.AsPointer(ref this), pAudioFormatId, pWaveFormatEx));
 }
Beispiel #12
0
 /// <summary>
 /// Generated interop thunk: forwards to vtable slot 5 (IXAPO::IsOutputFormatSupported)
 /// via an unmanaged function pointer. Returns the raw HRESULT as <c>int</c>.
 /// </summary>
 public int IsOutputFormatSupported([NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *pInputFormat, [NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *pRequestedOutputFormat, [NativeTypeName("WAVEFORMATEX **")] WAVEFORMATEX **ppSupportedOutputFormat)
 {
     return(((delegate * unmanaged <IXAPO *, WAVEFORMATEX *, WAVEFORMATEX *, WAVEFORMATEX **, int>)(lpVtbl[5]))((IXAPO *)Unsafe.AsPointer(ref this), pInputFormat, pRequestedOutputFormat, ppSupportedOutputFormat));
 }
 /// <summary>
 /// Generated interop thunk: forwards to vtable slot 8 (ISpatialAudioClient::IsAudioObjectFormatSupported)
 /// via an unmanaged function pointer. Returns the raw HRESULT as <c>int</c>.
 /// </summary>
 public int IsAudioObjectFormatSupported([NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *objectFormat)
 {
     return(((delegate * unmanaged <ISpatialAudioClient *, WAVEFORMATEX *, int>)(lpVtbl[8]))((ISpatialAudioClient *)Unsafe.AsPointer(ref this), objectFormat));
 }
Beispiel #14
0
 /// <summary>
 /// Generated interop thunk: forwards to vtable slot 16 (ISpMMSysAudio::SetFormat)
 /// via an unmanaged function pointer. Returns the raw HRESULT as <c>int</c>.
 /// </summary>
 public int SetFormat([NativeTypeName("const GUID &")] Guid *rguidFmtId, [NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *pWaveFormatEx)
 {
     return(((delegate * unmanaged <ISpMMSysAudio *, Guid *, WAVEFORMATEX *, int>)(lpVtbl[16]))((ISpMMSysAudio *)Unsafe.AsPointer(ref this), rguidFmtId, pWaveFormatEx));
 }
        /// <summary>
        /// Activates an IAudioClient on <paramref name="IDevice"/>, initializes it for
        /// shared-mode playback at the device mix format, and reports that format plus
        /// the managed element type it maps to.
        /// </summary>
        /// <param name="IDevice">Device to activate the client on.</param>
        /// <param name="format">Receives a copy of the device mix format.</param>
        /// <param name="dataFormat">Receives the managed sample element type; defaults to <c>byte</c> for unrecognized tags/subformats (pre-existing behavior).</param>
        /// <returns>The initialized audio client.</returns>
        internal static unsafe Factory_WindowsCoreApi.IAudioClient CreateClient(IMMDevice IDevice, out WAVEFORMATEX format, out Type dataFormat)
        {
            Factory_WindowsCoreApi.IAudioClient IAudioClient;
            IDevice.Activate(Factory_WindowsCoreApi.IID_IAudioClient, (uint)PlaybackClient_WindowsCoreApi.CLSCTX.CLSCTX_ALL, new IntPtr(0), out IAudioClient);

            IntPtr rawFormatPtr = new IntPtr();
            IAudioClient.GetMixFormat(out rawFormatPtr);

            try
            {
                WAVEFORMATEX *pFormat = (WAVEFORMATEX *)rawFormatPtr.ToPointer();

                if (pFormat->wBitsPerSample % 8 != 0)
                {
                    throw new Exception("Unsupported bits per sample value");
                }

                dataFormat = typeof(byte);
                if (pFormat->wFormatTag == 0xFFFE)  // WAVE_FORMAT_EXTENSIBLE
                {
                    WAVEFORMATEXTENSIBLE *pFormatEx = (WAVEFORMATEXTENSIBLE *)pFormat;
                    if (pFormatEx->SubFormat == FormatEx_IEEE)
                    {
                        dataFormat = MapIeeeSampleType(pFormat->wBitsPerSample);
                    }
                    else if (pFormatEx->SubFormat == FormatEx_PCM)
                    {
                        dataFormat = MapPcmSampleType(pFormat->wBitsPerSample);
                    }
                    // Other subformats keep the byte default (pre-existing behavior).
                }
                else
                {
                    switch ((WAVE_FORMAT)pFormat->wFormatTag)
                    {
                    case WAVE_FORMAT.PCM:
                        dataFormat = MapPcmSampleType(pFormat->wBitsPerSample);
                        break;

                    case WAVE_FORMAT.IEEE:
                        dataFormat = MapIeeeSampleType(pFormat->wBitsPerSample);
                        break;
                        // Unknown tags keep the byte default (pre-existing behavior).
                    }
                }

                try
                {
                    // Shared mode, 1 second (10,000,000 * 100ns) buffer, default session.
                    IAudioClient.Initialize(Factory_WindowsCoreApi.AUDCLNT_SHAREMODE.AUDCLNT_SHAREMODE_SHARED, 0, 10000000, 0, new IntPtr(pFormat), Guid.Empty);
                }
                catch (Exception ex)
                {
                    // BUG FIX: the original catch-all discarded the underlying exception;
                    // preserve it as the inner exception for diagnostics.
                    throw new Exception("Unexpected error when creating the client", ex);
                }

                format = *pFormat;
                return IAudioClient;
            }
            finally
            {
                // BUG FIX: GetMixFormat allocates the WAVEFORMATEX via CoTaskMemAlloc;
                // the buffer was previously leaked on every call.
                if (rawFormatPtr != IntPtr.Zero)
                {
                    System.Runtime.InteropServices.Marshal.FreeCoTaskMem(rawFormatPtr);
                }
            }
        }

        // Maps an integer-PCM bit depth to the managed element type.
        private static Type MapPcmSampleType(int bitsPerSample)
        {
            switch (bitsPerSample)
            {
            case 8: return typeof(byte);

            case 16: return typeof(Int16);

            case 32: return typeof(Int32);

            case 64: return typeof(Int64);

            default: throw new Exception("Unsupported underlying data format");
            }
        }

        // Maps an IEEE-float bit depth to the managed element type
        // (0 was historically treated as 32-bit float here).
        private static Type MapIeeeSampleType(int bitsPerSample)
        {
            switch (bitsPerSample)
            {
            case 0:
            case 32: return typeof(float);

            case 64: return typeof(double);

            default: throw new Exception("Unsupported underlying data format");
            }
        }
Beispiel #16
0
 /// <summary>
 /// Raw copy of <paramref name="size"/> bytes from <paramref name="src"/> into the
 /// WAVEFORMATEX at <paramref name="dest"/> (kernel32 RtlMoveMemory; the DllImport
 /// attribute is outside this excerpt).
 /// </summary>
 private static extern void RtlMoveMemory(WAVEFORMATEX *dest, byte *src, uint size);
Beispiel #17
0
 /// <summary>
 /// msacm32 acmFormatSuggest: asks driver <paramref name="had"/> to fill
 /// <paramref name="dest"/> with a suggested destination format for converting from
 /// <paramref name="source"/>; cbwfxDst is the byte size of dest, fdwSuggest holds
 /// ACM_FORMATSUGGESTF_* flags. DllImport attribute is outside this excerpt.
 /// </summary>
 private static extern MMSYSERR acmFormatSuggest(IntPtr had, WAVEFORMATEX *source, ref WAVEFORMATEX dest, uint cbwfxDst, uint fdwSuggest);
Beispiel #18
0
 /// <summary>
 /// msacm32 acmStreamOpen: opens a conversion stream from the source format to
 /// <paramref name="dest"/>, returning the stream handle through <paramref name="handle"/>.
 /// The source format is passed as a raw byte pointer here — presumably to carry a
 /// variable-size cbSize payload; confirm at call sites. DllImport attribute is outside this excerpt.
 /// </summary>
 private static extern MMSYSERR acmStreamOpen([In] IntPtr *handle, IntPtr driverHandle, byte *source, WAVEFORMATEX *dest, IntPtr WAVEFILTER, UIntPtr Callback, UIntPtr CallbackInstanceData, uint fdwOpen);
 /// <summary>
 /// Generated interop thunk: forwards to vtable slot 17 (ISpStream::BindToFile)
 /// via an unmanaged function pointer. Returns the raw HRESULT as <c>int</c>.
 /// </summary>
 public int BindToFile([NativeTypeName("LPCWSTR")] ushort *pszFileName, SPFILEMODE eMode, [NativeTypeName("const GUID *")] Guid *pFormatId, [NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *pWaveFormatEx, [NativeTypeName("ULONGLONG")] ulong ullEventInterest)
 {
     return(((delegate * unmanaged <ISpStream *, ushort *, SPFILEMODE, Guid *, WAVEFORMATEX *, ulong, int>)(lpVtbl[17]))((ISpStream *)Unsafe.AsPointer(ref this), pszFileName, eMode, pFormatId, pWaveFormatEx, ullEventInterest));
 }
 /// <summary>
 /// Generated interop thunk: forwards to vtable slot 15 (ISpStream::SetBaseStream)
 /// via an unmanaged function pointer. Returns the raw HRESULT as <c>int</c>.
 /// </summary>
 public int SetBaseStream(IStream *pStream, [NativeTypeName("const GUID &")] Guid *rguidFormat, [NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *pWaveFormatEx)
 {
     return(((delegate * unmanaged <ISpStream *, IStream *, Guid *, WAVEFORMATEX *, int>)(lpVtbl[15]))((ISpStream *)Unsafe.AsPointer(ref this), pStream, rguidFormat, pWaveFormatEx));
 }
 /// <summary>
 /// winmm waveOutOpen: opens wave output device <paramref name="uDeviceID"/> for the given
 /// format, writing the device handle through <paramref name="phwo"/>. Returns an MMRESULT
 /// code (0 is MMSYSERR_NOERROR). DllImport attribute is outside this excerpt.
 /// </summary>
 static extern int waveOutOpen(IntPtr *phwo, uint uDeviceID, WAVEFORMATEX *pwfx, IntPtr dwCallback, IntPtr dwInstance, uint fdwOpen);
Beispiel #22
0
 /// <summary>
 /// winmm waveOutOpen (ClangSharp-style binding with NativeTypeName annotations):
 /// opens a wave output device for <paramref name="pwfx"/> and returns an MMRESULT
 /// as <c>uint</c>. DllImport attribute is outside this excerpt.
 /// </summary>
 public static extern uint waveOutOpen([NativeTypeName("LPHWAVEOUT")] HWAVEOUT *phwo, uint uDeviceID, [NativeTypeName("LPCWAVEFORMATEX")] WAVEFORMATEX *pwfx, [NativeTypeName("DWORD_PTR")] nuint dwCallback, [NativeTypeName("DWORD_PTR")] nuint dwInstance, [NativeTypeName("DWORD")] uint fdwOpen);
 /// <summary>
 /// Generated interop thunk: forwards to vtable slot 12 (ISpatialAudioClient2::GetMaxFrameCountForCategory)
 /// via an unmanaged function pointer; the native <c>int</c> converts to HRESULT.
 /// </summary>
 public HRESULT GetMaxFrameCountForCategory(AUDIO_STREAM_CATEGORY category, BOOL offloadEnabled, [NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *objectFormat, [NativeTypeName("UINT32 *")] uint *frameCountPerBuffer)
 {
     return(((delegate * unmanaged <ISpatialAudioClient2 *, AUDIO_STREAM_CATEGORY, BOOL, WAVEFORMATEX *, uint *, int>)(lpVtbl[12]))((ISpatialAudioClient2 *)Unsafe.AsPointer(ref this), category, offloadEnabled, objectFormat, frameCountPerBuffer));
 }
 /// <summary>
 /// Generated interop thunk: forwards to vtable slot 7 (ISpatialAudioClient::GetMaxFrameCount)
 /// via an unmanaged function pointer. Returns the raw HRESULT as <c>int</c>.
 /// </summary>
 public int GetMaxFrameCount([NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *objectFormat, [NativeTypeName("UINT32 *")] uint *frameCountPerBuffer)
 {
     return(((delegate * unmanaged <ISpatialAudioClient *, WAVEFORMATEX *, uint *, int>)(lpVtbl[7]))((ISpatialAudioClient *)Unsafe.AsPointer(ref this), objectFormat, frameCountPerBuffer));
 }
Beispiel #25
0
 /// <summary>
 /// Interop thunk: forwards to vtable slot 20 (IAudioClient3::InitializeSharedAudioStream)
 /// via an unmanaged function pointer. Returns the raw HRESULT as <c>int</c>.
 /// </summary>
 public int InitializeSharedAudioStream([NativeTypeName("DWORD")] uint StreamFlags, [NativeTypeName("UINT32")] uint PeriodInFrames, [NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *pFormat, [NativeTypeName("LPCGUID")] Guid *AudioSessionGuid)
 {
     // BUG FIX: "delegate * stdcall <...>" is not valid released C#; the shipped
     // function-pointer syntax (C# 9) spells the calling convention as unmanaged[Stdcall].
     return ((delegate * unmanaged[Stdcall] <IAudioClient3 *, uint, uint, WAVEFORMATEX *, Guid *, int>)(lpVtbl[20]))((IAudioClient3 *)Unsafe.AsPointer(ref this), StreamFlags, PeriodInFrames, pFormat, AudioSessionGuid);
 }
Beispiel #26
0
 /// <summary>
 /// Interop thunk: forwards to vtable slot 18 (IAudioClient3::GetSharedModeEnginePeriod)
 /// via an unmanaged function pointer. Returns the raw HRESULT as <c>int</c>.
 /// </summary>
 public int GetSharedModeEnginePeriod([NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *pFormat, [NativeTypeName("UINT32 *")] uint *pDefaultPeriodInFrames, [NativeTypeName("UINT32 *")] uint *pFundamentalPeriodInFrames, [NativeTypeName("UINT32 *")] uint *pMinPeriodInFrames, [NativeTypeName("UINT32 *")] uint *pMaxPeriodInFrames)
 {
     // BUG FIX: "delegate * stdcall <...>" is not valid released C#; the shipped
     // function-pointer syntax (C# 9) spells the calling convention as unmanaged[Stdcall].
     return ((delegate * unmanaged[Stdcall] <IAudioClient3 *, WAVEFORMATEX *, uint *, uint *, uint *, uint *, int>)(lpVtbl[18]))((IAudioClient3 *)Unsafe.AsPointer(ref this), pFormat, pDefaultPeriodInFrames, pFundamentalPeriodInFrames, pMinPeriodInFrames, pMaxPeriodInFrames);
 }
 /// <summary>
 /// Generated interop thunk: forwards to vtable slot 5 (IXAudio2::CreateSourceVoice)
 /// via an unmanaged function pointer; the native <c>int</c> converts to HRESULT.
 /// Optional callback, send list and effect chain default to null.
 /// </summary>
 public HRESULT CreateSourceVoice(IXAudio2SourceVoice **ppSourceVoice, [NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *pSourceFormat, [NativeTypeName("UINT32")] uint Flags = 0, float MaxFrequencyRatio = 2.0f, IXAudio2VoiceCallback *pCallback = null, [NativeTypeName("const XAUDIO2_VOICE_SENDS *")] XAUDIO2_VOICE_SENDS *pSendList = null, [NativeTypeName("const XAUDIO2_EFFECT_CHAIN *")] XAUDIO2_EFFECT_CHAIN *pEffectChain = null)
 {
     return(((delegate * unmanaged <IXAudio2 *, IXAudio2SourceVoice **, WAVEFORMATEX *, uint, float, IXAudio2VoiceCallback *, XAUDIO2_VOICE_SENDS *, XAUDIO2_EFFECT_CHAIN *, int>)(lpVtbl[5]))((IXAudio2 *)Unsafe.AsPointer(ref this), ppSourceVoice, pSourceFormat, Flags, MaxFrequencyRatio, pCallback, pSendList, pEffectChain));
 }
Beispiel #28
0
 /// <summary>
 /// Generated interop thunk: forwards to vtable slot 17 (ISpStreamFormatConverter::SetFormat)
 /// via an unmanaged function pointer. Returns the raw HRESULT as <c>int</c>.
 /// </summary>
 public int SetFormat([NativeTypeName("const GUID &")] Guid *rguidFormatIdOfConvertedStream, [NativeTypeName("const WAVEFORMATEX *")] WAVEFORMATEX *pWaveFormatExOfConvertedStream)
 {
     return(((delegate * unmanaged <ISpStreamFormatConverter *, Guid *, WAVEFORMATEX *, int>)(lpVtbl[17]))((ISpStreamFormatConverter *)Unsafe.AsPointer(ref this), rguidFormatIdOfConvertedStream, pWaveFormatExOfConvertedStream));
 }
Beispiel #29
0
		/// <summary>
		/// Releases every native DirectSound buffer and the cached wave format, then
		/// chains to the base dispose logic. Logs a fatal diagnostic if any buffer is
		/// still outstanding (the free list must account for all buffers).
		/// </summary>
		unsafe protected override void OnDispose()
		{
			DirectSoundWorld.criticalSection.Enter();
			try
			{
				// Invariant at dispose time: every created buffer has been returned to the free list.
				if( soundBuffers.Count != freeSoundBuffers.Count )
					Log.Fatal( "DirectSound.OnDispose: soundBuffers.Count == freeSoundBuffers.Count" );

				// Release in reverse creation order.
				for( int n = soundBuffers.Count - 1; n >= 0; n-- )
				{
					IDirectSoundBuffer* soundBuffer = (IDirectSoundBuffer*)soundBuffers[ n ].ToPointer();
					IDirectSoundBuffer.Release( soundBuffer );
				}
				soundBuffers.Clear();
				freeSoundBuffers.Clear();

				if( waveFormat != null )
				{
					NativeUtils.Free( (IntPtr)waveFormat );
					waveFormat = null;
				}
			}
			finally
			{
				// BUG FIX: the critical section was previously not released when a
				// buffer release (or Log.Fatal) threw, leaving it held forever.
				DirectSoundWorld.criticalSection.Leave();
			}

			base.OnDispose();
		}