/// <summary>
/// Writes a PCM buffer to <paramref name="destFile"/> as a RIFF/WAVE file:
/// RIFF header, fmt chunk (the marshaled WaveFormatEx), then the data chunk.
/// </summary>
/// <param name="buff">Raw PCM sample data placed in the data chunk.</param>
/// <param name="wfex">Wave format descriptor written into the fmt chunk.</param>
/// <param name="destFile">Destination path; overwritten if it already exists.</param>
public void WriteBuffer(byte[] buff, WaveFormatEx wfex, string destFile)
{
    using (FileStream fs = new FileStream(destFile, FileMode.Create, FileAccess.Write, FileShare.None))
    using (BinaryWriter bw = new BinaryWriter(fs))
    {
        bw.Write(RIFF_TAG);
        bw.Write((uint)(WaveHeaderSize + buff.Length)); // RIFF chunk size field
        bw.Write(WAVE_TAG);
        bw.Write(FMT__TAG);
        bw.Write(WaveFormatSize);

        // Marshal the WaveFormatEx structure into a raw byte array for the fmt chunk.
        int size = Marshal.SizeOf(wfex);
        byte[] arr = new byte[size];
        IntPtr ptr = Marshal.AllocHGlobal(size);
        try
        {
            Marshal.StructureToPtr(wfex, ptr, true);
            Marshal.Copy(ptr, arr, 0, size);
        }
        finally
        {
            // FIX: previously FreeHGlobal was not in a finally, so the unmanaged
            // block leaked if StructureToPtr/Copy threw.
            Marshal.FreeHGlobal(ptr);
        }
        bw.Write(arr);

        bw.Write(DATA_TAG);
        bw.Write(buff.Length); // data chunk size
        bw.Write(buff);
    }
}
/// <summary>
/// Copies a managed WaveFormatEx into a temporary unmanaged buffer and stores
/// it as the format block of the given media type, tagging the media type as
/// FormatType.WaveEx. No-op when <paramref name="wfx"/> is null.
/// </summary>
public static void SetFormat(ref AMMediaType mt, ref WaveFormatEx wfx)
{
    if (wfx == null)
    {
        return;
    }

    int byteCount = Marshal.SizeOf(wfx);
    IntPtr scratch = Marshal.AllocCoTaskMem(byteCount);
    try
    {
        Marshal.StructureToPtr(wfx, scratch, true);
        SetFormat(ref mt, scratch, byteCount);
        if (mt != null)
        {
            mt.formatType = FormatType.WaveEx;
        }
    }
    finally
    {
        // The media type keeps its own copy of the bytes; release our scratch buffer.
        Marshal.FreeCoTaskMem(scratch);
    }
}
/// <summary>
/// Marshals the supplied WaveFormatEx into a temporary unmanaged buffer,
/// installs it as this media type's format block, and marks the format type
/// as WaveEx. Does nothing when <paramref name="wfx"/> is null.
/// </summary>
public void SetFormat(WaveFormatEx wfx)
{
    if (wfx == null)
    {
        return;
    }

    int byteCount = Marshal.SizeOf(wfx);
    IntPtr scratch = Marshal.AllocCoTaskMem(byteCount);
    try
    {
        Marshal.StructureToPtr(wfx, scratch, true);
        SetFormat(scratch, byteCount);
        formatType = FormatType.WaveEx;
    }
    finally
    {
        // SetFormat copied the bytes; the scratch buffer is no longer needed.
        Marshal.FreeCoTaskMem(scratch);
    }
}
/// <summary>
/// Initializes a new instance whose base <c>Format</c> is a freshly
/// constructed (default) WaveFormatEx.
/// </summary>
public WaveFormatExtensible()
{
    this.Format = new WaveFormatEx();
}
/// <summary>
/// Creates a LAME encoder configuration for the given input format using a
/// default MP3 bit rate of 192 kbps (delegates to the two-argument constructor).
/// </summary>
public BE_CONFIG(WaveFormatEx format) : this(format, 192) { }
/// <summary>
/// Creates a LAME encoder configuration for the given input format and MP3
/// bit rate (in kbps). The detailed settings live in the nested LHV1 struct,
/// which validates the format/bit-rate combination.
/// </summary>
public BE_CONFIG(WaveFormatEx format, uint MpeBitRate)
{
    this.dwConfig = BE_CONFIG_LAME; // select the LAME-specific config layout
    this.format = new LHV1(format, MpeBitRate);
}
/// <summary>
/// Builds the LAME LHV1 encoder configuration from a PCM input format and a
/// target MP3 bit rate. Validates that the input is 16-bit PCM, mono or
/// stereo, at a sample rate supported by MPEG1/MPEG2, and that the bit rate
/// is legal for the selected MPEG version.
/// </summary>
/// <param name="format">Input wave format; must be 16-bit PCM, 1 or 2 channels.</param>
/// <param name="MpeBitRate">Target MP3 bit rate in kbps.</param>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when the format is not 16-bit PCM, the sample rate or channel count
/// is unsupported, or the bit rate is not valid for the MPEG version implied
/// by the sample rate.
/// </exception>
public LHV1(WaveFormatEx format, uint MpeBitRate)
{
    if (format.wFormatTag != 1 /* WAVE_FORMAT_PCM */)
    {
        throw new ArgumentOutOfRangeException("format", "Only PCM format supported");
    }
    if (format.wBitsPerSample != 16)
    {
        throw new ArgumentOutOfRangeException("format", "Only 16 bits samples supported");
    }

    dwStructVersion = 1;
    dwStructSize = (uint)Marshal.SizeOf(typeof(BE_CONFIG));

    // Low sample rates are MPEG2, high sample rates are MPEG1.
    switch (format.nSamplesPerSec)
    {
        case 16000:
        case 22050:
        case 24000:
            dwMpegVersion = MPEG2;
            break;
        case 32000:
        case 44100:
        case 48000:
            dwMpegVersion = MPEG1;
            break;
        default:
            throw new ArgumentOutOfRangeException("format", "Unsupported sample rate");
    }

    dwSampleRate = (uint)format.nSamplesPerSec; // INPUT FREQUENCY
    dwReSampleRate = 0;                         // DON'T RESAMPLE

    switch (format.nChannels)
    {
        case 1:
            nMode = MpegMode.MONO;
            break;
        case 2:
            nMode = MpegMode.STEREO;
            break;
        default:
            throw new ArgumentOutOfRangeException("format", "Invalid number of channels");
    }

    switch (MpeBitRate)
    {
        case 32:
        case 40:
        case 48:
        case 56:
        case 64:
        case 80:
        case 96:
        case 112:
        case 128:
        case 160:
            // Allowed bit rates in both MPEG1 and MPEG2
            break;
        case 192:
        case 224:
        case 256:
        case 320:
            // Allowed only in MPEG1
            if (dwMpegVersion != MPEG1)
            {
                // FIX: paramName was previously misspelled "MpsBitRate"
                throw new ArgumentOutOfRangeException("MpeBitRate", "Bit rate not compatible with input format");
            }
            break;
        case 8:
        case 16:
        case 24:
        case 144:
            // Allowed only in MPEG2
            if (dwMpegVersion != MPEG2)
            {
                // FIX: paramName was previously misspelled "MpsBitRate"
                throw new ArgumentOutOfRangeException("MpeBitRate", "Bit rate not compatible with input format");
            }
            break;
        default:
            // FIX: paramName was previously misspelled "MpsBitRate"
            throw new ArgumentOutOfRangeException("MpeBitRate", "Unsupported bit rate");
    }

    dwBitrate = MpeBitRate;                           // CBR bit rate / minimum bit rate for VBR
    nPreset = LAME_QUALITY_PRESET.LQP_NORMAL_QUALITY; // QUALITY PRESET SETTING
    dwPsyModel = 0;                                   // USE DEFAULT PSYCHOACOUSTIC MODEL
    dwEmphasis = 0;                                   // NO EMPHASIS TURNED ON
    bOriginal = 1;                                    // SET ORIGINAL FLAG
    bWriteVBRHeader = 0;
    bNoRes = 0;                                       // No bit reservoir
    bCopyright = 0;
    bCRC = 0;
    bEnableVBR = 0;
    bPrivate = 0;
    bStrictIso = 0;
    dwMaxBitrate = 0;
    dwVbrAbr_bps = 0;
    nQuality = 0;
    nVbrMethod = VBRMETHOD.VBR_METHOD_NONE;
    nVBRQuality = 0;
}
// Reads a WAV file (as produced by WriteBuffer): skips the RIFF/fmt header
// fields, reconstructs the WaveFormatEx from the fmt chunk into wfex, and
// returns the raw data-chunk bytes. Returns null on any failure.
private byte[] ReadWAV(string inputFile, ref WaveFormatEx wfex)
{
    try
    {
        using (FileStream fs = new FileStream(inputFile, FileMode.Open, FileAccess.Read, FileShare.Read))
        using (BinaryReader br = new BinaryReader(fs))
        {
            br.ReadUInt32(); // RIFF_TAG
            uint rawSize = br.ReadUInt32(); // WaveHeaderSize + buff.Length
            // NOTE(review): WriteBuffer stores (WaveHeaderSize + data length) as the
            // RIFF size, so the extra "- sizeof(uint)" here looks like an off-by-4
            // that drops the last 4 data bytes — confirm against WriteBuffer's math.
            uint buffSize = rawSize - CdRipper.WaveHeaderSize - sizeof(uint);
            br.ReadUInt32(); // WAVE_TAG
            br.ReadUInt32(); // FMT__TAG
            br.ReadUInt32(); // WaveFormatSize
            // Read the raw fmt-chunk bytes and overlay WaveFormatEx on top of them.
            byte[] bytesWfex = br.ReadBytes(Marshal.SizeOf(wfex));
            var pinnedRawData = GCHandle.Alloc(bytesWfex, GCHandleType.Pinned);
            try
            {
                // Get the address of the data array
                var pinnedRawDataPtr = pinnedRawData.AddrOfPinnedObject();
                // overlay the data type on top of the raw data
                wfex = (WaveFormatEx)Marshal.PtrToStructure(pinnedRawDataPtr, typeof(WaveFormatEx));
            }
            finally
            {
                // must explicitly release
                pinnedRawData.Free();
            }
            br.ReadUInt32(); // DATA_TAG
            return br.ReadBytes((int)buffSize);
        }
    }
    catch
    {
        // NOTE(review): deliberately best-effort — any I/O or parse error falls
        // through to return null. Consider logging (Logger.LogException is used
        // elsewhere in this file) for diagnosability.
    }
    return null;
}
// Tears down the audio sample grabber: stops the analyzer thread, removes the
// grabber filter from the graph and releases its COM reference, disposes the
// ROT entry, and clears all analysis buffers and cached format state.
// Order matters: the analyzer thread is stopped before the filter is removed.
protected void ReleaseAudioSampleGrabber()
{
    try
    {
        if (sampleAnalyzerMustStop != null)
            sampleAnalyzerMustStop.Set(); // This will cause the thread to stop

        if (sampleAnalyzerThread != null)
            sampleAnalyzerThread.Join(200); // bounded wait; don't hang teardown

        if (sampleGrabber != null)
        {
            int hr = (mediaControl as IGraphBuilder).RemoveFilter(sampleGrabber as IBaseFilter);
            DsError.ThrowExceptionForHR(hr);

            Marshal.ReleaseComObject(sampleGrabber);
            sampleGrabber = null;
        }
    }
    catch (Exception ex)
    {
        // Best-effort teardown: log and continue releasing the remaining state.
        Logger.LogException(ex);
    }

    if (rotEntry != null)
    {
        rotEntry.Dispose();
        rotEntry = null;
    }

    // Clear analysis buffers under their respective locks so readers never see
    // stale data after release.
    lock (_vuLock) { _vuMeterData = null; }
    lock (_waveformLock) { _waveformData = null; }
    lock (_spectrogramLock) { _spectrogramData = null; }

    _actualAudioFormat = null;
    sampleGrabberConfigured.Reset();
}
// Called once the sample grabber has a connected input: reads back the
// negotiated audio format and derives power-of-two analysis window sizes
// (waveform, FFT, VU meter) scaled from the sample rate, plus the maximum
// sample level for the bit depth. Falls back to fixed defaults on error.
protected void CompleteAudioSampleGrabberIntialization()
{
    _actualAudioFormat = null;
    if (sampleGrabber != null)
    {
        AMMediaType mtAudio = new AMMediaType();
        if (HRESULT.SUCCEEDED(sampleGrabber.GetConnectedMediaType(mtAudio)))
        {
            // The graph was set up for PCM, so the format block is a WaveFormatEx.
            _actualAudioFormat = (WaveFormatEx)Marshal.PtrToStructure(mtAudio.formatPtr, typeof(WaveFormatEx));
            // NOTE(review): mtAudio/formatPtr is never freed here — verify whether
            // the AMMediaType should be released (e.g. DsUtils.FreeAMMediaType).

            // Target ratios of sample rate to window size for each analysis kind.
            const int WAVEFORM_WNDSIZEFACTOR = 128;
            const int VU_WNDSIZEFACTOR = 4096;
            const int FFT_WNDSIZEFACTOR = 16;

            // Default to 44.1 kHz if the renderer's format is not yet known.
            int freq = (MediaRenderer.DefaultInstance.ActualAudioFormat == null) ? 44100 : MediaRenderer.DefaultInstance.ActualAudioFormat.nSamplesPerSec;

            try
            {
                // For each window: find the smallest power of two w = (1 << k)
                // such that freq / w no longer exceeds the factor.
                int k1 = 0, k2 = 0, k3 = 0;
                while (freq / (1 << k1) > WAVEFORM_WNDSIZEFACTOR)
                    k1++;
                while (freq / (1 << k2) > FFT_WNDSIZEFACTOR)
                    k2++;
                while (freq / (1 << k3) > VU_WNDSIZEFACTOR)
                    k3++;

                _waveformWindowSize = (1 << k1);
                _fftWindowSize = (1 << k2);
                _vuMeterWindowSize = (1 << k3);

                // Largest positive sample value for the bit depth, e.g. 32767 for 16-bit.
                _maxLevel = (MediaRenderer.DefaultInstance.ActualAudioFormat != null) ?
                    (1 << (MediaRenderer.DefaultInstance.ActualAudioFormat.wBitsPerSample - 1)) - 1
                    : short.MaxValue;
            }
            catch
            {
                // Arithmetic failed (e.g. unexpected format values): use defaults.
                _vuMeterWindowSize = 64;
                _waveformWindowSize = 512;
                _fftWindowSize = 4096;
                _maxLevel = short.MaxValue;
            }
            finally
            {
                _maxLogLevel = Math.Log(_maxLevel);
            }

            sampleGrabberConfigured.Set();
            return;
        }
    }
}
// Dead code removed: an earlier commented-out InitAudioSampleGrabber() variant
// (which added the grabber and relied on SetMediaType/auto-connect) previously
// lived here; InitAudioSampleGrabber_v2 below supersedes it.

// Inserts the audio sample grabber between the ffdshow Audio Decoder and the
// sound device filter, then starts the background sample-analyzer thread.
//
// When using FFDShow, typically we'll find a ffdshow Audio Decoder connected
// to the sound device filter:
//
//   .. -->[ffdshow Audio Decoder]-->[DirectSound Device]
//
// Our audio sample grabber supports only PCM sample input and output, and the
// decoder->sound-device link is the only place where PCM samples flow (the
// sound device only accepts PCM). So we turn the graph into:
//
//   .. -->[ffdshow Audio Decoder]-->[Sample grabber]-->[DirectSound Device]
//
// Steps:
//   1. Locate the ffdshow Audio Decoder in the graph
//   2. Find its output pin and the pin that it's connected to
//   3. Locate the input and output pins of the sample grabber
//   4. Disconnect the decoder output and the sound device input
//   5. Connect the decoder output to the sample grabber input
//   6. Connect the sample grabber output to the sound device input
protected void InitAudioSampleGrabber_v2()
{
    // Get the graph builder
    IGraphBuilder graphBuilder = (mediaControl as IGraphBuilder);
    if (graphBuilder == null)
        return;

    try
    {
        // Build the sample grabber
        sampleGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(Filters.SampleGrabber, true)) as ISampleGrabber;
        if (sampleGrabber == null)
            return;

        // Add it to the filter graph
        int hr = graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "ProTONE_SampleGrabber_v2");
        DsError.ThrowExceptionForHR(hr);

        IBaseFilter ffdAudioDecoder = null;
        IPin ffdAudioDecoderOutput = null;
        IPin soundDeviceInput = null;
        IPin sampleGrabberInput = null;
        IPin sampleGrabberOutput = null;
        IntPtr pSoundDeviceInput = IntPtr.Zero;

        // 1. Locate the ffdshow Audio Decoder in the graph
        hr = graphBuilder.FindFilterByName("ffdshow Audio Decoder", out ffdAudioDecoder);
        DsError.ThrowExceptionForHR(hr);

        // 2. Find its output pin and the pin that it's connected to
        hr = ffdAudioDecoder.FindPin("Out", out ffdAudioDecoderOutput);
        DsError.ThrowExceptionForHR(hr);
        hr = ffdAudioDecoderOutput.ConnectedTo(out pSoundDeviceInput);
        DsError.ThrowExceptionForHR(hr);
        soundDeviceInput = new DSPin(pSoundDeviceInput).Value;

        // 3. Locate the input and output pins of sample grabber
        hr = (sampleGrabber as IBaseFilter).FindPin("In", out sampleGrabberInput);
        DsError.ThrowExceptionForHR(hr);
        hr = (sampleGrabber as IBaseFilter).FindPin("Out", out sampleGrabberOutput);
        DsError.ThrowExceptionForHR(hr);

        // 4. Disconnect the ffdshow Audio Decoder and its correspondent (sound device input pin)
        hr = ffdAudioDecoderOutput.Disconnect();
        DsError.ThrowExceptionForHR(hr);
        hr = soundDeviceInput.Disconnect();
        DsError.ThrowExceptionForHR(hr);

        // 5. Connect the ffdshow Audio Decoder to sample grabber input
        hr = graphBuilder.Connect(ffdAudioDecoderOutput, sampleGrabberInput);
        DsError.ThrowExceptionForHR(hr);

        // 6. Connect the sample grabber output to sound device input
        hr = graphBuilder.Connect(sampleGrabberOutput, soundDeviceInput);
        DsError.ThrowExceptionForHR(hr);

        // Request PCM audio; the actual negotiated format is read back later
        // in CompleteAudioSampleGrabberIntialization.
        AMMediaType mtAudio = new AMMediaType();
        mtAudio.majorType = MediaType.Audio;
        mtAudio.subType = MediaSubType.PCM;
        mtAudio.formatPtr = IntPtr.Zero;

        _actualAudioFormat = null;

        // NOTE(review): unlike the calls above, the HRESULTs of these four calls
        // are not checked — confirm whether that is intentional.
        sampleGrabber.SetMediaType(mtAudio);
        sampleGrabber.SetBufferSamples(true);
        sampleGrabber.SetOneShot(false);
        sampleGrabber.SetCallback(this, 1);

        // Start the background analyzer thread at high priority.
        sampleAnalyzerMustStop.Reset();
        sampleAnalyzerThread = new Thread(new ThreadStart(SampleAnalyzerLoop));
        sampleAnalyzerThread.Priority = ThreadPriority.Highest;
        sampleAnalyzerThread.Start();
    }
    catch (Exception ex)
    {
        Logger.LogException(ex);
    }

    // Register the graph in the Running Object Table (debugging via GraphEdit).
    rotEntry = new DsROTEntry(graphBuilder as IFilterGraph);
}