protected override void UpdateCurrentSettings()
{
    _AMMediaType mt;
    object fb;
    ac.CaptureGraph.Source.GetMediaType(out mt, out fb);

    if (fb is WAVEFORMATEX)
    {
        WAVEFORMATEX wfe = (WAVEFORMATEX)fb;
        lblCurrentSettings.Text = string.Format(CultureInfo.CurrentCulture,
            Strings.AudioFormatSettings, TAB, wfe.Channels, wfe.BitsPerSample,
            wfe.SamplesPerSec, wfe.AvgBytesPerSec * 8 / 1000);
    }
    else if (fb is DVINFO)
    {
        DVCaptureGraph dvcg = ac.CaptureGraph as DVCaptureGraph;
        if (dvcg != null)
        {
            dvcg.GetAudioMediaType(out mt, out fb);
            if (fb is WAVEFORMATEX)
            {
                WAVEFORMATEX wfe = (WAVEFORMATEX)fb;
                lblCurrentSettings.Text = string.Format(CultureInfo.CurrentCulture,
                    Strings.AudioFormatSettings, TAB, wfe.Channels, wfe.BitsPerSample,
                    wfe.SamplesPerSec, wfe.AvgBytesPerSec * 8 / 1000);
            }
        }
    }
    else
    {
        lblCurrentSettings.Text = "Unknown";
    }
}
internal static AudioCodec TypeOf(WAVEFORMATEX format)
{
    AudioCodec result = AudioCodec.Undefined;

    switch (format.wFormatTag)
    {
        case 1: // WAVE_FORMAT_PCM
            switch (format.nBlockAlign / format.nChannels)
            {
                case 1:
                    result = AudioCodec.PCM8;
                    break;
                case 2:
                    result = AudioCodec.PCM16;
                    break;
            }
            break;
        case 6: // WAVE_FORMAT_ALAW
            result = AudioCodec.G711A;
            break;
        case 7: // WAVE_FORMAT_MULAW
            result = AudioCodec.G711U;
            break;
    }

    return result;
}
/// <summary>
/// Opens the device for writing with the specified format.
/// </summary>
/// <param name="waveFormat">The format of the device to open.</param>
public void Open(WaveFormat waveFormat)
{
    lock (this.startStopLock)
    {
        if (this.handle != null)
        {
            throw new InvalidOperationException("The device is already open.");
        }

        WAVEFORMATEX wfx = new WAVEFORMATEX();
        wfx.nAvgBytesPerSec = waveFormat.AverageBytesPerSecond;
        wfx.wBitsPerSample = waveFormat.BitsPerSample;
        wfx.nBlockAlign = waveFormat.BlockAlign;
        wfx.nChannels = waveFormat.Channels;
        wfx.wFormatTag = (short)(int)waveFormat.FormatTag;
        wfx.nSamplesPerSec = waveFormat.SamplesPerSecond;
        wfx.cbSize = 0;

        this.recordingFormat = waveFormat.Clone();

        IntPtr tempHandle = new IntPtr();
        NativeMethods.Throw(
            NativeMethods.waveInOpen(
                ref tempHandle,
                this.deviceId,
                ref wfx,
                this.callback,
                (IntPtr)0,
                WaveOpenFlags.CALLBACK_FUNCTION | WaveOpenFlags.WAVE_FORMAT_DIRECT),
            NativeMethods.ErrorSource.WaveIn);
        this.handle = new WaveInSafeHandle(tempHandle);
    }
}
public void Play(string fileName)
{
    CloseFile();
    try
    {
        // The streams must remain open while the file plays, so they are not
        // wrapped in using blocks; CloseFile() disposes them on failure.
        _stream = new WaveStream(new FileStream(fileName, FileMode.Open));
        if (_stream.Length <= 0)
        {
            throw new Exception("Invalid WAV file");
        }

        _format = _stream.Format;
        if (_format.formatTag != WaveFormatTag.PCM && _format.formatTag != WaveFormatTag.IEEE_FLOAT)
        {
            throw new Exception("Only PCM files are supported");
        }
    }
    catch
    {
        CloseFile();
    }

    if (_stream != null)
    {
        _stream.Position = 0;
        _player = new WaveOut(_device.DeviceId, _format, 16384, 3, new BufferFillEventHandler(Filler));
    }
}
/// <summary>
/// Populates the listview with the available media types
/// </summary>
private void GetMediaTypes()
{
    if (captureGraph is AudioCaptureGraph)
    {
        AudioSource source = ((AudioCaptureGraph)captureGraph).AudioSource;
        source.GetMediaTypes(out mts, out fbs);
    }
    else if (captureGraph is DVCaptureGraph)
    {
        ((DVCaptureGraph)captureGraph).GetAudioMediaTypes(out mts, out fbs);
    }
    else
    {
        return;
    }

    for (int i = 0; i < mts.Length; i++)
    {
        WAVEFORMATEX wfe = (WAVEFORMATEX)fbs[i];
        lvFormats.Items.Add(wfe.Channels.ToString(CultureInfo.CurrentCulture));
        lvFormats.Items[i].SubItems.Add(wfe.SamplesPerSec.ToString(CultureInfo.CurrentCulture));
        lvFormats.Items[i].SubItems.Add(wfe.BitsPerSample.ToString(CultureInfo.CurrentCulture));
        lvFormats.Items[i].SubItems.Add(((int)(wfe.AvgBytesPerSec * 8 / 1000)).ToString(CultureInfo.CurrentCulture));
    }
}
private static void WriteWavHeader(FileStream fileStream, int size)
{
    using (MemoryStream memStream = new MemoryStream(64))
    {
        int cbFormat = 18; // sizeof(WAVEFORMATEX) with cbSize, no extra data

        WAVEFORMATEX format = new WAVEFORMATEX()
        {
            wFormatTag = 3, // WAVE_FORMAT_IEEE_FLOAT
            nChannels = 1,
            nSamplesPerSec = 16000,
            nAvgBytesPerSec = 64000,
            nBlockAlign = 4,
            wBitsPerSample = 32,
            cbSize = 0
        };

        using (var bw = new BinaryWriter(memStream))
        {
            // RIFF header
            WriteString(memStream, "RIFF");
            bw.Write(size + cbFormat + 20); // RIFF chunk size = total file size - 8
            WriteString(memStream, "WAVE");
            WriteString(memStream, "fmt ");
            bw.Write(cbFormat);

            // WAVEFORMATEX
            bw.Write(format.wFormatTag);
            bw.Write(format.nChannels);
            bw.Write(format.nSamplesPerSec);
            bw.Write(format.nAvgBytesPerSec);
            bw.Write(format.nBlockAlign);
            bw.Write(format.wBitsPerSample);
            bw.Write(format.cbSize);

            // data header
            WriteString(memStream, "data");
            bw.Write(size);
            memStream.WriteTo(fileStream);
        }
    }
}
/// <summary>
/// Iterate through the available media types and mark the first one
/// that matches our current media type
/// </summary>
private void SelectCurrent()
{
    _AMMediaType mt;
    object fb;
    source.GetMediaType(out mt, out fb);
    WAVEFORMATEX cur = (WAVEFORMATEX)fb;

    for (int i = 0; i < fbs.Length; i++)
    {
        WAVEFORMATEX wfe = (WAVEFORMATEX)fbs[i];
        if (cur.Channels == wfe.Channels &&
            cur.SamplesPerSec == wfe.SamplesPerSec &&
            cur.BitsPerSample == wfe.BitsPerSample)
        {
            this.Visible = true;
            lvFormats.Focus();
            lvFormats.Items[i].Selected = true;
            return;
        }
    }

    Debug.Fail("We didn't find a matching media type");
}
protected void WaveOutOpen()
{
    if (IntPtr.Zero != mWaveOutHandle)
    {
        WaveOutClose();
    }

    mWaveFormatEx = new WAVEFORMATEX();
    mWaveFormatEx.wFormatTag = 1; // WAVE_FORMAT_PCM
    mWaveFormatEx.nChannels = (ushort)Channels;
    mWaveFormatEx.nSamplesPerSec = (uint)SampleRate;
    mWaveFormatEx.nAvgBytesPerSec = (uint)(SampleRate * Channels * 16 >> 3);
    mWaveFormatEx.nBlockAlign = (ushort)(Channels * 16 >> 3);
    mWaveFormatEx.wBitsPerSample = 16;
    mWaveFormatEx.cbSize = 0;

    mCallback = new DCallback(Callback);
    waveOutOpen(ref mWaveOutHandle, WAVE_MAPPER, ref mWaveFormatEx, mCallback, IntPtr.Zero, 0x00030000); // 0x00030000 == CALLBACK_FUNCTION

    WaveBuffer = new short[BufferSize];
    for (int i = 0; i < mWaveHeader.Length; ++i)
    {
        mWaveHeaderPtr[i] = Marshal.AllocHGlobal(Marshal.SizeOf(mWaveHeader[i]));
        mWaveHeader[i].dwBufferLength = (uint)(WaveBuffer.Length * 16 >> 3); // length in bytes, not samples
        mWaveHeader[i].lpData = Marshal.AllocHGlobal((int)mWaveHeader[i].dwBufferLength);
        mWaveHeader[i].dwFlags = 0;
        Marshal.Copy(WaveBuffer, 0, mWaveHeader[i].lpData, WaveBuffer.Length);
        Marshal.StructureToPtr(mWaveHeader[i], mWaveHeaderPtr[i], true);
        waveOutPrepareHeader(mWaveOutHandle, mWaveHeaderPtr[i], Marshal.SizeOf(typeof(WAVEHDR)));
        waveOutWrite(mWaveOutHandle, mWaveHeaderPtr[i], (uint)Marshal.SizeOf(typeof(WAVEHDR)));
    }
}
/// <summary>
/// Put compatible formats supported by the selected hardware device into the
/// compression format ComboBox. We assume the compressor's static properties have already been
/// pulled in from the registry.
/// </summary>
private void RestoreCompressionFormat()
{
    _AMMediaType[] mts = Pin.GetMediaTypes(ac.CaptureGraph.Source.OutputPin);
    int defaultIndex = 0;

    // Note: GetMediaTypes appears to return the selected MT in element 0, which is a
    // duplicate of a MT found elsewhere in the array. That's why we are ignoring
    // element 0 here.
    for (int j = 1; j < mts.Length; j++)
    {
        _AMMediaType mt = mts[j];
        WAVEFORMATEX wfex = (WAVEFORMATEX)MediaType.FormatType.MarshalData(mt);
        if (OpusAudioCompressor.WorksWithOpus(wfex))
        {
            int i = cbCompressionFormat.Items.Add(new CompressorFmt(wfex));
            if ((OpusAudioCompressor.Frequency == wfex.SamplesPerSec) &&
                (OpusAudioCompressor.Channels == wfex.Channels) &&
                (OpusAudioCompressor.Depth == wfex.BitsPerSample))
            {
                defaultIndex = i;
            }
        }
    }

    if (cbCompressionFormat.Items.Count == 0)
    {
        throw new ApplicationException("No audio formats supported by the device are compatible with the Opus Encoder.");
    }

    cbCompressionFormat.SelectedIndex = defaultIndex;
}
internal static AudioCodec TypeOf(WAVEFORMATEX format)
{
    AudioCodec codec = AudioCodec.Undefined;

    switch ((WaveFormatTag)format.wFormatTag)
    {
        case WaveFormatTag.WAVE_FORMAT_PCM:
            switch (format.nBlockAlign / format.nChannels)
            {
                case 1:
                    codec = AudioCodec.PCM8;
                    break;
                case 2:
                    codec = AudioCodec.PCM16;
                    break;
            }
            break;
        case WaveFormatTag.WAVE_FORMAT_ALAW:
            codec = AudioCodec.G711A;
            break;
        case WaveFormatTag.WAVE_FORMAT_MULAW:
            codec = AudioCodec.G711U;
            break;
    }

    return codec;
}
protected override void Open(WaveFormat format)
{
    Buffers = new Queue<byte[]>();
    bufPos = 0;

    SDATA s = new SDATA();
    WAVEFORMATEX wfx = new WAVEFORMATEX();
    wfx.cbSize = 0;
    wfx.nAvgBytesPerSec = (int)format.AverageBytesPerSecond;
    wfx.nBlockAlign = (short)format.BlockAlign;
    wfx.nChannels = (short)format.Channels;
    wfx.nSamplesPerSec = (int)format.SamplesPerSecond;
    wfx.wBitsPerSample = (short)format.BitsPerSample;
    wfx.wFormatTag = (short)format.FormatTag;
    s.AsWaveFormatEx = wfx;

    Sapi4Engine.miniLog("audio.WaveFormatSet");
    audio.WaveFormatSet(s);

    Sapi4Engine.miniLog("audio.LevelGet");
    int level;
    audio.LevelGet(out level);
    Sapi4Engine.miniLog(String.Format("level={0}", level));

    // wfx = s.AsWaveFormatEx;
    Sapi4Engine.miniLog(String.Format("{0} Hz, {1} bit, {2} channels", wfx.nSamplesPerSec, wfx.wBitsPerSample, wfx.nChannels));

    Sapi4Engine.miniLog("audio.Claim");
    audio.Claim();
    Sapi4Engine.miniLog("audio.Start");
    audio.Start();
    Sapi4Engine.miniLog("  opened successfully");
}
internal bool PrepareConverter(ref WAVEFORMATEX inWavFormat, ref WAVEFORMATEX outWavFormat)
{
    bool result = true;

    if (inWavFormat.nSamplesPerSec <= 0 || inWavFormat.nChannels > 2 || inWavFormat.nChannels <= 0 ||
        outWavFormat.nChannels <= 0 || outWavFormat.nSamplesPerSec <= 0 || outWavFormat.nChannels > 2)
    {
        throw new FormatException();
    }

    _iInFormatType = AudioFormatConverter.TypeOf(inWavFormat);
    _iOutFormatType = AudioFormatConverter.TypeOf(outWavFormat);

    if (_iInFormatType < AudioCodec.G711U || _iOutFormatType < AudioCodec.G711U)
    {
        throw new FormatException();
    }

    if (outWavFormat.nSamplesPerSec == inWavFormat.nSamplesPerSec &&
        _iOutFormatType == _iInFormatType &&
        outWavFormat.nChannels == inWavFormat.nChannels)
    {
        result = false;
    }
    else
    {
        if (inWavFormat.nSamplesPerSec != outWavFormat.nSamplesPerSec)
        {
            CreateResamplingFilter(inWavFormat.nSamplesPerSec, outWavFormat.nSamplesPerSec);
        }

        _inWavFormat = inWavFormat;
        _outWavFormat = outWavFormat;
    }

    return result;
}
/// <summary>
/// Determines whether or not the device supports a given format.
/// </summary>
/// <param name="waveFormat">The format to check.</param>
/// <returns>true, if the format is supported; false, otherwise.</returns>
public bool SupportsFormat(WaveFormat waveFormat)
{
    WAVEFORMATEX wfx = new WAVEFORMATEX();
    wfx.nAvgBytesPerSec = waveFormat.AverageBytesPerSecond;
    wfx.wBitsPerSample = waveFormat.BitsPerSample;
    wfx.nBlockAlign = waveFormat.BlockAlign;
    wfx.nChannels = waveFormat.Channels;
    wfx.wFormatTag = (short)(int)waveFormat.FormatTag;
    wfx.nSamplesPerSec = waveFormat.SamplesPerSecond;
    wfx.cbSize = 0;

    IntPtr dummy = new IntPtr(0);
    MMSYSERROR ret = NativeMethods.waveInOpen(
        ref dummy,
        this.deviceId,
        ref wfx,
        null,
        (IntPtr)0,
        WaveOpenFlags.WAVE_FORMAT_QUERY);

    if (ret == MMSYSERROR.MMSYSERR_NOERROR)
    {
        return true;
    }
    else if (ret == MMSYSERROR.WAVERR_BADFORMAT)
    {
        return false;
    }
    else
    {
        NativeMethods.Throw(ret, NativeMethods.ErrorSource.WaveIn);
        return false;
    }
}
[DllImport("msacm32.dll")]
private static extern MMRESULT acmStreamOpen(
    out IntPtr phas,
    IntPtr had,
    ref MPEGLAYER3WAVEFORMAT pwfxSrc,
    ref WAVEFORMATEX pwfxDst,
    IntPtr pwfltr,
    uint dwCallback,
    uint dwInstance,
    uint fdwOpen);
public override int Initialize(IDictionary parameters)
{
    base.Initialize(parameters);

    #region waveInOpen

    WAVEFORMATEX wfx = new WAVEFORMATEX()
    {
        nChannels = (short)this.Channel,
        nSamplesPerSec = this.SamplesPerSec,
        wBitsPerSample = (short)this.BitsPerSample,
        nBlockAlign = this.BlockAlign,
        nAvgBytesPerSec = this.BytesPerSec,
        cbSize = 0,
        wFormatTag = 1 // WAVE_FORMAT_PCM
    };
    this.free_pwfx = PInvoke.StructureToPtr(wfx);
    this.waveInProcDlg = new waveIn.waveInProcDlg(this.waveInProc);

    int code = waveIn.waveInOpen(out this.hwi, waveIn.WAVE_MAPPER, this.free_pwfx, this.waveInProcDlg, 0, waveIn.WAVE_FORMAT_DIRECT | waveIn.CALLBACK_FUNCTION);
    if (code != MMSYSERR.MMSYSERR_NOERROR)
    {
        logger.ErrorFormat("waveInOpen failed, MMSYSERROR = {0}", code);
        return DotNETCode.FAILED;
    }

    #endregion

    #region waveInPrepareHeader

    waveIn.wavehdr_tag wh = new waveIn.wavehdr_tag()
    {
        lpData = this.free_pAudioData = Marshal.AllocHGlobal((int)(BlockAlign * SamplesPerSec)),
        dwBufferLength = (uint)(BlockAlign * SamplesPerSec),
        dwFlags = 0x00000002
    };
    this.whSize = Marshal.SizeOf(typeof(waveIn.wavehdr_tag));
    this.free_pwh = PInvoke.StructureToPtr(wh);

    code = waveIn.waveInPrepareHeader(hwi, this.free_pwh, (uint)this.whSize);
    if (code != MMSYSERR.MMSYSERR_NOERROR)
    {
        logger.ErrorFormat("waveInPrepareHeader failed, MMSYSERROR = {0}", code);
        return DotNETCode.FAILED;
    }

    #endregion

    #region waveInAddBuffer

    if ((code = waveIn.waveInAddBuffer(hwi, this.free_pwh, (uint)this.whSize)) != MMSYSERR.MMSYSERR_NOERROR)
    {
        logger.ErrorFormat("waveInAddBuffer failed, MMSYSERROR = {0}", code);
        return DotNETCode.FAILED;
    }

    #endregion

    return DotNETCode.SUCCESS;
}
public static void AcmStreamOpen(out IntPtr phas, IntPtr had, ref MPEGLAYER3WAVEFORMAT pwfxSrc,
    ref WAVEFORMATEX pwfxDst, IntPtr pwfltr, uint dwCallback, uint dwInstance, uint fdwOpen)
    => MMErrCheck(acmStreamOpen(out phas, had, ref pwfxSrc, ref pwfxDst, pwfltr, dwCallback, dwInstance, fdwOpen));
/// <summary>
/// Play a sound file on the default device.
/// </summary>
/// <param name="file">The file to play.</param>
public static void Play(WaveFile file)
{
    IntPtr deviceHandle = new IntPtr();
    WAVEFORMATEX format = new WAVEFORMATEX(file);
    WaveDelegate feed = new WaveDelegate(WaveFeed);

    uint err = waveOutOpen(out deviceHandle, WAVE_MAPPER, format, feed, 0, CALLBACK_FUNCTION);
    Console.WriteLine("PLAY/Open = <{0}> ({1})", GetErrorText(err), err);
}
public SourceVoice(ComPtr<IXAudio2> xAudio2, WAVEFORMATEX format)
{
    _callback = IXAudio2VoiceCallback.Create(this);
    fixed (IXAudio2SourceVoice** ppVoice = &_voice)
    {
        Common.CheckAndThrow(
            xAudio2.Get()->CreateSourceVoice(ppVoice, &format, pCallback: _callback),
            nameof(IXAudio2.CreateSourceVoice));
    }
    _voice->SetVolume(0.1f);
}
public static void InitWaveFormatEXData(ref WAVEFORMATEX waveFormat)
{
    waveFormat.wFormatTag = 1;          // WAVE_FORMAT_PCM
    waveFormat.nChannels = 1;
    waveFormat.nSamplesPerSec = 8000;
    waveFormat.nAvgBytesPerSec = 16000; // nSamplesPerSec * nBlockAlign
    waveFormat.nBlockAlign = 2;         // nChannels * wBitsPerSample / 8
    waveFormat.wBitsPerSample = 16;
    waveFormat.cbSize = 0;
}
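The last two assignments above are derived values, not free parameters. A minimal sketch of a factory that computes them instead of hardcoding (the CreatePcmFormat name is hypothetical; the ushort/uint field types mirror the PCM snippets in this collection):

// Hypothetical helper: builds a PCM WAVEFORMATEX with the derived fields
// (nBlockAlign, nAvgBytesPerSec) computed from the three free parameters.
public static WAVEFORMATEX CreatePcmFormat(ushort channels, uint samplesPerSec, ushort bitsPerSample)
{
    WAVEFORMATEX format = new WAVEFORMATEX();
    format.wFormatTag = 1; // WAVE_FORMAT_PCM
    format.nChannels = channels;
    format.nSamplesPerSec = samplesPerSec;
    format.wBitsPerSample = bitsPerSample;
    format.nBlockAlign = (ushort)(channels * bitsPerSample / 8); // bytes per sample frame
    format.nAvgBytesPerSec = samplesPerSec * format.nBlockAlign; // bytes per second
    format.cbSize = 0; // plain PCM carries no extra format bytes
    return format;
}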
private short[] Resample(WAVEFORMATEX inWavFormat, WAVEFORMATEX outWavFormat, short[] pnBuff, float[] memory)
{
    if (inWavFormat.nSamplesPerSec != outWavFormat.nSamplesPerSec)
    {
        float[] inSamples = Short2Float(pnBuff);
        inSamples = Resampling(inSamples, memory);
        pnBuff = Float2Short(inSamples);
    }
    return pnBuff;
}
/// <summary>
/// Hardcode audio config for testing.
/// </summary>
/// <returns></returns>
public bool ConfigAudio()
{
    // make up some media types for testing
    WAVEFORMATEX wfex = new WAVEFORMATEX();
    wfex.FormatTag = 1; // 1 == WAVE_FORMAT_PCM
    wfex.Channels = 1;
    wfex.SamplesPerSec = 16000;
    wfex.AvgBytesPerSec = 32000;
    wfex.BlockAlign = 2;
    wfex.BitsPerSample = 16;
    wfex.Size = 0;

    IntPtr wfexPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(wfex));
    Marshal.StructureToPtr(wfex, wfexPtr, true);

    _WMMediaType mt = new _WMMediaType();
    mt.majortype = WMGuids.ToGUID(WMGuids.WMMEDIATYPE_Audio);
    mt.subtype = WMGuids.ToGUID(WMGuids.WMMEDIASUBTYPE_PCM);
    mt.bFixedSizeSamples = 1;    // true
    mt.bTemporalCompression = 0; // false
    mt.lSampleSize = 2;
    mt.formattype = WMGuids.ToGUID(WMGuids.WMFORMAT_WaveFormatEx); // This is the only value permitted.
    mt.pUnk = null;
    mt.cbFormat = (uint)Marshal.SizeOf(wfex) + wfex.Size;
    mt.pbFormat = wfexPtr;

    try
    {
        // Used GetMediaType to sanity check the managed structs:
        //uint size = 0;
        //audioProps.GetMediaType(IntPtr.Zero, ref size);
        //IntPtr mtPtr = Marshal.AllocCoTaskMem((int)size);
        //audioProps.GetMediaType(mtPtr, ref size);
        //_WMMediaType mt2 = (_WMMediaType)Marshal.PtrToStructure(mtPtr, typeof(_WMMediaType));
        //WMMediaType.WaveFormatEx wfex2 = (WMMediaType.WaveFormatEx)Marshal.PtrToStructure(mt2.pbFormat, typeof(WMMediaType.WaveFormatEx));
        // Examine here.
        //Marshal.StructureToPtr(mt, mtPtr, true);
        //audioProps.SetMediaType(mtPtr);
    }
    catch (Exception e)
    {
        eventLog.WriteEntry("Failed to set audio properties: " + e.ToString(), EventLogEntryType.Error, 1000);
        Debug.WriteLine("Failed to set audio properties: " + e.ToString());
        return false;
    }

    bool ret = ConfigAudio(mt);
    Marshal.FreeCoTaskMem(wfexPtr);
    return ret;
}
///
/// Opens the audio system, enqueues buffers, and otherwise
/// gets us ready to play.
///
public void Open()
{
    lock (_mainLock)
    {
        if (_isOpen)
            throw new ApplicationException("Open while already open");

        WAVEFORMATEX wfx = new WAVEFORMATEX();
        wfx.wFormatTag = WAVE_FORMAT_PCM;
        wfx.nChannels = 2;     // stereo
        wfx.nSamplesPerSec = 44100;
        wfx.nBlockAlign = 4;   // four bytes per frame, see?
        wfx.wBitsPerSample = 16;
        wfx.nAvgBytesPerSec = wfx.nBlockAlign * wfx.nSamplesPerSec;
        wfx.cbSize = 0;        // no extra info

        int status = waveOutOpen(out _waveOutHandle, WAVE_MAPPER, wfx,
            _playEvent.SafeWaitHandle, CALLBACK_EVENT, 0);
        if (status != MMSYSERR_NOERROR)
        {
            throw new ApplicationException("unable to open the default WAVE device");
        }
        _isOpen = true;

        // Create audio buffers, including unmanaged data buffers
        for (int i = 0; i < N_WAVE_HEADERS; i++)
        {
            WAVEHDR header = new WAVEHDR();
            IntPtr data = Marshal.AllocHGlobal(size);

            header.lpData = data;        // pointer to the data buffer
            header.dwBufferLength = 0;   // set on each call
            header.dwBytesRecorded = 0;
            header.dwUser = size;        // hide the real buffer size here
            header.dwFlags = WHDR_DONE;  // so it'll get enqueued
            header.dwLoops = 0;
            header.lpNext = IntPtr.Zero; // we don't mess with this pointer
            header.reserved = 0;         // or this int

            // Yeah, this sucker's gonna be pinned for the duration.
            // This is probably bad. Would it be better to unpin it on
            // each buffer switch? Probably not.
            _waveHdr[i] = GCHandle.Alloc(header, GCHandleType.Pinned);
        }
    }
}
WAVEFORMATEX createWaveFormat(int BitPerSample, int SamplePerSecond, int ChannelCount)
{
    WAVEFORMATEX waveformat = new WAVEFORMATEX();
    waveformat.wFormatTag = WAVE_FORMAT_PCM;
    waveformat.wBitsPerSample = (ushort)BitPerSample;
    waveformat.nBlockAlign = (ushort)((ChannelCount * BitPerSample) / 8);
    waveformat.nAvgBytesPerSec = (uint)(SamplePerSecond * (uint)waveformat.nBlockAlign);
    waveformat.nChannels = (ushort)ChannelCount;
    waveformat.nSamplesPerSec = (uint)SamplePerSecond;
    return waveformat;
}
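A usage sketch of the helper above, with illustrative values for 16-bit, 44.1 kHz stereo:

// Derived fields: nBlockAlign = (2 * 16) / 8 = 4; nAvgBytesPerSec = 44100 * 4 = 176400.
WAVEFORMATEX cdQuality = createWaveFormat(16, 44100, 2);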
private bool CreateCaptureBuffer()
{
    uint error = DSERR.DS_OK;

    #region Create the default audio stream format

    this.wfx = new WAVEFORMATEX()
    {
        nChannels = Channel,
        nSamplesPerSec = SamplesPerSec,
        wBitsPerSample = BitsPerSample,
        nBlockAlign = BlockAlign,
        nAvgBytesPerSec = BytesPerSec,
        cbSize = 0,
        wFormatTag = Win32API.WAVE_FORMAT_PCM
    };
    this.pwfx_free = PInvoke.StructureToPtr(this.wfx);

    this.dsbd = new DSCBUFFERDESC()
    {
        dwFlags = 0,
        dwSize = Marshal.SizeOf(typeof(DSCBUFFERDESC)),
        dwReserved = 0,
        dwFXCount = 0,
        dwBufferBytes = BufferSize,
        lpwfxFormat = this.pwfx_free,
        lpDSCFXDesc = IntPtr.Zero
    };

    #endregion

    IntPtr pdscb;
    Guid iid_dscb8;
    if ((error = this.dsc8.CreateCaptureBuffer(ref this.dsbd, out pdscb, IntPtr.Zero)) == DSERR.DS_OK)
    {
        // Get the IDirectSoundCaptureBuffer8 interface instance
        iid_dscb8 = new Guid(InterfaceID.IID_IDirectSoundCaptureBuffer8);
        Marshal.QueryInterface(pdscb, ref iid_dscb8, out this.pdscb8);
        Marshal.Release(pdscb);
        this.dscb8 = Marshal.GetObjectForIUnknown(this.pdscb8) as IDirectSoundCaptureBuffer8;
    }
    else
    {
        logger.ErrorFormat("CreateCaptureBuffer failed, DSERROR = {0}", error);
        return false;
    }

    return true;
}
/// <summary>
/// Populates the listview with the available media types
/// </summary>
private void GetMediaTypes()
{
    source.GetMediaTypes(out mts, out fbs);

    for (int i = 0; i < mts.Length; i++)
    {
        WAVEFORMATEX wfe = (WAVEFORMATEX)fbs[i];
        lvFormats.Items.Add(wfe.Channels.ToString());
        lvFormats.Items[i].SubItems.Add(wfe.SamplesPerSec.ToString());
        lvFormats.Items[i].SubItems.Add(wfe.BitsPerSample.ToString());
        lvFormats.Items[i].SubItems.Add(((int)(wfe.AvgBytesPerSec * 8 / 1000)).ToString());
    }
}
/// <summary>
/// Default constructor.
/// </summary>
/// <param name="outputDevice">Output device.</param>
/// <param name="samplesPerSec">Sample rate, in samples per second (hertz). For PCM common values are
/// 8.0 kHz, 11.025 kHz, 22.05 kHz, and 44.1 kHz.</param>
/// <param name="bitsPerSample">Bits per sample. For PCM 8 or 16 are the only valid values.</param>
/// <param name="channels">Number of channels.</param>
/// <exception cref="ArgumentNullException">Is raised when <b>outputDevice</b> is null.</exception>
/// <exception cref="ArgumentException">Is raised when any of the arguments has an invalid value.</exception>
public WaveOut(WavOutDevice outputDevice, int samplesPerSec, int bitsPerSample, int channels)
{
    if (outputDevice == null)
    {
        throw new ArgumentNullException("outputDevice");
    }
    if (samplesPerSec < 8000)
    {
        throw new ArgumentException("Argument 'samplesPerSec' value must be >= 8000.");
    }
    if (bitsPerSample < 8)
    {
        throw new ArgumentException("Argument 'bitsPerSample' value must be >= 8.");
    }
    if (channels < 1)
    {
        throw new ArgumentException("Argument 'channels' value must be >= 1.");
    }

    m_pOutDevice = outputDevice;
    m_SamplesPerSec = samplesPerSec;
    m_BitsPerSample = bitsPerSample;
    m_Channels = channels;
    m_BlockSize = m_Channels * (m_BitsPerSample / 8);
    m_pPlayItems = new List<PlayItem>();

    // Try to open wav device.
    WAVEFORMATEX format = new WAVEFORMATEX();
    format.wFormatTag = WavFormat.PCM;
    format.nChannels = (ushort)m_Channels;
    format.nSamplesPerSec = (uint)samplesPerSec;
    // Bytes per second = (sample rate (Hz) * bits per sample * channels) / 8;
    // divide by 50 again to get the data volume of one 20 ms block.
    format.nAvgBytesPerSec = (uint)((m_SamplesPerSec * m_Channels * (m_BitsPerSample / 8))) / 50;
    format.nBlockAlign = (ushort)m_BlockSize;
    format.wBitsPerSample = (ushort)m_BitsPerSample;
    format.cbSize = 0;
    m_MinBuffer = (int)format.nAvgBytesPerSec * 10;

    // We must keep a delegate reference, otherwise the GC will collect it.
    m_pWaveOutProc = new waveOutProc(this.OnWaveOutProc);

    int result = WavMethods.waveOutOpen(out m_pWavDevHandle, m_pOutDevice.Index, format, m_pWaveOutProc, 0, WavConstants.CALLBACK_FUNCTION);
    if (result != MMSYSERR.NOERROR)
    {
        throw new Exception("Failed to open wav device, error: " + result.ToString() + ".");
    }
}
public HeartBeat() // constructor
{
    // For documentation on the correct values to use, please refer to the MSDN library.
    waveFormat = new WAVEFORMATEX();
    waveFormat.wFormatTag = WAVE_FORMAT_PCM;
    waveFormat.nChannels = 1;
    waveFormat.nSamplesPerSec = nSamplesPerSec;
    waveFormat.nAvgBytesPerSec = nSamplesPerSec * 2;
    waveFormat.nBlockAlign = 2;
    waveFormat.wBitsPerSample = 16;
    waveFormat.cbSize = 0;

    MMRESULT res = waveInOpen(ref hwWaveIn, WAVE_MAPPER, ref waveFormat, dwCallBack, 0, WaveInOpenFlags.CALLBACK_NULL);
    if (res != MMRESULT.MMSYSERR_NOERROR)
    {
        GpsUtils.Utils.log.Error("waveInOpen", null);
    }

    //whdrsize = 2 * Marshal.SizeOf(Type.GetType("IntPtr")) + 6 * sizeof(Int32);
    for (int i = 0; i < nBuffers; i++)
    {
        buffer[i] = Marshal.AllocHGlobal(nSamples * 2);
        whdr[i] = Marshal.AllocHGlobal(whdrsize);
        Marshal.WriteInt32(whdr[i], lpData, (int)buffer[i]);       // lpData = buffer[i];
        Marshal.WriteInt32(whdr[i], dwBufferLength, nSamples * 2); // dwBufferLength = nSamples * 2;
        Marshal.WriteInt32(whdr[i], dwFlags, 0);                   // dwFlags = 0;

        res = waveInPrepareHeader(hwWaveIn, whdr[i], whdrsize);
        if (res != MMRESULT.MMSYSERR_NOERROR)
        {
            GpsUtils.Utils.log.Error("waveInPrepareHeader", null);
        }
        res = waveInAddBuffer(hwWaveIn, whdr[i], whdrsize);
        if (res != MMRESULT.MMSYSERR_NOERROR)
        {
            GpsUtils.Utils.log.Error("waveInAddBuffer", null);
        }
    }
    cur_whdr = -1;

    res = waveInStart(hwWaveIn);
    if (res != MMRESULT.MMSYSERR_NOERROR)
    {
        GpsUtils.Utils.log.Error("waveInStart", null);
    }

    PowerPolicyNotify(PPN_UNATTENDEDMODE, 1);
    // The HTC Diamond does not work with POWER_FORCE.
    wavPowerHandle = SetPowerRequirement("wav1:", CedevicePowerState.D0, POWER_NAME, null, 0);

    thresh = Int16.MaxValue;
    lastbeepidx = -2 * nSamplesPerSec;
}
/// <summary>
/// Default constructor.
/// </summary>
/// <param name="device">Input device.</param>
/// <param name="samplesPerSec">Sample rate, in samples per second (hertz). For PCM common values are
/// 8.0 kHz, 11.025 kHz, 22.05 kHz, and 44.1 kHz.</param>
/// <param name="bitsPerSample">Bits per sample. For PCM 8 or 16 are the only valid values.</param>
/// <param name="channels">Number of channels.</param>
/// <param name="bufferSize">Specifies recording buffer size.</param>
/// <exception cref="ArgumentNullException">Is raised when <b>device</b> is null.</exception>
/// <exception cref="ArgumentException">Is raised when any of the arguments has an invalid value.</exception>
public _WaveIn(AudioInDevice device, int samplesPerSec, int bitsPerSample, int channels, int bufferSize)
{
    if (device == null)
    {
        throw new ArgumentNullException("device");
    }
    if (samplesPerSec < 8000)
    {
        throw new ArgumentException("Argument 'samplesPerSec' value must be >= 8000.");
    }
    if (bitsPerSample < 8)
    {
        throw new ArgumentException("Argument 'bitsPerSample' value must be >= 8.");
    }
    if (channels < 1)
    {
        throw new ArgumentException("Argument 'channels' value must be >= 1.");
    }

    m_pInDevice = device;
    m_SamplesPerSec = samplesPerSec;
    m_BitsPerSample = bitsPerSample;
    m_Channels = channels;
    m_BufferSize = bufferSize;
    m_BlockSize = m_Channels * (m_BitsPerSample / 8);
    m_pBuffers = new Dictionary<long, BufferItem>();

    // Try to open wav device.
    WAVEFORMATEX format = new WAVEFORMATEX();
    format.wFormatTag = WavFormat.PCM;
    format.nChannels = (ushort)m_Channels;
    format.nSamplesPerSec = (uint)samplesPerSec;
    format.nAvgBytesPerSec = (uint)(m_SamplesPerSec * (m_Channels * (m_BitsPerSample / 8)));
    format.nBlockAlign = (ushort)m_BlockSize;
    format.wBitsPerSample = (ushort)m_BitsPerSample;
    format.cbSize = 0;

    // We must keep a delegate reference, otherwise the GC will collect it.
    m_pWaveInProc = new waveInProc(this.OnWaveInProc);

    int result = waveInOpen(out m_pWavDevHandle, m_pInDevice.Index, format, m_pWaveInProc, 0, WavConstants.CALLBACK_FUNCTION);
    if (result != MMSYSERR.NOERROR)
    {
        throw new Exception("Failed to open wav device, error: " + result.ToString() + ".");
    }

    CreateBuffers();
}
public WaveFile()
{
    WaveHeder = new WAVEHEADER();
    WaveHeder.dwRiff = BitConverter.ToUInt32(RIFF, 0);
    WaveHeder.dwWave = BitConverter.ToUInt32(WAVE, 0);
    WaveHeder.dwFmt = BitConverter.ToUInt32(fmt, 0);

    WaveFormatEx = new WAVEFORMATEX();
    WaveFormatEx.wFormatTag = 3; // WAVE_FORMAT_IEEE_FLOAT
    WaveFormatEx.nChannels = 1;
    WaveFormatEx.nSamplesPerSec = 16000;
    WaveFormatEx.wBitsPerSample = 32;
    WaveFormatEx.nBlockAlign = (ushort)(WaveFormatEx.nChannels * WaveFormatEx.wBitsPerSample / 8);
    WaveFormatEx.nAvgBytesPerSec = WaveFormatEx.nSamplesPerSec * WaveFormatEx.nBlockAlign;
    WaveFormatEx.cbSize = 0;
}
public WaveOut(int device, WAVEFORMATEX format, int bufferSize, int bufferCount, BufferFillEventHandler fillProc)
{
    m_zero = format.wBitsPerSample == 8 ? (byte)128 : (byte)0;
    m_FillProc = fillProc;

    var errorCode = (MMErrors)WaveNative.waveOutOpen(out m_WaveOut, device, ref format, m_BufferProc, 0, (int)CallBackFlag.CALLBACK_FUNCTION);
    if (errorCode != MMErrors.MMSYSERR_NOERROR)
    {
        throw new MixerException(errorCode, Audio.GetErrorDescription(FuncName.fnWaveOutOpen, errorCode));
    }

    AllocateBuffers(bufferSize, bufferCount);
    m_Thread = new Thread(new ThreadStart(ThreadProc));
    m_Thread.Start();
}
internal static bool OpenPlaybackDevice(out IntPtr handle, uint deviceId, uint samplesPerSec, int numChannels)
{
    const int WAVE_FORMAT_PCM = 1;

    WAVEFORMATEX format = new WAVEFORMATEX();
    format.FormatTag = WAVE_FORMAT_PCM;
    format.Channels = (ushort)numChannels;
    format.SamplesPerSec = samplesPerSec;
    format.BitsPerSample = 16;
    format.BlockAlign = (ushort)(format.Channels * format.BitsPerSample / 8);
    format.AvgBytesPerSec = format.SamplesPerSec * format.BlockAlign;
    format.Size = (ushort)Marshal.SizeOf(format);

    return waveOutOpen(out handle, deviceId, ref format, null, IntPtr.Zero, 0) == 0;
}
public static void WriteWavHeader(Stream stream, int dataLength)
{
    using (MemoryStream memStream = new MemoryStream(64))
    {
        int cbFormat = 18; // sizeof(WAVEFORMATEX) with cbSize, no extra data

        WAVEFORMATEX format = new WAVEFORMATEX()
        {
            wFormatTag = 1, // WAVE_FORMAT_PCM
            nChannels = 1,
            nSamplesPerSec = 16000,
            nAvgBytesPerSec = 32000,
            nBlockAlign = 2,
            wBitsPerSample = 16,
            cbSize = 0
        };

        using (var bw = new BinaryWriter(memStream))
        {
            WriteString(memStream, "RIFF");
            bw.Write(dataLength + cbFormat + 20); // RIFF chunk size = total file size - 8
            WriteString(memStream, "WAVE");
            WriteString(memStream, "fmt ");
            bw.Write(cbFormat);
            bw.Write(format.wFormatTag);
            bw.Write(format.nChannels);
            bw.Write(format.nSamplesPerSec);
            bw.Write(format.nAvgBytesPerSec);
            bw.Write(format.nBlockAlign);
            bw.Write(format.wBitsPerSample);
            bw.Write(format.cbSize);
            WriteString(memStream, "data");
            bw.Write(dataLength);
            memStream.WriteTo(stream);
        }
    }
}
[DllImport("winmm.dll")]
private static extern MMRESULT waveOutOpen(
    ref IntPtr hWaveOut,
    IntPtr uDeviceID,
    ref WAVEFORMATEX lpFormat,
    [MarshalAs(UnmanagedType.FunctionPtr)] WaveOutProcCallback dwOutProcCallback,
    IntPtr dwInstance,
    WaveOutOpenFlags dwFlags);
/// <summary>
/// Default constructor.
/// </summary>
/// <param name="outputDevice">Output device.</param>
/// <param name="samplesPerSec">Sample rate, in samples per second (hertz). For PCM common values are
/// 8.0 kHz, 11.025 kHz, 22.05 kHz, and 44.1 kHz.</param>
/// <param name="bitsPerSample">Bits per sample. For PCM 8 or 16 are the only valid values.</param>
/// <param name="channels">Number of channels.</param>
/// <exception cref="ArgumentNullException">Is raised when <b>outputDevice</b> is null.</exception>
/// <exception cref="ArgumentException">Is raised when any of the arguments has an invalid value.</exception>
public WaveOut(AudioOutDevice outputDevice, int samplesPerSec, int bitsPerSample, int channels)
{
    if (outputDevice == null)
    {
        throw new ArgumentNullException("outputDevice");
    }
    if (samplesPerSec < 8000)
    {
        throw new ArgumentException("Argument 'samplesPerSec' value must be >= 8000.");
    }
    if (bitsPerSample < 8)
    {
        throw new ArgumentException("Argument 'bitsPerSample' value must be >= 8.");
    }
    if (channels < 1)
    {
        throw new ArgumentException("Argument 'channels' value must be >= 1.");
    }

    m_pOutDevice = outputDevice;
    m_SamplesPerSec = samplesPerSec;
    m_BitsPerSample = bitsPerSample;
    m_Channels = channels;
    m_BlockSize = m_Channels * (m_BitsPerSample / 8);
    m_pPlayItems = new List<PlayItem>();

    // Try to open wav device.
    WAVEFORMATEX format = new WAVEFORMATEX();
    format.wFormatTag = 0x0001; // PCM - 0x0001
    format.nChannels = (ushort)m_Channels;
    format.nSamplesPerSec = (uint)samplesPerSec;
    format.nAvgBytesPerSec = (uint)(m_SamplesPerSec * m_Channels * (m_BitsPerSample / 8));
    format.nBlockAlign = (ushort)m_BlockSize;
    format.wBitsPerSample = (ushort)m_BitsPerSample;
    format.cbSize = 0;

    // We must keep a delegate reference, otherwise the GC will collect it.
    m_pWaveOutProc = new waveOutProc(this.OnWaveOutProc);

    int result = WavMethods.waveOutOpen(out m_pWavDevHandle, m_pOutDevice.Index, format, m_pWaveOutProc, 0, WavConstants.CALLBACK_FUNCTION);
    if (result != MMSYSERR.NOERROR)
    {
        throw new Exception("Failed to open wav device, error: " + result.ToString() + ".");
    }
}
[DllImport("winmm.dll")]
public static extern int waveOutOpen(out IntPtr hWaveOut, int uDeviceID, WAVEFORMATEX lpFormat, waveOutProc dwCallback, int dwInstance, int dwFlags);
private static void WriteWavHeader(Stream stream)
{
    // Data length to be fixed up later
    int dataLength = 0;

    // We need to use a memory stream because the BinaryWriter will close the underlying stream when it is closed
    MemoryStream memStream = null;
    BinaryWriter bw = null;
    try
    {
        memStream = new MemoryStream(64);
        WAVEFORMATEX format = new WAVEFORMATEX
        {
            FormatTag = 1,
            Channels = 1,
            SamplesPerSec = 16000,
            AvgBytesPerSec = 32000,
            BlockAlign = 2,
            BitsPerSample = 16,
            Size = 0
        };

        bw = new BinaryWriter(memStream);

        // RIFF header
        WriteHeaderString(memStream, RiffHeaderTag);
        bw.Write(dataLength + FullHeaderSize - 8); // File size - 8
        WriteHeaderString(memStream, "WAVE");
        WriteHeaderString(memStream, "fmt ");
        bw.Write(WaveformatExSize);

        // WAVEFORMATEX
        bw.Write(format.FormatTag);
        bw.Write(format.Channels);
        bw.Write(format.SamplesPerSec);
        bw.Write(format.AvgBytesPerSec);
        bw.Write(format.BlockAlign);
        bw.Write(format.BitsPerSample);
        bw.Write(format.Size);

        // data header
        WriteHeaderString(memStream, DataHeaderTag);
        bw.Write(dataLength);
        memStream.WriteTo(stream);
    }
    finally
    {
        if (bw != null)
        {
            memStream = null;
            bw.Dispose();
        }
        if (memStream != null)
        {
            memStream.Dispose();
        }
    }
}
/// <summary>
/// A bare bones WAV file header writer
/// </summary>
static void WriteWavHeader(Stream stream, int dataLength)
{
    // We need to use a memory stream because the BinaryWriter will close the underlying stream when it is closed
    using (MemoryStream memStream = new MemoryStream(64))
    {
        int cbFormat = 18; // sizeof(WAVEFORMATEX)

        WAVEFORMATEX format = new WAVEFORMATEX()
        {
            wFormatTag = 1,
            nChannels = 1,
            nSamplesPerSec = 16000,
            nAvgBytesPerSec = 32000,
            nBlockAlign = 2,
            wBitsPerSample = 16,
            cbSize = 0
        };

        using (var bw = new BinaryWriter(memStream))
        {
            // RIFF header
            WriteString(memStream, "RIFF");
            bw.Write(dataLength + cbFormat + 20); // File size - 8
            WriteString(memStream, "WAVE");
            WriteString(memStream, "fmt ");
            bw.Write(cbFormat);

            // WAVEFORMATEX
            bw.Write(format.wFormatTag);
            bw.Write(format.nChannels);
            bw.Write(format.nSamplesPerSec);
            bw.Write(format.nAvgBytesPerSec);
            bw.Write(format.nBlockAlign);
            bw.Write(format.wBitsPerSample);
            bw.Write(format.cbSize);

            // data header
            WriteString(memStream, "data");
            bw.Write(dataLength);
            memStream.WriteTo(stream);
        }
    }
}
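For reference, the byte layout the writer above produces, derived from the writes themselves (an 18-byte WAVEFORMATEX with cbSize = 0 and no extra format data):

// offset  0: "RIFF"                      offset 20: WAVEFORMATEX (18 bytes)
// offset  4: RIFF chunk size             offset 38: "data"
//            = dataLength + 38           offset 42: dataLength
// offset  8: "WAVE"                      offset 46: first PCM sample
// offset 12: "fmt "
// offset 16: 18 (cbFormat)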
[DllImport("winmm.dll")]
private static extern MMRESULT waveInOpen(ref IntPtr hWaveIn, uint deviceId, ref WAVEFORMATEX wfx, IntPtr dwCallBack, uint dwInstance, WaveInOpenFlags dwFlags);
[DllImport("avifil32.dll")]
public static extern int AVIStreamSetFormat(IntPtr pavi, int lPos, ref WAVEFORMATEX lpFormat, int cbFormat);
[DllImport("winmm.dll")]
public static extern uint waveOutOpen(ref IntPtr phwo, uint uDeviceID, ref WAVEFORMATEX pwfx, IntPtr dwCallback, IntPtr dwCallbackInstance, uint fdwOpen);
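A declaration like the one above also supports format probing: passing WAVE_FORMAT_QUERY asks the driver whether a format is playable without actually opening the device. A minimal sketch against this signature (IsFormatSupported is a hypothetical wrapper; the constants are the standard mmsystem.h values):

const uint WAVE_MAPPER = 0xFFFFFFFF;   // "any suitable device"
const uint WAVE_FORMAT_QUERY = 0x0001; // probe only; no handle is created

static bool IsFormatSupported(ref WAVEFORMATEX format)
{
    IntPtr handle = IntPtr.Zero;
    // With WAVE_FORMAT_QUERY the call returns immediately and no waveOutClose is needed.
    uint result = waveOutOpen(ref handle, WAVE_MAPPER, ref format, IntPtr.Zero, IntPtr.Zero, WAVE_FORMAT_QUERY);
    return result == 0; // MMSYSERR_NOERROR
}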
protected void ReportOpenMediaCompleted (IDictionary<MediaSourceAttributesKeys, string> mediaStreamAttributes, IEnumerable<MediaStreamDescription> availableMediaStreams)
{
    IntPtr stream;
    string str_duration;
    string str_can_seek;
    bool can_seek;
    ulong duration;

    // FIXME: wrong/overzealous validations wrt SL2 (see unit tests)

    if (closed)
        throw new InvalidOperationException ("closed");
    if (media_element == null)
        throw new InvalidOperationException ("media_element");
    if (demuxer == IntPtr.Zero)
        throw new InvalidOperationException ("demuxer");

    // FIXME: mediaStreamAttributes and availableMediaStreams can be null in SL2
    if (mediaStreamAttributes == null)
        throw new ArgumentNullException ("mediaStreamAttributes");
    if (availableMediaStreams == null)
        throw new ArgumentNullException ("availableMediaStreams");

    if (media == IntPtr.Zero)
        media = NativeMethods.imedia_object_get_media_reffed (demuxer);

    if (mediaStreamAttributes.TryGetValue (MediaSourceAttributesKeys.Duration, out str_duration)) {
        duration = ulong.Parse (str_duration);
    } else {
        throw new ArgumentException ("mediaStreamAttributes.Duration is required.");
    }

    if (mediaStreamAttributes.TryGetValue (MediaSourceAttributesKeys.CanSeek, out str_can_seek)) {
        can_seek = !string.Equals (str_can_seek, "False", StringComparison.OrdinalIgnoreCase);
        NativeMethods.external_demuxer_set_can_seek (demuxer, can_seek);
    }

    if (mediaStreamAttributes.ContainsKey (MediaSourceAttributesKeys.DRMHeader)) {
        NativeMethods.imedia_demuxer_set_is_drm (demuxer, true);
    } else {
        foreach (MediaStreamDescription stream_description in availableMediaStreams) {
            string str_width, str_height;
            string str_fourcc, str_codec_private_data;
            uint width, height;
            int fourcc;
            IntPtr extra_data = IntPtr.Zero;
            uint extra_data_size = 0;

            if (stream_description == null)
                throw new ArgumentNullException ("availableMediaStreams");
            if (stream_description.MediaAttributes == null)
                throw new ArgumentNullException ("availableMediaStreams.MediaAttributes");

            switch (stream_description.Type) {
            case MediaStreamType.Video:
                if (stream_description.MediaAttributes.TryGetValue (MediaStreamAttributeKeys.VideoFourCC, out str_fourcc)) {
                    if (str_fourcc == null || str_fourcc.Length != 4)
                        throw new ArgumentOutOfRangeException ("availableMediaStreams.MediaAttributes.VideoFourCC", str_fourcc);
                    fourcc = 0;
                    for (int i = 0; i < str_fourcc.Length; i++)
                        fourcc += ((byte) str_fourcc [i]) << (8 * i);
                } else {
                    throw new ArgumentException ("availableMediaStreams.MediaAttributes.VideoFourCC");
                }

                if (stream_description.MediaAttributes.TryGetValue (MediaStreamAttributeKeys.Height, out str_height)) {
                    height = uint.Parse (str_height);
                } else {
                    throw new ArgumentException ("availableMediaStreams.MediaAttributes.Height");
                }

                if (stream_description.MediaAttributes.TryGetValue (MediaStreamAttributeKeys.Width, out str_width)) {
                    width = uint.Parse (str_width);
                } else {
                    throw new ArgumentException ("availableMediaStreams.MediaAttributes.Width");
                }

                if (stream_description.MediaAttributes.TryGetValue (MediaStreamAttributeKeys.CodecPrivateData, out str_codec_private_data)) {
                    extra_data_size = (uint) str_codec_private_data.Length / 2;
                    byte [] buf = new byte [extra_data_size];
                    for (int i = 0; i < buf.Length; i++)
                        buf [i] = byte.Parse (str_codec_private_data.Substring (i * 2, 2), NumberStyles.HexNumber);
                    extra_data = Marshal.AllocHGlobal ((int) extra_data_size);
                    Marshal.Copy (buf, 0, extra_data, buf.Length);
                }

                stream = NativeMethods.video_stream_new (media, fourcc, width, height, (ulong) duration, extra_data, extra_data_size);
                break;

            case MediaStreamType.Audio:
                WAVEFORMATEX wave;
                if (stream_description.MediaAttributes.TryGetValue (MediaStreamAttributeKeys.CodecPrivateData, out str_codec_private_data)) {
                    // str_codec_private_data is a WAVEFORMATEX in base16 encoding:
                    // the first 18 bytes (36 hex characters) are the fixed-size
                    // header, anything after that is codec-specific extra data.
                    if (str_codec_private_data == null || str_codec_private_data.Length < 36)
                        throw new ArgumentOutOfRangeException ("availableMediaStreams.MediaAttributes.CodecPrivateData", str_codec_private_data);
                    wave = new WAVEFORMATEX (str_codec_private_data);
                    extra_data_size = wave.Size;
                    byte [] buf = new byte [extra_data_size];
                    for (int i = 0; i < buf.Length; i++)
                        buf [i] = byte.Parse (str_codec_private_data.Substring (36 + i * 2, 2), NumberStyles.HexNumber);
                    extra_data = Marshal.AllocHGlobal ((int) extra_data_size);
                    Marshal.Copy (buf, 0, extra_data, buf.Length);
                } else {
                    // CodecPrivateData is required for audio
                    throw new ArgumentException ("availableMediaStreams.MediaAttributes.CodecPrivateData");
                }
                stream = NativeMethods.audio_stream_new (media, wave.FormatTag, wave.BitsPerSample, wave.BlockAlign, (int) wave.SamplesPerSec, wave.Channels, (int) wave.AvgBytesPerSec * 8, extra_data, extra_data_size);
                break;

            case MediaStreamType.Script:
                continue; // We don't care about these yet, SL probably doesn't either

            default:
                throw new ArgumentOutOfRangeException ("mediaStreamType");
            }

            stream_description.StreamId = NativeMethods.external_demuxer_add_stream (demuxer, stream);
            stream_description.NativeStream = stream;
        }
    }

    NativeMethods.imedia_demuxer_report_open_demuxer_completed (demuxer);
}
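The base16 decoding in the method above appears in both the video and audio branches; a small sketch of factoring it out (HexToBytes is a hypothetical helper; it needs System.Globalization):

// Hypothetical helper: two hex characters decode to one byte. For the audio
// CodecPrivateData, startChar = 36 skips the 18-byte fixed WAVEFORMATEX header
// and yields only the codec-specific extra data that follows it; for video,
// startChar = 0 decodes the whole string.
static byte [] HexToBytes (string hex, int startChar)
{
    byte [] buf = new byte [(hex.Length - startChar) / 2];
    for (int i = 0; i < buf.Length; i++)
        buf [i] = byte.Parse (hex.Substring (startChar + i * 2, 2), NumberStyles.HexNumber);
    return buf;
}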
int waveOutOpen(
    out IntPtr waveOutHandle,
    int deviceId,
    WAVEFORMATEX format,
    SafeWaitHandle event_handle,
    int instanceData,
    int flags);
/// <summary>
/// A bare bones WAV file header writer
/// </summary>
private static void WriteWavHeader(Stream stream)
{
    // Data length to be fixed up later
    int dataLength = 0;

    // We need to use a memory stream because the BinaryWriter will close the underlying stream when it is closed
    MemoryStream memStream = null;
    BinaryWriter bw = null;

    // FXCop note: This try/finally block may look strange, but it is
    // the recommended way to correctly dispose a stream that is used
    // by a writer to avoid the stream from being double disposed.
    // For more information see FXCop rule: CA2202
    try
    {
        memStream = new MemoryStream(64);
        WAVEFORMATEX format = new WAVEFORMATEX
        {
            FormatTag = 1,
            Channels = 1,
            SamplesPerSec = 16000,
            AvgBytesPerSec = 32000,
            BlockAlign = 2,
            BitsPerSample = 16,
            Size = 0
        };

        bw = new BinaryWriter(memStream);

        // RIFF header
        WriteHeaderString(memStream, RiffHeaderTag);
        bw.Write(dataLength + FullHeaderSize - 8); // File size - 8
        WriteHeaderString(memStream, "WAVE");
        WriteHeaderString(memStream, "fmt ");
        bw.Write(WaveformatExSize);

        // WAVEFORMATEX
        bw.Write(format.FormatTag);
        bw.Write(format.Channels);
        bw.Write(format.SamplesPerSec);
        bw.Write(format.AvgBytesPerSec);
        bw.Write(format.BlockAlign);
        bw.Write(format.BitsPerSample);
        bw.Write(format.Size);

        // data header
        WriteHeaderString(memStream, DataHeaderTag);
        bw.Write(dataLength);
        memStream.WriteTo(stream);
    }
    finally
    {
        if (bw != null)
        {
            memStream = null;
            bw.Dispose();
        }
        if (memStream != null)
        {
            memStream.Dispose();
        }
    }
}
[DllImport("winmm.dll")]
static extern uint waveOutOpen(IntPtr* phwo, uint uDeviceID, WAVEFORMATEX* pwfx, uint dwcb, uint dwInstance, uint dwfOpen);