private void TestOutputType()
{
    int hr;
    AMMediaType pmt = new AMMediaType();
    AMMediaType pmt2 = new AMMediaType();
    WaveFormatEx w = new WaveFormatEx();

    hr = m_imo.GetOutputType(0, 0, pmt);
    Debug.WriteLine(DsToString.AMMediaTypeToString(pmt));
    Marshal.PtrToStructure(pmt.formatPtr, w);

    //pmt.sampleSize = 44100;
    //pmt.fixedSizeSamples = true;

    hr = m_imo.SetOutputType(0, pmt, DMOSetType.Clear);
    DMOError.ThrowExceptionForHR(hr);

    hr = m_imo.SetOutputType(0, pmt, DMOSetType.TestOnly);
    DMOError.ThrowExceptionForHR(hr);

    hr = m_imo.SetOutputType(0, pmt, DMOSetType.None);
    DMOError.ThrowExceptionForHR(hr);

    hr = m_imo.GetOutputCurrentType(0, pmt2);
    DMOError.ThrowExceptionForHR(hr);

    Debug.Assert(pmt2.majorType == pmt.majorType, "GetOutputCurrentType");
}
public void AudioBuffer_Persist()
{
    byte[] rawBytes = new byte[] { 1, 2, 3, 4, 5, 6 };
    var wf = WaveFormatEx.Create(WaveFormatTag.WAVE_FORMAT_APTX, 16000, 16, 2, 0, 16000);
    AudioBuffer buffer = new AudioBuffer(rawBytes, wf);
    AudioBuffer bresult = default(AudioBuffer);

    var p1 = Pipeline.Create();
    var store = Store.Create(p1, "audio", null);
    Generators.Return(p1, buffer).Write("audio", store);
    p1.RunAsync();

    var p2 = Pipeline.Create();
    var store2 = Store.Open(p2, "audio", null);
    store2.OpenStream<AudioBuffer>("audio").Do(b => bresult = b);
    p2.RunAsync();

    System.Threading.Thread.Sleep(100);
    p1.Dispose();
    p2.Dispose();

    Assert.AreEqual(6, bresult.Length);
    Assert.AreEqual(6, bresult.Data.Length);
    Assert.AreEqual(wf, bresult.Format);
    CollectionAssert.AreEqual(rawBytes, bresult.Data);
}
private void GetAudioPrivateCodecData()
{
    WaveFormatEx waveFormat = new WaveFormatEx();
    waveFormat.BitsPerSample = (short)this.SampleSize;
    waveFormat.AvgBytesPerSec = (this.SampleSize / 8) * this.SampleRate * this.ChannelCount;
    waveFormat.Channels = (short)this.ChannelCount;
    waveFormat.FormatTag = WaveFormatEx.FormatMpegHEAAC; // 0xFF; // WaveFormatEx.FormatPCM; // Raw_AAC
    waveFormat.SamplesPerSec = (int)this.SampleRate;
    waveFormat.BlockAlign = 1; // (short)(waveFormat.Channels * (waveFormat.BitsPerSample / 8));

    byte[] config = QBox.GetAudioSpecificConfig(this.audioMetaSample);

    waveFormat.ext = new byte[12 + config.Length];
    for (int i = 0; i < 12; i++)
    {
        waveFormat.ext[i] = 0;
    }
    //waveFormat.ext[0] = 3; // payload type

    waveFormat.Size = (short)waveFormat.ext.Length;
    for (int i = 12, j = 0; i < waveFormat.Size; i++, j++)
    {
        waveFormat.ext[i] = config[j];
    }

    waveFormat.ValidateWaveFormat();
    Codec.PrivateCodecData = waveFormat.ToHexString();
}
//Can't get this to work!!
AMMediaType getAlacMediaType()
{
    byte[] extraInfo = new byte[]
    {
        0x00, 0x00, 0x00, 0x24, 0x61, 0x6C, 0x61, 0x63, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x01, 0x60, 0x00, 0x10, 0x28, 0x0E, 0x0A, 0x02, 0x00, 0xFF,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xAC, 0x44
    };

    WaveFormatEx w = new WaveFormatEx();
    w.wBitsPerSample = 16;
    w.cbSize = (ushort)extraInfo.Length;
    w.nChannels = 2;
    w.nSamplesPerSec = 44100;
    w.wFormatTag = 27745;
    w.nAvgBytesPerSec = 87765;
    w.nBlockAlign = 4;

    AMMediaType amt = new AMMediaType();
    amt.majorType = MediaType.Audio;
    amt.subType = new Guid("63616C61-0000-0010-8000-00AA00389B71"); //ALAC
    amt.formatType = FormatType.WaveEx;
    amt.SetFormat(w);
    amt.AddFormatExtraData(extraInfo);
    amt.fixedSizeSamples = true;
    amt.sampleSize = 4;
    return amt;
}
private static void UpdateAudioPart(IMediaDet mediaDet, MediaProperty mediaProperty)
{
    int hr = 0;
    AMMediaType mediaType = new AMMediaType();
    hr = mediaDet.get_StreamMediaType(mediaType);
    DsError.ThrowExceptionForHR(hr);
    mediaProperty.audioSubType = mediaType.subType;

    double streamLength;
    hr = mediaDet.get_StreamLength(out streamLength);
    DsError.ThrowExceptionForHR(hr);
    mediaProperty.audioLength = TimeSpan.FromSeconds(streamLength);

    if (mediaType.formatType == FormatType.WaveEx)
    {
        WaveFormatEx waveFormatEx = (WaveFormatEx)Marshal.PtrToStructure(mediaType.formatPtr, typeof(WaveFormatEx));
        mediaProperty.channels = waveFormatEx.nChannels;
        mediaProperty.samplesPerSec = ((float)waveFormatEx.nSamplesPerSec) / 1000; // stored in kHz
        mediaProperty.bitsPerSample = waveFormatEx.wBitsPerSample;
    }

    // Release the format block allocated by get_StreamMediaType so it does not leak.
    DsUtils.FreeAMMediaType(mediaType);
}
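// Illustrative only: a minimal sketch of how UpdateAudioPart might be driven with DirectShowLib's
// MediaDet object. The file path, the MediaProperty construction and the assumption that stream 0
// is the audio stream are all hypothetical.
private static void InspectAudioExample()
{
    IMediaDet mediaDet = (IMediaDet)new MediaDet();
    try
    {
        int hr = mediaDet.put_Filename(@"C:\temp\sample.avi"); // hypothetical path
        DsError.ThrowExceptionForHR(hr);

        hr = mediaDet.put_CurrentStream(0); // assumes the audio stream is stream 0
        DsError.ThrowExceptionForHR(hr);

        MediaProperty props = new MediaProperty(); // hypothetical construction
        UpdateAudioPart(mediaDet, props);
    }
    finally
    {
        Marshal.ReleaseComObject(mediaDet);
    }
}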
private object GetField(AMMediaType mediaType, String fieldName)
{
    object formatStruct;
    if (mediaType.formatType == FormatType.WaveEx)
    {
        formatStruct = new WaveFormatEx();
    }
    else if (mediaType.formatType == FormatType.VideoInfo)
    {
        formatStruct = new VideoInfoHeader();
    }
    else if (mediaType.formatType == FormatType.VideoInfo2)
    {
        formatStruct = new VideoInfoHeader2();
    }
    else
    {
        throw new NotSupportedException("This device does not support a recognized format block.");
    }

    // Retrieve the nested structure
    Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

    // Find the required field
    Type structType = formatStruct.GetType();
    FieldInfo fieldInfo = structType.GetField(fieldName);
    if (fieldInfo != null)
    {
        return fieldInfo.GetValue(formatStruct);
    }
    return null;
}
/// <summary>
/// Save the sample grabber size.
/// </summary>
/// <param name="sampGrabber">The sample grabber.</param>
private void SaveSizeInfo(ISampleGrabber sampGrabber)
{
    int hr;

    // Get the media type from the SampleGrabber
    AMMediaType media = new AMMediaType();
    hr = sampGrabber.GetConnectedMediaType(media);
    DsError.ThrowExceptionForHR(hr);

    if ((media.formatType != FormatType.WaveEx) || (media.formatPtr == IntPtr.Zero))
    {
        throw new NotSupportedException("Unknown Grabber Media Format");
    }

    // Grab the size info
    WaveFormatEx waveFormatEx = (WaveFormatEx)Marshal.PtrToStructure(media.formatPtr, typeof(WaveFormatEx));
    _channels = waveFormatEx.nChannels;
    _sampleRate = waveFormatEx.nSamplesPerSec;
    _bitsPerSample = waveFormatEx.wBitsPerSample;

    // Calculate the size of the sound data.
    _soundMemorySize = (int)_channels * _sampleRate;

    DsUtils.FreeAMMediaType(media);
    media = null;
}
public static WaveFormatExtensible FromMediaType(AMMediaType pMediaType)
{
    if (MediaType.Audio != pMediaType.majorType)
    {
        throw new Exception("not Audio");
    }
    if (FormatType.WaveEx != pMediaType.formatType || pMediaType.formatSize < 18)
    {
        throw new Exception("not WaveEx");
    }

    WaveFormatEx pWfx = new WaveFormatEx();
    Marshal.PtrToStructure(pMediaType.formatPtr, pWfx);

    if (pWfx.wFormatTag == unchecked((short)WAVE_FORMAT_EXTENSIBLE) && pWfx.cbSize >= 22)
    {
        var pWfe = new WaveFormatExtensible();
        Marshal.PtrToStructure(pMediaType.formatPtr, pWfe);
        return pWfe;
    }

    return new WaveFormatExtensible()
    {
        nChannels = pWfx.nChannels,
        nSamplesPerSec = pWfx.nSamplesPerSec,
        nBlockAlign = pWfx.nBlockAlign,
        wBitsPerSample = pWfx.wBitsPerSample,
        nAvgBytesPerSec = pWfx.nAvgBytesPerSec,
        wFormatTag = pWfx.wFormatTag,
        cbSize = 0
    };
}
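// Illustrative only: one plausible way to feed FromMediaType, reading the media type off an
// already-connected DirectShow pin. It assumes FromMediaType is exposed on WaveFormatExtensible;
// error handling is kept to a minimum.
void DumpConnectedAudioFormatExample(IPin audioOutputPin)
{
    AMMediaType mt = new AMMediaType();
    int hr = audioOutputPin.ConnectionMediaType(mt);
    DsError.ThrowExceptionForHR(hr);
    try
    {
        WaveFormatExtensible wfe = WaveFormatExtensible.FromMediaType(mt);
        Debug.WriteLine(string.Format("{0} Hz, {1} ch, {2} bits", wfe.nSamplesPerSec, wfe.nChannels, wfe.wBitsPerSample));
    }
    finally
    {
        DsUtils.FreeAMMediaType(mt);
    }
}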
protected object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
{
    if (streamConfig == null)
    {
        throw new NotSupportedException();
    }

    this.assertStopped();
    this.derenderGraph();

    IntPtr zero = IntPtr.Zero;
    AMMediaType structure = new AMMediaType();
    try
    {
        object obj2;
        int format = streamConfig.GetFormat(out zero);
        if (format != 0)
        {
            Marshal.ThrowExceptionForHR(format);
        }
        Marshal.PtrToStructure(zero, structure);

        if (structure.formatType == FormatType.WaveEx)
        {
            obj2 = new WaveFormatEx();
        }
        else if (structure.formatType == FormatType.VideoInfo)
        {
            obj2 = new VideoInfoHeader();
        }
        else
        {
            if (structure.formatType != FormatType.VideoInfo2)
            {
                throw new NotSupportedException("This device does not support a recognized format block.");
            }
            obj2 = new VideoInfoHeader2();
        }

        Marshal.PtrToStructure(structure.formatPtr, obj2);

        FieldInfo field = obj2.GetType().GetField(fieldName);
        if (field == null)
        {
            throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");
        }
        field.SetValue(obj2, newValue);

        Marshal.StructureToPtr(obj2, structure.formatPtr, false);

        format = streamConfig.SetFormat(structure);
        if (format != 0)
        {
            Marshal.ThrowExceptionForHR(format);
        }
    }
    finally
    {
        DsUtils.FreeAMMediaType(structure);
        Marshal.FreeCoTaskMem(zero);
    }

    this.renderStream = false;
    this.renderGraph();
    this.startPreviewIfNeeded();
    return null;
}
static void WriteRiffHeader(BinaryWriter writer, WaveFormatEx format, uint dataSize)
{
    var formatSize = (uint)Marshal.SizeOf(typeof(WaveFormatEx));
    var totalSize = (uint)
        (
            formatSize +
            dataSize +
            waveId.Length +
            fmtId.Length +
            dataId.Length +
            4 +
            4
        );

    writer.Write(riffId);
    writer.Write(totalSize);
    writer.Write(waveId);

    writer.Write(fmtId);
    writer.Write(formatSize);
    writer.Write(format.wFormatTag);
    writer.Write(format.nChannels);
    writer.Write(format.nSamplesPerSec);
    writer.Write(format.nAvgBytesPerSec);
    writer.Write(format.nBlockAlign);
    writer.Write(format.wBitsPerSample);
    writer.Write(format.cbSize); // also write cbSize so the bytes written match the declared fmt chunk size

    writer.Write(dataId);
    writer.Write(dataSize);
}
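// Illustrative only: writing a complete PCM WAV file with WriteRiffHeader. The riffId/waveId/
// fmtId/dataId byte arrays are assumed to be defined elsewhere in the same class as the usual
// four-character chunk IDs, and the sample parameters below are invented.
static void WriteWaveFileExample(string path, byte[] pcmData)
{
    var format = new WaveFormatEx
    {
        wFormatTag = 1,               // WAVE_FORMAT_PCM
        nChannels = 1,
        nSamplesPerSec = 44100,
        wBitsPerSample = 16,
        nBlockAlign = 2,              // channels * bits / 8
        nAvgBytesPerSec = 88200,      // nSamplesPerSec * nBlockAlign
        cbSize = 0
    };

    using (var stream = File.Create(path))
    using (var writer = new BinaryWriter(stream))
    {
        WriteRiffHeader(writer, format, (uint)pcmData.Length);
        writer.Write(pcmData);
    }
}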
protected override HRESULT LoadTracks()
{
    uint size = 0;
    int trackSize = _cdrom.ReadTrack(_track, null, ref size, null);
    if (size > 0)
    {
        WaveFormatEx wfex = WaveFormatEx.Cdda;
        AMMediaType mt = new AMMediaType();
        mt.majorType = MediaType.Audio;
        mt.subType = MediaSubType.PCM;
        mt.sampleSize = wfex.nBlockAlign;
        mt.fixedSizeSamples = true;
        mt.SetFormat(wfex);
        m_Tracks.Add(new CdTrack(this, mt));
        m_llDataOffset = 0;
        m_rtDuration = (UNITS * (size - m_llDataOffset)) / wfex.nAvgBytesPerSec;
        return S_OK;
    }
    _buffer = new byte[size];
    return S_FALSE;
}
/// <summary>
/// Given a specific AMMediaType, we are asked if we support it
/// </summary>
/// <param name="dwInputStreamIndex">Stream number</param>
/// <param name="pmt">The AMMediaType to check</param>
/// <returns>S_OK if it is supported, DMOResults.E_InvalidType if not</returns>
override protected int InternalCheckInputType(int dwInputStreamIndex, AMMediaType pmt)
{
    int hr;

    // Check the format is defined
    if (pmt.majorType == MediaType.Audio &&
        (pmt.subType == MediaSubType.PCM) &&
        pmt.formatType == FormatType.WaveEx &&
        pmt.formatPtr != IntPtr.Zero)
    {
        WaveFormatEx wav = new WaveFormatEx();
        Marshal.PtrToStructure(pmt.formatPtr, wav);

        if (wav.nChannels == 2 && (wav.wBitsPerSample == 8 || wav.wBitsPerSample == 16))
        {
            hr = S_OK;
        }
        else
        {
            hr = DMOResults.E_InvalidType;
        }
    }
    else
    {
        hr = DMOResults.E_InvalidType;
    }

    return hr;
}
public void startWaveStream(WaveFormatEx waveHeader)
{
    mWaveDataSerializer = new Serializer();

    byte[] riffByte = new byte[4] { (byte)'R', (byte)'I', (byte)'F', (byte)'F' };
    mRiffMark = bytesToInt(riffByte);
    mFileSize = 0;

    byte[] waveByte = new byte[4] { (byte)'W', (byte)'A', (byte)'V', (byte)'E' };
    mWaveMark = bytesToInt(waveByte);

    byte[] fmtByte = new byte[4] { (byte)'f', (byte)'m', (byte)'t', (byte)' ' };
    mFmtMark = bytesToInt(fmtByte);
    mFmtChunkSize = 16;

    mFormatType = waveHeader.wFormatTag;
    mSoundChannels = waveHeader.nChannels;
    mSamplesPerSec = waveHeader.nSamplesPerSec;
    mAvgBytesPerSec = waveHeader.nAvgBytesPerSec;
    mBlockAlign = waveHeader.nBlockAlign;
    mBitsPerSample = waveHeader.wBitsPerSample;
    mOtherSize = waveHeader.cbSize;

    mDataMark = new byte[4] { (byte)'d', (byte)'a', (byte)'t', (byte)'a' };
}
public AudioTrackInfo(XmlNode element, IDictionary<string, string> streamAttributes, uint index, StreamInfo stream)
    : base(element, index, stream)
{
    WaveFormatEx waveFormatEx;
    if (base.Attributes.ContainsKey("WaveFormatEx"))
    {
        byte[] data = Parse.HexStringAttribute(base.Attributes, "WaveFormatEx");
        waveFormatEx = new WaveFormatEx(data);
    }
    else
    {
        ushort wFormatTag = Parse.UInt16Attribute(base.Attributes, "AudioTag");
        ushort nChannels = Parse.UInt16Attribute(base.Attributes, "Channels");
        uint nSamplesPerSec = Parse.UInt32Attribute(base.Attributes, "SamplingRate");
        uint num = Parse.UInt32Attribute(base.Attributes, "Bitrate");
        ushort nBlockAlign = Parse.UInt16Attribute(base.Attributes, "PacketSize");
        ushort wBitsPerSample = Parse.UInt16Attribute(base.Attributes, "BitsPerSample");
        byte[] decoderSpecificData = Parse.HexStringAttribute(base.Attributes, "CodecPrivateData");
        waveFormatEx = new WaveFormatEx(wFormatTag, nChannels, nSamplesPerSec, num / 8u, nBlockAlign, wBitsPerSample, decoderSpecificData);
    }

    byte[] audioInfoBytes = MkvUtils.GetAudioInfoBytes(
        waveFormatEx.nSamplesPerSec,
        (ulong)waveFormatEx.nChannels,
        (ulong)waveFormatEx.wBitsPerSample);

    switch (waveFormatEx.wFormatTag)
    {
        case 353:
        case 354:
        {
            base.TrackEntry = new TrackEntry(TrackType.Audio, audioInfoBytes, CodecID.A_MS, waveFormatEx.GetBytes());
            break;
        }

        case 255:
        case 5633:
        {
            base.TrackEntry = new TrackEntry(TrackType.Audio, audioInfoBytes, CodecID.A_AAC,
                GetAudioSpecificConfigBytes(waveFormatEx.nSamplesPerSec, (byte)waveFormatEx.nChannels));
            break;
        }

        case 1:
        {
            throw new Exception("Unsupported audio format: 'LPCM'!");
        }

        case 65534:
        {
            throw new Exception("Unsupported audio format: 'Vendor-extensible format'!");
        }

        default:
        {
            throw new Exception("Unsupported AudioTag: '" + waveFormatEx.wFormatTag + "'");
        }
    }

    if (base.Attributes.ContainsKey("Name"))
    {
        base.TrackEntry.Name = Parse.StringAttribute(streamAttributes, "Name");
    }

    base.TrackEntry.Language = LanguageID.Hungarian; // TODO: Make this configurable.
    base.Description = string.Format("{0} {1} channels {2} Hz @ {3} kbps", new object[]
    {
        GetCodecNameForAudioTag(waveFormatEx.wFormatTag),
        waveFormatEx.nChannels,
        waveFormatEx.nSamplesPerSec,
        base.Bitrate / 1000u
    });
}
public CdTrack(AudioCdFileParser _parser, AMMediaType mt)
    : base(_parser, TrackType.Audio)
{
    m_mt = mt;
    WaveFormatEx _wfx = m_mt;
    m_lSampleSize = _wfx.nAvgBytesPerSec / 2;
}
/// <summary>
/// Parses a stream list item.
/// </summary>
/// <param name="id">
/// A <see cref="ByteVector" /> object containing the item's ID.
/// </param>
/// <param name="data">
/// A <see cref="ByteVector" /> object containing the item's data.
/// </param>
/// <param name="start">
/// A <see cref="uint" /> value specifying the index in
/// <paramref name="data" /> at which the item data begins.
/// </param>
/// <param name="length">
/// A <see cref="uint" /> value specifying the length of the item.
/// </param>
public override void ParseItem(ByteVector id, ByteVector data, int start, int length)
{
    if (id == "strf")
    {
        Codec = new WaveFormatEx(data, start);
    }
}
/// <inheritdoc />
/// <summary>
/// </summary>
/// <param name="element"></param>
/// <param name="streamAttributes"></param>
/// <param name="position"></param>
/// <param name="stream"></param>
public AudioTrackInfo(XmlNode element, IDictionary<string, string> streamAttributes, uint position, StreamInfo stream)
    : base(element, position, stream)
{
    WaveFormatEx waveFormatEx;
    if (Attributes.ContainsKey("WaveFormatEx"))
    {
        waveFormatEx = new WaveFormatEx(Parser.HexStringAttribute(Attributes, "WaveFormatEx"));
    }
    else
    {
        waveFormatEx = new WaveFormatEx(
            Parser.UInt16Attribute(Attributes, "AudioTag"),
            Parser.UInt16Attribute(Attributes, "Channels"),
            Parser.UInt32Attribute(Attributes, "SamplingRate"),
            Parser.UInt32Attribute(Attributes, "Bitrate") / 8,
            Parser.UInt16Attribute(Attributes, "PacketSize"),
            Parser.UInt16Attribute(Attributes, "BitsPerSample"),
            Parser.HexStringAttribute(Attributes, "CodecPrivateData"));
    }

    var audioInfoBytes = MkvUtils.GetAudioInfoBytes(waveFormatEx.NSamplesPerSec, waveFormatEx.NChannels, waveFormatEx.WBitsPerSample);

    switch (waveFormatEx.WFormatTag)
    {
        case 0x0161:
        case 0x0162:
            TrackEntry = new TrackEntry(
                MkvTrackType.Audio,
                audioInfoBytes,
                MkvCodec.AudioMs,
                waveFormatEx.GetBytes());
            break;

        case 0x00FF:
        case 0x1601:
            TrackEntry = new TrackEntry(
                MkvTrackType.Audio,
                audioInfoBytes,
                MkvCodec.AudioAac,
                GetAudioSpecificConfigBytes(waveFormatEx.NSamplesPerSec, (byte)waveFormatEx.NChannels));
            break;

        default:
            throw new Exception();
    }

    if (Attributes.ContainsKey("Name"))
    {
        TrackEntry.Name = Parser.StringAttribute(streamAttributes, "Name");
    }

    TrackEntry.Language = LanguageId.Hungarian;
    Description = $"{GetCodecNameForAudioTag(waveFormatEx.WFormatTag)} {waveFormatEx.NChannels} channels {waveFormatEx.NSamplesPerSec} Hz @ {Bitrate / 1000u} kbps";
}
public void CreateAudio(uint sampleRate = 44100, uint channels = 1, uint blocks = 4, uint blockSamples = 512)
{
    try
    {
        Active = false;
        this.sampleRate = sampleRate;
        this.channels = channels;
        blockCount = blocks;
        this.blockSamples = blockSamples;
        blockFree = blockCount;
        blockCurrent = 0;
        blockMemory = null;
        waveHeaders = null;

        WaveFormatEx waveFormat = new WaveFormatEx
        {
            FormatTag = WaveFormatPcm,
            SamplesPerSec = (int)sampleRate,
            BitsPerSample = sizeof(short) * 8,
            Channels = (short)channels,
        };
        waveFormat.BlockAlign = (short)((waveFormat.BitsPerSample / 8) * waveFormat.Channels);
        waveFormat.AvgBytesPerSec = waveFormat.SamplesPerSec * waveFormat.BlockAlign;
        waveFormat.Size = (short)Marshal.SizeOf(waveFormat);

        waveProc = WaveOutProc;
        if (WaveOutOpen(out device, WaveMapper, waveFormat, waveProc, 0, CallbackFunction) != 0)
        {
            DestroyAudio();
        }

        blockMemory = new short[blockCount * blockSamples];
        waveHeaders = new WaveHdr[blockCount];

        unsafe
        {
            fixed (short* mem = blockMemory)
            {
                for (uint n = 0; n < blockCount; n++)
                {
                    waveHeaders[n].BufferLength = (int)(blockSamples * sizeof(short));
                    waveHeaders[n].Data = (IntPtr)(mem + (n * blockSamples));
                }
            }
        }

        Active = true;

        try
        {
            audioThread = new Thread(AudioThread);
            audioThread.Start();
        }
        catch (Exception ex)
        {
            Debug.Print(ex.Message);
        }
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.Print(ex.Message);
    }
}
protected object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
{
    if (streamConfig == null)
        throw new NotSupportedException();

    object returnValue = null;
    IntPtr pmt = IntPtr.Zero;
    AMMediaType mediaType = new AMMediaType();

    try
    {
        // Get the current format info
        int hr = streamConfig.GetFormat(out pmt);
        if (hr != 0)
            Marshal.ThrowExceptionForHR(hr);
        Marshal.PtrToStructure(pmt, mediaType);

        // The formatPtr member points to different structures
        // depending on the formatType
        object formatStruct;
        if (mediaType.formatType == FormatType.WaveEx)
            formatStruct = new WaveFormatEx();
        else if (mediaType.formatType == FormatType.VideoInfo)
            formatStruct = new VideoInfoHeader();
        else if (mediaType.formatType == FormatType.VideoInfo2)
            formatStruct = new VideoInfoHeader2();
        else
            throw new NotSupportedException("This device does not support a recognized format block.");

        // Retrieve the nested structure
        Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

        // Find the required field
        Type structType = formatStruct.GetType();
        FieldInfo fieldInfo = structType.GetField(fieldName);
        if (fieldInfo == null)
            throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");

        // Update the value of the field
        fieldInfo.SetValue(formatStruct, newValue);

        // PtrToStructure copies the data so we need to copy it back
        Marshal.StructureToPtr(formatStruct, mediaType.formatPtr, false);

        // Save the changes
        hr = streamConfig.SetFormat(mediaType);
        if (hr != 0)
            Marshal.ThrowExceptionForHR(hr);
    }
    finally
    {
        //DsUtils.FreeAMMediaType(mediaType);
        Marshal.FreeCoTaskMem(pmt);
    }

    return returnValue;
}
private string WaveFormatExString(AMMediaType mtype)
{
    WaveFormatEx wfex = new WaveFormatEx();
    Marshal.PtrToStructure(mtype.formatPtr, wfex);

    // nSamplesPerSec is in Hz, not kHz
    return "\t\t" + wfex.nSamplesPerSec.ToString() + " Hz \r\n" +
           "\t\t" + wfex.wBitsPerSample.ToString() + " bit \r\n" +
           "\t\t" + wfex.nChannels.ToString() + " channels \r\n";
}
public static string DumpAMMediaTypeInfo(AMMediaType mt)
{
    StringBuilder sb = new StringBuilder();
    sb.AppendLine(">>>>> AMMediaType Info");
    sb.AppendLine(DsToString.AMMediaTypeToString(mt));
    sb.AppendLine(string.Format("majorType: {0} subType:{1} formatType:{2}", mt.majorType, mt.subType, mt.formatType));

    if ((mt.majorType == DirectShowLib.MediaType.Video) && (mt.formatType == DirectShowLib.FormatType.VideoInfo))
    {
        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
        sb.AppendLine("VideoInfoHeader fields");
        sb.AppendLine("AvgTimePerFrame: " + vih.AvgTimePerFrame.ToString());
        sb.AppendLine("BitErrorRate: " + vih.BitErrorRate.ToString());
        sb.AppendLine("BitRate: " + vih.BitRate.ToString());

        BitmapInfoHeader bmi = vih.BmiHeader;
        sb.AppendLine("BitmapInfoHeader fields");
        sb.AppendLine("BitCount: " + bmi.BitCount.ToString());
        sb.AppendLine("ClrImportant: " + bmi.ClrImportant.ToString());
        sb.AppendLine("ClrUsed: " + bmi.ClrUsed.ToString());
        sb.AppendLine("Compression: " + bmi.Compression.ToString());
        sb.AppendLine("Height: " + bmi.Height.ToString());
        sb.AppendLine("Width: " + bmi.Width.ToString());
        sb.AppendLine("ImageSize: " + bmi.ImageSize.ToString());
        sb.AppendLine("Planes: " + bmi.Planes.ToString());
        sb.AppendLine("Size: " + bmi.Size.ToString());
        sb.AppendLine("XPelsPerMeter: " + bmi.XPelsPerMeter.ToString());
        sb.AppendLine("YPelsPerMeter: " + bmi.YPelsPerMeter.ToString());
    }

    // Only treat the format block as a WaveFormatEx when the format type actually says so.
    if ((mt.majorType == DirectShowLib.MediaType.Audio) && (mt.formatType == DirectShowLib.FormatType.WaveEx))
    {
        WaveFormatEx wfx = (WaveFormatEx)Marshal.PtrToStructure(mt.formatPtr, typeof(WaveFormatEx));
        sb.AppendLine("WaveFormatEx fields");
        sb.AppendLine("wFormatTag: " + wfx.wFormatTag.ToString());
        sb.AppendLine("cbSize: " + wfx.cbSize.ToString());
        sb.AppendLine("nAvgBytesPerSec: " + wfx.nAvgBytesPerSec.ToString());
        sb.AppendLine("nBlockAlign: " + wfx.nBlockAlign.ToString());
        sb.AppendLine("nChannels: " + wfx.nChannels.ToString());
        sb.AppendLine("nSamplesPerSec: " + wfx.nSamplesPerSec.ToString());
        sb.AppendLine("wBitsPerSample: " + wfx.wBitsPerSample.ToString());
    }

    sb.AppendLine("<<<<< AMMediaType Info");
    return sb.ToString();
}
// Set the sample rate and channel count on the audio capture pin
private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int iSampleRate, int iChannels)
{
    int hr;
    object o;
    AMMediaType media;

    // Find the stream config interface
    hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Audio, capFilter, typeof(IAMStreamConfig).GUID, out o);

    IAMStreamConfig audioStreamConfig = o as IAMStreamConfig;
    if (audioStreamConfig == null)
    {
        throw new Exception("Failed to get IAMStreamConfig");
    }

    // Get the existing format block
    hr = audioStreamConfig.GetFormat(out media);
    DsError.ThrowExceptionForHR(hr);

    // Copy out the WaveFormatEx
    WaveFormatEx i = new WaveFormatEx();
    Marshal.PtrToStructure(media.formatPtr, i);

    i.wFormatTag = 0x0001; // WAVE_FORMAT_PCM
    i.wBitsPerSample = 16;
    i.nSamplesPerSec = 44100;
    i.nChannels = m_Channels;
    i.nBlockAlign = 2;
    i.nAvgBytesPerSec = (i.nSamplesPerSec * i.nBlockAlign);
    i.cbSize = 0;

    // If overriding the sample rate, set it
    if (iSampleRate > 0)
    {
        i.nSamplesPerSec = iSampleRate;
    }

    // If overriding the channel count, set it
    if (iChannels > 0)
    {
        i.nChannels = (short)iChannels;
    }

    // Copy the media structure back
    Marshal.StructureToPtr(i, media.formatPtr, false);

    // Set the new format
    hr = audioStreamConfig.SetFormat(media);
    DsError.ThrowExceptionForHR(hr);

    DsUtils.FreeAMMediaType(media);
    media = null;
}
/// <summary>
/// Read the WaveFormatEx from the input file and find the place to start
/// writing data.
/// </summary>
private void LoadWFE()
{
    MMCKINFO mmckinfoParentIn = new MMCKINFO();
    MMCKINFO mmckinfoSubchunkIn = new MMCKINFO();

    int mm = MMIO.Seek(m_OutputFile, 0, MMIOSeekFlags.Set);
    if (mm < 0)
    {
        throw new Exception("seek failure");
    }

    // Check if this is a wave file
    mmckinfoParentIn.fccType = new FourCC("WAVE");
    MMIOError rc = MMIO.Descend(m_OutputFile, mmckinfoParentIn, null, RiffChunkFlags.FindRiff);
    MMIO.ThrowExceptionForError(rc);

    // Get format info
    mmckinfoSubchunkIn.ckid = new FourCC("fmt ");
    rc = MMIO.Descend(m_OutputFile, mmckinfoSubchunkIn, mmckinfoParentIn, RiffChunkFlags.FindChunk);
    MMIO.ThrowExceptionForError(rc);

    // Read the data format from the file (WaveFormatEx)
    IntPtr ip = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(WaveFormatEx)));
    try
    {
        rc = MMIO.Read(m_OutputFile, ip, mmckinfoSubchunkIn.ckSize);
        if (rc < 0)
        {
            throw new Exception("Read failed");
        }
        m_wfe = new WaveFormatEx();
        Marshal.PtrToStructure(ip, m_wfe);
    }
    finally
    {
        Marshal.FreeCoTaskMem(ip);
    }

    rc = MMIO.Ascend(m_OutputFile, mmckinfoSubchunkIn, 0);
    MMIO.ThrowExceptionForError(rc);

    // Find the data subchunk
    mmckinfoSubchunkIn.ckid = new FourCC("data");
    rc = MMIO.Descend(m_OutputFile, mmckinfoSubchunkIn, mmckinfoParentIn, RiffChunkFlags.FindChunk);
    MMIO.ThrowExceptionForError(rc);

    // Here is where data gets written
    m_DataOffset = MMIO.Seek(m_OutputFile, 0, MMIOSeekFlags.Cur);

    // Get the length of the audio
    m_AudioLength = mmckinfoSubchunkIn.ckSize;
}
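// Illustrative only: once LoadWFE has run, the audio duration can be derived from the data
// chunk length and the average byte rate read from the fmt chunk. This property is an invented
// helper, not part of the original class.
private TimeSpan AudioDuration
{
    get
    {
        // e.g. a 176,400-byte data chunk at 44,100 Hz / 16-bit / stereo
        // (nAvgBytesPerSec = 176,400) is exactly one second of audio.
        return TimeSpan.FromSeconds((double)m_AudioLength / m_wfe.nAvgBytesPerSec);
    }
}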
protected void ReleaseAudioSampleGrabber()
{
    try
    {
        if (sampleAnalyzerMustStop != null)
        {
            sampleAnalyzerMustStop.Set(); // This will cause the thread to stop
        }
        if (sampleAnalyzerThread != null)
        {
            sampleAnalyzerThread.Join(200);
        }

        IBaseFilter filter = sampleGrabber as IBaseFilter;
        if (filter != null)
        {
            IGraphBuilder graphBuilder = (mediaControl as IGraphBuilder);
            if (graphBuilder != null)
            {
                int hr = graphBuilder.RemoveFilter(filter);
                DsError.ThrowExceptionForHR(hr);
            }

            Marshal.ReleaseComObject(filter);
            sampleGrabber = null;
        }
    }
    catch (Exception ex)
    {
        Logger.LogException(ex);
    }

    if (rotEntry != null)
    {
        rotEntry.Dispose();
        rotEntry = null;
    }

    lock (_vuLock)
    {
        _vuMeterData = null;
    }

    lock (_waveformLock)
    {
        _waveformData = null;
    }

    lock (_spectrogramLock)
    {
        _spectrogramData = null;
    }

    _actualAudioFormat = null;
    sampleGrabberConfigured.Reset();
}
public override void LoadFromStream(Stream st)
{
    m_posStart = st.Position - 4;

    byte[] data = new byte[4];
    st.Read(data, 0, 4);
    m_nSize = BitConverter.ToInt32(data, 0);

    data = new byte[18];
    st.Read(data, 0, Math.Min(18, m_nSize));
    m_fmt = new WaveFormatEx(data);

    st.Position = m_posStart + m_nSize;
}
/// <summary>
/// Converts an audio media type to a WaveFormatEx structure.
/// </summary>
/// <param name="mediaType">A valid IMFMediaType instance.</param>
/// <param name="waveFormat">Receives a WaveFormatEx structure representing this audio media type.</param>
/// <returns>If this function succeeds, it returns S_OK. Otherwise, it returns another HResult member that describes the error.</returns>
public static HResult CreateWaveFormatEx(this IMFMediaType mediaType, out WaveFormatEx waveFormat)
{
    if (mediaType == null)
    {
        throw new ArgumentNullException("mediaType");
    }

    int structSize;
    return MFExtern.MFCreateWaveFormatExFromMFMediaType(mediaType, out waveFormat, out structSize, MFWaveFormatExConvertFlags.Normal);
}
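// Illustrative only: a sketch of calling the CreateWaveFormatEx extension against a freshly built
// Media Foundation audio type. The attribute setup below is an assumption (a real PCM type may
// need additional attributes before the conversion succeeds), not a recipe from the source.
static void CreateWaveFormatExExample()
{
    IMFMediaType mediaType;
    HResult hr = MFExtern.MFCreateMediaType(out mediaType);
    if (hr == HResult.S_OK)
    {
        mediaType.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Audio);
        mediaType.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MFMediaType.PCM);
        mediaType.SetUINT32(MFAttributesClsid.MF_MT_AUDIO_SAMPLES_PER_SECOND, 48000);
        mediaType.SetUINT32(MFAttributesClsid.MF_MT_AUDIO_NUM_CHANNELS, 2);
        mediaType.SetUINT32(MFAttributesClsid.MF_MT_AUDIO_BITS_PER_SAMPLE, 16);

        WaveFormatEx wfx;
        hr = mediaType.CreateWaveFormatEx(out wfx);
    }
}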
protected int CloseFile()
{
    lock (m_Lock)
    {
        if (m_Stream != null)
        {
            WaveFormatEx _wfx = Pins[0].CurrentMediaType;
            int _size;
            byte[] _buffer;
            IntPtr _ptr;

            // Rewind to the start of the file so the placeholder header written by OpenFile
            // is overwritten with the final sizes.
            m_Stream.Seek(0, SeekOrigin.Begin);

            OUTPUT_FILE_HEADER _header = new OUTPUT_FILE_HEADER();
            _header.dwRiff = RIFF_TAG;
            _header.dwFileSize = (uint)m_Stream.Length - 2 * 4;
            _header.dwWave = WAVE_TAG;
            _header.dwFormat = FMT__TAG;
            _header.dwFormatLength = (uint)Marshal.SizeOf(_wfx);

            _size = Marshal.SizeOf(_header);
            _buffer = new byte[_size];
            _ptr = Marshal.AllocCoTaskMem(_size);
            Marshal.StructureToPtr(_header, _ptr, true);
            Marshal.Copy(_ptr, _buffer, 0, _size);
            m_Stream.Write(_buffer, 0, _size);
            Marshal.FreeCoTaskMem(_ptr);

            _size = Marshal.SizeOf(_wfx);
            _buffer = new byte[_size];
            _ptr = Marshal.AllocCoTaskMem(_size);
            Marshal.StructureToPtr(_wfx, _ptr, true);
            Marshal.Copy(_ptr, _buffer, 0, _size);
            m_Stream.Write(_buffer, 0, _size);
            Marshal.FreeCoTaskMem(_ptr);

            OUTPUT_DATA_HEADER _data = new OUTPUT_DATA_HEADER();
            _data.dwData = DATA_TAG;
            _data.dwDataLength = (uint)(m_Stream.Length - Marshal.SizeOf(_header) - _header.dwFormatLength - Marshal.SizeOf(_data));

            _size = Marshal.SizeOf(_data);
            _buffer = new byte[_size];
            _ptr = Marshal.AllocCoTaskMem(_size);
            Marshal.StructureToPtr(_data, _ptr, true);
            Marshal.Copy(_ptr, _buffer, 0, _size);
            m_Stream.Write(_buffer, 0, _size);
            Marshal.FreeCoTaskMem(_ptr);

            m_Stream.Dispose();
            m_Stream = null;
        }
    }
    return NOERROR;
}
private static void AssignStreamInfoFields(InputstreamInfo streamInfo, ref WaveFormatEx wf, ref AMMediaType amt)
{
    wf.nChannels = (ushort)streamInfo.Channels;
    wf.nSamplesPerSec = (int)streamInfo.SampleRate;
    if (wf.nSamplesPerSec == 0)
    {
        wf.nSamplesPerSec = 48000; // Fallback if missing, otherwise audio decoder filter will not connect
    }
    wf.nAvgBytesPerSec = streamInfo.Bandwidth / 8;
    amt.sampleSize = streamInfo.Bandwidth;
}
private void TestFormat()
{
    int hr;
    WaveFormatEx pCurrent = new WaveFormatEx();

    hr = m_ams.GetFormat(pCurrent);
    MsError.ThrowExceptionForHR(hr);

    hr = m_ams.SetFormat(pCurrent);
    MsError.ThrowExceptionForHR(hr);
}
protected int OpenFile()
{
    if (m_Stream == null && m_sFileName != "" && Pins[0].IsConnected)
    {
        m_Stream = new FileStream(m_sFileName, FileMode.Create, FileAccess.Write, FileShare.Read);

        WaveFormatEx _wfx = Pins[0].CurrentMediaType;
        int _size;
        byte[] _buffer;
        IntPtr _ptr;

        OUTPUT_FILE_HEADER _header = new OUTPUT_FILE_HEADER();
        _header.dwRiff = RIFF_TAG;
        _header.dwFileSize = 0;
        _header.dwWave = WAVE_TAG;
        _header.dwFormat = FMT__TAG;
        _header.dwFormatLength = (uint)Marshal.SizeOf(_wfx);

        _size = Marshal.SizeOf(_header);
        _buffer = new byte[_size];
        _ptr = Marshal.AllocCoTaskMem(_size);
        Marshal.StructureToPtr(_header, _ptr, true);
        Marshal.Copy(_ptr, _buffer, 0, _size);
        m_Stream.Write(_buffer, 0, _size);
        Marshal.FreeCoTaskMem(_ptr);

        _size = Marshal.SizeOf(_wfx);
        _buffer = new byte[_size];
        _ptr = Marshal.AllocCoTaskMem(_size);
        Marshal.StructureToPtr(_wfx, _ptr, true);
        Marshal.Copy(_ptr, _buffer, 0, _size);
        m_Stream.Write(_buffer, 0, _size);
        Marshal.FreeCoTaskMem(_ptr);

        OUTPUT_DATA_HEADER _data = new OUTPUT_DATA_HEADER();
        _data.dwData = DATA_TAG;
        _data.dwDataLength = 0;

        _size = Marshal.SizeOf(_data);
        _buffer = new byte[_size];
        _ptr = Marshal.AllocCoTaskMem(_size);
        Marshal.StructureToPtr(_data, _ptr, true);
        Marshal.Copy(_ptr, _buffer, 0, _size);
        m_Stream.Write(_buffer, 0, _size);
        Marshal.FreeCoTaskMem(_ptr);

        return NOERROR;
    }
    return S_FALSE;
}
// P/Invoke declaration for the ACM format suggestion API (exported from msacm32.dll).
[DllImport("msacm32.dll")]
public static extern int acmFormatSuggest(IntPtr had, ref MPEGLayer3WaveFormat pwfxSrc, ref WaveFormatEx pwfxDst, uint cbwfxDst, uint fdwSuggest);
/// <summary>
/// Retrieves the value of one member of the IAMStreamConfig format block.
/// Helper function for several properties that expose
/// video/audio settings from IAMStreamConfig.GetFormat().
/// IAMStreamConfig.GetFormat() returns a AMMediaType struct.
/// AMMediaType.formatPtr points to a format block structure.
/// This format block structure may be one of several
/// types, the type being determined by AMMediaType.formatType.
/// </summary>
protected object getStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName)
{
    if (streamConfig == null)
        throw new NotSupportedException();

    assertStopped();
    derenderGraph();

    object returnValue = null;
#if DSHOWNET
    IntPtr pmt = IntPtr.Zero;
#endif
    AMMediaType mediaType = new AMMediaType();

    try
    {
        // Get the current format info
#if DSHOWNET
        int hr = streamConfig.GetFormat(out pmt);
#else
        int hr = streamConfig.GetFormat(out mediaType);
#endif
        if (hr != 0)
            Marshal.ThrowExceptionForHR(hr);
#if DSHOWNET
        Marshal.PtrToStructure(pmt, mediaType);
#endif

        // The formatPtr member points to different structures
        // depending on the formatType
        object formatStruct;
        if (mediaType.formatType == FormatType.WaveEx)
            formatStruct = new WaveFormatEx();
        else if (mediaType.formatType == FormatType.VideoInfo)
            formatStruct = new VideoInfoHeader();
        else if (mediaType.formatType == FormatType.VideoInfo2)
            formatStruct = new VideoInfoHeader2();
        else
            throw new NotSupportedException("This device does not support a recognized format block.");

        // Retrieve the nested structure
        Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

        // Find the required field
        Type structType = formatStruct.GetType();
        FieldInfo fieldInfo = structType.GetField(fieldName);
        if (fieldInfo == null)
            throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");

        // Extract the field's current value
        returnValue = fieldInfo.GetValue(formatStruct);
    }
    finally
    {
        DsUtils.FreeAMMediaType(mediaType);
#if DSHOWNET
        Marshal.FreeCoTaskMem(pmt);
#endif
    }

    renderGraph();
    startPreviewIfNeeded();

    return returnValue;
}
// P/Invoke declaration for opening an ACM conversion stream (exported from msacm32.dll).
[DllImport("msacm32.dll")]
public static extern int acmStreamOpen(out IntPtr phas, IntPtr had, ref MPEGLayer3WaveFormat pwfxSrc, ref WaveFormatEx pwfxDst, IntPtr pwfltr, IntPtr dwCallback, IntPtr dwInstance, uint fdwOpen);
/// <summary>
/// A bare bones WAV file header writer
/// </summary>
static void WriteWavHeader(Stream stream, int dataLength)
{
    //We need to use a memory stream because the BinaryWriter will close the underlying stream when it is closed
    using (var memStream = new MemoryStream(64))
    {
        const int cbFormat = 18; //sizeof(WAVEFORMATEX)
        var format = new WaveFormatEx
        {
            wFormatTag = 1,
            nChannels = 1,
            nSamplesPerSec = 16000,
            nAvgBytesPerSec = 32000,
            nBlockAlign = 2,
            wBitsPerSample = 16,
            cbSize = 0
        };

        using (var bw = new BinaryWriter(memStream))
        {
            //RIFF header. The RIFF chunk size is the file size minus 8: the WAVE tag (4),
            //the fmt chunk header (8) plus its 18-byte body, and the data chunk header (8)
            //plus the sample data.
            WriteString(memStream, "RIFF");
            bw.Write(dataLength + cbFormat + 20);
            WriteString(memStream, "WAVE");

            WriteString(memStream, "fmt ");
            bw.Write(cbFormat);

            //WAVEFORMATEX
            bw.Write(format.wFormatTag);
            bw.Write(format.nChannels);
            bw.Write(format.nSamplesPerSec);
            bw.Write(format.nAvgBytesPerSec);
            bw.Write(format.nBlockAlign);
            bw.Write(format.wBitsPerSample);
            bw.Write(format.cbSize);

            //data header
            WriteString(memStream, "data");
            bw.Write(dataLength);

            memStream.WriteTo(stream);
        }
    }
}
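// Illustrative only: using WriteWavHeader to wrap a block of 16 kHz, 16-bit mono PCM samples in
// a playable .wav file. The pcm byte array is assumed to already contain little-endian samples.
static void SaveWavExample(string path, byte[] pcm)
{
    using (var file = File.Create(path))
    {
        WriteWavHeader(file, pcm.Length);   // header first, sized for the payload
        file.Write(pcm, 0, pcm.Length);     // then the raw sample data
    }
}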
// P/Invoke declaration for opening a waveform-audio input device (exported from winmm.dll).
[DllImport("winmm.dll")]
public static extern int waveInOpen(out IntPtr phwi, uint uDeviceID, ref WaveFormatEx pwfx, IntPtr dwCallback, IntPtr dwCallbackInstance, int fdwOpen);
/// <summary>
/// Set the media type based on values from BASS.DLL
/// </summary>
/// <param name="psc">The IGenericSampleConfig onto which we set the mediatype</param>
public override void SetMediaType(IGenericSampleConfig psc)
{
    int lFrequency = 0;
    int lVolume = 0;
    int lPan = 0;

    WaveFormatEx w = new WaveFormatEx();
    BASS_CHANNELINFO lInfo = new BASS_CHANNELINFO();
    Bass.BASS_ChannelGetInfo(m_fChan, lInfo);

    if ((lInfo.flags & (int)BASSStream.BASS_SAMPLE_8BITS) == (int)BASSStream.BASS_SAMPLE_8BITS)
    {
        w.wBitsPerSample = 8;
    }
    else
    {
        w.wBitsPerSample = 16;
    }

    Bass.BASS_ChannelGetAttributes(m_fChan, ref lFrequency, ref lVolume, ref lPan);

    // PCM carries no extra format data, so cbSize stays 0; the full struct size is used
    // below when allocating and describing the format block.
    int formatSize = Marshal.SizeOf(typeof(WaveFormatEx));
    w.cbSize = 0;
    w.nChannels = (short)lInfo.chans;
    w.nSamplesPerSec = lFrequency;
    w.wFormatTag = 1; // WAVE_FORMAT_PCM

    m_BytesPerSample = (short)(w.nChannels * (w.wBitsPerSample / 8));
    m_Frequency = lFrequency;
    m_Channels = lInfo.chans;

    w.nBlockAlign = (short)m_BytesPerSample;
    w.nAvgBytesPerSec = w.nSamplesPerSec * w.nBlockAlign;

    AMMediaType amt = new AMMediaType();
    amt.majorType = MediaType.Audio;
    amt.subType = MediaSubType.PCM;
    amt.formatType = FormatType.WaveEx;
    amt.formatPtr = Marshal.AllocCoTaskMem(formatSize);
    amt.formatSize = formatSize;
    Marshal.StructureToPtr(w, amt.formatPtr, false);

    int hr = psc.SetMediaTypeEx(amt, BUFSIZE);
    DsError.ThrowExceptionForHR(hr);

    DsUtils.FreeAMMediaType(amt);
}
public override void ParseItem(ByteVector id, ByteVector data, int start, int length)
{
    if (id == "strf")
        Codec = new WaveFormatEx(data, start);
}
/// <summary>
/// Set the value of one member of the IAMStreamConfig format block.
/// Helper function for several properties that expose
/// video/audio settings from IAMStreamConfig.GetFormat().
/// IAMStreamConfig.GetFormat() returns a AMMediaType struct.
/// AMMediaType.formatPtr points to a format block structure.
/// This format block structure may be one of several
/// types, the type being determined by AMMediaType.formatType.
/// </summary>
protected object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
{
    if (streamConfig == null)
        throw new NotSupportedException();

    assertStopped();
    derenderGraph();

    object returnValue = null;
#if DSHOWNET
    IntPtr pmt = IntPtr.Zero;
#endif
    AMMediaType mediaType = new AMMediaType();

    try
    {
        // Get the current format info
#if DSHOWNET
        int hr = streamConfig.GetFormat(out pmt);
#else
        int hr = streamConfig.GetFormat(out mediaType);
#endif
        if (hr != 0)
            Marshal.ThrowExceptionForHR(hr);
#if DSHOWNET
        Marshal.PtrToStructure(pmt, mediaType);
#endif

        // The formatPtr member points to different structures
        // depending on the formatType
        object formatStruct;
        if (mediaType.formatType == FormatType.WaveEx)
            formatStruct = new WaveFormatEx();
        else if (mediaType.formatType == FormatType.VideoInfo)
            formatStruct = new VideoInfoHeader();
        else if (mediaType.formatType == FormatType.VideoInfo2)
            formatStruct = new VideoInfoHeader2();
        else
            throw new NotSupportedException("This device does not support a recognized format block.");

        // Retrieve the nested structure
        Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

        // Find the required field
        Type structType = formatStruct.GetType();
        FieldInfo fieldInfo = structType.GetField(fieldName);
        if (fieldInfo == null)
            throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");

        // Update the value of the field
        fieldInfo.SetValue(formatStruct, newValue);

        // Update fields that may depend on specific values of other attributes
        if (mediaType.formatType == FormatType.WaveEx)
        {
            WaveFormatEx waveFmt = formatStruct as WaveFormatEx;
            waveFmt.nBlockAlign = (short)(waveFmt.nChannels * waveFmt.wBitsPerSample / 8);
            waveFmt.nAvgBytesPerSec = waveFmt.nBlockAlign * waveFmt.nSamplesPerSec;
        }

        // PtrToStructure copies the data so we need to copy it back
        Marshal.StructureToPtr(formatStruct, mediaType.formatPtr, false);

        // Save the changes
        hr = streamConfig.SetFormat(mediaType);
        if (hr != 0)
            Marshal.ThrowExceptionForHR(hr);
    }
    finally
    {
        DsUtils.FreeAMMediaType(mediaType);
#if DSHOWNET
        Marshal.FreeCoTaskMem(pmt);
#endif
    }

    renderGraph();
    startPreviewIfNeeded();

    return returnValue;
}
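// Illustrative only: the derived-field update above in concrete numbers, so it is clear why
// nBlockAlign and nAvgBytesPerSec are recomputed after a field such as nChannels changes.
// For CD-quality PCM (44,100 Hz, 2 channels, 16 bits per sample):
short nChannels = 2, wBitsPerSample = 16;
int nSamplesPerSec = 44100;
short nBlockAlign = (short)(nChannels * wBitsPerSample / 8);   // = 4 bytes per sample frame
int nAvgBytesPerSec = nBlockAlign * nSamplesPerSec;            // = 176,400 bytes per second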