/// <summary>
/// Builds the DirectShow audio capture graph:
/// capture device source filter -> sample grabber (PCM), then registers
/// the sample callback. Reads _capDevice for the device moniker and stores
/// the created objects in graphBuilder, captureGraphBuilder, captureFilter,
/// sampleGrabber and _sampler. Any failure is shown in a message box and
/// swallowed (the graph is left partially built in that case).
/// </summary>
public void CreateGraph()
{
    try
    {
        int result = 0;

        // Create the filter graph manager.
        graphBuilder = new FilterGraph() as IFilterGraph2;

        // Create the capture graph builder and attach it to the filter graph manager.
        captureGraphBuilder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
        result = captureGraphBuilder.SetFiltergraph(graphBuilder);
        DsError.ThrowExceptionForHR(result);

        // Create the source filter bound to the capture device moniker.
        captureFilter = null;
        result = graphBuilder.AddSourceFilterForMoniker(
            _capDevice.Mon, null, _capDevice.Name, out captureFilter);
        DsError.ThrowExceptionForHR(result);

        // Create the sample grabber and obtain its base-filter interface.
        sampleGrabber = new SampleGrabber() as ISampleGrabber;
        IBaseFilter grabFilter = sampleGrabber as IBaseFilter;

        // Restrict the grabber to PCM audio.
        AMMediaType amMediaType = new AMMediaType();
        amMediaType.majorType = MediaType.Audio;
        amMediaType.subType = MediaSubType.PCM;
        amMediaType.formatPtr = IntPtr.Zero;
        result = sampleGrabber.SetMediaType(amMediaType);
        DsError.ThrowExceptionForHR(result);
        DsUtils.FreeAMMediaType(amMediaType);

        // Configure continuous grabbing with buffered samples.
        // BUG FIX: the original discarded SetOneShot's HRESULT and re-checked
        // the previous call's stale 'result'; capture the return value here.
        result = sampleGrabber.SetOneShot(false);
        DsError.ThrowExceptionForHR(result);
        result = sampleGrabber.SetBufferSamples(true);
        DsError.ThrowExceptionForHR(result);

        // Query the actual capture format negotiated by the device.
        object o;
        result = captureGraphBuilder.FindInterface(
            DsGuid.FromGuid(PinCategory.Capture), DsGuid.FromGuid(MediaType.Audio),
            captureFilter, typeof(IAMStreamConfig).GUID, out o);
        DsError.ThrowExceptionForHR(result);

        IAMStreamConfig config = o as IAMStreamConfig;
        AMMediaType media;
        result = config.GetFormat(out media);
        DsError.ThrowExceptionForHR(result);

        WaveFormatEx wf = new WaveFormatEx();
        Marshal.PtrToStructure(media.formatPtr, wf);

        // Create the sampler sized for the device's format.
        CaptureOption opt = new CaptureOption(wf);
        _sampler = new DSAudioSampler(opt);

        DsUtils.FreeAMMediaType(media);
        Marshal.ReleaseComObject(config);

        // Register the sample callback (1 = BufferCB variant).
        result = sampleGrabber.SetCallback(_sampler, 1);
        DsError.ThrowExceptionForHR(result);

        // Add the grabber filter to the graph.
        result = graphBuilder.AddFilter(grabFilter, "Audio Grab Filter");
        DsError.ThrowExceptionForHR(result);

        // Connect the capture pin to the sample grabber filter.
        result = captureGraphBuilder.RenderStream(
            DsGuid.FromGuid(PinCategory.Capture), DsGuid.FromGuid(MediaType.Audio),
            captureFilter, null, grabFilter);
        DsError.ThrowExceptionForHR(result);
    }
    catch (Exception ex)
    {
        System.Windows.MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Set the sample rate and channel count on the capture pin.
/// Defaults are 16-bit PCM at 44100 Hz with m_Channels channels;
/// iSampleRate / iChannels values greater than zero override them.
/// </summary>
/// <param name="capGraph">Capture graph builder used to locate the pin.</param>
/// <param name="capFilter">Capture source filter whose output pin is configured.</param>
/// <param name="iSampleRate">Desired sample rate, or 0 to keep the default.</param>
/// <param name="iChannels">Desired channel count, or 0 to keep the default.</param>
private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int iSampleRate, int iChannels)
{
    int hr;
    object o;
    AMMediaType media;

    // Find the stream config interface on the capture pin.
    hr = capGraph.FindInterface(
        PinCategory.Capture, MediaType.Audio, capFilter,
        typeof(IAMStreamConfig).GUID, out o);
    // BUG FIX: this HRESULT was previously ignored.
    DsError.ThrowExceptionForHR(hr);

    IAMStreamConfig audioStreamConfig = o as IAMStreamConfig;
    if (audioStreamConfig == null)
    {
        throw new Exception("Failed to get IAMStreamConfig");
    }

    // Get the existing format block.
    hr = audioStreamConfig.GetFormat(out media);
    DsError.ThrowExceptionForHR(hr);

    // Copy out the wave format header.
    WaveFormatEx i = new WaveFormatEx();
    Marshal.PtrToStructure(media.formatPtr, i);

    i.wFormatTag = 0x0001; // WAVE_FORMAT_PCM
    i.wBitsPerSample = 16;
    i.nSamplesPerSec = 44100;
    i.nChannels = m_Channels;
    i.cbSize = 0;

    // Apply overrides BEFORE computing the derived fields.
    if (iSampleRate > 0)
    {
        i.nSamplesPerSec = iSampleRate;
    }
    if (iChannels > 0)
    {
        i.nChannels = (short)iChannels;
    }

    // BUG FIX: derive nBlockAlign / nAvgBytesPerSec from the final values.
    // The original hard-coded nBlockAlign = 2 (wrong for 16-bit stereo,
    // where it must be 4) and computed nAvgBytesPerSec before the
    // sample-rate/channel overrides were applied, leaving stale values.
    // For PCM: nBlockAlign = nChannels * wBitsPerSample / 8,
    //          nAvgBytesPerSec = nSamplesPerSec * nBlockAlign.
    i.nBlockAlign = (short)(i.nChannels * (i.wBitsPerSample / 8));
    i.nAvgBytesPerSec = i.nSamplesPerSec * i.nBlockAlign;

    // Copy the media structure back.
    Marshal.StructureToPtr(i, media.formatPtr, false);

    // Set the new format.
    hr = audioStreamConfig.SetFormat(media);
    DsError.ThrowExceptionForHR(hr);

    DsUtils.FreeAMMediaType(media);
    media = null;
}
/// <summary>
/// Report the list of supported output types.
/// </summary>
/// <param name="dwOutputStreamIndex">Output stream number</param>
/// <param name="dwTypeIndex">Zero based index into array of supported types</param>
/// <param name="pmt">Supported type</param>
/// <returns>S_OK for successful completion, DMOResults.E_NoMoreItems if
/// dwTypeIndex is out of range, or DMOResults.E_TypeNotSet if the input
/// type has not been set yet.</returns>
protected override int InternalGetOutputType(int dwOutputStreamIndex, int dwTypeIndex, out AMMediaType pmt)
{
    // Until the input type is set we cannot describe our output.
    AMMediaType inputType = InputType(0);
    if (inputType == null)
    {
        pmt = null;
        return DMOResults.E_TypeNotSet;
    }

    // There is exactly one supported output type.
    if (dwTypeIndex != 0)
    {
        pmt = null;
        return DMOResults.E_NoMoreItems;
    }

    // Clone the input type, then adjust its wave header for mono output.
    pmt = MoCloneMediaType(inputType);

    WaveFormatEx wave = new WaveFormatEx();
    Marshal.PtrToStructure(pmt.formatPtr, wave);

    // Half as many channels means half the block size and byte rate.
    wave.nChannels = 1;
    wave.nBlockAlign /= 2;
    wave.nAvgBytesPerSec /= 2;

    Marshal.StructureToPtr(wave, pmt.formatPtr, false);
    return S_OK;
}
/// <summary>
/// Captures the sample rate and channel count from a wave format header.
/// </summary>
/// <param name="wf">Source wave format descriptor.</param>
public CaptureOption(WaveFormatEx wf)
{
    _samplePerSec = wf.nSamplesPerSec;
    _channelNum = wf.nChannels;
}
/// <summary>
/// What size (and alignment) do we require of our output buffer?
/// One output sample is one block of the negotiated wave format.
/// </summary>
/// <param name="dwOutputStreamIndex">Stream number</param>
/// <param name="pcbSize">returns the buffer size needed</param>
/// <param name="pcbAlignment">Returns the alignment needed (don't use zero!)</param>
/// <returns>S_OK</returns>
protected override int InternalGetOutputSizeInfo(int dwOutputStreamIndex, out int pcbSize, out int pcbAlignment)
{
    pcbAlignment = 1;

    // Read the negotiated output wave format and size the buffer to
    // one block of it.
    // BUG FIX: removed an unused VideoInfoHeader allocation left over
    // from the video sample this code was adapted from.
    AMMediaType pmt = OutputType(dwOutputStreamIndex);
    WaveFormatEx wav = new WaveFormatEx();
    Marshal.PtrToStructure(pmt.formatPtr, wav);

    pcbSize = wav.nBlockAlign;

    return S_OK;
}
/// <summary>
/// Given a specific AMMediaType, we are asked if we support it.
/// </summary>
/// <param name="dwOutputStreamIndex">Stream number</param>
/// <param name="pmt">The AMMediaType to check</param>
/// <returns>S_OK if it is supported, DMOResults.E_InvalidType if not</returns>
protected override int InternalCheckOutputType(int dwOutputStreamIndex, AMMediaType pmt)
{
    // No output type can be accepted until the input type is set.
    AMMediaType inputType = InputType(0);
    if (inputType == null)
    {
        return DMOResults.E_InvalidType;
    }

    // The output must be PCM audio described by a WaveFormatEx block.
    bool isPcmWave = pmt.majorType == MediaType.Audio
        && pmt.subType == MediaSubType.PCM
        && pmt.formatType == FormatType.WaveEx;
    if (!isPcmWave)
    {
        return DMOResults.E_InvalidType;
    }

    // Parse the wave headers of the proposed output and the current input.
    WaveFormatEx proposed = new WaveFormatEx();
    Marshal.PtrToStructure(pmt.formatPtr, proposed);

    WaveFormatEx current = new WaveFormatEx();
    Marshal.PtrToStructure(inputType.formatPtr, current);

    // Output must be one channel at the same sample depth, which means
    // half the input's nAvgBytesPerSec and half its nBlockAlign.
    bool matches = proposed.nChannels == 1
        && proposed.wBitsPerSample == current.wBitsPerSample
        && proposed.nAvgBytesPerSec == current.nAvgBytesPerSec / 2
        && proposed.nBlockAlign == current.nBlockAlign / 2;

    return matches ? S_OK : DMOResults.E_InvalidType;
}
/// <summary>
/// Given a specific AMMediaType, we are asked if we support it.
/// </summary>
/// <param name="dwInputStreamIndex">Stream number</param>
/// <param name="pmt">The AMMediaType to check</param>
/// <returns>S_OK if it is supported, DMOResults.E_InvalidType if not</returns>
protected override int InternalCheckInputType(int dwInputStreamIndex, AMMediaType pmt)
{
    // Require PCM audio with a populated WaveFormatEx block.
    bool hasWaveFormat = pmt.majorType == MediaType.Audio
        && pmt.subType == MediaSubType.PCM
        && pmt.formatType == FormatType.WaveEx
        && pmt.formatPtr != IntPtr.Zero;
    if (!hasWaveFormat)
    {
        return DMOResults.E_InvalidType;
    }

    WaveFormatEx wave = new WaveFormatEx();
    Marshal.PtrToStructure(pmt.formatPtr, wave);

    // Accept only stereo input at 8 or 16 bits per sample.
    bool supported = wave.nChannels == 2
        && (wave.wBitsPerSample == 8 || wave.wBitsPerSample == 16);

    return supported ? S_OK : DMOResults.E_InvalidType;
}
/// <summary>
/// Our chance to allocate any storage we may need.
/// Resets per-stream state and caches the input's bits-per-sample.
/// </summary>
/// <returns>S_OK</returns>
protected override int InternalAllocateStreamingResources()
{
    // Reinitialize the streaming state variables.
    InternalDiscontinuity(0);

    // Cache the sample depth from the negotiated input type.
    AMMediaType inputType = InputType(0);
    WaveFormatEx wave = new WaveFormatEx();
    Marshal.PtrToStructure(inputType.formatPtr, wave);

    m_Bits = wave.wBitsPerSample;

    return S_OK;
}
/// <summary>
/// Set the value of one member of the IAMStreamConfig format block.
/// Helper function for several properties that expose
/// video/audio settings from IAMStreamConfig.GetFormat().
/// IAMStreamConfig.GetFormat() returns an AMMediaType struct whose
/// formatPtr points to a format block structure; the block's concrete
/// type is determined by AMMediaType.formatType.
/// </summary>
/// <param name="streamConfig">Stream configuration interface to modify.</param>
/// <param name="fieldName">Name of the field inside the format block structure.</param>
/// <param name="newValue">Value to store into that field.</param>
/// <returns>Always null (kept for signature compatibility with callers).</returns>
private object setStreamConfigSetting(IAMStreamConfig streamConfig, string fieldName, object newValue)
{
    if (streamConfig == null)
        throw new NotSupportedException();

    // The graph must be stopped and torn down before changing formats.
    assertStopped();
    derenderGraph();

    object returnValue = null;
    // BUG FIX: the original pre-allocated 'new AMMediaType()' here, which
    // GetFormat's out parameter immediately overwrote (leaked allocation).
    AMMediaType mediaType = null;

    try
    {
        // Get the current format info.
        Marshal.ThrowExceptionForHR(streamConfig.GetFormat(out mediaType));

        // The formatPtr member points to different structures
        // depending on the formatType.
        object formatStruct;
        if (mediaType.formatType == FormatType.WaveEx)
            formatStruct = new WaveFormatEx();
        else if (mediaType.formatType == FormatType.VideoInfo)
            formatStruct = new VideoInfoHeader();
        else if (mediaType.formatType == FormatType.VideoInfo2)
            formatStruct = new VideoInfoHeader2();
        else
            throw new NotSupportedException("This device does not support a recognized format block.");

        // Retrieve the nested structure.
        Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

        // Find the required field via reflection.
        Type structType = formatStruct.GetType();
        System.Reflection.FieldInfo fieldInfo = structType.GetField(fieldName);
        if (fieldInfo == null)
            throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");

        // Update the value of the field.
        fieldInfo.SetValue(formatStruct, newValue);

        // PtrToStructure copied the data, so we need to copy it back.
        Marshal.StructureToPtr(formatStruct, mediaType.formatPtr, false);

        // Save the changes.
        Marshal.ThrowExceptionForHR(streamConfig.SetFormat(mediaType));
    }
    finally
    {
        // DsUtils.FreeAMMediaType tolerates null, so this is safe even
        // if GetFormat failed before assigning mediaType.
        DsUtils.FreeAMMediaType(mediaType);
    }

    // NOTE(review): if SetFormat throws, the graph is left derendered and
    // the exception propagates to the caller — confirm callers rebuild.
    renderGraph();
    startPreviewIfNeeded();

    return returnValue;
}