private void InitCaptureInterface() {
    // Tear down any previously created COM objects first (harmless when nothing exists yet).
    Cleanup(true);

    // Filter graph manager that will host the capture graph.
    this.fmc = new FilgraphManagerClass();

    // Create the capture-graph builder and the sample-grabber COM objects from their CLSIDs.
    Type t = Type.GetTypeFromCLSID(CLSID_CaptureGraphBuilder2);
    this.icgb = (ICaptureGraphBuilder2)Activator.CreateInstance(t);
    t = Type.GetTypeFromCLSID(CLSID_SampleGrabber);
    this.isg = (ISampleGrabber)Activator.CreateInstance(t);

    // Source filter: the capture device currently selected in the device combo box.
    this.sf = (IBaseFilter)this.SourceFilterList[this.cbxDevice.SelectedIndex];

    // The sample grabber viewed as a plain filter so it can participate in the graph.
    this.sgf = (IBaseFilter)this.isg;

    // Query the source filter's capture pin for IAMStreamConfig (stream format control).
    object o = null;
    this.icgb.RemoteFindInterface(ref PIN_CATEGORY_CAPTURE, ref MEDIATYPE_Video, sf, ref IID_IAMStreamConfig, out o);
    this.iamsc = (IAMStreamConfig)o;

    // Ask the sample grabber for uncompressed RGB24 video frames.
    this.SGMediaType = new _AMMediaType();
    this.SGMediaType.majortype = MEDIATYPE_Video;
    this.SGMediaType.subtype = MEDIASUBTYPE_RGB24;
    this.SGMediaType.formattype = FORMAT_VideoInfo;
    this.isg.SetMediaType(ref SGMediaType);
    this.isg.SetOneShot(0);        // grab continuously, not just a single frame
    this.isg.SetBufferSamples(1);  // buffer samples so they can be read back later
}
public override bool RemoveCapability(ICapability capability) {
    // Let the base class do its bookkeeping first; only tear down on success.
    bool removed = base.RemoveCapability(capability);

    if (removed) {
        // Stop and dismantle the filter graph, if one was ever built.
        if (fgm != null) {
            fgm.Stop();
            FilterGraph.RemoveFromRot(rotID);
            FilterGraph.RemoveAllFilters(fgm);
            fgm = null;
        }

        // Detach and dispose the event monitor so it stops referencing us.
        if (fgmEventMonitor != null) {
            fgmEventMonitor.FgmEvent -= new FgmEventMonitor.FgmEventHandler(FgmEvent);
            fgmEventMonitor.Dispose();
            fgmEventMonitor = null;
        }

        wmf = null;
    }

    return removed;
}
protected CaptureGraph(FilterInfo fiSource) {
    try {
        // Build the filter graph manager and cache its interface views,
        // registering it in the ROT so GraphEdit can attach for debugging.
        fgm = new FilgraphManagerClass();
        iFG = (IFilterGraph)fgm;
        iGB = (IGraphBuilder)fgm;
        rotID = FilterGraph.AddToRot(iGB);

        // Wrap the requested device in a SourceFilter and hook it into the graph.
        source = (SourceFilter)Filter.CreateFilter(fiSource);
        iGB.AddFilter(source.BaseFilter, source.FriendlyName);
        source.AddedToGraph(fgm);

        // RtpRenderer flags come from app.config; absent or unparsable values leave 0.
        this.rtpRendererFlags = 0;
        string flagSetting = ConfigurationManager.AppSettings[AppConfig.MDS_RtpRendererFlags];
        if (!String.IsNullOrEmpty(flagSetting) && !byte.TryParse(flagSetting, out rtpRendererFlags)) {
            rtpRendererFlags = 0;
        }
    } catch (Exception) {
        // Never leave a half-built graph behind; rethrow so the caller sees the failure.
        Cleanup();
        throw;
    }
}
/// <summary>
/// Walks the filters in a graph and returns the first one whose name matches
/// <paramref name="name"/>, or null when no such filter exists.
/// </summary>
/// <param name="fgm">Filter graph manager whose filters are enumerated.</param>
/// <param name="name">Exact filter name to look for.</param>
/// <returns>The matching filter, or null when not found.</returns>
public static IBaseFilter FindBaseFilterByName(FilgraphManagerClass fgm, string name) {
    IFilterGraph iFG = (IFilterGraph)fgm;
    IEnumFilters iEnum;
    iFG.EnumFilters(out iEnum);

    IBaseFilter iBF = null;
    uint fetched = 0;
    iEnum.Next(1, out iBF, out fetched);

    while (fetched == 1) {
        _FilterInfo fi;
        iBF.QueryFilterInfo(out fi);
        // Filter names are opaque identifiers, so compare ordinally; the old
        // culture-sensitive String.Compare default could mis-match under some
        // locales (CA1310).
        if (String.Equals(name, fi.achName, StringComparison.Ordinal)) {
            return iBF;
        }
        iEnum.Next(1, out iBF, out fetched);
    }

    return null;
}
/// <summary>
/// Creation of the fgm and the adding / removing of filters needs to happen on the
/// same thread. So make sure it all happens on the UI thread.
/// </summary>
private void _RtpStream_FirstFrameReceived() {
    lock (fgmLock) {
        // Rebuild the graph from scratch on every first-frame notification.
        DisposeFgm();
        Debug.Assert(fgm == null);

        // Create the DirectShow filter graph manager and register it in the ROT.
        fgm = new FilgraphManagerClass();
        IGraphBuilder iGB = (IGraphBuilder)fgm;
        rotID = FilterGraph.AddToRot((IGraphBuilder)fgm);

        // Source filter fed by the RTP stream.
        IBaseFilter bfSource = RtpSourceClass.CreateInstance();
        ((MSR.LST.MDShow.Filters.IRtpSource)bfSource).Initialize(rtpStream);
        iGB.AddFilter(bfSource, "RtpSource");

        // Let DirectShow auto-complete the chain from the source's output pin.
        iGB.Render(Filter.GetPin(bfSource, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0));

        DisableDXVA(fgm);

        // Render the video inside of the form.
        iVW = (IVideoWindow)fgm;

        // Get the correct ratio to use for the video stretching.
        // I would expect the fgm to always be castable to this, but I simply don't trust DShow.
        IBasicVideo iBV = fgm as IBasicVideo;
        if (iBV != null) {
            int vidWidth, vidHeight;
            iBV.GetVideoSize(out vidWidth, out vidHeight);
            vidSrcRatio = (double)vidHeight / (double)vidWidth;
        }

        // Remove the border from the default DShow renderer UI.
        int ws = WindowStyle;
        ws = ws & ~(0x00800000);   // Remove WS_BORDER
        ws = ws & ~(0x00400000);   // Remove WS_DLGFRAME
        WindowStyle = ws;
        iVW = null;

        // Clear the "remote video stopped" flag and refresh the form's UI.
        uiState &= ~(int)FAudioVideo.UIState.RemoteVideoStopped;
        if (form != null) {
            ((FAudioVideo)form).UpdateVideoUI(uiState);
        }

        // FirstFrameReceived interprets fgmState as the *desired* state for the fgm.
        // Because ResumePlayingVideo won't actually start if the state is already
        // Running, we change it to Stopped so that it will start.
        if (IsPlaying && fgmState == FilterGraph.State.Running) {
            fgmState = FilterGraph.State.Stopped;
            ResumePlayingVideo();
        }
    }
}
private void createObjects() {
    // Build the filter graph manager and expose it through the interfaces we need.
    FilgraphManagerClass graph = new FilgraphManagerClass();
    filter = graph;
    mediaPos = (IMediaPosition)graph;
    mediaEvent = (IMediaEventEx)graph;

    // Ask DirectShow to post graph events to our window as WM_DS_NOTIFY messages.
    mediaEvent.SetNotifyWindow(wnd.Handle.ToInt32(), WM_DS_NOTIFY, DS_NOTIFY_CODE);
}
private void InitializeFgm() {
    // Fresh graph manager, registered in the ROT so GraphEdit can inspect it.
    fgm = new FilgraphManagerClass();
    iGB = (IGraphBuilder)fgm;
    rotID = FilterGraph.AddToRot(iGB);

    // The capture graph builder needs to know which graph it is driving.
    iCGB2 = CaptureGraphBuilder2Class.CreateInstance();
    iCGB2.SetFiltergraph(iGB);
}
// CF3, CF2, CF1
private void Cleanup() {
    // CF3: stop consuming received data and dismantle the playback graph.
    if (fgm != null) {
        // Release any thread blocked waiting on the next frame first;
        // elsewhere in this codebase Stop is noted to hang otherwise.
        rtpStream.UnblockNextFrame();
        fgm.Stop();
        FilterGraph.RemoveAllFilters(fgm);
        fgm = null;
    }

    // CF2: release the capture device.
    DisposeDevice();

    // CF1: leave the RTP session last.
    LeaveRtpSession();
}
/// <summary>
/// Disable DirectX Video Acceleration.
/// </summary>
/// <param name="fgm">Graph whose WM video decoder should have DXVA turned off.</param>
public static void DisableDXVA(FilgraphManagerClass fgm) {
    // Locate the WM video decoder; without it there is no DXVA to switch off.
    IBaseFilter decoder = Filter.FindBaseFilterByName(fgm, "WMVideo Decoder DMO");
    if (decoder == null) {
        return;
    }

    IFilterGraph graph = (IFilterGraph)fgm;
    IEnumFilters enumerator;
    graph.EnumFilters(out enumerator);

    uint got;
    IBaseFilter current;
    enumerator.Next(1, out current, out got);

    // Assumption inherited from the original code: the enumerator yields the
    // renderer and the other filters downstream of the decoder first (they were
    // added most recently). This may not hold for graphs with multiple branches.
    List<IBaseFilter> pendingRemoval = new List<IBaseFilter>();
    while (got == 1 && (Filter.Name(current) != Filter.Name(decoder))) {
        pendingRemoval.Add(current);
        enumerator.Next(1, out current, out got);
    }
    foreach (IBaseFilter victim in pendingRemoval) {
        graph.RemoveFilter(victim);
    }

    // Ask the decoder to turn DXVA off through its property bag. Failures are
    // logged and ignored — the decoder might be WM7 instead of WM9.
    try {
        IPropertyBag bag = (IPropertyBag)decoder;
        object off = false;
        bag.Write(WM9PropList.g_wszWMVCDXVAEnabled, ref off);
    } catch (Exception e) {
        Console.WriteLine(e.ToString());
    }

    // Rebuild the removed chain from the decoder's output pin.
    ((IGraphBuilder)fgm).Render(Filter.GetPin(decoder, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0));
}
public override bool RemoveCapability(ICapability capability) {
    // Let the base class unregister the capability; only tear down on success.
    bool ret = base.RemoveCapability(capability);

    if (ret) {
        ssc = null;

        // Guard against a graph that was never built (or already torn down);
        // calling Stop on a null fgm threw NullReferenceException here. The
        // sibling RemoveCapability overrides already guard the same way.
        if (fgm != null) {
            fgm.Stop();
            FilterGraph.RemoveAllFilters(fgm);
            fgm = null;
        }
    }

    return ret;
}
public override bool RemoveCapability(ICapability capability) {
    bool ret = base.RemoveCapability(capability);

    if (ret) {
        ssc = null;

        // fgm can legitimately be null if the graph was never initialized or was
        // already cleaned up; the unconditional Stop() here was an NRE waiting to
        // happen. Other RemoveCapability overrides in this file guard identically.
        if (fgm != null) {
            fgm.Stop();
            FilterGraph.RemoveAllFilters(fgm);
            fgm = null;
        }
    }

    return (ret);
}
void InitInterfaces() {
    try {
        // One graph manager object backs all four DirectShow control interfaces.
        Filgraph = new FilgraphManagerClass();
        MediaControl = Filgraph as IMediaControl;
        MediaEvent = Filgraph as IMediaEventEx;
        MediaPosition = Filgraph as IMediaPosition;
        VideoWindow = Filgraph as IVideoWindow;
    } catch (Exception ex) {
        // Include the actual failure reason; the previous bare "Couldn't start"
        // message made COM registration problems impossible to diagnose.
        MessageBox.Show("Couldn't start: " + ex.Message);
    }
}
/// <summary>
/// Disable DirectX Video Acceleration.
/// </summary>
/// <param name="fgm">Graph whose WM video decoder should have DXVA turned off.</param>
public static void DisableDXVA(FilgraphManagerClass fgm)
{
    // Retrieve WM decoder so we can turn off DXVA; nothing to do if it is absent.
    IBaseFilter decoder = Filter.FindBaseFilterByName(fgm, "WMVideo Decoder DMO");
    if(decoder == null)
    {
        return;
    }

    // Remove the renderer and everything back to the decoder.
    IFilterGraph iFG = (IFilterGraph)fgm;
    IEnumFilters iEnum;
    iFG.EnumFilters(out iEnum);
    uint fetched;
    IBaseFilter iBF;
    iEnum.Next(1, out iBF, out fetched);
    List<IBaseFilter> toRemove = new List<IBaseFilter>();

    // A base assumption is that the first items returned by the enumerator
    // are the video renderer and other filters upstream to the video decoder.
    // This should be true if these filters were the most recently added.
    // Otherwise it would break in some contexts, including graphs with multiple branches.
    while(fetched == 1 && (Filter.Name(iBF) != Filter.Name(decoder)))
    {
        toRemove.Add(iBF);
        iEnum.Next(1, out iBF, out fetched);
    }
    foreach (IBaseFilter ibf in toRemove)
    {
        iFG.RemoveFilter(ibf);
    }

    // Try turning off DXVA via the decoder's property bag (boxed false).
    // Failures are logged and ignored: the decoder might be WM7 instead of WM9.
    try
    {
        IPropertyBag iPB = (IPropertyBag)decoder;
        object o = false;
        iPB.Write(WM9PropList.g_wszWMVCDXVAEnabled, ref o);
    }
    catch(Exception e){Console.WriteLine(e.ToString());}

    // Render again from the decoder's output pin to rebuild the removed chain.
    ((IGraphBuilder)fgm).Render(Filter.GetPin(decoder, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0));
}
private void Cleanup() {
    if (fgm != null) {
        // Stop playback, empty the graph, and drop the ROT registration
        // before letting go of the interface references.
        fgm.Stop();
        FilterGraph.RemoveAllFilters(fgm);
        FilterGraph.RemoveFromRot(rotID);

        iGB = null;
        iFG = null;
        fgm = null;
    }

    // Release the individual filter wrappers regardless of graph state.
    DisposeSource();
    DisposeCompressor();
    DisposeRenderer();
}
public static void Main(string[] args) {
    FilgraphManagerClass graphClass = null;
    try {
        // Build a playback graph for the file, run it, and block until playback ends.
        graphClass = new FilgraphManagerClass();
        graphClass.RenderFile(@"C:\Users\Ryuzaki\Desktop\AviTokaiNoHitorigurashi.avi");
        graphClass.Run();

        int evCode;
        graphClass.WaitForCompletion(-1, out evCode);
    } catch (Exception ex) {
        // Report the failure rather than swallowing it silently; a missing file
        // or codec previously made the program exit with no output at all.
        Console.Error.WriteLine(ex);
    } finally {
        // Release the underlying COM object instead of merely dropping the reference.
        if (graphClass != null) {
            System.Runtime.InteropServices.Marshal.ReleaseComObject(graphClass);
            graphClass = null;
        }
    }
}
void CloseInterfaces() {
    if (MediaControl != null) {
        MediaControl.StopWhenReady();
    }

    // Detach the notification window under its own null check: MediaEvent can be
    // null even when MediaControl is not (InitInterfaces uses 'as' casts that may
    // fail), and the previous unconditional call could throw NullReferenceException.
    if (MediaEvent != null) {
        MediaEvent.SetNotifyWindow((int)0, cWindowStyle.WM_GRAPHNOTIFY, (int)0);
    }

    MediaControl = null;
    MediaEvent = null;
    MediaPosition = null;
    VideoWindow = null;

    if (Filgraph != null) {
        // Release the underlying COM object, not just the managed reference.
        System.Runtime.InteropServices.Marshal.ReleaseComObject(this.Filgraph);
    }
    Filgraph = null;
}
/// <summary>
/// Creation of the fgm and the adding / removing of filters needs to happen on the
/// same thread. So make sure it all happens on the UI thread.
/// </summary>
private void _RtpStream_FirstFrameReceived() {
    lock (fgmLock) {
        // Rebuild the audio graph from scratch on every first-frame notification.
        DisposeFgm();
        Debug.Assert(fgm == null);

        fgm = new FilgraphManagerClass();
        IGraphBuilder iGB = (IGraphBuilder)fgm;
        rotID = FilterGraph.AddToRot(iGB);

        // Source filter fed by the RTP stream.
        IBaseFilter rtpSource = RtpSourceClass.CreateInstance();
        ((MSR.LST.MDShow.Filters.IRtpSource)rtpSource).Initialize(rtpStream);
        iGB.AddFilter(rtpSource, "RtpSource");

        // Add the chosen audio renderer.
        FilterInfo fi = SelectedSpeaker();
        iGB.AddFilter(Filter.CreateBaseFilter(fi), fi.Name);

        // Auto-complete the chain from the source's output pin.
        iGB.Render(Filter.GetPin(rtpSource, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0));

        // Read the current DShow volume and map it onto the UI scale.
        // NOTE(review): mapping appears to convert the decibel-style Volume value
        // to a linear scale — confirm against the volume-slider code.
        iBA = (IBasicAudio)fgm;
        currentVolume = (int)Math.Round(Math.Pow(10.0, (2.0 * (double)(iBA.Volume + 10000)) / 10000.0));
        iBA = null;

        // Clear the "remote audio stopped" flag and refresh the form's UI.
        uiState &= ~(int)FAudioVideo.UIState.RemoteAudioStopped;
        if (form != null) {
            ((FAudioVideo)form).UpdateAudioUI(uiState);
        }

        // FirstFrameReceived interprets fgmState as the *desired* state for the fgm.
        // Because ResumePlayingAudio won't actually start if the state is already
        // Running, we change it to Stopped so that it will start.
        if (IsPlaying && fgmState == FilterGraph.State.Running) {
            fgmState = FilterGraph.State.Stopped;
            ResumePlayingAudio();
        }
    }
}
// Disable DirectX Video Acceleration on the graph's WM video decoder.
// Variant that removes upstream filters in place (Reset + re-enumerate)
// instead of collecting them first.
public static void DisableDXVA(FilgraphManagerClass fgm) {
    // Retrieve WM decoder so we can turn off DXVA; nothing to do if absent.
    IBaseFilter decoder = Filter.FindBaseFilterByName(fgm, "WMVideo Decoder DMO");
    if (decoder == null) {
        return;
    }

    // Remove the renderer and everything back to the decoder.
    IFilterGraph iFG = (IFilterGraph)fgm;
    IEnumFilters iEnum;
    iFG.EnumFilters(out iEnum);
    uint fetched;
    IBaseFilter iBF;
    iEnum.Next(1, out iBF, out fetched);
    while (fetched == 1 && (Filter.Name(iBF) != Filter.Name(decoder))) {
        // Remove filter from graph.
        iFG.RemoveFilter(iBF);
        // Because the state of the enumerator has changed (item was removed
        // from the collection), restart it and fetch the new first filter.
        iEnum.Reset();
        iEnum.Next(1, out iBF, out fetched);
    }

    // Try turning off DXVA through the decoder's property bag (boxed false).
    // Failures are logged and ignored: the decoder might be WM7 instead of WM9.
    try {
        IPropertyBag iPB = (IPropertyBag)decoder;
        object o = false;
        iPB.Write(WM9PropList.g_wszWMVCDXVAEnabled, ref o);
    } catch (Exception e) {
        Console.WriteLine(e.ToString());
    }

    // Render again from the decoder's output pin to rebuild the removed chain.
    ((IGraphBuilder)fgm).Render(Filter.GetPin(decoder, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0));
}
/// <summary>
/// Removes all filters from a graph.
///
/// Important Note:
/// If this method goes into an infinite loop, it is most likely because you are trying to
/// remove a filter from a different thread than the one that added the filter, and your
/// app was launched with the STAThreaded attribute on the Main method.
///
/// Either use MTAThreaded attribute, or remove filter from the same thread it was added.
/// </summary>
public static void RemoveAllFilters(FilgraphManagerClass fgm) {
    IFilterGraph graph = (IFilterGraph)fgm;
    IEnumFilters enumerator;
    graph.EnumFilters(out enumerator);

    for (;;) {
        uint got;
        IBaseFilter head;
        enumerator.Next(1, out head, out got);
        if (got != 1) {
            break;
        }

        graph.RemoveFilter(head);

        // Removing the filter invalidated the enumerator; restart it so the
        // next iteration fetches the new first filter.
        enumerator.Reset();
    }
}
/// <summary>
/// Creates and initializes the Fgm, retrieves interfaces from it, starts the monitoring
/// thread etc. the first time. Stops it subsequent times.
/// </summary>
private void InitializeFgm() {
    if (fgm != null) {
        // Subsequent calls simply halt the existing graph.
        fgm.Stop();
        return;
    }

    // First call: build the graph manager, wire up the capture graph builder,
    // and register the graph in the ROT.
    fgm = new FilgraphManagerClass();
    iGB = (IGraphBuilder)fgm;
    iCGB2 = MDShow.CaptureGraphBuilder2Class.CreateInstance();
    iCGB2.SetFiltergraph(iGB);
    rotID = FilterGraph.AddToRot(iGB);

    // Start monitoring events raised on the fgm.
    fgmEventMonitor = new FgmEventMonitor((IMediaEvent)fgm);
    fgmEventMonitor.FgmEvent += new FgmEventMonitor.FgmEventHandler(FgmEvent);
}
private void CreateReceivingGraph() {
    // Poll the stream for data with our own (DShow) thread via NextFrame,
    // instead of receiving data through the FrameReceived event.
    rtpStream.IsUsingNextFrame = true;

    // Assemble the receiving filter graph around an RtpSource filter.
    fgm = new FilgraphManagerClass();
    IGraphBuilder builder = (IGraphBuilder)fgm;

    IBaseFilter sourceFilter = RtpSourceClass.CreateInstance();
    ((MSR.LST.MDShow.Filters.IRtpSource)sourceFilter).Initialize(rtpStream);
    builder.AddFilter(sourceFilter, "RtpSource");

    // Let DirectShow auto-complete the chain from the source's output pin.
    builder.Render(Filter.GetPin(sourceFilter, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0));

    VideoWindow();
    fgm.Run();
}
protected CaptureGraph(FilterInfo fiSource) {
    try {
        // Stand up the graph manager, cache its interface views, and register
        // it in the ROT for external debugging tools.
        fgm = new FilgraphManagerClass();
        iFG = (IFilterGraph)fgm;
        iGB = (IGraphBuilder)fgm;
        rotID = FilterGraph.AddToRot(iGB);

        // Wrap the requested device in a SourceFilter and insert it into the graph.
        source = (SourceFilter)Filter.CreateFilter(fiSource);
        iGB.AddFilter(source.BaseFilter, source.FriendlyName);
        source.AddedToGraph(fgm);
    } catch (Exception) {
        // Never leave a partially constructed graph behind.
        Cleanup();
        throw;
    }
}
protected CaptureGraph(FilterInfo fiSource) {
    try {
        // Graph manager setup: create it, keep both interface views,
        // and add the graph to the Running Object Table.
        fgm = new FilgraphManagerClass();
        iFG = (IFilterGraph)fgm;
        iGB = (IGraphBuilder)fgm;
        rotID = FilterGraph.AddToRot(iGB);

        // Source filter setup: create from the device info, add to the graph,
        // then give the wrapper a chance to finish its post-add initialization.
        source = (SourceFilter)Filter.CreateFilter(fiSource);
        iGB.AddFilter(source.BaseFilter, source.FriendlyName);
        source.AddedToGraph(fgm);
    } catch (Exception) {
        // Tear down whatever was built so far, then surface the error.
        Cleanup();
        throw;
    }
}
/// <summary>
/// Removes all filters from a graph.
///
/// Important Note:
/// If this method goes into an infinite loop, it is most likely because you are trying to
/// remove a filter from a different thread than the one that added the filter, and your
/// app was launched with the STAThreaded attribute on the Main method.
///
/// Either use MTAThreaded attribute, or remove filter from the same thread it was added.
/// </summary>
public static void RemoveAllFilters(FilgraphManagerClass fgm) {
    IFilterGraph filterGraph = (IFilterGraph)fgm;
    IEnumFilters filterEnum;
    filterGraph.EnumFilters(out filterEnum);

    uint count;
    IBaseFilter current;
    filterEnum.Next(1, out current, out count);
    while (count == 1) {
        filterGraph.RemoveFilter(current);

        // Removal invalidates the enumerator; Reset and fetch the new head.
        filterEnum.Reset();
        filterEnum.Next(1, out current, out count);
    }
}
/// <summary>
/// Remove all filters from a graph. If the graph is running, we will stop it first.
/// </summary>
/// <returns>Always true (even when stopping the graph failed).</returns>
public bool Teardown() {
    // Nothing to tear down.
    if (fgm == null) {
        playing = false;
        return(true);
    }

    if (playing) {
        try {
            //If stream is paused Stop will hang unless we first call
            //RtpStream.UnblockNextFrame
            if (this.stream != null) {
                this.stream.UnblockNextFrame();
            }

            // Drop the graph's Running Object Table registration, if any.
            if (rotnum != 0) {
                FilterGraph.RemoveFromRot((uint)rotnum);
            }

            fgm.Stop();
        } catch (Exception e) {
            // Best effort: a failed Stop is logged and teardown continues.
            Debug.WriteLine("Failed to stop graph: " + e.ToString());
        }
        playing = false;
    }

    FilterGraph.RemoveAllFilters(fgm);
    fgm = null;
    return(true);
}
protected CaptureGraph(FilterInfo fiSource) {
    try {
        // Graph manager plus the two interface views we keep around;
        // the ROT registration lets GraphEdit attach to this graph.
        fgm = new FilgraphManagerClass();
        iFG = (IFilterGraph)fgm;
        iGB = (IGraphBuilder)fgm;
        rotID = FilterGraph.AddToRot(iGB);

        // Insert the capture source into the graph and let it finish its setup.
        source = (SourceFilter)Filter.CreateFilter(fiSource);
        iGB.AddFilter(source.BaseFilter, source.FriendlyName);
        source.AddedToGraph(fgm);

        // RtpRenderer flags are read from configuration; anything absent or
        // unparsable leaves the flags at zero.
        this.rtpRendererFlags = 0;
        string configured = ConfigurationManager.AppSettings[AppConfig.MDS_RtpRendererFlags];
        if (!String.IsNullOrEmpty(configured)) {
            if (!byte.TryParse(configured, out rtpRendererFlags)) {
                rtpRendererFlags = 0;
            }
        }
    } catch (Exception) {
        // Dispose partial state, then let the caller see the failure.
        Cleanup();
        throw;
    }
}
private void Cleanup() {
    if (fgm != null) {
        // Halt the graph, strip its filters, and unregister it from the ROT
        // before releasing the cached interface references.
        fgm.Stop();
        FilterGraph.RemoveAllFilters(fgm);
        FilterGraph.RemoveFromRot(rotID);

        iFG = null;
        iGB = null;
        fgm = null;
    }

    // Filter wrappers are disposed whether or not a graph existed.
    DisposeSource();
    DisposeCompressor();
    DisposeRenderer();
}
public override bool RemoveCapability(ICapability capability) {
    bool removed = base.RemoveCapability(capability);
    if (!removed) {
        return removed;
    }

    // Stop and empty the graph before dropping our reference to it.
    if (fgm != null) {
        fgm.Stop();
        FilterGraph.RemoveFromRot(rotID);
        FilterGraph.RemoveAllFilters(fgm);
        fgm = null;
    }

    // Unhook and dispose the event monitor so it cannot call back into us.
    if (fgmEventMonitor != null) {
        fgmEventMonitor.FgmEvent -= new FgmEventMonitor.FgmEventHandler(FgmEvent);
        fgmEventMonitor.Dispose();
        fgmEventMonitor = null;
    }

    wmf = null;
    return removed;
}
/// <summary>
/// Creation of the fgm and the adding / removing of filters needs to happen on the
/// same thread. So make sure it all happens on the UI thread.
/// </summary>
private void _RtpStream_FirstFrameReceived() {
    lock(fgmLock) {
        // Rebuild the audio graph from scratch for each first-frame notification.
        DisposeFgm();
        Debug.Assert(fgm == null);

        fgm = new FilgraphManagerClass();
        IGraphBuilder iGB = (IGraphBuilder)fgm;
        rotID = FilterGraph.AddToRot(iGB);

        // Source filter fed by the RTP stream.
        IBaseFilter rtpSource = RtpSourceClass.CreateInstance();
        ((MSR.LST.MDShow.Filters.IRtpSource)rtpSource).Initialize(rtpStream);
        iGB.AddFilter(rtpSource, "RtpSource");

        // Add the chosen audio renderer.
        FilterInfo fi = SelectedSpeaker();
        iGB.AddFilter(Filter.CreateBaseFilter(fi), fi.Name);

        // Auto-complete the chain from the source's output pin.
        iGB.Render(Filter.GetPin(rtpSource, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0));

        // Read the current DShow volume and map it onto the UI scale.
        // NOTE(review): conversion looks like a decibel-to-linear mapping of
        // IBasicAudio.Volume — confirm against the volume-slider code.
        iBA = (IBasicAudio)fgm;
        currentVolume = (int)Math.Round(Math.Pow(10.0, (2.0*(double)(iBA.Volume+10000))/10000.0));
        iBA = null;

        // Clear the "remote audio stopped" flag and refresh the form's UI.
        uiState &= ~(int)FAudioVideo.UIState.RemoteAudioStopped;
        if(form != null) {
            ((FAudioVideo)form).UpdateAudioUI(uiState);
        }

        // FirstFrameReceived interprets fgmState as the *desired* state for the fgm.
        // Because ResumePlayingAudio won't actually start if the state is already
        // Running, we change it to Stopped so that it will start.
        if(IsPlaying && fgmState == FilterGraph.State.Running) {
            fgmState = FilterGraph.State.Stopped;
            ResumePlayingAudio();
        }
    }
}
/// <summary>
/// Creation of the fgm and the adding / removing of filters needs to happen on the
/// same thread. So make sure it all happens on the UI thread.
/// </summary>
private void _RtpStream_FirstFrameReceived() {
    lock(fgmLock) {
        // Rebuild the video graph from scratch for each first-frame notification.
        DisposeFgm();
        Debug.Assert(fgm == null);

        // Create the Direct Show filter graph manager and register it in the ROT.
        fgm = new FilgraphManagerClass();
        IGraphBuilder iGB = (IGraphBuilder)fgm;
        rotID = FilterGraph.AddToRot((IGraphBuilder)fgm);

        // Source filter fed by the RTP stream.
        IBaseFilter bfSource = RtpSourceClass.CreateInstance();
        ((MSR.LST.MDShow.Filters.IRtpSource)bfSource).Initialize(rtpStream);
        iGB.AddFilter(bfSource, "RtpSource");

        // Auto-complete the chain from the source's output pin.
        iGB.Render(Filter.GetPin(bfSource, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0));

        DisableDXVA(fgm);

        // Render the video inside of the form.
        iVW = (IVideoWindow)fgm;

        // Get the correct ratio to use for the video stretching.
        // I would expect the fgm to always be castable to this, but I simply don't trust DShow.
        IBasicVideo iBV = fgm as IBasicVideo;
        if (iBV != null) {
            int vidWidth, vidHeight;
            iBV.GetVideoSize(out vidWidth, out vidHeight);
            vidSrcRatio = (double)vidHeight / (double)vidWidth;
        }

        // Remove the border from the default DShow renderer UI.
        int ws = WindowStyle;
        ws = ws & ~(0x00800000);   // Remove WS_BORDER
        ws = ws & ~(0x00400000);   // Remove WS_DLGFRAME
        WindowStyle = ws;
        iVW = null;

        // Clear the "remote video stopped" flag and refresh the form's UI.
        uiState &= ~(int)FAudioVideo.UIState.RemoteVideoStopped;
        if(form != null) {
            ((FAudioVideo)form).UpdateVideoUI(uiState);
        }

        // FirstFrameReceived interprets fgmState as the *desired* state for the fgm.
        // Because ResumePlayingVideo won't actually start if the state is already
        // Running, we change it to Stopped so that it will start.
        if(IsPlaying && fgmState == FilterGraph.State.Running) {
            fgmState = FilterGraph.State.Stopped;
            ResumePlayingVideo();
        }
    }
}
// Disable DirectX Video Acceleration on the graph's WM video decoder.
// Variant that removes upstream filters in place (Reset + re-enumerate).
public static void DisableDXVA(FilgraphManagerClass fgm) {
    // Retrieve WM decoder so we can turn off DXVA; nothing to do if absent.
    IBaseFilter decoder = Filter.FindBaseFilterByName(fgm, "WMVideo Decoder DMO");
    if(decoder == null) {
        return;
    }

    // Remove the renderer and everything back to the decoder.
    IFilterGraph iFG = (IFilterGraph)fgm;
    IEnumFilters iEnum;
    iFG.EnumFilters(out iEnum);
    uint fetched;
    IBaseFilter iBF;
    iEnum.Next(1, out iBF, out fetched);
    while(fetched == 1 && (Filter.Name(iBF) != Filter.Name(decoder))) {
        // Remove filter from graph.
        iFG.RemoveFilter(iBF);
        // Because the state of the enumerator has changed (item was removed
        // from the collection), restart it and fetch the new first filter.
        iEnum.Reset();
        iEnum.Next(1, out iBF, out fetched);
    }

    // Try turning off DXVA through the decoder's property bag (boxed false).
    // Failures are logged and ignored: the decoder might be WM7 instead of WM9.
    try {
        IPropertyBag iPB = (IPropertyBag)decoder;
        object o = false;
        iPB.Write(WM9PropList.g_wszWMVCDXVAEnabled, ref o);
    }
    catch(Exception e){Console.WriteLine(e.ToString());}

    // Render again from the decoder's output pin to rebuild the removed chain.
    ((IGraphBuilder)fgm).Render(Filter.GetPin(decoder, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0));
}
// CF3, CF2, CF1
private void Cleanup() {
    // CF3: quit processing received data and dismantle the graph.
    if (fgm != null) {
        // Wake any thread blocked in NextFrame before stopping the graph;
        // elsewhere in this codebase Stop is noted to hang otherwise.
        rtpStream.UnblockNextFrame();
        fgm.Stop();
        FilterGraph.RemoveAllFilters(fgm);
        fgm = null;
    }

    DisposeDevice();    // CF2
    LeaveRtpSession();  // CF1
}
/// <summary>
/// Build a graph with sampleGrabber. Render it, and get the media type.
/// </summary>
/// <param name="payload">Must be dynamicVideo or dynamicAudio; selects the grabber media type.</param>
/// <param name="newStream">RTP stream that feeds the graph; must have a nonzero SSRC.</param>
/// <returns>True on success; false with errorMsg set on invalid input or a graph-building failure.</returns>
public bool Build(PayloadType payload, RtpStream newStream) {
    this.stream = newStream;
    this.ssrc = newStream.SSRC;

    // Required as of RC3: we pull data via NextFrame rather than events.
    this.stream.IsUsingNextFrame = true;

    // Reject a zero SSRC or any payload other than dynamic video/audio.
    if ((ssrc == 0) || !((payload == PayloadType.dynamicVideo) || (payload == PayloadType.dynamicAudio))) {
        errorMsg = "Invalid inputs to build method.";
        return(false);
    }

    fgm = new FilgraphManagerClass();
    MSR.LST.MDShow.IBaseFilter bfSource = null;
    IGraphBuilder iGB = (IGraphBuilder)fgm;
    //if (false)
    //    rotnum = FilterGraph.AddToRot(iGB); //AddToRot(iGB);

    try {
        // RTP source filter wired to the stream.
        bfSource = RtpSourceClass.CreateInstance();
        ((MSR.LST.MDShow.Filters.IRtpSource)bfSource).Initialize(this.stream);
        iGB.AddFilter(bfSource, "RtpSource");
        MSR.LST.MDShow.IPin sourceOutput = Filter.GetPin(bfSource, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0);

        // Add SampleGrabber filter.
        MSR.LST.MDShow.IBaseFilter bfGrabber = SampleGrabberClass.CreateInstance();
        iGB.AddFilter(bfGrabber, "Grabber");
        UW.CSE.MDShow.ISampleGrabber sgGrabber = (UW.CSE.MDShow.ISampleGrabber)bfGrabber;

        // Set the media type the grabber should deliver.
        UW.CSE.MDShow._AMMediaType mt = new UW.CSE.MDShow._AMMediaType();
        if (payload == PayloadType.dynamicVideo) {
            mt.majortype = MediaType.MajorType.MEDIATYPE_Video;
            // PRI2: RGB24 seems to work for all video. We used YUY2 in the past,
            // but that won't work for screen streaming. Could use more testing.
            //mt.subtype = MediaType.SubType.MEDIASUBTYPE_YUY2;
            mt.subtype = MediaType.SubType.MEDIASUBTYPE_RGB24;
        } else {
            mt.majortype = MediaType.MajorType.MEDIATYPE_Audio;
            mt.subtype = MediaType.SubType.MEDIASUBTYPE_PCM;
        }
        sgGrabber.SetMediaType(ref mt);

        // Add samplegrabber callback.
        // 0 is sampleCB, 1 is bufferCB. Only bufferCB is actually returning data so far.
        sgGrabber.SetCallback(callBack, 1);
        sgGrabber.SetOneShot(0);         // grab continuously
        sgGrabber.SetBufferSamples(0);   // don't buffer internally; we use the callback

        iGB.Render(sourceOutput);

        // Capture the negotiated media type for later use by the caller.
        UW.CSE.MDShow._AMMediaType uwmt = new UW.CSE.MDShow._AMMediaType();
        sgGrabber.GetConnectedMediaType(ref uwmt);
        connectedMT = copy_AMMediaType(uwmt);
    } catch (Exception e) {
        // Record the failure for the UI and the event log, then report failure.
        errorMsg = e.Message;
        Debug.WriteLine("Exception while building graph: " + e.ToString());
        eventLog.WriteEntry("Exception while building graph: " + e.ToString(), EventLogEntryType.Error, 1001);
        return(false);
    }
    return(true);
}