/// <summary>
/// Disconnect and remove all filters downstream of the capture devices and
/// release the preview window. Opposite of renderGraph(); some device
/// properties can only be set while the output pins are unconnected.
/// </summary>
protected void derenderGraph()
{
    // Stop the graph if it is running.
    if (this.mediaControl != null)
    {
        this.mediaControl.Stop();
    }

    // Release the preview video window.
    if (this.videoWindow != null)
    {
        this.videoWindow.put_Visible(0);
        this.videoWindow.put_Owner(IntPtr.Zero);
        this.videoWindow = null;
    }

    // FIX: the null check used the PreviewWindow property while the
    // unsubscribe used the previewWindow field; check the field itself
    // so the guard matches the member being accessed.
    if (this.previewWindow != null)
    {
        this.previewWindow.Resize -= new EventHandler(this.onPreviewWindowResize);
    }

    if (this.graphState >= GraphState.Rendered)
    {
        this.graphState = GraphState.Created;
        this.isCaptureRendered = false;
        this.isPreviewRendered = false;

        // Disconnect everything downstream of the devices; keep the
        // compressors in the graph when present.
        if (this.videoDeviceFilter != null)
        {
            this.removeDownstream(this.videoDeviceFilter, this.videoCompressor == null);
        }
        if (this.audioDeviceFilter != null)
        {
            this.removeDownstream(this.audioDeviceFilter, this.audioCompressor == null);
        }

        this.muxFilter = null;
        this.fileWriterFilter = null;
        this.baseGrabFlt = null;
    }
}
/// <summary>
/// Stop capturing. The capture stream is dropped but the preview is kept
/// alive: the graph is re-rendered for preview only and preview restarted.
/// </summary>
public void Stop()
{
    // Stop the running graph (if any).
    if (this.mediaControl != null) { this.mediaControl.Stop(); }

    // We no longer want the capture stream, but we do want the preview.
    this.wantCaptureRendered = false;
    this.wantPreviewRendered = true;

    if (this.graphState == GraphState.Capturing)
    {
        this.graphState = GraphState.Rendered;
        // Notify listeners that capture has finished.
        if (this.CaptureComplete != null) { this.CaptureComplete(this, null); }
    }

    this.firstFrame = true;
    this.renderStream = false;

    // Best effort: rebuild the (preview-only) graph and restart preview.
    // Errors are deliberately swallowed — Stop() must not throw.
    try { this.renderGraph(); } catch { }
    try { this.startPreviewIfNeeded(); } catch { }
}
/// <summary>
/// Builds a test graph: a 10x10 grid in which every cell is connected to
/// its neighbour below and its neighbour to the right.
/// </summary>
public static GraphState CreateTest()
{
    const int size = 10;

    // Every grid cell becomes a vertex.
    var vertices = new List<IState>();
    for (int x = 0; x < size; x++)
    {
        for (int y = 0; y < size; y++)
        {
            vertices.Add(new DiscreteState(x, y));
        }
    }

    var grid = new GraphState(vertices);

    // Interior cells: edge to the cell below and to the cell on the right.
    for (int x = 0; x < size - 1; x++)
    {
        for (int y = 0; y < size - 1; y++)
        {
            grid.AddEdge(new DiscreteState(x, y), new DiscreteState(x + 1, y));
            grid.AddEdge(new DiscreteState(x, y), new DiscreteState(x, y + 1));
        }
    }

    // Border: last column connects downwards, last row connects rightwards.
    for (int k = 0; k < size - 1; k++)
    {
        grid.AddEdge(new DiscreteState(k, size - 1), new DiscreteState(k + 1, size - 1));
        grid.AddEdge(new DiscreteState(size - 1, k), new DiscreteState(size - 1, k + 1));
    }

    return grid;
}
/// <summary>Stops the filter graph and records the stopped state.</summary>
public void Stop()
{
    DsError.ThrowExceptionForHR(_mediaControl.Stop());
    State = GraphState.Stopped;
}
/// <summary>
/// Maps the camera's current graph state onto a VideoState. Returns
/// VideoState.Stopped when no camera is attached or the state is unknown.
/// </summary>
public VideoState getVideoState()
{
    if (cam == null)
    {
        return VideoState.Stopped;
    }

    switch (cam.getGraphState())
    {
        case GraphState.Running: return VideoState.Running;
        case GraphState.Exiting: return VideoState.Exiting;
        case GraphState.Paused:  return VideoState.Paused;
        default:                 return VideoState.Stopped;
    }
}
/// <summary>
/// Seeks the running graph to the given position and resumes playback.
/// </summary>
/// <param name="percentagetoskip">
/// Value passed straight to IMediaPosition.put_CurrentPosition — despite
/// the name it is not scaled to a percentage here (the scaling code was
/// commented out in the original).
/// </param>
public void SkipForward(double percentagetoskip)
{
    // Only meaningful while playing.
    if (m_State != GraphState.Running)
    {
        return;
    }

    // Pause before seeking.
    int hr = m_mediaCtrl.Pause();
    DsError.ThrowExceptionForHR(hr);
    m_State = GraphState.Paused;

    DirectShowLib.OABool result;
    m_mediaPosition.CanSeekForward(out result);
    if (result == OABool.True)
    {
        m_mediaPosition.put_CurrentPosition(percentagetoskip);
    }

    // FIX: always resume playback. Previously, when the graph reported it
    // could not seek forward, the method paused the graph and never
    // restarted it, leaving playback stuck in the Paused state.
    hr = m_mediaCtrl.Run();
    DsError.ThrowExceptionForHR(hr);
    m_State = GraphState.Running;
}
/// <summary>Starts the filter graph and records the running state.</summary>
public virtual void Run()
{
    DsError.ThrowExceptionForHR(_mediaControl.Run());
    State = GraphState.Running;
}
/// <summary>
/// Disconnect and remove all filters downstream of the device and
/// compressor filters — the inverse of renderGraph(). Some properties
/// (e.g. FrameRate) can only be set while the device output pins are
/// unconnected.
/// </summary>
protected void DerenderGraph()
{
    // Stop the graph if it is running (errors are ignored).
    MediaControl?.Stop();

    // Detach and drop the preview window (errors are ignored).
    if (VideoWindow != null)
    {
        VideoWindow.put_Visible(CoreStreaming.DsHlp.OAFALSE);
        VideoWindow.put_Owner(IntPtr.Zero);
        VideoWindow = null;
    }

    // Stop listening for preview-window resizes.
    if (PreviewWindow != null)
    {
        PreviewWindow.SizeChanged -= OnPreviewWindowResize;
    }

    // Nothing further to tear down unless the graph was rendered.
    if ((int)ActualGraphState < (int)GraphState.Rendered)
    {
        return;
    }

    ActualGraphState = GraphState.Created;
    IsPreviewRendered = false;

    // Disconnect everything downstream of the video device; the
    // compressor filter (if any) stays in the graph.
    if (VideoDeviceFilter != null)
    {
        RemoveDownstream(VideoDeviceFilter, VideoCompressor == null);
    }
}
/// <summary>
/// Stop the graph and drop the rendered stream state, returning the graph
/// to the Created state. Opposite of renderGraph().
/// </summary>
protected void derenderGraph()
{
    // Stop the graph if it is running.
    if (this.mediaControl != null)
    {
        this.mediaControl.Stop();
    }

    // Release the preview video window.
    if (this.videoWindow != null)
    {
        this.videoWindow.put_Visible(0);
        this.videoWindow.put_Owner(IntPtr.Zero);
        this.videoWindow = null;
    }

    // FIX: the null check used the PreviewWindow property while the
    // unsubscribe used the previewWindow field; check the field itself
    // so the guard matches the member being accessed.
    if (this.previewWindow != null)
    {
        this.previewWindow.Resize -= new EventHandler(this.onPreviewWindowResize);
    }

    if (this.graphState >= GraphState.Rendered)
    {
        this.graphState = GraphState.Created;
        this.isCaptureRendered = false;
        this.isPreviewRendered = false;
        this.muxFilter = null;
        this.baseGrabFlt = null;
    }
}
/// <summary>
/// Asks the graph to stop once it is ready, then records the stopped state.
/// </summary>
protected void StopWhenReady()
{
    DsError.ThrowExceptionForHR(_mediaControl.StopWhenReady());
    State = GraphState.Stopped;
}
/// <summary>
/// Applies the tracked graph state of <paramref name="entity"/> to the
/// matching set: Added entities are inserted, Deleted entities removed,
/// and Unchanged entities are added only if the set does not already
/// contain them. Modified (and any unknown) states are no-ops.
/// </summary>
public void Visit <TEntity>(TEntity entity, GraphState state) where TEntity : class
{
    var set = Set <TEntity>();

    if (state == GraphState.Added)
    {
        set.Add(entity);
    }
    else if (state == GraphState.Deleted)
    {
        set.Remove(entity);
    }
    else if (state == GraphState.Unchanged && !set.Contains(entity))
    {
        set.Add(entity);
    }
    // GraphState.Modified and anything else: nothing to do.
}
// Shut down capture: stop the graph and release all COM interfaces.
public void CloseInterfaces()
{
    int hr;

    IsOnDeleting = true;

    // NOTE(review): lock(this) is discouraged (external code could lock
    // the same object), but it is kept as-is to avoid changing the
    // locking contract with other members of this class.
    lock (this)
    {
        if (m_State != GraphState.Exiting)
        {
            m_State = GraphState.Exiting;

            // Release the worker thread (if the thread was started).
            if (m_mre != null)
            {
                m_mre.Set();
            }
        }

        if (m_mediaCtrl != null)
        {
            // Stop the graph; the HRESULT is deliberately ignored here.
            hr = m_mediaCtrl.Stop();
            m_mediaCtrl = null;
        }

        if (m_mediaSeeking != null)
        {
            Marshal.ReleaseComObject(m_mediaSeeking);
            m_mediaSeeking = null;
        }

        if (m_sampGrabber != null)
        {
            Marshal.ReleaseComObject(m_sampGrabber);
            m_sampGrabber = null;
        }

#if DEBUG
        // Drop the Running Object Table registration (debug builds only).
        if (m_DsRot != null)
        {
            m_DsRot.Dispose();
        }
#endif

        if (m_FilterGraph != null)
        {
            // Release may throw if the RCW was already separated; always
            // clear the reference regardless.
            try
            {
                Marshal.ReleaseComObject(m_FilterGraph);
            }
            catch
            {
            }
            finally
            {
                m_FilterGraph = null;
            }
        }
    }

    // Force collection of the released COM wrappers.
    GC.Collect();
}
/// <summary>Pauses the filter graph and records the paused state.</summary>
public void Pause()
{
    DsError.ThrowExceptionForHR(_mediaControl.Pause());
    State = GraphState.Paused;
}
/// <summary>
/// Concatenating two single-state machines must leave exactly one final
/// state: the start state of the first machine.
/// </summary>
public void HasCorrectFinalStates()
{
    var firstStart = new GraphState { StateNumber = 0, IsFinal = true };
    var secondStart = new GraphState { StateNumber = 1, IsFinal = true };
    var first = new Graph { StartState = firstStart };
    var second = new Graph { StartState = secondStart };

    var ndfa = first.Concat(second);
    var finalStates = ndfa.FindFinalStates(ndfa.StartState);

    finalStates.Count.Should().Be(1);
    finalStates[0].Should().Be(firstStart);
}
/// <summary>
/// Completely tear down a filter graph and
/// release all associated resources.
/// </summary>
protected void DestroyGraph()
{
    // Derender the graph (this stops the graph and releases the preview
    // window; it also destroys half of the graph, which is unnecessary
    // but harmless here). Errors are ignored.
    try
    {
        DerenderGraph();
    }
    catch { }

    // Update the state after derender because it depends on correct
    // status. But we also want to update the state as early as possible
    // in case of error.
    ActualGraphState = GraphState.Null;
    IsPreviewRendered = false;

    // Remove filters from the graph. This should be unnecessary, but the
    // Nvidia WDM video driver cannot be used by this application again
    // unless we remove it. Ideally we would simply enumerate all the
    // filters in the graph and remove them. (ignore errors)
    if (VideoCompressorFilter != null)
    {
        GraphBuilder.RemoveFilter(VideoCompressorFilter);
    }
    if (VideoDeviceFilter != null)
    {
        GraphBuilder.RemoveFilter(VideoDeviceFilter);
    }

    // Release the COM objects; each reference is nulled afterwards so a
    // later destroy/create cycle starts clean.
    if (GraphBuilder != null)
    {
        Marshal.ReleaseComObject(GraphBuilder);
    }
    GraphBuilder = null;
    if (CaptureGraphBuilder != null)
    {
        Marshal.ReleaseComObject(CaptureGraphBuilder);
    }
    CaptureGraphBuilder = null;
    if (VideoDeviceFilter != null)
    {
        Marshal.ReleaseComObject(VideoDeviceFilter);
    }
    VideoDeviceFilter = null;
    if (VideoCompressorFilter != null)
    {
        Marshal.ReleaseComObject(VideoCompressorFilter);
    }
    VideoCompressorFilter = null;

    // These are copies of graphBuilder.
    MediaControl = null;
    VideoWindow = null;

    // For unmanaged objects we haven't released explicitly.
    GC.Collect();
}
/// <summary>
/// Concatenation must join the machines with a single epsilon transition
/// whose Start is the second machine's start state and whose End is the
/// first machine's start state.
/// </summary>
public void CreatesCorrectConnection()
{
    var firstStart = new GraphState { StateNumber = 0, IsFinal = true };
    var secondStart = new GraphState { StateNumber = 1, IsFinal = true };
    var first = new Graph { StartState = firstStart };
    var second = new Graph { StartState = secondStart };

    var ndfa = first.Concat(second);
    var connection = ndfa.StartState.Out[0];

    connection.Start.Should().Be(secondStart);
    connection.End.Should().Be(firstStart);
    connection.ConnectedBy.Letter.Should().Be(Word.Epsilon.Letter);
    connection.ConnectedBy.Mapping.Should().Be(Word.Epsilon.Mapping);
}
/// <summary>
/// Disconnect and remove all filters downstream of the device and
/// compressor filters — the inverse of renderGraph(). Some properties
/// (e.g. FrameRate) can only be set while the device output pins are
/// unconnected.
/// </summary>
void DerenderGraph()
{
    // Stop the graph if it is running (errors are ignored).
    _mediaControl?.Stop();

    // Detach and drop the preview window (errors are ignored).
    if (_videoWindow != null)
    {
        _videoWindow.put_Visible(OABool.False);
        _videoWindow.put_Owner(IntPtr.Zero);
        _videoWindow = null;
    }

    // Only a rendered graph has stream connections to tear down.
    if ((int)_actualGraphState >= (int)GraphState.Rendered)
    {
        _actualGraphState = GraphState.Created;
        _isPreviewRendered = false;

        // Disconnect everything downstream of the video device; the
        // compressor (if any) stays in the graph.
        if (_videoDeviceFilter != null)
        {
            RemoveDownstream(_videoDeviceFilter);
        }
    }
}
/// <summary>
/// A* demo: searches a small weighted graph starting at A with F and H as
/// goal nodes, then prints the expanded states and the solution path.
/// </summary>
private static void AStar_0()
{
    Console.WriteLine("AStar_0");

    // Build the demo graph (directed, weighted edges).
    Node a = new Node("A");
    Node b = new Node("B");
    Node c = new Node("C");
    Node d = new Node("D");
    Node e = new Node("E");
    Node f = new Node("F");
    Node g = new Node("G");
    Node h = new Node("H");

    a.Edges.Add(new Edge(b, 2.0d));
    b.Edges.Add(new Edge(h, 4.0d));
    b.Edges.Add(new Edge(d, 1.0d));
    b.Edges.Add(new Edge(c, 2.0d));
    b.Edges.Add(new Edge(a, 2.0d));
    c.Edges.Add(new Edge(b, 2.0d));
    d.Edges.Add(new Edge(e, 2.5d));
    d.Edges.Add(new Edge(f, 2.0d));
    d.Edges.Add(new Edge(g, 1.5d));

    // Start at A; F and H are both accepted as goals.
    GraphProblem problem = new GraphProblem(a, new Node[] { f, h });
    AStarSearch search = new AStarSearch();
    State[] results = search.Search(problem);

    Console.WriteLine("Expanded States:");
    PrintLabelPath(problem.ExpandedStates);

    Console.WriteLine("Solution from starting state:");
    PrintLabelPath(results);
}

/// <summary>
/// Prints the Label of each state separated by "->", followed by a blank
/// line. Extracted to remove the duplicated printing loop in AStar_0.
/// </summary>
private static void PrintLabelPath(System.Collections.IEnumerable states)
{
    bool first = true;
    foreach (object item in states)
    {
        if (!first)
        {
            Console.Write("->");
        }
        Console.Write(((GraphState)item).Label);
        first = false;
    }
    Console.WriteLine("\n");
}
/// <summary>
/// Wires this view model to the application's main window and sets the
/// initial editor state.
/// </summary>
public GraphViewModel()
{
    // Direct cast instead of `as`: if the main window is not a MainWindow
    // we fail here with an InvalidCastException instead of a less obvious
    // NullReferenceException on the next line.
    mainWindow = (MainWindow)App.Current.MainWindow;
    mainWindow.graphViewModel = this;
    last_added_id = 1;
    graphState = GraphState.NORMAL;
}
/// <summary>
/// Stop recording. The graph keeps timeshifting when a timeshift file is
/// active; otherwise it drops back to the Created state.
/// </summary>
/// <returns>Always true.</returns>
public bool StopRecording()
{
    // FIX: the log line used "{}", which is not a valid composite-format
    // placeholder (string.Format throws FormatException on it) — use "{0}"
    // so the subchannel id is actually logged.
    Log.Log.WriteFile("basesubchannel.StopRecording {0}", this._subChannelId);

    OnStopRecording();

    _graphState = _timeshiftFileName != "" ? GraphState.TimeShifting : GraphState.Created;
    _recordingFileName = "";
    _dateRecordingStarted = DateTime.MinValue;
    return true;
}
/// <summary>
/// Starts the filter graph, raises the process priority to AboveNormal,
/// and records the running state.
/// </summary>
public virtual void Run()
{
    DsError.ThrowExceptionForHR(_mediaControl.Run());
    Process.GetCurrentProcess().PriorityClass = ProcessPriorityClass.AboveNormal;
    State = GraphState.Running;
}
/// <summary>Resets the graph editor to its initial mission-editing state.</summary>
public static void Initialize()
{
    Seed = 0;
    _errorType = ErrorType.None;

    // Start in the mission-graph tab with a fresh grammar.
    _graphState = GraphState.Mission;
    _currentGraph = new Mission.GraphGrammar();

    _scrollView = new Vector2(0, 60);
    _isInitTabButton = true;
    _isRuleChanged = true;
}
/// <summary>Resumes (runs) the filter graph and records the running state.</summary>
public void Resume()
{
    // Direct cast instead of `as`: a graph object that does not expose
    // IMediaControl now fails with InvalidCastException instead of a
    // NullReferenceException on the Run() call.
    IMediaControl mediaCtrl = (IMediaControl)m_FilterGraph;
    int hr = mediaCtrl.Run();
    DsError.ThrowExceptionForHR(hr);
    m_State = GraphState.Running;
}
/// <summary>Pauses the graph, but only when it is currently running.</summary>
public virtual void Pause()
{
    if (this.State != GraphState.Running)
    {
        return;
    }

    DsError.ThrowExceptionForHR(this.MediaCtrl.Pause());
    this.State = GraphState.Paused;
}
/// <summary>
/// Creates the filter graph and the SampleGrabber (RGB24 frame capture)
/// filter, leaving the graph in the Created state. No-op when the graph
/// has already been created.
/// </summary>
/// <exception cref="NotImplementedException">
/// Thrown when the DirectShow SampleGrabber is not installed/registered.
/// </exception>
protected void createGraph()
{
    // Skip if we are already created.
    if (this.graphState >= GraphState.Created)
    {
        return;
    }

    // Ensure filters from a previous graph are released.
    GC.Collect();

    this.graphBuilder = (IGraphBuilder)Activator.CreateInstance(System.Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

    // Create the SampleGrabber used to capture frames.
    System.Type grabberType = System.Type.GetTypeFromCLSID(Clsid.SampleGrabber, true);
    if (grabberType == null)
    {
        throw new NotImplementedException("DirectShow SampleGrabber not installed/registered");
    }
    this.sampGrabber = (ISampleGrabber)Activator.CreateInstance(grabberType);

    // Grab frames as uncompressed RGB24 video.
    AMMediaType pmt = new AMMediaType();
    pmt.majorType = MediaType.Video;
    pmt.subType = MediaSubType.RGB24;
    pmt.formatType = FormatType.VideoInfo;
    int errorCode = this.sampGrabber.SetMediaType(pmt);
    if (errorCode < 0)
    {
        Marshal.ThrowExceptionForHR(errorCode);
    }

    this.mediaEvt = (IMediaEventEx)this.graphBuilder;

    // Add the grabber filter to the graph.
    this.baseGrabFlt = (IBaseFilter)this.sampGrabber;
    errorCode = this.graphBuilder.AddFilter(this.baseGrabFlt, "DS.NET Grabber");
    if (errorCode < 0)
    {
        Marshal.ThrowExceptionForHR(errorCode);
    }

    // NOTE(review): the original (decompiled) body declared several unused
    // PinCategory/MediaType/GUID locals and a dead `if` — remains of
    // FindInterface calls lost during decompilation — which are removed
    // here. The audio stream config was never actually queried, so the
    // field stays null exactly as before.
    this.audioStreamConfig = null;

    this.mediaControl = (IMediaControl)this.graphBuilder;
    this.videoCaps = null;
    this.audioCaps = null;

    this.graphState = GraphState.Created;
}
/// <summary>
/// Imports the specified state into the state storage.
/// </summary>
/// <param name="key">State key.</param>
/// <param name="state">State object.</param>
/// <exception cref="GX_ConsistencyException">
/// Thrown when a state with the same key is already stored.
/// </exception>
public virtual void ImportState(string key, GraphState <TVertex, TEdge, TGraph> state)
{
    if (ContainsState(key))
    {
        throw new GX_ConsistencyException(string.Format("Graph state {0} already exist in state storage", key));
    }

    _states.Add(key, state);
}
/// <summary>
/// Restores a previously captured graph configuration and redraws the graph.
/// </summary>
public void SetGraphState(GraphState state)
{
    // Copy the saved range/series settings back onto this view.
    this.range = state.Range;
    this.years = state.Years;
    this.start = state.Start;
    this.end = state.End;
    this.series = state.Series;

    // Restore the year-to-date toggle and keep its menu item in sync.
    this.yearToDate = state.YearToDate;
    this.menuItemYearToDate.IsChecked = this.yearToDate;

    GenerateGraph();
}
/// <summary>Pauses the capture graph when it is currently playing.</summary>
public void Pause()
{
    // Only a running graph can be paused.
    if (m_State != GraphState.Running)
    {
        return;
    }

    DsError.ThrowExceptionForHR(m_mediaCtrl.Pause());
    m_State = GraphState.Paused;
}
/// <summary>Stops the capture graph; only valid while playing or paused.</summary>
public void Stop()
{
    bool canStop = m_State == GraphState.Running || m_State == GraphState.Paused;
    if (canStop)
    {
        DsError.ThrowExceptionForHR(m_mediaCtrl.Stop());
        m_State = GraphState.Stopped;
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="BaseSubChannel"/> class.
/// </summary>
protected BaseSubChannel()
{
    // No timeshift or recording is active yet.
    _timeshiftFileName = String.Empty;
    _recordingFileName = String.Empty;
    _dateRecordingStarted = DateTime.MinValue;
    _dateTimeShiftStarted = DateTime.MinValue;

    _teletextDecoder = new DVBTeletext();
    _tsHelper = new TSHelperTools();
    _graphState = GraphState.Created;
}
/// <summary>
/// Stops timeshifting and resets the related state.
/// </summary>
/// <returns>Always true.</returns>
public bool StopTimeShifting()
{
    OnStopTimeShifting();

    _startTimeShifting = false;
    _timeshiftFileName = "";
    _dateTimeShiftStarted = DateTime.MinValue;
    _graphState = GraphState.Created;

    return true;
}
/// <summary>Starts playback unless the graph is already playing (or shutting down).</summary>
public void Start()
{
    bool stoppedOrPaused = m_State == GraphState.Stopped || m_State == GraphState.Paused;
    if (stoppedOrPaused)
    {
        DsError.ThrowExceptionForHR(m_mediaCtrl.Run());
        m_State = GraphState.Running;
    }
}
/// <summary>
/// Depth-first walk over the state graph, appending a friendly string for
/// every outgoing connection encountered.
/// </summary>
/// <param name="currentState">State whose connections are visited next.</param>
/// <param name="visited">States already walked (cycle guard).</param>
/// <param name="output">Accumulated connection descriptions.</param>
void PrintStatesOutputs(GraphState currentState, List<GraphState> visited, List<string> output)
{
    // Mark this state as seen before following its connections.
    visited.Add(currentState);

    foreach (var connection in currentState.Out)
    {
        // Record the connection's friendly string.
        output.Add(connection.ToString());

        // Recurse only into end states we have not walked yet.
        if (!visited.Contains(connection.End))
        {
            PrintStatesOutputs(connection.End, visited, output);
        }
    }
}
/// <summary>
/// Concatenating two single-state machines must produce exactly one
/// outgoing connection from the new start state.
/// </summary>
public void CreatesOneConnection()
{
    var firstStart = new GraphState { StateNumber = 0, IsFinal = true };
    var secondStart = new GraphState { StateNumber = 1, IsFinal = true };
    var first = new Graph { StartState = firstStart };
    var second = new Graph { StartState = secondStart };

    var ndfa = first.Concat(second);

    ndfa.StartState.Out.Count.Should().Be(1);
}
/// <summary>
/// Completely tear down a filter graph and
/// release all associated resources.
/// </summary>
protected void destroyGraph()
{
    // Derender the graph (this stops the graph and releases the preview
    // window; it also destroys half of the graph, which is unnecessary
    // but harmless here). Errors are ignored.
    try{ derenderGraph(); }
    catch {}

    // Update the state after derender because it depends on correct
    // status. But we also want to update the state as early as possible
    // in case of error.
    graphState = GraphState.Null;
    isCaptureRendered = false;
    isPreviewRendered = false;

    // Remove the graph from the Running Object Table.
#if DSHOWNET
    if ( rotCookie != 0 )
    {
        DsROT.RemoveGraphFromRot( ref rotCookie );
        rotCookie = 0;
    }
#else
    if (rotCookie != null)
    {
        rotCookie.Dispose();
        rotCookie = null;
    }
#endif

    // Remove filters from the graph. This should be unnecessary, but the
    // Nvidia WDM video driver cannot be used by this application again
    // unless we remove it. Ideally, we should simply enumerate all the
    // filters in the graph and remove them. (ignore errors)
    if ( muxFilter != null )
        graphBuilder.RemoveFilter( muxFilter );
    if ( videoCompressorFilter != null )
        graphBuilder.RemoveFilter( videoCompressorFilter );
    if ( audioCompressorFilter != null )
        graphBuilder.RemoveFilter( audioCompressorFilter );
    if ( videoDeviceFilter != null )
        graphBuilder.RemoveFilter( videoDeviceFilter );
    if ( audioDeviceFilter != null )
        graphBuilder.RemoveFilter( audioDeviceFilter );
    if(this.videoRendererFilter != null)
    {
        this.graphBuilder.RemoveFilter(this.videoRendererFilter);
    }

    // Clean up properties.
    if ( videoSources != null )
        videoSources.Dispose();
    videoSources = null;
    if ( audioSources != null )
        audioSources.Dispose();
    audioSources = null;
    this.PropertyPages = null; // Disposal done within PropertyPages
    if ( tuner != null )
        tuner.Dispose();
    tuner = null;
    // #if NEWCODE
    if(this.tvAudio != null)
    {
        Marshal.ReleaseComObject(this.tvAudio);
        tvAudio = null;
    }
    if(this.dxUtils != null)
    {
        this.dxUtils.Dispose();
        this.dxUtils = null;
    }
    // #endif

    // Release the COM objects; every reference is nulled afterwards so a
    // later destroy/create cycle starts clean.
    if ( graphBuilder != null )
        Marshal.ReleaseComObject( graphBuilder );
    graphBuilder = null;
    if ( captureGraphBuilder != null )
        Marshal.ReleaseComObject( captureGraphBuilder );
    captureGraphBuilder = null;
    if ( muxFilter != null )
        Marshal.ReleaseComObject( muxFilter );
    muxFilter = null;
    if ( fileWriterFilter != null )
        Marshal.ReleaseComObject( fileWriterFilter );
    fileWriterFilter = null;
    if ( videoDeviceFilter != null )
        Marshal.ReleaseComObject( videoDeviceFilter );
    videoDeviceFilter = null;
    if ( audioDeviceFilter != null )
        Marshal.ReleaseComObject( audioDeviceFilter );
    audioDeviceFilter = null;
    if ( videoCompressorFilter != null )
        Marshal.ReleaseComObject( videoCompressorFilter );
    videoCompressorFilter = null;
    if ( audioCompressorFilter != null )
        Marshal.ReleaseComObject( audioCompressorFilter );
    audioCompressorFilter = null;
    // #if NEWCODE
    this.DisposeSampleGrabber();
    // #endif
    if(this.videoRendererFilter != null)
    {
        Marshal.ReleaseComObject(this.videoRendererFilter);
        this.videoRendererFilter = null;
    }

    // These are copies of graphBuilder.
    mediaControl = null;
    videoWindow = null;

    // For unmanaged objects we haven't released explicitly.
    GC.Collect();
}
/// <summary>
/// Create a new filter graph and add the device and compressor filters,
/// but leave them unconnected; renderGraph() connects them later.
/// No-op when the graph has already been created.
/// </summary>
/// <exception cref="ArgumentException">
/// Thrown when neither a video nor an audio device has been set.
/// </exception>
protected void createGraph()
{
    if ((this.videoDevice == null) && (this.audioDevice == null))
    {
        throw new ArgumentException("The video and/or audio device have not been set. Please set one or both to valid capture devices.\n");
    }

    // Skip if we are already created.
    if (this.graphState < GraphState.Created)
    {
        object obj2;

        // Ensure filters from a previous graph are released.
        GC.Collect();

        // Make a new filter graph.
        this.graphBuilder = (IGraphBuilder) Activator.CreateInstance(System.Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

        // Get the capture graph builder and attach it to the graph.
        Guid clsid = Clsid.CaptureGraphBuilder2;
        Guid gUID = typeof(ICaptureGraphBuilder2).GUID;
        this.captureGraphBuilder = (ICaptureGraphBuilder2) DsBugWO.CreateDsInstance(ref clsid, ref gUID);
        int errorCode = this.captureGraphBuilder.SetFiltergraph(this.graphBuilder);
        if (errorCode < 0)
        {
            Marshal.ThrowExceptionForHR(errorCode);
        }

        // Add the video capture device, if configured.
        if (this.VideoDevice != null)
        {
            this.videoDeviceFilter = (IBaseFilter) Marshal.BindToMoniker(this.VideoDevice.MonikerString);
            errorCode = this.graphBuilder.AddFilter(this.videoDeviceFilter, "Video Capture Device");
            if (errorCode < 0)
            {
                Marshal.ThrowExceptionForHR(errorCode);
            }
        }

        // Add the audio capture device, if configured.
        if (this.AudioDevice != null)
        {
            this.audioDeviceFilter = (IBaseFilter) Marshal.BindToMoniker(this.AudioDevice.MonikerString);
            errorCode = this.graphBuilder.AddFilter(this.audioDeviceFilter, "Audio Capture Device");
            if (errorCode < 0)
            {
                Marshal.ThrowExceptionForHR(errorCode);
            }
        }

        // Add the video compressor, if configured.
        if (this.VideoCompressor != null)
        {
            this.videoCompressorFilter = (IBaseFilter) Marshal.BindToMoniker(this.VideoCompressor.MonikerString);
            errorCode = this.graphBuilder.AddFilter(this.videoCompressorFilter, "Video Compressor");
            if (errorCode < 0)
            {
                Marshal.ThrowExceptionForHR(errorCode);
            }
        }

        // Add the audio compressor, if configured.
        if (this.AudioCompressor != null)
        {
            this.audioCompressorFilter = (IBaseFilter) Marshal.BindToMoniker(this.AudioCompressor.MonikerString);
            errorCode = this.graphBuilder.AddFilter(this.audioCompressorFilter, "Audio Compressor");
            if (errorCode < 0)
            {
                Marshal.ThrowExceptionForHR(errorCode);
            }
        }

        // Retrieve the stream control interface for the video device.
        // Try an interleaved media type first, then fall back to video.
        Guid capture = PinCategory.Capture;
        Guid interleaved = MediaType.Interleaved;
        Guid riid = typeof(IAMStreamConfig).GUID;
        if (this.captureGraphBuilder.FindInterface(ref capture, ref interleaved, this.videoDeviceFilter, ref riid, out obj2) != 0)
        {
            interleaved = MediaType.Video;
            if (this.captureGraphBuilder.FindInterface(ref capture, ref interleaved, this.videoDeviceFilter, ref riid, out obj2) != 0)
            {
                obj2 = null;
            }
        }
        this.videoStreamConfig = obj2 as IAMStreamConfig;

        // Retrieve the stream control interface for the audio device.
        obj2 = null;
        capture = PinCategory.Capture;
        interleaved = MediaType.Audio;
        riid = typeof(IAMStreamConfig).GUID;
        if (this.captureGraphBuilder.FindInterface(ref capture, ref interleaved, this.audioDeviceFilter, ref riid, out obj2) != 0)
        {
            obj2 = null;
        }
        this.audioStreamConfig = obj2 as IAMStreamConfig;

        // Media control interface used for starting/stopping the graph.
        this.mediaControl = (IMediaControl) this.graphBuilder;

        // Drop cached crossbars, property pages and capabilities so they
        // are reloaded against the new graph.
        if (this.videoSources != null)
        {
            this.videoSources.Dispose();
        }
        this.videoSources = null;
        if (this.audioSources != null)
        {
            this.audioSources.Dispose();
        }
        this.audioSources = null;
        if (this.propertyPages != null)
        {
            this.propertyPages.Dispose();
        }
        this.propertyPages = null;
        this.videoCaps = null;
        this.audioCaps = null;

        // Retrieve the TV tuner interface, if the device exposes one.
        obj2 = null;
        capture = PinCategory.Capture;
        interleaved = MediaType.Interleaved;
        riid = typeof(IAMTVTuner).GUID;
        if (this.captureGraphBuilder.FindInterface(ref capture, ref interleaved, this.videoDeviceFilter, ref riid, out obj2) != 0)
        {
            interleaved = MediaType.Video;
            if (this.captureGraphBuilder.FindInterface(ref capture, ref interleaved, this.videoDeviceFilter, ref riid, out obj2) != 0)
            {
                obj2 = null;
            }
        }
        IAMTVTuner tuner = obj2 as IAMTVTuner;
        if (tuner != null)
        {
            this.tuner = new DirectX.Capture.Tuner(tuner);
        }

        this.graphState = GraphState.Created;
    }
}
/// <summary>
/// Begin capturing: stops current activity, re-renders the graph with the
/// capture stream enabled, and starts the filter graph.
/// </summary>
public void Start()
{
    // Stop first so frames can still be grabbed while capturing video.
    Stop();
    firstFrame = false;

    assertStopped();

    // We want the capture stream rendered; rebuild the graph if necessary.
    wantCaptureRendered = true;
    renderStream = true;
    renderGraph();

    // Start the filter graph: begin capturing. The HRESULT is deliberately
    // not checked (matches the original, which had the check commented out).
    mediaControl.Run();

    graphState = GraphState.Capturing;
}
// --------------------- Private Methods -----------------------

/// <summary>
/// Create a new filter graph and add filters (devices, compressors,
/// misc), but leave the filters unconnected. Call renderGraph()
/// to connect the filters.
/// </summary>
protected void createGraph()
{
    Guid cat;
    Guid med;
    int hr;
    Type comType = null;
    object comObj = null;

    // Ensure required properties are set.
    if ( videoDevice == null && audioDevice == null )
        throw new ArgumentException( "The video and/or audio device have not been set. Please set one or both to valid capture devices.\n" );

    // Skip if we are already created.
    if ( (int)graphState < (int)GraphState.Created )
    {
        // Garbage collect, ensure that previous filters are released.
        GC.Collect();

        // Make a new filter graph.
        graphBuilder = (IGraphBuilder) Activator.CreateInstance( Type.GetTypeFromCLSID( Clsid.FilterGraph, true ) );

        // Get the Capture Graph Builder.
        Guid clsid = Clsid.CaptureGraphBuilder2;
        Guid riid = typeof(ICaptureGraphBuilder2).GUID;
        captureGraphBuilder = (ICaptureGraphBuilder2) DsBugWO.CreateDsInstance( ref clsid, ref riid );

        // sampGrabber: ISampleGrabber used to capture frames.
        comType=Type.GetTypeFromCLSID( Clsid.SampleGrabber, true );
        if(comType==null)
            throw new NotImplementedException (@"DirectShow SampleGrabber not installed/registered");
        comObj=Activator.CreateInstance( comType );
        sampGrabber = (ISampleGrabber) comObj;
        comObj = null;

        // Link the CaptureGraphBuilder to the filter graph.
        // NOTE(review): the HRESULT checks in this method were commented
        // out in the original and are preserved that way.
        hr = captureGraphBuilder.SetFiltergraph( graphBuilder );
        //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        // Set up the SampleGrabber to deliver uncompressed RGB24 video.
        AMMediaType media = new AMMediaType();
        media.majorType= MediaType.Video;
        media.subType = MediaSubType.RGB24;
        media.formatType = FormatType.VideoInfo;
        hr = sampGrabber.SetMediaType( media );
        //if( hr<0 ) Marshal.ThrowExceptionForHR( hr );

        // Add the graph to the Running Object Table so it can be
        // viewed with GraphEdit.
#if DEBUG
        DsROT.AddGraphToRot( graphBuilder, out rotCookie );
#endif

        // Get the video device and add it to the filter graph,
        // together with the SampleGrabber filter.
        if ( VideoDevice != null )
        {
            videoDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( VideoDevice.MonikerString );
            hr = graphBuilder.AddFilter( videoDeviceFilter, "Video Capture Device" );
            //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

            mediaEvt = (IMediaEventEx) graphBuilder;
            baseGrabFlt = (IBaseFilter) sampGrabber;
            hr = graphBuilder.AddFilter( baseGrabFlt, "DS.NET Grabber" );
            //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Get the audio device and add it to the filter graph.
        if ( AudioDevice != null )
        {
            audioDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( AudioDevice.MonikerString );
            hr = graphBuilder.AddFilter( audioDeviceFilter, "Audio Capture Device" );
            //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Get the video compressor and add it to the filter graph.
        if ( VideoCompressor != null )
        {
            videoCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( VideoCompressor.MonikerString );
            hr = graphBuilder.AddFilter( videoCompressorFilter, "Video Compressor" );
            //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Get the audio compressor and add it to the filter graph.
        if ( AudioCompressor != null )
        {
            audioCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( AudioCompressor.MonikerString );
            hr = graphBuilder.AddFilter( audioCompressorFilter, "Audio Compressor" );
            //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Retrieve the stream control interface for the video device.
        // FindInterface will also add any required filters (WDM devices
        // in particular may need additional upstream filters to function).
        // Try looking for an interleaved media type first.
        object o;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        Guid iid = typeof(IAMStreamConfig).GUID;
        hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o );
        if ( hr != 0 )
        {
            // If not found, try looking for a video media type.
            med = MediaType.Video;
            hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o );
            if ( hr != 0 )
                o = null;
        }
        videoStreamConfig = o as IAMStreamConfig;

        // Retrieve the stream control interface for the audio device.
        o = null;
        cat = PinCategory.Capture;
        med = MediaType.Audio ;
        iid = typeof(IAMStreamConfig).GUID;
        hr = captureGraphBuilder.FindInterface( ref cat, ref med, audioDeviceFilter, ref iid, out o );
        if ( hr != 0 )
            o = null;
        audioStreamConfig = o as IAMStreamConfig;

        // Retreive the media control interface (for starting/stopping graph).
        mediaControl = (IMediaControl) graphBuilder;

        // Reload any video crossbars.
        if ( videoSources != null )
            videoSources.Dispose();
        videoSources = null;

        // Reload any audio crossbars.
        if ( audioSources != null )
            audioSources.Dispose();
        audioSources = null;

        // Reload any property pages exposed by filters.
        if ( propertyPages != null )
            propertyPages.Dispose();
        propertyPages = null;

        // Reload capabilities of the video and audio devices.
        videoCaps = null;
        audioCaps = null;

        // Retrieve TV Tuner if available; try the interleaved media type
        // first, then plain video.
        o = null;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        iid = typeof(IAMTVTuner).GUID;
        hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o );
        if ( hr != 0 )
        {
            med = MediaType.Video ;
            hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o );
            if ( hr != 0 )
                o = null;
        }
        IAMTVTuner t = o as IAMTVTuner;
        if ( t != null )
            tuner = new Tuner( t );

        // NOTE(review): a large commented-out experimental block (VMR9
        // windowless renderer + SmartTee preview/capture split, see
        // samples\inc\vmrutil.h :: RenderFileToVMR9) was removed here; it
        // was dead code inside /* ... */ and never compiled.

        // Update the state now that we are done.
        graphState = GraphState.Created;
    }
}
/// <summary>
/// Begin capturing: render the capture stream and run the filter graph.
/// Throws if the graph is not currently stopped.
/// </summary>
public void Start()
{
    // Must not already be capturing.
    this.assertStopped();

    // Request the capture stream and (re)connect the graph as needed.
    this.wantCaptureRendered = true;
    this.renderGraph();

    // Run the graph; negative HRESULTs are surfaced as exceptions
    // (Marshal.ThrowExceptionForHR ignores positive "success" codes).
    int hr = this.mediaControl.Run();
    if (hr != 0)
    {
        Marshal.ThrowExceptionForHR(hr);
    }

    this.graphState = GraphState.Capturing;
}
/// <summary>
/// Connects the filters of a previously created graph
/// (created by createGraph()). Once rendered the graph
/// is ready to be used. This method may also destroy
/// streams if we have streams we no longer want.
/// Many HRESULTs below are deliberately left unchecked (legacy
/// best-effort behavior); the commented-out checks are kept as markers.
/// </summary>
protected void renderGraph()
{
    Guid cat;
    Guid med;
    int hr;
    bool didSomething = false;

    // Win32 window styles applied to the embedded preview video window.
    const int WS_CHILD = 0x40000000;
    const int WS_CLIPCHILDREN = 0x02000000;
    const int WS_CLIPSIBLINGS = 0x04000000;

    assertStopped();

    // Ensure required properties set
    if ( filename == null )
        throw new ArgumentException( "The Filename property has not been set to a file.\n" );

    // Stop the graph
    if ( mediaControl != null )
        mediaControl.Stop();

    // Create the graph if needed (group should already be created)
    createGraph();

    // Derender the graph if we have a capture or preview stream
    // that we no longer want. We can't derender the capture and
    // preview streams seperately.
    // Notice the second case will leave a capture stream intact
    // even if we no longer want it. This allows the user that is
    // not using the preview to Stop() and Start() without
    // rerendering the graph.
    if ( !wantPreviewRendered && isPreviewRendered )
        derenderGraph();
    if ( !wantCaptureRendered && isCaptureRendered )
        if ( wantPreviewRendered )
        {
            // Dropping capture while keeping a preview requires a full
            // rebuild of the graph from scratch.
            derenderGraph();
            graphState = GraphState.Null;
            createGraph();
        }

    // Video Capture
    // ===================================================================================
    if ( wantCaptureRendered && !isCaptureRendered )
    {
        // Render the file writer portion of graph (mux -> file)
        Guid mediaSubType = MediaSubType.Avi;
        hr = captureGraphBuilder.SetOutputFileName( ref mediaSubType, Filename, out muxFilter, out fileWriterFilter );
        //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        if ( VideoDevice != null )
        {
            // Try interleaved first, because if the device supports it,
            // it's the only way to get audio as well as video
            cat = PinCategory.Capture;
            med = MediaType.Interleaved;
            hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, videoCompressorFilter, muxFilter);
            if( hr < 0 )
            {
                // Fall back to a plain video pin.
                med = MediaType.Video;
                hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, videoCompressorFilter, muxFilter);
                //if ( hr == -2147220969 ) throw new DeviceInUseException( "Video device", hr );
                //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
            }
        }

        // Render audio (audio -> mux)
        if ( AudioDevice != null )
        {
            cat = PinCategory.Capture;
            med = MediaType.Audio;
            hr = captureGraphBuilder.RenderStream( ref cat, ref med, audioDeviceFilter, audioCompressorFilter, muxFilter );
            if( hr < 0 )
                Marshal.ThrowExceptionForHR( hr );
        }

        isCaptureRendered = true;
        didSomething = true;
    }

    // Render preview stream and launch the baseGrabFlt to capture frames
    // ===================================================================================
    if ( wantPreviewRendered && renderStream && !isPreviewRendered )
    {
        // Render preview (video.PinPreview -> baseGrabFlt -> renderer).
        // At this point intelligent connect is used, because some webcams
        // don't have both a preview pin and a capture pin, so a Smart Tee
        // filter will be inserted automatically (verified with GraphEdit).
        // baseGrabFlt is passed in the transform-filter slot, just like
        // videoCompressorFilter would be for the capture stream.
        cat = PinCategory.Preview;
        med = MediaType.Video;
        hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, baseGrabFlt, null );
        //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        // Get the IVideoWindow interface
        videoWindow = (IVideoWindow) graphBuilder;

        // Set the video window to be a child of the main window
        hr = videoWindow.put_Owner( previewWindow.Handle );
        //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        // Set video window style
        hr = videoWindow.put_WindowStyle( WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
        //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        // Position video window in client rect of owner window
        previewWindow.Resize += new EventHandler( onPreviewWindowResize );
        onPreviewWindowResize( this, null );

        // Make the video window visible, now that it is properly positioned
        hr = videoWindow.put_Visible( DsHlp.OATRUE );
        //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        // Route filter-graph events to this window's message loop.
        hr = mediaEvt.SetNotifyWindow( this.Handle, WM_GRAPHNOTIFY, IntPtr.Zero );
        //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        isPreviewRendered = true;
        didSomething = true;

        // Begin Configuration of SampGrabber <<<<<<----------------------------------------------------
        AMMediaType media = new AMMediaType();
        hr = sampGrabber.GetConnectedMediaType( media );
        //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        // Only VIDEOINFOHEADER-formatted connections are supported here.
        if( (media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero) )
            throw new NotSupportedException( "Unknown Grabber Media Format" );

        // Copy the format header out of the unmanaged block, then free it
        // and clear the pointer so it cannot be double-freed.
        videoInfoHeader = (VideoInfoHeader) Marshal.PtrToStructure( media.formatPtr, typeof(VideoInfoHeader) );
        Marshal.FreeCoTaskMem( media.formatPtr );
        media.formatPtr = IntPtr.Zero;

        // Grabber runs in pass-through mode: no sample buffering, not
        // one-shot, and no callback registered yet.
        hr = sampGrabber.SetBufferSamples( false );
        if( hr == 0 )
            hr = sampGrabber.SetOneShot( false );
        if( hr == 0 )
            hr = sampGrabber.SetCallback( null, 0 );
        //if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        // Finish Configuration of SampGrabber <<<<<<----------------------------------------------------
    }

    if ( didSomething )
        graphState = GraphState.Rendered;
}
/// <summary>
/// Connect the filters of the previously created graph so it is ready
/// to run: wires the capture path (device -> compressor -> mux -> file)
/// and/or the preview path (device -> renderer hosted in previewWindow)
/// as requested by the wantCaptureRendered / wantPreviewRendered flags.
/// </summary>
protected void renderGraph()
{
    // Win32 styles applied to the embedded video window
    // (0x46000000 combined in the original decompiled code).
    const int WS_CHILD = 0x40000000;
    const int WS_CLIPCHILDREN = 0x02000000;
    const int WS_CLIPSIBLINGS = 0x04000000;
    const int OATRUE = -1;

    Guid category;
    Guid mediaType;
    int hr;
    bool changedGraph = false;

    this.assertStopped();

    if (this.filename == null)
    {
        throw new ArgumentException("The Filename property has not been set to a file.\n");
    }

    // The graph must be stopped before (re)wiring filters.
    if (this.mediaControl != null)
    {
        this.mediaControl.Stop();
    }

    // Make sure the (unconnected) filters exist.
    this.createGraph();

    // Tear down streams we no longer want. Capture and preview cannot be
    // derendered separately, so dropping capture also derenders when a
    // preview is still wanted.
    if (!this.wantPreviewRendered && this.isPreviewRendered)
    {
        this.derenderGraph();
    }
    if (!this.wantCaptureRendered && this.isCaptureRendered && this.wantPreviewRendered)
    {
        this.derenderGraph();
    }

    // Capture stream: device -> (compressor) -> mux -> file writer.
    if (this.wantCaptureRendered && !this.isCaptureRendered)
    {
        Guid aviSubType = MediaSubType.Avi;
        hr = this.captureGraphBuilder.SetOutputFileName(ref aviSubType, this.Filename, out this.muxFilter, out this.fileWriterFilter);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        if (this.VideoDevice != null)
        {
            // Prefer an interleaved pin (can carry audio as well);
            // fall back to a plain video pin.
            category = PinCategory.Capture;
            mediaType = MediaType.Interleaved;
            hr = this.captureGraphBuilder.RenderStream(ref category, ref mediaType, this.videoDeviceFilter, this.videoCompressorFilter, this.muxFilter);
            if (hr < 0)
            {
                mediaType = MediaType.Video;
                hr = this.captureGraphBuilder.RenderStream(ref category, ref mediaType, this.videoDeviceFilter, this.videoCompressorFilter, this.muxFilter);
                if (hr == -2147220969)
                {
                    // This specific HRESULT means another application holds the device.
                    throw new DeviceInUseException("Video device", hr);
                }
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
            }
        }

        if (this.AudioDevice != null)
        {
            category = PinCategory.Capture;
            mediaType = MediaType.Audio;
            hr = this.captureGraphBuilder.RenderStream(ref category, ref mediaType, this.audioDeviceFilter, this.audioCompressorFilter, this.muxFilter);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }

        this.isCaptureRendered = true;
        changedGraph = true;
    }

    // Preview stream: device preview pin -> default renderer.
    if (this.wantPreviewRendered && !this.isPreviewRendered)
    {
        category = PinCategory.Preview;
        mediaType = MediaType.Video;
        hr = this.captureGraphBuilder.RenderStream(ref category, ref mediaType, this.videoDeviceFilter, null, null);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Host the renderer's video window inside previewWindow.
        this.videoWindow = (IVideoWindow) this.graphBuilder;

        hr = this.videoWindow.put_Owner(this.previewWindow.Handle);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        hr = this.videoWindow.put_WindowStyle(WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Keep the video sized to the owner's client area.
        this.previewWindow.Resize += new EventHandler(this.onPreviewWindowResize);
        this.onPreviewWindowResize(this, null);

        hr = this.videoWindow.put_Visible(OATRUE);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        this.isPreviewRendered = true;
        changedGraph = true;
    }

    if (changedGraph)
    {
        this.graphState = GraphState.Rendered;
    }
}
/// <summary>
/// Completely dismantle the filter graph: derender the streams, remove
/// every filter from the graph, release all COM objects and reset the
/// graph-related state. Safe to call from any state.
/// </summary>
protected void destroyGraph()
{
    // Best effort: failures during derender must not abort teardown.
    try
    {
        this.derenderGraph();
    }
    catch
    {
    }

    this.graphState = GraphState.Null;
    this.isCaptureRendered = false;
    this.isPreviewRendered = false;

    // Drop the Running Object Table registration (GraphEdit debugging aid).
    if (this.rotCookie != 0)
    {
        DsROT.RemoveGraphFromRot(ref this.rotCookie);
        this.rotCookie = 0;
    }

    // Detach all filters from the graph before releasing them.
    if (this.muxFilter != null)
    {
        this.graphBuilder.RemoveFilter(this.muxFilter);
    }
    if (this.videoCompressorFilter != null)
    {
        this.graphBuilder.RemoveFilter(this.videoCompressorFilter);
    }
    if (this.audioCompressorFilter != null)
    {
        this.graphBuilder.RemoveFilter(this.audioCompressorFilter);
    }
    if (this.videoDeviceFilter != null)
    {
        this.graphBuilder.RemoveFilter(this.videoDeviceFilter);
    }
    if (this.audioDeviceFilter != null)
    {
        this.graphBuilder.RemoveFilter(this.audioDeviceFilter);
    }

    // Dispose helper wrappers (crossbars, property pages, tuner).
    if (this.videoSources != null)
    {
        this.videoSources.Dispose();
    }
    this.videoSources = null;

    if (this.audioSources != null)
    {
        this.audioSources.Dispose();
    }
    this.audioSources = null;

    if (this.propertyPages != null)
    {
        this.propertyPages.Dispose();
    }
    this.propertyPages = null;

    if (this.tuner != null)
    {
        this.tuner.Dispose();
    }
    this.tuner = null;

    // Release the COM runtime-callable wrappers, builders first,
    // then the individual filters, in the original order.
    if (this.graphBuilder != null)
    {
        Marshal.ReleaseComObject(this.graphBuilder);
    }
    this.graphBuilder = null;

    if (this.captureGraphBuilder != null)
    {
        Marshal.ReleaseComObject(this.captureGraphBuilder);
    }
    this.captureGraphBuilder = null;

    if (this.muxFilter != null)
    {
        Marshal.ReleaseComObject(this.muxFilter);
    }
    this.muxFilter = null;

    if (this.fileWriterFilter != null)
    {
        Marshal.ReleaseComObject(this.fileWriterFilter);
    }
    this.fileWriterFilter = null;

    if (this.videoDeviceFilter != null)
    {
        Marshal.ReleaseComObject(this.videoDeviceFilter);
    }
    this.videoDeviceFilter = null;

    if (this.audioDeviceFilter != null)
    {
        Marshal.ReleaseComObject(this.audioDeviceFilter);
    }
    this.audioDeviceFilter = null;

    if (this.videoCompressorFilter != null)
    {
        Marshal.ReleaseComObject(this.videoCompressorFilter);
    }
    this.videoCompressorFilter = null;

    if (this.audioCompressorFilter != null)
    {
        Marshal.ReleaseComObject(this.audioCompressorFilter);
    }
    this.audioCompressorFilter = null;

    // These are plain interface casts of graphBuilder; clearing the
    // references is sufficient, no separate release is performed.
    this.mediaControl = null;
    this.videoWindow = null;

    // Intentional GC.Collect: forces the released COM wrappers to be
    // reclaimed promptly — presumably so the capture device is freed
    // for other applications immediately.
    GC.Collect();
}
/// <summary>
/// Disconnect the rendered streams (capture and preview) while keeping
/// the filters themselves; the graph state drops back to Created.
/// </summary>
protected void derenderGraph()
{
    // Stop playback before changing the graph topology.
    if (this.mediaControl != null)
    {
        this.mediaControl.Stop();
    }

    // Hide and orphan the video window so it stops painting into
    // the (possibly disappearing) owner control.
    if (this.videoWindow != null)
    {
        this.videoWindow.put_Visible(0);
        this.videoWindow.put_Owner(IntPtr.Zero);
        this.videoWindow = null;
    }

    // NOTE(review): the null check uses the PreviewWindow property while
    // the unhook uses the previewWindow field — presumably the same
    // control; confirm before unifying.
    if (this.PreviewWindow != null)
    {
        this.previewWindow.Resize -= new EventHandler(this.onPreviewWindowResize);
    }

    if (this.graphState >= GraphState.Rendered)
    {
        this.graphState = GraphState.Created;
        this.isCaptureRendered = false;
        this.isPreviewRendered = false;

        // Remove everything connected downstream of each capture device;
        // the boolean argument mirrors whether a compressor is configured
        // (see removeDownstream for its exact meaning).
        if (this.videoDeviceFilter != null)
        {
            this.removeDownstream(this.videoDeviceFilter, this.videoCompressor == null);
        }
        if (this.audioDeviceFilter != null)
        {
            this.removeDownstream(this.audioDeviceFilter, this.audioCompressor == null);
        }

        this.muxFilter = null;
        this.fileWriterFilter = null;
    }
}
/// <summary> Release the graph and all overlay drawing resources. </summary>
public void Dispose()
{
    ConsoleLogger.logMessage("In Dispose");

    // Mark stopped before tearing down the DirectShow interfaces.
    m_State = GraphState.Stopped;
    CloseInterfaces();

    // Dispose overlay resources; each is guarded because it may never
    // have been created.
    if (bitmapOverlay != null)
    {
        bitmapOverlay.Dispose();
    }
    if (fontOverlay != null)
    {
        fontOverlay.Dispose();
    }
    if (transparentBrush != null)
    {
        transparentBrush.Dispose();
    }
    if (transparentFont != null)
    {
        transparentFont.Dispose();
    }
}
/// <summary>
/// Stop the current capture. If there is no current capture this
/// method still succeeds. A wanted preview stream is rebuilt afterwards.
/// </summary>
public void Stop()
{
    // The whole graph must be stopped: capture and preview streams
    // cannot be stopped independently, and getting rid of the capture
    // stream requires re-rendering the graph anyway.
    if (mediaControl != null)
    {
        mediaControl.Stop();
    }

    wantCaptureRendered = false;
    wantPreviewRendered = true;

    // Move back to the Rendered state and notify listeners.
    if (graphState == GraphState.Capturing)
    {
        graphState = GraphState.Rendered;
        if (CaptureComplete != null)
        {
            CaptureComplete(this, null);
        }
    }

    // Reset so the next frame capture starts cleanly.
    firstFrame = true;

    // Destroy the capture stream only if a preview stream is needed;
    // otherwise the graph is left as it is.
    renderStream = false;

    // Best effort: failures here must not prevent Stop() from succeeding.
    try
    {
        renderGraph();
    }
    catch
    {
    }
    try
    {
        startPreviewIfNeeded();
    }
    catch
    {
    }
}
/// <summary> Pause the capture graph; a no-op unless it is running. </summary>
public void Pause()
{
    if (m_State != GraphState.Running)
    {
        return;
    }

    IMediaControl mediaCtrl = (IMediaControl)m_FilterGraph;
    int hr = mediaCtrl.Pause();
    DsError.ThrowExceptionForHR(hr);

    m_State = GraphState.Paused;
}
/// <summary> Begin capturing to the configured file. </summary>
public void Start()
{
    // Starting while already capturing is an error.
    assertStopped();

    // Request the capture stream and re-render the graph if necessary.
    wantCaptureRendered = true;
    renderGraph();

    // Start the filter graph; failure HRESULTs become exceptions
    // (Marshal.ThrowExceptionForHR ignores positive success codes).
    int hr = mediaControl.Run();
    if (hr != 0)
    {
        Marshal.ThrowExceptionForHR(hr);
    }

    graphState = GraphState.Capturing;
}
/// <summary> Stop the capture graph; a no-op unless it is running or paused. </summary>
public void Stop()
{
    // Stopping is only meaningful from Running or Paused.
    if (m_State != GraphState.Running && m_State != GraphState.Paused)
    {
        return;
    }

    IMediaControl mediaCtrl = (IMediaControl)m_FilterGraph;
    int hr = mediaCtrl.Stop();
    DsError.ThrowExceptionForHR(hr);

    m_State = GraphState.Stopped;
}
/// <summary>
/// Stop the current capture. If there is no current capture this
/// method still succeeds.
/// </summary>
public void Stop()
{
    wantCaptureRendered = false;

    // The whole graph must be stopped: capture and preview streams
    // cannot be stopped independently, and removing the capture stream
    // requires re-rendering the graph anyway.
    if (mediaControl != null)
    {
        mediaControl.Stop();
    }

    // Move back to the Rendered state and notify listeners.
    if (graphState == GraphState.Capturing)
    {
        graphState = GraphState.Rendered;
        if (CaptureComplete != null)
        {
            CaptureComplete(this, null);
        }
    }

    // Destroy the capture stream only if a preview stream is needed;
    // otherwise the graph is left as it is. Best effort: failures here
    // must not prevent Stop() from succeeding.
    try
    {
        renderGraph();
    }
    catch
    {
    }
    try
    {
        startPreviewIfNeeded();
    }
    catch
    {
    }
}
/// <summary> Release everything held by the graph. </summary>
public void Dispose()
{
    // Tear down the filter graph and its COM interfaces first, then
    // flag the object as shutting down.
    CloseInterfaces();
    m_State = GraphState.Exiting;
}
// --------------------- Private Methods -----------------------

/// <summary>
/// Create a new filter graph and add filters (devices, compressors,
/// misc), but leave the filters unconnected. Call renderGraph()
/// to connect the filters.
/// The DSHOWNET conditional selects between the legacy DShowNET
/// interop calls (ref-Guid overloads) and the DirectShowLib-style
/// calls (DsGuid overloads).
/// </summary>
protected void createGraph()
{
    Guid cat;
    Guid med;
    int hr;

    // Ensure required properties are set
    if ( videoDevice == null && audioDevice == null )
        throw new ArgumentException( "The video and/or audio device have not been set. Please set one or both to valid capture devices.\n" );

    // Skip if we are already created
    if ( (int)graphState < (int)GraphState.Created )
    {
        // Garbage collect, ensure that previous filters are released
        GC.Collect();

        // Make a new filter graph
#if DSHOWNET
        graphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

        // Get the Capture Graph Builder
        Guid clsid = Clsid.CaptureGraphBuilder2;
        Guid riid = typeof(ICaptureGraphBuilder2).GUID;
        captureGraphBuilder = (ICaptureGraphBuilder2)DsBugWO.CreateDsInstance(ref clsid, ref riid);
#else
        FilterGraph graph = new FilterGraph();
        graphBuilder = (IGraphBuilder)graph;

        // Get the Capture Graph Builder
        captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
#endif

        // Link the CaptureGraphBuilder to the filter graph
        hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
        if (hr < 0) Marshal.ThrowExceptionForHR(hr);

        // Add the graph to the Running Object Table so it can be
        // viewed with GraphEdit
#if DEBUG
#if DSHOWNET
        DsROT.AddGraphToRot(graphBuilder, out rotCookie);
#else
        rotCookie = new DsROTEntry(graphBuilder);
#endif
#endif

        // Get the video device and add it to the filter graph
        if ( VideoDevice != null )
        {
            videoDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( VideoDevice.MonikerString );
            hr = graphBuilder.AddFilter( videoDeviceFilter, "Video Capture Device" );
            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Get the audio device and add it to the filter graph
        if ( AudioDevice != null )
        {
            audioDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( AudioDevice.MonikerString );
            hr = graphBuilder.AddFilter( audioDeviceFilter, "Audio Capture Device" );
            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Get the video compressor and add it to the filter graph
        if ( VideoCompressor != null )
        {
            videoCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( VideoCompressor.MonikerString );
            hr = graphBuilder.AddFilter( videoCompressorFilter, "Video Compressor" );
            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Get the audio compressor and add it to the filter graph
        if ( AudioCompressor != null )
        {
            audioCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( AudioCompressor.MonikerString );
            hr = graphBuilder.AddFilter( audioCompressorFilter, "Audio Compressor" );
            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Retrieve the stream control interface for the video device.
        // FindInterface will also add any required filters
        // (WDM devices in particular may need additional
        // upstream filters to function).

        // Try looking for an interleaved media type first
        object o;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        Guid iid = typeof(IAMStreamConfig).GUID;
#if DSHOWNET
        hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
        hr = captureGraphBuilder.FindInterface( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
        if ( hr != 0 )
        {
            // If not found, try looking for a video media type
            med = MediaType.Video;
#if DSHOWNET
            hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
            hr = captureGraphBuilder.FindInterface( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
            if ( hr != 0 )
                o = null;
        }
        videoStreamConfig = o as IAMStreamConfig;

        // #if NEWCODE
        // Start of new Brian's Low code
        // Retrieve the stream control interface for the video device's
        // preview pin as well (same fallback: interleaved, then video).
        o = null;
        cat = PinCategory.Preview;
        med = MediaType.Interleaved;
        iid = typeof(IAMStreamConfig).GUID;
#if DSHOWNET
        hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
        hr = captureGraphBuilder.FindInterface( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
        if ( hr != 0 )
        {
            // If not found, try looking for a video media type
            med = MediaType.Video;
#if DSHOWNET
            hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
            hr = captureGraphBuilder.FindInterface( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
            if ( hr != 0 )
                o = null;
        }
        this.previewStreamConfig = o as IAMStreamConfig;
        // End of new Brian's Low code

        // Set up DxUtils when at least one stream-config interface exists;
        // it is discarded again if it cannot be initialized.
        if( (this.videoStreamConfig != null)|| (this.previewStreamConfig != null) )
        {
            this.dxUtils = new DxUtils();
            bool result = this.dxUtils.InitDxUtils(this.videoDeviceFilter);
            // NOTE(review): discards dxUtils only when BOTH init and
            // FindMediaData fail — confirm '&&' (vs '||') is intended.
            if((!result)&&(!this.dxUtils.FindMediaData(this.videoStreamConfig)))
            {
                this.dxUtils.Dispose();
                this.dxUtils = null;
            }
        }
        // #endif

        // Retrieve the stream control interface for the audio device
        o = null;
        cat = PinCategory.Capture;
        med = MediaType.Audio ;
        iid = typeof(IAMStreamConfig).GUID;
        // With AudioViaPci and no separate audio device, the audio pins
        // are queried on the video device filter instead.
        if( (this.AudioViaPci)&& (audioDeviceFilter == null)&&(videoDeviceFilter != null) )
        {
            hr = captureGraphBuilder.FindInterface(
#if DSHOWNET
                ref cat, ref med, videoDeviceFilter, ref iid, out o );
#else
                DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o);
#endif
        }
        else
        {
#if DSHOWNET
            hr = captureGraphBuilder.FindInterface( ref cat, ref med, audioDeviceFilter, ref iid, out o);
#else
            hr = captureGraphBuilder.FindInterface( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), audioDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
        }
        if (hr != 0)
            o = null;
        audioStreamConfig = o as IAMStreamConfig;

        // Retreive the media control interface (for starting/stopping graph)
        mediaControl = (IMediaControl) graphBuilder;

        // Reload any video crossbars
        if ( videoSources != null )
            videoSources.Dispose();
        videoSources = null;

        // Reload any audio crossbars
        if ( audioSources != null )
            audioSources.Dispose();
        audioSources = null;

        // Reload any property pages exposed by filters
        this.PropertyPages = null;

        // Reload capabilities of video device
        videoCaps = null;
        previewCaps = null;

        // Reload capabilities of audio device
        audioCaps = null;

        // Retrieve TV Tuner if available (interleaved first, then video)
        o = null;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        iid = typeof(IAMTVTuner).GUID;
#if DSHOWNET
        hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
        hr = captureGraphBuilder.FindInterface( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
        if ( hr != 0 )
        {
            med = MediaType.Video ;
#if DSHOWNET
            hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
            hr = captureGraphBuilder.FindInterface( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
            if ( hr != 0 )
                o = null;
        }
        IAMTVTuner t = o as IAMTVTuner;
        if ( t != null )
        {
            tuner = new Tuner(t);
            // Do not forget to set proper country code (Netherlands is 31)
        }

        // No check on TV Audio needed, it will show up in the
        // PropertyPages when it is available.
        // Code for finding the TV audio interface
        o = null;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        iid = typeof(IAMTVAudio).GUID;
        hr = captureGraphBuilder.FindInterface(
#if DSHOWNET
            ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
            cat, med, videoDeviceFilter, iid, out o);
#endif
        if ( hr != 0 )
        {
            med = MediaType.Video;
#if DSHOWNET
            hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
            hr = captureGraphBuilder.FindInterface( cat, med, videoDeviceFilter, iid, out o);
#endif
            if ( hr != 0 )
            {
                o = null;
            }
        }
        // Only hook up TV audio when a tuner was actually found.
        if((o != null)&&(tuner != null))
        {
            IAMTVAudio a = o as IAMTVAudio;
            TvAudio = a;
#if DEBUG
            Debug.WriteLine("FindInterface tuner.TvAudio");
#endif // DEBUG
        }

        // NOTE: a large commented-out experimental VMR9 + SmartTee preview
        // path (dead code) was removed from this spot; see source history.

        // Update the state now that we are done
        graphState = GraphState.Created;
    }
}
/// <summary> Start playback; a no-op unless currently stopped or paused. </summary>
public void Start()
{
    // Ignore the request while running or shutting down.
    if (m_State != GraphState.Stopped && m_State != GraphState.Paused)
    {
        return;
    }

    // Restart the frame count for this run.
    m_FrameCounter = 0;

    int hr = m_mediaCtrl.Run();
    DsError.ThrowExceptionForHR(hr);

    m_State = GraphState.Running;
}
/// <summary>
/// Connects the filters of a previously created graph
/// (created by createGraph()). Once rendered the graph
/// is ready to be used. This method may also destroy
/// streams if we have streams we no longer want.
/// Supports Avi (with optional video compressor) and Asf (Wmv/Wma)
/// recording, optional audio-via-PCI routing, and an optional
/// sample grabber in the preview path.
/// </summary>
protected void renderGraph()
{
    Guid cat;
    Guid med;
    int hr;
    bool didSomething = false;

#if DSHOWNET
    // Win32 styles for the embedded video window (DirectShowLib builds
    // use the WindowStyle enum instead).
    const int WS_CHILD = 0x40000000;
    const int WS_CLIPCHILDREN = 0x02000000;
    const int WS_CLIPSIBLINGS = 0x04000000;
#endif

    assertStopped();

    // Ensure required properties set
    if ( filename == null )
        throw new ArgumentException( "The Filename property has not been set to a file.\n" );

    // Stop the graph
    if ( mediaControl != null )
        mediaControl.Stop();

    // Create the graph if needed (group should already be created)
    createGraph();

    // Derender the graph if we have a capture or preview stream
    // that we no longer want. We can't derender the capture and
    // preview streams seperately.
    // Notice the second case will leave a capture stream intact
    // even if we no longer want it. This allows the user that is
    // not using the preview to Stop() and Start() without
    // rerendering the graph.
    if ( !wantPreviewRendered && isPreviewRendered )
        derenderGraph();
    if ( !wantCaptureRendered && isCaptureRendered )
        if ( wantPreviewRendered )
            derenderGraph();

    // Render capture stream (only if necessary)
    if ( wantCaptureRendered && !isCaptureRendered )
    {
        // Render the file writer portion of graph (mux -> file)
        // Record captured audio/video in Avi, Wmv or Wma format
        Guid mediaSubType; // Media sub type
        bool captureAudio = true;
        bool captureVideo = true;
        IBaseFilter videoCompressorfilter = null;

        // Set media sub type and video compressor filter if needed
        if(RecFileMode == RecFileModeType.Avi)
        {
            mediaSubType = MediaSubType.Avi;
            // For Avi file saving a video compressor must be used
            // If one is selected, that one will be used.
            videoCompressorfilter = videoCompressorFilter;
        }
        else
        {
            mediaSubType = MediaSubType.Asf;
        }

        // Intialize the Avi or Asf file writer
#if DSHOWNET
        hr = captureGraphBuilder.SetOutputFileName(ref mediaSubType, Filename, out muxFilter, out fileWriterFilter);
#else
        hr = captureGraphBuilder.SetOutputFileName(mediaSubType, Filename, out muxFilter, out fileWriterFilter);
#endif
        if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        // For Wma (and Wmv) a suitable profile must be selected. This
        // can be done via a property window, however the muxFilter is
        // just created. if needed, the property windows should show up
        // right now!
        // Another solution is to configure the Asf file writer, the
        // use interface must ensure the proper format has been
        // selected.
        if((RecFileMode == RecFileModeType.Wma)|| (RecFileMode == RecFileModeType.Wmv))
        {
            if(this.AsfFormat != null)
            {
                this.AsfFormat.UpdateAsfAVFormat(this.muxFilter);
                this.AsfFormat.GetCurrentAsfAVInfo(out captureAudio, out captureVideo);
            }
        }

        // Render video (video -> mux) if needed or possible
        if((VideoDevice != null)&&(captureVideo))
        {
            // Try interleaved first, because if the device supports it,
            // it's the only way to get audio as well as video
            cat = PinCategory.Capture;
            med = MediaType.Interleaved;
#if DSHOWNET
            hr = captureGraphBuilder.RenderStream(ref cat, ref med, videoDeviceFilter, videoCompressorfilter, muxFilter);
#else
            hr = captureGraphBuilder.RenderStream( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, videoCompressorFilter, muxFilter );
#endif
            if( hr < 0 )
            {
                // Fall back to a plain video pin.
                med = MediaType.Video;
#if DSHOWNET
                hr = captureGraphBuilder.RenderStream(ref cat, ref med, videoDeviceFilter, videoCompressorfilter, muxFilter);
#else
                hr = captureGraphBuilder.RenderStream( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, videoCompressorFilter, muxFilter );
#endif
                // This specific HRESULT indicates the device is held by
                // another application.
                if ( hr == -2147220969 ) throw new DeviceInUseException( "Video device", hr );
                if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
            }
        }

        // Render audio (audio -> mux) if possible
        if((audioDeviceFilter != null)&&(captureAudio))
        {
            // If this Asf file format than please keep in mind that
            // certain Wmv formats do not have an audio stream, so
            // when using this code, please ensure you use a format
            // which supports audio!
            cat = PinCategory.Capture;
            med = MediaType.Audio;
#if DSHOWNET
            hr = captureGraphBuilder.RenderStream( ref cat, ref med, audioDeviceFilter, audioCompressorFilter, muxFilter );
#else
            hr = captureGraphBuilder.RenderStream( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), audioDeviceFilter, audioCompressorFilter, muxFilter );
#endif
            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }
        else if( (this.AudioViaPci)&&(captureAudio)&& (audioDeviceFilter == null)&&(videoDeviceFilter != null) )
        {
            // Audio routed via the PCI bus: the audio pins live on the
            // video device filter.
            cat = PinCategory.Capture;
            med = MediaType.Audio;
#if DSHOWNET
            hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, audioCompressorFilter, muxFilter );
#else
            hr = captureGraphBuilder.RenderStream(cat, med, videoDeviceFilter, audioCompressorFilter, muxFilter);
#endif
            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        isCaptureRendered = true;
        didSomething = true;
    }

    // Render preview stream (only if necessary)
    if ( wantPreviewRendered && !isPreviewRendered )
    {
        // Render preview (video -> renderer)
        this.InitVideoRenderer();
        this.AddDeInterlaceFilter();

        // When capture pin is used, preview works immediately,
        // however this conflicts with file saving.
        // An alternative is to use VMR9
        cat = PinCategory.Preview;
        med = MediaType.Video;

        // #if NEWCODE
        // Insert the sample grabber into the preview path when available.
        if(this.InitSampleGrabber())
        {
            Debug.WriteLine("SampleGrabber added to graph.");
#if DSHOWNET
            hr = captureGraphBuilder.RenderStream(ref cat, ref med, videoDeviceFilter, this.baseGrabFlt, this.videoRendererFilter);
#else
            hr = captureGraphBuilder.RenderStream(DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, this.baseGrabFlt, this.videoRendererFilter);
#endif
            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }
        else // #endif NEWCODE
        {
#if DSHOWNET
            hr = captureGraphBuilder.RenderStream(ref cat, ref med, videoDeviceFilter, null, this.videoRendererFilter);
#else
            hr = captureGraphBuilder.RenderStream(DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, null, this.videoRendererFilter);
#endif
            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Special option to enable rendering audio via PCI bus
        if((this.AudioViaPci)&&(audioDeviceFilter != null))
        {
            cat = PinCategory.Preview;
            med = MediaType.Audio;
#if DSHOWNET
            hr = captureGraphBuilder.RenderStream( ref cat, ref med, audioDeviceFilter, null, null );
#else
            hr = captureGraphBuilder.RenderStream(DsGuid.FromGuid(cat), DsGuid.FromGuid(med), audioDeviceFilter, null, null);
#endif
            if( hr < 0 )
            {
                Marshal.ThrowExceptionForHR( hr );
            }
        }
        else if( (this.AudioViaPci)&& (this.audioDeviceFilter == null)&&(this.videoDeviceFilter != null) )
        {
            // Audio pins live on the video device filter in this setup.
            cat = PinCategory.Preview;
            med = MediaType.Audio;
#if DSHOWNET
            hr = captureGraphBuilder.RenderStream( ref cat, ref med, videoDeviceFilter, null, null );
#else
            hr = captureGraphBuilder.RenderStream(cat, med, videoDeviceFilter, null, null);
#endif
            if( hr < 0 )
            {
                Marshal.ThrowExceptionForHR( hr );
            }
        }

        // Get the IVideoWindow interface
        videoWindow = (IVideoWindow) graphBuilder;

        // Set the video window to be a child of the main window
        hr = videoWindow.put_Owner( previewWindow.Handle );
        if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        // Set video window style
#if DSHOWNET
        hr = videoWindow.put_WindowStyle( WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
#else
        hr = videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings);
#endif
        if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        // Position video window in client rect of owner window
        previewWindow.Resize += new EventHandler( onPreviewWindowResize );
        onPreviewWindowResize( this, null );

        // Make the video window visible, now that it is properly positioned
#if DSHOWNET
        hr = videoWindow.put_Visible( DsHlp.OATRUE );
#else
        hr = videoWindow.put_Visible( OABool.True );
#endif
        if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        isPreviewRendered = true;
        didSomething = true;

        // #if NEWCODE
        SetMediaSampleGrabber();
        // #endif NEWCODE
    }

    if ( didSomething )
        graphState = GraphState.Rendered;
}
// Shut down capture
/// <summary>
/// Tears down the capture graph: moves the object into the Exiting
/// state, wakes the worker thread (if one was started), stops the
/// media control, and releases every COM interface this class holds.
/// Safe to call more than once.
/// </summary>
private void CloseInterfaces()
{
    // NOTE(review): lock(this) is fragile (external code can take the
    // same lock); a private readonly gate object at class level would
    // be safer, but that change is outside this method's scope.
    lock (this)
    {
        // Transition to Exiting exactly once, releasing the worker
        // thread so it can observe the new state and unwind.
        if (m_State != GraphState.Exiting)
        {
            m_State = GraphState.Exiting;

            // Release the thread (if the thread was started)
            if (m_mre != null)
            {
                m_mre.Set();
            }
        }

        // Stop the graph. The HRESULT is deliberately ignored: we are
        // shutting down no matter what Stop() reports.
        if (m_mediaCtrl != null)
        {
            m_mediaCtrl.Stop();
            m_mediaCtrl = null;
        }

        // Drop the COM references we own.
        if (m_sampGrabber != null)
        {
            Marshal.ReleaseComObject(m_sampGrabber);
            m_sampGrabber = null;
        }

        if (m_FilterGraph != null)
        {
            Marshal.ReleaseComObject(m_FilterGraph);
            m_FilterGraph = null;
        }
    }

    // Encourage collection of any remaining runtime-callable wrappers.
    GC.Collect();
}
/// <summary>
/// Disconnect and remove all filters except the device
/// and compressor filters. This is the opposite of
/// renderGraph(). Some properties such as FrameRate
/// can only be set when the device output pins are not
/// connected.
/// </summary>
protected void derenderGraph()
{
    // Stop the graph if it is running (errors are ignored on teardown).
    if (mediaControl != null)
    {
        mediaControl.Stop();
    }

    // Hide the preview window and release ownership of it
    // (errors are ignored).
    if (videoWindow != null)
    {
#if DSHOWNET
        videoWindow.put_Visible(DsHlp.OAFALSE);
#else
        videoWindow.put_Visible(OABool.False);
#endif
        videoWindow.put_Owner(IntPtr.Zero);
        videoWindow = null;
    }

    // Stop tracking resize events for the now-unowned preview window.
    if (PreviewWindow != null)
    {
        previewWindow.Resize -= new EventHandler(onPreviewWindowResize);
    }

    // Nothing further to undo unless the graph was at least rendered.
    if ((int)graphState < (int)GraphState.Rendered)
    {
        return;
    }

    // Fall back to the bare Created state.
    graphState = GraphState.Created;
    isCaptureRendered = false;
    isPreviewRendered = false;

    // Disconnect all filters downstream of the video and audio
    // devices. A compressor, when present, is disconnected but
    // kept in the graph (the 'false' removeFirstFilter argument).
    if (videoDeviceFilter != null)
    {
        try
        {
            removeDownstream(videoDeviceFilter, (videoCompressor == null));
        }
        catch
        {
            Debug.WriteLine("Error removeDownstream videoDeviceFilter");
        }
    }
    if (audioDeviceFilter != null)
    {
        try
        {
            removeDownstream(audioDeviceFilter, (audioCompressor == null));
        }
        catch
        {
            Debug.WriteLine("Error removeDownstream audioDeviceFilter");
        }
    }

    // These filters should have been removed by the
    // calls above. (Is there anyway to check?)
    muxFilter = null;
    fileWriterFilter = null;
    this.videoRendererFilter = null;
    this.deInterlaceFilter = null;
}
/// <summary>
/// Disconnect and remove all filters except the device
/// and compressor filters. This is the opposite of
/// renderGraph(). Some properties such as FrameRate
/// can only be set when the device output pins are not
/// connected.
/// </summary>
protected void derenderGraph()
{
    // Best effort: stop the graph when it is running (ignore errors).
    if (mediaControl != null)
    {
        mediaControl.Stop();
    }

    // Hide the preview window and give up ownership of it
    // (ignore errors).
    if (videoWindow != null)
    {
        videoWindow.put_Visible(DsHlp.OAFALSE);
        videoWindow.put_Owner(IntPtr.Zero);
        videoWindow = null;
    }

    // NOTE(review): graph-notify teardown (mediaEvt.SetNotifyWindow with
    // IntPtr.Zero) was previously disabled at this point — confirm the
    // owning window unregisters WM_GRAPHNOTIFY elsewhere.

    // Stop tracking resize events for the now-unowned preview window.
    if (PreviewWindow != null)
    {
        previewWindow.Resize -= new EventHandler(onPreviewWindowResize);
    }

    if ((int)graphState >= (int)GraphState.Rendered)
    {
        // Fall back to the bare Created state.
        graphState = GraphState.Created;
        isCaptureRendered = false;
        isPreviewRendered = false;

        // Disconnect all filters downstream of the video and audio
        // devices. A compressor, when present, is disconnected but
        // kept in the graph.
        if (videoDeviceFilter != null)
        {
            removeDownstream(videoDeviceFilter, (videoCompressor == null));
        }
        if (audioDeviceFilter != null)
        {
            removeDownstream(audioDeviceFilter, (audioCompressor == null));
        }

        // These filters should have been removed by the
        // calls above. (Is there anyway to check?)
        muxFilter = null;
        fileWriterFilter = null;
        baseGrabFlt = null;
    }
}
/// <summary>
/// Stops an in-progress capture: halts the graph, fires
/// CaptureComplete when a capture was active, then rebuilds the
/// graph and restarts previewing on a best-effort basis.
/// Never throws.
/// </summary>
public void Stop()
{
    this.wantCaptureRendered = false;

    // Halt the running graph (if any) before changing state.
    if (this.mediaControl != null)
    {
        this.mediaControl.Stop();
    }

    // If a capture was active, drop back to Rendered and notify
    // any listeners that the capture is finished.
    if (this.graphState == GraphState.Capturing)
    {
        this.graphState = GraphState.Rendered;
        this.CaptureComplete?.Invoke(this, null);
    }

    // Best effort: failures while rebuilding the graph or restarting
    // the preview are deliberately swallowed so Stop() never throws.
    try { this.renderGraph(); } catch { }
    try { this.startPreviewIfNeeded(); } catch { }
}