/// <summary>
/// Connects the filters of a previously created graph
/// (created by CreateGraph()). Once rendered the graph
/// is ready to be used. This method may also destroy
/// streams if we have streams we no longer want.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when the preview window has no underlying HWND yet
/// (e.g. it has not been loaded/shown).
/// </exception>
protected void RenderGraph()
{
    var didSomething = false;

    // Win32 window styles used to host the DirectShow video window
    // as a clipped child of the WPF preview window's HWND.
    const int WS_CHILD = 0x40000000;
    const int WS_CLIPCHILDREN = 0x02000000;
    const int WS_CLIPSIBLINGS = 0x04000000;

    // Stop the graph before (re)wiring any filters.
    MediaControl?.Stop();

    // Create the graph if needed (group should already be created)
    CreateGraph();

    // Derender the graph if we have a capture or preview stream
    // that we no longer want. We can't derender the capture and
    // preview streams separately.
    // Notice the second case will leave a capture stream intact
    // even if we no longer want it. This allows the user that is
    // not using the preview to Stop() and Start() without
    // rerendering the graph.
    if (!WantPreviewRendered && IsPreviewRendered)
    {
        DerenderGraph();
    }

    // Render preview stream (only if necessary)
    if (WantPreviewRendered && !IsPreviewRendered)
    {
        // Render preview (video -> renderer)
        var cat = Uuid.PinCategory.Preview;
        var med = Uuid.MediaType.Video;
        var hr = CaptureGraphBuilder.RenderStream(ref cat, ref med, VideoDeviceFilter, _baseGrabFlt, null);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Get the IVideoWindow interface
        VideoWindow = (ControlStreaming.IVideoWindow)GraphBuilder;

        // Set the video window to be a child of the main window.
        // FIX: the result of the "as HwndSource" cast was previously
        // dereferenced without a null check, producing an opaque
        // NullReferenceException when PreviewWindow had no HWND yet.
        var source = PresentationSource.FromVisual(PreviewWindow) as HwndSource;
        if (source == null)
        {
            throw new InvalidOperationException(
                "PreviewWindow is not backed by an HwndSource; ensure it is loaded before rendering the graph.");
        }
        hr = VideoWindow.put_Owner(source.Handle);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Set video window style
        hr = VideoWindow.put_WindowStyle(WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Position video window in client rect of owner window.
        // FIX: unsubscribe first so repeated render/derender cycles do not
        // accumulate duplicate SizeChanged handlers (removing a handler
        // that is not subscribed is a no-op).
        PreviewWindow.SizeChanged -= OnPreviewWindowResize;
        PreviewWindow.SizeChanged += OnPreviewWindowResize;
        OnPreviewWindowResize(this, null);

        // Make the video window visible, now that it is properly positioned
        hr = VideoWindow.put_Visible(CoreStreaming.DsHlp.OATRUE);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        IsPreviewRendered = true;
        didSomething = true;

        // Capture the negotiated media format so frame grabbing knows the
        // actual width/height/stride of the connected stream.
        var media = new CoreStreaming.AMMediaType();
        hr = SampGrabber.GetConnectedMediaType(media);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        if ((media.formatType != Uuid.FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
        {
            throw new NotSupportedException("Unknown Grabber Media Format");
        }

        _videoInfoHeader = (EditStreaming.VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(EditStreaming.VideoInfoHeader));
        // The format block is allocated by DirectShow with CoTaskMemAlloc;
        // release it now that we have copied it into a managed struct.
        Marshal.FreeCoTaskMem(media.formatPtr);
        media.formatPtr = IntPtr.Zero;
    }

    if (didSomething)
    {
        ActualGraphState = GraphState.Rendered;
    }
}
/// <summary>
/// Create a new filter graph and add filters (devices, compressors, misc),
/// but leave the filters unconnected. Call RenderGraph()
/// to connect the filters.
/// </summary>
/// <exception cref="NotImplementedException">
/// Thrown when the DirectShow SampleGrabber COM class is not registered.
/// </exception>
protected void CreateGraph()
{
    // Skip if already created
    if ((int)ActualGraphState < (int)GraphState.Created)
    {
        // Make a new filter graph
        GraphBuilder = (ExtendStreaming.IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Uuid.Clsid.FilterGraph, true));

        // Get the Capture Graph Builder
        var clsid = Uuid.Clsid.CaptureGraphBuilder2;
        var riid = typeof(ExtendStreaming.ICaptureGraphBuilder2).GUID;
        CaptureGraphBuilder = (ExtendStreaming.ICaptureGraphBuilder2)Workaround.CreateDsInstance(ref clsid, ref riid);

        // Link the CaptureGraphBuilder to the filter graph
        var hr = CaptureGraphBuilder.SetFiltergraph(GraphBuilder);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Create the sample grabber filter used to pull frames out of the stream.
        var comType = Type.GetTypeFromCLSID(Uuid.Clsid.SampleGrabber);
        if (comType == null)
        {
            throw new NotImplementedException(@"DirectShow SampleGrabber not installed/registered!");
        }
        SampGrabber = (EditStreaming.ISampleGrabber)Activator.CreateInstance(comType);
        _baseGrabFlt = (CoreStreaming.IBaseFilter)SampGrabber;

        var media = new CoreStreaming.AMMediaType();

        // Get the video device and add it to the filter graph
        if (VideoDevice != null)
        {
            VideoDeviceFilter = (CoreStreaming.IBaseFilter)Marshal.BindToMoniker(VideoDevice.MonikerString);
            hr = GraphBuilder.AddFilter(VideoDeviceFilter, "Video Capture Device");
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Ask the grabber for 32-bit RGB video frames.
            media.majorType = Uuid.MediaType.Video;
            media.subType = Uuid.MediaSubType.RGB32; //RGB24;
            media.formatType = Uuid.FormatType.VideoInfo;
            media.temporalCompression = true; //New
            hr = SampGrabber.SetMediaType(media);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            hr = GraphBuilder.AddFilter(_baseGrabFlt, "Grabber");
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }

        // Retrieve the stream control interface for the video device.
        // FindInterface will also add any required filters
        // (WDM devices in particular may need additional
        // upstream filters to function).

        // Try looking for an interleaved media type
        object o;
        var cat = Uuid.PinCategory.Capture;
        var med = Uuid.MediaType.Interleaved;
        var iid = typeof(ExtendStreaming.IAMStreamConfig).GUID;
        hr = CaptureGraphBuilder.FindInterface(ref cat, ref med, VideoDeviceFilter, ref iid, out o);
        if (hr != 0)
        {
            // If not found, try looking for a video media type
            med = Uuid.MediaType.Video;
            hr = CaptureGraphBuilder.FindInterface(ref cat, ref med, VideoDeviceFilter, ref iid, out o);
            if (hr != 0)
            {
                o = null;
            }
        }
        VideoStreamConfig = o as ExtendStreaming.IAMStreamConfig;

        // Retrieve the media control interface (for starting/stopping graph)
        MediaControl = (ControlStreaming.IMediaControl)GraphBuilder;

        // Reload any video crossbars
        //if (videoSources != null) videoSources.Dispose();
        videoSources = null;

        // FIX: media.formatPtr is never populated in this method (it is only
        // filled in by GetConnectedMediaType after the graph is rendered), so
        // the previous unconditional Marshal.PtrToStructure call operated on
        // IntPtr.Zero. Guard the read; RenderGraph() sets _videoInfoHeader
        // from the connected media type once the stream is wired up.
        if (media.formatPtr != IntPtr.Zero)
        {
            _videoInfoHeader = (EditStreaming.VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(EditStreaming.VideoInfoHeader));
            Marshal.FreeCoTaskMem(media.formatPtr);
            media.formatPtr = IntPtr.Zero;
        }

        // Configure the grabber: no sample buffering, continuous (not
        // one-shot) grabbing, and no sample callback installed yet.
        hr = SampGrabber.SetBufferSamples(false);
        if (hr == 0)
        {
            hr = SampGrabber.SetOneShot(false);
        }
        if (hr == 0)
        {
            hr = SampGrabber.SetCallback(null, 0);
        }
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
    }

    // Update the state now that we are done.
    // NOTE(review): this assignment also runs when creation was skipped, so a
    // graph already in the Rendered state is downgraded to Created here —
    // confirm callers (e.g. RenderGraph) always re-render afterwards.
    ActualGraphState = GraphState.Created;
}