private void SetResolution(IAMStreamConfig streamConfig, VideoCapabilities resolution)
{
    if (!(resolution == null))
    {
        int count = 0;
        int size = 0;
        AMMediaType mediaType = null;
        VideoStreamConfigCaps streamConfigCaps = new VideoStreamConfigCaps();

        streamConfig.GetNumberOfCapabilities(out count, out size);

        for (int i = 0; i < count; i++)
        {
            try
            {
                VideoCapabilities b = new VideoCapabilities(streamConfig, i);

                if (resolution == b && streamConfig.GetStreamCaps(i, out mediaType, streamConfigCaps) == 0)
                {
                    break;
                }
            }
            catch
            {
            }
        }

        if (mediaType != null)
        {
            streamConfig.SetFormat(mediaType);
            mediaType.Dispose();
        }
    }
}
// Retrieve capabilities of a video device
internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index)
{
    AMMediaType mediaType = null;
    VideoStreamConfigCaps caps = new VideoStreamConfigCaps( );

    try
    {
        // retrieve capabilities struct at the specified index
        int hr = videoStreamConfig.GetStreamCaps(index, out mediaType, caps);

        if (hr != 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // extract info
        FrameSize = caps.InputSize;
        MaxFrameRate = (int)(10000000 / caps.MinFrameInterval);
    }
    finally
    {
        if (mediaType != null)
        {
            mediaType.Dispose( );
        }
    }
}
// Token: 0x06000040 RID: 64 RVA: 0x000033D0 File Offset: 0x000015D0
private void SetResolution(IAMStreamConfig streamConfig, VideoCapabilities resolution)
{
    if (resolution == null)
    {
        return;
    }

    int num = 0;
    int num2 = 0;
    AMMediaType ammediaType = null;
    VideoStreamConfigCaps streamConfigCaps = new VideoStreamConfigCaps();

    streamConfig.GetNumberOfCapabilities(out num, out num2);

    for (int i = 0; i < num; i++)
    {
        try
        {
            VideoCapabilities b = new VideoCapabilities(streamConfig, i);

            if (resolution == b && streamConfig.GetStreamCaps(i, out ammediaType, streamConfigCaps) == 0)
            {
                break;
            }
        }
        catch
        {
        }
    }

    if (ammediaType != null)
    {
        streamConfig.SetFormat(ammediaType);
        ammediaType.Dispose();
    }
}
// Set frame's size and rate for the specified stream configuration
private void SetFrameSizeAndRate(IAMStreamConfig streamConfig, Size size, int frameRate)
{
    bool sizeSet = false;
    AMMediaType mediaType;

    // get current format
    streamConfig.GetFormat(out mediaType);

    // change frame size if required
    if ((size.Width != 0) && (size.Height != 0))
    {
        // iterate through device's capabilities to find mediaType for desired resolution
        int capabilitiesCount = 0, capabilitySize = 0;
        AMMediaType newMediaType = null;
        VideoStreamConfigCaps caps = new VideoStreamConfigCaps();

        streamConfig.GetNumberOfCapabilities(out capabilitiesCount, out capabilitySize);

        for (int i = 0; i < capabilitiesCount; i++)
        {
            if (streamConfig.GetStreamCaps(i, out newMediaType, caps) == 0)
            {
                if (caps.InputSize == size)
                {
                    mediaType.Dispose();
                    mediaType = newMediaType;
                    sizeSet = true;
                    break;
                }
                else
                {
                    newMediaType.Dispose();
                }
            }
        }
    }

    VideoInfoHeader infoHeader = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

    // try changing size manually if failed finding mediaType before
    if ((size.Width != 0) && (size.Height != 0) && (!sizeSet))
    {
        infoHeader.BmiHeader.Width = size.Width;
        infoHeader.BmiHeader.Height = size.Height;
    }

    // change frame rate if required
    if (frameRate != 0)
    {
        infoHeader.AverageTimePerFrame = 10000000 / frameRate;
    }

    // copy the media structure back
    Marshal.StructureToPtr(infoHeader, mediaType.FormatPtr, false);

    // set the new format
    streamConfig.SetFormat(mediaType);
    mediaType.Dispose();
}
// Retrieve capabilities of a video device
internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index)
{
    AMMediaType mediaType = null;
    VideoStreamConfigCaps caps = new VideoStreamConfigCaps( );

    try
    {
        // retrieve capabilities struct at the specified index
        int hr = videoStreamConfig.GetStreamCaps(index, out mediaType, caps);

        if (hr != 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        if (mediaType.FormatType == FormatType.VideoInfo)
        {
            VideoInfoHeader videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

            MediaType = GUID.GetNickname(mediaType.SubType);
            FrameSize = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height);
            BitCount = videoInfo.BmiHeader.BitCount;
            AverageFrameRate = (int)(10000000 / videoInfo.AverageTimePerFrame);
            MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval);
            MinimumFrameRate = (int)(10000000 / caps.MaxFrameInterval);
        }
        else if (mediaType.FormatType == FormatType.VideoInfo2)
        {
            VideoInfoHeader2 videoInfo = (VideoInfoHeader2)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader2));

            MediaType = GUID.GetNickname(mediaType.SubType);
            FrameSize = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height);
            BitCount = videoInfo.BmiHeader.BitCount;
            AverageFrameRate = (int)(10000000 / videoInfo.AverageTimePerFrame);
            MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval);
            MinimumFrameRate = (int)(10000000 / caps.MaxFrameInterval);
        }
        else
        {
            throw new ApplicationException("Unsupported format found.");
        }

        // ignore 12 bpp formats for now, since it was noticed they cause issues on Windows 8
        // TODO: proper fix needs to be done so ICaptureGraphBuilder2::RenderStream() does not fail
        // on such formats
        if (BitCount <= 12)
        {
            //throw new ApplicationException( "Unsupported format found." );
        }
    }
    finally
    {
        if (mediaType != null)
        {
            mediaType.Dispose( );
        }
    }
}
internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index)
{
    AMMediaType mediaType = null;
    VideoStreamConfigCaps caps = new VideoStreamConfigCaps( );

    try
    {
        int hr = videoStreamConfig.GetStreamCaps(index, out mediaType, caps);

        if (hr != 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        if (mediaType.FormatType == FormatType.VideoInfo)
        {
            VideoInfoHeader videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

            FrameSize = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height);
            BitCount = videoInfo.BmiHeader.BitCount;
            AverageFrameRate = (int)(10000000 / videoInfo.AverageTimePerFrame);
            MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval);
        }
        else if (mediaType.FormatType == FormatType.VideoInfo2)
        {
            VideoInfoHeader2 videoInfo = (VideoInfoHeader2)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader2));

            FrameSize = new Size(videoInfo.BmiHeader.Width, videoInfo.BmiHeader.Height);
            BitCount = videoInfo.BmiHeader.BitCount;
            AverageFrameRate = (int)(10000000 / videoInfo.AverageTimePerFrame);
            MaximumFrameRate = (int)(10000000 / caps.MinFrameInterval);
        }
        else
        {
            throw new ApplicationException("Unsupported format found.");
        }

        if (BitCount <= 12)
        {
            throw new ApplicationException("Unsupported format found.");
        }
    }
    finally
    {
        if (mediaType != null)
        {
            mediaType.Dispose( );
        }
    }
}
// Token: 0x06000027 RID: 39 RVA: 0x00002718 File Offset: 0x00000918
internal VideoCapabilities(IAMStreamConfig videoStreamConfig, int index)
{
    AMMediaType ammediaType = null;
    VideoStreamConfigCaps videoStreamConfigCaps = new VideoStreamConfigCaps();

    try
    {
        int streamCaps = videoStreamConfig.GetStreamCaps(index, out ammediaType, videoStreamConfigCaps);

        if (streamCaps != 0)
        {
            Marshal.ThrowExceptionForHR(streamCaps);
        }

        if (ammediaType.FormatType == FormatType.VideoInfo)
        {
            VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(ammediaType.FormatPtr, typeof(VideoInfoHeader));

            this.FrameSize = new Size(videoInfoHeader.BmiHeader.Width, videoInfoHeader.BmiHeader.Height);
            this.BitCount = (int)videoInfoHeader.BmiHeader.BitCount;
            this.AverageFrameRate = (int)(10000000L / videoInfoHeader.AverageTimePerFrame);
            this.MaximumFrameRate = (int)(10000000L / videoStreamConfigCaps.MinFrameInterval);
        }
        else
        {
            if (!(ammediaType.FormatType == FormatType.VideoInfo2))
            {
                throw new ApplicationException("Unsupported format found.");
            }

            VideoInfoHeader2 videoInfoHeader2 = (VideoInfoHeader2)Marshal.PtrToStructure(ammediaType.FormatPtr, typeof(VideoInfoHeader2));

            this.FrameSize = new Size(videoInfoHeader2.BmiHeader.Width, videoInfoHeader2.BmiHeader.Height);
            this.BitCount = (int)videoInfoHeader2.BmiHeader.BitCount;
            this.AverageFrameRate = (int)(10000000L / videoInfoHeader2.AverageTimePerFrame);
            this.MaximumFrameRate = (int)(10000000L / videoStreamConfigCaps.MinFrameInterval);
        }

        if (this.BitCount <= 12)
        {
            throw new ApplicationException("Unsupported format found.");
        }
    }
    finally
    {
        if (ammediaType != null)
        {
            ammediaType.Dispose();
        }
    }
}
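// The frame rates computed above come from DirectShow time values, which are expressed in
// 100-nanosecond units; that is where the 10000000 constant comes from. The following is a
// minimal, hypothetical helper for the conversion (the class and method names are illustrative
// and not part of the library):
static class FrameRateUnits
{
    // DirectShow's AverageTimePerFrame and Min/MaxFrameInterval are in 100-ns units:
    // 10,000,000 units per second.
    const long HundredNsPerSecond = 10000000L;

    // e.g. an interval of 333333 (100-ns units) corresponds to ~30 fps
    public static int ToFramesPerSecond(long frameInterval)
    {
        return (frameInterval > 0) ? (int)(HundredNsPerSecond / frameInterval) : 0;
    }

    // e.g. 30 fps corresponds to an AverageTimePerFrame of 333333 (100-ns units)
    public static long ToFrameInterval(int framesPerSecond)
    {
        return (framesPerSecond > 0) ? HundredNsPerSecond / framesPerSecond : 0;
    }
}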
private void SetResolution(IAMStreamConfig streamConfig, VideoCapabilities resolution)
{
    if (resolution == null)
    {
        return;
    }

    int capabilitiesCount = 0, capabilitySize = 0;
    AMMediaType newMediaType = null;
    VideoStreamConfigCaps caps = new VideoStreamConfigCaps( );

    streamConfig.GetNumberOfCapabilities(out capabilitiesCount, out capabilitySize);

    for (int i = 0; i < capabilitiesCount; i++)
    {
        try
        {
            VideoCapabilities vc = new VideoCapabilities(streamConfig, i);

            if (resolution == vc)
            {
                if (streamConfig.GetStreamCaps(i, out newMediaType, caps) == 0)
                {
                    break;
                }
            }
        }
        catch
        {
        }
    }

    if (newMediaType != null)
    {
        streamConfig.SetFormat(newMediaType);
        newMediaType.Dispose( );
    }
}
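// For context, a minimal usage sketch showing how these internal resolution helpers are
// normally driven from the public AForge.Video.DirectShow API. This assumes the standard
// FilterInfoCollection / VideoCaptureDevice types; SetResolution() itself runs inside the
// worker thread when the graph is built.
using System.Linq;
using AForge.Video;
using AForge.Video.DirectShow;

class CameraExample
{
    public static void Run()
    {
        var devices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
        var camera = new VideoCaptureDevice(devices[0].MonikerString);

        // pick the largest supported frame size; the worker thread applies it to the
        // capture pin via IAMStreamConfig.SetFormat()
        camera.VideoResolution = camera.VideoCapabilities
            .OrderByDescending(c => c.FrameSize.Width * c.FrameSize.Height)
            .First();

        camera.NewFrame += (sender, eventArgs) =>
        {
            // eventArgs.Frame is owned by the event; clone it if it must outlive the handler
        };

        camera.Start();
    }
}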
// Token: 0x0600003F RID: 63 RVA: 0x00002D24 File Offset: 0x00000F24 private void WorkerThread(bool runGraph) { ReasonToFinishPlaying reason = ReasonToFinishPlaying.StoppedByUser; bool flag = false; VideoCaptureDevice.Grabber grabber = new VideoCaptureDevice.Grabber(this, false); VideoCaptureDevice.Grabber grabber2 = new VideoCaptureDevice.Grabber(this, true); object obj = null; object obj2 = null; object obj3 = null; object obj4 = null; object obj5 = null; ICaptureGraphBuilder2 captureGraphBuilder = null; IFilterGraph2 filterGraph = null; IBaseFilter baseFilter = null; IBaseFilter baseFilter2 = null; IBaseFilter baseFilter3 = null; ISampleGrabber sampleGrabber = null; ISampleGrabber sampleGrabber2 = null; IAMVideoControl iamvideoControl = null; IPin pin = null; IAMCrossbar iamcrossbar = null; try { Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2); if (typeFromCLSID == null) { throw new ApplicationException("Failed creating capture graph builder"); } obj = Activator.CreateInstance(typeFromCLSID); captureGraphBuilder = (ICaptureGraphBuilder2)obj; Type typeFromCLSID2 = Type.GetTypeFromCLSID(Clsid.FilterGraph); if (typeFromCLSID2 == null) { throw new ApplicationException("Failed creating filter graph"); } obj2 = Activator.CreateInstance(typeFromCLSID2); filterGraph = (IFilterGraph2)obj2; captureGraphBuilder.SetFiltergraph((IGraphBuilder)filterGraph); this.sourceObject1 = FilterInfo.CreateFilter(this.deviceMoniker1); if (this.sourceObject1 == null) { throw new ApplicationException("Failed creating device object for moniker"); } baseFilter = (IBaseFilter)this.sourceObject1; try { iamvideoControl = (IAMVideoControl)this.sourceObject1; } catch { } Type typeFromCLSID3 = Type.GetTypeFromCLSID(Clsid.SampleGrabber); if (typeFromCLSID3 == null) { throw new ApplicationException("Failed creating sample grabber"); } obj3 = Activator.CreateInstance(typeFromCLSID3); sampleGrabber = (ISampleGrabber)obj3; baseFilter2 = (IBaseFilter)obj3; obj4 = Activator.CreateInstance(typeFromCLSID3); sampleGrabber2 = (ISampleGrabber)obj4; baseFilter3 = (IBaseFilter)obj4; filterGraph.AddFilter(baseFilter, "source"); filterGraph.AddFilter(baseFilter2, "grabber_video"); filterGraph.AddFilter(baseFilter3, "grabber_snapshot"); AMMediaType ammediaType = new AMMediaType(); ammediaType.MajorType = MediaType.Video1; ammediaType.SubType = MediaSubType.RGB24; sampleGrabber.SetMediaType(ammediaType); sampleGrabber2.SetMediaType(ammediaType); captureGraphBuilder.FindInterface(FindDirection.UpstreamOnly, Guid.Empty, baseFilter, typeof(IAMCrossbar).GUID, out obj5); if (obj5 != null) { iamcrossbar = (IAMCrossbar)obj5; } this.isCrossbarAvailable1 = new bool?(iamcrossbar != null); this.crossbarVideoInputs1 = this.ColletCrossbarVideoInputs(iamcrossbar); if (iamvideoControl != null) { captureGraphBuilder.FindPin(this.sourceObject1, PinDirection.Output, PinCategory.StillImage, MediaType.Video1, false, 0, out pin); if (pin != null) { VideoControlFlags videoControlFlags; iamvideoControl.GetCaps(pin, out videoControlFlags); flag = ((videoControlFlags & VideoControlFlags.ExternalTriggerEnable) > (VideoControlFlags)0); } } sampleGrabber.SetBufferSamples(false); sampleGrabber.SetOneShot(false); sampleGrabber.SetCallback(grabber, 1); sampleGrabber2.SetBufferSamples(true); sampleGrabber2.SetOneShot(false); sampleGrabber2.SetCallback(grabber2, 1); this.GetPinCapabilitiesAndConfigureSizeAndRate(captureGraphBuilder, baseFilter, PinCategory.Capture, this.videoResolution1, ref this.videoCapabilities1); if (flag) { 
this.GetPinCapabilitiesAndConfigureSizeAndRate(captureGraphBuilder, baseFilter, PinCategory.StillImage, this.snapshotResolution1, ref this.snapshotCapabilities1); } else { this.snapshotCapabilities1 = new VideoCapabilities[0]; } Dictionary <string, VideoCapabilities[]> obj6 = VideoCaptureDevice.cacheVideoCapabilities1; lock (obj6) { if (this.videoCapabilities1 != null && !VideoCaptureDevice.cacheVideoCapabilities1.ContainsKey(this.deviceMoniker1)) { VideoCaptureDevice.cacheVideoCapabilities1.Add(this.deviceMoniker1, this.videoCapabilities1); } } obj6 = VideoCaptureDevice.cacheSnapshotCapabilities1; lock (obj6) { if (this.snapshotCapabilities1 != null && !VideoCaptureDevice.cacheSnapshotCapabilities1.ContainsKey(this.deviceMoniker1)) { VideoCaptureDevice.cacheSnapshotCapabilities1.Add(this.deviceMoniker1, this.snapshotCapabilities1); } } if (runGraph) { captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video1, baseFilter, null, baseFilter2); if (sampleGrabber.GetConnectedMediaType(ammediaType) == 0) { VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(ammediaType.FormatPtr, typeof(VideoInfoHeader)); grabber.Width = videoInfoHeader.BmiHeader.Width; grabber.Height = videoInfoHeader.BmiHeader.Height; ammediaType.Dispose(); } if (flag && this.provideSnapshots1) { captureGraphBuilder.RenderStream(PinCategory.StillImage, MediaType.Video1, baseFilter, null, baseFilter3); if (sampleGrabber2.GetConnectedMediaType(ammediaType) == 0) { VideoInfoHeader videoInfoHeader2 = (VideoInfoHeader)Marshal.PtrToStructure(ammediaType.FormatPtr, typeof(VideoInfoHeader)); grabber2.Width = videoInfoHeader2.BmiHeader.Width; grabber2.Height = videoInfoHeader2.BmiHeader.Height; ammediaType.Dispose(); } } IMediaControl mediaControl = (IMediaControl)obj2; IMediaEventEx mediaEventEx = (IMediaEventEx)obj2; mediaControl.Run(); if (flag && this.provideSnapshots1) { this.startTime1 = DateTime.Now; iamvideoControl.SetMode(pin, VideoControlFlags.ExternalTriggerEnable); } for (;;) { DsEvCode dsEvCode; IntPtr lParam; IntPtr lParam2; if (mediaEventEx != null && mediaEventEx.GetEvent(out dsEvCode, out lParam, out lParam2, 0) >= 0) { mediaEventEx.FreeEventParams(dsEvCode, lParam, lParam2); if (dsEvCode == DsEvCode.DeviceLost) { break; } } if (this.needToSetVideoInput1) { this.needToSetVideoInput1 = false; if (this.isCrossbarAvailable1.Value) { this.SetCurrentCrossbarInput(iamcrossbar, this.crossbarVideoInput1); this.crossbarVideoInput1 = this.GetCurrentCrossbarInput(iamcrossbar); } } if (this.needToSimulateTrigger1) { this.needToSimulateTrigger1 = false; if (flag && this.provideSnapshots1) { iamvideoControl.SetMode(pin, VideoControlFlags.Trigger); } } if (this.needToDisplayPropertyPage1) { this.needToDisplayPropertyPage1 = false; this.DisplayPropertyPage(this.parentWindowForPropertyPage1, this.sourceObject1); if (iamcrossbar != null) { this.crossbarVideoInput1 = this.GetCurrentCrossbarInput(iamcrossbar); } } if (this.needToDisplayCrossBarPropertyPage1) { this.needToDisplayCrossBarPropertyPage1 = false; if (iamcrossbar != null) { this.DisplayPropertyPage(this.parentWindowForPropertyPage1, iamcrossbar); this.crossbarVideoInput1 = this.GetCurrentCrossbarInput(iamcrossbar); } } if (this.stopEvent1.WaitOne(100, false)) { goto IL_54E; } } reason = ReasonToFinishPlaying.DeviceLost; IL_54E: mediaControl.Stop1(); } } catch (Exception ex) { if (this.VideoSourceError != null) { this.VideoSourceError(this, new VideoSourceErrorEventArgs(ex.Message)); } } finally { captureGraphBuilder = null; filterGraph = 
null; baseFilter = null; iamvideoControl = null; pin = null; iamcrossbar = null; baseFilter2 = null; baseFilter3 = null; sampleGrabber = null; sampleGrabber2 = null; if (obj2 != null) { Marshal.ReleaseComObject(obj2); obj2 = null; } if (this.sourceObject1 != null) { Marshal.ReleaseComObject(this.sourceObject1); this.sourceObject1 = null; } if (obj3 != null) { Marshal.ReleaseComObject(obj3); obj3 = null; } if (obj4 != null) { Marshal.ReleaseComObject(obj4); obj4 = null; } if (obj != null) { Marshal.ReleaseComObject(obj); obj = null; } if (obj5 != null) { Marshal.ReleaseComObject(obj5); obj5 = null; } } if (this.PlayingFinished != null) { this.PlayingFinished(this, reason); } }
private void WorkerThread() { ReasonToFinishPlaying reason = ReasonToFinishPlaying.StoppedByUser; Grabber grabber = new Grabber(this); object obj = null; object obj2 = null; IGraphBuilder graphBuilder = null; IBaseFilter filter = null; IBaseFilter baseFilter = null; ISampleGrabber sampleGrabber = null; IMediaControl mediaControl = null; IMediaEventEx mediaEventEx = null; try { Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.FilterGraph); if (typeFromCLSID == null) { throw new ApplicationException("Failed creating filter graph"); } obj = Activator.CreateInstance(typeFromCLSID); graphBuilder = (IGraphBuilder)obj; graphBuilder.AddSourceFilter(fileName, "source", out filter); if (filter == null) { throw new ApplicationException("Failed creating source filter"); } typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber); if (typeFromCLSID == null) { throw new ApplicationException("Failed creating sample grabber"); } obj2 = Activator.CreateInstance(typeFromCLSID); sampleGrabber = (ISampleGrabber)obj2; baseFilter = (IBaseFilter)obj2; graphBuilder.AddFilter(baseFilter, "grabber"); AMMediaType aMMediaType = new AMMediaType(); aMMediaType.MajorType = MediaType.Video; aMMediaType.SubType = MediaSubType.RGB24; sampleGrabber.SetMediaType(aMMediaType); int num = 0; IPin inPin = Tools.GetInPin(baseFilter, 0); IPin pin = null; while (true) { pin = Tools.GetOutPin(filter, num); if (pin == null) { Marshal.ReleaseComObject(inPin); throw new ApplicationException("Did not find acceptable output video pin in the given source"); } if (graphBuilder.Connect(pin, inPin) >= 0) { break; } Marshal.ReleaseComObject(pin); pin = null; num++; } Marshal.ReleaseComObject(pin); Marshal.ReleaseComObject(inPin); if (sampleGrabber.GetConnectedMediaType(aMMediaType) == 0) { VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(aMMediaType.FormatPtr, typeof(VideoInfoHeader)); grabber.Width = videoInfoHeader.BmiHeader.Width; grabber.Height = videoInfoHeader.BmiHeader.Height; aMMediaType.Dispose(); } if (!preventFreezing) { graphBuilder.Render(Tools.GetOutPin(baseFilter, 0)); IVideoWindow videoWindow = (IVideoWindow)obj; videoWindow.put_AutoShow(autoShow: false); videoWindow = null; } sampleGrabber.SetBufferSamples(bufferThem: false); sampleGrabber.SetOneShot(oneShot: false); sampleGrabber.SetCallback(grabber, 1); if (!referenceClockEnabled) { IMediaFilter mediaFilter = (IMediaFilter)obj; mediaFilter.SetSyncSource(null); } mediaControl = (IMediaControl)obj; mediaEventEx = (IMediaEventEx)obj; mediaControl.Run(); do { if (mediaEventEx != null && mediaEventEx.GetEvent(out DsEvCode lEventCode, out IntPtr lParam, out IntPtr lParam2, 0) >= 0) { mediaEventEx.FreeEventParams(lEventCode, lParam, lParam2); if (lEventCode == DsEvCode.Complete) { reason = ReasonToFinishPlaying.EndOfStreamReached; break; } } }while (!stopEvent.WaitOne(100, exitContext: false)); mediaControl.Stop(); } catch (Exception ex) { if (this.VideoSourceError != null) { this.VideoSourceError(this, new VideoSourceErrorEventArgs(ex.Message)); } } finally { graphBuilder = null; baseFilter = null; sampleGrabber = null; mediaControl = null; mediaEventEx = null; if (obj != null) { Marshal.ReleaseComObject(obj); obj = null; } if (filter != null) { Marshal.ReleaseComObject(filter); filter = null; } if (obj2 != null) { Marshal.ReleaseComObject(obj2); obj2 = null; } } if (this.PlayingFinished != null) { this.PlayingFinished(this, reason); } }
private void WorkerThread(bool runGraph) { ReasonToFinishPlaying reason = ReasonToFinishPlaying.StoppedByUser; bool flag = false; Grabber grabber = new Grabber(this, snapshotMode: false); Grabber grabber2 = new Grabber(this, snapshotMode: true); object obj = null; object obj2 = null; object obj3 = null; object obj4 = null; object retInterface = null; ICaptureGraphBuilder2 captureGraphBuilder = null; IFilterGraph2 filterGraph = null; IBaseFilter baseFilter = null; IBaseFilter baseFilter2 = null; IBaseFilter baseFilter3 = null; ISampleGrabber sampleGrabber = null; ISampleGrabber sampleGrabber2 = null; IMediaControl mediaControl = null; IAMVideoControl iAMVideoControl = null; IMediaEventEx mediaEventEx = null; IPin pin = null; IAMCrossbar iAMCrossbar = null; try { Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2); if (typeFromCLSID == null) { throw new ApplicationException("Failed creating capture graph builder"); } obj = Activator.CreateInstance(typeFromCLSID); captureGraphBuilder = (ICaptureGraphBuilder2)obj; typeFromCLSID = Type.GetTypeFromCLSID(Clsid.FilterGraph); if (typeFromCLSID == null) { throw new ApplicationException("Failed creating filter graph"); } obj2 = Activator.CreateInstance(typeFromCLSID); filterGraph = (IFilterGraph2)obj2; captureGraphBuilder.SetFiltergraph((IGraphBuilder)filterGraph); sourceObject = FilterInfo.CreateFilter(deviceMoniker); if (sourceObject == null) { throw new ApplicationException("Failed creating device object for moniker"); } baseFilter = (IBaseFilter)sourceObject; try { iAMVideoControl = (IAMVideoControl)sourceObject; } catch { } typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber); if (typeFromCLSID == null) { throw new ApplicationException("Failed creating sample grabber"); } obj3 = Activator.CreateInstance(typeFromCLSID); sampleGrabber = (ISampleGrabber)obj3; baseFilter2 = (IBaseFilter)obj3; obj4 = Activator.CreateInstance(typeFromCLSID); sampleGrabber2 = (ISampleGrabber)obj4; baseFilter3 = (IBaseFilter)obj4; filterGraph.AddFilter(baseFilter, "source"); filterGraph.AddFilter(baseFilter2, "grabber_video"); filterGraph.AddFilter(baseFilter3, "grabber_snapshot"); AMMediaType aMMediaType = new AMMediaType(); aMMediaType.MajorType = MediaType.Video; aMMediaType.SubType = MediaSubType.RGB24; sampleGrabber.SetMediaType(aMMediaType); sampleGrabber2.SetMediaType(aMMediaType); captureGraphBuilder.FindInterface(FindDirection.UpstreamOnly, Guid.Empty, baseFilter, typeof(IAMCrossbar).GUID, out retInterface); if (retInterface != null) { iAMCrossbar = (IAMCrossbar)retInterface; } isCrossbarAvailable = (iAMCrossbar != null); crossbarVideoInputs = ColletCrossbarVideoInputs(iAMCrossbar); if (iAMVideoControl != null) { captureGraphBuilder.FindPin(sourceObject, PinDirection.Output, PinCategory.StillImage, MediaType.Video, unconnected: false, 0, out pin); if (pin != null) { iAMVideoControl.GetCaps(pin, out VideoControlFlags flags); flag = ((flags & VideoControlFlags.ExternalTriggerEnable) != 0); } } sampleGrabber.SetBufferSamples(bufferThem: false); sampleGrabber.SetOneShot(oneShot: false); sampleGrabber.SetCallback(grabber, 1); sampleGrabber2.SetBufferSamples(bufferThem: true); sampleGrabber2.SetOneShot(oneShot: false); sampleGrabber2.SetCallback(grabber2, 1); GetPinCapabilitiesAndConfigureSizeAndRate(captureGraphBuilder, baseFilter, PinCategory.Capture, videoResolution, ref videoCapabilities); if (flag) { GetPinCapabilitiesAndConfigureSizeAndRate(captureGraphBuilder, baseFilter, PinCategory.StillImage, snapshotResolution, ref 
snapshotCapabilities); } else { snapshotCapabilities = new VideoCapabilities[0]; } lock (cacheVideoCapabilities) { if (videoCapabilities != null && !cacheVideoCapabilities.ContainsKey(deviceMoniker)) { cacheVideoCapabilities.Add(deviceMoniker, videoCapabilities); } } lock (cacheSnapshotCapabilities) { if (snapshotCapabilities != null && !cacheSnapshotCapabilities.ContainsKey(deviceMoniker)) { cacheSnapshotCapabilities.Add(deviceMoniker, snapshotCapabilities); } } if (runGraph) { captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, baseFilter, null, baseFilter2); if (sampleGrabber.GetConnectedMediaType(aMMediaType) == 0) { VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(aMMediaType.FormatPtr, typeof(VideoInfoHeader)); grabber.Width = videoInfoHeader.BmiHeader.Width; grabber.Height = videoInfoHeader.BmiHeader.Height; aMMediaType.Dispose(); } if (flag && provideSnapshots) { captureGraphBuilder.RenderStream(PinCategory.StillImage, MediaType.Video, baseFilter, null, baseFilter3); if (sampleGrabber2.GetConnectedMediaType(aMMediaType) == 0) { VideoInfoHeader videoInfoHeader2 = (VideoInfoHeader)Marshal.PtrToStructure(aMMediaType.FormatPtr, typeof(VideoInfoHeader)); grabber2.Width = videoInfoHeader2.BmiHeader.Width; grabber2.Height = videoInfoHeader2.BmiHeader.Height; aMMediaType.Dispose(); } } mediaControl = (IMediaControl)obj2; mediaEventEx = (IMediaEventEx)obj2; mediaControl.Run(); if (flag && provideSnapshots) { startTime = DateTime.Now; iAMVideoControl.SetMode(pin, VideoControlFlags.ExternalTriggerEnable); } do { if (mediaEventEx != null && mediaEventEx.GetEvent(out DsEvCode lEventCode, out IntPtr lParam, out IntPtr lParam2, 0) >= 0) { mediaEventEx.FreeEventParams(lEventCode, lParam, lParam2); if (lEventCode == DsEvCode.DeviceLost) { reason = ReasonToFinishPlaying.DeviceLost; break; } } if (needToSetVideoInput) { needToSetVideoInput = false; if (isCrossbarAvailable.Value) { SetCurrentCrossbarInput(iAMCrossbar, crossbarVideoInput); crossbarVideoInput = GetCurrentCrossbarInput(iAMCrossbar); } } if (needToSimulateTrigger) { needToSimulateTrigger = false; if (flag && provideSnapshots) { iAMVideoControl.SetMode(pin, VideoControlFlags.Trigger); } } if (needToDisplayPropertyPage) { needToDisplayPropertyPage = false; DisplayPropertyPage(parentWindowForPropertyPage, sourceObject); if (iAMCrossbar != null) { crossbarVideoInput = GetCurrentCrossbarInput(iAMCrossbar); } } if (needToDisplayCrossBarPropertyPage) { needToDisplayCrossBarPropertyPage = false; if (iAMCrossbar != null) { DisplayPropertyPage(parentWindowForPropertyPage, iAMCrossbar); crossbarVideoInput = GetCurrentCrossbarInput(iAMCrossbar); } } }while (!stopEvent.WaitOne(100, exitContext: false)); mediaControl.Stop(); } } catch (Exception ex) { if (this.VideoSourceError != null) { this.VideoSourceError(this, new VideoSourceErrorEventArgs(ex.Message)); } } finally { captureGraphBuilder = null; filterGraph = null; baseFilter = null; mediaControl = null; iAMVideoControl = null; mediaEventEx = null; pin = null; iAMCrossbar = null; baseFilter2 = null; baseFilter3 = null; sampleGrabber = null; sampleGrabber2 = null; if (obj2 != null) { Marshal.ReleaseComObject(obj2); obj2 = null; } if (sourceObject != null) { Marshal.ReleaseComObject(sourceObject); sourceObject = null; } if (obj3 != null) { Marshal.ReleaseComObject(obj3); obj3 = null; } if (obj4 != null) { Marshal.ReleaseComObject(obj4); obj4 = null; } if (obj != null) { Marshal.ReleaseComObject(obj); obj = null; } if (retInterface != null) { 
Marshal.ReleaseComObject(retInterface); retInterface = null; } } if (this.PlayingFinished != null) { this.PlayingFinished(this, reason); } }
private void WorkerThread( bool runGraph )
{
    var reasonToStop = ReasonToFinishPlaying.StoppedByUser;
    bool isSapshotSupported = false;

    // grabber
    var videoGrabber = new Grabber( this, false );
    var snapshotGrabber = new Grabber( this, true );

    // objects
    object captureGraphObject = null;
    object graphObject = null;
    object videoGrabberObject = null;
    object snapshotGrabberObject = null;
    object crossbarObject = null;

    // interfaces
    ICaptureGraphBuilder2 captureGraph;
    IFilterGraph2 graph;
    IBaseFilter sourceBase;
    IBaseFilter videoGrabberBase;
    IBaseFilter snapshotGrabberBase;
    ISampleGrabber videoSampleGrabber;
    ISampleGrabber snapshotSampleGrabber;
    IMediaControl mediaControl;
    IAMVideoControl videoControl = null;
    IMediaEventEx mediaEvent;
    IPin pinStillImage = null;
    IAMCrossbar crossbar = null;

    try
    {
        // get type of capture graph builder
        Type type = Type.GetTypeFromCLSID( Clsid.CaptureGraphBuilder2 );
        if ( type == null )
            throw new ApplicationException( "Failed creating capture graph builder" );

        // create capture graph builder
        captureGraphObject = Activator.CreateInstance( type );
        captureGraph = (ICaptureGraphBuilder2) captureGraphObject;

        // get type of filter graph
        type = Type.GetTypeFromCLSID( Clsid.FilterGraph );
        if ( type == null )
            throw new ApplicationException( "Failed creating filter graph" );

        // create filter graph
        graphObject = Activator.CreateInstance( type );
        graph = (IFilterGraph2) graphObject;

        // set filter graph to the capture graph builder
        captureGraph.SetFiltergraph( (IGraphBuilder) graph );

        // create source device's object
        _sourceObject = FilterInfo.CreateFilter( _deviceMoniker );
        if ( _sourceObject == null )
            throw new ApplicationException( "Failed creating device object for moniker" );

        // get base filter interface of source device
        sourceBase = (IBaseFilter) _sourceObject;

        // get video control interface of the device
        try
        {
            videoControl = (IAMVideoControl) _sourceObject;
        }
        catch
        {
            // some camera drivers may not support IAMVideoControl interface
        }

        // get type of sample grabber
        type = Type.GetTypeFromCLSID( Clsid.SampleGrabber );
        if ( type == null )
            throw new ApplicationException( "Failed creating sample grabber" );

        // create sample grabber used for video capture
        videoGrabberObject = Activator.CreateInstance( type );
        videoSampleGrabber = (ISampleGrabber) videoGrabberObject;
        videoGrabberBase = (IBaseFilter) videoGrabberObject;

        // create sample grabber used for snapshot capture
        snapshotGrabberObject = Activator.CreateInstance( type );
        snapshotSampleGrabber = (ISampleGrabber) snapshotGrabberObject;
        snapshotGrabberBase = (IBaseFilter) snapshotGrabberObject;

        // add source and grabber filters to graph
        graph.AddFilter( sourceBase, "source" );
        graph.AddFilter( videoGrabberBase, "grabber_video" );
        graph.AddFilter( snapshotGrabberBase, "grabber_snapshot" );

        // set media type
        var mediaType = new AMMediaType { MajorType = MediaType.Video, SubType = MediaSubType.RGB24 };
        videoSampleGrabber.SetMediaType( mediaType );
        snapshotSampleGrabber.SetMediaType( mediaType );

        // get crossbar object to allow configuring pins of capture card
        captureGraph.FindInterface( FindDirection.UpstreamOnly, Guid.Empty, sourceBase, typeof( IAMCrossbar ).GUID, out crossbarObject );
        if ( crossbarObject != null )
        {
            crossbar = (IAMCrossbar) crossbarObject;
        }
        _isCrossbarAvailable = ( crossbar != null );
        _crossbarVideoInputs = ColletCrossbarVideoInputs( crossbar );

        if ( videoControl != null )
        {
            // find Still Image output pin of the video device
            captureGraph.FindPin( _sourceObject, PinDirection.Output, PinCategory.StillImage,
                MediaType.Video, false, 0, out pinStillImage );

            // check if it supports trigger mode
            if ( pinStillImage != null )
            {
                VideoControlFlags caps;
                videoControl.GetCaps( pinStillImage, out caps );
                isSapshotSupported = ( ( caps & VideoControlFlags.ExternalTriggerEnable ) != 0 );
            }
        }

        // configure video sample grabber
        videoSampleGrabber.SetBufferSamples( false );
        videoSampleGrabber.SetOneShot( false );
        videoSampleGrabber.SetCallback( videoGrabber, 1 );

        // configure snapshot sample grabber
        snapshotSampleGrabber.SetBufferSamples( true );
        snapshotSampleGrabber.SetOneShot( false );
        snapshotSampleGrabber.SetCallback( snapshotGrabber, 1 );

        // configure pins
        GetPinCapabilitiesAndConfigureSizeAndRate( captureGraph, sourceBase, PinCategory.Capture, _videoResolution, ref _videoCapabilities );
        if ( isSapshotSupported )
        {
            GetPinCapabilitiesAndConfigureSizeAndRate( captureGraph, sourceBase, PinCategory.StillImage, _snapshotResolution, ref _snapshotCapabilities );
        }
        else
        {
            _snapshotCapabilities = new VideoCapabilities[0];
        }

        // put video/snapshot capabilities into cache
        lock ( CacheVideoCapabilities )
        {
            if ( ( _videoCapabilities != null ) && ( !CacheVideoCapabilities.ContainsKey( _deviceMoniker ) ) )
            {
                CacheVideoCapabilities.Add( _deviceMoniker, _videoCapabilities );
            }
        }
        lock ( CacheSnapshotCapabilities )
        {
            if ( ( _snapshotCapabilities != null ) && ( !CacheSnapshotCapabilities.ContainsKey( _deviceMoniker ) ) )
            {
                CacheSnapshotCapabilities.Add( _deviceMoniker, _snapshotCapabilities );
            }
        }

        if ( runGraph )
        {
            // render capture pin
            captureGraph.RenderStream( PinCategory.Capture, MediaType.Video, sourceBase, null, videoGrabberBase );

            if ( videoSampleGrabber.GetConnectedMediaType( mediaType ) == 0 )
            {
                var vih = (VideoInfoHeader) Marshal.PtrToStructure( mediaType.FormatPtr, typeof( VideoInfoHeader ) );
                videoGrabber.Width = vih.BmiHeader.Width;
                videoGrabber.Height = vih.BmiHeader.Height;
                mediaType.Dispose( );
            }

            if ( ( isSapshotSupported ) && ( _provideSnapshots ) )
            {
                // render snapshot pin
                captureGraph.RenderStream( PinCategory.StillImage, MediaType.Video, sourceBase, null, snapshotGrabberBase );

                if ( snapshotSampleGrabber.GetConnectedMediaType( mediaType ) == 0 )
                {
                    var vih = (VideoInfoHeader) Marshal.PtrToStructure( mediaType.FormatPtr, typeof( VideoInfoHeader ) );
                    snapshotGrabber.Width = vih.BmiHeader.Width;
                    snapshotGrabber.Height = vih.BmiHeader.Height;
                    mediaType.Dispose( );
                }
            }

            // get media control
            mediaControl = (IMediaControl) graphObject;
            // get media events' interface
            mediaEvent = (IMediaEventEx) graphObject;

            // run
            mediaControl.Run( );

            if ( ( isSapshotSupported ) && ( _provideSnapshots ) )
            {
                _startTime = DateTime.Now;
                videoControl.SetMode( pinStillImage, VideoControlFlags.ExternalTriggerEnable );
            }

            do
            {
                if ( mediaEvent != null )
                {
                    IntPtr p1;
                    IntPtr p2;
                    DsEvCode code;

                    if ( mediaEvent.GetEvent( out code, out p1, out p2, 0 ) >= 0 )
                    {
                        mediaEvent.FreeEventParams( code, p1, p2 );

                        if ( code == DsEvCode.DeviceLost )
                        {
                            reasonToStop = ReasonToFinishPlaying.DeviceLost;
                            break;
                        }
                    }
                }

                if ( _needToSetVideoInput )
                {
                    _needToSetVideoInput = false;
                    // set/check current input type of a video card (frame grabber)
                    if ( _isCrossbarAvailable.Value )
                    {
                        SetCurrentCrossbarInput( crossbar, _crossbarVideoInput );
                        _crossbarVideoInput = GetCurrentCrossbarInput( crossbar );
                    }
                }

                if ( _needToSimulateTrigger )
                {
                    _needToSimulateTrigger = false;

                    if ( ( isSapshotSupported ) && ( _provideSnapshots ) )
                    {
                        videoControl.SetMode( pinStillImage, VideoControlFlags.Trigger );
                    }
                }

                if ( _needToDisplayPropertyPage )
                {
                    _needToDisplayPropertyPage = false;
                    DisplayPropertyPage( _parentWindowForPropertyPage, _sourceObject );

                    if ( crossbar != null )
                    {
                        _crossbarVideoInput = GetCurrentCrossbarInput( crossbar );
                    }
                }

                if ( _needToDisplayCrossBarPropertyPage )
                {
                    _needToDisplayCrossBarPropertyPage = false;

                    if ( crossbar != null )
                    {
                        DisplayPropertyPage( _parentWindowForPropertyPage, crossbar );
                        _crossbarVideoInput = GetCurrentCrossbarInput( crossbar );
                    }
                }
            }
            while ( !_stopEvent.WaitOne( 100, false ) );

            mediaControl.Stop( );
        }
    }
    catch ( Exception exception )
    {
        // provide information to clients
        if ( VideoSourceError != null )
        {
            VideoSourceError( this, new VideoSourceErrorEventArgs( exception.Message ) );
        }
    }
    finally
    {
        // release all objects
        captureGraph = null;
        graph = null;
        sourceBase = null;
        mediaControl = null;
        videoControl = null;
        mediaEvent = null;
        pinStillImage = null;
        crossbar = null;
        videoGrabberBase = null;
        snapshotGrabberBase = null;
        videoSampleGrabber = null;
        snapshotSampleGrabber = null;

        if ( graphObject != null )
        {
            Marshal.ReleaseComObject( graphObject );
            graphObject = null;
        }
        if ( _sourceObject != null )
        {
            Marshal.ReleaseComObject( _sourceObject );
            _sourceObject = null;
        }
        if ( videoGrabberObject != null )
        {
            Marshal.ReleaseComObject( videoGrabberObject );
            videoGrabberObject = null;
        }
        if ( snapshotGrabberObject != null )
        {
            Marshal.ReleaseComObject( snapshotGrabberObject );
            snapshotGrabberObject = null;
        }
        if ( captureGraphObject != null )
        {
            Marshal.ReleaseComObject( captureGraphObject );
            captureGraphObject = null;
        }
        if ( crossbarObject != null )
        {
            Marshal.ReleaseComObject( crossbarObject );
            crossbarObject = null;
        }
    }

    if ( PlayingFinished != null )
    {
        PlayingFinished( this, reasonToStop );
    }
}
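// The snapshot branch above (still-image pin, external trigger, snapshot grabber) is normally
// exercised through the public API roughly as follows. This is a hedged sketch assuming the
// standard VideoCaptureDevice members; deviceMoniker is a placeholder for the device's
// moniker string.
var camera = new VideoCaptureDevice(deviceMoniker);

// must be enabled before Start() so the still-image pin gets rendered
camera.ProvideSnapshots = true;

camera.SnapshotFrame += (sender, eventArgs) =>
{
    // delivered by the snapshot sample grabber when the trigger fires
    eventArgs.Frame.Save("snapshot.png");
};

camera.Start();

// later: sets the trigger flag, which the worker thread turns into
// VideoControlFlags.Trigger on the still-image pin
camera.SimulateTrigger();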
private void WorkerThread(bool runGraph) { // grabber Grabber grabber = new Grabber(this); // objects object captureGraphObject = null; object graphObject = null; object grabberObject = null; // interfaces ICaptureGraphBuilder2 captureGraph = null; IFilterGraph2 graph = null; IBaseFilter sourceBase = null; IBaseFilter grabberBase = null; ISampleGrabber sampleGrabber = null; IMediaControl mediaControl = null; try { // get type of capture graph builder Type type = Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2); if (type == null) { throw new ApplicationException("Failed creating capture graph builder"); } // create capture graph builder captureGraphObject = Activator.CreateInstance(type); captureGraph = (ICaptureGraphBuilder2)captureGraphObject; // get type of filter graph type = Type.GetTypeFromCLSID(Clsid.FilterGraph); if (type == null) { throw new ApplicationException("Failed creating filter graph"); } // create filter graph graphObject = Activator.CreateInstance(type); graph = (IFilterGraph2)graphObject; // set filter graph to the capture graph builder captureGraph.SetFiltergraph((IGraphBuilder)graph); // create source device's object sourceObject = FilterInfo.CreateFilter(deviceMoniker); if (sourceObject == null) { throw new ApplicationException("Failed creating device object for moniker"); } // get base filter interface of source device sourceBase = (IBaseFilter)sourceObject; // get type of sample grabber type = Type.GetTypeFromCLSID(Clsid.SampleGrabber); if (type == null) { throw new ApplicationException("Failed creating sample grabber"); } // create sample grabber grabberObject = Activator.CreateInstance(type); sampleGrabber = (ISampleGrabber)grabberObject; grabberBase = (IBaseFilter)grabberObject; // add source and grabber filters to graph graph.AddFilter(sourceBase, "source"); graph.AddFilter(grabberBase, "grabber"); // set media type AMMediaType mediaType = new AMMediaType( ); mediaType.MajorType = MediaType.Video; mediaType.SubType = MediaSubType.RGB24; sampleGrabber.SetMediaType(mediaType); // configure sample grabber sampleGrabber.SetBufferSamples(false); sampleGrabber.SetOneShot(false); sampleGrabber.SetCallback(grabber, 1); // check if it is required to change capture settings if ((desiredFrameRate != 0) || ((desiredFrameSize.Width != 0) && (desiredFrameSize.Height != 0))) { object streamConfigObject; // get stream configuration object captureGraph.FindInterface(PinCategory.Capture, MediaType.Video, sourceBase, typeof(IAMStreamConfig).GUID, out streamConfigObject); if (streamConfigObject != null) { IAMStreamConfig streamConfig = (IAMStreamConfig)streamConfigObject; if (videoCapabilities == null) { // get all video capabilities try { videoCapabilities = AForge.Video.DirectShow.VideoCapabilities.FromStreamConfig(streamConfig); } catch { } } // get current format streamConfig.GetFormat(out mediaType); VideoInfoHeader infoHeader = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader)); // change frame size if required if ((desiredFrameSize.Width != 0) && (desiredFrameSize.Height != 0)) { infoHeader.BmiHeader.Width = desiredFrameSize.Width; infoHeader.BmiHeader.Height = desiredFrameSize.Height; } // change frame rate if required if (desiredFrameRate != 0) { infoHeader.AverageTimePerFrame = 10000000 / desiredFrameRate; } // copy the media structure back Marshal.StructureToPtr(infoHeader, mediaType.FormatPtr, false); // set the new format streamConfig.SetFormat(mediaType); mediaType.Dispose( ); } } else { if (videoCapabilities == null) { object 
streamConfigObject; // get stream configuration object captureGraph.FindInterface(PinCategory.Capture, MediaType.Video, sourceBase, typeof(IAMStreamConfig).GUID, out streamConfigObject); if (streamConfigObject != null) { IAMStreamConfig streamConfig = (IAMStreamConfig)streamConfigObject; // get all video capabilities try { videoCapabilities = AForge.Video.DirectShow.VideoCapabilities.FromStreamConfig(streamConfig); } catch { } } } } if (runGraph) { // render source device on sample grabber captureGraph.RenderStream(PinCategory.Capture, MediaType.Video, sourceBase, null, grabberBase); // get media type if (sampleGrabber.GetConnectedMediaType(mediaType) == 0) { VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader)); grabber.Width = vih.BmiHeader.Width; grabber.Height = vih.BmiHeader.Height; mediaType.Dispose( ); } // get media control mediaControl = (IMediaControl)graphObject; // run mediaControl.Run( ); while (!stopEvent.WaitOne(0, true)) { Thread.Sleep(100); if (needToDisplayPropertyPage) { needToDisplayPropertyPage = false; try { // retrieve ISpecifyPropertyPages interface of the device ISpecifyPropertyPages pPropPages = (ISpecifyPropertyPages)sourceObject; // get property pages from the property bag CAUUID caGUID; pPropPages.GetPages(out caGUID); // get filter info FilterInfo filterInfo = new FilterInfo(deviceMoniker); // create and display the OlePropertyFrame Win32.OleCreatePropertyFrame(parentWindowForPropertyPage, 0, 0, filterInfo.Name, 1, ref sourceObject, caGUID.cElems, caGUID.pElems, 0, 0, IntPtr.Zero); // release COM objects Marshal.FreeCoTaskMem(caGUID.pElems); } catch { } } } mediaControl.StopWhenReady( ); } } catch (Exception exception) { // provide information to clients if (VideoSourceError != null) { VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message)); } } finally { // release all objects captureGraph = null; graph = null; sourceBase = null; grabberBase = null; sampleGrabber = null; mediaControl = null; if (graphObject != null) { Marshal.ReleaseComObject(graphObject); graphObject = null; } if (sourceObject != null) { Marshal.ReleaseComObject(sourceObject); sourceObject = null; } if (grabberObject != null) { Marshal.ReleaseComObject(grabberObject); grabberObject = null; } if (captureGraphObject != null) { Marshal.ReleaseComObject(captureGraphObject); captureGraphObject = null; } } if (PlayingFinished != null) { PlayingFinished(this, ReasonToFinishPlaying.StoppedByUser); } }
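// The older variant above configures the capture pin from desiredFrameSize/desiredFrameRate
// fields rather than from a selected VideoCapabilities entry. In the legacy public API this
// corresponded (treat the property names as an assumption; later releases replaced them with
// VideoResolution) to something along these lines:
var camera = new VideoCaptureDevice(deviceMoniker);
camera.DesiredFrameSize = new System.Drawing.Size(640, 480); // copied into the BITMAPINFOHEADER width/height
camera.DesiredFrameRate = 30;                                // becomes AverageTimePerFrame = 10000000 / 30
camera.Start();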
public DirectShowCapture(string name, string deviceMoniker)
{
    this.Name = name;
    this.Uuid = deviceMoniker;
    this.Width = 1600;
    this.Height = 1200;

    captureGraphBuilder2 = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2)) as ICaptureGraphBuilder2;
    filterGraph2 = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph)) as IFilterGraph2;
    sampleGrabberBaseFilter = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.SampleGrabber)) as IBaseFilter;
    sampleGrabber = sampleGrabberBaseFilter as ISampleGrabber;

    captureGraphBuilder2.SetFiltergraph(filterGraph2 as IGraphBuilder);

    FilterInfo.CreateFilter(this.Uuid, out captureSourceBaseFilter);
    captureSourceBaseFilter.SetSyncSource(IntPtr.Zero);
    sampleGrabberBaseFilter.SetSyncSource(IntPtr.Zero);

    videoProcAmp = captureSourceBaseFilter as IAMVideoProcAmp;
    cameraControl = captureSourceBaseFilter as IAMCameraControl;
    ksPropertySet = captureSourceBaseFilter as IKsPropertySet;

    videoProcAmp.Set(VideoProcAmpProperty.ColorEnable, 1, VideoProcAmpFlags.Manual);
    // note: 1000 / 120 is integer division, so this requests an 8 second exposure
    ksPropertySet.SetExposure(TimeSpan.FromSeconds(1000 / 120));

    filterGraph2.AddFilter(captureSourceBaseFilter, "source");
    filterGraph2.AddFilter(sampleGrabberBaseFilter, "grabber");

    object streamConfigObj;
    captureGraphBuilder2.FindInterface(PinCategory.Capture, MediaType.Video, captureSourceBaseFilter, typeof(IAMStreamConfig).GUID, out streamConfigObj);
    IAMStreamConfig streamConfig = (IAMStreamConfig)streamConfigObj;

    videoCapabilities = Pentacorn.Vision.Captures.DirectShow.VideoCapabilities.FromStreamConfig(streamConfig);
    var desiredFormat = videoCapabilities.Where(vc => vc.FrameSize.Width == this.Width && vc.FrameSize.Height == this.Height)
                                         .OrderByDescending(vc => vc.MaxFrameRate).First();
    streamConfig.SetFormat(desiredFormat.MediaType);

    var hr = sampleGrabber.SetMediaType(desiredFormat.MediaType);
    if (hr < 0)
        throw new Win32Exception(hr);

    sampleGrabber.SetBufferSamples(true);
    sampleGrabber.SetOneShot(false);
    sampleGrabber.SetCallback(this, 1);

    hr = captureGraphBuilder2.RenderStream(PinCategory.Capture, MediaType.Video, captureSourceBaseFilter, null, sampleGrabberBaseFilter);
    if (hr < 0)
        throw new Win32Exception(hr);

    AMMediaType mediaType = new AMMediaType();
    if (sampleGrabber.GetConnectedMediaType(mediaType) >= 0)
    {
        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
        if (this.Width != vih.BmiHeader.Width)
            throw new Exception("DirectShow capture width not what's requested.");
        if (this.Height != vih.BmiHeader.Height)
            throw new Exception("DirectShow capture height not what's requested.");
        mediaType.Dispose();
    }

    mediaControl = (IMediaControl)filterGraph2;
    mediaControl.Run();
}
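// The constructor above starts the graph but the snippet shows no teardown. A hypothetical
// counterpart (field names taken from the constructor; the method itself is assumed, not part
// of the original class) would stop the graph and release each created COM object once:
public void Stop()
{
    if (mediaControl != null)
    {
        mediaControl.Stop();
        mediaControl = null; // same RCW as filterGraph2, so it is not released separately
    }

    if (captureSourceBaseFilter != null) { Marshal.ReleaseComObject(captureSourceBaseFilter); captureSourceBaseFilter = null; }
    if (sampleGrabberBaseFilter != null) { Marshal.ReleaseComObject(sampleGrabberBaseFilter); sampleGrabberBaseFilter = null; }
    if (filterGraph2 != null)            { Marshal.ReleaseComObject(filterGraph2); filterGraph2 = null; }
    if (captureGraphBuilder2 != null)    { Marshal.ReleaseComObject(captureGraphBuilder2); captureGraphBuilder2 = null; }
}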
public void WorkerThread() { bool flag = false; Grabber pCallback = new Grabber(this); object o = null; object obj3 = null; object obj4 = null; IGraphBuilder builder = null; IBaseFilter pFilter = null; IBaseFilter filter2 = null; ISampleGrabber grabber2 = null; IFileSourceFilter filter3 = null; IMediaControl control = null; IMediaEventEx ex = null; while (!flag && !this.stopEvent.WaitOne(0, true)) { try { try { Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.FilterGraph); if (typeFromCLSID == null) { throw new ApplicationException("Failed creating filter graph"); } o = Activator.CreateInstance(typeFromCLSID); builder = (IGraphBuilder)o; typeFromCLSID = Type.GetTypeFromCLSID(Clsid.WindowsMediaSource); if (typeFromCLSID == null) { throw new ApplicationException("Failed creating WM source"); } obj3 = Activator.CreateInstance(typeFromCLSID); pFilter = (IBaseFilter)obj3; typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber); if (typeFromCLSID == null) { throw new ApplicationException("Failed creating sample grabber"); } obj4 = Activator.CreateInstance(typeFromCLSID); grabber2 = (ISampleGrabber)obj4; filter2 = (IBaseFilter)obj4; builder.AddFilter(pFilter, "source"); builder.AddFilter(filter2, "grabber"); AMMediaType pmt = new AMMediaType { majorType = MediaType.Video, subType = MediaSubType.RGB24 }; grabber2.SetMediaType(pmt); filter3 = (IFileSourceFilter)obj3; filter3.Load(this.source, null); if (builder.Connect(DSTools.GetOutPin(pFilter, 0), DSTools.GetInPin(filter2, 0)) < 0) { throw new ApplicationException("Failed connecting filters"); } if (grabber2.GetConnectedMediaType(pmt) == 0) { VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(pmt.formatPtr, typeof(VideoInfoHeader)); pCallback.Width = header.BmiHeader.Width; pCallback.Height = header.BmiHeader.Height; pmt.Dispose(); } builder.Render(DSTools.GetOutPin(filter2, 0)); grabber2.SetBufferSamples(false); grabber2.SetOneShot(false); grabber2.SetCallback(pCallback, 1); IVideoWindow window = (IVideoWindow)o; window.put_AutoShow(false); window = null; ex = (IMediaEventEx)o; control = (IMediaControl)o; control.Run(); while (!this.stopEvent.WaitOne(0, true)) { int num; int num2; int num3; Thread.Sleep(100); if (ex.GetEvent(out num, out num2, out num3, 0) == 0) { ex.FreeEventParams(num, num2, num3); if (num == 1) { break; } } } control.StopWhenReady(); } catch (Exception) { flag = true; } continue; } finally { ex = null; control = null; filter3 = null; builder = null; pFilter = null; filter2 = null; grabber2 = null; if (o != null) { Marshal.ReleaseComObject(o); o = null; } if (obj3 != null) { Marshal.ReleaseComObject(obj3); obj3 = null; } if (obj4 != null) { Marshal.ReleaseComObject(obj4); obj4 = null; } } } }
// Thread entry point public void WorkerThread() { bool failed = false; // grabber Grabber grabber = new Grabber(this); // objects object graphObj = null; object sourceObj = null; object grabberObj = null; // interfaces IGraphBuilder graph = null; IBaseFilter sourceBase = null; IBaseFilter grabberBase = null; ISampleGrabber sg = null; IFileSourceFilter fileSource = null; IMediaControl mc = null; IMediaEventEx mediaEvent = null; int code, param1, param2; while ((!failed) && (!stopEvent.WaitOne(0, true))) { try { // Get type for filter graph Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph); if (srvType == null) { throw new ApplicationException("Failed creating filter graph"); } // create filter graph graphObj = Activator.CreateInstance(srvType); graph = (IGraphBuilder)graphObj; // Get type for windows media source filter srvType = Type.GetTypeFromCLSID(Clsid.WindowsMediaSource); if (srvType == null) { throw new ApplicationException("Failed creating WM source"); } // create windows media source filter sourceObj = Activator.CreateInstance(srvType); sourceBase = (IBaseFilter)sourceObj; // Get type for sample grabber srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber); if (srvType == null) { throw new ApplicationException("Failed creating sample grabber"); } // create sample grabber grabberObj = Activator.CreateInstance(srvType); sg = (ISampleGrabber)grabberObj; grabberBase = (IBaseFilter)grabberObj; // add source filter to graph graph.AddFilter(sourceBase, "source"); graph.AddFilter(grabberBase, "grabber"); // set media type AMMediaType mt = new AMMediaType(); mt.majorType = MediaType.Video; mt.subType = MediaSubType.RGB24; sg.SetMediaType(mt); // load file fileSource = (IFileSourceFilter)sourceObj; fileSource.Load(this.source, null); // connect pins if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0) { throw new ApplicationException("Failed connecting filters"); } // get media type if (sg.GetConnectedMediaType(mt) == 0) { VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader)); grabber.Width = vih.BmiHeader.Width; grabber.Height = vih.BmiHeader.Height; mt.Dispose(); } // render graph.Render(DSTools.GetOutPin(grabberBase, 0)); // sg.SetBufferSamples(false); sg.SetOneShot(false); sg.SetCallback(grabber, 1); // window IVideoWindow win = (IVideoWindow)graphObj; win.put_AutoShow(false); win = null; // get events interface mediaEvent = (IMediaEventEx)graphObj; // get media control mc = (IMediaControl)graphObj; // run mc.Run(); while (!stopEvent.WaitOne(0, true)) { Thread.Sleep(100); // get an event if (mediaEvent.GetEvent(out code, out param1, out param2, 0) == 0) { // release params mediaEvent.FreeEventParams(code, param1, param2); // if (code == (int)EventCode.Complete) { break; } } } mc.StopWhenReady(); } // catch any exceptions catch (Exception e) { System.Diagnostics.Debug.WriteLine("----: " + e.Message); failed = true; } // finalization block finally { // release all objects mediaEvent = null; mc = null; fileSource = null; graph = null; sourceBase = null; grabberBase = null; sg = null; if (graphObj != null) { Marshal.ReleaseComObject(graphObj); graphObj = null; } if (sourceObj != null) { Marshal.ReleaseComObject(sourceObj); sourceObj = null; } if (grabberObj != null) { Marshal.ReleaseComObject(grabberObj); grabberObj = null; } } } }
/// <summary> /// Worker thread. /// </summary> /// private void WorkerThread( ) { // grabber Grabber grabber = new Grabber(this); // objects object graphObject = null; object sourceObject = null; object grabberObject = null; // interfaces IGraphBuilder graph = null; IBaseFilter sourceBase = null; IBaseFilter grabberBase = null; ISampleGrabber sampleGrabber = null; IMediaControl mediaControl = null; IFileSourceFilter fileSource = null; try { // get type for filter graph Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph); if (type == null) { throw new ApplicationException("Failed creating filter graph"); } // create filter graph graphObject = Activator.CreateInstance(type); graph = (IGraphBuilder)graphObject; // create source device's object type = Type.GetTypeFromCLSID(Clsid.AsyncReader); if (type == null) { throw new ApplicationException("Failed creating filter async reader"); } sourceObject = Activator.CreateInstance(type); sourceBase = (IBaseFilter)sourceObject; fileSource = (IFileSourceFilter)sourceObject; fileSource.Load(fileName, null); // get type for sample grabber type = Type.GetTypeFromCLSID(Clsid.SampleGrabber); if (type == null) { throw new ApplicationException("Failed creating sample grabber"); } // create sample grabber grabberObject = Activator.CreateInstance(type); sampleGrabber = (ISampleGrabber)grabberObject; grabberBase = (IBaseFilter)grabberObject; // add source and grabber filters to graph graph.AddFilter(sourceBase, "source"); graph.AddFilter(grabberBase, "grabber"); // set media type AMMediaType mediaType = new AMMediaType( ); mediaType.MajorType = MediaType.Video; mediaType.SubType = MediaSubType.RGB24; sampleGrabber.SetMediaType(mediaType); // connect pins if (graph.Connect(Tools.GetOutPin(sourceBase, 0), Tools.GetInPin(grabberBase, 0)) < 0) { throw new ApplicationException("Failed connecting filters"); } // get media type if (sampleGrabber.GetConnectedMediaType(mediaType) == 0) { VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader)); grabber.Width = vih.BmiHeader.Width; grabber.Height = vih.BmiHeader.Height; mediaType.Dispose( ); } // let's do rendering, if we don't need to prevent freezing if (!preventFreezing) { // render pin graph.Render(Tools.GetOutPin(grabberBase, 0)); // configure video window IVideoWindow window = (IVideoWindow)graphObject; window.put_AutoShow(false); window = null; } // configure sample grabber sampleGrabber.SetBufferSamples(false); sampleGrabber.SetOneShot(false); sampleGrabber.SetCallback(grabber, 1); // get media control mediaControl = (IMediaControl)graphObject; // run mediaControl.Run( ); while (!stopEvent.WaitOne(0, true)) { Thread.Sleep(100); } mediaControl.StopWhenReady( ); } catch (Exception exception) { // provide information to clients if (VideoSourceError != null) { VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message)); } } finally { // release all objects graph = null; sourceBase = null; grabberBase = null; sampleGrabber = null; mediaControl = null; fileSource = null; if (graphObject != null) { Marshal.ReleaseComObject(graphObject); graphObject = null; } if (sourceObject != null) { Marshal.ReleaseComObject(sourceObject); sourceObject = null; } if (grabberObject != null) { Marshal.ReleaseComObject(grabberObject); grabberObject = null; } } }
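// For completeness, a minimal usage sketch for the file-playback variant above, assuming the
// standard AForge.Video.DirectShow.FileVideoSource API (the file path is a placeholder):
using AForge.Video;
using AForge.Video.DirectShow;

var source = new FileVideoSource(@"C:\path\to\video.avi");

source.NewFrame += (sender, eventArgs) =>
{
    // eventArgs.Frame holds the decoded RGB24 frame for this tick
};

source.PlayingFinished += (sender, reason) =>
{
    // reason reports why playback ended (end of stream, stopped by user, ...)
};

source.Start();
// ...
source.SignalToStop();
source.WaitForStop();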
private void WorkerThread(bool runGraph) { ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser; bool isSapshotSupported = false; Grabber videoGrabber = new Grabber(this, false); Grabber snapshotGrabber = new Grabber(this, true); object captureGraphObject = null; object graphObject = null; object videoGrabberObject = null; object snapshotGrabberObject = null; object crossbarObject = null; ICaptureGraphBuilder2 captureGraph = null; IFilterGraph2 graph = null; IBaseFilter sourceBase = null; IBaseFilter videoGrabberBase = null; IBaseFilter snapshotGrabberBase = null; ISampleGrabber videoSampleGrabber = null; ISampleGrabber snapshotSampleGrabber = null; IMediaControl mediaControl = null; IAMVideoControl videoControl = null; IMediaEventEx mediaEvent = null; IPin pinStillImage = null; IAMCrossbar crossbar = null; try { Type type = Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2); if (type == null) { throw new ApplicationException("Failed creating capture graph builder"); } captureGraphObject = Activator.CreateInstance(type); captureGraph = (ICaptureGraphBuilder2)captureGraphObject; type = Type.GetTypeFromCLSID(Clsid.FilterGraph); if (type == null) { throw new ApplicationException("Failed creating filter graph"); } graphObject = Activator.CreateInstance(type); graph = (IFilterGraph2)graphObject; captureGraph.SetFiltergraph((IGraphBuilder)graph); sourceObject = FilterInfo.CreateFilter(deviceMoniker); if (sourceObject == null) { throw new ApplicationException("Failed creating device object for moniker"); } sourceBase = (IBaseFilter)sourceObject; try { videoControl = (IAMVideoControl)sourceObject; } catch { } type = Type.GetTypeFromCLSID(Clsid.SampleGrabber); if (type == null) { throw new ApplicationException("Failed creating sample grabber"); } videoGrabberObject = Activator.CreateInstance(type); videoSampleGrabber = (ISampleGrabber)videoGrabberObject; videoGrabberBase = (IBaseFilter)videoGrabberObject; snapshotGrabberObject = Activator.CreateInstance(type); snapshotSampleGrabber = (ISampleGrabber)snapshotGrabberObject; snapshotGrabberBase = (IBaseFilter)snapshotGrabberObject; graph.AddFilter(sourceBase, "source"); graph.AddFilter(videoGrabberBase, "grabber_video"); graph.AddFilter(snapshotGrabberBase, "grabber_snapshot"); AMMediaType mediaType = new AMMediaType( ); mediaType.MajorType = MediaType.Video; mediaType.SubType = MediaSubType.RGB24; videoSampleGrabber.SetMediaType(mediaType); snapshotSampleGrabber.SetMediaType(mediaType); captureGraph.FindInterface(FindDirection.UpstreamOnly, Guid.Empty, sourceBase, typeof(IAMCrossbar).GUID, out crossbarObject); if (crossbarObject != null) { crossbar = (IAMCrossbar)crossbarObject; } isCrossbarAvailable = (crossbar != null); crossbarVideoInputs = ColletCrossbarVideoInputs(crossbar); if (videoControl != null) { captureGraph.FindPin(sourceObject, PinDirection.Output, PinCategory.StillImage, MediaType.Video, false, 0, out pinStillImage); if (pinStillImage != null) { VideoControlFlags caps; videoControl.GetCaps(pinStillImage, out caps); isSapshotSupported = ((caps & VideoControlFlags.ExternalTriggerEnable) != 0); } } videoSampleGrabber.SetBufferSamples(false); videoSampleGrabber.SetOneShot(false); videoSampleGrabber.SetCallback(videoGrabber, 1); snapshotSampleGrabber.SetBufferSamples(true); snapshotSampleGrabber.SetOneShot(false); snapshotSampleGrabber.SetCallback(snapshotGrabber, 1); GetPinCapabilitiesAndConfigureSizeAndRate(captureGraph, sourceBase, PinCategory.Capture, videoResolution, ref videoCapabilities); if (isSapshotSupported) { 
GetPinCapabilitiesAndConfigureSizeAndRate(captureGraph, sourceBase, PinCategory.StillImage, snapshotResolution, ref snapshotCapabilities); } else { snapshotCapabilities = new VideoCapabilities[0]; } lock ( cacheVideoCapabilities ) { if ((videoCapabilities != null) && (!cacheVideoCapabilities.ContainsKey(deviceMoniker))) { cacheVideoCapabilities.Add(deviceMoniker, videoCapabilities); } } lock ( cacheSnapshotCapabilities ) { if ((snapshotCapabilities != null) && (!cacheSnapshotCapabilities.ContainsKey(deviceMoniker))) { cacheSnapshotCapabilities.Add(deviceMoniker, snapshotCapabilities); } } if (runGraph) { captureGraph.RenderStream(PinCategory.Capture, MediaType.Video, sourceBase, null, videoGrabberBase); if (videoSampleGrabber.GetConnectedMediaType(mediaType) == 0) { VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader)); videoGrabber.Width = vih.BmiHeader.Width; videoGrabber.Height = vih.BmiHeader.Height; mediaType.Dispose( ); } if ((isSapshotSupported) && (provideSnapshots)) { captureGraph.RenderStream(PinCategory.StillImage, MediaType.Video, sourceBase, null, snapshotGrabberBase); if (snapshotSampleGrabber.GetConnectedMediaType(mediaType) == 0) { VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader)); snapshotGrabber.Width = vih.BmiHeader.Width; snapshotGrabber.Height = vih.BmiHeader.Height; mediaType.Dispose( ); } } mediaControl = (IMediaControl)graphObject; mediaEvent = (IMediaEventEx)graphObject; IntPtr p1, p2; DsEvCode code; mediaControl.Run( ); if ((isSapshotSupported) && (provideSnapshots)) { startTime = DateTime.Now; videoControl.SetMode(pinStillImage, VideoControlFlags.ExternalTriggerEnable); } do { if (mediaEvent != null) { if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0) { mediaEvent.FreeEventParams(code, p1, p2); if (code == DsEvCode.DeviceLost) { reasonToStop = ReasonToFinishPlaying.DeviceLost; break; } } } if (needToSetVideoInput) { needToSetVideoInput = false; if (isCrossbarAvailable.Value) { SetCurrentCrossbarInput(crossbar, crossbarVideoInput); crossbarVideoInput = GetCurrentCrossbarInput(crossbar); } } if (needToSimulateTrigger) { needToSimulateTrigger = false; if ((isSapshotSupported) && (provideSnapshots)) { videoControl.SetMode(pinStillImage, VideoControlFlags.Trigger); } } if (needToDisplayPropertyPage) { needToDisplayPropertyPage = false; DisplayPropertyPage(parentWindowForPropertyPage, sourceObject); if (crossbar != null) { crossbarVideoInput = GetCurrentCrossbarInput(crossbar); } } if (needToDisplayCrossBarPropertyPage) { needToDisplayCrossBarPropertyPage = false; if (crossbar != null) { DisplayPropertyPage(parentWindowForPropertyPage, crossbar); crossbarVideoInput = GetCurrentCrossbarInput(crossbar); } } }while (!stopEvent.WaitOne(100, false)); mediaControl.Stop( ); } } catch (Exception exception) { if (VideoSourceError != null) { VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message)); } } finally { captureGraph = null; graph = null; sourceBase = null; mediaControl = null; videoControl = null; mediaEvent = null; pinStillImage = null; crossbar = null; videoGrabberBase = null; snapshotGrabberBase = null; videoSampleGrabber = null; snapshotSampleGrabber = null; if (graphObject != null) { Marshal.ReleaseComObject(graphObject); graphObject = null; } if (sourceObject != null) { Marshal.ReleaseComObject(sourceObject); sourceObject = null; } if (videoGrabberObject != null) { Marshal.ReleaseComObject(videoGrabberObject); videoGrabberObject 
= null; } if (snapshotGrabberObject != null) { Marshal.ReleaseComObject(snapshotGrabberObject); snapshotGrabberObject = null; } if (captureGraphObject != null) { Marshal.ReleaseComObject(captureGraphObject); captureGraphObject = null; } if (crossbarObject != null) { Marshal.ReleaseComObject(crossbarObject); crossbarObject = null; } } if (PlayingFinished != null) { PlayingFinished(this, reasonToStop); } }
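// The Grabber class handed to ISampleGrabber.SetCallback(videoGrabber, 1) above is not shown in this
// excerpt. Below is a minimal sketch of its likely shape (an assumption, not the original class):
// callback type 1 selects BufferCB, so only that method does real work, and Width/Height are the
// values the worker thread assigns after GetConnectedMediaType. It assumes the interop layer declares
// an ISampleGrabberCB interface with these two methods, and needs System.Drawing,
// System.Drawing.Imaging and System.Runtime.InteropServices.
internal class GrabberSketch : ISampleGrabberCB
{
    public int Width { get; set; }
    public int Height { get; set; }

    // Whole-sample callback; unused because callback type 1 (buffer callback) was requested.
    // The real interop declaration may take IMediaSample instead of IntPtr here.
    public int SampleCB(double sampleTime, IntPtr sample)
    {
        return 0;
    }

    // Raw RGB24 frame buffer callback.
    public int BufferCB(double sampleTime, IntPtr buffer, int bufferLen)
    {
        // RGB24 frames from the sample grabber arrive bottom-up; copy row by row, flipping vertically.
        int srcStride = bufferLen / Height;                 // bytes per source row (DWORD aligned)
        byte[] row = new byte[srcStride];

        using (Bitmap frame = new Bitmap(Width, Height, PixelFormat.Format24bppRgb))
        {
            BitmapData data = frame.LockBits(new Rectangle(0, 0, Width, Height),
                ImageLockMode.WriteOnly, PixelFormat.Format24bppRgb);

            for (int y = 0; y < Height; y++)
            {
                IntPtr srcRow = IntPtr.Add(buffer, (Height - 1 - y) * srcStride);
                IntPtr dstRow = IntPtr.Add(data.Scan0, y * data.Stride);
                Marshal.Copy(srcRow, row, 0, srcStride);
                Marshal.Copy(row, 0, dstRow, srcStride);
            }

            frame.UnlockBits(data);
            // here the owning video source would raise its new-frame event with the bitmap
        }
        return 0;
    }
}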
private async void Start()
{
    await Program.ComputeContext.SwitchTo();

    CapGraphBuilder2 = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2)) as ICaptureGraphBuilder2;
    FilterGraph2 = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph)) as IFilterGraph2;
    SampleGrabberBaseFilter = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.SampleGrabber)) as IBaseFilter;
    SampleGrabber = SampleGrabberBaseFilter as ISampleGrabber;

    CapGraphBuilder2.SetFiltergraph(FilterGraph2 as IGraphBuilder);

    FilterInfo.CreateFilter(this.Uuid, out CaptureSourceBaseFilter);
    CaptureSourceBaseFilter.SetSyncSource(IntPtr.Zero);
    SampleGrabberBaseFilter.SetSyncSource(IntPtr.Zero);

    VideoProcAmp = CaptureSourceBaseFilter as IAMVideoProcAmp;
    CameraControl = CaptureSourceBaseFilter as IAMCameraControl;
    KsPropertySet = CaptureSourceBaseFilter as IKsPropertySet;

    VideoProcAmp.Set(VideoProcAmpProperty.ColorEnable, 1, VideoProcAmpFlags.Manual);
    // Note: 1000 / 120 is integer division, so this requests an 8 second exposure;
    // the intent was probably a 1/120 s (~8.3 ms) exposure.
    KsPropertySet.SetExposure(TimeSpan.FromSeconds(1000 / 120));

    FilterGraph2.AddFilter(CaptureSourceBaseFilter, "source");
    FilterGraph2.AddFilter(SampleGrabberBaseFilter, "grabber");

    object streamConfigObj;
    CapGraphBuilder2.FindInterface(PinCategory.Capture, MediaType.Video, CaptureSourceBaseFilter, typeof(IAMStreamConfig).GUID, out streamConfigObj);
    IAMStreamConfig streamConfig = (IAMStreamConfig)streamConfigObj;

    VideoCaps = Pentacorn.Captures.DirectShow.VideoCapabilities.FromStreamConfig(streamConfig);
    var desiredFormat = VideoCaps.Where(vc => vc.FrameSize.Width == this.Width && vc.FrameSize.Height == this.Height)
                                 .OrderByDescending(vc => vc.MaxFrameRate).First();
    streamConfig.SetFormat(desiredFormat.MediaType);

    var hr = SampleGrabber.SetMediaType(desiredFormat.MediaType);
    if (hr < 0) throw new Win32Exception(hr);

    SampleGrabber.SetBufferSamples(true);
    SampleGrabber.SetOneShot(false);
    SampleGrabber.SetCallback(this, 1);

    // capture RenderStream's HRESULT; previously the stale SetMediaType result was re-checked here
    hr = CapGraphBuilder2.RenderStream(PinCategory.Capture, MediaType.Video, CaptureSourceBaseFilter, null, SampleGrabberBaseFilter);
    if (hr < 0) throw new Win32Exception(hr);

    AMMediaType mediaType = new AMMediaType();
    if (SampleGrabber.GetConnectedMediaType(mediaType) >= 0)
    {
        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
        if (this.Width != vih.BmiHeader.Width) throw new Exception("DirectShow capture width not what's requested.");
        if (this.Height != vih.BmiHeader.Height) throw new Exception("DirectShow capture height not what's requested.");
        mediaType.Dispose();
    }

    MediaControl = (IMediaControl)FilterGraph2;
    MediaControl.Run();
}
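// The Start() above never tears the graph down. A matching teardown (an assumption, not shown in
// the source) would stop the graph, detach the callback and release the COM objects, mirroring the
// finally blocks of the worker threads elsewhere in this file.
private void Stop()
{
    if (MediaControl != null)
    {
        MediaControl.Stop();                        // stop streaming before releasing anything
        MediaControl = null;
    }

    if (SampleGrabber != null)
    {
        SampleGrabber.SetCallback(null, 1);         // no further BufferCB calls after this point
        SampleGrabber = null;
    }

    VideoProcAmp = null;
    CameraControl = null;
    KsPropertySet = null;

    if (SampleGrabberBaseFilter != null) { Marshal.ReleaseComObject(SampleGrabberBaseFilter); SampleGrabberBaseFilter = null; }
    if (CaptureSourceBaseFilter != null) { Marshal.ReleaseComObject(CaptureSourceBaseFilter); CaptureSourceBaseFilter = null; }
    if (FilterGraph2 != null) { Marshal.ReleaseComObject(FilterGraph2); FilterGraph2 = null; }
    if (CapGraphBuilder2 != null) { Marshal.ReleaseComObject(CapGraphBuilder2); CapGraphBuilder2 = null; }
}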
private void CreateFilters() { isValid = true; // grabber grabberVideo = new GrabberVideo(this); grabberAudio = new GrabberAudio(this); // objects graphObject = null; grabberObjectVideo = null; grabberObjectAudio = null; int sourceBaseVideoPinIndex = 0; try { // get type for filter graph Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph); if (type == null) { throw new ApplicationException("Failed creating filter graph"); } // create filter graph graphObject = Activator.CreateInstance(type); graph = (IGraphBuilder)graphObject; // create source device's object if (fileName.ToLower().EndsWith(".wmv")) { type = Type.GetTypeFromCLSID(Clsid.WMASFReader); if (type == null) { throw new ApplicationException("Failed creating ASF Reader filter"); } sourceBase = (IBaseFilter)Activator.CreateInstance(type); IFileSourceFilter sourceFile = (IFileSourceFilter)sourceBase; sourceFile.Load(fileName, null); graph.AddFilter(sourceBase, "source"); sourceBaseVideoPinIndex = 1; } else { graph.AddSourceFilter(fileName, "source", out sourceBase); if (sourceBase == null) { try { type = Type.GetTypeFromCLSID(Clsid.AsyncReader); if (type == null) { throw new ApplicationException("Failed creating Async Reader filter"); } sourceBase = (IBaseFilter)Activator.CreateInstance(type); IFileSourceFilter sourceFile = (IFileSourceFilter)sourceBase; sourceFile.Load(fileName, null); graph.AddFilter(sourceBase, "source"); } catch { throw new ApplicationException("Failed creating source filter"); } } sourceBaseVideoPinIndex = 0; } // get type for sample grabber type = Type.GetTypeFromCLSID(Clsid.SampleGrabber); if (type == null) { throw new ApplicationException("Failed creating sample grabber"); } // create sample grabber grabberObjectVideo = Activator.CreateInstance(type); sampleGrabberVideo = (ISampleGrabber)grabberObjectVideo; grabberBaseVideo = (IBaseFilter)grabberObjectVideo; // add grabber filters to graph graph.AddFilter(grabberBaseVideo, "grabberVideo"); // set media type AMMediaType mediaType = new AMMediaType { MajorType = MediaType.Video, SubType = MediaSubType.ARGB32 /* MediaSubType.RGB24 */ }; ; sampleGrabberVideo.SetMediaType(mediaType); // connect pins IPin outPin = Tools.GetOutPin(sourceBase, sourceBaseVideoPinIndex); IPin inPin = Tools.GetInPin(grabberBaseVideo, 0); if (graph.Connect(outPin, inPin) < 0) { throw new ApplicationException("Failed connecting sourceBase to grabberBaseVideo"); } Marshal.ReleaseComObject(outPin); Marshal.ReleaseComObject(inPin); // get media type if (sampleGrabberVideo.GetConnectedMediaType(mediaType) == 0) { VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader)); grabberVideo.Width = vih.BmiHeader.Width; grabberVideo.Height = vih.BmiHeader.Height; mediaType.Dispose(); } if (useAudioGrabber) { // ***************************************************************** // ******** Add the audio grabber to monitor audio peaks *********** bool audioGrabberIsConnected = false; Tools.FilterInfo2 filterInfo2 = Tools.GetNextFilter(sourceBase, PinDirection.Output, 0); foreach (Tools.PinInfo2 pinInfo2 in filterInfo2.Pins) { if (pinInfo2.PinInfo.Direction == PinDirection.Output) { if (!Tools.IsPinConnected(pinInfo2.Pin)) { try { graph.Render(pinInfo2.Pin); AMMediaType mt = new AMMediaType(); pinInfo2.Pin.ConnectionMediaType(mt); if (mt.MajorType == MediaType.Audio) { // Obtain a reference to the filter connected to the audio output of the video splitter (usually, this is the audio decoder) Tools.FilterInfo2 decoderFilterInfo2 = 
Tools.GetNextFilter(pinInfo2.PinInfo.Filter, PinDirection.Output, 0); // Remove all the filters connected to the audio decoder filter System.Collections.Generic.List <Tools.FilterInfo2> filtersInfo2 = new System.Collections.Generic.List <Tools.FilterInfo2>(); Tools.FilterInfo2 testFilterInfo2 = Tools.GetNextFilter(decoderFilterInfo2.Filter, PinDirection.Output, 0); while (true) { filtersInfo2.Add(testFilterInfo2); testFilterInfo2 = Tools.GetNextFilter(testFilterInfo2.Filter, PinDirection.Output, 0); if (testFilterInfo2.Filter == null) { break; } } foreach (Tools.FilterInfo2 fi2 in filtersInfo2) { graph.RemoveFilter(fi2.Filter); fi2.Release(); } // get type for sample grabber type = Type.GetTypeFromCLSID(Clsid.SampleGrabber); if (type == null) { throw new ApplicationException("Failed creating audio sample grabber"); } // create sample grabber grabberObjectAudio = Activator.CreateInstance(type); sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio; grabberBaseAudio = (IBaseFilter)grabberObjectAudio; // add grabber filters to graph graph.AddFilter(grabberBaseAudio, "grabberAudio"); // set media type AMMediaType mediaTypeAudio = new AMMediaType { MajorType = MediaType.Audio, SubType = MediaSubType.PCM, FormatType = FormatType.WaveEx }; sampleGrabberAudio.SetMediaType(mediaTypeAudio); outPin = Tools.GetOutPin(decoderFilterInfo2.Filter, 0); inPin = Tools.GetInPin(grabberBaseAudio, 0); if (graph.Connect(outPin, inPin) < 0) { throw new ApplicationException("Failed connecting filter to grabberBaseAudio"); } Marshal.ReleaseComObject(outPin); Marshal.ReleaseComObject(inPin); // Finally, connect the grabber to the audio renderer outPin = Tools.GetOutPin(grabberBaseAudio, 0); graph.Render(outPin); mt = new AMMediaType(); outPin.ConnectionMediaType(mt); if (!Tools.IsPinConnected(outPin)) { throw new ApplicationException("Failed obtaining media audio information"); } wavFormat = new WaveFormatEx(); Marshal.PtrToStructure(mt.FormatPtr, wavFormat); Marshal.ReleaseComObject(outPin); // configure sample grabber sampleGrabberAudio.SetBufferSamples(false); sampleGrabberAudio.SetOneShot(false); sampleGrabberAudio.SetCallback(grabberAudio, 1); audioGrabberIsConnected = true; break; } } catch { } } } } filterInfo2.Release(); if (!audioGrabberIsConnected) { foreach (Tools.PinInfo2 pinInfo2 in Tools.GetPins(sourceBase)) { if (!Tools.IsPinConnected(pinInfo2.Pin)) { foreach (AMMediaType mt in Tools.GetMediaTypes(pinInfo2.Pin)) { if (mt.MajorType == MediaType.Audio) { // create sample grabber grabberObjectAudio = Activator.CreateInstance(type); sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio; grabberBaseAudio = (IBaseFilter)grabberObjectAudio; // add grabber filters to graph graph.AddFilter(grabberBaseAudio, "grabberAudio"); // set media type AMMediaType mediaTypeAudio = new AMMediaType { MajorType = MediaType.Audio, SubType = MediaSubType.PCM, FormatType = FormatType.WaveEx }; sampleGrabberAudio.SetMediaType(mediaTypeAudio); inPin = Tools.GetInPin(grabberBaseAudio, 0); if (graph.Connect(pinInfo2.Pin, inPin) < 0) { throw new ApplicationException("Failed connecting sourceBase to grabberBaseVideo"); } Marshal.ReleaseComObject(inPin); // Finally, connect the grabber to the audio renderer outPin = Tools.GetOutPin(grabberBaseAudio, 0); graph.Render(outPin); AMMediaType amt = new AMMediaType(); outPin.ConnectionMediaType(amt); if (!Tools.IsPinConnected(outPin)) { throw new ApplicationException("Failed obtaining media audio information"); } wavFormat = new WaveFormatEx(); Marshal.PtrToStructure(amt.FormatPtr, 
wavFormat); Marshal.ReleaseComObject(outPin); // configure sample grabber sampleGrabberAudio.SetBufferSamples(false); sampleGrabberAudio.SetOneShot(false); sampleGrabberAudio.SetCallback(grabberAudio, 1); audioGrabberIsConnected = true; break; } } } } } // ***************************************************************** } // let's do the rendering, if we don't need to prevent freezing if (!preventFreezing) { // render pin graph.Render(Tools.GetOutPin(grabberBaseVideo, 0)); // configure video window IVideoWindow window = (IVideoWindow)graphObject; window.put_AutoShow(false); window = null; } // configure sample grabber sampleGrabberVideo.SetBufferSamples(false); sampleGrabberVideo.SetOneShot(false); sampleGrabberVideo.SetCallback(grabberVideo, 1); // disable clock, if someone requested it if (!referenceClockEnabled) { IMediaFilter mediaFilter = (IMediaFilter)graphObject; mediaFilter.SetSyncSource(null); } // get media control mediaControl = (IMediaControl)graphObject; // get media seek control mediaSeekControl = (IMediaSeeking)graphObject; // get media events' interface mediaEvent = (IMediaEventEx)graphObject; // get media audio control basicAudio = (IBasicAudio)graphObject; } catch (Exception exception) { DestroyFilters(); // provide information to clients VideoSourceError?.Invoke(this, new VideoSourceErrorEventArgs(exception.Message)); } }
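// The GrabberAudio callback wired above is not included in this excerpt. A sketch of what it could
// do with the PCM buffer, using the WaveFormatEx captured into wavFormat (this assumes 16-bit
// samples, a BitsPerSample field on the interop type, and an ISampleGrabberCB interop interface;
// the real class may differ):
internal class GrabberAudioSketch : ISampleGrabberCB
{
    public WaveFormatEx Format { get; set; }
    public double LastPeak { get; private set; }    // normalized 0..1, usable for a VU-style peak meter

    public int SampleCB(double sampleTime, IntPtr sample)
    {
        return 0;                                   // unused; callback type 1 was requested
    }

    public int BufferCB(double sampleTime, IntPtr buffer, int bufferLen)
    {
        if (Format == null || Format.BitsPerSample != 16)
        {
            return 0;                               // only 16-bit PCM handled in this sketch
        }

        short[] samples = new short[bufferLen / 2];
        Marshal.Copy(buffer, samples, 0, samples.Length);

        int peak = 0;
        foreach (short s in samples)
        {
            int abs = s == short.MinValue ? short.MaxValue : Math.Abs(s);
            if (abs > peak) peak = abs;
        }

        LastPeak = peak / (double)short.MaxValue;
        return 0;
    }
}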
// Thread entry point public void WorkerThread() { int hr; Guid cat; Guid med; // grabber Grabber grabber = new Grabber(this); // objects object graphObj = null; object grabberObj = null; // interfaces IGraphBuilder graphBuilder = null; ICaptureGraphBuilder2 captureGraphBuilder = null; IBaseFilter videoDeviceFilter = null; IBaseFilter grabberFilter = null; ISampleGrabber sg = null; IMediaControl mc = null; try { // Make a new filter graph graphObj = Activator.CreateInstance( Type.GetTypeFromCLSID(Clsid.FilterGraph, true)); graphBuilder = (IGraphBuilder)graphObj; // Get the Capture Graph Builder Guid clsid = Clsid.CaptureGraphBuilder2; Guid riid = typeof(ICaptureGraphBuilder2).GUID; captureGraphBuilder = (ICaptureGraphBuilder2) TempFix.CreateDsInstance(ref clsid, ref riid); // Link the CaptureGraphBuilder to the filter graph hr = captureGraphBuilder.SetFiltergraph(graphBuilder); if (hr < 0) Marshal.ThrowExceptionForHR(hr); // Get the video device and add it to the filter graph if (source != null) { videoDeviceFilter = (IBaseFilter) Marshal.BindToMoniker(source); hr = graphBuilder.AddFilter(videoDeviceFilter, "Video Capture Device"); if (hr < 0) Marshal.ThrowExceptionForHR(hr); } // create sample grabber, object and filter grabberObj = Activator.CreateInstance( Type.GetTypeFromCLSID(Clsid.SampleGrabber, true)); grabberFilter = (IBaseFilter)grabberObj; sg = (ISampleGrabber)grabberObj; // add sample grabber filter to filter graph hr = graphBuilder.AddFilter(grabberFilter, "grabber"); if (hr < 0) Marshal.ThrowExceptionForHR(hr); // Try looking for an video device interleaved media type IBaseFilter testFilter = videoDeviceFilter; // grabberFilter (not supported) object o; cat = PinCategory.Capture; med = MediaType.Interleaved; Guid iid = typeof(IAMStreamConfig).GUID; hr = captureGraphBuilder.FindInterface( ref cat, ref med, testFilter, ref iid, out o); if (hr != 0) { // If not found, try looking for a video media type med = MediaType.Video; hr = captureGraphBuilder.FindInterface( ref cat, ref med, testFilter, ref iid, out o); if (hr != 0) o = null; } // Set the video stream configuration to data member videoStreamConfig = o as IAMStreamConfig; o = null; // Experimental testing: Try to set the Frame Size & Rate // Results: When enabled, the grabber video breaks up into // several duplicate frames (6 frames) bool bdebug = true; if (bdebug) { BitmapInfoHeader bmiHeader; bmiHeader = (BitmapInfoHeader) getStreamConfigSetting(videoStreamConfig, "BmiHeader"); bmiHeader.Width = framesize.Width; bmiHeader.Height = framesize.Height; setStreamConfigSetting(videoStreamConfig, "BmiHeader", bmiHeader); long avgTimePerFrame = (long)(10000000 / framerate); setStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame", avgTimePerFrame); } // connect pins (Turns on the video device) if (graphBuilder.Connect(DSTools.GetOutPin( videoDeviceFilter, 0), DSTools.GetInPin(grabberFilter, 0)) < 0) throw new ApplicationException( "Failed connecting filters"); // Set the sample grabber media type settings AMMediaType mt = new AMMediaType(); mt.majorType = MediaType.Video; mt.subType = MediaSubType.RGB24; sg.SetMediaType(mt); // get media type if (sg.GetConnectedMediaType(mt) == 0) { VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader)); System.Diagnostics.Debug.WriteLine("width = " + vih.BmiHeader.Width + ", height = " + vih.BmiHeader.Height); grabber.Width = vih.BmiHeader.Width; grabber.Height = vih.BmiHeader.Height; mt.Dispose(); } // render 
graphBuilder.Render(DSTools.GetOutPin(grabberFilter, 0)); // Set various sample grabber properties sg.SetBufferSamples(false); sg.SetOneShot(false); sg.SetCallback(grabber, 1); // Do not show active (source) window IVideoWindow win = (IVideoWindow)graphObj; win.put_AutoShow(false); win = null; // get media control mc = (IMediaControl)graphObj; // run mc.Run(); while (!stopEvent.WaitOne(0, true)) { Thread.Sleep(100); } mc.StopWhenReady(); } // catch any exceptions catch (Exception e) { System.Diagnostics.Debug.WriteLine("----: " + e.Message); } // finalization block finally { // release all objects mc = null; graphBuilder = null; captureGraphBuilder = null; videoDeviceFilter = null; grabberFilter = null; sg = null; if (graphObj != null) { Marshal.ReleaseComObject(graphObj); graphObj = null; } if (grabberObj != null) { Marshal.ReleaseComObject(grabberObj); grabberObj = null; } } }
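// TempFix.CreateDsInstance used in the worker thread above is not shown; it is presumably a thin
// wrapper around ole32's CoCreateInstance along these lines (a sketch under that assumption,
// requiring System.Runtime.InteropServices):
internal static class TempFixSketch
{
    private const uint CLSCTX_INPROC_SERVER = 1;

    [DllImport("ole32.dll", ExactSpelling = true, PreserveSig = false)]
    [return: MarshalAs(UnmanagedType.Interface)]
    private static extern object CoCreateInstance(
        [In, MarshalAs(UnmanagedType.LPStruct)] Guid rclsid,
        [MarshalAs(UnmanagedType.IUnknown)] object pUnkOuter,
        uint dwClsContext,
        [In, MarshalAs(UnmanagedType.LPStruct)] Guid riid);

    // called above as: captureGraphBuilder = (ICaptureGraphBuilder2)TempFix.CreateDsInstance(ref clsid, ref riid);
    public static object CreateDsInstance(ref Guid clsid, ref Guid riid)
    {
        // PreserveSig = false turns a failing HRESULT into a COMException
        return CoCreateInstance(clsid, null, CLSCTX_INPROC_SERVER, riid);
    }
}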
// Thread entry point public void WorkerThread() { // grabber Grabber grabber = new Grabber(this); // objects object graphObj = null; object sourceObj = null; object grabberObj = null; // interfaces IGraphBuilder graph = null; IBaseFilter sourceBase = null; IBaseFilter grabberBase = null; ISampleGrabber sg = null; IMediaControl mc = null; try { // Get type for filter graph Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph); if (srvType == null) { throw new ApplicationException("Failed creating filter graph"); } // create filter graph graphObj = Activator.CreateInstance(srvType); graph = (IGraphBuilder)graphObj; // ---- UCOMIBindCtx bindCtx = null; UCOMIMoniker moniker = null; int n = 0; // create bind context if (Win32.CreateBindCtx(0, out bindCtx) == 0) { // convert moniker`s string to a moniker if (Win32.MkParseDisplayName(bindCtx, source, ref n, out moniker) == 0) { // get device base filter Guid filterId = typeof(IBaseFilter).GUID; moniker.BindToObject(null, null, ref filterId, out sourceObj); Marshal.ReleaseComObject(moniker); moniker = null; } Marshal.ReleaseComObject(bindCtx); bindCtx = null; } // ---- if (sourceObj == null) { throw new ApplicationException("Failed creating device object for moniker"); } sourceBase = (IBaseFilter)sourceObj; // Get type for sample grabber srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber); if (srvType == null) { throw new ApplicationException("Failed creating sample grabber"); } // create sample grabber grabberObj = Activator.CreateInstance(srvType); sg = (ISampleGrabber)grabberObj; grabberBase = (IBaseFilter)grabberObj; // add source filter to graph graph.AddFilter(sourceBase, "source"); graph.AddFilter(grabberBase, "grabber"); // set media type AMMediaType mt = new AMMediaType(); mt.majorType = MediaType.Video; mt.subType = MediaSubType.RGB24; sg.SetMediaType(mt); // connect pins if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0) { throw new ApplicationException("Failed connecting filters"); } // get media type if (sg.GetConnectedMediaType(mt) == 0) { VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader)); grabber.Width = vih.BmiHeader.Width; grabber.Height = vih.BmiHeader.Height; mt.Dispose(); } // render graph.Render(DSTools.GetOutPin(grabberBase, 0)); // sg.SetBufferSamples(false); sg.SetOneShot(false); sg.SetCallback(grabber, 1); // window IVideoWindow win = (IVideoWindow)graphObj; win.put_AutoShow(false); win = null; // get media control mc = (IMediaControl)graphObj; // run mc.Run(); while (!stopEvent.WaitOne(0, true)) { Thread.Sleep(100); } mc.StopWhenReady(); } // catch any exceptions catch (Exception e) { System.Diagnostics.Debug.WriteLine("----: " + e.Message); } // finalization block finally { // release all objects mc = null; graph = null; sourceBase = null; grabberBase = null; sg = null; if (graphObj != null) { Marshal.ReleaseComObject(graphObj); graphObj = null; } if (sourceObj != null) { Marshal.ReleaseComObject(sourceObj); sourceObj = null; } if (grabberObj != null) { Marshal.ReleaseComObject(grabberObj); grabberObj = null; } } }
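// Win32.CreateBindCtx and Win32.MkParseDisplayName used above are not included in this excerpt;
// they are presumably thin P/Invoke wrappers along these lines (UCOMIBindCtx / UCOMIMoniker are
// the legacy System.Runtime.InteropServices COM wrappers this code already relies on):
internal static class Win32Sketch
{
    [DllImport("ole32.dll")]
    public static extern int CreateBindCtx(int reserved, out UCOMIBindCtx ppbc);

    [DllImport("ole32.dll", CharSet = CharSet.Unicode)]
    public static extern int MkParseDisplayName(UCOMIBindCtx pbc, string szUserName,
        ref int pchEaten, out UCOMIMoniker ppmk);
}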
public void WorkerThread() { Grabber pCallback = new Grabber(this); object o = null; object ppvResult = null; object obj4 = null; IGraphBuilder builder = null; IBaseFilter pFilter = null; IBaseFilter filter2 = null; ISampleGrabber grabber2 = null; IMediaControl control = null; try { Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.FilterGraph); if (typeFromCLSID == null) { throw new ApplicationException("Failed creating filter graph"); } o = Activator.CreateInstance(typeFromCLSID); builder = (IGraphBuilder)o; UCOMIBindCtx ppbc = null; UCOMIMoniker ppmk = null; int pchEaten = 0; if (Win32.CreateBindCtx(0, out ppbc) == 0) { if (Win32.MkParseDisplayName(ppbc, this.source, ref pchEaten, out ppmk) == 0) { Guid gUID = typeof(IBaseFilter).GUID; ppmk.BindToObject(null, null, ref gUID, out ppvResult); Marshal.ReleaseComObject(ppmk); ppmk = null; } Marshal.ReleaseComObject(ppbc); ppbc = null; } if (ppvResult == null) { throw new ApplicationException("Failed creating device object for moniker"); } pFilter = (IBaseFilter)ppvResult; typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber); if (typeFromCLSID == null) { throw new ApplicationException("Failed creating sample grabber"); } obj4 = Activator.CreateInstance(typeFromCLSID); grabber2 = (ISampleGrabber)obj4; filter2 = (IBaseFilter)obj4; builder.AddFilter(pFilter, "source"); builder.AddFilter(filter2, "grabber"); AMMediaType pmt = new AMMediaType { majorType = MediaType.Video, subType = MediaSubType.RGB24 }; grabber2.SetMediaType(pmt); if (builder.Connect(DSTools.GetOutPin(pFilter, 0), DSTools.GetInPin(filter2, 0)) < 0) { throw new ApplicationException("Failed connecting filters"); } if (grabber2.GetConnectedMediaType(pmt) == 0) { VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(pmt.formatPtr, typeof(VideoInfoHeader)); pCallback.Width = header.BmiHeader.Width; pCallback.Height = header.BmiHeader.Height; pmt.Dispose(); } builder.Render(DSTools.GetOutPin(filter2, 0)); grabber2.SetBufferSamples(false); grabber2.SetOneShot(false); grabber2.SetCallback(pCallback, 1); ((IVideoWindow)o).put_AutoShow(false); control = (IMediaControl)o; control.Run(); while (!this.stopEvent.WaitOne(0, true)) { Thread.Sleep(100); } control.StopWhenReady(); } catch (Exception) { } finally { control = null; builder = null; pFilter = null; filter2 = null; grabber2 = null; if (o != null) { Marshal.ReleaseComObject(o); o = null; } if (ppvResult != null) { Marshal.ReleaseComObject(ppvResult); ppvResult = null; } if (obj4 != null) { Marshal.ReleaseComObject(obj4); obj4 = null; } } }
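// Hedged usage sketch of how a caller typically drives the polling worker threads above: the worker
// checks stopEvent every 100 ms, so stopping is just a matter of signalling the event and joining
// the thread. Start/SignalToStop/WaitForStop and the thread field are illustrative names, not part
// of the source; stopEvent is assumed to be a ManualResetEvent (System.Threading).
public void Start()
{
    stopEvent = new ManualResetEvent(false);
    thread = new Thread(WorkerThread) { IsBackground = true };
    thread.Start();
}

public void SignalToStop()
{
    if (stopEvent != null)
    {
        stopEvent.Set();        // the worker notices this within ~100 ms and stops the graph
    }
}

public void WaitForStop()
{
    if (thread != null)
    {
        thread.Join();
        stopEvent.Close();
        thread = null;
        stopEvent = null;
    }
}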
/// <summary> /// Worker thread. /// </summary> /// private void WorkerThread( ) { ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser; // grabber Grabber grabber = new Grabber(this); // objects object graphObject = null; object grabberObject = null; // interfaces IGraphBuilder graph = null; IBaseFilter sourceBase = null; IBaseFilter grabberBase = null; ISampleGrabber sampleGrabber = null; IMediaControl mediaControl = null; IMediaEventEx mediaEvent = null; try { // get type for filter graph Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph); if (type == null) { throw new ApplicationException("Failed creating filter graph"); } // create filter graph graphObject = Activator.CreateInstance(type); graph = (IGraphBuilder)graphObject; // create source device's object graph.AddSourceFilter(fileName, "source", out sourceBase); if (sourceBase == null) { throw new ApplicationException("Failed creating source filter"); } // get type for sample grabber type = Type.GetTypeFromCLSID(Clsid.SampleGrabber); if (type == null) { throw new ApplicationException("Failed creating sample grabber"); } // create sample grabber grabberObject = Activator.CreateInstance(type); sampleGrabber = (ISampleGrabber)grabberObject; grabberBase = (IBaseFilter)grabberObject; // add grabber filters to graph graph.AddFilter(grabberBase, "grabber"); // set media type AMMediaType mediaType = new AMMediaType( ); mediaType.MajorType = MediaType.Video; mediaType.SubType = MediaSubType.RGB24; sampleGrabber.SetMediaType(mediaType); // connect pins int pinToTry = 0; IPin inPin = Tools.GetInPin(grabberBase, 0); IPin outPin = null; // find output pin acceptable by sample grabber while (true) { outPin = Tools.GetOutPin(sourceBase, pinToTry); if (outPin == null) { Marshal.ReleaseComObject(inPin); throw new ApplicationException("Did not find acceptable output video pin in the given source"); } if (graph.Connect(outPin, inPin) < 0) { Marshal.ReleaseComObject(outPin); outPin = null; pinToTry++; } else { break; } } Marshal.ReleaseComObject(outPin); Marshal.ReleaseComObject(inPin); // get media type if (sampleGrabber.GetConnectedMediaType(mediaType) == 0) { VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader)); grabber.Width = vih.BmiHeader.Width; grabber.Height = vih.BmiHeader.Height; mediaType.Dispose( ); } // let's do rendering, if we don't need to prevent freezing if (!preventFreezing) { // render pin graph.Render(Tools.GetOutPin(grabberBase, 0)); // configure video window IVideoWindow window = (IVideoWindow)graphObject; window.put_AutoShow(false); window = null; } // configure sample grabber sampleGrabber.SetBufferSamples(false); sampleGrabber.SetOneShot(false); sampleGrabber.SetCallback(grabber, 1); // disable clock, if someone requested it if (!referenceClockEnabled) { IMediaFilter mediaFilter = (IMediaFilter)graphObject; mediaFilter.SetSyncSource(null); } // get media control mediaControl = (IMediaControl)graphObject; // get media events' interface mediaEvent = (IMediaEventEx)graphObject; IntPtr p1, p2; DsEvCode code; // run mediaControl.Run( ); do { if (mediaEvent != null) { if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0) { mediaEvent.FreeEventParams(code, p1, p2); if (code == DsEvCode.Complete) { reasonToStop = ReasonToFinishPlaying.EndOfStreamReached; break; } } } }while (!stopEvent.WaitOne(100, false)); mediaControl.Stop( ); } catch (Exception exception) { // provide information to clients if (VideoSourceError != null) { VideoSourceError(this, new 
VideoSourceErrorEventArgs(exception.Message)); } } finally { // release all objects graph = null; grabberBase = null; sampleGrabber = null; mediaControl = null; mediaEvent = null; if (graphObject != null) { Marshal.ReleaseComObject(graphObject); graphObject = null; } if (sourceBase != null) { Marshal.ReleaseComObject(sourceBase); sourceBase = null; } if (grabberObject != null) { Marshal.ReleaseComObject(grabberObject); grabberObject = null; } } if (PlayingFinished != null) { PlayingFinished(this, reasonToStop); } }
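// Hedged usage sketch for the events raised by the file-playback worker above. The delegate shapes
// are inferred from the invocations (PlayingFinished receives the ReasonToFinishPlaying value,
// VideoSourceError an args object built from the exception message); videoSource and the
// Description property name are assumptions.
videoSource.VideoSourceError += (sender, args) =>
{
    Console.WriteLine("Video source error: " + args.Description);
};

videoSource.PlayingFinished += (sender, reason) =>
{
    if (reason == ReasonToFinishPlaying.EndOfStreamReached)
    {
        Console.WriteLine("Clip reached its end.");         // the DsEvCode.Complete path above
    }
    else
    {
        Console.WriteLine("Playback stopped: " + reason);   // e.g. StoppedByUser
    }
};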
private void WorkerThread(bool runGraph) { VideoSourceFinishedReasonType reasonToStop = VideoSourceFinishedReasonType.StoppedByUser; bool isSapshotSupported = false; // grabber Grabber videoGrabber = new Grabber(this, false); Grabber snapshotGrabber = new Grabber(this, true); // objects object captureGraphObject = null; object graphObject = null; object videoGrabberObject = null; object snapshotGrabberObject = null; // interfaces ICaptureGraphBuilder2 captureGraph = null; IFilterGraph2 graph = null; IBaseFilter sourceBase = null; IBaseFilter videoGrabberBase = null; IBaseFilter snapshotGrabberBase = null; ISampleGrabber videoSampleGrabber = null; ISampleGrabber snapshotSampleGrabber = null; IMediaControl mediaControl = null; IAMVideoControl videoControl = null; IMediaEventEx mediaEvent = null; IPin pinStillImage = null; try { // get type of capture graph builder Type type = Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2); if (type == null) { throw new ApplicationException("Failed creating capture graph builder"); } // create capture graph builder captureGraphObject = Activator.CreateInstance(type); captureGraph = (ICaptureGraphBuilder2)captureGraphObject; // get type of filter graph type = Type.GetTypeFromCLSID(Clsid.FilterGraph); if (type == null) { throw new ApplicationException("Failed creating filter graph"); } // create filter graph graphObject = Activator.CreateInstance(type); graph = (IFilterGraph2)graphObject; // set filter graph to the capture graph builder captureGraph.SetFiltergraph((IGraphBuilder)graph); // create source device's object sourceObject = FilterInfo.CreateFilter(deviceMoniker); if (sourceObject == null) { throw new ApplicationException("Failed creating device object for moniker"); } // get base filter interface of source device sourceBase = (IBaseFilter)sourceObject; // get video control interface of the device try { videoControl = (IAMVideoControl)sourceObject; } catch { // some camera drivers may not support IAMVideoControl interface } // get type of sample grabber type = Type.GetTypeFromCLSID(Clsid.SampleGrabber); if (type == null) { throw new ApplicationException("Failed creating sample grabber"); } // create sample grabber used for video capture videoGrabberObject = Activator.CreateInstance(type); videoSampleGrabber = (ISampleGrabber)videoGrabberObject; videoGrabberBase = (IBaseFilter)videoGrabberObject; // create sample grabber used for snapshot capture snapshotGrabberObject = Activator.CreateInstance(type); snapshotSampleGrabber = (ISampleGrabber)snapshotGrabberObject; snapshotGrabberBase = (IBaseFilter)snapshotGrabberObject; // add source and grabber filters to graph graph.AddFilter(sourceBase, "source"); graph.AddFilter(videoGrabberBase, "grabber_video"); graph.AddFilter(snapshotGrabberBase, "grabber_snapshot"); // set media type AMMediaType mediaType = new AMMediaType(); mediaType.MajorType = MediaType.Video; mediaType.SubType = MediaSubType.RGB24; videoSampleGrabber.SetMediaType(mediaType); snapshotSampleGrabber.SetMediaType(mediaType); if (videoControl != null) { // find Still Image output pin of the vedio device captureGraph.FindPin(sourceObject, PinDirection.Output, PinCategory.StillImage, MediaType.Video, false, 0, out pinStillImage); // check if it support trigger mode if (pinStillImage != null) { VideoControlFlags caps; videoControl.GetCaps(pinStillImage, out caps); isSapshotSupported = ((caps & VideoControlFlags.ExternalTriggerEnable) != 0); } } // configure video sample grabber videoSampleGrabber.SetBufferSamples(false); 
videoSampleGrabber.SetOneShot(false); videoSampleGrabber.SetCallback(videoGrabber, 1); // configure snapshot sample grabber snapshotSampleGrabber.SetBufferSamples(true); snapshotSampleGrabber.SetOneShot(false); snapshotSampleGrabber.SetCallback(snapshotGrabber, 1); // configure pins GetPinCapabilitiesAndConfigureSizeAndRate(captureGraph, sourceBase, PinCategory.Capture, desiredFrameSize, desiredFrameRate, ref videoCapabilities); if (isSapshotSupported) { GetPinCapabilitiesAndConfigureSizeAndRate(captureGraph, sourceBase, PinCategory.StillImage, desiredSnapshotSize, 0, ref snapshotCapabilities); } else { snapshotCapabilities = new VideoCapabilities[0]; } if (runGraph) { // render capture pin captureGraph.RenderStream(PinCategory.Capture, MediaType.Video, sourceBase, null, videoGrabberBase); if (videoSampleGrabber.GetConnectedMediaType(mediaType) == 0) { VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader)); videoGrabber.Width = vih.BmiHeader.Width; videoGrabber.Height = vih.BmiHeader.Height; mediaType.Dispose(); } if ((isSapshotSupported) && (provideSnapshots)) { // render snapshot pin captureGraph.RenderStream(PinCategory.StillImage, MediaType.Video, sourceBase, null, snapshotGrabberBase); if (snapshotSampleGrabber.GetConnectedMediaType(mediaType) == 0) { VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader)); snapshotGrabber.Width = vih.BmiHeader.Width; snapshotGrabber.Height = vih.BmiHeader.Height; mediaType.Dispose(); } } // get media control mediaControl = (IMediaControl)graphObject; // get media events' interface mediaEvent = (IMediaEventEx)graphObject; IntPtr p1, p2; DsEvCode code; // run mediaControl.Run(); if ((isSapshotSupported) && (provideSnapshots)) { startTime = DateTime.Now; videoControl.SetMode(pinStillImage, VideoControlFlags.ExternalTriggerEnable); } while (!stopEvent.WaitOne(0, false)) { Thread.Sleep(100); if (mediaEvent != null) { if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0) { mediaEvent.FreeEventParams(code, p1, p2); if (code == DsEvCode.DeviceLost) { reasonToStop = VideoSourceFinishedReasonType.DeviceLost; break; } } } if (needToSimulateTrigger) { needToSimulateTrigger = false; if ((isSapshotSupported) && (provideSnapshots)) { videoControl.SetMode(pinStillImage, VideoControlFlags.Trigger); } } if (needToDisplayPropertyPage) { needToDisplayPropertyPage = false; try { // retrieve ISpecifyPropertyPages interface of the device ISpecifyPropertyPages pPropPages = (ISpecifyPropertyPages)sourceObject; // get property pages from the property bag CAUUID caGUID; pPropPages.GetPages(out caGUID); // get filter info FilterInfo filterInfo = new FilterInfo(deviceMoniker); // create and display the OlePropertyFrame Win32.OleCreatePropertyFrame(parentWindowForPropertyPage, 0, 0, filterInfo.Name, 1, ref sourceObject, caGUID.cElems, caGUID.pElems, 0, 0, IntPtr.Zero); // release COM objects Marshal.FreeCoTaskMem(caGUID.pElems); } catch { } } } mediaControl.Stop(); } } catch (Exception exception) { // provide information to clients if (VideoSourceException != null) { VideoSourceException(this, new VideoSourceExceptionEventArgs(exception.Message)); } } finally { // release all objects captureGraph = null; graph = null; sourceBase = null; mediaControl = null; videoControl = null; mediaEvent = null; pinStillImage = null; videoGrabberBase = null; snapshotGrabberBase = null; videoSampleGrabber = null; snapshotSampleGrabber = null; if (graphObject != null) { 
Marshal.ReleaseComObject(graphObject); graphObject = null; } if (sourceObject != null) { Marshal.ReleaseComObject(sourceObject); sourceObject = null; } if (videoGrabberObject != null) { Marshal.ReleaseComObject(videoGrabberObject); videoGrabberObject = null; } if (snapshotGrabberObject != null) { Marshal.ReleaseComObject(snapshotGrabberObject); snapshotGrabberObject = null; } if (captureGraphObject != null) { Marshal.ReleaseComObject(captureGraphObject); captureGraphObject = null; } } if (VideoSourceFinished != null) { VideoSourceFinished(this, new VideoSourceFinishedEventArgs(reasonToStop)); } }
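// The finally blocks above repeat the same null-check / Marshal.ReleaseComObject / null-out pattern
// for every COM reference; a small helper like this (an illustrative addition, not present in the
// source) keeps that cleanup uniform:
private static void ReleaseComObjectSafely<T>(ref T comObject) where T : class
{
    if (comObject != null)
    {
        Marshal.ReleaseComObject(comObject);
        comObject = null;
    }
}

// e.g. inside the finally block:
//   ReleaseComObjectSafely(ref graphObject);
//   ReleaseComObjectSafely(ref sourceObject);
//   ReleaseComObjectSafely(ref videoGrabberObject);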