protected object setStreamConfigSetting(DShowNET.IAMStreamConfig streamConfig, string fieldName, object newValue)
{
    if (streamConfig == null)
    {
        throw new NotSupportedException();
    }

    object returnValue = null;
    IntPtr pmt = IntPtr.Zero;
    DShowNET.AMMediaType mediaType = new DShowNET.AMMediaType();

    try
    {
        // Get the current format info
        int hr = streamConfig.GetFormat(out pmt);
        if (hr != 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        Marshal.PtrToStructure(pmt, mediaType);

        // The formatPtr member points to different structures
        // depending on the formatType
        object formatStruct;
        if (mediaType.formatType == DShowNET.FormatType.WaveEx)
        {
            formatStruct = new DShowNET.WaveFormatEx();
        }
        else if (mediaType.formatType == DShowNET.FormatType.VideoInfo)
        {
            formatStruct = new VideoInfoHeader();
        }
        else if (mediaType.formatType == DShowNET.FormatType.VideoInfo2)
        {
            formatStruct = new DShowNET.VideoInfoHeader2();
        }
        else
        {
            throw new NotSupportedException("This device does not support a recognized format block.");
        }

        // Retrieve the nested structure
        Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);

        // Find the required field
        Type structType = formatStruct.GetType();
        FieldInfo fieldInfo = structType.GetField(fieldName);
        if (fieldInfo == null)
        {
            throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");
        }

        // Update the value of the field
        fieldInfo.SetValue(formatStruct, newValue);

        // PtrToStructure copies the data, so we need to copy it back
        Marshal.StructureToPtr(formatStruct, mediaType.formatPtr, false);

        // Save the changes
        hr = streamConfig.SetFormat(mediaType);
        if (hr != 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
    }
    finally
    {
        //DShowNET.DsUtils.FreeAMMediaType(mediaType);
        Marshal.FreeCoTaskMem(pmt);
    }

    return returnValue;
}
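// WorkerThread below calls a companion getStreamConfigSetting helper that is not part of this
// listing. The sketch that follows is an assumption based on setStreamConfigSetting above, not
// the original implementation: it reads the current AM_MEDIA_TYPE, selects the format block by
// formatType, and returns the requested field via reflection.
protected object getStreamConfigSetting(DShowNET.IAMStreamConfig streamConfig, string fieldName)
{
    if (streamConfig == null)
    {
        throw new NotSupportedException();
    }

    object returnValue = null;
    IntPtr pmt = IntPtr.Zero;
    DShowNET.AMMediaType mediaType = new DShowNET.AMMediaType();

    try
    {
        // Get the current format info
        int hr = streamConfig.GetFormat(out pmt);
        if (hr != 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        Marshal.PtrToStructure(pmt, mediaType);

        // Pick the format block that matches formatType
        // (VideoInfo is the case that carries the BmiHeader field used by WorkerThread)
        object formatStruct;
        if (mediaType.formatType == DShowNET.FormatType.WaveEx)
        {
            formatStruct = new DShowNET.WaveFormatEx();
        }
        else if (mediaType.formatType == DShowNET.FormatType.VideoInfo)
        {
            formatStruct = new VideoInfoHeader();
        }
        else if (mediaType.formatType == DShowNET.FormatType.VideoInfo2)
        {
            formatStruct = new DShowNET.VideoInfoHeader2();
        }
        else
        {
            throw new NotSupportedException("This device does not support a recognized format block.");
        }

        // Read the nested structure and return the requested field
        Marshal.PtrToStructure(mediaType.formatPtr, formatStruct);
        FieldInfo fieldInfo = formatStruct.GetType().GetField(fieldName);
        if (fieldInfo == null)
        {
            throw new NotSupportedException("Unable to find the member '" + fieldName + "' in the format block.");
        }
        returnValue = fieldInfo.GetValue(formatStruct);
    }
    finally
    {
        Marshal.FreeCoTaskMem(pmt);
    }

    return returnValue;
}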
// Thread entry point
public void WorkerThread()
{
    int hr;
    Guid cat;
    Guid med;

    // grabber
    Grabber grabber = new Grabber(this);

    // objects
    object graphObj = null;
    object grabberObj = null;

    // interfaces
    IGraphBuilder graphBuilder = null;
    DShowNET.ICaptureGraphBuilder2 captureGraphBuilder = null;
    IBaseFilter videoDeviceFilter = null;
    IBaseFilter grabberFilter = null;
    ISampleGrabber sg = null;
    IMediaControl mc = null;

    try
    {
        // Make a new filter graph
        graphObj = Activator.CreateInstance(Type.GetTypeFromCLSID(DShowNET.Clsid.FilterGraph, true));
        graphBuilder = (IGraphBuilder)graphObj;

        // Get the Capture Graph Builder
        Guid clsid = DShowNET.Clsid.CaptureGraphBuilder2;
        Guid riid = typeof(DShowNET.ICaptureGraphBuilder2).GUID;
        captureGraphBuilder = (DShowNET.ICaptureGraphBuilder2)DShowNET.DsBugWO.CreateDsInstance(ref clsid, ref riid);

        // Link the CaptureGraphBuilder to the filter graph
        hr = captureGraphBuilder.SetFiltergraph((DShowNET.IGraphBuilder)graphBuilder);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        int rotCookie = 0;
        DShowNET.DsROT.AddGraphToRot(graphBuilder, out rotCookie);

        // Get the video device and add it to the filter graph
        if (deviceMoniker != null)
        {
            videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(deviceMoniker);
            hr = graphBuilder.AddFilter(videoDeviceFilter, "Video Capture Device");
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }

        // Create the sample grabber object and filter
        grabberObj = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.SampleGrabber, true));
        grabberFilter = (IBaseFilter)grabberObj;
        sg = (ISampleGrabber)grabberObj;

        // Add the sample grabber filter to the filter graph
        hr = graphBuilder.AddFilter(grabberFilter, "grabber");
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Try looking for a video device interleaved media type
        IBaseFilter testFilter = videoDeviceFilter;    // grabberFilter (not supported)
        object o;
        cat = DShowNET.PinCategory.Capture;
        med = DShowNET.MediaType.Interleaved;
        Guid iid = typeof(DShowNET.IAMStreamConfig).GUID;
        hr = captureGraphBuilder.FindInterface(ref cat, ref med, (DShowNET.IBaseFilter)testFilter, ref iid, out o);
        if (hr != 0)
        {
            // If not found, try looking for a video media type
            med = MediaType.Video;
            hr = captureGraphBuilder.FindInterface(ref cat, ref med, (DShowNET.IBaseFilter)testFilter, ref iid, out o);
            if (hr != 0)
            {
                o = null;
            }
        }

        // Store the video stream configuration in the data member
        videoStreamConfig = o as DShowNET.IAMStreamConfig;
        o = null;

        // Modify the stream size and frame rate
        if (modifyStream)
        {
            // Set the size of the frame
            BitmapInfoHeader bmiHeader;
            bmiHeader = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
            bmiHeader.Width = streamSize.Width;
            bmiHeader.Height = streamSize.Height;
            setStreamConfigSetting(videoStreamConfig, "BmiHeader", bmiHeader);

            // Set the frame rate (not supported on the cameras we have)
            /*
            long avgTimePerFrame = (long)(10000000 / framerate);
            setStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame", avgTimePerFrame);
            */
        }

        // Connect the pins (turns on the video device)
        if (graphBuilder.Connect(
                (IPin)AForge.Video.DirectShow.Internals.Tools.GetOutPin((AForge.Video.DirectShow.Internals.IBaseFilter)videoDeviceFilter, 0),
                (IPin)AForge.Video.DirectShow.Internals.Tools.GetInPin((AForge.Video.DirectShow.Internals.IBaseFilter)grabberFilter, 0)) < 0)
        {
            throw new ApplicationException("Failed connecting filters");
        }

        // Set the sample grabber media type settings
        AMMediaType mt = new AMMediaType();
        mt.majorType = MediaType.Video;
        mt.subType = MediaSubType.RGB24;
        sg.SetMediaType(mt);

        // Get the connected media type and set sample grabber parameters
        if (sg.GetConnectedMediaType(mt) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
            if (vih.BmiHeader.Compression != 0)
            {
                YUYV = true;
                grabber.setYUYV(YUYV);
            }
            grabber.Width = vih.BmiHeader.Width;
            grabber.Height = vih.BmiHeader.Height;
            //mt.Dispose();
        }

        // Set various sample grabber properties
        sg.SetBufferSamples(false);
        sg.SetOneShot(false);
        sg.SetCallback(grabber, 1);

        if (!preventFreezing)
        {
            // Render the grabber output pin
            graphBuilder.Render((IPin)AForge.Video.DirectShow.Internals.Tools.GetOutPin((AForge.Video.DirectShow.Internals.IBaseFilter)grabberFilter, 0));

            // Do not show the active (source) window
            IVideoWindow win = (IVideoWindow)graphObj;
            win.put_AutoShow(0);
            win = null;
        }

        // Get the media control
        mc = (IMediaControl)graphBuilder;

        // Run the graph and wait until a stop is requested
        mc.Run();

        while (!stopEvent.WaitOne(0, true))
        {
            Thread.Sleep(100);
        }
        mc.StopWhenReady();
    }
    catch (Exception e)
    {
        // Provide error information to clients
        if (VideoSourceError != null)
        {
            VideoSourceError(this, new VideoSourceErrorEventArgs(e.Message));
        }
    }
    finally
    {
        // Release all objects
        mc = null;
        graphBuilder = null;
        captureGraphBuilder = null;
        videoDeviceFilter = null;
        grabberFilter = null;
        sg = null;

        if (graphObj != null)
        {
            Marshal.ReleaseComObject(graphObj);
            graphObj = null;
        }
        if (grabberObj != null)
        {
            Marshal.ReleaseComObject(grabberObj);
            grabberObj = null;
        }
    }
}
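// A minimal usage sketch, assuming the surrounding video-source class exposes start/stop
// methods and owns the stopEvent polled by WorkerThread above. The field name "thread" and
// the method names Start/SignalToStop are illustrative assumptions, not part of this listing.
public void Start()
{
    if (thread == null)
    {
        // Reset the stop signal and launch WorkerThread on a background thread
        stopEvent = new ManualResetEvent(false);
        thread = new Thread(new ThreadStart(WorkerThread));
        thread.IsBackground = true;
        thread.Start();
    }
}

public void SignalToStop()
{
    if (thread != null)
    {
        // WorkerThread checks stopEvent every 100 ms and calls StopWhenReady() before exiting
        stopEvent.Set();
        thread.Join();
        thread = null;
    }
}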