/// <summary> /// Creates a capture instance for the video input device at the given index. /// </summary> /// <param name="deviceNumber">Zero-based index of the video capture device.</param> /// <param name="frameRate">Requested frame rate in frames per second.</param> /// <param name="width">Requested frame width in pixels.</param> /// <param name="height">Requested frame height in pixels.</param> public WebCamCapture(int deviceNumber, int frameRate, int width, int height) { DsDevice[] devices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice); if (deviceNumber > devices.Length - 1) { throw new ArgumentException("No video capture device found at index " + deviceNumber); } try { mFrameRate = frameRate; mWidth = width; mHeight = height; mDevice = devices[deviceNumber]; InitCaptureGraph(); mPictureReady = new ManualResetEvent(false); mImageCaptured = true; mIsRunning = false; } catch { Dispose(); throw; } }
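A minimal usage sketch for the constructor above. Disposal is inferred from the catch block (the class calls Dispose() on itself), and the frame-grabbing members referenced in the body (mPictureReady, mIsRunning, and so on) are assumed to be driven by other methods of the class:

WebCamCapture capture = null;
try
{
    // First capture device, 30 fps, 640x480.
    capture = new WebCamCapture(0, 30, 640, 480);
    // ... start the graph and grab frames through the rest of the class API ...
}
finally
{
    capture?.Dispose();
}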
private PTZDevice(string name, PTZType type) { var devices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice); var device = devices.Where(d => d.Name == name).FirstOrDefault(); _device = device; _type = type; if (_device == null) throw new ApplicationException(String.Format("Couldn't find device named {0}!", name)); IFilterGraph2 graphBuilder = new FilterGraph() as IFilterGraph2; IBaseFilter filter = null; IMoniker i = _device.Mon as IMoniker; graphBuilder.AddSourceFilterForMoniker(i, null, _device.Name, out filter); _camControl = filter as IAMCameraControl; _ksPropertySet = filter as IKsPropertySet; if (_camControl == null) throw new ApplicationException("Couldn't get ICamControl!"); if (_ksPropertySet == null) throw new ApplicationException("Couldn't get IKsPropertySet!"); //TODO: Add Absolute if (type == PTZType.Relative && !(SupportFor(KSProperties.CameraControlFeature.KSPROPERTY_CAMERACONTROL_PAN_RELATIVE) && SupportFor(KSProperties.CameraControlFeature.KSPROPERTY_CAMERACONTROL_TILT_RELATIVE))) { throw new NotSupportedException("This camera doesn't appear to support Relative Pan and Tilt"); } //TODO: Do I throw NotSupported when methods are called or throw them now? //TODO: Do I check for Zoom or ignore if it's not there? InitZoomRanges(); }
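InitZoomRanges is referenced above but not shown. A plausible sketch using the IAMCameraControl the constructor already obtained; the field names _zoomMin and _zoomMax are assumptions, and devices that expose no zoom control will simply fail the GetRange call:

private int _zoomMin, _zoomMax;   // hypothetical storage for the queried range

private void InitZoomRanges()
{
    int steppingDelta, defaultValue;
    CameraControlFlags flags;
    // Ask the driver for the supported zoom range; the units are device specific.
    int hr = _camControl.GetRange(CameraControlProperty.Zoom,
        out _zoomMin, out _zoomMax, out steppingDelta, out defaultValue, out flags);
    if (hr != 0)
    {
        // No zoom support reported; collapse to a degenerate range.
        _zoomMin = _zoomMax = 0;
    }
}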
public DirectShowCamera(int id, int width, int height, int frameRate, DsDevice device) : base(id, width, height, frameRate) { SetupDevice(); _videoInput = new Capture(id); Name = device.Name; }
/// <summary> /// Constructor /// </summary> /// <param name="epgEvents">The EPG events interface.</param> /// <param name="device">The tuner device.</param> /// <param name="sequence">Sequence number, appended to the card name when greater than zero.</param> public TvCardDVBIP(IEpgEvents epgEvents, DsDevice device, int sequence) : base(epgEvents, device) { _cardType = CardType.DvbIP; _sequence = sequence; if (_sequence > 0) { _name = _name + "_" + _sequence; } }
protected IBaseFilter CreateFilterInstance(DsDevice device) { var guid = typeof(IBaseFilter).GUID; object objFilter; device.Mon.BindToObject(null, null, ref guid, out objFilter); if (objFilter == null) throw new NullReferenceException("Cannot bind to filter"); return (IBaseFilter)objFilter; }
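A short usage sketch for the helper above, binding the first registered video capture device and adding it to a new graph (the surrounding graph setup is an assumption):

DsDevice[] cams = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
if (cams.Length > 0)
{
    IBaseFilter sourceFilter = CreateFilterInstance(cams[0]);
    IFilterGraph2 graph = (IFilterGraph2)new FilterGraph();
    int hr = graph.AddFilter(sourceFilter, cams[0].Name);
    DsError.ThrowExceptionForHR(hr);
}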
/// <summary> /// use this method to indicate that the specified device is no longer in use /// </summary> /// <param name="device">The device to release.</param> public void Remove(DsDevice device) { for (int i = 0; i < _devicesInUse.Count; ++i) { if (_devicesInUse[i].Mon == device.Mon && _devicesInUse[i].Name == device.Name) { _devicesInUse.RemoveAt(i); return; } } }
///<summary> /// Base constructor ///</summary> ///<param name="device">Base DS device</param> protected TvCardBase(DsDevice device) { _graphState = GraphState.Idle; _device = device; _tunerDevice = device; _name = device.Name; _devicePath = device.DevicePath; //get preload card value if (_devicePath != null) { GetPreloadBitAndCardId(); GetSupportsPauseGraph(); } }
public void SetupFileRecorderGraph(DsDevice dev, SystemCodecEntry compressor, ref float iFrameRate, ref int iWidth, ref int iHeight, string fileName) { try { SetupGraphInternal(dev, compressor, ref iFrameRate, ref iWidth, ref iHeight, fileName); latestBitmap = new Bitmap(iWidth, iHeight, PixelFormat.Format24bppRgb); fullRect = new Rectangle(0, 0, latestBitmap.Width, latestBitmap.Height); } catch { CloseResources(); throw; } }
///<summary> /// Constructor for the analog tuner ///</summary> ///<param name="device">Tuner Device</param> public TvCardAnalog(DsDevice device) : base(device) { _parameters = new ScanParameters(); _mapSubChannels = new Dictionary<int, BaseSubChannel>(); _supportsSubChannels = true; _minChannel = 0; _maxChannel = 128; _camType = CamType.Default; _conditionalAccess = null; _cardType = CardType.Analog; _epgGrabbing = false; _configuration = Configuration.readConfiguration(_cardId, _name, _devicePath); Configuration.writeConfiguration(_configuration); }
public void SelectIndex(DsDevice dev) { // Highlight the specified device (if we can find it) if (dev != null) { for (int x = 0; x < lbDevices.Items.Count; x++) { VDevice d = lbDevices.Items[x] as VDevice; if (d.Device.DevicePath == dev.DevicePath) { lbDevices.SelectedIndex = x; break; } } } }
public PropertyPageHelper(DsDevice dev) { try { object source; var id = typeof(IBaseFilter).GUID; dev.Mon.BindToObject(null, null, ref id, out source); if (source != null) { var filter = (IBaseFilter)source; m_specifyPropertyPages = filter as ISpecifyPropertyPages; } } catch { MessageBox.Show(NO_PROPERTY_PAGE_FOUND); } }
public AnalogCamera(DsDevice device, Device d3dDevice) { this.d3dDevice = d3dDevice; texture = new Texture(d3dDevice, 720, 576, 1, Usage.Dynamic, Format.A8R8G8B8, Pool.Default); var desc = texture.GetLevelDescription(0); DataRectangle dr = texture.LockRectangle(0, LockFlags.Discard); int rowPitch = dr.Pitch; texture.UnlockRectangle(0); try { BuildGraph(device); } catch { Dispose(); throw; } }
/// <summary> Constructor </summary> /// <param name="dev"> The DsDevice whose property pages should be shown </param> public DirectShowPropertyPage(DsDevice dev) { try { object l_Source = null; Guid l_Iid = typeof (IBaseFilter).GUID; dev.Mon.BindToObject(null, null, ref l_Iid, out l_Source); if (l_Source != null) { Name = dev.Name; IBaseFilter filter = (IBaseFilter)l_Source; SupportsPersisting = false; this.specifyPropertyPages = filter as ISpecifyPropertyPages; } } catch { MessageBox.Show("This filter has no property page!"); } }
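Neither of the property-page snippets shows how the stored ISpecifyPropertyPages is actually displayed. The usual pattern is OleCreatePropertyFrame; a sketch under the assumption that the standard P/Invoke declaration below is added to the class and that the caller supplies an owner window handle:

[DllImport("oleaut32.dll")]
private static extern int OleCreatePropertyFrame(IntPtr hwndOwner, int x, int y,
    [MarshalAs(UnmanagedType.LPWStr)] string lpszCaption, int cObjects,
    [MarshalAs(UnmanagedType.Interface, ArraySubType = UnmanagedType.IUnknown)] ref object ppUnk,
    int cPages, IntPtr pPageClsID, int lcid, int dwReserved, IntPtr pvReserved);

public void Show(IntPtr hwndOwner)
{
    if (specifyPropertyPages == null) return;

    DsCAUUID caGuid;
    int hr = specifyPropertyPages.GetPages(out caGuid);
    DsError.ThrowExceptionForHR(hr);
    try
    {
        // The filter itself is the object whose pages are shown.
        object filterObject = specifyPropertyPages;
        OleCreatePropertyFrame(hwndOwner, 0, 0, Name, 1, ref filterObject,
                               caGuid.cElems, caGuid.pElems, 0, 0, IntPtr.Zero);
    }
    finally
    {
        Marshal.FreeCoTaskMem(caGuid.pElems);
    }
}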
/// <summary> /// Initializes a new instance of the <see cref="TvCardDvbB2C2"/> class. /// </summary> /// <param name="device">The device.</param> public TvCardDvbB2C2(DsDevice device, DeviceInfo deviceInfo) : base(device) { _deviceInfo = deviceInfo; _devicePath = deviceInfo.DevicePath; _name = deviceInfo.Name; GetPreloadBitAndCardId(); _useDISEqCMotor = false; TvBusinessLayer layer = new TvBusinessLayer(); Card card = layer.GetCardByDevicePath(_devicePath); if (card != null) { Setting setting = layer.GetSetting("dvbs" + card.IdCard + "motorEnabled", "no"); if (setting.Value == "yes") _useDISEqCMotor = true; } _conditionalAccess = new ConditionalAccess(null, null, null, this); _ptrDisEqc = Marshal.AllocCoTaskMem(20); _disEqcMotor = new DiSEqCMotor(this); GetTunerCapabilities(); }
public WebCamera(DsDevice device, Device d3dDevice) { //bitmapWindow = new Test(); //bitmapWindow.Show(); this.d3dDevice = d3dDevice; texture = new Texture(d3dDevice, 1280, 720, 1, Usage.Dynamic, Format.A8R8G8B8, Pool.Default); //texture = Texture.FromFile(d3dDevice, ".\\Images\\checkerboard.jpg", D3DX.DefaultNonPowerOf2, D3DX.DefaultNonPowerOf2, 1, Usage.Dynamic, Format.A8R8G8B8, Pool.Default, Filter.None, Filter.None, 0); var desc = texture.GetLevelDescription(0); DataRectangle dr = texture.LockRectangle(0, LockFlags.Discard); int rowPitch = dr.Pitch; texture.UnlockRectangle(0); try { BuildGraph(device); } catch { Dispose(); throw; } }
private void Config() { int hr = 0; graphBuilder = (IFilterGraph2) new FilterGraph(); rot = new DsROTEntry(graphBuilder); // Assume that the first device in this category IS a BDA device... DsDevice[] devices = DsDevice.GetDevicesOfCat(FilterCategory.BDASourceFiltersCategory); hr = graphBuilder.AddSourceFilterForMoniker(devices[0].Mon, null, devices[0].Name, out bdaTuner); DsError.ThrowExceptionForHR(hr); IBDA_Topology topo = (IBDA_Topology)bdaTuner; int[] nodeTypes = new int[10]; int nodeTypesCount; // Get all nodes in the BDA Tuner hr = topo.GetNodeTypes(out nodeTypesCount, nodeTypes.Length, nodeTypes); DsError.ThrowExceptionForHR(hr); // For each node type for (int i = 0; i < nodeTypesCount; i++) { Guid[] nodeGuid = new Guid[10]; int nodeGuidCount; // Get its exposed interfaces hr = topo.GetNodeInterfaces(nodeTypes[i], out nodeGuidCount, nodeGuid.Length, nodeGuid); DsError.ThrowExceptionForHR(hr); // For each exposed interface for (int j = 0; j < nodeGuidCount; j++) { Debug.WriteLine(string.Format("node {0}/{1} : {2}", i, j, nodeGuid[j])); Console.WriteLine(string.Format("node {0}/{1} : {2}", i, j, nodeGuid[j])); // Is IBDA_AutoDemodulate supported by this node ? if (nodeGuid[j] == typeof(IBDA_AutoDemodulate).GUID) { Console.WriteLine("nodetype : " + nodeTypes[i]); // Yes, retrieve this node object ctrlNode; hr = topo.GetControlNode(0, 1, nodeTypes[i], out ctrlNode); DsError.ThrowExceptionForHR(hr); // Do the cast (it should not fail) autoDemodulate = ctrlNode as IBDA_AutoDemodulate; // Exit the for j loop once an IBDA_AutoDemodulate object has been found if (autoDemodulate != null) { break; } } } // If an IBDA_AutoDemodulate object has already been found, exit the for i loop if (autoDemodulate != null) { break; } } }
private (LocalAudioSourceCapability[] caps, bool success) GetCapabilities(DsDevice device) { Log.Information($"Audio ({device.Name}): Getting Caps"); var list = new List <LocalAudioSourceCapability>(); bool failed = false; IntPtr pCaps = IntPtr.Zero; IFilterGraph2 filterGraph2 = null; IBaseFilter sourceFilter = null; IAMStreamConfig streamConfig = null; object pin = null; int count = 0; int size = 0; try { filterGraph2 = new FilterGraph() as IFilterGraph2; if (filterGraph2 == null) { throw new NotSupportedException("filter2 is null"); } LocalVideoSourceManager.AddCaptureFilter(filterGraph2, device, out sourceFilter); pin = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0); if (pin == null) { Log.Information($"Audio ({device.Name}): First pin is null"); pin = sourceFilter; } streamConfig = pin as IAMStreamConfig; if (streamConfig == null) { throw new NotSupportedException("pin is null"); } LocalVideoSourceManager.Checked(() => streamConfig.GetNumberOfCapabilities(out count, out size), "GetNumberOfCapabilities", null); if (count <= 0) { throw new NotSupportedException("This video source does not report capabilities."); } if (size != Marshal.SizeOf(typeof(AudioStreamConfigCaps))) { throw new NotSupportedException("Unable to retrieve video source capabilities. This video source requires a larger VideoStreamConfigCaps structure."); } // Alloc memory for structure pCaps = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(AudioStreamConfigCaps))); for (int i = 0; i < count; i++) { AMMediaType mediaType = null; LocalVideoSourceManager.Checked(() => streamConfig.GetStreamCaps(i, out mediaType, pCaps), "GetStreamCaps", null); AudioStreamConfigCaps caps = (AudioStreamConfigCaps)Marshal.PtrToStructure(pCaps, typeof(AudioStreamConfigCaps)); var result = new LocalAudioSourceCapability() { MinimumChannels = caps.MinimumChannels, MaximumChannels = caps.MaximumChannels, MinimumSampleFrequency = caps.MinimumSampleFrequency, MaximumSampleFrequency = caps.MaximumSampleFrequency }; list.Add(result); } } catch (Exception e) { Log.Error(e, $"Error during retreiving caps for '{device.Name}'"); failed = true; } finally { if (pCaps != IntPtr.Zero) { Marshal.FreeCoTaskMem(pCaps); } } Log.Information($"Audio ({device.Name}): Releasing"); try { LocalVideoSourceManager.ReleaseComObject(sourceFilter); LocalVideoSourceManager.ReleaseComObject(filterGraph2); LocalVideoSourceManager.ReleaseComObject(streamConfig); LocalVideoSourceManager.ReleaseComObject(pin); } catch (Exception e) { Log.Error(e, $"ReleaseComObject({device.Name}) failed"); } Log.Information($"Caps {device.Name}: Count: {list.Count}/{count}, Str={size} ({string.Join("; ", list.Where(s => !s.IsStandart()).Select(s => s.ToString()))})"); return(list.ToArray(), !failed); }
private void AddAndConnectBDABoardFilters() { int hr = 0; DsDevice[] devices; ICaptureGraphBuilder2 capBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); capBuilder.SetFiltergraph(this.graphBuilder); try { // Enumerate the BDA Source filters category and find one that can connect to the network provider devices = DsDevice.GetDevicesOfCat(FilterCategory.BDASourceFiltersCategory); for (int i = 0; i < devices.Length; i++) { IBaseFilter tmp; hr = graphBuilder.AddSourceFilterForMoniker(devices[i].Mon, null, devices[i].Name, out tmp); DsError.ThrowExceptionForHR(hr); hr = capBuilder.RenderStream(null, null, this.networkProvider, null, tmp); if (hr == 0) { // Got it ! this.tuner = tmp; break; } else { // Try another... hr = graphBuilder.RemoveFilter(tmp); Marshal.ReleaseComObject(tmp); } } if (this.tuner == null) { throw new ApplicationException("Can't find a valid BDA tuner"); } // Try to connect this filter to the MPEG-2 Demux hr = capBuilder.RenderStream(null, null, tuner, null, mpeg2Demux); if (hr >= 0) { // this is a one filter model this.demodulator = null; this.capture = null; return; } else { // Then enumerate the BDA Receiver Components category to find a filter connecting // to the tuner and the MPEG2 Demux devices = DsDevice.GetDevicesOfCat(FilterCategory.BDAReceiverComponentsCategory); for (int i = 0; i < devices.Length; i++) { IBaseFilter tmp; hr = graphBuilder.AddSourceFilterForMoniker(devices[i].Mon, null, devices[i].Name, out tmp); DsError.ThrowExceptionForHR(hr); hr = capBuilder.RenderStream(null, null, this.tuner, null, tmp); if (hr == 0) { // Got it ! this.capture = tmp; // Connect it to the MPEG-2 Demux hr = capBuilder.RenderStream(null, null, this.capture, null, this.mpeg2Demux); if (hr >= 0) { // This second filter connects to both the tuner and the demux. // This is a capture filter... return; } else { // This second filter connects to the tuner but not to the demux. // This is in fact a demodulator filter. We now must find the true capture filter... this.demodulator = this.capture; this.capture = null; // saving the Demodulator's DevicePath to avoid creating it twice. string demodulatorDevicePath = devices[i].DevicePath; for (int j = 0; j < devices.Length; j++) { if (devices[j].DevicePath.Equals(demodulatorDevicePath)) { continue; } hr = graphBuilder.AddSourceFilterForMoniker(devices[j].Mon, null, devices[j].Name, out tmp); DsError.ThrowExceptionForHR(hr); hr = capBuilder.RenderStream(null, null, this.demodulator, null, tmp); if (hr == 0) { // Got it ! this.capture = tmp; // Connect it to the MPEG-2 Demux hr = capBuilder.RenderStream(null, null, this.capture, null, this.mpeg2Demux); if (hr >= 0) { // This second filter connects to both the demodulator and the demux. // This is a true capture filter... return; } } else { // Try another... hr = graphBuilder.RemoveFilter(tmp); Marshal.ReleaseComObject(tmp); } } // for j // We have a tuner and a capture/demodulator that don't connect to the demux // and we found no additional filters to build a working filter chain. throw new ApplicationException("Can't find a valid BDA filter chain"); } } else { // Try another... hr = graphBuilder.RemoveFilter(tmp); Marshal.ReleaseComObject(tmp); } } // for i // We have a tuner that connects to the Network Provider BUT not to the demux // and we found no additional filters to build a working filter chain. throw new ApplicationException("Can't find a valid BDA filter chain"); } } finally { Marshal.ReleaseComObject(capBuilder); } }
/// <summary> /// Creates the filter by trying to detect it /// </summary> /// <param name="crossbar">The crossbar componen</param> /// <param name="tuner">The tuner component</param> /// <param name="graph">The stored graph</param> /// <param name="graphBuilder">The graphBuilder</param> /// <returns>true, if the graph building was successful</returns> private bool CreateAutomaticFilterInstance(Graph graph, Tuner tuner, Crossbar crossbar, IFilterGraph2 graphBuilder) { //get all tv audio tuner devices on this system DsDevice[] devices = null; try { devices = DsDevice.GetDevicesOfCat(FilterCategory.AMKSTVAudio); devices = DeviceSorter.Sort(devices, tuner.TunerName, crossbar.CrossBarName); } catch (Exception) { Log.Log.WriteFile("analog: AddTvAudioFilter no tv audio devices found - Trying TvTuner filter"); } if (devices != null && devices.Length > 0) { // try each tv audio tuner for (int i = 0; i < devices.Length; i++) { IBaseFilter tmp; Log.Log.WriteFile("analog: AddTvAudioFilter try:{0} {1}", devices[i].Name, i); //if tv audio tuner is currently in use we can skip it if (DevicesInUse.Instance.IsUsed(devices[i])) continue; int hr; try { //add tv audio tuner to graph hr = graphBuilder.AddSourceFilterForMoniker(devices[i].Mon, null, devices[i].Name, out tmp); } catch (Exception) { Log.Log.WriteFile("analog: cannot add filter to graph"); continue; } if (hr != 0) { //failed to add tv audio tuner to graph, continue with the next one if (tmp != null) { graphBuilder.RemoveFilter(tmp); Release.ComObject("tvAudioFilter filter", tmp); } continue; } // try connecting the tv tuner-> tv audio tuner if (FilterGraphTools.ConnectPin(graphBuilder, tuner.AudioPin, tmp, 0)) { // Got it ! // Connect tv audio tuner to the crossbar IPin pin = DsFindPin.ByDirection(tmp, PinDirection.Output, 0); hr = graphBuilder.Connect(pin, crossbar.AudioTunerIn); if (hr < 0) { //failed graphBuilder.RemoveFilter(tmp); Release.ComObject("audiotuner pinin", pin); Release.ComObject("audiotuner filter", tmp); } else { //succeeded. we're done Log.Log.WriteFile("analog: AddTvAudioFilter succeeded:{0}", devices[i].Name); Release.ComObject("audiotuner pinin", pin); _filterTvAudioTuner = tmp; _audioDevice = devices[i]; DevicesInUse.Instance.Add(_audioDevice); _tvAudioTunerInterface = tuner.Filter as IAMTVAudio; break; } } else { // cannot connect tv tuner-> tv audio tuner, try next one... graphBuilder.RemoveFilter(tmp); Release.ComObject("audiotuner filter", tmp); } } } if (_filterTvAudioTuner == null) { Log.Log.WriteFile("analog: AddTvAudioFilter no tv audio devices found - Trying TvTuner filter"); int hr = graphBuilder.Connect(tuner.AudioPin, crossbar.AudioTunerIn); if (hr != 0) { Log.Log.Error("analog: unable to add TvAudioTuner to graph - even TvTuner as TvAudio fails"); mode = TvAudioVariant.Unavailable; } else { Log.Log.WriteFile("analog: AddTvAudioFilter connected TvTuner with Crossbar directly succeeded!"); mode = TvAudioVariant.TvTunerConnection; _tvAudioTunerInterface = tuner.Filter as IAMTVAudio; if (_tvAudioTunerInterface != null) { Log.Log.WriteFile("analog: AddTvAudioFilter succeeded - TvTuner is also TvAudio"); _filterTvAudioTuner = tuner.Filter; mode = TvAudioVariant.TvTuner; } } graph.TvAudio.Mode = mode; } else { mode = TvAudioVariant.Normal; graph.TvAudio.Name = _audioDevice.Name; } if (mode != TvAudioVariant.Unavailable && mode != TvAudioVariant.TvTunerConnection && _tvAudioTunerInterface != null) { CheckCapabilities(graph); } return true; }
/// <summary> /// Initializes a new instance of the <see cref="TvCardDVBC"/> class. /// </summary> /// <param name="epgEvents">The EPG events interface.</param> /// <param name="device">The device.</param> public TvCardDVBC(IEpgEvents epgEvents, DsDevice device) : base(epgEvents, device) { _cardType = CardType.DvbC; }
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(DsDevice dev, int iSampleRate, int iChannels) { int hr; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; ICaptureGraphBuilder2 capGraph = null; IBaseFilter baseGrabFlt = null; IBaseFilter nullrenderer = null; IMediaFilter mediaFilt = m_FilterGraph as IMediaFilter; // Get the graphbuilder object m_FilterGraph = (IFilterGraph2) new FilterGraph(); m_mediaCtrl = m_FilterGraph as IMediaControl; try { // Get the ICaptureGraphBuilder2 capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); // Get the SampleGrabber interface sampGrabber = (ISampleGrabber) new SampleGrabber(); // Start building the graph hr = capGraph.SetFiltergraph(m_FilterGraph); DsError.ThrowExceptionForHR(hr); // Add the audio device hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Audio input", out capFilter); DsError.ThrowExceptionForHR(hr); // If any of the default config items are set if (iSampleRate + iChannels > 0) { SetConfigParms(capGraph, capFilter, iSampleRate, iChannels); } // Get the SampleGrabber interface sampGrabber = new SampleGrabber() as ISampleGrabber; baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); // Add the frame grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); // --------------------------------- // Connect the file filter to the sample grabber // Hopefully this will be the audio pin, we could check by reading it's mediatype IPin iPinOut = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0); // Get the input pin from the sample grabber IPin iPinIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); hr = m_FilterGraph.Connect(iPinOut, iPinIn); DsError.ThrowExceptionForHR(hr); // Add the null renderer to the graph nullrenderer = new NullRenderer() as IBaseFilter; hr = m_FilterGraph.AddFilter(nullrenderer, "Null renderer"); DsError.ThrowExceptionForHR(hr); // --------------------------------- // Connect the sample grabber to the null renderer iPinOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0); iPinIn = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0); hr = m_FilterGraph.Connect(iPinOut, iPinIn); DsError.ThrowExceptionForHR(hr); // Read and cache the resulting settings SaveSizeInfo(sampGrabber); } finally { if (capFilter != null) { Marshal.ReleaseComObject(capFilter); capFilter = null; } if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (capGraph != null) { Marshal.ReleaseComObject(capGraph); capGraph = null; } } }
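ConfigureSampleGrabber is referenced above but not shown. In the DirectShow.NET audio-grabber samples it usually amounts to the following; PCM output and a BufferCB callback are assumptions, and the containing class is assumed to implement ISampleGrabberCB:

private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Ask the grabber to deliver raw PCM audio.
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Audio;
    media.subType = MediaSubType.PCM;
    media.formatType = FormatType.WaveEx;
    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);

    // Route each buffer to ISampleGrabberCB.BufferCB on this class (second argument = 1).
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}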
public static bool GetDevicesOfCat(Guid cat, out ArrayList devs) { devs = null; int hr; object comObj = null; ICreateDevEnum enumDev = null; IEnumMoniker enumMon = null; IMoniker[] mon = new IMoniker[1]; try { Type srvType = Type.GetTypeFromCLSID(Clsid.SystemDeviceEnum); if (srvType == null) { throw new NotImplementedException("System Device Enumerator"); } comObj = Activator.CreateInstance(srvType); enumDev = (ICreateDevEnum)comObj; hr = enumDev.CreateClassEnumerator(ref cat, out enumMon, 0); if (hr != 0) { throw new NotSupportedException("No devices of the category"); } int /*f, */ count = 0; IntPtr ptr = new IntPtr(); do { hr = enumMon.Next(1, mon, ptr); if ((hr != 0) || (mon[0] == null)) { break; } DsDevice dev = new DsDevice(); GetFriendlyName(mon[0], ref dev.Name, ref dev.Path); if (devs == null) { devs = new ArrayList(); } dev.id = count; dev.Mon = mon[0]; mon[0] = null; devs.Add(dev); dev = null; count++; }while (true); return(count > 0); } catch (Exception) { if (devs != null) { foreach (DsDevice d in devs) { d.Dispose(); } devs = null; } return(false); } finally { enumDev = null; if (mon[0] != null) { Marshal.ReleaseComObject(mon[0]); } mon[0] = null; if (enumMon != null) { Marshal.ReleaseComObject(enumMon); } enumMon = null; if (comObj != null) { Marshal.ReleaseComObject(comObj); } comObj = null; } }
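A usage sketch for the older ArrayList-based helper above, enumerating the video input devices and releasing them afterwards; the FilterCategory constant is assumed to exist in this library variant as it does in the other snippets on this page:

ArrayList devs;
if (GetDevicesOfCat(FilterCategory.VideoInputDevice, out devs))
{
    foreach (DsDevice d in devs)
    {
        Console.WriteLine(d.id + ": " + d.Name);
        d.Dispose();   // releases the moniker held in d.Mon
    }
}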
/// <summary> /// Create a new filter from its moniker /// </summary> /// <param name="dsDevice"></param> public Device(DsDevice dsDevice) { _dsDevice = dsDevice; }
private void SetupGraph(DsDevice dev, int iWidth, int iHeight, short iBPP) { int hr; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; IPin pCaptureOut = null; IPin pSampleIn = null; IPin pRenderIn = null; m_FilterGraph = new FilterGraph() as IFilterGraph2; try { #if DEBUG m_rot = new DsROTEntry(m_FilterGraph); #endif hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter); DsError.ThrowExceptionForHR(hr); m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0); if (m_pinStill == null) { m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0); } if (m_pinStill == null) { IPin pRaw = null; IPin pSmart = null; m_VidControl = null; IBaseFilter iSmartTee = (IBaseFilter) new SmartTee(); try { hr = m_FilterGraph.AddFilter(iSmartTee, "SmartTee"); DsError.ThrowExceptionForHR(hr); pRaw = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); pSmart = DsFindPin.ByDirection(iSmartTee, PinDirection.Input, 0); hr = m_FilterGraph.Connect(pRaw, pSmart); DsError.ThrowExceptionForHR(hr); m_pinStill = DsFindPin.ByName(iSmartTee, "Preview"); pCaptureOut = DsFindPin.ByName(iSmartTee, "Capture"); if (iHeight + iWidth + iBPP > 0) { SetConfigParms(pRaw, iWidth, iHeight, iBPP); } } finally { if (pRaw != null) { Marshal.ReleaseComObject(pRaw); } if (pRaw != pSmart) { Marshal.ReleaseComObject(pSmart); } if (pRaw != iSmartTee) { Marshal.ReleaseComObject(iSmartTee); } } } else { m_VidControl = capFilter as IAMVideoControl; pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); if (iHeight + iWidth + iBPP > 0) { SetConfigParms(m_pinStill, iWidth, iHeight, iBPP); } } sampGrabber = new SampleGrabber() as ISampleGrabber; IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); IBaseFilter pRenderer = new VideoRendererDefault() as IBaseFilter; hr = m_FilterGraph.AddFilter(pRenderer, "Renderer"); DsError.ThrowExceptionForHR(hr); pRenderIn = DsFindPin.ByDirection(pRenderer, PinDirection.Input, 0); hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); if (m_VidControl == null) { hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); DsError.ThrowExceptionForHR(hr); hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); DsError.ThrowExceptionForHR(hr); } else { hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); DsError.ThrowExceptionForHR(hr); hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); DsError.ThrowExceptionForHR(hr); } SaveSizeInfo(sampGrabber); IMediaControl mediaCtrl = m_FilterGraph as IMediaControl; hr = mediaCtrl.Run(); DsError.ThrowExceptionForHR(hr); } finally { if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (pCaptureOut != null) { Marshal.ReleaseComObject(pCaptureOut); pCaptureOut = null; } if (pRenderIn != null) { Marshal.ReleaseComObject(pRenderIn); pRenderIn = null; } if (pSampleIn != null) { Marshal.ReleaseComObject(pSampleIn); pSampleIn = null; } } }
/// <summary> /// Initializes a new instance of the <see cref="TvCardATSC"/> class. /// </summary> /// <param name="epgEvents">The EPG events interface.</param> /// <param name="device">The device.</param> public TvCardATSC(IEpgEvents epgEvents, DsDevice device) : base(epgEvents, device) { _cardType = CardType.Atsc; }
/// <summary> /// Enumerate all tvcard devices and add them to the list /// </summary> private void DetectCards() { ITunerCap _providerType; bool genericNP = false; //SkyStar 2 & IP Streaming DsDevice[] devices = DsDevice.GetDevicesOfCat(FilterCategory.LegacyAmFilterCategory); for (int i = 0; i < devices.Length; ++i) { if (String.Compare(devices[i].Name, "B2C2 MPEG-2 Source", true) == 0) { Log.Log.WriteFile("Detected SkyStar 2 card"); TvCardDvbSS2 card = new TvCardDvbSS2(_epgEvents, devices[i]); _cards.Add(card); //break; maybe more than one B2C2 card ? } else if (String.Compare(devices[i].Name, "Elecard NWSource-Plus", true) == 0) { TvBusinessLayer layer = new TvBusinessLayer(); Setting setting; setting = layer.GetSetting("iptvCardCount", "1"); int iptvCardCount = Convert.ToInt32(setting.Value); for (int cardNum = 0; cardNum < iptvCardCount; cardNum++) { Log.Log.WriteFile("Detected IP TV Card " + cardNum); TvCardDVBIP card = new TvCardDVBIPElecard(_epgEvents, devices[i], cardNum); _cards.Add(card); } } else if (String.Compare(devices[i].Name, "MediaPortal IPTV Source Filter", true) == 0) { TvBusinessLayer layer = new TvBusinessLayer(); Setting setting; setting = layer.GetSetting("iptvCardCount", "1"); int iptvCardCount = Convert.ToInt32(setting.Value); for (int cardNum = 0; cardNum < iptvCardCount; cardNum++) { Log.Log.WriteFile("Detected IP TV Card " + cardNum); TvCardDVBIP card = new TvCardDVBIPBuiltIn(_epgEvents, devices[i], cardNum); _cards.Add(card); } } } //Hauppauge HD PVR & Colossus devices = DsDevice.GetDevicesOfCat(FilterCategory.AMKSCrossbar); for (int i = 0; i < devices.Length; ++i) { if (devices[i].Name == null) { continue; } if (devices[i].Name.Equals("Hauppauge HD PVR Crossbar")) { Log.Log.WriteFile("Detected Hauppauge HD PVR"); TvCardHDPVR card = new TvCardHDPVR(devices[i]); _cards.Add(card); } else if (devices[i].Name.Contains("Hauppauge Colossus Crossbar")) { Log.Log.WriteFile("Detected Hauppauge Colossus"); TvCardHDPVR card = new TvCardHDPVR(devices[i]); _cards.Add(card); } } //BDA TV devices devices = DsDevice.GetDevicesOfCat(FilterCategory.BDASourceFiltersCategory); if (devices.Length > 0) { IFilterGraph2 graphBuilder = (IFilterGraph2) new FilterGraph(); DsROTEntry rotEntry = new DsROTEntry(graphBuilder); Guid networkProviderClsId = new Guid("{D7D42E5C-EB36-4aad-933B-B4C419429C98}"); if (FilterGraphTools.IsThisComObjectInstalled(networkProviderClsId)) { handleInternalNetworkProviderFilter(devices, graphBuilder, networkProviderClsId, rotEntry); } else { ITuningSpace tuningSpace = null; ILocator locator = null; //DVBT IBaseFilter networkDVBT = null; try { networkProviderClsId = typeof(DVBTNetworkProvider).GUID; networkDVBT = FilterGraphTools.AddFilterFromClsid(graphBuilder, networkProviderClsId, "DVBT Network Provider"); tuningSpace = (ITuningSpace) new DVBTuningSpace(); tuningSpace.put_UniqueName("DVBT TuningSpace"); tuningSpace.put_FriendlyName("DVBT TuningSpace"); tuningSpace.put__NetworkType(typeof(DVBTNetworkProvider).GUID); ((IDVBTuningSpace)tuningSpace).put_SystemType(DVBSystemType.Terrestrial); locator = (ILocator) new DVBTLocator(); locator.put_CarrierFrequency(-1); locator.put_InnerFEC(FECMethod.MethodNotSet); locator.put_InnerFECRate(BinaryConvolutionCodeRate.RateNotSet); locator.put_Modulation(ModulationType.ModNotSet); locator.put_OuterFEC(FECMethod.MethodNotSet); locator.put_OuterFECRate(BinaryConvolutionCodeRate.RateNotSet); locator.put_SymbolRate(-1); tuningSpace.put_DefaultLocator(locator); ((ITuner)networkDVBT).put_TuningSpace(tuningSpace); } catch 
(Exception ex) { Log.Log.Error("DVBT card detection error: {0}", ex.ToString()); } //DVBS networkProviderClsId = typeof(DVBSNetworkProvider).GUID; IBaseFilter networkDVBS = FilterGraphTools.AddFilterFromClsid(graphBuilder, networkProviderClsId, "DVBS Network Provider"); tuningSpace = (ITuningSpace) new DVBSTuningSpace(); tuningSpace.put_UniqueName("DVBS TuningSpace"); tuningSpace.put_FriendlyName("DVBS TuningSpace"); tuningSpace.put__NetworkType(typeof(DVBSNetworkProvider).GUID); ((IDVBSTuningSpace)tuningSpace).put_SystemType(DVBSystemType.Satellite); locator = (ILocator) new DVBTLocator(); locator.put_CarrierFrequency(-1); locator.put_InnerFEC(FECMethod.MethodNotSet); locator.put_InnerFECRate(BinaryConvolutionCodeRate.RateNotSet); locator.put_Modulation(ModulationType.ModNotSet); locator.put_OuterFEC(FECMethod.MethodNotSet); locator.put_OuterFECRate(BinaryConvolutionCodeRate.RateNotSet); locator.put_SymbolRate(-1); tuningSpace.put_DefaultLocator(locator); ((ITuner)networkDVBS).put_TuningSpace(tuningSpace); //ATSC networkProviderClsId = typeof(ATSCNetworkProvider).GUID; IBaseFilter networkATSC = FilterGraphTools.AddFilterFromClsid(graphBuilder, networkProviderClsId, "ATSC Network Provider"); tuningSpace = (ITuningSpace) new ATSCTuningSpace(); tuningSpace.put_UniqueName("ATSC TuningSpace"); tuningSpace.put_FriendlyName("ATSC TuningSpace"); ((IATSCTuningSpace)tuningSpace).put_MaxChannel(10000); ((IATSCTuningSpace)tuningSpace).put_MaxMinorChannel(10000); ((IATSCTuningSpace)tuningSpace).put_MinChannel(0); ((IATSCTuningSpace)tuningSpace).put_MinMinorChannel(0); ((IATSCTuningSpace)tuningSpace).put_MinPhysicalChannel(0); ((IATSCTuningSpace)tuningSpace).put_InputType(TunerInputType.Antenna); locator = (IATSCLocator) new ATSCLocator(); locator.put_CarrierFrequency(-1); locator.put_InnerFEC(FECMethod.MethodNotSet); locator.put_InnerFECRate(BinaryConvolutionCodeRate.RateNotSet); locator.put_Modulation(ModulationType.ModNotSet); locator.put_OuterFEC(FECMethod.MethodNotSet); locator.put_OuterFECRate(BinaryConvolutionCodeRate.RateNotSet); locator.put_SymbolRate(-1); locator.put_CarrierFrequency(-1); ((IATSCLocator)locator).put_PhysicalChannel(-1); ((IATSCLocator)locator).put_TSID(-1); tuningSpace.put_DefaultLocator(locator); ((ITuner)networkATSC).put_TuningSpace(tuningSpace); //DVBC networkProviderClsId = typeof(DVBCNetworkProvider).GUID; IBaseFilter networkDVBC = FilterGraphTools.AddFilterFromClsid(graphBuilder, networkProviderClsId, "DVBC Network Provider"); tuningSpace = (ITuningSpace) new DVBTuningSpace(); tuningSpace.put_UniqueName("DVBC TuningSpace"); tuningSpace.put_FriendlyName("DVBC TuningSpace"); tuningSpace.put__NetworkType(typeof(DVBCNetworkProvider).GUID); ((IDVBTuningSpace)tuningSpace).put_SystemType(DVBSystemType.Cable); locator = (ILocator) new DVBCLocator(); locator.put_CarrierFrequency(-1); locator.put_InnerFEC(FECMethod.MethodNotSet); locator.put_InnerFECRate(BinaryConvolutionCodeRate.RateNotSet); locator.put_Modulation(ModulationType.ModNotSet); locator.put_OuterFEC(FECMethod.MethodNotSet); locator.put_OuterFECRate(BinaryConvolutionCodeRate.RateNotSet); locator.put_SymbolRate(-1); tuningSpace.put_DefaultLocator(locator); ((ITuner)networkDVBC).put_TuningSpace(tuningSpace); //MS Network Provider - MCE Roll-up 2 or better networkProviderClsId = typeof(NetworkProvider).GUID; // First test if the Generic Network Provider is available (only on MCE 2005 + Update Rollup 2) if (FilterGraphTools.IsThisComObjectInstalled(networkProviderClsId)) { genericNP = true; } for (int i = 0; i < 
devices.Length; i++) { bool connected = false; bool isCablePreferred = false; string name = devices[i].Name ?? "unknown"; name = name.ToLowerInvariant(); Log.Log.WriteFile("Found card:{0}", name); //silicondust work-around for dvb type detection issue. generic provider would always use dvb-t if (name.Contains("silicondust hdhomerun tuner")) { isCablePreferred = CheckHDHomerunCablePrefered(name); Log.Log.WriteFile("silicondust hdhomerun detected - prefer cable mode: {0}", isCablePreferred); } IBaseFilter tmp; try { graphBuilder.AddSourceFilterForMoniker(devices[i].Mon, null, name, out tmp); } catch (InvalidComObjectException) { //ignore bad card Log.Log.WriteFile("cannot add filter {0} to graph", devices[i].Name); continue; } //Use the Microsoft Network Provider method first but only if available if (genericNP) { IBaseFilter networkDVB = FilterGraphTools.AddFilterFromClsid(graphBuilder, networkProviderClsId, "Microsoft Network Provider"); if (ConnectFilter(graphBuilder, networkDVB, tmp)) { Log.Log.WriteFile("Detected DVB card:{0}", name); // determine the DVB card supported GUIDs here! _providerType = networkDVB as ITunerCap; int ulcNetworkTypesMax = 5; int pulcNetworkTypes; Guid[] pguidNetworkTypes = new Guid[ulcNetworkTypesMax]; int hr = _providerType.get_SupportedNetworkTypes(ulcNetworkTypesMax, out pulcNetworkTypes, pguidNetworkTypes); for (int n = 0; n < pulcNetworkTypes; n++) { Log.Log.Debug("Detecting type by MSNP {0}: {1}", n, pguidNetworkTypes[n]); //test the first found guid to determine the DVB card type if (pguidNetworkTypes[n] == (typeof(DVBTNetworkProvider).GUID) && !isCablePreferred) { Log.Log.WriteFile("Detected DVB-T* card:{0}", name); TvCardDVBT dvbtCard = new TvCardDVBT(_epgEvents, devices[i]); _cards.Add(dvbtCard); connected = true; } else if (pguidNetworkTypes[n] == (typeof(DVBSNetworkProvider).GUID) && !isCablePreferred) { Log.Log.WriteFile("Detected DVB-S* card:{0}", name); TvCardDVBS dvbsCard = new TvCardDVBS(_epgEvents, devices[i]); _cards.Add(dvbsCard); connected = true; } else if (pguidNetworkTypes[n] == (typeof(DVBCNetworkProvider).GUID)) { Log.Log.WriteFile("Detected DVB-C* card:{0}", name); TvCardDVBC dvbcCard = new TvCardDVBC(_epgEvents, devices[i]); _cards.Add(dvbcCard); connected = true; } else if (pguidNetworkTypes[n] == (typeof(ATSCNetworkProvider).GUID)) { Log.Log.WriteFile("Detected ATSC* card:{0}", name); TvCardATSC dvbsCard = new TvCardATSC(_epgEvents, devices[i]); _cards.Add(dvbsCard); connected = true; } if (connected) { graphBuilder.RemoveFilter(tmp); Release.ComObject("tmp filter", tmp); break; // already found one, no need to continue } else if (n == (pulcNetworkTypes - 1)) { Log.Log.WriteFile("Connected with generic MS Network Provider however network types don't match, using the original method"); } } } else { Log.Log.WriteFile("Not connected with generic MS Network Provider, using the original method"); connected = false; } graphBuilder.RemoveFilter(networkDVB); Release.ComObject("ms provider", networkDVB); } if (!genericNP || !connected) { if (ConnectFilter(graphBuilder, networkDVBT, tmp)) { Log.Log.WriteFile("Detected DVB-T card:{0}", name); TvCardDVBT dvbtCard = new TvCardDVBT(_epgEvents, devices[i]); _cards.Add(dvbtCard); } else if (ConnectFilter(graphBuilder, networkDVBC, tmp)) { Log.Log.WriteFile("Detected DVB-C card:{0}", name); TvCardDVBC dvbcCard = new TvCardDVBC(_epgEvents, devices[i]); _cards.Add(dvbcCard); } else if (ConnectFilter(graphBuilder, networkDVBS, tmp)) { Log.Log.WriteFile("Detected DVB-S card:{0}", name); TvCardDVBS 
dvbsCard = new TvCardDVBS(_epgEvents, devices[i]); _cards.Add(dvbsCard); } else if (ConnectFilter(graphBuilder, networkATSC, tmp)) { Log.Log.WriteFile("Detected ATSC card:{0}", name); TvCardATSC dvbsCard = new TvCardATSC(_epgEvents, devices[i]); _cards.Add(dvbsCard); } graphBuilder.RemoveFilter(tmp); Release.ComObject("tmp filter", tmp); } } FilterGraphTools.RemoveAllFilters(graphBuilder); Release.ComObject("dvbc provider", networkDVBC); Release.ComObject("atsc provider", networkATSC); Release.ComObject("dvbs provider", networkDVBS); Release.ComObject("dvbt provider", networkDVBT); rotEntry.Dispose(); Release.ComObject("graph builder", graphBuilder); } } //Analogue TV devices devices = DsDevice.GetDevicesOfCat(FilterCategory.AMKSTVTuner); for (int i = 0; i < devices.Length; i++) { string name = devices[i].Name ?? "unknown"; name = name.ToLowerInvariant(); Log.Log.WriteFile("Detected analog card:{0}", name); TvCardAnalog analogCard = new TvCardAnalog(devices[i]); _cards.Add(analogCard); } _cards.Add(new RadioWebStreamCard()); }
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(DsDevice dev, AMMediaType media) { int hr; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; ICaptureGraphBuilder2 capGraph = null; // Get the graphbuilder object m_FilterGraph = (IFilterGraph2) new FilterGraph(); m_mediaCtrl = m_FilterGraph as IMediaControl; try { // Get the ICaptureGraphBuilder2 capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); // Get the SampleGrabber interface sampGrabber = (ISampleGrabber) new SampleGrabber(); // Start building the graph hr = capGraph.SetFiltergraph( m_FilterGraph ); DsError.ThrowExceptionForHR( hr ); // Add the video device hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter); DsError.ThrowExceptionForHR( hr ); // add video crossbar // thanks to Andrew Fernie - this is to get tv tuner cards working IAMCrossbar crossbar = null; object o; hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMCrossbar).GUID, out o); if (hr >= 0) { crossbar = (IAMCrossbar)o; int oPin, iPin; int ovLink, ivLink; ovLink = ivLink = 0; crossbar.get_PinCounts(out oPin, out iPin); int pIdxRel; PhysicalConnectorType tp; for (int i = 0; i < iPin; i++) { crossbar.get_CrossbarPinInfo(true, i, out pIdxRel, out tp); if (tp == PhysicalConnectorType.Video_Composite) ivLink = i; } for (int i = 0; i < oPin; i++) { crossbar.get_CrossbarPinInfo(false, i, out pIdxRel, out tp); if (tp == PhysicalConnectorType.Video_VideoDecoder) ovLink = i; } try { crossbar.Route(ovLink, ivLink); o = null; } catch { throw new Exception("Failed to get IAMCrossbar"); } } //add AVI Decompressor IBaseFilter pAVIDecompressor = (IBaseFilter)new AVIDec(); hr = m_FilterGraph.AddFilter(pAVIDecompressor, "AVI Decompressor"); DsError.ThrowExceptionForHR(hr); // IBaseFilter baseGrabFlt = (IBaseFilter) sampGrabber; ConfigureSampleGrabber(sampGrabber); // Add the frame grabber to the graph hr = m_FilterGraph.AddFilter( baseGrabFlt, "Ds.NET Grabber" ); DsError.ThrowExceptionForHR( hr ); SetConfigParms(capGraph, capFilter, media); hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, pAVIDecompressor, baseGrabFlt); if (hr < 0) { hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt); } DsError.ThrowExceptionForHR( hr ); SaveSizeInfo(sampGrabber); } finally { if (capFilter != null) { Marshal.ReleaseComObject(capFilter); capFilter = null; } if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (capGraph != null) { Marshal.ReleaseComObject(capGraph); capGraph = null; } } }
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(DsDevice dev, int iWidth, int iHeight, short iBPP, Control hControl) { int hr; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; IPin pCaptureOut = null; IPin pSampleIn = null; IPin pRenderIn = null; // Get the graphbuilder object m_FilterGraph = new FilterGraph() as IFilterGraph2; try { // add the video input device hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter); DsError.ThrowExceptionForHR(hr); // Find the still pin m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0); // Didn't find one. Is there a preview pin? if (m_pinStill == null) { m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0); } // Still haven't found one. Need to put a splitter in so we have // one stream to capture the bitmap from, and one to display. Ok, we // don't *have* to do it that way, but we are going to anyway. if (m_pinStill == null) { IPin pRaw = null; IPin pSmart = null; // There is no still pin m_VidControl = null; // Add a splitter IBaseFilter iSmartTee = (IBaseFilter) new SmartTee(); try { hr = m_FilterGraph.AddFilter(iSmartTee, "SmartTee"); DsError.ThrowExceptionForHR(hr); // Find the find the capture pin from the video device and the // input pin for the splitter, and connnect them pRaw = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); pSmart = DsFindPin.ByDirection(iSmartTee, PinDirection.Input, 0); hr = m_FilterGraph.Connect(pRaw, pSmart); DsError.ThrowExceptionForHR(hr); // Now set the capture and still pins (from the splitter) m_pinStill = DsFindPin.ByName(iSmartTee, "Preview"); pCaptureOut = DsFindPin.ByName(iSmartTee, "Capture"); // If any of the default config items are set, perform the config // on the actual video device (rather than the splitter) if (iHeight + iWidth + iBPP > 0) { SetConfigParms(pRaw, iWidth, iHeight, iBPP); } } finally { if (pRaw != null) { Marshal.ReleaseComObject(pRaw); } if (pRaw != pSmart) { Marshal.ReleaseComObject(pSmart); } if (pRaw != iSmartTee) { Marshal.ReleaseComObject(iSmartTee); } } } else { // Get a control pointer (used in Click()) m_VidControl = capFilter as IAMVideoControl; pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); // If any of the default config items are set if (iHeight + iWidth + iBPP > 0) { SetConfigParms(m_pinStill, iWidth, iHeight, iBPP); } } // Get the SampleGrabber interface sampGrabber = new SampleGrabber() as ISampleGrabber; // Configure the sample grabber IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); // Get the default video renderer IBaseFilter pRenderer = new VideoRendererDefault() as IBaseFilter; hr = m_FilterGraph.AddFilter(pRenderer, "Renderer"); DsError.ThrowExceptionForHR(hr); pRenderIn = DsFindPin.ByDirection(pRenderer, PinDirection.Input, 0); // Add the sample grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); if (m_VidControl == null) { // Connect the Still pin to the sample grabber hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); DsError.ThrowExceptionForHR(hr); // Connect the capture pin to the renderer hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); DsError.ThrowExceptionForHR(hr); } else { // Connect the capture pin to the renderer hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); DsError.ThrowExceptionForHR(hr); // Connect the Still pin to the 
sample grabber hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); DsError.ThrowExceptionForHR(hr); } // Learn the video properties SaveSizeInfo(sampGrabber); ConfigVideoWindow(hControl); // Start the graph IMediaControl mediaCtrl = m_FilterGraph as IMediaControl; hr = mediaCtrl.Run(); DsError.ThrowExceptionForHR(hr); } finally { if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (pCaptureOut != null) { Marshal.ReleaseComObject(pCaptureOut); pCaptureOut = null; } if (pRenderIn != null) { Marshal.ReleaseComObject(pRenderIn); pRenderIn = null; } if (pSampleIn != null) { Marshal.ReleaseComObject(pSampleIn); pSampleIn = null; } } }
/// <summary> /// Initializes a new instance of the <see cref="TvCardDVBT"/> class. /// </summary> /// <param name="epgEvents">The EPG events interface.</param> /// <param name="device">The device.</param> public TvCardDVBT(IEpgEvents epgEvents, DsDevice device) : base(epgEvents, device) { _cardType = CardType.DvbT; }
public void InitVideoCapture(int videoDeviceID, FrameRate framerate, Resolution resolution, ImageFormat format, bool grayscale) { if (cameraInitialized) { return; } this.resolution = resolution; this.grayscale = grayscale; this.frameRate = framerate; this.videoDeviceID = videoDeviceID; this.format = format; switch (resolution) { case Resolution._160x120: cameraWidth = 160; cameraHeight = 120; break; case Resolution._320x240: cameraWidth = 320; cameraHeight = 240; break; case Resolution._640x480: cameraWidth = 640; cameraHeight = 480; break; case Resolution._800x600: cameraWidth = 800; cameraHeight = 600; break; case Resolution._1024x768: cameraWidth = 1024; cameraHeight = 768; break; case Resolution._1280x1024: cameraWidth = 1280; cameraHeight = 1024; break; case Resolution._1600x1200: cameraWidth = 1600; cameraHeight = 1200; break; } if (!DsUtils.IsCorrectDirectXVersion()) { throw new GoblinException("DirectX 8.1 NOT installed!"); } if (!DsDev.GetDevicesOfCat(FilterCategory.VideoInputDevice, out capDevices)) { throw new GoblinException("No video capture devices found!"); } DsDevice dev = null; if (videoDeviceID >= capDevices.Count) { String suggestion = "Try the following device IDs:"; for (int i = 0; i < capDevices.Count; i++) { suggestion += " " + i + ":" + ((DsDevice)capDevices[i]).Name + ", "; } throw new GoblinException("VideoDeviceID " + videoDeviceID + " is out of the range. " + suggestion); } dev = (DsDevice)capDevices[videoDeviceID]; selectedVideoDeviceName = ((DsDevice)capDevices[videoDeviceID]).Name; if (dev == null) { throw new GoblinException("This video device cannot be accessed"); } StartupVideo(dev.Mon); cameraInitialized = true; }
/// <summary> /// Configures the DirectShow graph to play the selected video capture /// device with the selected parameters /// </summary> private void SetupGraph() { /* Clean up any messes left behind */ FreeResources(); try { /* Create a new graph */ m_graph = (IGraphBuilder) new FilterGraphNoThread(); #if DEBUG m_rotEntry = new DsROTEntry(m_graph); #endif /* Create a capture graph builder to help * with rendering a capture graph */ var graphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); /* Set our filter graph to the capture graph */ int hr = graphBuilder.SetFiltergraph(m_graph); DsError.ThrowExceptionForHR(hr); /* Add our capture device source to the graph */ if (m_videoCaptureSourceChanged) { m_captureDevice = AddFilterByName(m_graph, FilterCategory.VideoInputDevice, VideoCaptureSource); m_videoCaptureSourceChanged = false; } else if (m_videoCaptureDeviceChanged) { m_captureDevice = AddFilterByDevicePath(m_graph, FilterCategory.VideoInputDevice, VideoCaptureDevice.DevicePath); m_videoCaptureDeviceChanged = false; } /* If we have a null capture device, we have an issue */ if (m_captureDevice == null) { throw new WPFMediaKitException(string.Format("Capture device {0} not found or could not be created", VideoCaptureSource)); } if (UseYuv && !EnableSampleGrabbing) { /* Configure the video output pin with our parameters and if it fails * then just use the default media subtype*/ if (!SetVideoCaptureParameters(graphBuilder, m_captureDevice, MediaSubType.YUY2)) { SetVideoCaptureParameters(graphBuilder, m_captureDevice, Guid.Empty); } } else { /* Configure the video output pin with our parameters */ SetVideoCaptureParameters(graphBuilder, m_captureDevice, Guid.Empty); } var rendererType = VideoRendererType.VideoMixingRenderer9; /* Creates a video renderer and register the allocator with the base class */ m_renderer = CreateVideoRenderer(rendererType, m_graph, 1); if (rendererType == VideoRendererType.VideoMixingRenderer9) { var mixer = m_renderer as IVMRMixerControl9; if (mixer != null && !EnableSampleGrabbing && UseYuv) { VMR9MixerPrefs dwPrefs; mixer.GetMixingPrefs(out dwPrefs); dwPrefs &= ~VMR9MixerPrefs.RenderTargetMask; dwPrefs |= VMR9MixerPrefs.RenderTargetYUV; /* Prefer YUV */ mixer.SetMixingPrefs(dwPrefs); } } if (EnableSampleGrabbing) { m_sampleGrabber = (ISampleGrabber) new SampleGrabber(); SetupSampleGrabber(m_sampleGrabber); hr = m_graph.AddFilter(m_sampleGrabber as IBaseFilter, "SampleGrabber"); DsError.ThrowExceptionForHR(hr); } IBaseFilter mux = null; IFileSinkFilter sink = null; if (!string.IsNullOrEmpty(this.m_fileName)) { hr = graphBuilder.SetOutputFileName(MediaSubType.Asf, this.m_fileName, out mux, out sink); DsError.ThrowExceptionForHR(hr); hr = graphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, m_captureDevice, null, mux); DsError.ThrowExceptionForHR(hr); // use the first audio device var audioDevices = DsDevice.GetDevicesOfCat(FilterCategory.AudioInputDevice); if (audioDevices.Length > 0) { var audioDevice = AddFilterByDevicePath(m_graph, FilterCategory.AudioInputDevice, audioDevices[0].DevicePath); hr = graphBuilder.RenderStream(PinCategory.Capture, MediaType.Audio, audioDevice, null, mux); DsError.ThrowExceptionForHR(hr); } } hr = graphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, m_captureDevice, null, m_renderer); DsError.ThrowExceptionForHR(hr); /* Register the filter graph * with the base classes */ SetupFilterGraph(m_graph); /* Sets the NaturalVideoWidth/Height */ SetNativePixelSizes(m_renderer); HasVideo = true; /* Make 
sure we Release() this COM reference */ if (mux != null) { Marshal.ReleaseComObject(mux); } if (sink != null) { Marshal.ReleaseComObject(sink); } Marshal.ReleaseComObject(graphBuilder); } catch (Exception ex) { /* Something went wrong while building the graph */ FreeResources(); InvokeMediaFailed(new MediaFailedEventArgs(ex.Message, ex)); } /* Success */ InvokeMediaOpened(); }
private void CMB_videosources_SelectedIndexChanged(object sender, EventArgs e) { if (MainV2.MONO) { return; } int hr; int count; int size; object o; IBaseFilter capFilter = null; ICaptureGraphBuilder2 capGraph = null; AMMediaType media = null; VideoInfoHeader v; VideoStreamConfigCaps c; var modes = new List <GCSBitmapInfo>(); // Get the ICaptureGraphBuilder2 capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); var m_FilterGraph = (IFilterGraph2) new FilterGraph(); DsDevice[] capDevices; capDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice); // Add the video device hr = m_FilterGraph.AddSourceFilterForMoniker(capDevices[CMB_videosources.SelectedIndex].Mon, null, "Video input", out capFilter); try { DsError.ThrowExceptionForHR(hr); } catch (Exception ex) { CustomMessageBox.Show("Can not add video source\n" + ex); return; } // Find the stream config interface hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o); DsError.ThrowExceptionForHR(hr); var videoStreamConfig = o as IAMStreamConfig; if (videoStreamConfig == null) { CustomMessageBox.Show("Failed to get IAMStreamConfig"); return; } hr = videoStreamConfig.GetNumberOfCapabilities(out count, out size); DsError.ThrowExceptionForHR(hr); var TaskMemPointer = Marshal.AllocCoTaskMem(size); for (var i = 0; i < count; i++) { var ptr = IntPtr.Zero; hr = videoStreamConfig.GetStreamCaps(i, out media, TaskMemPointer); v = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader)); c = (VideoStreamConfigCaps)Marshal.PtrToStructure(TaskMemPointer, typeof(VideoStreamConfigCaps)); modes.Add(new GCSBitmapInfo(v.BmiHeader.Width, v.BmiHeader.Height, c.MaxFrameInterval, c.VideoStandard.ToString(), media)); } Marshal.FreeCoTaskMem(TaskMemPointer); DsUtils.FreeAMMediaType(media); CMB_videoresolutions.DataSource = modes; if (Settings.Instance["video_options"] != "" && CMB_videosources.Text != "") { try { CMB_videoresolutions.SelectedIndex = Settings.Instance.GetInt32("video_options"); } catch { } // ignore bad entries } }
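Applying the resolution the user eventually picks would normally go back through IAMStreamConfig.SetFormat. A sketch, assuming GCSBitmapInfo keeps the AMMediaType it was constructed with in a Media property and that the stream config interface is still available at that point:

var chosen = (GCSBitmapInfo)CMB_videoresolutions.SelectedItem;
hr = videoStreamConfig.SetFormat(chosen.Media);   // push the chosen resolution/frame rate to the device
DsError.ThrowExceptionForHR(hr);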
internal Device(DsDevice dsDevice) { _dsDevice = dsDevice; }
/// <summary> /// adds the BDA renderer filter to the graph by elimination /// then tries to match tuner & render filters if successful then connects them. /// </summary> /// <param name="device">Tuner device</param> /// <param name="currentLastFilter">The current last filter if we add multiple captures</param> protected void AddBDARendererToGraph(DsDevice device, ref IBaseFilter currentLastFilter) { if (!CheckThreadId()) return; if (_filterCapture != null) return; DsDevice[] devices = DsDevice.GetDevicesOfCat(FilterCategory.BDAReceiverComponentsCategory); const string guidBdaMPEFilter = @"\{8e60217d-a2ee-47f8-b0c5-0f44c55f66dc}"; const string guidBdaSlipDeframerFilter = @"\{03884cb6-e89a-4deb-b69e-8dc621686e6a}"; for (int i = 0; i < devices.Length; i++) { if (devices[i].DevicePath.ToUpperInvariant().IndexOf(guidBdaMPEFilter.ToUpperInvariant()) >= 0) continue; if (devices[i].DevicePath.ToUpperInvariant().IndexOf(guidBdaSlipDeframerFilter.ToUpperInvariant()) >= 0) continue; IBaseFilter tmp; const string deviceIdDelimter = @"#{"; Log.Log.WriteFile("dvb: -{0}", devices[i].Name); //Make sure the BDA Receiver Component is on the same physical device as the BDA Source Filter. //This is done by checking the DeviceId and DeviceInstance part of the DevicePath. if (matchDevicePath) { int indx1 = device.DevicePath.IndexOf(deviceIdDelimter); int indx2 = devices[i].DevicePath.IndexOf(deviceIdDelimter); if (indx1 < 0 || indx2 < 0) { continue; } if (device.DevicePath.Remove(indx1) != devices[i].DevicePath.Remove(indx2)) { continue; } } if (DevicesInUse.Instance.IsUsed(devices[i])) continue; int hr; try { hr = _graphBuilder.AddSourceFilterForMoniker(devices[i].Mon, null, devices[i].Name, out tmp); } catch (Exception) { continue; } if (hr != 0) { if (tmp != null) { Log.Log.Error("dvb: Failed to add bda receiver: {0}. Is it in use?", devices[i].Name); _graphBuilder.RemoveFilter(tmp); Release.ComObject("bda receiver", tmp); } continue; } //render [Tuner]->[Capture] hr = _capBuilder.RenderStream(null, null, _filterTuner, null, tmp); if (hr == 0) { Log.Log.WriteFile("dvb: Render [Tuner]->[Capture] AOK"); // render [Capture]->[Inf Tee] _filterCapture = tmp; _captureDevice = devices[i]; DevicesInUse.Instance.Add(devices[i]); Log.Log.WriteFile("dvb: Setting lastFilter to Capture device"); currentLastFilter = _filterCapture; break; } // Try another... Log.Log.WriteFile("dvb: Looking for another bda receiver..."); _graphBuilder.RemoveFilter(tmp); Release.ComObject("bda receiver", tmp); } }
protected DsDevice FindDeviceByName(string name) { var devices = DsDevice.GetDevicesOfCat(FilterCategory.LegacyAmFilterCategory); return(devices.FirstOrDefault(device => device.GetPropBagValue("FriendlyName") == name)); }
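A short usage sketch for the lookup above; the filter name is only an example, and FindDeviceByName returns null when nothing in the LegacyAmFilterCategory matches:

DsDevice dev = FindDeviceByName("LAV Video Decoder");   // example name only
if (dev == null)
{
    Console.WriteLine("Requested filter is not registered on this machine");
}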
/// <summary> /// Initializes a new instance of the <see cref="TvCardDvbBase"/> class. /// </summary> public TvCardDvbBase(DsDevice device) : base(device) { matchDevicePath = true; _lastSignalUpdate = DateTime.MinValue; _mapSubChannels = new Dictionary<int, BaseSubChannel>(); _parameters = new ScanParameters(); _minChannel = -1; _maxChannel = -1; _supportsSubChannels = true; Guid networkProviderClsId = new Guid("{D7D42E5C-EB36-4aad-933B-B4C419429C98}"); useInternalNetworkProvider = FilterGraphTools.IsThisComObjectInstalled(networkProviderClsId); }
public void Capture_video() { if (firstActive) { firstActive = true; CloseInterfaces(); //this.Close(); return; } firstActive = true; if (!DsUtils.IsCorrectDirectXVersion()) { MessageBox.Show(this, "DirectX 8.1 NOT installed!", "DirectShow.NET", MessageBoxButtons.OK, MessageBoxIcon.Stop); this.Close(); return; } if (!DsDev.GetDevicesOfCat(FilterCategory.VideoInputDevice, out capDevices)) { MessageBox.Show(this, "No video capture devices found!", "DirectShow.NET", MessageBoxButtons.OK, MessageBoxIcon.Stop); this.Close(); return; } SaveFileDialog sd = new SaveFileDialog(); sd.FileName = DateTime.Now.ToString("yyMMdd hh-mm-ss"); sd.Title = "Save Video Stream as..."; sd.Filter = "Video file (*.avi)|*.avi"; sd.FilterIndex = 1; sd.InitialDirectory = "C:\\temp\\autobot7\\"; Thread.Sleep(2000); System.Windows.Forms.SendKeys.Send("{ENTER}"); if (sd.ShowDialog() != DialogResult.OK) { this.Close(); return; } fileName = sd.FileName; DsDevice dev = null; if (capDevices.Count == 1) { dev = capDevices[0] as DsDevice; } else { DeviceSelector selector = new DeviceSelector(capDevices); selector.ShowDialog(this); dev = selector.SelectedDevice; } if (dev == null) { this.Close(); return; } if (!StartupVideo(dev.Mon)) { this.Close(); } this.Text = "save" + fileName + ".........."; }
private void SetupGraphInternal(DsDevice dev, VideoFormatHelper.SupportedVideoFormat selectedFormat, ref float iFrameRate, ref int iWidth, ref int iHeight) { // Capture Source (Capture/Video) --> (Input) Sample Grabber (Output) --> (In) Null Renderer IBaseFilter nullRenderer = null; try { // Add the video device int hr = filterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter); DsError.ThrowExceptionForHR(hr); if (capFilter != null) // If any of the default config items are set SetConfigParms(capBuilder, capFilter, selectedFormat, ref iFrameRate, ref iWidth, ref iHeight); IBaseFilter baseGrabFlt = (IBaseFilter)samplGrabber; ConfigureSampleGrabber(samplGrabber); hr = filterGraph.AddFilter(baseGrabFlt, "OccuRec AVI Video Grabber"); DsError.ThrowExceptionForHR(hr); // Connect the video device output to the sample grabber IPin videoCaptureOutputPin = DsHelper.FindPin(capFilter, PinDirection.Output, MediaType.Video, PinCategory.Capture, "Capture"); IPin grabberInputPin = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); hr = filterGraph.Connect(videoCaptureOutputPin, grabberInputPin); DsError.ThrowExceptionForHR(hr); Marshal.ReleaseComObject(videoCaptureOutputPin); Marshal.ReleaseComObject(grabberInputPin); // Add the frame grabber to the graph nullRenderer = (IBaseFilter)new NullRenderer(); hr = filterGraph.AddFilter(nullRenderer, "OccuRec AVI Video Null Renderer"); DsError.ThrowExceptionForHR(hr); // Connect the sample grabber to the null renderer (so frame samples will be coming through) IPin grabberOutputPin = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0); IPin renderedInputPin = DsFindPin.ByDirection(nullRenderer, PinDirection.Input, 0); hr = filterGraph.Connect(grabberOutputPin, renderedInputPin); DsError.ThrowExceptionForHR(hr); Marshal.ReleaseComObject(grabberOutputPin); Marshal.ReleaseComObject(renderedInputPin); } finally { if (nullRenderer != null) Marshal.ReleaseComObject(nullRenderer); } }
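Several of the graphs in this listing call a ConfigureSampleGrabber helper that is never shown. A common, hedged implementation forces the grabber to RGB24 and registers an ISampleGrabberCB callback; the chosen media subtype and the assumption that the surrounding class implements ISampleGrabberCB are mine, not taken from the original source.

using DirectShowLib;

// Minimal sketch: the containing class is assumed to implement ISampleGrabberCB
// (SampleCB/BufferCB), so BufferCB receives a copy of each RGB24 frame buffer.
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    var media = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,      // assumed output format
        formatType = FormatType.VideoInfo
    };

    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);

    // 1 = call BufferCB with a copy of each sample's buffer (0 would call SampleCB).
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}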
static public List <Camera>?GetCameraDevices() { List <Camera>?result = null; string inputFormat = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? "dshow" : RuntimeInformation.IsOSPlatform(OSPlatform.Linux) ? "v4l2" : RuntimeInformation.IsOSPlatform(OSPlatform.OSX) ? "avfoundation" : throw new NotSupportedException($"Cannot find adequate input format - OSArchitecture:[{RuntimeInformation.OSArchitecture}] - OSDescription:[{RuntimeInformation.OSDescription}]"); // FFmpeg doesn't implement avdevice_list_input_sources() for the DShow input format yet. if (inputFormat == "dshow") { result = new List <Camera>(); var dsDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice); for (int i = 0; i < dsDevices.Length; i++) { var dsDevice = dsDevices[i]; if ((dsDevice.Name != null) && (dsDevice.Name.Length > 0)) { Camera camera = new Camera { Name = dsDevice.Name, Path = "video=" + dsDevice.Name }; result.Add(camera); } } } else if (inputFormat == "avfoundation") { result = SIPSorceryMedia.FFmpeg.Interop.MacOS.AvFoundation.GetCameraDevices(); } else { AVInputFormat * avInputFormat = ffmpeg.av_find_input_format(inputFormat); AVDeviceInfoList *avDeviceInfoList = null; ffmpeg.avdevice_list_input_sources(avInputFormat, null, null, &avDeviceInfoList).ThrowExceptionIfError(); int nDevices = avDeviceInfoList->nb_devices; var avDevices = avDeviceInfoList->devices; result = new List <Camera>(); for (int i = 0; i < nDevices; i++) { var avDevice = avDevices[i]; var name = Marshal.PtrToStringAnsi((IntPtr)avDevice->device_description); var path = Marshal.PtrToStringAnsi((IntPtr)avDevice->device_name); if ((name != null) && (name.Length > 0)) { Camera camera = new Camera { Name = name, Path = path }; result.Add(camera); } } ffmpeg.avdevice_free_list_devices(&avDeviceInfoList); } return(result); }
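A hedged usage sketch for the cross-platform enumerator above, assuming GetCameraDevices and the Camera type (with Name and Path properties, as in the snippet) are in scope.

using System;

// Prints whatever cameras the platform-specific branch found.
var cameras = GetCameraDevices();
if (cameras == null || cameras.Count == 0)
{
    Console.WriteLine("No cameras found.");
}
else
{
    foreach (var cam in cameras)
        Console.WriteLine($"{cam.Name} -> {cam.Path}");   // e.g. "video=Integrated Webcam" on dshow
}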
private void OnCameraPlugged(DsDevice device) { CameraCapture camera = new CameraCapture(device, WindowHandle); _camerasAll.Add(camera); _freeCameras.Add(camera); if (CameraPlugged != null) CameraPlugged(this, new CameraEventArgs() { Camera = camera, Available = true }); }
public static DsDevice[] GetAllSystemCameras() { DsDevice[] systemCameras = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice); return systemCameras; }
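A small, hedged console example of enumerating the same category as GetAllSystemCameras and printing the friendly name and device path of each capture device. Disposing each DsDevice is an addition on my part; it releases the COM moniker the wrapper holds.

using System;
using DirectShowLib;

class CameraLister
{
    static void Main()
    {
        DsDevice[] cameras = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
        for (int i = 0; i < cameras.Length; i++)
        {
            Console.WriteLine($"{i}: {cameras[i].Name}  ({cameras[i].DevicePath})");
            cameras[i].Dispose();   // DsDevice wraps a COM moniker; release it when done
        }
    }
}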
public VDevice(DsDevice dev) { m_dev = dev; }
/// <summary> /// Creates the teletext component in the graph. First we try to use the stored informations in the graph /// </summary> /// <param name="graph">The stored graph</param> /// <param name="graphBuilder">The graphbuilder</param> /// <param name="capture">The capture component</param> /// <returns>true, if the building was successful; false otherwise</returns> public bool CreateFilterInstance(Graph graph, IFilterGraph2 graphBuilder, Capture capture) { Log.Log.WriteFile("analog: SetupTeletext()"); Guid guidBaseFilter = typeof(IBaseFilter).GUID; object obj; //find and add tee/sink to sink filter DsDevice[] devices = DsDevice.GetDevicesOfCat(FilterCategory.AMKSSplitter); devices[0].Mon.BindToObject(null, null, ref guidBaseFilter, out obj); _teeSink = (IBaseFilter)obj; int hr = graphBuilder.AddFilter(_teeSink, devices[0].Name); if (hr != 0) { Log.Log.Error("analog:SinkGraphEx.SetupTeletext(): Unable to add tee/sink filter"); return(false); } //connect capture filter -> tee sink filter IPin pin = DsFindPin.ByDirection(_teeSink, PinDirection.Input, 0); hr = graphBuilder.Connect(capture.VBIPin, pin); Release.ComObject(pin); if (hr != 0) { //failed... Log.Log.Error("analog: unable to connect capture->tee/sink"); graphBuilder.RemoveFilter(_teeSink); Release.ComObject(_teeSink); _teeSink = _filterWstDecoder = null; return(false); } if (!string.IsNullOrEmpty(graph.Teletext.Name)) { Log.Log.WriteFile("analog: Using Teletext-Component configuration from stored graph"); devices = DsDevice.GetDevicesOfCat(graph.Teletext.Category); foreach (DsDevice device in devices) { if (device.Name != null && device.Name.Equals(graph.Teletext.Name)) { //found it, add it to the graph Log.Log.Info("analog:Using teletext component - {0}", graph.Teletext.Name); device.Mon.BindToObject(null, null, ref guidBaseFilter, out obj); _filterWstDecoder = (IBaseFilter)obj; hr = graphBuilder.AddFilter(_filterWstDecoder, device.Name); if (hr != 0) { //failed... Log.Log.Error("analog:SinkGraphEx.SetupTeletext(): Unable to add WST Codec filter"); graphBuilder.RemoveFilter(_filterWstDecoder); _filterWstDecoder = null; } break; } } } if (_filterWstDecoder == null) { Log.Log.WriteFile("analog: No stored or invalid graph for Teletext component - Trying to detect"); //find the WST codec filter devices = DsDevice.GetDevicesOfCat(FilterCategory.AMKSVBICodec); foreach (DsDevice device in devices) { if (device.Name != null && device.Name.IndexOf("WST") >= 0) { //found it, add it to the graph Log.Log.Info("analog:Found WST Codec filter"); device.Mon.BindToObject(null, null, ref guidBaseFilter, out obj); _filterWstDecoder = (IBaseFilter)obj; hr = graphBuilder.AddFilter(_filterWstDecoder, device.Name); if (hr != 0) { //failed... 
Log.Log.Error("analog:Unable to add WST Codec filter"); graphBuilder.RemoveFilter(_teeSink); Release.ComObject(_teeSink); _teeSink = _filterWstDecoder = null; return(false); } graph.Teletext.Name = device.Name; graph.Teletext.Category = FilterCategory.AMKSVBICodec; break; } } //Look for VBI Codec for Vista users as Vista doesn't use WST Codec anymore if (_filterWstDecoder == null) { devices = DsDevice.GetDevicesOfCat(FilterCategory.AMKSMULTIVBICodec); foreach (DsDevice device in devices) { if (device.Name != null && device.Name.IndexOf("VBI") >= 0) { //found it, add it to the graph Log.Log.Info("analog:Found VBI Codec filter"); device.Mon.BindToObject(null, null, ref guidBaseFilter, out obj); _filterWstDecoder = (IBaseFilter)obj; hr = graphBuilder.AddFilter(_filterWstDecoder, device.Name); if (hr != 0) { //failed... Log.Log.Error("analog:Unable to add VBI Codec filter"); graphBuilder.RemoveFilter(_teeSink); Release.ComObject(_teeSink); _teeSink = _filterWstDecoder = null; return(false); } graph.Teletext.Name = device.Name; graph.Teletext.Category = FilterCategory.AMKSMULTIVBICodec; break; } } } } if (_filterWstDecoder == null) { Log.Log.Error("analog: unable to find WST Codec or VBI Codec filter"); graphBuilder.RemoveFilter(_teeSink); Release.ComObject(_teeSink); _teeSink = _filterWstDecoder = null; return(false); } //connect tee sink filter-> wst codec filter IPin pinOut = DsFindPin.ByDirection(_teeSink, PinDirection.Output, 0); pin = DsFindPin.ByDirection(_filterWstDecoder, PinDirection.Input, 0); hr = graphBuilder.Connect(pinOut, pin); Release.ComObject(pin); Release.ComObject(pinOut); if (hr != 0) { //failed Log.Log.Error("analog: unable to tee/sink->wst codec"); graphBuilder.RemoveFilter(_filterWstDecoder); graphBuilder.RemoveFilter(_teeSink); Release.ComObject(_filterWstDecoder); Release.ComObject(_teeSink); _teeSink = _filterWstDecoder = null; _teeSink = null; graph.Teletext.Name = null; graph.Teletext.Category = new Guid(); return(false); } //done Log.Log.WriteFile("analog: teletext setup"); if (_filterWstDecoder != null) { Log.Log.WriteFile("analog:connect wst/vbi codec->tsfilesink"); _pinWST_VBI = DsFindPin.ByDirection(_filterWstDecoder, PinDirection.Output, 0); } return(true); }
/// <summary> build the capture graph. </summary> private void SetupGraph(DsDevice dev, string szFileName) { int hr; IBaseFilter capFilter = null; IBaseFilter asfWriter = null; ICaptureGraphBuilder2 capGraph = null; // Get the graphbuilder object m_graphBuilder = (IFilterGraph2) new FilterGraph(); try { // Get the ICaptureGraphBuilder2 capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); // Start building the graph hr = capGraph.SetFiltergraph((IGraphBuilder)m_graphBuilder); DsError.ThrowExceptionForHR(hr); #if DEBUG m_rot = new DsROTEntry(m_graphBuilder); #endif // add the video input device hr = m_graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter); DsError.ThrowExceptionForHR(hr); // create and configure the asfWriter filter asfWriter = ConfigAsf(capGraph, szFileName); DoAnet(asfWriter); // Add the video device hr = m_graphBuilder.AddFilter(capFilter, "Ds.NET Video Capture Device"); DsError.ThrowExceptionForHR(hr); // Connect the capture device to the asf Writer hr = capGraph.RenderStream(null, null, capFilter, null, asfWriter); DsError.ThrowExceptionForHR(hr); // Use this to start/stop the graph m_mediaCtrl = m_graphBuilder as IMediaControl; } finally { if (capFilter != null) { Marshal.ReleaseComObject(capFilter); capFilter = null; } if (asfWriter != null) { Marshal.ReleaseComObject(asfWriter); asfWriter = null; } if (capGraph != null) { Marshal.ReleaseComObject(capGraph); capGraph = null; } } }
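The graph above stores m_mediaCtrl but the start/stop calls are not part of the snippet. A hedged sketch of the usual pattern, assuming m_mediaCtrl is the IMediaControl obtained from the graph builder as shown above.

// Start and stop the capture graph built in SetupGraph.
public void StartRecording()
{
    int hr = m_mediaCtrl.Run();     // graph goes Stopped/Paused -> Running
    DsError.ThrowExceptionForHR(hr);
}

public void StopRecording()
{
    int hr = m_mediaCtrl.Stop();    // stops all filters in the graph
    DsError.ThrowExceptionForHR(hr);
}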
/// <summary> /// destroys the graph and cleans up any resources /// </summary> protected void Decompose() { if (_graphBuilder == null || !CheckThreadId()) return; Log.Log.WriteFile("dvb:Decompose"); if (_epgGrabbing) { if (_epgGrabberCallback != null && _epgGrabbing) { Log.Log.Epg("dvb:cancel epg->decompose"); _epgGrabberCallback.OnEpgCancelled(); } _epgGrabbing = false; } FreeAllSubChannels(); Log.Log.WriteFile(" stop"); // Decompose the graph int counter = 0, hr = 0; FilterState state = FilterState.Running; hr = ((IMediaControl)_graphBuilder).Stop(); while (state != FilterState.Stopped) { System.Threading.Thread.Sleep(100); hr = ((IMediaControl)_graphBuilder).GetState(10, out state); counter++; if (counter >= 30) { if (state != FilterState.Stopped) Log.Log.Error("dvb:graph still running"); break; } } //In case MDPlugs exists then close and release them if (_mdplugs != null) { Log.Log.Info(" Closing MDAPI Plugins"); _mdplugs.Close(); _mdplugs = null; } if (_conditionalAccess != null) { Log.Log.Info(" Disposing ConditionalAccess"); _conditionalAccess.Dispose(); _conditionalAccess = null; } Log.Log.WriteFile(" free..."); _interfaceChannelScan = null; _interfaceEpgGrabber = null; _previousChannel = null; if (_filterMpeg2DemuxTif != null) { Release.ComObject("_filterMpeg2DemuxTif filter", _filterMpeg2DemuxTif); _filterMpeg2DemuxTif = null; } if (_filterNetworkProvider != null) { Release.ComObject("_filterNetworkProvider filter", _filterNetworkProvider); _filterNetworkProvider = null; } if (_infTeeMain != null) { Release.ComObject("main inftee filter", _infTeeMain); _infTeeMain = null; } if (_infTeeSecond != null) { Release.ComObject("second inftee filter", _infTeeSecond); _infTeeSecond = null; } if (_filterTuner != null) { while (Release.ComObject(_filterTuner) > 0) ; _filterTuner = null; } if (_filterCapture != null) { while (Release.ComObject(_filterCapture) > 0) ; _filterCapture = null; } if (_filterWinTvUsb != null) { Log.Log.Info(" Stopping WinTVCI module"); winTvCiHandler.Shutdown(); while (Release.ComObject(_filterWinTvUsb) > 0) ; _filterWinTvUsb = null; } if (_filterTIF != null) { Release.ComObject("TIF filter", _filterTIF); _filterTIF = null; } //if (_filterSectionsAndTables != null) //{ // Release.ComObject("secions&tables filter", _filterSectionsAndTables); _filterSectionsAndTables = null; //} Log.Log.WriteFile(" free pins..."); if (_filterTsWriter as IBaseFilter != null) { Release.ComObject("TSWriter filter", _filterTsWriter); _filterTsWriter = null; } else { Log.Log.Debug("!!! 
Error releasing TSWriter filter (_filterTsWriter as IBaseFilter was null!)"); _filterTsWriter = null; } Log.Log.WriteFile(" free graph..."); if (_rotEntry != null) { _rotEntry.Dispose(); _rotEntry = null; } if (_capBuilder != null) { Release.ComObject("capture builder", _capBuilder); _capBuilder = null; } if (_graphBuilder != null) { FilterGraphTools.RemoveAllFilters(_graphBuilder); Release.ComObject("graph builder", _graphBuilder); _graphBuilder = null; } Log.Log.WriteFile(" free devices..."); if (_deviceWinTvUsb != null) { DevicesInUse.Instance.Remove(_deviceWinTvUsb); _deviceWinTvUsb = null; } if (_tunerDevice != null) { DevicesInUse.Instance.Remove(_tunerDevice); _tunerDevice = null; } if (_captureDevice != null) { DevicesInUse.Instance.Remove(_captureDevice); _captureDevice = null; } if (_tunerStatistics != null) { for (int i = 0; i < _tunerStatistics.Count; i++) { IBDA_SignalStatistics stat = _tunerStatistics[i]; while (Release.ComObject(stat) > 0) ; } _tunerStatistics.Clear(); } Log.Log.WriteFile(" decompose done..."); _graphState = GraphState.Idle; }
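Decompose repeatedly uses a "release until the reference count reaches zero" loop. A hedged utility capturing that pattern with the standard Marshal API follows; it is not the Release.ComObject helper used above, just an equivalent sketch.

using System.Runtime.InteropServices;

static class ComRelease
{
    // Releases every reference the runtime callable wrapper holds, mirroring the
    // "while (Release.ComObject(x) > 0) ;" loops in Decompose. The caller should
    // null the field afterwards.
    public static void ReleaseAll(object comObject)
    {
        if (comObject == null)
            return;
        while (Marshal.ReleaseComObject(comObject) > 0)
        {
            // keep releasing until the RCW's reference count hits zero
        }
    }
}

Marshal.FinalReleaseComObject achieves the same result in a single call, if a one-liner is preferred.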
public abstract void Open(DsDevice cameraDevice);
public void SetupGraph(DsDevice dev, bool runOCR, VideoFormatHelper.SupportedVideoFormat selectedFormat, ref float iFrameRate, ref int iWidth, ref int iHeight) { try { filterGraph = (IFilterGraph2)new FilterGraph(); mediaCtrl = filterGraph as IMediaControl; capBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2(); samplGrabber = (ISampleGrabber)new SampleGrabber(); int hr = capBuilder.SetFiltergraph(filterGraph); DsError.ThrowExceptionForHR(hr); if (Settings.Default.VideoGraphDebugMode) { if (rot != null) { rot.Dispose(); rot = null; } rot = new DsROTEntry(filterGraph); } SetupGraphInternal(dev, selectedFormat, ref iFrameRate, ref iWidth, ref iHeight); // Now that sizes are fixed/known, store the sizes SaveSizeInfo(samplGrabber); crossbar = CrossbarHelper.SetupTunerAndCrossbar(capBuilder, capFilter); latestBitmap = new Bitmap(iWidth, iHeight, PixelFormat.Format24bppRgb); fullRect = new Rectangle(0, 0, latestBitmap.Width, latestBitmap.Height); NativeHelpers.SetupCamera( Settings.Default.CameraModel, iWidth, iHeight, Settings.Default.HorizontalFlip, Settings.Default.VerticalFlip, Settings.Default.IsIntegrating, (float)Settings.Default.MinSignatureDiffRatio, (float)Settings.Default.MinSignatureDiff, Settings.Default.GammaDiff, Settings.Default.ForceNewFrameOnLockedRate, dev.Name, selectedFormat.AsSerialized(), selectedFormat.FrameRate); NativeHelpers.SetupAav(Settings.Default.RecordStatusSectionOnly ? AavImageLayout.StatusSectionOnly : Settings.Default.AavImageLayout, Settings.Default.AavCompression); ocrEnabled = false; string errorMessage; if (runOCR) { OcrConfiguration ocrConfig = OcrSettings.Instance[Settings.Default.SelectedOcrConfiguration]; errorMessage = NativeHelpers.SetupBasicOcrMetrix(ocrConfig); if (errorMessage != null && callbacksObject != null) callbacksObject.OnError(-1, errorMessage); else { NativeHelpers.SetupOcr(ocrConfig); ocrEnabled = true; } } else { errorMessage = NativeHelpers.SetupTimestampPreservation(false, 0, 0); if (errorMessage != null && callbacksObject != null) callbacksObject.OnError(-1, errorMessage); } } catch { CloseResources(); if (callbacksObject != null) callbacksObject.OnError(-1, "Error initialising the camera. The selected video mode may not be supported by the camera."); throw; } }
private IBaseFilter BuildFileCaptureGraph(DsDevice dev, DsDevice compressor, VideoFormatHelper.SupportedVideoFormat selectedFormat, string fileName, ref float iFrameRate, ref int iWidth, ref int iHeight) { // Capture Source (Capture/Video) --> (Input) Sample Grabber (Output) --> (Input) Video Compressor (Output) --> (Input 01/Video/) AVI Mux (Output) --> (In) FileSink IBaseFilter muxFilter = null; IFileSinkFilter fileWriterFilter = null; IBaseFilter compressorFilter = null; try { IBaseFilter capFilter; // Add the video device int hr = filterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter); DsError.ThrowExceptionForHR(hr); if (capFilter != null) { SetConfigParms(capBuilder, capFilter, selectedFormat, ref iFrameRate, ref iWidth, ref iHeight); } IBaseFilter baseGrabFlt = (IBaseFilter)samplGrabber; ConfigureSampleGrabber(samplGrabber); hr = filterGraph.AddFilter(baseGrabFlt, "ASCOM Video Grabber"); DsError.ThrowExceptionForHR(hr); // Connect the video device output to the sample grabber IPin videoCaptureOutputPin = FindPin(capFilter, PinDirection.Output, MediaType.Video, Guid.Empty, "Capture"); IPin smartTeeInputPin = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); hr = filterGraph.Connect(videoCaptureOutputPin, smartTeeInputPin); DsError.ThrowExceptionForHR(hr); Marshal.ReleaseComObject(videoCaptureOutputPin); Marshal.ReleaseComObject(smartTeeInputPin); // Create the file writer and AVI Mux (already connected to each other) hr = capBuilder.SetOutputFileName(MediaSubType.Avi, fileName, out muxFilter, out fileWriterFilter); DsError.ThrowExceptionForHR(hr); if (compressor != null) { // Create the compressor compressorFilter = CreateFilter(FilterCategory.VideoCompressorCategory, compressor.Name); } if (compressorFilter != null) { hr = filterGraph.AddFilter(compressorFilter, "ASCOM Video Compressor"); DsError.ThrowExceptionForHR(hr); // Connect the sample grabber Output pin to the compressor IPin grabberOutputPin = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0); IPin compressorInputPin = DsFindPin.ByDirection(compressorFilter, PinDirection.Input, 0); hr = filterGraph.Connect(grabberOutputPin, compressorInputPin); DsError.ThrowExceptionForHR(hr); Marshal.ReleaseComObject(grabberOutputPin); Marshal.ReleaseComObject(compressorInputPin); // Connect the compressor output to the AVI Mux IPin compressorOutputPin = DsFindPin.ByDirection(compressorFilter, PinDirection.Output, 0); IPin aviMuxVideoInputPin = DsFindPin.ByDirection(muxFilter, PinDirection.Input, 0); hr = filterGraph.Connect(compressorOutputPin, aviMuxVideoInputPin); DsError.ThrowExceptionForHR(hr); Marshal.ReleaseComObject(compressorOutputPin); Marshal.ReleaseComObject(aviMuxVideoInputPin); } else { // Connect the sample grabber Output pin to the AVI Mux IPin grabberOutputPin = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0); IPin aviMuxVideoInputPin = DsFindPin.ByDirection(muxFilter, PinDirection.Input, 0); hr = filterGraph.Connect(grabberOutputPin, aviMuxVideoInputPin); DsError.ThrowExceptionForHR(hr); Marshal.ReleaseComObject(grabberOutputPin); Marshal.ReleaseComObject(aviMuxVideoInputPin); } return(capFilter); } finally { if (fileWriterFilter != null) { Marshal.ReleaseComObject(fileWriterFilter); } if (muxFilter != null) { Marshal.ReleaseComObject(muxFilter); } if (compressorFilter != null) { Marshal.ReleaseComObject(compressorFilter); } } }
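BuildFileCaptureGraph calls CreateFilter(FilterCategory.VideoCompressorCategory, compressor.Name), which is not included in the listing. A hedged sketch of the conventional implementation: enumerate the category, match on friendly name, and bind the device moniker to IBaseFilter. The real helper in this code base may differ.

using System;
using DirectShowLib;

private static IBaseFilter CreateFilter(Guid category, string friendlyName)
{
    Guid iid = typeof(IBaseFilter).GUID;
    foreach (DsDevice device in DsDevice.GetDevicesOfCat(category))
    {
        if (!string.Equals(device.Name, friendlyName, StringComparison.Ordinal))
            continue;

        object filter;
        device.Mon.BindToObject(null, null, ref iid, out filter);   // instantiate the filter
        return (IBaseFilter)filter;
    }
    return null;   // not found; the caller above already tolerates a null compressor filter
}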
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(DsDevice dev, int iWidth, int iHeight, short iBPP) { ISampleGrabber sampGrabber = null; IPin pCaptureOut = null; IPin pRenderIn = null; // Get the graphbuilder object m_FilterGraph = (IFilterGraph2)new FilterGraph(); try { // add the video input device IBaseFilter capFilter; int hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter); DsError.ThrowExceptionForHR(hr); //if (iHeight + iWidth + iBPP > 0) //{ //SetConfigParms(pRaw, iWidth, iHeight, iBPP); //} pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); // Get the SampleGrabber interface sampGrabber = new SampleGrabber() as ISampleGrabber; // Configure the sample grabber IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); // Add the sample grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ares Video Grabber"); DsError.ThrowExceptionForHR(hr); pRenderIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); // Connect the capture pin to the renderer hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); DsError.ThrowExceptionForHR(hr); // Learn the video properties SaveSizeInfo(sampGrabber); // Start the graph IMediaControl mediaCtrl = (IMediaControl)m_FilterGraph; hr = mediaCtrl.Run(); DsError.ThrowExceptionForHR(hr); } finally { if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); } if (pCaptureOut != null) { Marshal.ReleaseComObject(pCaptureOut); } if (pRenderIn != null) { Marshal.ReleaseComObject(pRenderIn); } } }
/// <summary> /// Initializes a new instance of the <see cref="TvCardDVBT"/> class. /// </summary> /// <param name="device">The device.</param> public TvCardDVBT(DsDevice device) : base(device) { _cardType = CardType.DvbT; }
/// <summary> /// Returns the <see cref="CameraInfo"/> for the given <see cref="DsDevice"/>. /// </summary> /// <param name="dev">A <see cref="DsDevice"/> to parse name and capabilities for.</param> /// <returns>The <see cref="CameraInfo"/> for the given device.</returns> private CameraInfo Caps(DsDevice dev) { var camerainfo = new CameraInfo(); // Get the graphbuilder object var graphBuilder = (IFilterGraph2)new FilterGraph(); // Get the ICaptureGraphBuilder2 var capGraph = (ICaptureGraphBuilder2)new CaptureGraphBuilder2(); IBaseFilter capFilter = null; try { int hr = capGraph.SetFiltergraph(graphBuilder); DsError.ThrowExceptionForHR(hr); // Add the video device hr = graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter); // DsError.ThrowExceptionForHR(hr); if (hr != 0) { Console.WriteLine("Error in m_graphBuilder.AddSourceFilterForMoniker(). Could not add source filter. Message: " + DsError.GetErrorText(hr)); return null; } //hr = m_graphBuilder.AddFilter(capFilter, "Ds.NET Video Capture Device"); //DsError.ThrowExceptionForHR(hr); object o = null; DsGuid cat = PinCategory.Capture; DsGuid type = MediaType.Interleaved; DsGuid iid = typeof(IAMStreamConfig).GUID; // Check if Video capture filter is in use hr = capGraph.RenderStream(cat, MediaType.Video, capFilter, null, null); if (hr != 0) { return null; } //hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Interleaved, capFilter, typeof(IAMStreamConfig).GUID, out o); //if (hr != 0) //{ hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o); DsError.ThrowExceptionForHR(hr); //} var videoStreamConfig = o as IAMStreamConfig; int iCount = 0; int iSize = 0; try { if (videoStreamConfig != null) videoStreamConfig.GetNumberOfCapabilities(out iCount, out iSize); } catch (Exception) { //ErrorLogger.ProcessException(ex, false); return null; } pscc = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps))); camerainfo.Name = dev.Name; camerainfo.DirectshowDevice = dev; for (int i = 0; i < iCount; i++) { VideoStreamConfigCaps scc; try { AMMediaType curMedType; if (videoStreamConfig != null) hr = videoStreamConfig.GetStreamCaps(i, out curMedType, pscc); Marshal.ThrowExceptionForHR(hr); scc = (VideoStreamConfigCaps)Marshal.PtrToStructure(pscc, typeof(VideoStreamConfigCaps)); var CSF = new CamSizeFPS(); CSF.FPS = (int)(10000000 / scc.MinFrameInterval); CSF.Height = scc.InputSize.Height; CSF.Width = scc.InputSize.Width; if (!InSizeFpsList(camerainfo.SupportedSizesAndFPS, CSF)) if (ParametersOK(CSF)) camerainfo.SupportedSizesAndFPS.Add(CSF); } catch (Exception) { //ErrorLogger.ProcessException(ex, false); } } Marshal.FreeCoTaskMem(pscc); } finally { if (graphBuilder != null) { Marshal.ReleaseComObject(graphBuilder); } if (capFilter != null) { Marshal.ReleaseComObject(capFilter); } if (capGraph != null) { Marshal.ReleaseComObject(capGraph); } } return camerainfo; }
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(DsDevice dev, int iFrameRate, int iWidth, int iHeight) { int hr; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; ICaptureGraphBuilder2 capGraph = null; // Get the graphbuilder object m_FilterGraph = (IFilterGraph2) new FilterGraph(); m_mediaCtrl = m_FilterGraph as IMediaControl; try { // Get the ICaptureGraphBuilder2 capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); // Get the SampleGrabber interface sampGrabber = (ISampleGrabber) new SampleGrabber(); // Start building the graph hr = capGraph.SetFiltergraph(m_FilterGraph); DsError.ThrowExceptionForHR(hr); // Add the video device hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter); DsError.ThrowExceptionForHR(hr); IBaseFilter baseGrabFlt = (IBaseFilter)sampGrabber; m_capFilter = capFilter; ConfigureSampleGrabber(sampGrabber); // Add the frame grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); // If any of the default config items are set if (iFrameRate + iHeight + iWidth > 0) { SetConfigParms(capGraph, capFilter, iFrameRate, iWidth, iHeight); } hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt); DsError.ThrowExceptionForHR(hr); m_capGraph = capGraph; SaveSizeInfo(sampGrabber); } finally { //if (capFilter != null) //{ // Marshal.ReleaseComObject(capFilter); // capFilter = null; //} if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } //if (capGraph != null) //{ // Marshal.ReleaseComObject(capGraph); // capGraph = null; //} } }
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(DsDevice dev, int iWidth, int iHeight, short iBPP, Control hControl) { int hr; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; IPin pCaptureOut = null; IPin pSampleIn = null; IPin pRenderIn = null; // Get the graphbuilder object m_FilterGraph = new FilterGraph() as IFilterGraph2; try { #if DEBUG m_rot = new DsROTEntry(m_FilterGraph); #endif // add the video input device hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter); DsError.ThrowExceptionForHR(hr); // Find the still pin m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0); // Didn't find one. Is there a preview pin? if (m_pinStill == null) { m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0); } // Still haven't found one. Need to put a splitter in so we have // one stream to capture the bitmap from, and one to display. Ok, we // don't *have* to do it that way, but we are going to anyway. if (m_pinStill == null) { IPin pRaw = null; IPin pSmart = null; // There is no still pin m_VidControl = null; // Add a splitter IBaseFilter iSmartTee = (IBaseFilter)new SmartTee(); try { hr = m_FilterGraph.AddFilter(iSmartTee, "SmartTee"); DsError.ThrowExceptionForHR(hr); // Find the find the capture pin from the video device and the // input pin for the splitter, and connnect them pRaw = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); pSmart = DsFindPin.ByDirection(iSmartTee, PinDirection.Input, 0); hr = m_FilterGraph.Connect(pRaw, pSmart); DsError.ThrowExceptionForHR(hr); // Now set the capture and still pins (from the splitter) m_pinStill = DsFindPin.ByName(iSmartTee, "Preview"); pCaptureOut = DsFindPin.ByName(iSmartTee, "Capture"); // If any of the default config items are set, perform the config // on the actual video device (rather than the splitter) if (iHeight + iWidth + iBPP > 0) { SetConfigParms(pRaw, iWidth, iHeight, iBPP); } } finally { if (pRaw != null) { Marshal.ReleaseComObject(pRaw); } if (pRaw != pSmart) { Marshal.ReleaseComObject(pSmart); } if (pRaw != iSmartTee) { Marshal.ReleaseComObject(iSmartTee); } } } else { // Get a control pointer (used in Click()) m_VidControl = capFilter as IAMVideoControl; pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); // If any of the default config items are set if (iHeight + iWidth + iBPP > 0) { SetConfigParms(m_pinStill, iWidth, iHeight, iBPP); } } // Get the SampleGrabber interface sampGrabber = new SampleGrabber() as ISampleGrabber; // Configure the sample grabber IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); // Get the default video renderer IBaseFilter pRenderer = new VideoRendererDefault() as IBaseFilter; hr = m_FilterGraph.AddFilter(pRenderer, "Renderer"); DsError.ThrowExceptionForHR(hr); pRenderIn = DsFindPin.ByDirection(pRenderer, PinDirection.Input, 0); // Add the sample grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); if (m_VidControl == null) { // Connect the Still pin to the sample grabber hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); DsError.ThrowExceptionForHR(hr); // Connect the capture pin to the renderer hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); DsError.ThrowExceptionForHR(hr); } else { // Connect the capture pin to the renderer hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); 
DsError.ThrowExceptionForHR(hr); // Connect the Still pin to the sample grabber hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); DsError.ThrowExceptionForHR(hr); } // Learn the video properties SaveSizeInfo(sampGrabber); ConfigVideoWindow(hControl); // Start the graph IMediaControl mediaCtrl = m_FilterGraph as IMediaControl; hr = mediaCtrl.Run(); DsError.ThrowExceptionForHR(hr); } finally { if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (pCaptureOut != null) { Marshal.ReleaseComObject(pCaptureOut); pCaptureOut = null; } if (pRenderIn != null) { Marshal.ReleaseComObject(pRenderIn); pRenderIn = null; } if (pSampleIn != null) { Marshal.ReleaseComObject(pSampleIn); pSampleIn = null; } } }
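The still-pin graph above is started but its teardown is not shown. A hedged CloseInterfaces-style sketch, reusing the member names from the snippet (m_FilterGraph, m_pinStill, m_VidControl, m_rot); the ordering follows the usual stop-then-release convention.

using System.Runtime.InteropServices;
using DirectShowLib;

private void CloseInterfaces()
{
    // Stop the graph first so filters stop delivering samples.
    IMediaControl mediaCtrl = m_FilterGraph as IMediaControl;
    if (mediaCtrl != null)
        mediaCtrl.Stop();

#if DEBUG
    if (m_rot != null) { m_rot.Dispose(); m_rot = null; }
#endif

    if (m_pinStill != null) { Marshal.ReleaseComObject(m_pinStill); m_pinStill = null; }
    m_VidControl = null;   // interface cast from the capture filter; released with the graph

    if (m_FilterGraph != null)
    {
        Marshal.ReleaseComObject(m_FilterGraph);
        m_FilterGraph = null;
    }
}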
/// <summary> /// Returns the <see cref="CameraInfo"/> for the given <see cref="DsDevice"/>. /// </summary> /// <param name="dev">A <see cref="DsDevice"/> to parse name and capabilities for.</param> /// <returns>The <see cref="CameraInfo"/> for the given device.</returns> private CameraInfo Caps(DsDevice dev) { var camerainfo = new CameraInfo(); // Get the graphbuilder object var graphBuilder = (IFilterGraph2) new FilterGraph(); // Get the ICaptureGraphBuilder2 var capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); IBaseFilter capFilter = null; try { int hr = capGraph.SetFiltergraph(graphBuilder); DsError.ThrowExceptionForHR(hr); // Add the video device hr = graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter); // DsError.ThrowExceptionForHR(hr); if (hr != 0) { Console.WriteLine("Error in m_graphBuilder.AddSourceFilterForMoniker(). Could not add source filter. Message: " + DsError.GetErrorText(hr)); return(null); } //hr = m_graphBuilder.AddFilter(capFilter, "Ds.NET Video Capture Device"); //DsError.ThrowExceptionForHR(hr); object o = null; DsGuid cat = PinCategory.Capture; DsGuid type = MediaType.Interleaved; DsGuid iid = typeof(IAMStreamConfig).GUID; // Check if Video capture filter is in use hr = capGraph.RenderStream(cat, MediaType.Video, capFilter, null, null); if (hr != 0) { return(null); } //hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Interleaved, capFilter, typeof(IAMStreamConfig).GUID, out o); //if (hr != 0) //{ hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o); DsError.ThrowExceptionForHR(hr); //} var videoStreamConfig = o as IAMStreamConfig; int iCount = 0; int iSize = 0; try { if (videoStreamConfig != null) { videoStreamConfig.GetNumberOfCapabilities(out iCount, out iSize); } } catch (Exception) { //ErrorLogger.ProcessException(ex, false); return(null); } pscc = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps))); camerainfo.Name = dev.Name; camerainfo.DirectshowDevice = dev; for (int i = 0; i < iCount; i++) { VideoStreamConfigCaps scc; try { AMMediaType curMedType; if (videoStreamConfig != null) { hr = videoStreamConfig.GetStreamCaps(i, out curMedType, pscc); } Marshal.ThrowExceptionForHR(hr); scc = (VideoStreamConfigCaps)Marshal.PtrToStructure(pscc, typeof(VideoStreamConfigCaps)); var CSF = new CamSizeFPS(); CSF.FPS = (int)(10000000 / scc.MinFrameInterval); CSF.Height = scc.InputSize.Height; CSF.Width = scc.InputSize.Width; if (!InSizeFpsList(camerainfo.SupportedSizesAndFPS, CSF)) { if (ParametersOK(CSF)) { camerainfo.SupportedSizesAndFPS.Add(CSF); } } } catch (Exception) { //ErrorLogger.ProcessException(ex, false); } } Marshal.FreeCoTaskMem(pscc); } finally { if (graphBuilder != null) { Marshal.ReleaseComObject(graphBuilder); } if (capFilter != null) { Marshal.ReleaseComObject(capFilter); } if (capGraph != null) { Marshal.ReleaseComObject(capGraph); } } return(camerainfo); }
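The capability loop in Caps never frees the AMMediaType returned by GetStreamCaps. A hedged, self-contained variant of that loop follows, releasing the media type each iteration with DsUtils.FreeAMMediaType. Frame intervals are in 100-nanosecond units, which is where the 10,000,000 divisor in the snippets comes from.

using System;
using System.Runtime.InteropServices;
using DirectShowLib;

static void DumpStreamCaps(IAMStreamConfig streamConfig)
{
    int count, size;
    int hr = streamConfig.GetNumberOfCapabilities(out count, out size);
    DsError.ThrowExceptionForHR(hr);

    IntPtr pscc = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps)));
    try
    {
        for (int i = 0; i < count; i++)
        {
            AMMediaType mediaType;
            hr = streamConfig.GetStreamCaps(i, out mediaType, pscc);
            DsError.ThrowExceptionForHR(hr);

            var scc = (VideoStreamConfigCaps)Marshal.PtrToStructure(pscc, typeof(VideoStreamConfigCaps));
            // 100 ns units: 10,000,000 / interval = frames per second.
            double maxFps = 10000000.0 / scc.MinFrameInterval;
            Console.WriteLine($"{scc.InputSize.Width}x{scc.InputSize.Height} @ up to {maxFps:F1} fps");

            DsUtils.FreeAMMediaType(mediaType);   // GetStreamCaps allocates this; free it
        }
    }
    finally
    {
        Marshal.FreeCoTaskMem(pscc);
    }
}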
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(DsDevice dev, int iFrameRate, int iWidth, int iHeight) { int hr; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; ICaptureGraphBuilder2 capGraph = null; // Get the graphbuilder object m_FilterGraph = (IFilterGraph2) new FilterGraph(); m_mediaCtrl = m_FilterGraph as IMediaControl; try { // Get the ICaptureGraphBuilder2 capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); // Get the SampleGrabber interface sampGrabber = (ISampleGrabber) new SampleGrabber(); // Start building the graph hr = capGraph.SetFiltergraph( m_FilterGraph ); DsError.ThrowExceptionForHR( hr ); // Add the video device hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter); DsError.ThrowExceptionForHR( hr ); IBaseFilter baseGrabFlt = (IBaseFilter) sampGrabber; ConfigureSampleGrabber(sampGrabber); // Add the frame grabber to the graph hr = m_FilterGraph.AddFilter( baseGrabFlt, "Ds.NET Grabber" ); DsError.ThrowExceptionForHR( hr ); // If any of the default config items are set if (iFrameRate + iHeight + iWidth > 0) { SetConfigParms(capGraph, capFilter, iFrameRate, iWidth, iHeight); } hr = capGraph.RenderStream( PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt ); DsError.ThrowExceptionForHR( hr ); SaveSizeInfo(sampGrabber); } finally { if (capFilter != null) { Marshal.ReleaseComObject(capFilter); capFilter = null; } if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (capGraph != null) { Marshal.ReleaseComObject(capGraph); capGraph = null; } } }
private (LocalVideoSourceCapability[] caps, InputDeviceState state) GetCapabilities(DsDevice device) { if (_initialLogging) { Log.Information($"Caps {device.Name}: getting"); } var list = new List <LocalVideoSourceCapability>(); IntPtr pCaps = IntPtr.Zero; IFilterGraph2 filterGraph2 = null; IBaseFilter sourceFilter = null; IAMStreamConfig streamConfig = null; object pin = null; InputDeviceState state = InputDeviceState.Ready; try { filterGraph2 = new FilterGraph() as IFilterGraph2; if (filterGraph2 == null) { throw new NotSupportedException("filter2 is null"); } LocalVideoSourceManager.AddCaptureFilter(filterGraph2, device, out sourceFilter); pin = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0); if (pin == null) { pin = sourceFilter; } streamConfig = pin as IAMStreamConfig; if (streamConfig == null) { throw new NotSupportedException("pin is null"); } int count = 0; int size = 0; Checked(() => streamConfig.GetNumberOfCapabilities(out count, out size), "GetNumberOfCapabilities", null); if (count <= 0) { throw new NotSupportedException("This video source does not report capabilities."); } if (size != Marshal.SizeOf(typeof(VideoStreamConfigCaps))) { throw new NotSupportedException("Unable to retrieve video source capabilities. This video source requires a larger VideoStreamConfigCaps structure."); } // Alloc memory for structure pCaps = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps))); for (int i = 0; i < count; i++) { AMMediaType mediaType = null; Checked(() => streamConfig.GetStreamCaps(i, out mediaType, pCaps), "GetStreamCaps", null); VideoStreamConfigCaps caps = (VideoStreamConfigCaps)Marshal.PtrToStructure(pCaps, typeof(VideoStreamConfigCaps)); var format = GetMediaTypeInfo(mediaType, out var height, out var width, out var compression, out var videoInfoHeader, out var videoInfoHeader2); var result = new LocalVideoSourceCapability() { MaxF = GetFps(caps.MinFrameInterval), MinF = GetFps(caps.MaxFrameInterval), Fmt = format, W = width, H = height, }; list.Add(result); } } catch (UnauthorizedAccessException e) { Log.Warning(e, $"Error during retreiving caps for '{device.Name}' (Locked)"); state = InputDeviceState.Locked; } catch (Exception e) { Log.Error(e, $"Error during retreiving caps for '{device.Name}'"); state = InputDeviceState.Failed; } finally { if (pCaps != IntPtr.Zero) { Marshal.FreeCoTaskMem(pCaps); } } try { ReleaseComObject(sourceFilter); ReleaseComObject(filterGraph2); ReleaseComObject(streamConfig); ReleaseComObject(pin); } catch (Exception e) { Log.Error(e, $"ReleaseComObject('{device.Name}') failed"); } if (_initialLogging) { Log.Information($"Caps {device.Name}: {string.Join("; ", list.Select(s => s.ToString()))}"); } return(list.ToArray(), state); }
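GetCapabilities converts frame intervals with a GetFps helper that is not shown. A hedged sketch, based on the 100-nanosecond units DirectShow uses: the minimum interval corresponds to the maximum frame rate, matching the MaxF/MinF assignments above.

// Hedged sketch of the GetFps helper used above.
private static double GetFps(long frameInterval100ns)
{
    return frameInterval100ns > 0 ? 10000000.0 / frameInterval100ns : 0.0;
}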
/// <summary> /// Disposes the TvAudio component /// </summary> public void Dispose() { if (mode == TvAudioVariant.Normal) { if (_filterTvAudioTuner != null) { while (Release.ComObject(_filterTvAudioTuner) > 0) {} _filterTvAudioTuner = null; } if (_audioDevice != null) { DevicesInUse.Instance.Remove(_audioDevice); _audioDevice = null; } } }
/// <summary> /// Gets information about the video capture devices currently connected to the system. /// </summary> /// <returns></returns> private List<DsDevice> GetCameraList() { DsDevice[] list = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice); return list.ToList(); }
public void InitDevice(DsDevice device, int iWidth, int iHeight) { int hr; object camDevice; Guid iid = typeof(IBaseFilter).GUID; device.Mon.BindToObject(null, null, ref iid, out camDevice); IBaseFilter camFilter = camDevice as IBaseFilter; m_CameraControl = camFilter as IAMCameraControl; m_VideoControl = camFilter as IAMVideoProcAmp; ISampleGrabber sampGrabber = null; graphBuilder = (IGraphBuilder)new FilterGraph(); //Create the Capture Graph Builder ICaptureGraphBuilder2 captureGraphBuilder = null; captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2(); // Attach the filter graph to the capture graph hr = captureGraphBuilder.SetFiltergraph(this.graphBuilder); DsError.ThrowExceptionForHR(hr); //Add the Video input device to the graph hr = graphBuilder.AddFilter(camFilter, "WebCam" + deviceNumber); DsError.ThrowExceptionForHR(hr); // Configure the sample grabber sampGrabber = new SampleGrabber() as ISampleGrabber; ConfigureSampleGrabber(sampGrabber); IBaseFilter sampGrabberBF = sampGrabber as IBaseFilter; //Add the Video compressor filter to the graph hr = graphBuilder.AddFilter(sampGrabberBF, "SampleGrabber" + deviceNumber); DsError.ThrowExceptionForHR(hr); IBaseFilter nullRender = new NullRenderer() as IBaseFilter; graphBuilder.AddFilter(nullRender, "NullRenderer" + deviceNumber); InitResolution(captureGraphBuilder, camFilter, iWidth, iHeight); hr = captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, camDevice, sampGrabberBF, nullRender); DsError.ThrowExceptionForHR(hr); SaveSizeInfo(sampGrabber); Marshal.ReleaseComObject(sampGrabber); Marshal.ReleaseComObject(captureGraphBuilder); }
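InitDevice captures IAMCameraControl and IAMVideoProcAmp but does not exercise them. A hedged sketch of reading a property range and applying a manual value through m_CameraControl from the snippet above; exposure is chosen only as an example property.

using System;
using DirectShowLib;

// Query the supported exposure range, clamp the requested value, and apply it manually.
public void SetManualExposure(int value)
{
    if (m_CameraControl == null)
        return;

    int min, max, step, defaultValue;
    CameraControlFlags capsFlags;
    int hr = m_CameraControl.GetRange(CameraControlProperty.Exposure,
                                      out min, out max, out step, out defaultValue, out capsFlags);
    DsError.ThrowExceptionForHR(hr);

    int clamped = Math.Max(min, Math.Min(max, value));
    hr = m_CameraControl.Set(CameraControlProperty.Exposure, clamped, CameraControlFlags.Manual);
    DsError.ThrowExceptionForHR(hr);
}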
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(DsDevice dev, AMMediaType media) { int hr; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; ICaptureGraphBuilder2 capGraph = null; // Get the graphbuilder object m_FilterGraph = (IFilterGraph2) new FilterGraph(); m_mediaCtrl = m_FilterGraph as IMediaControl; try { // Get the ICaptureGraphBuilder2 capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); /* * // check for crossbar * var capDevices2 = DsDevice.GetDevicesOfCat(FilterCategory.AMKSCrossbar); * if (capDevices2.Length > 0) * { * * IBaseFilter cross; * hr = m_FilterGraph.AddSourceFilterForMoniker(capDevices2[0].Mon, null, "crossbar", out cross); * ISpecifyPropertyPages pProp = cross as ISpecifyPropertyPages; * * //Get the name of the filter from the FilterInfo struct * FilterInfo filterInfo; * hr = cross.QueryFilterInfo(out filterInfo); * DsError.ThrowExceptionForHR(hr); * * // Get the propertypages from the property bag * DsCAUUID caGUID; * hr = pProp.GetPages(out caGUID); * DsError.ThrowExceptionForHR(hr); * * //Create and display the OlePropertyFrame * object oDevice = (object)cross; * hr = NativeMethods.OleCreatePropertyFrame(IntPtr.Zero, 0, 0, filterInfo.achName, 1, ref oDevice, caGUID.cElems, caGUID.pElems, 0, 0, IntPtr.Zero); * DsError.ThrowExceptionForHR(hr); * * Marshal.ReleaseComObject(oDevice); * * //IAMCrossbar crossbar2 = cross as IAMCrossbar; * //int inputPinCount, outputPinCount; * //crossbar2.get_PinCounts(out inputPinCount, out outputPinCount); * //crossbar2.Route(0, (int)PhysicalConnectorType.Video_Composite); * cross = null; * }*/ // Get the SampleGrabber interface sampGrabber = (ISampleGrabber) new SampleGrabber(); // Start building the graph hr = capGraph.SetFiltergraph(m_FilterGraph); DsError.ThrowExceptionForHR(hr); // Add the video device hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter); DsError.ThrowExceptionForHR(hr); // add video crossbar // thanks to Andrew Fernie - this is to get tv tuner cards working IAMCrossbar crossbar = null; object o; hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMCrossbar).GUID, out o); if (hr >= 0) { crossbar = (IAMCrossbar)o; int oPin, iPin; int ovLink, ivLink; ovLink = ivLink = 0; crossbar.get_PinCounts(out oPin, out iPin); int pIdxRel; PhysicalConnectorType tp; for (int i = 0; i < iPin; i++) { crossbar.get_CrossbarPinInfo(true, i, out pIdxRel, out tp); if (tp == PhysicalConnectorType.Video_Composite) { ivLink = i; } } for (int i = 0; i < oPin; i++) { crossbar.get_CrossbarPinInfo(false, i, out pIdxRel, out tp); if (tp == PhysicalConnectorType.Video_VideoDecoder) { ovLink = i; } } try { crossbar.Route(ovLink, ivLink); o = null; } catch { throw new Exception("Failed to get IAMCrossbar"); } } //add AVI Decompressor IBaseFilter pAVIDecompressor = (IBaseFilter) new AVIDec(); hr = m_FilterGraph.AddFilter(pAVIDecompressor, "AVI Decompressor"); DsError.ThrowExceptionForHR(hr); // IBaseFilter baseGrabFlt = (IBaseFilter)sampGrabber; ConfigureSampleGrabber(sampGrabber); // Add the frame grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); SetConfigParms(capGraph, capFilter, media); hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, pAVIDecompressor, baseGrabFlt); if (hr < 0) { hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt); } DsError.ThrowExceptionForHR(hr); 
SaveSizeInfo(sampGrabber); } finally { if (capFilter != null) { Marshal.ReleaseComObject(capFilter); capFilter = null; } if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (capGraph != null) { Marshal.ReleaseComObject(capGraph); capGraph = null; } } }