// Builds a capture graph for the first video input device, renders its first
// output pin, and starts the graph running.
void BuildGraph()
{
    int hr;
    IBaseFilter ppFilter;
    DsDevice [] devs;
    // Create the filter graph and register it in the Running Object Table so
    // it can be inspected with GraphEdit while running.
    IGraphBuilder graphBuilder = new FilterGraph() as IGraphBuilder;
    m_ROT = new DsROTEntry(graphBuilder);
    IFilterGraph2 ifg2 = graphBuilder as IFilterGraph2;
    // NOTE(review): devs[0] assumes at least one video input device is
    // present — throws IndexOutOfRangeException otherwise; confirm callers.
    devs = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    DsDevice dev = devs[0];
    hr = ifg2.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out ppFilter);
    DsError.ThrowExceptionForHR(hr);
    // Keep the dropped-frames interface for later statistics queries.
    m_idf = ppFilter as IAMDroppedFrames;
    // Render the capture filter's first output pin (builds the rest of the graph).
    IPin IPinOut = DsFindPin.ByDirection(ppFilter, PinDirection.Output, 0);
    hr = ifg2.Render(IPinOut);
    DsError.ThrowExceptionForHR(hr);
    // Start the graph so frames begin flowing.
    m_imc = graphBuilder as IMediaControl;
    hr = m_imc.Run();
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Builds an audio capture graph from the first audio input device, renders
/// its audio stream, and caches the renderer's IAMAudioRendererStats.
/// </summary>
private void Setup()
{
    int hr;
    IFilterGraph2 ifg = new FilterGraph() as IFilterGraph2;
    m_imc = ifg as IMediaControl;
    // Register the graph in the Running Object Table for GraphEdit debugging.
    DsROTEntry rot = new DsROTEntry(ifg);
    IBaseFilter pFilter;
    // NOTE(review): devs[0] assumes at least one audio input device exists.
    DsDevice[] devs = DsDevice.GetDevicesOfCat(FilterCategory.AudioInputDevice);
    hr = ifg.AddSourceFilterForMoniker(devs[0].Mon, null, devs[0].Name, out pFilter);
    DsError.ThrowExceptionForHR(hr);
    ICaptureGraphBuilder2 icgb2 = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
    hr = icgb2.SetFiltergraph(ifg);
    DsError.ThrowExceptionForHR(hr);
    //IPin pPin = DsFindPin.ByDirection((IBaseFilter)o, PinDirection.Output, 0);
    hr = icgb2.RenderStream(null, MediaType.Audio, pFilter, null, null);
    DsError.ThrowExceptionForHR(hr);
    IBaseFilter pAudio;
    hr = ifg.FindFilterByName("Audio Renderer", out pAudio);
    // Fix: the original ignored this HRESULT; a failed lookup would silently
    // leave m_ibn null and surface later as a NullReferenceException.
    DsError.ThrowExceptionForHR(hr);
    m_ibn = pAudio as IAMAudioRendererStats;
}
// Serializes a FilterGraph to an XML <graph> element with one <filter> child
// per filter (type attribute + encoded properties).
// NOTE(review): the implementation is compiled out (#if TODO) — as built,
// this method always throws NotImplementedException.
static XmlElement encodeFilterGraph(XmlDocument doc, FilterGraph graph)
{
#if TODO
    XmlElement graph_e = doc.CreateElement("graph");
    graph_e.SetAttribute("name", graph.getName());
    foreach (Filter f in graph.getFilters())
    {
        XmlElement filter_e = doc.CreateElement("filter");
        //XmlAttributeCollection attrs = new XmlAttributeCollection(filter_e);
        //attrs["type"] = f.getFilterType();
        XmlAttribute xmlAttrib = doc.CreateAttribute("type");
        xmlAttrib.Value = f.getFilterType();
        filter_e.Attributes.Append(xmlAttrib);
        Properties props = f.getProperties();
        foreach (Property i in props)
        {
            filter_e.AppendChild(encodeProperty(doc, i));
        }
        graph_e.AppendChild(filter_e);
    }
    return(graph_e);
#endif
    throw new NotImplementedException();
}
/// <summary>
/// Renders and runs (or stops and unrenders) the given audio capture graph.
/// </summary>
/// <param name="acg">Audio capture graph to control; must not be null.</param>
/// <param name="playIt">true to render and run the graph, false to stop and unrender it.</param>
public void RenderAndRunAudio(AudioCaptureGraph acg, bool playIt)
{
    if (acg == null)
    {
        // Fix: the original passed the message to the single-string ctor,
        // which treats it as the *parameter name*, producing the misleading
        // text "Value cannot be null. Parameter name: Can't render...".
        throw new ArgumentNullException("acg",
            "Can't render an audio graph without an audio capture device");
    }
    if (playIt)
    {
        Log("Playing audio (render and run graph) - " + acg.Source.FriendlyName);
        // Re-add the renderer in case they changed it since the last
        // time they played the audio
        acg.AddRenderer((FilterInfo)cboSpeakers.SelectedItem);
        acg.Run();
    }
    else
    {
        Log("Stop audio (stop and unrender graph) - " + acg.Source.FriendlyName);
        acg.Stop();
        acg.RemoveRenderer();
    }
    Log(FilterGraph.Debug(acg.IFilterGraph));
}
// Builds a playback graph for "foo.avi", hooks graph events to this window,
// and starts playback.
void BuildGraph()
{
    int hr;
    IGraphBuilder graphBuilder = new FilterGraph() as IGraphBuilder;
    // Register in the Running Object Table for GraphEdit debugging.
    m_ROT = new DsROTEntry(graphBuilder);
    IFilterGraph2 ifg2 = graphBuilder as IFilterGraph2;
    hr = graphBuilder.RenderFile("foo.avi", null);
    DsError.ThrowExceptionForHR(hr);
    // Get a ICaptureGraphBuilder2
    ICaptureGraphBuilder2 icgb = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
    hr = icgb.SetFiltergraph((IGraphBuilder)graphBuilder);
    DsError.ThrowExceptionForHR(hr);
    // Route graph events (completion, errors) to this window as WM_GRAPHNOTIFY.
    m_mediaEventEx = graphBuilder as IMediaEventEx;
    hr = m_mediaEventEx.SetNotifyWindow(this.Handle, WM_GRAPHNOTIFY, IntPtr.Zero);
    DsError.ThrowExceptionForHR(hr);
    // NOTE(review): the purpose of this delay is not evident from the code —
    // presumably lets the graph settle before Run; confirm it is still needed.
    Thread.Sleep(500);
    m_imc = graphBuilder as IMediaControl;
    hr = m_imc.Run();
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Renders and runs (or stops and unrenders) the given video capture graph.
/// </summary>
/// <param name="vcg">Video capture graph to control; must not be null.</param>
/// <param name="playIt">true to render and run, false to stop and unrender.</param>
public void RenderAndRunVideo(VideoCaptureGraph vcg, bool playIt)
{
    // Fix: guard against null for parity with RenderAndRunAudio; the original
    // would throw a bare NullReferenceException at vcg.Source below.
    if (vcg == null)
    {
        throw new ArgumentNullException("vcg",
            "Can't render a video graph without a video capture device");
    }
    if (playIt)
    {
        Log("Playing video (render and run graph) - " + vcg.Source.FriendlyName);
        vcg.RenderLocal();
        VideoCapability.DisableDXVA(vcg.FilgraphManager);
        // Set device name in the video window and turn off the system menu
        IVideoWindow iVW = (IVideoWindow)vcg.FilgraphManager;
        iVW.Caption = vcg.Source.FriendlyName;
        iVW.WindowStyle &= ~0x00080000; // WS_SYSMENU
        vcg.Run();
    }
    else
    {
        Log("Stop video (stop and unrender graph) - " + vcg.Source.FriendlyName);
        vcg.Stop();
        vcg.RemoveRenderer();
        // I have no idea why the video window stays up but this fixes it
        GC.Collect();
    }
    Log(FilterGraph.Debug(vcg.IFilterGraph));
}
/// <summary>
/// Creates the camera (capture graph): builds the video graph, restores any
/// persisted camera/video settings, and adds a video compressor if configured.
/// </summary>
public void ActivateCamera()
{
    Log(string.Format(CultureInfo.CurrentCulture, "\r\nInitializing camera - {0}, {1}", fi.DisplayName, fi.Moniker));
    // Get camera up and running
    CreateVideoGraph(fi);
    RestoreCameraSettings();
    RestoreVideoSettings();
    LogCurrentMediaType(cg.Source);
    // Add compressor if necessary
    AddVideoCompressor();
    try
    {
        //This was observed to fail for some non-standard compressors
        LogCurrentMediaType(cg.Compressor);
    }
    catch (Exception ex)
    {
        Log("Failed to find compressor current media type: " + ex.Message);
    }
    // Log all the filters in the graph
    Log(FilterGraph.Debug(cg.IFilterGraph));
}
/// <summary>
/// Returns available frame sizes with RGB color system for device moniker.
/// </summary>
/// <param name="moniker">Moniker (device identification) of camera.</param>
/// <returns>List of frame sizes with RGB color system of device.</returns>
public static FrameSize[] GetFrameSizeList(IMoniker moniker)
{
    // Build a throwaway graph just to host the device's source filter.
    IFilterGraph2 graph = new FilterGraph() as IFilterGraph2;
    IBaseFilter sourceFilter = null;
    try
    {
        // Bind the device into the graph, then query its frame sizes.
        int hr = graph.AddSourceFilterForMoniker(moniker, null, "Source Filter", out sourceFilter);
        DsError.ThrowExceptionForHR(hr);
        return GetFrameSizesAvailable(sourceFilter);
    }
    finally
    {
        // Release temporary COM objects regardless of success.
        SafeReleaseComObject(graph);
        graph = null;
        SafeReleaseComObject(sourceFilter);
        sourceFilter = null;
    }
}
// Builds a capture-to-AVI-file graph for the first video input device.
// Fix: the original discarded the HRESULT returned by SetFiltergraph.
void BuildGraph()
{
    int hr;
    IBaseFilter ppbf, ppFilter;
    ICaptureGraphBuilder2 icgb2;
    DsDevice[] devs;
    IGraphBuilder graphBuilder = new FilterGraph() as IGraphBuilder;
    icgb2 = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
    hr = icgb2.SetFiltergraph(graphBuilder);
    DsError.ThrowExceptionForHR(hr);
    // Register in the Running Object Table for GraphEdit debugging.
    DsROTEntry ds = new DsROTEntry(graphBuilder);
    IFilterGraph2 ifg2 = graphBuilder as IFilterGraph2;
    // NOTE(review): devs[0] assumes at least one video input device exists.
    devs = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    DsDevice dev = devs[0];
    hr = ifg2.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out ppFilter);
    DsError.ThrowExceptionForHR(hr);
    // Use the ICaptureGraphBuilder2 to add the avi mux
    hr = icgb2.SetOutputFileName(MediaSubType.Avi, FileName, out ppbf, out m_ppsink);
    DsError.ThrowExceptionForHR(hr);
    hr = icgb2.RenderStream(PinCategory.Capture, MediaType.Video, ppFilter, null, ppbf);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Constructs a compiler for one grid cell: stores the compilation parameters
/// and installs a SmartReadCallback on the filter environment so terrain
/// reads during compilation go through a shared cache.
/// </summary>
/// <param name="_cell_id">Identifier of the cell being compiled.</param>
/// <param name="_abs_output_uri">Absolute URI of the compiled output.</param>
/// <param name="layer">Feature layer providing the source data.</param>
/// <param name="graph">Filter graph used to build the scene graph.</param>
/// <param name="_min_range">Minimum visibility range.</param>
/// <param name="_max_range">Maximum visibility range.</param>
/// <param name="env">Filter environment for this compilation.</param>
/// <param name="_packager">Resource packager.</param>
/// <param name="_archive">Optional output archive (may be null).</param>
/// <param name="user_data">User-defined data passed to the compiler (may be null).</param>
public CellCompiler(
    string _cell_id,
    string _abs_output_uri,
    FeatureLayer layer,
    FilterGraph graph,
    float _min_range,
    float _max_range,
    FilterEnv env,
    ResourcePackager _packager,
    osgDB.Archive _archive,   // default: null
    object user_data)         // default: null
    : base(_abs_output_uri, layer, graph, env)
{
    packager = _packager;
    cell_id = _cell_id;
    abs_output_uri = _abs_output_uri;
    min_range = _min_range;
    max_range = _max_range;
    archive = _archive;
    //TODO: maybe the FilterEnv should just have one of these by default.
    SmartReadCallback smart = new SmartReadCallback();
    smart.setMinRange(min_range);
    env.setTerrainReadCallback(smart);
    // Fix: the original referenced the undefined identifier '_user_data';
    // the parameter is named 'user_data' (leftover from the C++ port).
    setUserData(user_data);
    output_status = CellCompiler.OutputStatus.OUTPUT_UNKNOWN;
}
/// <summary>
/// Reads the current value of a video processing amplifier property
/// (brightness, contrast, ...) from the given capture device.
/// </summary>
/// <param name="dsDevice">Capture device to query.</param>
/// <param name="prop">Property to read.</param>
/// <returns>The current property value, or 0 if the query failed.</returns>
public int GetVideoControl(DsDevice dsDevice, VideoProcAmpProperty prop)
{
    IFilterGraph2 filterGraph = new FilterGraph() as IFilterGraph2;
    IBaseFilter capFilter = null;
    int retVal = 0;
    try
    {
        // add the video input device
        int hr = filterGraph.AddSourceFilterForMoniker(dsDevice.Mon, null, "Source Filter", out capFilter);
        DsError.ThrowExceptionForHR(hr);
        IAMVideoProcAmp videoControl = capFilter as IAMVideoProcAmp;
        int min, max, step, default_val;
        VideoProcAmpFlags flag = 0;
        videoControl.GetRange(prop, out min, out max, out step, out default_val, out flag);
        videoControl.Get(prop, out retVal, out flag);
    }
    catch (Exception ex)
    {
        // Best-effort: a device without IAMVideoProcAmp lands here; keep retVal = 0.
        Console.WriteLine(ex.Message);
    }
    finally
    {
        // Fix: release the temporary COM objects; the original leaked both,
        // which can keep the capture device held open.
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
        }
        if (filterGraph != null)
        {
            Marshal.ReleaseComObject(filterGraph);
        }
    }
    return(retVal);
}
// Reads media information (duration / width / height) from the given file by
// rendering it into a temporary DirectShow graph, then appends a FileInfo
// record to the ContentsInfo.
// NOTE(review): the graph COM objects created here are never released —
// presumably relies on finalization; confirm whether explicit release is needed.
public void SetVideoInfo(ContentsInfo _ci, System.IO.FileInfo fi)
{
    // Media information (play time / width / height)
    FilterGraph graphFilter = new FilterGraph();
    IGraphBuilder graphBuilder;
    IMediaPosition mediaPos;
    double length = 0.0;
    int Height, Width = 0;
    graphBuilder = (IGraphBuilder)graphFilter;
    graphBuilder.RenderFile(fi.FullName, null);
    mediaPos = (IMediaPosition)graphBuilder;
    mediaPos.get_Duration(out length);
    // Query the rendered video window for its natural dimensions.
    IVideoWindow info;
    info = (IVideoWindow)graphBuilder;
    info.get_Height(out Height);
    info.get_Width(out Width);
    _ci.FileInfo.Add(new ContentsManager.Model.FileInfo()
    {
        Name = fi.Name,
        Size = fi.Length,
        Form = fi.Extension,
        Registrant = "",
        RegistrantDate = fi.CreationTime,
        Width = Width,
        Height = Height,
        Duration = length
    });
}
/// <summary>
/// Enables or disables auto-focus on the given capture device.
/// </summary>
/// <param name="dsDevice">Capture device to configure.</param>
/// <param name="bEnable">true for automatic focus, false for manual focus.</param>
public void AutoFocus(DsDevice dsDevice, bool bEnable)
{
    IFilterGraph2 filterGraph = new FilterGraph() as IFilterGraph2;
    IBaseFilter capFilter = null;
    try
    {
        // add the video input device
        int hr = filterGraph.AddSourceFilterForMoniker(dsDevice.Mon, null, "Source Filter", out capFilter);
        DsError.ThrowExceptionForHR(hr);
        IAMCameraControl cameraControl = capFilter as IAMCameraControl;
        // NOTE(review): the 250 focus value is only honored in Manual mode —
        // in Auto mode the driver ignores it; confirm intent.
        if (bEnable)
        {
            cameraControl.Set(CameraControlProperty.Focus, 250, CameraControlFlags.Auto);
        }
        else
        {
            cameraControl.Set(CameraControlProperty.Focus, 250, CameraControlFlags.Manual);
        }
    }
    catch (Exception ex)
    {
        // Best-effort: devices without IAMCameraControl land here.
        Console.WriteLine(ex.Message);
    }
    finally
    {
        // Fix: release the temporary COM objects; the original leaked both,
        // which can keep the capture device held open.
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
        }
        if (filterGraph != null)
        {
            Marshal.ReleaseComObject(filterGraph);
        }
    }
}
/// <summary>
/// Returns the capability strings (resolutions) of the given capture device's
/// first output pin.
/// </summary>
/// <param name="dsDevice">Capture device to query.</param>
/// <returns>Available video info strings for the device.</returns>
static public string[] GetCameraCapability(DsDevice dsDevice)
{
    int hr;
    IFilterGraph2 filterGraph = new FilterGraph() as IFilterGraph2;
    IBaseFilter capFilter = null;
    IPin pPin = null;
    string[] listVideoInfo;
    try
    {
        // add the video input device
        hr = filterGraph.AddSourceFilterForMoniker(dsDevice.Mon, null, "Source Filter", out capFilter);
        DsError.ThrowExceptionForHR(hr);
        pPin = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);
        //listResolution = GetResolutionsAvailable( pPin ).ToList();
        listVideoInfo = GetResolutionsAvailable(pPin);
    }
    finally
    {
        // Fix: the original called ReleaseComObject(pPin) unconditionally —
        // if AddSourceFilterForMoniker threw, pPin was still null and the
        // finally block raised ArgumentNullException, masking the real error.
        if (pPin != null)
        {
            Marshal.ReleaseComObject(pPin);
            pPin = null;
        }
        // Fix: also release the filter and graph, which the original leaked.
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
        }
        if (filterGraph != null)
        {
            Marshal.ReleaseComObject(filterGraph);
        }
    }
    return(listVideoInfo);
}
// Builds a capture graph for the first video input device and obtains its
// IAMStreamConfig via ICaptureGraphBuilder2.FindInterface.
// Fix: the original discarded the HRESULT returned by SetFiltergraph.
void BuildGraph()
{
    int hr;
    IBaseFilter ppFilter;
    DsDevice[] devs;
    IGraphBuilder graphBuilder = new FilterGraph() as IGraphBuilder;
    // Register in the Running Object Table for GraphEdit debugging.
    m_ROT = new DsROTEntry(graphBuilder);
    IFilterGraph2 ifg2 = graphBuilder as IFilterGraph2;
    // NOTE(review): devs[0] assumes at least one video input device exists.
    devs = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    DsDevice dev = devs[0];
    hr = ifg2.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out ppFilter);
    DsError.ThrowExceptionForHR(hr);
    ICaptureGraphBuilder2 captureGraphBuilder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
    hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
    DsError.ThrowExceptionForHR(hr);
    object o;
    hr = captureGraphBuilder.FindInterface(null, null, ppFilter, typeof(IAMStreamConfig).GUID, out o);
    DsError.ThrowExceptionForHR(hr);
    m_asc = o as IAMStreamConfig;
    //m_imc = graphBuilder as IMediaControl;
    //hr = m_imc.Run();
    //DsError.ThrowExceptionForHR(hr);
}
// Opens the named video capture device and caches its IAMCameraControl and
// IKsPropertySet interfaces for pan/tilt/zoom control.  Throws if the device
// is missing, lacks the control interfaces, or (for Relative PTZ) does not
// support the relative pan/tilt KS properties.
private PTZDevice(string name, PTZType type)
{
    var devices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    var device = devices.Where(d => d.Name == name).FirstOrDefault();
    _device = device;
    _type = type;
    if (_device == null) throw new ApplicationException(String.Format("Couldn't find device named {0}!", name));
    // Bind the device into a throwaway graph just to obtain its control interfaces.
    IFilterGraph2 graphBuilder = new FilterGraph() as IFilterGraph2;
    IBaseFilter filter = null;
    IMoniker i = _device.Mon as IMoniker;
    graphBuilder.AddSourceFilterForMoniker(i, null, _device.Name, out filter);
    _camControl = filter as IAMCameraControl;
    _ksPropertySet = filter as IKsPropertySet;
    if (_camControl == null) throw new ApplicationException("Couldn't get ICamControl!");
    if (_ksPropertySet == null) throw new ApplicationException("Couldn't get IKsPropertySet!");
    //TODO: Add Absolute
    // Relative PTZ requires driver support for both relative pan and tilt.
    if (type == PTZType.Relative
        && !(SupportFor(KSProperties.CameraControlFeature.KSPROPERTY_CAMERACONTROL_PAN_RELATIVE)
             && SupportFor(KSProperties.CameraControlFeature.KSPROPERTY_CAMERACONTROL_TILT_RELATIVE)))
    {
        throw new NotSupportedException("This camera doesn't appear to support Relative Pan and Tilt");
    }
    //TODO: Do I through NotSupported when methods are called or throw them now?
    //TODO: Do I check for Zoom or ignore if it's not there?
    InitZoomRanges();
}
/// <summary>
/// Adds a source-data entry to the level-of-detail queue.  Levels of detail
/// are interpreted from front to back.
/// </summary>
/// <param name="layer">Feature layer from which to read source data; ignored when null.</param>
/// <param name="graph">Filter graph to use to build the scene graph.</param>
/// <param name="env_props">Properties for the filter environment.</param>
/// <param name="packager">Resource packager for this level.</param>
/// <param name="min_range">Minimum visibility range of this level of detail.</param>
/// <param name="max_range">Maximum visibility range of this level of detail.</param>
/// <param name="replace">True to replace the earlier detail levels; false to join them.</param>
/// <param name="depth">Level of detail depth (0 = top level).</param>
/// <param name="user_data">User-defined data to pass to the cell compiler.</param>
public void push(FeatureLayer layer, FilterGraph graph, Properties env_props, ResourcePackager packager, float min_range, float max_range, bool replace, uint depth, object user_data)
{
    if (layer == null)
    {
        return;
    }
    // Grow the automatic area of interest to cover this layer's extent.
    if (aoi_auto.isValid())
    {
        aoi_auto.expandToInclude(layer.getExtent());
    }
    else
    {
        aoi_auto = layer.getExtent();
    }
    // Record the level-of-detail definition and invalidate the cached grid.
    MapLayerLevelOfDetail lod = new MapLayerLevelOfDetail(
        layer, graph, env_props, packager,
        min_range, max_range, replace, depth, user_data);
    levels.Add(lod);
    grid_valid = false;
}
// Builds a graph containing only an AsyncReader file source loaded with
// sFileName, and creates the ICaptureGraphBuilder2 under test.  The caller
// owns the returned graph.
private IFilterGraph2 BuildGraph(string sFileName)
{
    int hr;
    IFilterGraph2 graphBuilder = new FilterGraph() as IFilterGraph2;
    // Register in the Running Object Table for GraphEdit debugging.
    m_dsrot = new DsROTEntry(graphBuilder);
    try
    {
        // Get the file source filter
        m_Source = new AsyncReader() as IBaseFilter;
        // Add it to the graph
        hr = graphBuilder.AddFilter(m_Source, "Ds.NET AsyncReader");
        Marshal.ThrowExceptionForHR(hr);
        // Set the file name
        IFileSourceFilter fsf = m_Source as IFileSourceFilter;
        hr = fsf.Load(sFileName, null);
        Marshal.ThrowExceptionForHR(hr);
        // Get the interface we are testing
        m_icgb2 = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
    }
    catch
    {
        // Don't leak the graph if setup fails part-way through.
        Marshal.ReleaseComObject(graphBuilder);
        throw;
    }
    return(graphBuilder);
}
// Builds a StreamBufferSource graph over a sample .wtv recording and hooks up
// the spanning-event plumbing for the SBE global event tests.
private void Config2()
{
    int hr;
    IFilterGraph2 fg;
    ISBE2Crossbar iSBE2Crossbar;
    fg = new FilterGraph() as IFilterGraph2;
    // Register in the Running Object Table for GraphEdit debugging.
    DsROTEntry rot = new DsROTEntry(fg);
    IBaseFilter streamBuffer = (IBaseFilter) new StreamBufferSource();
    m_se = streamBuffer as ISBE2SpanningEvent;
    m_mc = fg as IMediaControl;
    hr = fg.AddFilter(streamBuffer, "SBS");
    DsError.ThrowExceptionForHR(hr);
    // Load the recorded-TV sample into the stream buffer source.
    IFileSourceFilter fs = streamBuffer as IFileSourceFilter;
    hr = fs.Load(@"C:\Users\Public\Recorded TV\Sample Media\win7_scenic-demoshort_raw.wtv", null);
    DsError.ThrowExceptionForHR(hr);
    iSBE2Crossbar = streamBuffer as ISBE2Crossbar;
    hr = iSBE2Crossbar.EnableDefaultMode(CrossbarDefaultFlags.None);
    DsError.ThrowExceptionForHR(hr);
    HookupGraphEventService(fg);
    RegisterForSBEGlobalEvents();
}
/// <summary>
/// Enumerates the stream capabilities of the selected device's capture pin,
/// then binds and returns the device as an IBaseFilter.
/// </summary>
/// <returns>The bound capture filter for the selected device.</returns>
public IBaseFilter GetVideo()
{
    IBaseFilter baseDevice;
    var filterGraph = new FilterGraph() as IFilterGraph2;
    filterGraph.AddSourceFilterForMoniker(selectedDevice.Mon, null, selectedDevice.Name, out baseDevice);
    IPin pin = DsFindPin.ByCategory(baseDevice, PinCategory.Capture, 0);
    var streamConfig = pin as IAMStreamConfig;
    AMMediaType media;
    int iC = 0, iS = 0;
    streamConfig.GetNumberOfCapabilities(out iC, out iS);
    // Scratch buffer for the capability structure returned by GetStreamCaps.
    IntPtr ptr = Marshal.AllocCoTaskMem(iS);
    try
    {
        for (int i = 0; i < iC; i++)
        {
            // NOTE(review): each AMMediaType returned here should presumably
            // be freed (DsUtils.FreeAMMediaType) — confirm and fix separately.
            streamConfig.GetStreamCaps(i, out media, ptr);
            VideoInfoHeader v;
            v = new VideoInfoHeader();
            Marshal.PtrToStructure(media.formatPtr, v);
        }
    }
    finally
    {
        // Fix: the original leaked this unmanaged buffer on every call.
        Marshal.FreeCoTaskMem(ptr);
    }
    Guid iid = typeof(IBaseFilter).GUID;
    object source;
    selectedDevice.Mon.BindToObject(null, null, ref iid, out source);
    return((IBaseFilter)source);
}
/// <summary>
/// Removes the capability and, when the base removal succeeds, tears down the
/// associated filter graph and event monitor.
/// </summary>
public override bool RemoveCapability(ICapability capability)
{
    bool ret = base.RemoveCapability(capability);
    if (ret)
    {
        if (fgm != null)
        {
            // Stop the graph, unregister it from the Running Object Table,
            // and drop all of its filters before letting it go.
            fgm.Stop();
            FilterGraph.RemoveFromRot(rotID);
            FilterGraph.RemoveAllFilters(fgm);
            fgm = null;
        }
        if (fgmEventMonitor != null)
        {
            // Unhook the event handler so the monitor can be collected.
            fgmEventMonitor.FgmEvent -= new FgmEventMonitor.FgmEventHandler(FgmEvent);
            fgmEventMonitor.Dispose();
            fgmEventMonitor = null;
        }
        wmf = null;
    }
    return(ret);
}
/// <summary>
/// Creation of the fgm and the adding / removing of filters needs to happen on
/// the same thread.  So make sure it all happens on the UI thread.
/// Builds a fresh playback graph for the RTP stream on receipt of the first
/// frame: RtpSource -> rendered chain -> video window hosted in the form.
/// </summary>
private void _RtpStream_FirstFrameReceived()
{
    lock (fgmLock)
    {
        // Throw away any previous graph before building the new one.
        DisposeFgm();
        Debug.Assert(fgm == null);
        // Create the DirectShow filter graph manager
        fgm = new FilgraphManagerClass();
        IGraphBuilder iGB = (IGraphBuilder)fgm;
        rotID = FilterGraph.AddToRot((IGraphBuilder)fgm);
        // Source filter that pulls media from the RTP stream.
        IBaseFilter bfSource = RtpSourceClass.CreateInstance();
        ((MSR.LST.MDShow.Filters.IRtpSource)bfSource).Initialize(rtpStream);
        iGB.AddFilter(bfSource, "RtpSource");
        iGB.Render(Filter.GetPin(bfSource, _PinDirection.PINDIR_OUTPUT, Guid.Empty, Guid.Empty, false, 0));
        DisableDXVA(fgm);
        // Render the video inside of the form
        iVW = (IVideoWindow)fgm;
        // Get the correct ratio to use for the video stretching
        // I would expect the fgm to always be castable to this, but I simply don't trust DShow
        IBasicVideo iBV = fgm as IBasicVideo;
        if (iBV != null)
        {
            int vidWidth, vidHeight;
            iBV.GetVideoSize(out vidWidth, out vidHeight);
            vidSrcRatio = (double)vidHeight / (double)vidWidth;
        }
        // Remove the border from the default DShow renderer UI
        int ws = WindowStyle;
        ws = ws & ~(0x00800000); // Remove WS_BORDER
        ws = ws & ~(0x00400000); // Remove WS_DLGFRAME
        WindowStyle = ws;
        iVW = null;
        // Clear the "remote video stopped" flag and refresh the form's UI.
        uiState &= ~(int)FAudioVideo.UIState.RemoteVideoStopped;
        if (form != null)
        {
            ((FAudioVideo)form).UpdateVideoUI(uiState);
        }
        // FirstFrameReceived interprets fgmState as the *desired* state for the fgm
        // Because ResumePlayingVideo won't actually start if the state is already
        // Running, we change it to Stopped so that it will start
        if (IsPlaying && fgmState == FilterGraph.State.Running)
        {
            fgmState = FilterGraph.State.Stopped;
            ResumePlayingVideo();
        }
    }
}
// Builds a StreamBufferSource graph over a sample .wtv recording, runs it
// briefly, then grabs the ISBE2Crossbar stream enumerator for later tests.
private void Config()
{
    IFilterGraph2 fg;
    ISBE2Crossbar iSBE2Crossbar;
    fg = new FilterGraph() as IFilterGraph2;
    IBaseFilter streamBuffer = (IBaseFilter) new StreamBufferSource();
    int hr;
    hr = fg.AddFilter(streamBuffer, "SBS");
    DsError.ThrowExceptionForHR(hr);
    // Load the recorded-TV sample into the stream buffer source.
    IFileSourceFilter fs = streamBuffer as IFileSourceFilter;
    hr = fs.Load(@"C:\Users\Public\Recorded TV\Sample Media\win7_scenic-demoshort_raw.wtv", null);
    DsError.ThrowExceptionForHR(hr);
    iSBE2Crossbar = streamBuffer as ISBE2Crossbar;
    hr = iSBE2Crossbar.EnableDefaultMode(CrossbarDefaultFlags.None);
    DsError.ThrowExceptionForHR(hr);
    IMediaControl mc = fg as IMediaControl;
    hr = mc.Run();
    DsError.ThrowExceptionForHR(hr);
    // Give the graph a moment to start before enumerating streams.
    System.Threading.Thread.Sleep(10);
    hr = iSBE2Crossbar.EnumStreams(out m_es);
    DsError.ThrowExceptionForHR(hr);
    Debug.Assert(m_es != null);
}
// Renders an MP3 file into a playback graph and dumps every filter's name
// and vendor info to the console, then waits for Enter.
public static void Run()
{
    var fg = FilterGraph.Create();
    // NOTE(review): mc and me are obtained but never used in this method.
    var mc = (IMediaControl)fg;
    var me = (IMediaEvent)fg;
    fg.RenderFile("c:\\test.mp3", IntPtr.Zero);
    IEnumFilters ief;
    var filters = new IBaseFilter[8];
    fg.EnumFilters(out ief);
    int fetched;
    // NOTE(review): only the first 8 filters are examined; a larger graph
    // would need repeated Next() calls.
    ief.Next(8, filters, out fetched);
    for (int i = 0; i < fetched; i++)
    {
        var ibf = filters[i];
        FilterInfo fi;
        ibf.QueryFilterInfo(out fi);
        string vendorInfo = "";
        try
        {
            ibf.QueryVendorInfo(out vendorInfo);
        }
        catch (Exception)
        {
            // Best-effort: some filters do not implement QueryVendorInfo.
        }
        Console.WriteLine(fi.Name + " " + vendorInfo);
    }
    Console.ReadLine();
}
// Exercises buffer negotiation on m_ibn: suggests allocator properties,
// builds a render graph around m_ibf, then queries the properties back.
private void TestEm()
{
    int hr;
    AllocatorProperties prop = new AllocatorProperties();
    Guid grf = typeof(IBaseFilter).GUID;
    // Request 12 x ~3MB buffers, byte-aligned, no prefix.
    prop.cbAlign = 1;
    prop.cbBuffer = 3000000;
    prop.cbPrefix = 0;
    prop.cBuffers = 12;
    hr = m_ibn.SuggestAllocatorProperties(prop);
    DsError.ThrowExceptionForHR(hr);
    IGraphBuilder ifg = new FilterGraph() as IGraphBuilder;
    // Register in the Running Object Table for GraphEdit debugging.
    DsROTEntry rot = new DsROTEntry(ifg);
    hr = ifg.AddFilter(m_ibf, "Device");
    DsError.ThrowExceptionForHR(hr);
    ICaptureGraphBuilder2 icgb2 = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
    hr = icgb2.SetFiltergraph(ifg);
    DsError.ThrowExceptionForHR(hr);
    hr = icgb2.RenderStream(null, null, m_ibf, null, null);
    DsError.ThrowExceptionForHR(hr);
    // Returns E_FAIL for all my devices, so I wrote my own filter
    // that implements it for a test. Note: You CANNOT use "out" here.
    hr = m_ibn.GetAllocatorProperties(prop);
    //DsError.ThrowExceptionForHR(hr);
    rot.Dispose();
}
// Exercises IAMExtDevice-style Calibrate without hardware: renders a dummy
// playback graph just to obtain an event handle, then calls Calibrate twice.
private void TestCalibrate()
{
    //No hardware
    IMediaEventSink mes;
    IFilterGraph2 FilterGraph;
    int hr;
    FilterGraph = (IFilterGraph2) new FilterGraph();
    hr = FilterGraph.RenderFile("foo.avi", null);
    DsError.ThrowExceptionForHR(hr);
    mes = (IMediaEventSink)FilterGraph;
    IMediaEvent pEvent = (IMediaEvent)FilterGraph;
    int ret = 0;
    IntPtr eventHandle = IntPtr.Zero;
    hr = pEvent.GetEventHandle(out eventHandle);
    DsError.ThrowExceptionForHR(hr);
    hr = _extDevice.Calibrate(eventHandle, ExtTransportEdit.Active, out ret);
    //DsError.ThrowExceptionForHR(hr);
    //E_NOTIMPL , but atleast it's called.
    hr = _extDevice.Calibrate(eventHandle, 0, out ret);
    //DsError.ThrowExceptionForHR(hr);
    //E_NOTIMPL , but atleast it's called.
}
/// <summary>
/// Returns available resolutions with RGB color system for device moniker
/// </summary>
/// <param name="moniker">Moniker (device identification) of camera.</param>
/// <returns>List of resolutions with RGB color system of device</returns>
public static ResolutionList GetResolutionList(IMoniker moniker)
{
    int hr;
    ResolutionList ResolutionsAvailable = null; //new ResolutionList();
    // Get the graphbuilder object
    IFilterGraph2 filterGraph = new FilterGraph() as IFilterGraph2;
    IBaseFilter capFilter = null;
    try
    {
        // add the video input device
        hr = filterGraph.AddSourceFilterForMoniker(moniker, null, "Source Filter", out capFilter);
        DsError.ThrowExceptionForHR(hr);
        ResolutionsAvailable = GetResolutionsAvailable(capFilter);
    }
    finally
    {
        // Always release the temporary COM objects, even on failure.
        SafeReleaseComObject(filterGraph);
        filterGraph = null;
        SafeReleaseComObject(capFilter);
        capFilter = null;
    }
    return(ResolutionsAvailable);
}
// NOTE(review): this method is truncated/incomplete — the last two statements
// are missing semicolons and an assignment value, so it does not compile as-is.
// NOTE(review): the source filter is added to 'filtergraph' but the sample
// grabber to 'm_graph' — two different graphs; looks like a bug to confirm.
private void buildGraph()
{
    int hr = 0;
    IBaseFilter captureFilter;
    AMMediaType pmt4 = new AMMediaType();
    IFilterGraph2 filtergraph = new FilterGraph() as IFilterGraph2;
    ICaptureGraphBuilder2 pBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
    hr = pBuilder.SetFiltergraph(m_graph);
    DsError.ThrowExceptionForHR(hr);
    filtergraph.AddSourceFilterForMoniker(m_capDev.Mon, null, m_capDev.Name, out captureFilter);
    m_graph.AddFilter(captureFilter, "CapFilter");
    DsError.ThrowExceptionForHR(hr);
    // Insert a sample grabber with a still-frame callback.
    IBaseFilter pSampleGrabber = (IBaseFilter)Activator.CreateInstance(typeof(SampleGrabber));
    hr = m_graph.AddFilter(pSampleGrabber, "SampleGrabber");
    DsError.ThrowExceptionForHR(hr);
    hr = ((ISampleGrabber)pSampleGrabber).SetCallback(new StillGrabberCallBack(), 0);
    // Request 24-bit RGB video samples.
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;
    VideoInfoHeader format = new VideoInfoHeader();
    format.SrcRect = new DsRect();
    format.TargetRect = new DsRect();
    format.BmiHeader = new BitmapInfoHeader()
    format.BmiHeader.Size =
}
// Connects a camera (the *second* video input device) through the resizer
// filter m_rs using ICaptureGraphBuilder2.RenderStream.
private void Config2()
{
    Guid g = typeof(IBaseFilter).GUID;
    DsDevice[] devs;
    IBaseFilter ibf, ibf2;
    object o;
    int hr;
    IFilterGraph2 ifg = new FilterGraph() as IFilterGraph2;
    ibf = m_rs as IBaseFilter;
    hr = ifg.AddFilter(ibf, "resizer");
    DsError.ThrowExceptionForHR(hr);
    // NOTE(review): devs[1] hard-codes the second capture device — throws if
    // fewer than two devices are installed; confirm this is intentional.
    devs = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    devs[1].Mon.BindToObject(null, null, ref g, out o);
    ibf2 = o as IBaseFilter;
    hr = ifg.AddFilter(ibf2, "camera");
    DsError.ThrowExceptionForHR(hr);
    ICaptureGraphBuilder2 cgb = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
    hr = cgb.SetFiltergraph(ifg);
    DsError.ThrowExceptionForHR(hr);
    // Render camera -> resizer.
    hr = cgb.RenderStream(null, null, ibf2, null, ibf);
    DsError.ThrowExceptionForHR(hr);
}
// Builds a preview graph for the first video input device and caches the
// video renderer's IQualityControl interface.
// Fix: the original ignored the HRESULTs from AddSourceFilterForMoniker and
// FindFilterByName, so failures surfaced later as NullReferenceExceptions.
private void Setup()
{
    int hr;
    IBaseFilter ibf;
    IFilterGraph2 ifg = new FilterGraph() as IFilterGraph2;
    m_imc = ifg as IMediaControl;
    // Register in the Running Object Table for GraphEdit debugging.
    DsROTEntry rot = new DsROTEntry(ifg);
    // NOTE(review): devs[0] assumes at least one video input device exists.
    DsDevice[] devs = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    hr = ifg.AddSourceFilterForMoniker(devs[0].Mon, null, devs[0].Name, out ibf);
    DsError.ThrowExceptionForHR(hr);
    ICaptureGraphBuilder2 icgb2 = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
    hr = icgb2.SetFiltergraph(ifg);
    DsError.ThrowExceptionForHR(hr);
    hr = icgb2.RenderStream(null, null, ibf, null, null);
    DsError.ThrowExceptionForHR(hr);
    IBaseFilter pFilter;
    hr = ifg.FindFilterByName("Video Renderer", out pFilter);
    DsError.ThrowExceptionForHR(hr);
    m_qc = pFilter as IQualityControl;
    rot.Dispose();
}
/// <summary>
/// Wires the filter-registration dependencies (graph, property provider, and
/// filter provider) into this instance.
/// </summary>
public FilterRegister(FilterGraph filterGraph, IFilterPropertyProvider filterPropertyProvider, IFilterProvider filterProvider)
{
    this.filterGraph = filterGraph;
    this.filterPropertyProvider = filterPropertyProvider;
    this.filterProvider = filterProvider;
}
/// <summary>
/// Initialises DirectShow interfaces: creates one FilterGraph instance and
/// caches its builder/control/event/seek/position interfaces in fields.
/// </summary>
private void InitInterfaces()
{
    fg = new FilterGraph();
    // All of these are views onto the same underlying COM object.
    gb = (IGraphBuilder)fg;
    mc = (IMediaControl)fg;
    me = (IMediaEventEx)fg;
    ms = (IMediaSeeking)fg;
    mp = (IMediaPosition)fg;
}
/// <summary>
/// Closes DirectShow interfaces: stops playback, unhooks the event window,
/// clears the cached interfaces, and releases the filter graph.
/// </summary>
private void CloseInterfaces()
{
    // Fix: the original guarded only on 'me' but then called mc.Stop(),
    // which throws NullReferenceException when mc is already null.
    if (mc != null)
    {
        DsError.ThrowExceptionForHR(mc.Stop());
    }
    if (me != null)
    {
        //0x00008001 = WM_GRAPHNOTIFY
        DsError.ThrowExceptionForHR(me.SetNotifyWindow(IntPtr.Zero, 0x00008001, IntPtr.Zero));
    }
    mc = null;
    me = null;
    gb = null;
    ms = null;
    mp = null;
    if (fg != null)
    {
        Marshal.ReleaseComObject(fg);
    }
    fg = null;
}
// Tears down the graph: stops playback, unhooks the notify window, and
// releases the muxer and filter graph COM objects.
void CloseInterfaces()
{
    // Fix: the original guarded only on 'me' but called mc.Stop() first,
    // which throws NullReferenceException when mc is already null.
    if (mc != null)
    {
        hr = mc.Stop();
        DsError.ThrowExceptionForHR(hr);
    }
    if (me != null)
    {
        hr = me.SetNotifyWindow(IntPtr.Zero, WM_GRAPHNOTIFY, IntPtr.Zero);
        DsError.ThrowExceptionForHR(hr);
    }
    mc = null;
    me = null;
    gb = null;
    if (matroska_mux != null)
    {
        Marshal.ReleaseComObject(matroska_mux);
    }
    matroska_mux = null;
    if (fg != null)
    {
        Marshal.ReleaseComObject(fg);
    }
    fg = null;
}
/**
 * Pushes source data onto the level of detail queue. Levels of detail are
 * interpreted from front to back.
 *
 * @param layer
 *      Feature layer from which to read source data; ignored when null
 * @param graph
 *      Filter graph to use to build scene graph
 * @param min_range
 *      Minimum visibility range of this level of detail
 * @param max_range
 *      Maximum visibility range of this level of detail
 * @param replace
 *      If true, this detail level will replace the ones before it. If false, it
 *      will join the scene graph without removing the previous levels.
 * @param depth
 *      Level of detail depth (0 = top level)
 * @param user_data
 *      User-defined data to pass to the cell compiler
 */
public void push(FeatureLayer layer, FilterGraph graph, Properties env_props, ResourcePackager packager, float min_range, float max_range, bool replace, uint depth, object user_data)
{
    if (layer != null)
    {
        // update the automatic AOI:
        if (!aoi_auto.isValid())
            aoi_auto = layer.getExtent();
        else
            aoi_auto.expandToInclude(layer.getExtent());
        // store the LOD definition:
        levels.Add(new MapLayerLevelOfDetail(layer, graph, env_props, packager, min_range, max_range, replace, depth, user_data));
        // the cached tiling grid no longer matches the level set
        grid_valid = false;
    }
}
/// <summary>
/// Returns available resolutions with RGB color system for device moniker.
/// </summary>
/// <param name="moniker">Moniker (device identification) of camera.</param>
/// <returns>List of resolutions with RGB color system of device.</returns>
public static ResolutionList GetResolutionList(IMoniker moniker)
{
    // Build a throwaway graph just to host the device's source filter.
    IFilterGraph2 graph = new FilterGraph() as IFilterGraph2;
    IBaseFilter sourceFilter = null;
    try
    {
        // Bind the device into the graph, then query its resolutions.
        int hr = graph.AddSourceFilterForMoniker(moniker, null, "Source Filter", out sourceFilter);
        DsError.ThrowExceptionForHR(hr);
        return GetResolutionsAvailable(sourceFilter);
    }
    finally
    {
        // Release temporary COM objects regardless of success.
        SafeReleaseComObject(graph);
        graph = null;
        SafeReleaseComObject(sourceFilter);
        sourceFilter = null;
    }
}
// Serializes a FilterGraph to an XML <graph> element with one <filter> child
// per filter (type attribute + encoded properties).
// NOTE(review): the implementation is compiled out (#if TODO) — as built,
// this method always throws NotImplementedException.
static XmlElement encodeFilterGraph(XmlDocument doc, FilterGraph graph)
{
#if TODO
    XmlElement graph_e = doc.CreateElement("graph");
    graph_e.SetAttribute("name", graph.getName());
    foreach (Filter f in graph.getFilters())
    {
        XmlElement filter_e = doc.CreateElement("filter");
        //XmlAttributeCollection attrs = new XmlAttributeCollection(filter_e);
        //attrs["type"] = f.getFilterType();
        XmlAttribute xmlAttrib = doc.CreateAttribute("type");
        xmlAttrib.Value = f.getFilterType();
        filter_e.Attributes.Append(xmlAttrib);
        Properties props = f.getProperties();
        foreach (Property i in props)
        {
            filter_e.AppendChild(encodeProperty(doc, i));
        }
        graph_e.AppendChild(filter_e);
    }
    return graph_e;
#endif
    throw new NotImplementedException();
}
// Deserializes a FilterGraph from an XML <graph> element, creating each
// <filter> child via the filter registry and applying its <property>
// children.  Returns null when 'e' is null.
static FilterGraph decodeFilterGraph(XmlElement e, Project proj)
{
    FilterGraph graph = null;
    if (e != null)
    {
        string name = e.GetAttribute("name"); //TODO: assert name
#if TODO_DANI
        // Graph-inheritance support from the C++ original, not yet ported.
        string parent_name = e.GetAttribute("inherits");
        if (!string.IsNullOrEmpty(parent_name))
        {
            FilterGraph parent_graph = proj.getFilterGraph(parent_name);
            if (parent_graph == null)
            {
                //osgGIS.notify( osg.WARN )
                //    << "Parent graph \"" << parent_name << "\" not found for graph \""
                //    << name << "\"" << std.endl;
            }
            else
            {
                graph = (FilterGraph)parent_graph; //TODO...
            }
        }
        else
        {
#endif
        graph = new FilterGraph();
        graph.setName(name);
        XmlNodeList filter_els = e.GetElementsByTagName("filter");
        foreach (XmlNode i in filter_els)
        {
            XmlElement f_e = (XmlElement)i;
            string type = f_e.GetAttribute("type");
            Filter f = MogreGis.Registry.instance().createFilterByType(type);
            // try again with "Filter" suffix
            if (f == null && !type.EndsWith("Filter", false, System.Globalization.CultureInfo.InvariantCulture))
                f = MogreGis.Registry.instance().createFilterByType(type + "Filter");
            if (f != null)
            {
                // Apply each <property name=... value=...> to the new filter.
                XmlNodeList prop_els = f_e.GetElementsByTagName("property");
                foreach (XmlNode k in prop_els)
                {
                    XmlElement k_e = (XmlElement)k;
                    string name_ = k_e.GetAttribute("name");
                    string value_ = k_e.GetAttribute("value");
                    f.setProperty(new Property(name_, value_));
                }
                graph.appendFilter(f);
            }
        }
#if TODO_DANI
        }
#endif
    }
    return graph;
    // NOTE(review): everything below is unreachable dead code after the
    // return above — candidate for removal.
#if TODO_DANI
#endif
    throw new NotImplementedException();
}
// Stops the conversion graph, detaches event notification, clears the cached
// interfaces, and releases the DirectShow COM objects.
// Fix: the original guarded only on _mediaEvent but called
// _mediaControl.Stop() first, which throws NullReferenceException when
// _mediaControl is already null.
private void StopConversion()
{
    if (_mediaControl != null)
    {
        _mediaControl.Stop();
    }
    if (_mediaEvent != null)
    {
        _mediaEvent.SetNotifyWindow(IntPtr.Zero, WM_GRAPHNOTIFY, IntPtr.Zero);
    }
    _mediaSeeking = null;
    _mediaControl = null;
    _mediaEvent = null;
    _graphBuilder = null;
    if (_mpegFilter != null)
    {
        Marshal.ReleaseComObject(_mpegFilter);
    }
    _mpegFilter = null;
    if (_filterGraph != null)
    {
        Marshal.ReleaseComObject(_filterGraph);
    }
    _filterGraph = null;
    labelProgress.Text = "Complete";
}
// --------------------- Private Methods -----------------------

/// <summary>
/// Create a new filter graph and add filters (devices, compressors,
/// misc), but leave the filters unconnected. Call renderGraph()
/// to connect the filters.
/// </summary>
protected void createGraph()
{
    Guid cat;
    Guid med;
    int hr;

    // Ensure required properties are set
    if ( videoDevice == null && audioDevice == null )
        throw new ArgumentException( "The video and/or audio device have not been set. Please set one or both to valid capture devices.\n" );

    // Skip if we are already created
    if ( (int)graphState < (int)GraphState.Created )
    {
        // Garbage collect, ensure that previous filters are released
        GC.Collect();

        // Make a new filter graph
#if DSHOWNET
        // Make a new filter graph
        graphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

        // Get the Capture Graph Builder
        Guid clsid = Clsid.CaptureGraphBuilder2;
        Guid riid = typeof(ICaptureGraphBuilder2).GUID;
        captureGraphBuilder = (ICaptureGraphBuilder2)DsBugWO.CreateDsInstance(ref clsid, ref riid);
#else
        FilterGraph graph = new FilterGraph();
        graphBuilder = (IGraphBuilder)graph;

        // Get the Capture Graph Builder
        captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
#endif

        // Link the CaptureGraphBuilder to the filter graph
        hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
        if (hr < 0) Marshal.ThrowExceptionForHR(hr);

        // Add the graph to the Running Object Table so it can be
        // viewed with GraphEdit
#if DEBUG
#if DSHOWNET
        DsROT.AddGraphToRot(graphBuilder, out rotCookie);
#else
        rotCookie = new DsROTEntry(graphBuilder);
#endif
#endif

        // Get the video device and add it to the filter graph
        if ( VideoDevice != null )
        {
            videoDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( VideoDevice.MonikerString );
            hr = graphBuilder.AddFilter( videoDeviceFilter, "Video Capture Device" );
            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Get the audio device and add it to the filter graph
        if ( AudioDevice != null )
        {
            audioDeviceFilter = (IBaseFilter) Marshal.BindToMoniker( AudioDevice.MonikerString );
            hr = graphBuilder.AddFilter( audioDeviceFilter, "Audio Capture Device" );
            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Get the video compressor and add it to the filter graph
        if ( VideoCompressor != null )
        {
            videoCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( VideoCompressor.MonikerString );
            hr = graphBuilder.AddFilter( videoCompressorFilter, "Video Compressor" );
            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Get the audio compressor and add it to the filter graph
        if ( AudioCompressor != null )
        {
            audioCompressorFilter = (IBaseFilter) Marshal.BindToMoniker( AudioCompressor.MonikerString );
            hr = graphBuilder.AddFilter( audioCompressorFilter, "Audio Compressor" );
            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }

        // Retrieve the stream control interface for the video device
        // FindInterface will also add any required filters
        // (WDM devices in particular may need additional
        // upstream filters to function).

        // Try looking for an interleaved media type
        object o;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        Guid iid = typeof(IAMStreamConfig).GUID;
#if DSHOWNET
        hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
        hr = captureGraphBuilder.FindInterface( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
        if ( hr != 0 )
        {
            // If not found, try looking for a video media type
            med = MediaType.Video;
#if DSHOWNET
            hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
            hr = captureGraphBuilder.FindInterface( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
            if ( hr != 0 )
                o = null;
        }
        videoStreamConfig = o as IAMStreamConfig;

        // #if NEWCODE
        // Start of new Brian's Low code
        // Retrieve the stream control interface for the video device
        // FindInterface will also add any required filters
        // (WDM devices in particular may need additional
        // upstream filters to function).

        // Try looking for an interleaved media type
        // (same lookup as above, but on the Preview pin category)
        o = null;
        cat = PinCategory.Preview;
        med = MediaType.Interleaved;
        iid = typeof(IAMStreamConfig).GUID;
#if DSHOWNET
        hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
        hr = captureGraphBuilder.FindInterface( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
        if ( hr != 0 )
        {
            // If not found, try looking for a video media type
            med = MediaType.Video;
#if DSHOWNET
            hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
            hr = captureGraphBuilder.FindInterface( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
            if ( hr != 0 )
                o = null;
        }
        this.previewStreamConfig = o as IAMStreamConfig;
        // End of new Brian's Low code

        // Initialize the DirectX helper only when at least one stream
        // config interface was found; dispose it again if it cannot
        // locate usable media data.
        if( (this.videoStreamConfig != null)|| (this.previewStreamConfig != null) )
        {
            this.dxUtils = new DxUtils();
            bool result = this.dxUtils.InitDxUtils(this.videoDeviceFilter);
            if((!result)&&(!this.dxUtils.FindMediaData(this.videoStreamConfig)))
            {
                this.dxUtils.Dispose();
                this.dxUtils = null;
            }
        }
        // #endif

        // Retrieve the stream control interface for the audio device.
        // When audio comes via the PCI bus (AudioViaPci) and no separate
        // audio device is configured, look on the video device instead.
        o = null;
        cat = PinCategory.Capture;
        med = MediaType.Audio ;
        iid = typeof(IAMStreamConfig).GUID;
        if( (this.AudioViaPci)&& (audioDeviceFilter == null)&&(videoDeviceFilter != null) )
        {
            hr = captureGraphBuilder.FindInterface(
#if DSHOWNET
                ref cat, ref med, videoDeviceFilter, ref iid, out o );
#else
                DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o);
#endif
        }
        else
        {
#if DSHOWNET
            hr = captureGraphBuilder.FindInterface( ref cat, ref med, audioDeviceFilter, ref iid, out o);
#else
            hr = captureGraphBuilder.FindInterface( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), audioDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
        }
        if (hr != 0)
            o = null;
        audioStreamConfig = o as IAMStreamConfig;

        // Retreive the media control interface (for starting/stopping graph)
        mediaControl = (IMediaControl) graphBuilder;

        // Reload any video crossbars
        if ( videoSources != null ) videoSources.Dispose(); videoSources = null;

        // Reload any audio crossbars
        if ( audioSources != null ) audioSources.Dispose(); audioSources = null;

        // Reload any property pages exposed by filters
        this.PropertyPages = null;

        // Reload capabilities of video device
        videoCaps = null;
        previewCaps = null;

        // Reload capabilities of video device
        audioCaps = null;

        // Retrieve TV Tuner if available
        o = null;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        iid = typeof(IAMTVTuner).GUID;
#if DSHOWNET
        hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
        hr = captureGraphBuilder.FindInterface( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
        if ( hr != 0 )
        {
            // Fall back to the plain video media type
            med = MediaType.Video ;
#if DSHOWNET
            hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
            hr = captureGraphBuilder.FindInterface( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, DsGuid.FromGuid(iid), out o );
#endif
            if ( hr != 0 )
                o = null;
        }
        IAMTVTuner t = o as IAMTVTuner;
        if ( t != null )
        {
            tuner = new Tuner(t);
            // Do not forget to set proper country code (Netherlands is 31)
        }

        // No check on TV Audio needed, it will show up in the
        // PropertyPages when it is available

        // Code for finding the TV audio interface
        // NOTE(review): the non-DSHOWNET branch here passes raw Guids where
        // the calls above wrap them in DsGuid.FromGuid — confirm both
        // overloads resolve against the DirectShowLib version in use.
        o = null;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        iid = typeof(IAMTVAudio).GUID;
        hr = captureGraphBuilder.FindInterface(
#if DSHOWNET
            ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
            cat, med, videoDeviceFilter, iid, out o);
#endif
        if ( hr != 0 )
        {
            med = MediaType.Video;
#if DSHOWNET
            hr = captureGraphBuilder.FindInterface( ref cat, ref med, videoDeviceFilter, ref iid, out o);
#else
            hr = captureGraphBuilder.FindInterface( cat, med, videoDeviceFilter, iid, out o);
#endif
            if ( hr != 0 )
            {
                o = null;
            }
        }
        // Only keep the TV audio interface when a tuner was also found.
        if((o != null)&&(tuner != null))
        {
            IAMTVAudio a = o as IAMTVAudio;
            TvAudio = a;
#if DEBUG
            Debug.WriteLine("FindInterface tuner.TvAudio");
#endif // DEBUG
        }

        /*
        // ----------- VMR 9 -------------------
        //## check out samples\inc\vmrutil.h :: RenderFileToVMR9

        IBaseFilter vmr = null;
        if ( ( VideoDevice != null ) && ( previewWindow != null ) )
        {
            vmr = (IBaseFilter) Activator.CreateInstance( Type.GetTypeFromCLSID( Clsid.VideoMixingRenderer9, true ) );
            hr = graphBuilder.AddFilter( vmr, "VMR" );
            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

            IVMRFilterConfig9 vmrFilterConfig = (IVMRFilterConfig9) vmr;
            hr = vmrFilterConfig.SetRenderingMode( VMRMode9.Windowless );
            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

            IVMRWindowlessControl9 vmrWindowsless = (IVMRWindowlessControl9) vmr;
            hr = vmrWindowsless.SetVideoClippingWindow( previewWindow.Handle );
            if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );
        }
        //-------------------------------------------

        // ---------- SmartTee ---------------------

        IBaseFilter smartTeeFilter = (IBaseFilter) Activator.CreateInstance( Type.GetTypeFromCLSID( Clsid.SmartTee, true ) );
        hr = graphBuilder.AddFilter( smartTeeFilter, "Video Smart Tee" );
        if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        // Video -> SmartTee
        cat = PinCategory.Capture;
        med = MediaType.Video;
        hr = captureGraphBuilder.RenderStream( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), videoDeviceFilter, null, smartTeeFilter );
        if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        // smarttee -> mux
        cat = PinCategory.Capture;
        med = MediaType.Video;
        hr = captureGraphBuilder.RenderStream( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), smartTeeFilter, null, muxFilter );
        if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        // smarttee -> vmr
        cat = PinCategory.Preview;
        med = MediaType.Video;
        hr = captureGraphBuilder.RenderStream( DsGuid.FromGuid(cat), DsGuid.FromGuid(med), smartTeeFilter, null, vmr );
        if( hr < 0 ) Marshal.ThrowExceptionForHR( hr );

        // -------------------------------------
        */

        // Update the state now that we are done
        graphState = GraphState.Created;
    }
}
/// <summary>
/// Renders the given movie file into a temporary DirectShow graph and reads
/// its frame size and duration (duration is the raw 100-ns value divided by
/// 6e8, i.e. minutes). On any failure returns a sentinel MovieFileData with
/// X = Y = -1 and Duration = NaN.
/// </summary>
/// <param name="file">Path of the movie file to inspect.</param>
private MovieFileData GetMovieFileData(string file)
{
    FilterGraph graphbuilder = null;
    try
    {
        Debug.WriteLine(string.Format("Getting info for movie \"{0}\"", file));
        graphbuilder = new FilterGraph();
        ((IGraphBuilder)graphbuilder).RenderFile(file, null);

        int x;
        int y;
        long duration;
        ((IBasicVideo)graphbuilder).GetVideoSize(out x, out y);
        ((IMediaSeeking)graphbuilder).GetDuration(out duration);

        return new MovieFileData() { X = x, Y = y, Duration = (double)duration / 6e8d };
    }
    catch
    {
        // Audio-only or unrenderable files land here; return sentinel values.
        return new MovieFileData() { Duration = double.NaN, X = -1, Y = -1 };
    }
    finally
    {
        // BUG FIX: the original only released the COM graph on the success
        // path, leaking it whenever RenderFile/GetVideoSize/GetDuration threw.
        if (graphbuilder != null)
            Marshal.ReleaseComObject(graphbuilder);
    }
}
/// <summary>
/// Serializes the given filter graph into a brand-new XML document whose root
/// is the encoded graph element. A null graph yields an empty document.
/// </summary>
/// <param name="graph">Graph to serialize, or null.</param>
/// <returns>The XML document (possibly empty).</returns>
public virtual XmlDocument writeFilterGraph(FilterGraph graph)
{
    XmlDocument document = new XmlDocument();
    if (graph == null)
        return document;

    XmlElement root = encodeFilterGraph(document, graph);
    document.AppendChild(root);
    return document;
}
/// <summary>
/// Captures the job context, derives the source-file extension to select the
/// matching stream decryptor (DVR-MS or WTV; other types get none), and
/// creates the base DirectShow filter graph used for extraction.
/// </summary>
public ExtractWithGraph(string SourceFile, string workPath, ExtractMediaType mediaType, JobStatus jobStatus, Log jobLog)
{
    // Record the job context supplied by the caller.
    _jobStatus = jobStatus;
    _jobLog = jobLog;
    _extractMediaType = mediaType;
    _SourceFile = SourceFile;
    _workPath = workPath;

    // Normalized extension without the leading dot (e.g. "wtv").
    _Ext = FilePaths.CleanExt(SourceFile).Replace(".", "");

    // Pick the decryptor type depending on the file type DVR-MS or WTV or TIVO.
    switch (_Ext)
    {
        case "dvr-ms":
            _CLSI_Decryptor = CLSID_DVRMSDecryptTag;
            break;
        case "wtv":
            _CLSI_Decryptor = CLSID_WTVDecryptTag;
            break;
    }

    // Set up the base graph.
    _fg = new FilterGraph();
    _gb = (IGraphBuilder)_fg;
}
/// <summary>
/// Adds the selected capture device to a throwaway filter graph, enumerates
/// the stream capabilities of its capture pin (reading each VideoInfoHeader),
/// then binds the device moniker and returns the resulting IBaseFilter.
/// NOTE(review): the enumerated capabilities are read but not returned —
/// presumably kept for side-effect-free probing; confirm against callers.
/// </summary>
public IBaseFilter GetVideo()
{
    IBaseFilter baseDevice;
    var filterGraph = new FilterGraph() as IFilterGraph2;
    filterGraph.AddSourceFilterForMoniker(selectedDevice.Mon, null, selectedDevice.Name, out baseDevice);

    IPin pin = DsFindPin.ByCategory(baseDevice, PinCategory.Capture, 0);
    var streamConfig = pin as IAMStreamConfig;

    AMMediaType media;
    int iC = 0, iS = 0;
    streamConfig.GetNumberOfCapabilities(out iC, out iS);

    IntPtr ptr = Marshal.AllocCoTaskMem(iS);
    try
    {
        for (int i = 0; i < iC; i++)
        {
            streamConfig.GetStreamCaps(i, out media, ptr);
            VideoInfoHeader v = new VideoInfoHeader();
            Marshal.PtrToStructure(media.formatPtr, v);
            // BUG FIX: each AMMediaType returned by GetStreamCaps owns
            // unmanaged format data and must be released by the caller.
            DsUtils.FreeAMMediaType(media);
        }
    }
    finally
    {
        // BUG FIX: the caps buffer was allocated but never freed.
        Marshal.FreeCoTaskMem(ptr);
    }

    Guid iid = typeof(IBaseFilter).GUID;
    object source;
    selectedDevice.Mon.BindToObject(null, null, ref iid, out source);
    return (IBaseFilter)source;
}
/// <summary>
/// Builds the MPEG conversion graph: creates the standard MPEG encoder filter
/// and a file writer pointed at the target path, renders the source file via
/// intelligent connect, hooks graph events to this window, and starts the run.
/// </summary>
private void StartConversion()
{
    // build directshow graph
    _filterGraph = new FilterGraph();
    _graphBuilder = (IGraphBuilder)_filterGraph;

    // Create Standard MPEG Filter
    Guid guid = new Guid("CFD87339-C61F-46ca-B6A1-F87D6B96243E");
    Type comtype = Type.GetTypeFromCLSID(guid);
    _mpegFilter = (IBaseFilter)Activator.CreateInstance(comtype);
    SetFilterParams();

    // Set the output file
    IBaseFilter fileWriter = (IBaseFilter)new FileWriter();
    IFileSinkFilter fs = (IFileSinkFilter)fileWriter;
    int hr = fs.SetFileName(textBoxTgt.Text, null);
    // BUG FIX: this HRESULT was silently overwritten by the next call,
    // hiding failures such as an unwritable target path.
    DsError.ThrowExceptionForHR(hr);
    hr = _graphBuilder.AddFilter(fileWriter, "File Writer Filter");
    DsError.ThrowExceptionForHR(hr);

    // Add the encoder filter to the graph
    hr = _graphBuilder.AddFilter(_mpegFilter, "MPEG Filter");
    DsError.ThrowExceptionForHR(hr);

    // use Intelligent connect to build the rest of the graph
    hr = _graphBuilder.RenderFile(textBoxSrc.Text, null);
    DsError.ThrowExceptionForHR(hr);

    // get the interfaces we need for info and control
    _mediaControl = (IMediaControl)_filterGraph;
    _mediaEvent = (IMediaEventEx)_filterGraph;
    _mediaSeeking = (IMediaSeeking)_mpegFilter;

    // Route graph events to this window's message loop.
    hr = _mediaEvent.SetNotifyWindow(this.Handle, WM_GRAPHNOTIFY, IntPtr.Zero);
    DsError.ThrowExceptionForHR(hr);

    // we are ready to convert
    hr = _mediaControl.Run();
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Creates the DirectShow filter graph and caches its event, builder, and
/// control interfaces in fields.
/// </summary>
/// <exception cref="ApplicationException">
/// Wraps any failure during graph creation or interface casting.
/// </exception>
private void InitInterfaces()
{
    try
    {
        // initialise the graph and get the graph builder and media control from it
        _graph = new FilterGraph();
        _mediaEvent = (IMediaEventEx)_graph;
        _graphBuilder = (IGraphBuilder)_graph;
        _mediaControl = (IMediaControl)_graph;
    }
    catch (Exception ex)
    {
        // BUG FIX: pass the original exception as InnerException so the
        // root cause and its stack trace are not lost.
        throw new ApplicationException("Couldn't initialise filter graph: " + ex.Message, ex);
    }
}
/// <summary>
/// Builds the DirectShow filter graph and caches the builder, control, and
/// event interfaces; on any failure shows the "Baslatilamadi" error dialog.
/// </summary>
void InitInterfaces()
{
    try
    {
        fg = new FilterGraph();
        // All three interfaces are views onto the same graph object.
        me = (IMediaEventEx)fg;
        mc = (IMediaControl)fg;
        gb = (IGraphBuilder)fg;
    }
    catch (Exception)
    {
        MessageBox.Show("Baslatilamadi");
    }
}