public override bool Build(DirectShowLib.IPin source, IBuild build)
{
	// Offer the pin to each successor in order; the first one that
	// manages to build the rest of the graph wins.
	foreach (Abstract candidate in this.Next)
	{
		if (candidate.Build(source, build))
			return true;
	}
	return false;
}
public virtual bool Build(DirectShowLib.IBaseFilter previous, int i, IBuild build)
{
	// Resolve the i:th output pin of the upstream filter and, when it
	// exists, continue building the graph from that pin.
	DirectShowLib.IPin outPin = DirectShowLib.DsFindPin.ByDirection(previous, DirectShowLib.PinDirection.Output, i);
	return outPin.NotNull() && this.Build(outPin, build);
}
public override bool Build(DirectShowLib.IPin source, DirectShow.Binding.IBuild build)
{
	// First pass: push this node's configured output-pin index down to
	// every successor (all of them, not just the one that ends up building).
	foreach (Filters.Abstract candidate in this.Next)
		candidate.Output = this.Output;
	// Second pass: let the first successor that accepts the pin finish the graph.
	foreach (DirectShow.Binding.Filters.Abstract candidate in this.Next)
	{
		if (candidate.Build(source, build))
			return true;
	}
	return false;
}
public virtual bool Build(DirectShowLib.IBaseFilter previous, IBuild build)
{
	bool result = false;
	if (this.Output == -1)
	{
		// No explicit output pin configured: probe the first six output
		// pins of the upstream filter until one of them builds.
		for (int pin = 0; !result && pin < 6; pin++)
			result = this.Build(previous, pin, build);
	}
	else
	{
		// NOTE(review): assumes Output is non-null whenever it is not -1;
		// a null Output would throw here — confirm callers always set it.
		result = this.Build(previous, this.Output.Value, build);
	}
	return result;
}
protected override Image Media(DirectShowLib.IBaseFilter filter)
{
	Image result = null;
	DirectShowLib.IAMAnalogVideoDecoder decoder = filter as DirectShowLib.IAMAnalogVideoDecoder;
	if (decoder != null)
	{
		DirectShowLib.AnalogVideoStandard standard;
		decoder.get_TVFormat(out standard);
		// Any PAL variant (PAL_B through PAL_60) is reported as 720x576 YUY2.
		if (standard >= DirectShowLib.AnalogVideoStandard.PAL_B && standard <= DirectShowLib.AnalogVideoStandard.PAL_60)
		{
			result = new Image()
			{
				Type = DirectShowLib.MediaSubType.YUY2,
				Resolution = new Geometry2D.Integer.Size(720, 576),
				ForceHeight = true
			};
		}
	}
	return result;
}
public override bool Build(DirectShowLib.IPin source, IBuild build)
{
	// Propagate the fuzzy-match flag to every successor up front.
	foreach (Filters.Abstract candidate in this.Next)
		candidate.FuzzyMatch = this.FuzzyMatch;
	// Hand each successor the playback rate and stop at the first that builds.
	foreach (Abstract candidate in this.Next)
	{
		candidate.Rate = this.Rate;
		if (candidate.Build(source, build))
			return true;
	}
	return false;
}
// Registers (currently disabled) backup-to-file hooks on the build's
// Playing/OnClose events, then delegates graph construction to the base class.
public override bool Build(DirectShowLib.IPin source, DirectShow.Binding.IBuild build)
{
	build.Playing += () =>
	{
		// Backup recording is disabled; kept as the re-enable point.
		// if (this.recorder.NotNull())
		// this.recorder.StartBackupToFile(this.filename, 0);
	};
	build.OnClose += () =>
	{
		// if (this.recorder.NotNull())
		// this.recorder.StopBackupToFile();
	};
	return base.Build(source, build);
}
internal Pin FindPinForIPin(DirectShowLib.IPin ipin)
{
	// Scan every pin of every filter in the graph for the wrapper
	// object that owns this raw IPin; null when no wrapper matches.
	foreach (Filter filter in _filters)
		foreach (Pin pin in filter.Pins)
			if (pin.IPin == ipin)
				return pin;
	return null;
}
DirectShowLib.AMMediaType[] GetOutputMediaTypes(DirectShowLib.IBaseFilter filter)
{
	// Queries the filter's first output pin for its stream capabilities;
	// returns null when the pin does not support IAMStreamConfig.
	DirectShowLib.AMMediaType[] result = null;
	DirectShowLib.IPin outPin = DirectShowLib.DsFindPin.ByDirection(filter, DirectShowLib.PinDirection.Output, 0);
	DirectShowLib.IAMStreamConfig streamConfig = outPin as DirectShowLib.IAMStreamConfig;
	if (streamConfig != null)
	{
		int count = 0;
		int size = 0;
		Exception.GraphError.Check(streamConfig.GetNumberOfCapabilities(out count, out size));
		// Scratch buffer for the capability structure returned by GetStreamCaps.
		Buffer.Vector<byte> buffer = new Buffer.Vector<byte>(size);
		result = new DirectShowLib.AMMediaType[count];
		for (int i = 0; i < count; i++)
			Exception.GraphError.Check(streamConfig.GetStreamCaps(i, out result[i], buffer));
	}
	return result;
}
public override bool Build(DirectShowLib.IPin source, IBuild build)
{
	// Opens this.file as a DirectShow source filter and lets the first
	// successor that accepts the new filter finish building the graph.
	bool result = false;
	DirectShowLib.IBaseFilter filter;
	if (build.Graph.AddSourceFilter(this.file, "Ds.NET FileFilter", out filter) == 0)
	{
		foreach (Abstract candidate in this.Next)
			if (result = candidate.Build(filter, build))
				break;
	}
	else
	{
		Error.Log.Append(Error.Level.Debug, "Unable to open file.", "DirectShow was unable to open file \"" + this.file + "\".");
		// BUGFIX: when AddSourceFilter fails, filter is typically null and was
		// never added to the graph — only attempt removal when an instance exists.
		if (filter.NotNull())
			Exception.GraphError.Check(build.Graph.RemoveFilter(filter));
	}
	return result;
}
public override bool Build(DirectShowLib.IPin source, Imint.Media.DirectShow.Binding.IBuild build)
{
	// Creates this node's filter, adds it to the graph under this.Description,
	// and lets the first successor that accepts it complete the chain.
	bool result = false;
	DirectShowLib.IBaseFilter filter = this.Create();
	if (filter.NotNull() && build.Graph.AddFilter(filter, this.Description) == 0)
	{
		foreach (DirectShow.Binding.Filters.Abstract candidate in this.Next)
			if (result = candidate.Build(filter, build))
				break;
	}
	else
	{
		Error.Log.Append(Error.Level.Debug, "Unable to open file.", "DirectShow was unable to open file \"" + this.file + "\".");
		// BUGFIX: if Create() returned null, AddFilter never ran and there is
		// nothing to remove — RemoveFilter(null) would make GraphError.Check throw.
		if (filter.NotNull())
			Binding.Exception.GraphError.Check(build.Graph.RemoveFilter(filter));
	}
	return result;
}
public override bool Build(DirectShowLib.IPin source, DirectShow.Binding.IBuild build)
{
	// Adds the AXIS RTP network source filter and continues the chain from it.
	bool result = false;
	DirectShowLib.IBaseFilter filter = this.Create();
	// BUGFIX: guard against Create() returning null (filter not registered),
	// matching the sibling Build overrides; previously AddFilter(null) was
	// attempted and RemoveFilter(null) could run in the failure branch.
	if (filter.NotNull() && build.Graph.AddFilter(filter, "AXIS RTP Source Filter") == 0)
	{
		foreach (DirectShow.Binding.Filters.Abstract candidate in this.Next)
			if (result = candidate.Build(filter, build))
				break;
	}
	else
	{
		Error.Log.Append(Error.Level.Debug, "Unable to open AXIS RTP Source Filter.", "AXIS RTP Source Filter was unable to open network path \"" + this.url + "\".");
		if (filter.NotNull())
			DirectShow.Binding.Exception.GraphError.Check(build.Graph.RemoveFilter(filter));
	}
	return result;
}
public override bool Build(DirectShowLib.IPin source, DirectShow.Binding.IBuild build)
{
	// Adds the Hauppauge transport-stream reader and builds the successor
	// chain from the configured output pin index.
	bool result = false;
	DirectShowLib.IBaseFilter filter = this.Create();
	// BUGFIX: guard against a null filter from Create() before AddFilter/RemoveFilter.
	if (filter.NotNull() && build.Graph.AddFilter(filter, "Hauppauge Transport Reader") == 0)
	{
		foreach (DirectShow.Binding.Filters.Abstract candidate in this.Next)
			// NOTE(review): assumes Output has been set; a null Output throws here — confirm.
			if (result = candidate.Build(filter, this.Output.Value, build))
				break;
	}
	else
	{
		Error.Log.Append(Error.Level.Debug, "Unable to open Hauppauge Transport Reader.", "Hauppauge Transport Reader was unable to open file \"" + this.file + "\".");
		if (filter.NotNull())
			DirectShow.Binding.Exception.GraphError.Check(build.Graph.RemoveFilter(filter));
	}
	return result;
}
public int RenderGraph(DirectShowLib.IFilterGraph2 graph, string fileName)
{
	// Builds a RealMedia playback graph: source -> demux -> (video, audio) -> renderers.
	// Returns 0 on success, -1 for an unsupported extension, otherwise the failing HRESULT.
	FileInfo info = new FileInfo(fileName);
	string ext = info.Extension;
	// NOTE(review): extension comparison is case-sensitive; confirm exts entries
	// match the casing produced by FileInfo.Extension.
	bool has = false;
	foreach (string s in exts)
	{
		if (s == ext)
		{
			has = true;
			break;
		}
	}
	if (!has)
		return -1;
	IBaseFilter source = (IBaseFilter)SupportMethod.AddFilterFromClsid(graph, Uuids.NormalSource, "source");
	IFileSourceFilter fileSource = source as IFileSourceFilter;
	AMMediaType type = null; // let the source pick the media type itself
	int hr = fileSource.Load(fileName, type);
	if (hr != 0)
		return hr;
	// Cleanup: removed the unused local `string name;` and the redundant
	// `as IBaseFilter` casts on variables that already have that static type.
	IBaseFilter demux = (IBaseFilter)SupportMethod.AddFilterFromClsid(graph, Uuids.RealMediaSplitterFilter, "demux");
	IBaseFilter video = (IBaseFilter)SupportMethod.AddFilterFromClsid(graph, Uuids.RealVideoDecoder, "video");
	IBaseFilter audio = (IBaseFilter)SupportMethod.AddFilterFromClsid(graph, Uuids.RealAudioDecoder, "audio");
	hr = SupportMethod.ConnectFilter(graph, source, demux);
	if (hr != 0)
		return hr;
	hr = SupportMethod.ConnectFilter(graph, demux, video);
	if (hr != 0)
		return hr;
	hr = SupportMethod.ConnectFilter(graph, demux, audio);
	if (hr != 0)
		return hr;
	hr = SupportMethod.RenderFilter(graph, video);
	if (hr != 0)
		return hr;
	hr = SupportMethod.RenderFilter(graph, audio);
	return hr;
}
// Creates this node's filter, inserts it into the graph and connects it to the
// given source pin.  Upstream filters named "Capture"/"Source" are routed
// through the dedicated CreateSource path; anything else is connected directly,
// either loosely (Connect, when FuzzyMatch allows intermediates) or strictly
// (ConnectDirect).  On success, each Followers chain is built from the new filter.
public override bool Build(DirectShowLib.IPin source, IBuild build)
{
	bool result = false;
	DirectShowLib.IBaseFilter filter = this.Create();
	if (filter.NotNull() && build.Graph.AddFilter(filter, this.Description) == 0 && this.PreConfiguration(build))
	{
		result = true;
		// Identify the filter that owns the source pin so we can branch on its name.
		DirectShowLib.PinInfo pinInformation;
		Exception.GraphError.Check(source.QueryPinInfo(out pinInformation));
		DirectShowLib.FilterInfo filterInformation;
		Exception.GraphError.Check(pinInformation.filter.QueryFilterInfo(out filterInformation));
		switch (filterInformation.achName)
		{
			case "Capture":
				// Capture device upstream: connect through the capture pin category.
				this.CreateSource(DirectShowLib.PinCategory.Capture, pinInformation.filter, filter, build);
				break;
			case "Source":
				this.CreateSource(null, pinInformation.filter, filter, build);
				break;
			default:
				// FuzzyMatch lets the graph insert helper filters (Connect succeeds
				// for any non-negative HRESULT); otherwise require an exact direct
				// connection.  On failure, undo both the connection and the add.
				if (!(result = (this.FuzzyMatch ? 0 <= build.Graph.Connect(source, DirectShowLib.DsFindPin.ByDirection(filter, DirectShowLib.PinDirection.Input, 0)) : 0 == build.Graph.ConnectDirect(source, DirectShowLib.DsFindPin.ByDirection(filter, DirectShowLib.PinDirection.Input, 0), new DirectShowLib.AMMediaType()))))
				{
					Error.Log.Append(Error.Level.Debug, "Unable to connect.", "DirectShow was unable to connect \"" + filterInformation.achName + "\" with \"" + this.Description + "\".");
					Exception.GraphError.Check(source.Disconnect());
					Exception.GraphError.Check(build.Graph.RemoveFilter(filter));
				}
				break;
		}
		if (result &= this.PostConfiguration(build))
		{
			// Some filters need a moment before their output pins become usable.
			if (this.WaitForOutput.Ticks > 0)
				System.Threading.Thread.Sleep(this.WaitForOutput);
			// Build every follower chain from the new filter; note that result is
			// accumulated with &=, so a failed chain marks the whole build failed.
			for (int i = 0; i < this.Followers.Length; i++)
				foreach (Filters.Abstract candidate in this.Followers[i])
					if (result &= candidate.Build(filter, build))
						break;
		}
	}
	return result;
}
public override bool Build(DirectShowLib.IPin source, IBuild build)
{
	bool result = false;
	DirectShowLib.IBaseFilter filter = this.Create();
	if (filter.NotNull())
	{
		// Point the ASF reader at the file before inserting it into the graph.
		Exception.GraphError.Check((filter as DirectShowLib.IFileSourceFilter).Load(this.file, new DirectShowLib.AMMediaType()));
		if (build.Graph.AddFilter(filter, "Asf Reader") == 0)
		{
			foreach (Abstract candidate in this.Next)
			{
				result = candidate.Build(filter, build);
				if (result)
					break;
			}
		}
		else
		{
			Error.Log.Append(Error.Level.Debug, "Unable to open file.", "DirectShow was unable to open file \"" + this.file + "\".");
			Exception.GraphError.Check(build.Graph.RemoveFilter(filter));
		}
	}
	return result;
}
public override bool Build(DirectShowLib.IPin source, IBuild build)
{
	// Locates the capture device by its identifier, applies the configured
	// format, adds it to the graph and builds successors from output pin 0.
	bool result = false;
	if (Abstract.FindCaptureDeviceNameByIdentifier(this.device).NotNull())
	{
		DirectShowLib.IBaseFilter filter = Abstract.FindCaptureDeviceByIdentifier(this.device, false);
		if (filter.NotNull() && this.SetFormat(filter))
		{
			if (build.Graph.AddFilter(filter, "Capture") == 0)
			{
				foreach (Filters.Abstract candidate in this.Next)
				{
					result = candidate.Build(filter, 0, build);
					if (result)
						break;
				}
			}
			else
			{
				Error.Log.Append(Error.Level.Debug, "Unable to open capture.", "DirectShow was unable to capture \"" + this.device + "\".");
				Exception.GraphError.Check(build.Graph.RemoveFilter(filter));
			}
		}
	}
	return result;
}
// Builds the sample-grabber branch via the base class, then inspects the
// negotiated media type to learn the frame size and per-frame duration,
// and finally applies an optional playback-rate override on the graph.
public override bool Build(DirectShowLib.IPin source, IBuild build)
{
	this.build = build;
	// Release this node's resources when the graph shuts down.
	this.build.OnClose += this.Dispose;
	bool result;
	if (result = base.Build(source, build))
	{
		DirectShowLib.AMMediaType media = new DirectShowLib.AMMediaType();
		Exception.GraphError.Check((this.grabber as DirectShowLib.ISampleGrabber).GetConnectedMediaType(media));
		DirectShowLib.VideoInfoHeader header = (DirectShowLib.VideoInfoHeader)System.Runtime.InteropServices.Marshal.PtrToStructure(media.formatPtr, typeof(DirectShowLib.VideoInfoHeader));
		this.size = new Geometry2D.Integer.Size(header.BmiHeader.Width, header.BmiHeader.Height);
		// AvgTimePerFrame is in 100 ns units.
		this.lifetime = header.AvgTimePerFrame;
		// NOTE!!!! Here we set a default frame rate if the video does not have such information available.
		// 400000 * 100 ns = 40 ms per frame, i.e. 25 fps.
		if (this.lifetime < 1000 || this.lifetime > 10000000)
			this.lifetime = 400000;
		if (this.Rate.NotNull())
		{
			// Convert the requested frame rate into a factor relative to the clip's native rate.
			double factor = (double)this.Rate / (1000 / new TimeSpan(this.lifetime).TotalMilliseconds);
			// NOTE(review): the HRESULT from SetRate is ignored — confirm failures are acceptable here.
			int code = (this.build.Graph as DirectShowLib.IMediaSeeking).SetRate(factor);
		}
	}
	return result;
}
public void OnGraphEvent(DirectShowLib.EventCode eventCode, IntPtr param1, IntPtr param2)
{
	Debug.WriteLine(eventCode);
	if (eventCode == DirectShowLib.EventCode.Complete)
	{
		// End of stream: behave exactly as if the user pressed Stop.
		bnStop_Click(null, null);
	}
	else if (eventCode == DirectShowLib.EventCode.StepComplete)
	{
		// Frame stepping has completed.
		m_bFrameStepping = false;
		UpdateUI();
	}
	else if (eventCode == DirectShowLib.EventCode.ErrorAbort)
	{
		NotifyError("An error occurred during playback.", param1.ToInt32());
		UpdateUI();
	}
	// All other event codes are intentionally ignored.
}
// Configures an already-created EVR filter: optionally loads the custom
// "EVR Presenter (babgvant)" presenter (skipped when the caller forced plain
// "evr"), binds the renderer to our video window, and raises the EVR's input
// stream count to at least dwStreams.  Stores the display control in _mPDisplay.
// NOTE(review): InitializeRenderer success is tested with hr > -1, which also
// accepts positive non-zero HRESULTs — confirm that is intended.
private void InitializeEvr(DirectShowLib.IBaseFilter pEvr, int dwStreams, string forcedVideoRenderer)
{
	int hr = 0;
	var pGetService = pEvr as IMFGetService;
	IMFVideoDisplayControl pDisplay;
	// Continue with the rest of the set-up.
	//try to load the custom presenter
	IMFVideoPresenter pPresenter = null;
	if (_config.VideoConfig.EnableCustomPresenter && !string.Equals(forcedVideoRenderer, "evr", StringComparison.OrdinalIgnoreCase))
	{
		IMFVideoRenderer pRenderer = pEvr as IMFVideoRenderer;
		pPresenter = URCOMLoader.Instance.GetObject("EVR Presenter (babgvant)", false) as IMFVideoPresenter;
		try
		{
			if (pPresenter != null)
			{
				hr = pRenderer.InitializeRenderer(null, pPresenter);
				if (hr > -1)
				{
					_customEvrPresenterLoaded = true;
					// Forward the configured nominal range to the custom presenter.
					IEVRCPConfig cp = pPresenter as IEVRCPConfig;
					if (cp != null)
					{
						hr = cp.SetInt(EVRCPSetting.NOMINAL_RANGE, _config.VideoConfig.NominalRange);
						DsError.ThrowExceptionForHR(hr);
					}
				}
			}
		}
		finally
		{
			// The EVR holds its own reference after InitializeRenderer.
			if (pPresenter != null)
				Marshal.ReleaseComObject(pPresenter);
		}
	}
	// Set the video window.
	object o;
	hr = pGetService.GetService(MFServices.MR_VIDEO_RENDER_SERVICE, typeof(IMFVideoDisplayControl).GUID, out o);
	DsError.ThrowExceptionForHR(hr);
	try
	{
		pDisplay = (IMFVideoDisplayControl)o;
	}
	catch
	{
		// The service object is not a display control: release it and rethrow.
		Marshal.ReleaseComObject(o);
		throw;
	}
	// Set the number of streams.
	hr = pDisplay.SetVideoWindow(VideoWindowHandle);
	DsError.ThrowExceptionForHR(hr);
	IEVRFilterConfig evrConfig = pEvr as IEVRFilterConfig;
	int pdwMaxStreams;
	if (evrConfig != null)
	{
		hr = evrConfig.GetNumberOfStreams(out pdwMaxStreams);
		DsError.ThrowExceptionForHR(hr);
		_logger.Debug("NumberOfStreams: {0}", pdwMaxStreams);
		// Only grow the stream count; never shrink an EVR that already has more.
		if (pdwMaxStreams < dwStreams)
		{
			hr = evrConfig.SetNumberOfStreams(dwStreams);
			DsError.ThrowExceptionForHR(hr);
			_logger.Debug("Set NumberOfStreams: {0}", dwStreams);
		}
	}
	else
		_logger.Error("Couldn't get IEVRFilterConfig from EVR");
	// Return the IMFVideoDisplayControl pointer to the caller.
	_mPDisplay = pDisplay;
}
//public bool ToogleDvdSubtitles()
//{
//    int hr = 0;
//    if (_mDvdControl != null)
//    {
//        AMLine21CCState cState = AMLine21CCState.On;
//        IAMLine21Decoder dvdSubtitle = FilterGraphTools.FindLine21Filter(m_graph);
//        try
//        {
//            if (dvdSubtitle != null)
//            {
//                hr = dvdSubtitle.GetServiceState(out cState);
//                DsError.ThrowExceptionForHR(hr);
//                _logger.Debug("ToogleDvdSubtitles: - CurrentState: {0}", cState);
//                if (cState == AMLine21CCState.Off)
//                    hr = dvdSubtitle.SetServiceState(AMLine21CCState.On);
//                else
//                    hr = dvdSubtitle.SetServiceState(AMLine21CCState.Off);
//                DsError.ThrowExceptionForHR(hr);
//            }
//            return cState != AMLine21CCState.On;
//        }
//        finally
//        {
//            if (dvdSubtitle != null)
//                Marshal.ReleaseComObject(dvdSubtitle);
//        }
//    }
//    else
//        return false;
//}

// Enumerates the media types exposed by a pin and collapses them to a list of
// GUIDs for routing.  DVD-encrypted MPEG-2/AC3 streams are mapped to the plain
// Video/Audio major types so later routing can treat them uniformly; other
// encrypted subtypes are added as their subtype, everything else as its major type.
private List<Guid> GetPinMediaTypes(DirectShowLib.IPin pin)
{
	int hr = 0;
	int j = -1;
	var mt = new List<Guid>();
	IEnumMediaTypes emtDvr;
	pin.EnumMediaTypes(out emtDvr);
	while (j != 0)
	{
		var amtDvr = new DirectShowLib.AMMediaType[1];
		// Next() writes the fetched-count through this 4-byte pointer.
		IntPtr d = Marshal.AllocCoTaskMem(4);
		try
		{
			hr = emtDvr.Next(1, amtDvr, d);
			DsError.ThrowExceptionForHR(hr);
			j = Marshal.ReadInt32(d);
		}
		finally
		{
			Marshal.FreeCoTaskMem(d);
		}
		if (j != 0)
		{
			if (amtDvr[0].majorType == DvdEncryptedMediaType)
			{
				if (amtDvr[0].subType == DirectShowLib.MediaSubType.Mpeg2Video)
				{
					mt.Add(DirectShowLib.MediaType.Video);
				}
				else if (amtDvr[0].subType == DirectShowLib.MediaSubType.DolbyAC3)
				{
					mt.Add(DirectShowLib.MediaType.Audio);
				}
				else
					mt.Add(amtDvr[0].subType);
			}
			else
				mt.Add(amtDvr[0].majorType);
			// Release the COM-allocated media type we just consumed.
			DsUtils.FreeAMMediaType(amtDvr[0]);
			amtDvr[0] = null;
		}
	}
	return mt;
}
protected override Image Media(DirectShowLib.IBaseFilter filter)
{
	// This source always reports 800x600 YUY2, regardless of the filter queried.
	Image result = new Image();
	result.Type = DirectShowLib.MediaSubType.YUY2;
	result.Resolution = new Geometry2D.Integer.Size(800, 600);
	return result;
}
// Walks a DMO's parameter table via IMediaParamInfo, dumping each parameter's
// name, units and (for enums) value strings to the debug output, queries the
// time-format support, and finally sets parameter 0 to the integer value 0
// through IMediaParams (cached in m_param).
private void SetDMOParams(DirectShowLib.IBaseFilter dmoWrapperFilter)
{
	int hr;
	Guid g;
	int i;
	int pc;
	ParamInfo pInfo;
	IMediaParamInfo paramInfo = dmoWrapperFilter as IMediaParamInfo;
	// With a little effort, a generic parameter handling routine
	// could be produced. You know the number of parameters (GetParamCount),
	// the type of the parameter (pInfo.mpType), the range of values for
	// int and float (pInfo.mpdMinValue, pInfo.mpdMaxValue), if the parameter is an
	// enum, you have the strings (GetParamText).
	hr = paramInfo.GetParamCount(out pc);
	DMOError.ThrowExceptionForHR(hr);
	// Walk all the parameters
	for (int pCur = 0; pCur < pc; pCur++)
	{
		IntPtr ip;
		hr = paramInfo.GetParamInfo(pCur, out pInfo);
		DMOError.ThrowExceptionForHR(hr);
		// NOTE(review): this always fetches the text of parameter 0, not pCur —
		// looks like it should be GetParamText(pCur, out ip); confirm against
		// the DirectShowLib DMO sample before changing.
		hr = paramInfo.GetParamText(0, out ip);
		DMOError.ThrowExceptionForHR(hr);
		try
		{
			string sName, sUnits;
			string[] sEnum;
			ParseParamText(ip, out sName, out sUnits, out sEnum);
			Debug.WriteLine(string.Format("Parameter name: {0}", sName));
			Debug.WriteLine(string.Format("Parameter units: {0}", sUnits));
			// Not all params will have enumerated strings.
			if (pInfo.mpType == MPType.ENUM)
			{
				// The final entry in "splitted" will be a blank (used to terminate the list).
				for (int x = 0; x < sEnum.Length; x++)
				{
					Debug.WriteLine(string.Format("Parameter Enum strings: {0} = {1}", x, sEnum[x]));
				}
			}
		}
		finally
		{
			// GetParamText allocates via CoTaskMem; the caller must free it.
			Marshal.FreeCoTaskMem(ip);
		}
	}
	hr = paramInfo.GetCurrentTimeFormat(out g, out i);
	DMOError.ThrowExceptionForHR(hr);
	hr = paramInfo.GetSupportedTimeFormat(0, out g);
	DMOError.ThrowExceptionForHR(hr);
	MPData o = new MPData();
	m_param = dmoWrapperFilter as IMediaParams;
	// Set parameter 0 to the integer value 0.
	o.vInt = 0;
	hr = m_param.SetParam(0, o);
	DMOError.ThrowExceptionForHR(hr);
}
private static void ShowFilterGraphFilterPins(Panoply.Library.FilterGraph.Filter filter, DirectShowLib.PinDirection direction)
{
	// Print each pin of the filter in the given direction, together with
	// the filter/pin it is connected to (if any).
	foreach (FilterGraph.Pin pin in filter.GetPins(direction))
	{
		Console.Write(" * {0}", pin.Name);
		FilterGraph.Pin connectedPin = pin.GetConnectedPin();
		if (connectedPin != null)
			Console.WriteLine(" (Connected to filter '{0}' pin '{1}')", connectedPin.Filter.Name, connectedPin.Name);
		else
			Console.WriteLine(" (Not connected)");
	}
}
// Pure pass-through: this node adds no behavior of its own.
public override bool Build(DirectShowLib.IPin source, DirectShow.Binding.IBuild build) =>
	base.Build(source, build);
private void SetEvrVppMode(DirectShowLib.IBaseFilter pEvr)
{
	// Queries the EVR mixer for its available video processor modes and, when
	// the currently selected mode is not the first (preferred) reported mode,
	// switches the mixer over to it.
	int hr = 0;
	object objVideoProc = null;
	IMFGetService mfgs = pEvr as IMFGetService;
	if (mfgs != null)
	{
		try
		{
			mfgs.GetService(MFServices.MR_VIDEO_MIXER_SERVICE, typeof(IMFVideoProcessor).GUID, out objVideoProc);
			IMFVideoProcessor evrProc = objVideoProc as IMFVideoProcessor;
			int dModes;
			IntPtr ppModes = IntPtr.Zero;
			Guid lpMode = Guid.Empty;
			Guid bestMode = Guid.Empty;
			hr = evrProc.GetVideoProcessorMode(out lpMode);
			DsError.ThrowExceptionForHR(hr);
			List<Guid> vpModes = new List<Guid>();
			try
			{
				hr = evrProc.GetAvailableVideoProcessorModes(out dModes, out ppModes);
				DsError.ThrowExceptionForHR(hr);
				if (dModes > 0)
				{
					for (int i = 0; i < dModes; i++)
					{
						int offSet = Marshal.SizeOf(Guid.Empty) * i;
						// BUGFIX: use 64-bit-safe pointer arithmetic; the previous
						// (int)ppModes cast truncated the pointer in x64 processes.
						Guid vpMode = (Guid)Marshal.PtrToStructure(new IntPtr(ppModes.ToInt64() + offSet), typeof(Guid));
						vpModes.Add(vpMode);
						_logger.Debug("VideoMode Found: {0}", vpMode);
					}
				}
			}
			finally
			{
				if (ppModes != IntPtr.Zero)
					Marshal.FreeCoTaskMem(ppModes);
			}
			// BUGFIX: guard against an empty mode list (dModes == 0) before
			// indexing; previously vpModes[0] threw ArgumentOutOfRangeException.
			if (vpModes.Count == 0)
				return;
			bestMode = vpModes[0];
			_logger.Debug("Set ProcessorMode: {0} BestMode: {1}", lpMode, bestMode);
			if (lpMode.CompareTo(bestMode) != 0)
			{
				hr = evrProc.SetVideoProcessorMode(ref bestMode);
				DsError.ThrowExceptionForHR(hr);
				hr = evrProc.GetVideoProcessorMode(out lpMode);
				DsError.ThrowExceptionForHR(hr);
				_logger.Debug("Current ProcessorMode: {0} BestMode: {1}", lpMode, bestMode);
			}
		}
		finally
		{
			if (objVideoProc != null)
				Marshal.ReleaseComObject(objVideoProc);
		}
	}
}
// Wires each demuxed output pin of pSource into an explicit decoder/renderer
// chain — LAV Video + (madVR | EVR) with optional xy-SubFilter/xy-VSFilter for
// video; (Reclock | WASAPI | default renderer) + LAV Audio for audio; subtitle
// and DVD-subpicture pins routed to the subtitle filter / LAV Video — falling
// back to RenderEx for any pin the explicit graph could not satisfy.  Throws
// when no pin at all could be rendered.
private void RenderStreams(DirectShowLib.IBaseFilter pSource, string forcedVideoRenderer, bool enableXySubFilter)
{
	int hr;
	_filterGraph = m_graph as DirectShowLib.IFilterGraph2;
	if (_filterGraph == null)
	{
		throw new Exception("Could not QueryInterface for the IFilterGraph2");
	}
	var useDefaultRenderer = true;
	DirectShowLib.IEnumPins pEnum;
	hr = pSource.EnumPins(out pEnum);
	DsError.ThrowExceptionForHR(hr);
	DirectShowLib.IPin[] pins = { null };
	/* Counter for how many pins successfully rendered */
	var pinsRendered = 0;
	/* Loop over each pin of the source filter */
	while (pEnum.Next(1, pins, IntPtr.Zero) == 0)
	{
		//explicitly build graph to avoid unwanted filters worming their way in
		List<Guid> mediaTypes = GetPinMediaTypes(pins[0]);
		bool needsRender = true;
		for (int m = 0; m < mediaTypes.Count; m++)
		{
			DirectShowLib.IPin decIn = null;
			DirectShowLib.IPin decOut = null;
			DirectShowLib.IPin rendIn = null;
			// madVR is used only when enabled in config and not overridden by the caller.
			var enableMadvr = _config.VideoConfig.EnableMadvr && (string.IsNullOrWhiteSpace(forcedVideoRenderer) || string.Equals(forcedVideoRenderer, "madvr", StringComparison.OrdinalIgnoreCase));
			try
			{
				if (mediaTypes[m] == DirectShowLib.MediaType.Video)
				{
					#region Video
					//add the video renderer first so we know whether to enable DXVA2 in "Auto" mode.
					if (enableMadvr)
					{
						try
						{
							_madvr = URCOMLoader.Instance.GetObject(typeof(MadVR).GUID, true); // new MadVR();
							var vmadvr = _madvr as DirectShowLib.IBaseFilter;
							if (vmadvr != null)
							{
								hr = m_graph.AddFilter(vmadvr, "MadVR Video Renderer");
								DsError.ThrowExceptionForHR(hr);
								try
								{
									// Sync madVR's persisted settings with our configuration.
									MadVRSettings msett = new MadVRSettings(_madvr);
									bool smoothMotion = msett.GetBool("smoothMotionEnabled");
									if (smoothMotion != _config.VideoConfig.UseMadVrSmoothMotion)
										msett.SetBool("smoothMotionEnabled", _config.VideoConfig.UseMadVrSmoothMotion);
									if (string.Compare(msett.GetString("smoothMotionMode"), _config.VideoConfig.MadVrSmoothMotionMode, true) != 0)
									{
										bool success = msett.SetString("smoothMotionMode", _config.VideoConfig.MadVrSmoothMotionMode);
									}
									MFNominalRange levels = (MFNominalRange)_config.VideoConfig.NominalRange;
									//string madVrLevelInitial = msett.GetString("levels");
									//switch (levels)
									//{
									//    case MFNominalRange.MFNominalRange_0_255:
									//        msett.SetString("levels", "PC Levels");
									//        break;
									//    case MFNominalRange.MFNominalRange_16_235:
									//        msett.SetString("levels", "TV Levels");
									//        break;
									//}
									//string madVrLevel = msett.GetString("levels");
									//if (string.Compare(madVrLevel, madVrLevelInitial, false) != 0)
									//    _logger.Debug("Changed madVR levels from {0} to {1}", madVrLevelInitial, madVrLevel);
								}
								catch (Exception ex)
								{
									_logger.ErrorException("Error configuring madVR", ex);
								}
							}
						}
						catch (Exception ex)
						{
							_logger.ErrorException("Error adding MadVR filter", ex);
						}
					}
					else // Add default video renderer
					{
						_mPEvr = (DirectShowLib.IBaseFilter)new EnhancedVideoRenderer();
						hr = m_graph.AddFilter(_mPEvr, "EVR");
						DsError.ThrowExceptionForHR(hr);
						//we only need 2 input pins on the EVR if LAV Video isn't used for DVDs, but it doesn't hurt to have them
						InitializeEvr(_mPEvr, _isDvd ? 2 : 1, forcedVideoRenderer);
					}
					try
					{
						_lavvideo = URCOMLoader.Instance.GetObject(typeof(LAVVideo).GUID, true); //new LAVVideo();
						var vlavvideo = _lavvideo as DirectShowLib.IBaseFilter;
						if (vlavvideo != null)
						{
							hr = m_graph.AddFilter(vlavvideo, "LAV Video Decoder");
							DsError.ThrowExceptionForHR(hr);
							ILAVVideoSettings vsett = vlavvideo as ILAVVideoSettings;
							if (vsett != null)
							{
								//we only want to set it for MB
								hr = vsett.SetRuntimeConfig(true);
								DsError.ThrowExceptionForHR(hr);
								_logger.Debug("GPU Model: {0}", VideoConfiguration.GpuModel);
								LAVHWAccel configuredMode = VideoConfigurationUtils.GetHwaMode(_config.VideoConfig, _customEvrPresenterLoaded);
								LAVHWAccel testme = vsett.GetHWAccel();
								_logger.Debug("Current HWA Mode: {0} Desired Mode: {1}", testme, configuredMode);
								if (testme != configuredMode)
								{
									hr = vsett.SetHWAccel(configuredMode);
									DsError.ThrowExceptionForHR(hr);
								}
								// Reconcile LAV's per-codec software decoding flags with our config.
								foreach (string c in DirectShowPlayer.GetLAVVideoCodecs())
								{
									LAVVideoCodec codec = (LAVVideoCodec)Enum.Parse(typeof(LAVVideoCodec), c);
									bool isEnabled = vsett.GetFormatConfiguration(codec);
									if (_config.VideoConfig.EnabledCodecs.Contains(c))
									{
										if (!isEnabled)
										{
											_logger.Debug("Enable support for: {0}", c);
											hr = vsett.SetFormatConfiguration(codec, true);
											DsError.ThrowExceptionForHR(hr);
										}
									}
									else if (isEnabled)
									{
										_logger.Debug("Disable support for: {0}", c);
										hr = vsett.SetFormatConfiguration(codec, false);
										DsError.ThrowExceptionForHR(hr);
									}
								}
								// Same reconciliation for hardware-accelerated codecs.
								foreach (string hwaCodec in DirectShowPlayer.GetLAVVideoHwaCodecs())
								{
									LAVVideoHWCodec codec = (LAVVideoHWCodec)Enum.Parse(typeof(LAVVideoHWCodec), hwaCodec);
									bool hwaIsEnabled = vsett.GetHWAccelCodec(codec);
									if (_config.VideoConfig.HwaEnabledCodecs.Contains(hwaCodec))
									{
										if (!hwaIsEnabled)
										{
											_logger.Debug("Enable HWA support for: {0}", hwaCodec);
											hr = vsett.SetHWAccelCodec(codec, true);
											DsError.ThrowExceptionForHR(hr);
										}
									}
									else if (hwaIsEnabled)
									{
										_logger.Debug("Disable HWA support for: {0}", hwaCodec);
										hr = vsett.SetHWAccelCodec(codec, false);
										DsError.ThrowExceptionForHR(hr);
									}
								}
								if (!vsett.GetDVDVideoSupport())
								{
									_logger.Debug("Enable DVD support.");
									hr = vsett.SetDVDVideoSupport(true);
									DsError.ThrowExceptionForHR(hr);
								}
								int hwaRes = vsett.GetHWAccelResolutionFlags();
								if (hwaRes != _config.VideoConfig.HwaResolution && _config.VideoConfig.HwaResolution > 0)
								{
									_logger.Debug("Change HWA resolution support from {0} to {1}.", hwaRes, _config.VideoConfig.HwaResolution);
									hr = vsett.SetHWAccelResolutionFlags(VideoConfigurationUtils.GetHwaResolutions(_config.VideoConfig));
									DsError.ThrowExceptionForHR(hr);
								}
								hr = vsett.SetTrayIcon(_config.VideoConfig.ShowTrayIcon);
								DsError.ThrowExceptionForHR(hr);
							}
						}
						// Connect: source video pin -> LAV Video input.
						decIn = DsFindPin.ByDirection((DirectShowLib.IBaseFilter)_lavvideo, PinDirection.Input, 0);
						if (decIn != null)
						{
							hr = _filterGraph.ConnectDirect(pins[0], decIn, null);
							DsError.ThrowExceptionForHR(hr);
							decOut = DsFindPin.ByDirection((DirectShowLib.IBaseFilter)_lavvideo, PinDirection.Output, 0);
							if (enableXySubFilter) //this flag indicates whether we should handle subtitle rendering
							{
								var xySubFilterSucceeded = false;
								// Load xySubFilter if configured and if madvr succeeded
								if (_madvr != null || _customEvrPresenterLoaded)
								{
									try
									{
										_xySubFilter = URCOMLoader.Instance.GetObject(typeof(XySubFilter).GUID, true); //new XySubFilter();
										var vxySubFilter = _xySubFilter as DirectShowLib.IBaseFilter;
										if (vxySubFilter != null)
										{
											hr = m_graph.AddFilter(vxySubFilter, "xy-SubFilter");
											DsError.ThrowExceptionForHR(hr);
										}
										xySubFilterSucceeded = true;
									}
									catch (Exception ex)
									{
										_logger.ErrorException("Error adding xy-SubFilter filter", ex);
									}
								}
								// Fallback to xyVsFilter
								if (!xySubFilterSucceeded)
								{
									try
									{
										_xyVsFilter = URCOMLoader.Instance.GetObject(typeof(XYVSFilter).GUID, true); //new XYVSFilter();
										var vxyVsFilter = _xyVsFilter as DirectShowLib.IBaseFilter;
										if (vxyVsFilter != null)
										{
											hr = m_graph.AddFilter(vxyVsFilter, "xy-VSFilter");
											DsError.ThrowExceptionForHR(hr);
										}
									}
									catch (Exception ex)
									{
										_logger.ErrorException("Error adding xy-VSFilter filter", ex);
									}
								}
								if (_xyVsFilter != null) //If using VSFilter
								{
									//insert xyVsFilter b/w LAV Video and the renderer
									rendIn = DsFindPin.ByName((DirectShowLib.IBaseFilter)_xyVsFilter, "Video");
									//connect it to VSFilter
									if (decOut != null && rendIn != null)
									{
										hr = _filterGraph.ConnectDirect(decOut, rendIn, null);
										DsError.ThrowExceptionForHR(hr);
										CleanUpInterface(rendIn);
										CleanUpInterface(decOut);
										rendIn = null;
										decOut = null;
									}
									//grab xyVsFilter's output pin so it can be connected to the renderer
									decOut = DsFindPin.ByDirection((DirectShowLib.IBaseFilter)_xyVsFilter, PinDirection.Output, 0);
								}
							}
							// Pick the renderer's input pin: madVR when loaded, otherwise the EVR.
							if (_madvr != null)
							{
								rendIn = DsFindPin.ByDirection((DirectShowLib.IBaseFilter)_madvr, PinDirection.Input, 0);
							}
							else
							{
								rendIn = DsFindPin.ByDirection((DirectShowLib.IBaseFilter)_mPEvr, PinDirection.Input, 0);
							}
							if (decOut != null && rendIn != null)
							{
								hr = _filterGraph.ConnectDirect(decOut, rendIn, null);
								DsError.ThrowExceptionForHR(hr);
								needsRender = false;
								break;
							}
						}
					}
					catch (Exception ex)
					{
						_logger.ErrorException("Error adding LAV Video filter", ex);
					}
					#endregion
				}
				else if (mediaTypes[m] == DirectShowLib.MediaType.Audio)
				{
					#region Audio
					//we have an audio pin so add a renderer and decoder
					switch (_config.AudioConfig.Renderer)
					{
						case AudioRendererChoice.Reclock:
							try
							{
								_reclockAudioRenderer = new ReclockAudioRenderer();
								var aRenderer = _reclockAudioRenderer as DirectShowLib.IBaseFilter;
								if (aRenderer != null)
								{
									hr = m_graph.AddFilter(aRenderer, "Reclock Audio Renderer");
									DsError.ThrowExceptionForHR(hr);
									useDefaultRenderer = false;
									_logger.Debug("Added reclock audio renderer");
								}
							}
							catch (Exception ex)
							{
								_logger.ErrorException("Error adding reclock filter", ex);
							}
							break;
						case AudioRendererChoice.WASAPI:
							try
							{
								_wasapiAR = URCOMLoader.Instance.GetObject(typeof(MPAudioFilter).GUID, true);
								var aRenderer = _wasapiAR as DirectShowLib.IBaseFilter;
								if (aRenderer != null)
								{
									hr = m_graph.AddFilter(aRenderer, "WASAPI Audio Renderer");
									DsError.ThrowExceptionForHR(hr);
									useDefaultRenderer = false;
									_logger.Debug("Added WASAPI audio renderer");
									// Push all the configured WASAPI renderer settings.
									IMPAudioRendererConfig arSett = aRenderer as IMPAudioRendererConfig;
									if (arSett != null)
									{
										arSett.SetInt(MPARSetting.WASAPI_MODE, (int)AUDCLNT_SHAREMODE.EXCLUSIVE);
										arSett.SetBool(MPARSetting.WASAPI_EVENT_DRIVEN, _config.AudioConfig.UseWasapiEventMode);
										_logger.Debug("Set WASAPI use event mode: {0}", _config.AudioConfig.UseWasapiEventMode);
										arSett.SetString(MPARSetting.SETTING_AUDIO_DEVICE, _config.AudioConfig.AudioDevice);
										_logger.Debug("Set WASAPI audio device: {0}", _config.AudioConfig.AudioDevice);
										SpeakerConfig sc = SpeakerConfig.Stereo; //use stereo for maxium compat
										Enum.TryParse<SpeakerConfig>(_config.AudioConfig.SpeakerLayout, out sc);
										arSett.SetInt(MPARSetting.SPEAKER_CONFIG, (int)sc);
										_logger.Debug("Set WASAPI speaker config: {0}", sc);
										//audSett.SetSpeakerMatchOutput(true);
										arSett.SetBool(MPARSetting.ALLOW_BITSTREAMING, true);
										arSett.SetInt(MPARSetting.USE_FILTERS, _config.AudioConfig.WasapiARFilters);
										_logger.Debug("Set WASAPI filter config: {0}", _config.AudioConfig.WasapiARFilters);
										AC3Encoding a3 = (AC3Encoding)_config.AudioConfig.Ac3EncodingMode;
										arSett.SetInt(MPARSetting.AC3_ENCODING, (int)a3);
										_logger.Debug("Set WASAPI AC3 encoding: {0}", a3);
										arSett.SetBool(MPARSetting.ENABLE_TIME_STRETCHING, _config.AudioConfig.EnableTimeStretching);
										_logger.Debug("Set WASAPI use time stretching: {0}", _config.AudioConfig.EnableTimeStretching);
										arSett.SetInt(MPARSetting.OUTPUT_BUFFER_LENGTH, _config.AudioConfig.OutputBufferSize);
										_logger.Debug("Set WASAPI buffer: {0}", _config.AudioConfig.OutputBufferSize);
									}
								}
							}
							catch (Exception ex)
							{
								_logger.ErrorException("Error adding WASAPI audio filter", ex);
							}
							break;
					}
					if (useDefaultRenderer)
					{
						AddDefaultAudioRenderer();
					}
					try
					{
						_lavaudio = URCOMLoader.Instance.GetObject(typeof(LAVAudio).GUID, true); // new LAVAudio();
						var vlavaudio = _lavaudio as DirectShowLib.IBaseFilter;
						if (vlavaudio != null)
						{
							_logger.Debug("Add LAVAudio to the graph.");
							hr = m_graph.AddFilter(vlavaudio, "LAV Audio Decoder");
							DsError.ThrowExceptionForHR(hr);
							ILAVAudioSettings asett = vlavaudio as ILAVAudioSettings;
							if (asett != null)
							{
								_logger.Debug("Enable LAVAudio Runtime Config");
								//we only want to set it for MB
								hr = asett.SetRuntimeConfig(true);
								DsError.ThrowExceptionForHR(hr);
								// Reconcile LAV Audio's per-codec flags with our config.
								foreach (string c in DirectShowPlayer.GetLAVAudioCodecs())
								{
									LAVAudioCodec codec = (LAVAudioCodec)Enum.Parse(typeof(LAVAudioCodec), c);
									bool isEnabled = asett.GetFormatConfiguration(codec);
									if (_config.AudioConfig.EnabledCodecs.Contains(c))
									{
										if (!isEnabled)
										{
											_logger.Debug("Enable support for: {0}", c);
											hr = asett.SetFormatConfiguration(codec, true);
											DsError.ThrowExceptionForHR(hr);
										}
									}
									else if (isEnabled)
									{
										_logger.Debug("Disable support for: {0}", c);
										hr = asett.SetFormatConfiguration(codec, false);
										DsError.ThrowExceptionForHR(hr);
									}
								}
								//enable/disable bitstreaming
								if ((_config.AudioConfig.AudioBitstreaming & BitstreamChoice.SPDIF) == BitstreamChoice.SPDIF)
								{
									_logger.Debug("Enable LAVAudio S/PDIF bitstreaming");
									hr = asett.SetBitstreamConfig(LAVBitstreamCodec.AC3, true);
									DsError.ThrowExceptionForHR(hr);
									hr = asett.SetBitstreamConfig(LAVBitstreamCodec.DTS, true);
									DsError.ThrowExceptionForHR(hr);
								}
								if ((_config.AudioConfig.AudioBitstreaming & BitstreamChoice.HDMI) == BitstreamChoice.HDMI)
								{
									_logger.Debug("Enable LAVAudio HDMI bitstreaming");
									hr = asett.SetBitstreamConfig(LAVBitstreamCodec.EAC3, true);
									DsError.ThrowExceptionForHR(hr);
									hr = asett.SetBitstreamConfig(LAVBitstreamCodec.TRUEHD, true);
									DsError.ThrowExceptionForHR(hr);
									hr = asett.SetBitstreamConfig(LAVBitstreamCodec.DTSHD, true);
									DsError.ThrowExceptionForHR(hr);
								}
								if (_config.AudioConfig.Delay > 0)
								{
									_logger.Debug("Set LAVAudio audio delay: {0}", _config.AudioConfig.Delay);
									hr = asett.SetAudioDelay(true, _config.AudioConfig.Delay);
									DsError.ThrowExceptionForHR(hr);
								}
								_logger.Debug("Set LAVAudio auto AV Sync: {0}", _config.AudioConfig.EnableAutoSync);
								hr = asett.SetAutoAVSync(_config.AudioConfig.EnableAutoSync);
								DsError.ThrowExceptionForHR(hr);
								_logger.Debug("Set LAVAudio Expand61: {0}", _config.AudioConfig.Expand61);
								hr = asett.SetExpand61(_config.AudioConfig.Expand61);
								DsError.ThrowExceptionForHR(hr);
								_logger.Debug("Set LAVAudio ExpandMono: {0}", _config.AudioConfig.ExpandMono);
								hr = asett.SetExpandMono(_config.AudioConfig.ExpandMono);
								DsError.ThrowExceptionForHR(hr);
								_logger.Debug("Set LAVAudio ConvertToStandardLayout: {0}", _config.AudioConfig.ConvertToStandardLayout);
								hr = asett.SetOutputStandardLayout(_config.AudioConfig.ConvertToStandardLayout);
								DsError.ThrowExceptionForHR(hr);
								_logger.Debug("Set LAVAudio audio EnableDRC: {0}", _config.AudioConfig.EnableDRC);
								hr = asett.SetDRC(_config.AudioConfig.EnableDRC, _config.AudioConfig.DRCLevel);
								DsError.ThrowExceptionForHR(hr);
								_logger.Debug("Set LAVAudio audio ShowTrayIcon: {0}", _config.AudioConfig.ShowTrayIcon);
								hr = asett.SetTrayIcon(_config.AudioConfig.ShowTrayIcon);
								DsError.ThrowExceptionForHR(hr);
								bool mixingEnabled = asett.GetMixingEnabled();
								if (mixingEnabled != _config.AudioConfig.EnablePCMMixing)
								{
									_logger.Debug("Set LAVAudio EnablePCMMixing: {0}", _config.AudioConfig.EnablePCMMixing);
									hr = asett.SetMixingEnabled(!mixingEnabled);
									DsError.ThrowExceptionForHR(hr);
								}
								if (_config.AudioConfig.EnablePCMMixing)
								{
									_logger.Debug("Set LAVAudio MixingSetting: {0}", _config.AudioConfig.MixingSetting);
									LAVAudioMixingFlag amf = (LAVAudioMixingFlag)_config.AudioConfig.MixingSetting;
									hr = asett.SetMixingFlags(amf);
									DsError.ThrowExceptionForHR(hr);
									_logger.Debug("Set LAVAudio MixingEncoding: {0}", _config.AudioConfig.MixingEncoding);
									LAVAudioMixingMode amm = (LAVAudioMixingMode)Enum.Parse(typeof(LAVAudioMixingMode), _config.AudioConfig.MixingEncoding);
									hr = asett.SetMixingMode(amm);
									DsError.ThrowExceptionForHR(hr);
									_logger.Debug("Set LAVAudio MixingLayout: {0}", _config.AudioConfig.MixingLayout);
									LAVAudioMixingLayout aml = (LAVAudioMixingLayout)Enum.Parse(typeof(LAVAudioMixingLayout), _config.AudioConfig.MixingLayout);
									hr = asett.SetMixingLayout(aml);
									DsError.ThrowExceptionForHR(hr);
									_logger.Debug("Set LAVAudio LfeMixingLevel: {0} CenterMixingLevel: {1} SurroundMixingLevel: {2}", _config.AudioConfig.LfeMixingLevel, _config.AudioConfig.CenterMixingLevel, _config.AudioConfig.SurroundMixingLevel);
									int lfe, center, surround;
									//convert to the # that LAV Audio expects
									lfe = (int)(_config.AudioConfig.LfeMixingLevel * 10000.01);
									center = (int)(_config.AudioConfig.CenterMixingLevel * 10000.01);
									surround = (int)(_config.AudioConfig.SurroundMixingLevel * 10000.01);
									hr = asett.SetMixingLevels(center, surround, lfe);
									DsError.ThrowExceptionForHR(hr);
								}
								// Dump the resulting bitstream configuration for diagnostics.
								for (int i = 0; i < (int)LAVBitstreamCodec.NB; i++)
								{
									LAVBitstreamCodec codec = (LAVBitstreamCodec)i;
									bool isEnabled = asett.GetBitstreamConfig(codec);
									_logger.Log(LogSeverity.Debug, "{0} bitstreaming: {1}", codec, isEnabled);
								}
							}
						}
					}
					catch (Exception ex)
					{
						_logger.ErrorException("Error adding LAV Audio filter", ex);
					}
					// Connect: source audio pin -> LAV Audio -> audio renderer.
					decIn = DsFindPin.ByDirection((DirectShowLib.IBaseFilter)_lavaudio, PinDirection.Input, 0);
					if (decIn != null)
					{
						hr = _filterGraph.ConnectDirect(pins[0], decIn, null);
						if (hr < 0) //LAV cannot handle this audio type
						{
							_logger.Warn("LAV Audio could not decode audio media type.");
						}
						else
						{
							//DsError.ThrowExceptionForHR(hr);
							decOut = DsFindPin.ByDirection((DirectShowLib.IBaseFilter)_lavaudio, PinDirection.Output, 0);
						}
						rendIn = DsFindPin.ByDirection(AudioRenderer, PinDirection.Input, 0);
						if (decOut != null && rendIn != null)
						{
							hr = _filterGraph.ConnectDirect(decOut, rendIn, null);
							if (hr == -2004287474 && _wasapiAR != null) //AUDCLNT_E_EXCLUSIVE_MODE_NOT_ALLOWED
							{
								// Exclusive mode was refused: drop to shared mode and retry once.
								IMPAudioRendererConfig arSett = _wasapiAR as IMPAudioRendererConfig;
								if (arSett != null)
								{
									arSett.SetInt(MPARSetting.WASAPI_MODE, (int)AUDCLNT_SHAREMODE.SHARED);
									_logger.Warn("WASAPI AR failed to connected in exclusive mode, check device properties");
									hr = _filterGraph.ConnectDirect(decOut, rendIn, null);
								}
							}
							DsError.ThrowExceptionForHR(hr);
							needsRender = false;
							break;
						}
					}
					#endregion
				}
				else if (mediaTypes[m] == SubtitleMediaType /*DirectShowLib.MediaType.Subtitle*/)
				{
					#region subtitles
					if (_xySubFilter != null)
					{
						rendIn = DsFindPin.ByDirection((DirectShowLib.IBaseFilter)_xySubFilter, PinDirection.Input, 0);
					}
					else if (_xyVsFilter != null)
					{
						rendIn = DsFindPin.ByName((DirectShowLib.IBaseFilter)_xyVsFilter, "Input");
					}
					if (rendIn != null)
					{
						hr = _filterGraph.ConnectDirect(pins[0], rendIn, null);
						DsError.ThrowExceptionForHR(hr);
						needsRender = false;
						break;
					}
					#endregion
				}
				else if (mediaTypes[m] == DvdSubpictureMediaType)
				{
					#region DVD Subpicture
					if (_lavvideo != null)
					{
						rendIn = DsFindPin.ByName((DirectShowLib.IBaseFilter)_lavvideo, "Subtitle Input");
						if (rendIn != null)
						{
							hr = _filterGraph.ConnectDirect(pins[0], rendIn, null);
							DsError.ThrowExceptionForHR(hr);
							needsRender = false;
							break;
						}
					}
					#endregion
				}
			}
			finally
			{
				// Release any pin interfaces acquired during this media-type attempt.
				CleanUpInterface(decIn);
				CleanUpInterface(decOut);
				CleanUpInterface(rendIn);
			}
		}
		if (needsRender)
		{
			// Explicit wiring failed for this pin: let the graph render it itself,
			// restricted to renderers already present in the graph.
			if (_filterGraph.RenderEx(pins[0], AMRenderExFlags.RenderToExistingRenderers, IntPtr.Zero) >= 0)
				pinsRendered++;
		}
		else
			pinsRendered++;
		Marshal.ReleaseComObject(pins[0]);
	}
	Marshal.ReleaseComObject(pEnum);
	if (pinsRendered == 0)
	{
		throw new Exception("Could not render any streams from the source Uri");
	}
	_logger.Debug("Completed RenderStreams with {0} pins.", pinsRendered);
	if (_item.IsVideo)
	{
		SetVideoWindow();
		if (_mPEvr != null)
			SetEvrVppMode(_mPEvr);
	}
}
/// <summary>
/// Builds a DirectShow capture graph for the given device:
/// camera -> (optional crossbar routing) -> AVI Decompressor -> Color Space Converter -> Sample Grabber.
/// The capture pin is forced to 720x576 YUY2 @ 25 fps and the grabber is configured
/// for 720x576 ARGB32 frames, delivering buffers back to this instance via SetCallback.
/// </summary>
/// <param name="dsDevice">Capture device whose moniker is added to the graph as the source filter.</param>
private void BuildGraph(DirectShowLib.DsDevice dsDevice)
{
    int hr = 0;
    pGraph = new FilterGraph() as IFilterGraph2;

    // Capture-graph builder is only used to locate the crossbar interface.
    ICaptureGraphBuilder2 pBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

    try
    {
        hr = pBuilder.SetFiltergraph(pGraph);
        DsError.ThrowExceptionForHR(hr);

        // Add the camera source filter from its device moniker.
        IBaseFilter camera;
        //hr = pGraph.FindFilterByName(dsDevice.Name, out camera);
        hr = ((IFilterGraph2)pGraph).AddSourceFilterForMoniker(dsDevice.Mon, null, dsDevice.Name, out camera);
        DsError.ThrowExceptionForHR(hr);
        hr = pGraph.AddFilter(camera, "camera");
        DsError.ThrowExceptionForHR(hr);

        // Force the capture pin format: 720x576 YUY2, 25 fps
        // (AvgTimePerFrame is in 100 ns units: 400000 => 25 fps).
        AMMediaType pmt = new AMMediaType();
        pmt.majorType = MediaType.Video;
        pmt.subType = MediaSubType.YUY2;
        pmt.formatType = FormatType.VideoInfo;
        pmt.fixedSizeSamples = true;
        pmt.formatSize = 88;
        pmt.sampleSize = 829440; // 720 * 576 * 2 bytes (YUY2 is 16 bpp)
        pmt.temporalCompression = false;

        VideoInfoHeader format = new VideoInfoHeader();
        format.SrcRect = new DsRect();
        format.TargetRect = new DsRect();
        format.BitRate = 20736000;
        format.AvgTimePerFrame = 400000;
        format.BmiHeader = new BitmapInfoHeader();
        format.BmiHeader.Size = 40;
        format.BmiHeader.Width = 720;
        format.BmiHeader.Height = 576;
        format.BmiHeader.Planes = 1;
        // NOTE(review): BitCount = 24 looks inconsistent with YUY2 (16 bpp) and with the
        // buffer sizes below; left as-is since some drivers ignore it — verify against device.
        format.BmiHeader.BitCount = 24;
        format.BmiHeader.Compression = 844715353; // FourCC 'YUY2'
        // BUGFIX: was 827440 (typo) — image size must match sampleSize = 720 * 576 * 2.
        format.BmiHeader.ImageSize = 829440;

        pmt.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(format));
        Marshal.StructureToPtr(format, pmt.formatPtr, false);

        hr = ((IAMStreamConfig)DsFindPin.ByCategory(camera, PinCategory.Capture, 0)).SetFormat(pmt);
        //hr = ((IAMStreamConfig)GetPin(pUSB20Camera, "Capture")).SetFormat(pmt);
        DsUtils.FreeAMMediaType(pmt);
        DsError.ThrowExceptionForHR(hr);

        // If the device exposes a crossbar (analog capture), route the composite
        // video input to the video-decoder output.
        IAMCrossbar crossBar = null;
        object dummy;
        hr = pBuilder.FindInterface(PinCategory.Capture, MediaType.Video, camera, typeof(IAMCrossbar).GUID, out dummy);
        if (hr >= 0)
        {
            crossBar = (IAMCrossbar)dummy;
            int oPin, iPin;
            int ovLink, ivLink;
            ovLink = ivLink = 0;
            crossBar.get_PinCounts(out oPin, out iPin);

            int pIdxRel;
            PhysicalConnectorType physicalConType;
            for (int i = 0; i < iPin; i++)
            {
                crossBar.get_CrossbarPinInfo(true, i, out pIdxRel, out physicalConType);
                if (physicalConType == PhysicalConnectorType.Video_Composite)
                    ivLink = i;
            }
            for (int i = 0; i < oPin; i++)
            {
                crossBar.get_CrossbarPinInfo(false, i, out pIdxRel, out physicalConType);
                if (physicalConType == PhysicalConnectorType.Video_VideoDecoder)
                    ovLink = i;
            }

            // BUGFIX: IAMCrossbar.Route returns an HRESULT and never throws, so the
            // previous try/catch around it was dead code; check the result explicitly.
            hr = crossBar.Route(ovLink, ivLink);
            if (hr < 0)
                throw new Exception("Failed to get IAMCrossbar");
        }

        // Add AVI Decompressor.
        IBaseFilter pAVIDecompressor = (IBaseFilter)new AVIDec();
        hr = pGraph.AddFilter(pAVIDecompressor, "AVI Decompressor");
        DsError.ThrowExceptionForHR(hr); // BUGFIX: this hr was previously ignored

        // Add color space converter (produces RGB for the sample grabber).
        IBaseFilter pColorSpaceConverter = (IBaseFilter)new Colour();
        hr = pGraph.AddFilter(pColorSpaceConverter, "Color space converter");
        DsError.ThrowExceptionForHR(hr);

        // Connect camera -> AVI Decompressor.
        hr = pGraph.ConnectDirect(
            DsFindPin.ByCategory(camera, PinCategory.Capture, 0),
            DsFindPin.ByName(pAVIDecompressor, "XForm In"),
            null);
        DsError.ThrowExceptionForHR(hr);

        // Connect AVI Decompressor -> color space converter.
        hr = pGraph.ConnectDirect(
            DsFindPin.ByName(pAVIDecompressor, "XForm Out"),
            DsFindPin.ByName(pColorSpaceConverter, "Input"),
            null);
        DsError.ThrowExceptionForHR(hr);

        // Add SampleGrabber.
        //IBaseFilter pSampleGrabber = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(CLSID_SampleGrabber));
        //hr = pGraph.AddFilter(pSampleGrabber, "SampleGrabber");
        IBaseFilter sampleGrabber = new SampleGrabber() as IBaseFilter;
        hr = pGraph.AddFilter(sampleGrabber, "Sample grabber");
        DsError.ThrowExceptionForHR(hr);

        // Configure the samplegrabber: 720x576 ARGB32 (4 bytes/pixel => 1658880 bytes/frame).
        AMMediaType pSampleGrabber_pmt = new AMMediaType();
        pSampleGrabber_pmt.majorType = MediaType.Video;
        pSampleGrabber_pmt.subType = MediaSubType.ARGB32;
        pSampleGrabber_pmt.formatType = FormatType.VideoInfo;
        pSampleGrabber_pmt.fixedSizeSamples = true;
        pSampleGrabber_pmt.formatSize = 88;
        pSampleGrabber_pmt.sampleSize = 1658880;
        pSampleGrabber_pmt.temporalCompression = false;

        VideoInfoHeader pSampleGrabber_format = new VideoInfoHeader();
        pSampleGrabber_format.SrcRect = new DsRect();
        pSampleGrabber_format.SrcRect.right = 720;
        pSampleGrabber_format.SrcRect.bottom = 576;
        pSampleGrabber_format.TargetRect = new DsRect();
        pSampleGrabber_format.TargetRect.right = 720;
        pSampleGrabber_format.TargetRect.bottom = 576;
        pSampleGrabber_format.BitRate = 331776000;
        pSampleGrabber_format.AvgTimePerFrame = 400000;
        pSampleGrabber_format.BmiHeader = new BitmapInfoHeader();
        pSampleGrabber_format.BmiHeader.Size = 40;
        pSampleGrabber_format.BmiHeader.Width = 720;
        pSampleGrabber_format.BmiHeader.Height = 576;
        pSampleGrabber_format.BmiHeader.Planes = 1;
        pSampleGrabber_format.BmiHeader.BitCount = 32;
        pSampleGrabber_format.BmiHeader.ImageSize = 1658880;

        pSampleGrabber_pmt.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(pSampleGrabber_format));
        Marshal.StructureToPtr(pSampleGrabber_format, pSampleGrabber_pmt.formatPtr, false);

        hr = ((ISampleGrabber)sampleGrabber).SetMediaType(pSampleGrabber_pmt);
        // BUGFIX: free the media type's format block (was leaked before).
        DsUtils.FreeAMMediaType(pSampleGrabber_pmt);
        DsError.ThrowExceptionForHR(hr);

        // Connect color space converter -> SampleGrabber.
        //hr = pGraph.ConnectDirect(GetPin(pMJPGDecompressor, "XForm Out"), GetPin(pSampleGrabber, "Input"), null);
        hr = pGraph.ConnectDirect(
            DsFindPin.ByName(pColorSpaceConverter, "XForm Out"),
            DsFindPin.ByName(sampleGrabber, "Input"),
            null);
        DsError.ThrowExceptionForHR(hr);

        // Deliver frames to this instance (mode 1 = BufferCB callback).
        hr = ((ISampleGrabber)sampleGrabber).SetCallback(this, 1);
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // BUGFIX: the capture-graph builder COM object was never released
        // (the original finally block was empty).
        // NOTE(review): pins returned inline by DsFindPin.* are still leaked;
        // holding and releasing each one is a larger refactor — TODO.
        if (pBuilder != null)
            Marshal.ReleaseComObject(pBuilder);
    }
}
/// <summary>
/// Produces a usable string that describes the MediaType object.
/// </summary>
/// <param name="pmt">Media type whose fields are rendered as text.</param>
/// <returns>Concatenation of MajorType + SubType + FormatType + Fixed + Temporal + SampleSize.ToString</returns>
public static string AMMediaTypeToString(DirectShowLib.AMMediaType pmt)
{
    // Render each field separately, then join with single spaces
    // (same output as the original single string.Format call).
    string major = MediaTypeToString(pmt.majorType);
    string sub = MediaSubTypeToString(pmt.subType);
    string fmt = MediaFormatTypeToString(pmt.formatType);
    string fixedPart = pmt.fixedSizeSamples ? "FixedSamples" : "NotFixedSamples";
    string temporalPart = pmt.temporalCompression ? "temporalCompression" : "NottemporalCompression";

    return string.Format("{0} {1} {2} {3} {4} {5}",
        major, sub, fmt, fixedPart, temporalPart, pmt.sampleSize.ToString());
}
/// <summary>
/// Checks if a pin is processing (i.e. would accept) a video stream.
/// </summary>
/// <param name="pin">Pin to query.</param>
/// <returns>true when the pin accepts a media type with major type Video; otherwise false.</returns>
bool CheckVideo(DirectShowLib.IPin pin)
{
    // Propose a media type with only the major type set; QueryAccept
    // returns S_OK (0) when the pin can accept it.
    AMMediaType mt = new AMMediaType();
    mt.majorType = DirectShowLib.MediaType.Video;

    // IDIOM: return the comparison directly instead of
    // "if (cond) return true; else return false;".
    return pin.QueryAccept(mt) == 0;
}