public AudioCapability(FilterInfo fi) : base(fi) {
    AutoPlayLocal = false; // Don't usually want to play your own audio (can create loopback)
    RtpStream.FirstFrameReceived += new RtpStream.FirstFrameReceivedEventHandler(RtpStream_FirstFrameReceived);
}
public frmAudioSettings(FilterInfo fi, frmAVDevices frmAV) {
    InitializeComponent();

    Debug.Assert(frmAV != null);
    this.frmAV = frmAV;
    this.sourceFilter = fi;

    ac = new AudioCapability(fi);
    ac.SetLogger(new AVLogger(Log));
    ac.ActivateMicrophone();
}
protected CaptureGraph(FilterInfo fiSource) {
    try {
        // Fgm initialization
        fgm = new FilgraphManagerClass();
        iFG = (IFilterGraph)fgm;
        iGB = (IGraphBuilder)fgm;
        rotID = FilterGraph.AddToRot(iGB);

        // Create source filter and initialize it
        source = (SourceFilter)Filter.CreateFilter(fiSource);
        iGB.AddFilter(source.BaseFilter, source.FriendlyName);
        source.AddedToGraph(fgm);
    }
    catch (Exception) {
        Cleanup();
        throw;
    }
}
public CapabilityDeviceWithWindow(FilterInfo fi) : base(fi) {}
public virtual void AddCompressor(FilterInfo fiCompressor) {
    RemoveCompressor();

    compressor = (Compressor)Filter.CreateFilter(fiCompressor);
    iGB.AddFilter(compressor.BaseFilter, compressor.FriendlyName);
    compressor.AddedToGraph(fgm); // Chooses input pin

    Dictionary<string, object> args = new Dictionary<string, object>();
    args.Add("SourceFilter", source);
    compressor.PreConnectConfig(args);

    try {
        iGB.Connect(source.OutputPin, compressor.InputPin);
    }
    catch (COMException) {
        RemoveCompressor();
        throw;
    }
}
/// <summary>
/// These graphs can be shared by audio and video capabilities. We only create one instance per source filter.
/// Application code should normally call this instead of using the constructor. A side effect of calling
/// on a running graph is that the graph will be stopped. It is the caller's responsibility to call Run if needed.
/// </summary>
/// <param name="fi"></param>
/// <returns></returns>
public static DVCaptureGraph GetInstance(FilterInfo fi) {
    if (!instances.ContainsKey(fi)) {
        instances.Add(fi, new DVCaptureGraph(fi));
    }
    else {
        instances[fi].Stop();
        instances[fi].RefCount++;
    }
    return instances[fi];
}
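// A minimal usage sketch for the shared-instance pattern above, assuming a FilterInfo for a
// DV device (fiDVDevice is hypothetical). Per the summary, GetInstance stops a graph that is
// already running, so the caller restarts it when capture should continue.
DVCaptureGraph dvGraph = DVCaptureGraph.GetInstance(fiDVDevice);
dvGraph.Run(); // caller's responsibility, per the summary on GetInstance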
public Compressor(FilterInfo fi) : base(fi){}
public virtual void AddAudioRenderer(FilterInfo fiRenderer) {
    RemoveRenderer();

    renderer = (Renderer)Filter.CreateFilter(fiRenderer);
    iGB.AddFilter(renderer.BaseFilter, renderer.FriendlyName);
    renderer.AddedToGraph(fgm); // Chooses input pin

    IPin pin = compressor == null ? source.OutputPin : compressor.OutputPin;

    try {
        iGB.Connect(pin, renderer.InputPin);
    }
    catch (COMException) {
        RemoveRenderer();
        throw;
    }
}
public AudioCaptureGraph(FilterInfo fiSource) : base(fiSource) { }
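// A sketch of the typical build order under these APIs: construct the capture graph for a
// microphone, wire in a compressor and a renderer via the AddCompressor/AddAudioRenderer
// methods shown above, then start it. fiMicrophone is a hypothetical FilterInfo, the first
// filter of each category is chosen only for brevity, EnumerateFilters is assumed to be a
// static member of Filter (see its definition below), and Run() is assumed to exist on the
// graph as referenced in the GetInstance summary.
AudioCaptureGraph acg = new AudioCaptureGraph(fiMicrophone);
acg.AddCompressor(Filter.EnumerateFilters(AudioCompressor.CategoryGuid)[0]);
acg.AddAudioRenderer(Filter.EnumerateFilters(AudioRenderer.CategoryGuid)[0]);
acg.Run();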
public NetworkRenderer(FilterInfo fi) : base(fi){}
public Filter(FilterInfo fi) { this.fi = fi; filter = InstantiateFilter(); }
public Renderer(FilterInfo fi) : base(fi){}
public AudioRenderer(FilterInfo fi) : base(fi) {
    if (fi.Category != AudioRenderer.CategoryGuid) {
        Debug.Assert(false);
        throw new ArgumentOutOfRangeException("fi.Category", fi.Category, Strings.UnexpectedFilterCategory);
    }
}
public static FilterInfo[] EnumerateFilters(Guid filterCategory) {
    ArrayList filters = new ArrayList();

    IEnumMoniker iEnum;
    ICreateDevEnum iDE = CreateDeviceEnumClass.CreateInstance();
    iDE.CreateClassEnumerator(ref filterCategory, out iEnum, 0);

    if (iEnum != null) {
        IMoniker iMon;
        uint fetched;

        // To enter loop
        iEnum.RemoteNext(1, out iMon, out fetched);

        while (fetched == 1) {
            string monikerName;
            iMon.GetDisplayName(null, null, out monikerName);

            object oPropertyBag;
            iMon.RemoteBindToStorage(null, null, ref IID_IPropertyBag, out oPropertyBag);
            IPropertyBag iPB = (IPropertyBag)oPropertyBag;

            try {
                object oFriendlyName;
                iPB.RemoteRead("FriendlyName", out oFriendlyName, null, 0, null);
                filters.Add(new FilterInfo(monikerName, ((string)oFriendlyName).Trim(), filterCategory));
            }
            catch { }

            iEnum.RemoteNext(1, out iMon, out fetched);
        }
    }

    // Strongly type the data
    FilterInfo[] ret = new FilterInfo[filters.Count];
    filters.CopyTo(ret);
    return ret;
}
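// A usage sketch for the enumerator above: list the display names of all audio capture
// devices. It assumes EnumerateFilters is reachable as Filter.EnumerateFilters and that
// AudioSource.CategoryGuid is the audio capture category GUID used by CreateFilter below.
FilterInfo[] microphones = Filter.EnumerateFilters(AudioSource.CategoryGuid);
foreach (FilterInfo fi in microphones) {
    Debug.WriteLine(fi.DisplayName);
}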
public VideoCompressor(FilterInfo fi) : base(fi) {
    if (fi.Category != VideoCompressor.CategoryGuid) {
        Debug.Assert(false);
        throw new ArgumentOutOfRangeException("fi.Category", fi.Category, Strings.UnexpectedFilterCategory);
    }
}
public static Filter CreateFilter(FilterInfo fi) {
    // Capture devices
    if (fi.Category == AudioSource.CategoryGuid)
        return new AudioSource(fi);

    if (fi.Category == VideoSource.CategoryGuid) {
        // A special case for DV with audio
        if (DVSource.IsDVSourceWithAudio(fi)) {
            return new DVSource(fi);
        }
        // Special case for BlackMagic hardware
        if (fi.Name == "Decklink Video Capture") {
            return new BlackMagicSource(fi);
        }
        return new VideoSource(fi);
    }

    // Compressors
    if (fi.Category == AudioCompressor.CategoryGuid) {
        if (fi.Name == "Opus Encoder") {
            return new OpusAudioCompressor(fi);
        }
        return new AudioCompressor(fi);
    }

    if (fi.Category == VideoCompressor.CategoryGuid)
        return new VideoCompressor(fi);

    // Renderers
    if (fi.Category == AudioRenderer.CategoryGuid)
        return new AudioRenderer(fi);

    throw new Exception(string.Format(CultureInfo.CurrentCulture, Strings.UnknownFilterCategory, fi.Category.ToString()));
}
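// Sketch of the factory in use: enumerate video capture devices and let CreateFilter pick
// the specialized wrapper (DVSource, BlackMagicSource, or plain VideoSource). Taking
// cameras[0] is only for brevity; a real caller would validate the array first.
FilterInfo[] cameras = Filter.EnumerateFilters(VideoSource.CategoryGuid);
if (cameras.Length > 0) {
    Filter camera = Filter.CreateFilter(cameras[0]);
    Debug.WriteLine(camera.FriendlyName);
}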
public CapabilityDevice(FilterInfo fi) : base() { this.fi = fi; name = Conference.LocalParticipant.Name + " - " + fi.DisplayName; }
/// <summary>
/// Makes sure each FilterInfo.DisplayName is unique
/// by adding a number onto the end of duplicates
/// </summary>
public static FilterInfo[] UniquifyDisplayNames(FilterInfo[] fis) {
    // The last one would have nothing to compare against
    for (int i = 0; i < fis.Length - 1; i++) {
        // The uniquifying number
        int id = 0;

        // No need to compare against yourself or anyone before you
        for (int j = i + 1; j < fis.Length; j++) {
            if (fis[i].DisplayName == fis[j].DisplayName) {
                fis[j].DisplayName = string.Format(CultureInfo.CurrentCulture, "{0} [{1}]", fis[j].DisplayName, ++id);
            }
        }
    }
    return fis;
}
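// Sketch of the renaming behavior above: if two devices both report "USB Microphone", they
// come back as "USB Microphone" and "USB Microphone [1]". UniquifyDisplayNames is assumed
// here to live on the same class as EnumerateFilters.
FilterInfo[] mics = Filter.UniquifyDisplayNames(Filter.EnumerateFilters(AudioSource.CategoryGuid));
foreach (FilterInfo fi in mics) {
    Debug.WriteLine(fi.DisplayName);
}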
public VideoCaptureGraph(FilterInfo fiSource) : base(fiSource) { }
public SourceFilter(FilterInfo fi) : base(fi){}
public virtual void AddCompressor(FilterInfo fiCompressor) {
    RemoveCompressor();

    compressor = (Compressor)Filter.CreateFilter(fiCompressor);
    iGB.AddFilter(compressor.BaseFilter, compressor.FriendlyName);
    compressor.AddedToGraph(fgm); // Chooses input pin

    try {
        iGB.Connect(source.OutputPin, compressor.InputPin);
    }
    catch (COMException) {
        RemoveCompressor();
        throw;
    }
}
/// <summary>
/// The compressor could be audio or video. If there is already an existing compressor of the
/// given type, remove it and any downstream filters before adding the given compressor.
/// The graph may not yet contain a DV Splitter, in which case, add the splitter first.
/// </summary>
/// <param name="fiCompressor"></param>
public override void AddCompressor(FilterInfo fiCompressor) {
    Stop();

    if (!AddDVSplitter()) {
        throw new ApplicationException("Failed to add DV Splitter Filter");
    }

    if (fiCompressor.Category.Equals(Filter.CLSID_AudioCompressorCategory)) {
        if (audioCompressor != null) {
            RemoveFiltersDownstreamFromPin(splitterAudioOut);
            RemoveAndDispose(audioCompressor);
            audioCompressor = null;
        }

        audioCompressor = (Compressor)Filter.CreateFilter(fiCompressor);
        iGB.AddFilter(audioCompressor.BaseFilter, audioCompressor.FriendlyName);
        audioCompressor.AddedToGraph(fgm);
        compressor = audioCompressor;

        _AMMediaType[] mts = Pin.GetMediaTypes(splitterAudioOut); // Returns one DVInfo mt.
        foreach (_AMMediaType mt in mts) {
            Debug.WriteLine(MediaType.Dump(mt));
        }

        try {
            iGB.Connect(this.splitterAudioOut, audioCompressor.InputPin);
        }
        catch (COMException) {
            RemoveAndDispose(audioCompressor);
            audioCompressor = null;
            throw;
        }
    }
    else if (fiCompressor.Category.Equals(Filter.CLSID_VideoCompressorCategory)) {
        if (videoCompressor != null) {
            RemoveFiltersDownstreamFromPin(splitterVideoOut);
            RemoveAndDispose(videoCompressor);
            videoCompressor = null;
        }

        videoCompressor = (Compressor)Filter.CreateFilter(fiCompressor);
        iGB.AddFilter(videoCompressor.BaseFilter, videoCompressor.FriendlyName);
        videoCompressor.AddedToGraph(fgm);
        compressor = videoCompressor;

        try {
            iGB.Connect(this.splitterVideoOut, videoCompressor.InputPin);
        }
        catch (COMException) {
            RemoveAndDispose(videoCompressor);
            videoCompressor = null;
            throw;
        }
    }
}
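// Sketch: a DV camcorder feeds both an audio and a video capability through one shared graph.
// The override above routes each compressor to the matching DV Splitter output pin based on
// its category. fiDVCamera is hypothetical, the [0] picks are only for brevity, and it is
// assumed that AudioCompressor.CategoryGuid / VideoCompressor.CategoryGuid match the
// Filter.CLSID_*CompressorCategory values tested inside AddCompressor.
DVCaptureGraph dv = DVCaptureGraph.GetInstance(fiDVCamera);
dv.AddCompressor(Filter.EnumerateFilters(AudioCompressor.CategoryGuid)[0]);
dv.AddCompressor(Filter.EnumerateFilters(VideoCompressor.CategoryGuid)[0]);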
public DVCaptureGraph(FilterInfo fiSource) : base(fiSource) { myFilterInfo = fiSource; RefCount = 1; }
public frmAudioSettingsOpus(FilterInfo fi, frmAVDevices frmAV): base(fi, frmAV) { InitializeComponent(); }
/// <summary>
/// Finish building the Graph for local DV audio playback. If there is already an audio compressor, just
/// add the renderer, and connect. Otherwise add and connect the DV Splitter and the renderer.
/// </summary>
/// <param name="fiRenderer"></param>
public override void AddAudioRenderer(FilterInfo fiRenderer) {
    RemoveAndDispose(audioRenderer);

    // Add selected renderer
    audioRenderer = (Renderer)Filter.CreateFilter(fiRenderer);
    iGB.AddFilter(audioRenderer.BaseFilter, audioRenderer.FriendlyName);
    audioRenderer.AddedToGraph(fgm);

    if (!AddDVSplitter()) {
        RemoveAndDispose(audioRenderer);
        throw new ApplicationException("Failed to add DV Splitter Filter");
    }

    IPin pin = audioCompressor == null ? splitterAudioOut : audioCompressor.OutputPin;

    // Connect
    try {
        iGB.Connect(pin, audioRenderer.InputPin);
    }
    catch (COMException) {
        RemoveAndDispose(audioRenderer);
        throw;
    }

    renderer = audioRenderer;
}
public NetworkSource(FilterInfo fi) : base(fi){}
protected CaptureGraph(FilterInfo fiSource) {
    try {
        // Fgm initialization
        fgm = new FilgraphManagerClass();
        iFG = (IFilterGraph)fgm;
        iGB = (IGraphBuilder)fgm;
        rotID = FilterGraph.AddToRot(iGB);

        // Create source filter and initialize it
        source = (SourceFilter)Filter.CreateFilter(fiSource);
        iGB.AddFilter(source.BaseFilter, source.FriendlyName);
        source.AddedToGraph(fgm);

        // Pass flags to the RtpRenderer filter from the config file.
        this.rtpRendererFlags = 0;
        string setting = ConfigurationManager.AppSettings[AppConfig.MDS_RtpRendererFlags];
        if (!String.IsNullOrEmpty(setting)) {
            if (!byte.TryParse(setting, out rtpRendererFlags)) {
                rtpRendererFlags = 0;
            }
        }
    }
    catch (Exception) {
        Cleanup();
        throw;
    }
}
/// <summary>
/// Creates the actual FilgraphManager with the chosen microphone
/// </summary>
private void CreateAudioGraph(FilterInfo fi) {
    Debug.Assert(acg == null);

    // Create the graph, which creates the source filter
    acg = new AudioCaptureGraph(fi);

    Log(acg.AudioSource.Dump());
}
public BlackMagicSource(FilterInfo fi) : base(fi) { }
/// <summary>
/// Creates the actual FilgraphManager with the chosen microphone
/// </summary>
private void CreateAudioGraph(FilterInfo fi) {
    Debug.Assert(cg == null);

    if (DVSource.IsDVSourceWithAudio(fi)) {
        cg = DVCaptureGraph.GetInstance(fi);
        Log(((DVCaptureGraph)cg).DVSource.Dump());
    }
    else {
        // Create the graph, which creates the source filter
        cg = new AudioCaptureGraph(fi);
        Log(((AudioCaptureGraph)cg).AudioSource.Dump());
    }
}