/// <summary>
/// Creates a Media Foundation sink writer for <paramref name="outputFile"/>,
/// letting MF infer the container type from the file extension.
/// </summary>
/// <param name="outputFile">Destination media file path.</param>
/// <returns>A configured <see cref="SinkWriter"/>; caller owns and must dispose it.</returns>
/// <exception cref="ArgumentException">No sink matches the file extension (MF_E_NOT_FOUND).</exception>
private static SinkWriter CreateSinkWriter(string outputFile)
{
    // The container could be forced via attributes, but extension-based
    // resolution does a decent job on its own.
    // n.b. AAC encode on Win 8 can have AAC extension, but use MP4 in Win 7:
    // http://msdn.microsoft.com/en-gb/library/windows/desktop/dd389284%28v=vs.85%29.aspx
    using (var attributes = new MediaAttributes())
    {
        MediaFactory.CreateAttributes(attributes, 1);
        // Opt in to hardware MFTs for encoding.
        attributes.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms.Guid, (UInt32)1);
        try
        {
            return MediaFactory.CreateSinkWriterFromURL(outputFile, null, attributes);
        }
        catch (COMException e)
        {
            if (e.GetHResult() == MF_E_NOT_FOUND)
            {
                throw new ArgumentException("Was not able to create a sink writer for this file extension");
            }
            throw;
        }
    }
}
/// <summary>
/// Enumerates all Media Foundation video capture sources.
/// </summary>
/// <returns>
/// Activation objects, one per device; the caller must dispose each element.
/// </returns>
public static Activate[] EnumerateVideoDevices()
{
    // Fix: the attribute store was never disposed (COM object leak); it is
    // only needed for the EnumDeviceSources call itself.
    using (var attributes = new MediaAttributes())
    {
        MediaFactory.CreateAttributes(attributes, 1);
        attributes.Set(CaptureDeviceAttributeKeys.SourceType, CaptureDeviceAttributeKeys.SourceTypeVideoCapture.Guid);
        return MediaFactory.EnumDeviceSources(attributes);
    }
}
/// <summary>
/// Builds a list of available video capture devices (friendly name + symbolic link).
/// Enumeration failures are logged to the console and yield a partial/empty list.
/// </summary>
/// <returns>The discovered devices; never null.</returns>
public static List<VideoCaptureDevice> GetVideoCaptureDevices()
{
    var result = new List<VideoCaptureDevice>();
    Activate[] deviceActivates = null;
    try
    {
        using (var attributes = new MediaAttributes())
        {
            MediaFactory.CreateAttributes(attributes, 1);
            attributes.Set(CaptureDeviceAttributeKeys.SourceType, CaptureDeviceAttributeKeys.SourceTypeVideoCapture.Guid);
            deviceActivates = MediaFactory.EnumDeviceSources(attributes);

            foreach (var act in deviceActivates)
            {
                var friendlyName = act.Get(CaptureDeviceAttributeKeys.FriendlyName);
                var isHwSource = act.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapHwSource);
                //var maxBuffers = act.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapMaxBuffers);
                var symbolicLink = act.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapSymbolicLink);
                //var mediaTypes = act.Get(TransformAttributeKeys.MftOutputTypesAttributes);

                result.Add(new VideoCaptureDevice
                {
                    Name = friendlyName,
                    SymLink = symbolicLink,
                });

                Console.WriteLine("FriendlyName " + friendlyName + "\r\n" +
                    "isHwSource " + isHwSource + "\r\n" +
                    //"maxBuffers " + maxBuffers +
                    "symbolicLink " + symbolicLink);
            }
        }
    }
    catch (Exception ex)
    {
        // Best-effort enumeration: log and return whatever was collected.
        Console.WriteLine(ex);
    }
    finally
    {
        // Activation objects are COM-backed and must always be released.
        if (deviceActivates != null)
        {
            foreach (var act in deviceActivates)
            {
                act.Dispose();
            }
        }
    }
    return result;
}
/// <summary>
/// Finds the capture-device activation object whose symbolic link equals
/// <paramref name="symLink"/>. All non-matching activates are disposed.
/// </summary>
/// <param name="symLink">Symbolic link of the wanted device.</param>
/// <returns>The matching <see cref="Activate"/> (caller must dispose), or null if not found.</returns>
private static Activate GetActivateBySymLink(string symLink)
{
    Activate activate = null;
    Activate[] activates = null;
    using (var attributes = new MediaAttributes())
    {
        MediaFactory.CreateAttributes(attributes, 2);
        attributes.Set(CaptureDeviceAttributeKeys.SourceType, CaptureDeviceAttributeKeys.SourceTypeVideoCapture.Guid);
        //attributes.Set(CaptureDeviceAttributeKeys.SourceTypeVidcapSymbolicLink, symLink);
        activates = MediaFactory.EnumDeviceSources(attributes);
    }

    if (activates == null || activates.Length == 0)
    {
        logger.Error("SourceTypeVideoCapture not found");
        return null;
    }

    foreach (var _activate in activates)
    {
        Console.WriteLine("---------------------------------------------");
        var friendlyName = _activate.Get(CaptureDeviceAttributeKeys.FriendlyName);
        var isHwSource = _activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapHwSource);
        //var maxBuffers = _activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapMaxBuffers);
        var symbolicLink = _activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapSymbolicLink);

        logger.Info("FriendlyName " + friendlyName + "\r\n" +
            "isHwSource " + isHwSource + "\r\n" +
            //"maxBuffers " + maxBuffers +
            "symbolicLink " + symbolicLink);

        // Fix: only keep the FIRST match. The original overwrote `activate`
        // on every match, leaking the previously kept activation object if
        // two devices ever reported the same symbolic link.
        if (activate == null && symbolicLink == symLink)
        {
            activate = _activate;
            continue;
        }

        _activate?.Dispose();
    }

    return activate;
}
/// <summary>
/// Dumps every Media Foundation video capture source — friendly name,
/// hardware flag, symbolic link and its media-source details — to the console.
/// All COM objects created during enumeration are disposed before returning.
/// </summary>
public static void EnumerateCaptureSources()
{
    Activate[] deviceActivates;
    using (var attributes = new MediaAttributes())
    {
        MediaFactory.CreateAttributes(attributes, 1);
        attributes.Set(CaptureDeviceAttributeKeys.SourceType, CaptureDeviceAttributeKeys.SourceTypeVideoCapture.Guid);
        deviceActivates = MediaFactory.EnumDeviceSources(attributes);
    }

    if (deviceActivates == null || deviceActivates.Length == 0)
    {
        Console.WriteLine("SourceTypeVideoCapture not found");
        return;
    }

    foreach (var act in deviceActivates)
    {
        Console.WriteLine("---------------------------------------------");
        var friendlyName = act.Get(CaptureDeviceAttributeKeys.FriendlyName);
        var isHwSource = act.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapHwSource);
        //var maxBuffers = act.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapMaxBuffers);
        var symbolicLink = act.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapSymbolicLink);

        Console.WriteLine("FriendlyName " + friendlyName + "\r\n" +
            "isHwSource " + isHwSource + "\r\n" +
            //"maxBuffers " + maxBuffers +
            "symbolicLink " + symbolicLink);

        // Temporarily activate the source just to log its media types.
        var mediaSource = act.ActivateObject<MediaSource>();
        Console.WriteLine(MfTool.LogMediaSource(mediaSource));
        mediaSource?.Dispose();
        act?.Dispose();
    }
}
/// <summary>
/// Enumerates Media Foundation video capture devices and tries to reorder
/// them to match the DirectShow device order (by friendly name).
/// </summary>
/// <returns>
/// Array sized like the MF enumeration where index j holds the MF activate
/// matched to the j-th DirectShow device. NOTE(review): entries with no
/// DirectShow counterpart stay null, and unmatched MF activates are never
/// disposed — callers should be aware of both; preserved as-is here.
/// </returns>
public static Activate[] EnumerateVideoDevices()
{
    Activate[] mediaFoundationActivates;
    // Fix: the attribute store was never disposed (COM object leak).
    using (var attributes = new MediaAttributes())
    {
        MediaFactory.CreateAttributes(attributes, 1);
        attributes.Set(CaptureDeviceAttributeKeys.SourceType, CaptureDeviceAttributeKeys.SourceTypeVideoCapture.Guid);
        mediaFoundationActivates = MediaFactory.EnumDeviceSources(attributes);
    }

    Activate[] result = new Activate[mediaFoundationActivates.Length];
    Dictionary<string, int[]> order = new Dictionary<string, int[]>();
    DsDevice[] capDevicesDS = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);

    // Tries to match the order of the found devices in DirectShow and
    // MediaFoundation. NOTE(review): `suffix` is appended to BOTH sides of
    // the comparison, so it never changes which names compare equal — it
    // only disambiguates the `order` dictionary key for duplicate names.
    // Preserved exactly; confirm intent before changing.
    for (int i = 0; i < mediaFoundationActivates.Length; i++)
    {
        var friendlyName = mediaFoundationActivates[i].Get(CaptureDeviceAttributeKeys.FriendlyName);

        var suffix = ""; // used to handle multiple devices listed with the same name
        var counter = 1;
        for (int j = 0; j < capDevicesDS.Length; j++)
        {
            var friendlyNameDS = capDevicesDS[j].Name + suffix;
            if (friendlyName + suffix == friendlyNameDS)
            {
                if (!order.ContainsKey(friendlyName + suffix))
                {
                    // First unseen occurrence of this (possibly suffixed)
                    // name: place the MF activate at the DS position.
                    order.Add(friendlyName + suffix, new int[] { i, j });
                    result[j] = mediaFoundationActivates[i];
                    suffix = "";
                    break;
                }
                else
                {
                    // Name already claimed: bump the suffix and keep scanning.
                    suffix = counter++.ToString();
                    continue;
                }
            }
        }
    }
    return result;
}
/// <summary>
/// Creates a sink writer for <paramref name="outputFile"/>, with hardware
/// transforms enabled; the container format is inferred from the extension.
/// </summary>
/// <param name="outputFile">Destination media file path.</param>
/// <returns>A configured <see cref="SinkWriter"/>; caller owns and must dispose it.</returns>
/// <exception cref="ArgumentException">No sink matches the file extension.</exception>
private static SinkWriter CreateSinkWriter(string outputFile)
{
    // 0xC00D36D5 == MF_E_NOT_FOUND: no byte stream / media sink could be
    // resolved for the given URL/extension.
    const int hrNotFound = unchecked((int)0xC00D36D5);

    using (var attributes = new MediaAttributes())
    {
        MediaFactory.CreateAttributes(attributes, 1);
        // Opt in to hardware MFTs for encoding.
        attributes.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms.Guid, (UInt32)1);
        try
        {
            return MediaFactory.CreateSinkWriterFromURL(outputFile, IntPtr.Zero, attributes);
        }
        catch (COMException e)
        {
            if (e.ErrorCode == hrNotFound)
            {
                throw new ArgumentException("Was not able to create a sink writer for this file extension");
            }
            throw;
        }
    }
}
/// <summary>
/// Opens <paramref name="fileName"/> as a Media Foundation media source and
/// configures a SourceReader that decodes video to NV12 at the file's native
/// frame size and rate. Sets <c>mediaSource</c>, <c>sourceReader</c> and
/// <c>OutputMediaType</c> fields.
/// </summary>
/// <param name="fileName">Path or URL of the media file.</param>
/// <param name="devMan">Optional D3D device manager; when supplied it is handed
/// to the reader via the D3DManager attribute (enables DXVA decoding).</param>
public void Setup(string fileName, Direct3DDeviceManager devMan = null)
{
    logger.Debug("VideoFileSource::Setup()");

    using (var sourceResolver = new SourceResolver())
    {
        // Fix: the intermediate COM wrapper returned by CreateObjectFromURL
        // was leaked in the original code. QueryInterface takes its own
        // reference on IMFMediaSource, so the wrapper is safe to dispose
        // immediately after the QI.
        using (var unkObj = sourceResolver.CreateObjectFromURL(fileName, SourceResolverFlags.MediaSource))
        {
            var guid = typeof(MediaSource).GUID;
            unkObj.QueryInterface(ref guid, out var pUnk);
            mediaSource = new MediaSource(pUnk);
        }
    }

    using (var mediaAttributes = new MediaAttributes(IntPtr.Zero))
    {
        MediaFactory.CreateAttributes(mediaAttributes, 5);
        //mediaAttributes.Set(SourceReaderAttributeKeys.EnableVideoProcessing, 1);
        if (devMan != null)
        {
            //mediaAttributes.Set(SourceReaderAttributeKeys.DisableDxva, 0);
            mediaAttributes.Set(SourceReaderAttributeKeys.D3DManager, devMan);
        }
        //mediaAttributes.Set(CodecApiPropertyKeys.AVLowLatencyMode, false);

        sourceReader = new SourceReader(mediaSource, mediaAttributes);
    }

    var charact = mediaSource.Characteristics;
    Console.WriteLine(MfTool.LogEnumFlags((MediaSourceCharacteristics)charact));

    Console.WriteLine("------------------CurrentMediaType-------------------");
    int videoStreamIndex = (int)SourceReaderIndex.FirstVideoStream;
    using (var currentMediaType = sourceReader.GetCurrentMediaType(videoStreamIndex))
    {
        Console.WriteLine(MfTool.LogMediaType(currentMediaType));

        var frameSize = currentMediaType.Get(MediaTypeAttributeKeys.FrameSize);
        var frameRate = currentMediaType.Get(MediaTypeAttributeKeys.FrameRate);

        // Ask the reader to decode to NV12, keeping the source geometry/rate.
        OutputMediaType = new MediaType();
        OutputMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
        OutputMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.NV12); // VideoFormatGuids.Yv12
        OutputMediaType.Set(MediaTypeAttributeKeys.FrameSize, frameSize);
        OutputMediaType.Set(MediaTypeAttributeKeys.FrameRate, frameRate);
        OutputMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
        OutputMediaType.Set(MediaTypeAttributeKeys.AllSamplesIndependent, 1);

        sourceReader.SetCurrentMediaType(videoStreamIndex, OutputMediaType);

        Console.WriteLine("------------------NEW MediaType-------------------");
        Console.WriteLine(MfTool.LogMediaType(OutputMediaType));
    }
}
/// <summary>
/// Opens the video capture device at <paramref name="deviceIndex"/>, creates a
/// SourceReader over it, creates a D3D11 device plus shared/staging BGRA
/// textures sized to the device's native frame size, and configures a video
/// processor converting the capture subtype to ARGB32 (vertically mirrored).
/// Sets <c>mediaSource</c>, <c>sourceReader</c>, <c>device</c>,
/// <c>SharedTexture</c>, <c>texture</c> and <c>processor</c> fields.
/// </summary>
/// <param name="deviceIndex">Index into the MF capture-device enumeration.</param>
/// <exception cref="InvalidOperationException">No capture devices found.</exception>
/// <exception cref="ArgumentOutOfRangeException">Index outside the enumerated devices.</exception>
public void Setup(int deviceIndex = 0)
{
    logger.Debug("VideoCaptureSource::Setup()");

    Activate[] activates = null;
    using (var attributes = new MediaAttributes())
    {
        MediaFactory.CreateAttributes(attributes, 1);
        attributes.Set(CaptureDeviceAttributeKeys.SourceType, CaptureDeviceAttributeKeys.SourceTypeVideoCapture.Guid);
        activates = MediaFactory.EnumDeviceSources(attributes);
    }

    if (activates == null || activates.Length == 0)
    {
        logger.Error("SourceTypeVideoCapture not found");
        Console.ReadKey();
        // Fix: the original fell through and crashed with a
        // NullReferenceException in the loop below; fail explicitly instead.
        throw new InvalidOperationException("SourceTypeVideoCapture not found");
    }

    // Fix: guard the index before using it, and release what we enumerated.
    if (deviceIndex < 0 || deviceIndex >= activates.Length)
    {
        foreach (var a in activates)
        {
            a.Dispose();
        }
        throw new ArgumentOutOfRangeException(nameof(deviceIndex));
    }

    foreach (var activate in activates)
    {
        Console.WriteLine("---------------------------------------------");
        var friendlyName = activate.Get(CaptureDeviceAttributeKeys.FriendlyName);
        var isHwSource = activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapHwSource);
        //var maxBuffers = activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapMaxBuffers);
        var symbolicLink = activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapSymbolicLink);

        logger.Info("FriendlyName " + friendlyName + "\r\n" +
            "isHwSource " + isHwSource + "\r\n" +
            //"maxBuffers " + maxBuffers +
            "symbolicLink " + symbolicLink);
    }

    var currentActivator = activates[deviceIndex];
    mediaSource = currentActivator.ActivateObject<MediaSource>();

    // All activation objects can be released once the source is activated.
    foreach (var a in activates)
    {
        a.Dispose();
    }

    using (var mediaAttributes = new MediaAttributes(IntPtr.Zero))
    {
        MediaFactory.CreateAttributes(mediaAttributes, 2);
        mediaAttributes.Set(SourceReaderAttributeKeys.EnableVideoProcessing, 1);
        //var devMan = new DXGIDeviceManager();
        //devMan.ResetDevice(device);
        //mediaAttributes.Set(SourceReaderAttributeKeys.D3DManager, devMan);
        //MediaFactory.CreateSourceReaderFromMediaSource(mediaSource, mediaAttributes, sourceReader);

        sourceReader = new SourceReader(mediaSource, mediaAttributes);
    }

    Console.WriteLine("------------------CurrentMediaType-------------------");
    var mediaType = sourceReader.GetCurrentMediaType(SourceReaderIndex.FirstVideoStream);
    Console.WriteLine(MfTool.LogMediaType(mediaType));

    var frameSize = MfTool.GetFrameSize(mediaType);
    var subtype = mediaType.Get(MediaTypeAttributeKeys.Subtype);
    mediaType?.Dispose();

    //Device device = null;
    int adapterIndex = 0;
    using (var dxgiFactory = new SharpDX.DXGI.Factory1())
    {
        var adapter = dxgiFactory.Adapters1[adapterIndex];
        device = new Device(adapter,
            //DeviceCreationFlags.Debug |
            DeviceCreationFlags.VideoSupport |
            DeviceCreationFlags.BgraSupport);

        // MF worker threads touch the device, so protect the immediate context.
        using (var multiThread = device.QueryInterface<SharpDX.Direct3D11.Multithread>())
        {
            multiThread.SetMultithreadProtected(true);
        }
    }

    // GPU-shareable render target the processed frames are written into.
    SharedTexture = new Texture2D(device,
        new Texture2DDescription
        {
            CpuAccessFlags = CpuAccessFlags.None,
            BindFlags = BindFlags.RenderTarget | BindFlags.ShaderResource,
            Format = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
            Width = frameSize.Width,
            Height = frameSize.Height,
            MipLevels = 1,
            ArraySize = 1,
            SampleDescription = { Count = 1, Quality = 0 },
            Usage = ResourceUsage.Default,
            //OptionFlags = ResourceOptionFlags.GdiCompatible//ResourceOptionFlags.None,
            OptionFlags = ResourceOptionFlags.Shared,
        });

    // CPU-readable staging copy of the same frame.
    texture = new Texture2D(device,
        new Texture2DDescription
        {
            CpuAccessFlags = CpuAccessFlags.Read,
            BindFlags = BindFlags.None,
            Format = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
            Width = frameSize.Width,
            Height = frameSize.Height,
            MipLevels = 1,
            ArraySize = 1,
            SampleDescription = { Count = 1, Quality = 0 },
            Usage = ResourceUsage.Staging,
            OptionFlags = ResourceOptionFlags.None,
        });

    processor = new MfVideoProcessor(null);
    var inProcArgs = new MfVideoArgs
    {
        Width = frameSize.Width,
        Height = frameSize.Height,
        // Format = VideoFormatGuids.Rgb24,
        Format = subtype, //VideoFormatGuids.NV12,
    };
    var outProcArgs = new MfVideoArgs
    {
        Width = frameSize.Width,
        Height = frameSize.Height,
        Format = VideoFormatGuids.Argb32,
        //Format = VideoFormatGuids.Rgb32,//VideoFormatGuids.Argb32,
    };
    processor.Setup(inProcArgs, outProcArgs);

    //processor.SetMirror(VideoProcessorMirror.MirrorHorizontal);
    processor.SetMirror(VideoProcessorMirror.MirrorVertical);
}
//// ===========================================================================================================
//// Constructors
//// ===========================================================================================================

/// <summary>
/// Initializes a new instance of the <see cref="MediaAttributes"/> class.
/// </summary>
/// <param name="initialSizeInBytes">
/// The initial number of elements allocated for the attribute store. The attribute store grows as needed.
/// Default is 0.
/// NOTE(review): despite the name, this is an element COUNT (MFCreateAttributes' cInitialSize), not a byte
/// size; the parameter is not renamed here because callers may use named arguments.
/// </param>
/// <remarks>
/// <p>
/// Attributes are used throughout Microsoft Media Foundation to configure objects, describe media formats,
/// query object properties, and other purposes. For more information, see Attributes in Media Foundation.
/// </p>
/// <p>For a complete list of all the defined attribute GUIDs in Media Foundation, see Media Foundation Attributes.</p>
/// </remarks>
/// <unmanaged>HRESULT MFCreateAttributes([Out] IMFAttributes** ppMFAttributes,[In] unsigned int cInitialSize)</unmanaged>
/// <unmanaged-short>MFCreateAttributes</unmanaged-short>
public MediaAttributes(int initialSizeInBytes = 0)
    : base(IntPtr.Zero)
{
    // Allocates the native IMFAttributes and binds it to this wrapper.
    MediaFactory.CreateAttributes(this, initialSizeInBytes);
}