private static SinkWriter CreateSinkWriter(string outputFile)
{
    // n.b. could try specifying the container type using attributes, but it
    // does a decent job of working it out from the file extension.
    // n.b. AAC encode on Win 8 can have an .aac extension, but use MP4 on Win 7:
    // http://msdn.microsoft.com/en-gb/library/windows/desktop/dd389284%28v=vs.85%29.aspx
    SinkWriter writer;
    using (var attributes = new MediaAttributes())
    {
        MediaFactory.CreateAttributes(attributes, 1);
        attributes.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms.Guid, (UInt32)1);
        try
        {
            writer = MediaFactory.CreateSinkWriterFromURL(outputFile, null, attributes);
        }
        catch (COMException e)
        {
            if (e.GetHResult() == MF_E_NOT_FOUND)
            {
                throw new ArgumentException("Was not able to create a sink writer for this file extension");
            }
            throw;
        }
    }
    return writer;
}
public static Activate[] EnumerateVideoDevices()
{
    // dispose the attribute store once the enumeration has been made
    using (var attributes = new MediaAttributes())
    {
        attributes.Set(CaptureDeviceAttributeKeys.SourceType, CaptureDeviceAttributeKeys.SourceTypeVideoCapture.Guid);
        return MediaFactory.EnumDeviceSources(attributes);
    }
}
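// Usage sketch (hypothetical, not from the original source): print the friendly
// name of every video capture device and release the activation objects.
// Assumes MediaManager.Startup() has already been called.
foreach (var activate in EnumerateVideoDevices())
{
    using (activate)
    {
        Console.WriteLine(activate.Get(CaptureDeviceAttributeKeys.FriendlyName));
    }
}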
private static SourceReader ConfigureSourceReader(byte[] videoFileBytes)
{
    var attributes = new MediaAttributes();
    attributes.Set(SourceReaderAttributeKeys.EnableVideoProcessing, 1);
    return new SourceReader(videoFileBytes, attributes);
}
private static void NewMethod()
{
    Guid CColorConvertDMO = new Guid("98230571-0087-4204-b020-3282538e57d3");
    Guid VideoProcessorMFT = new Guid("88753B26-5B24-49BD-B2E7-0C445C78C982");
    Guid NVidiaH264EncoderMFT = new Guid("60F44560-5A20-4857-BFEF-D29773CB8040");
    Guid IntelQSVH264EncoderMFT = new Guid("4BE8D3C0-0515-4A37-AD55-E4BAE19AF471");

    var result = MfApi.MFTGetInfo(CColorConvertDMO,
        out string pszName,
        out IntPtr ppInputTypes, out uint inputTypesNum,
        out IntPtr ppOutputTypes, out uint outputTypesNum,
        out IntPtr ppAttributes);

    if (result == MediaToolkit.NativeAPIs.HResult.S_OK)
    {
        MediaAttributes mediaAttributes = new MediaAttributes(ppAttributes);
        Console.WriteLine(MfTool.LogMediaAttributes(mediaAttributes));

        Console.WriteLine("InputTypes-------------------------------------");
        MarshalHelper.PtrToArray(ppInputTypes, (int)inputTypesNum, out MFTRegisterTypeInfo[] inputTypes);
        foreach (var type in inputTypes)
        {
            Console.WriteLine(MfTool.GetMediaTypeName(type.guidSubtype));
        }

        Console.WriteLine();
        Console.WriteLine("OutputTypes------------------------------------");
        MarshalHelper.PtrToArray(ppOutputTypes, (int)outputTypesNum, out MFTRegisterTypeInfo[] outputTypes);
        foreach (var type in outputTypes)
        {
            Console.WriteLine(MfTool.GetMediaTypeName(type.guidSubtype));
        }
    }

    Console.WriteLine("Press any key to exit...");
    Console.ReadKey();
}
/// <inheritdoc />
/// <summary>
/// Starts the encoder after all properties have been initialized.
/// </summary>
public override void Start()
{
    MediaFactory.Startup(MediaFactory.Version, MFSTARTUP_NOSOCKET);

    using (var attrs = new MediaAttributes())
    {
        attrs.Set(TranscodeAttributeKeys.TranscodeContainertype, this.containerType);
        attrs.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms, 1);
        attrs.Set(SinkWriterAttributeKeys.LowLatency, true);
        if (this.dxgiManager != null)
        {
            attrs.Set(SinkWriterAttributeKeys.D3DManager, this.dxgiManager);
        }

        // create byte stream and sink writer
        this.byteStream = new ByteStream(DestinationStream);
        this.sinkWriter = MediaFactory.CreateSinkWriterFromURL(null, this.byteStream, attrs);

        // create output media type
        using (var outMediaType = new SharpDX.MediaFoundation.MediaType())
        {
            outMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
            outMediaType.Set(MediaTypeAttributeKeys.Subtype, this.videoFormat);
            outMediaType.Set(MediaTypeAttributeKeys.AvgBitrate, this.bitRate);
            outMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
            outMediaType.Set(MediaTypeAttributeKeys.FrameSize, ((long)FrameSize.Width << 32) | (uint)FrameSize.Height);
            outMediaType.Set(MediaTypeAttributeKeys.FrameRate, ((long)this.frameRate << 32) | 1);
            // the shift must happen on a long: (1 << 32) on an int wraps to 1 in C#
            outMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, ((long)1 << 32) | 1);

            this.sinkWriter.AddStream(outMediaType, out this.streamIdx);
        }

        // create input media type
        using (var inMediaType = new SharpDX.MediaFoundation.MediaType())
        {
            inMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
            inMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
            inMediaType.Set(MediaTypeAttributeKeys.FrameSize, ((long)FrameSize.Width << 32) | (uint)FrameSize.Height);
            inMediaType.Set(MediaTypeAttributeKeys.FrameRate, ((long)this.frameRate << 32) | 1);
            inMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, ((long)1 << 32) | 1);

            try
            {
                // use NV12 YUV encoding
                inMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.NV12);
                this.sinkWriter.SetInputMediaType(this.streamIdx, inMediaType, null);
            }
            catch (SharpDXException exception) when (exception.ResultCode == SharpDX.MediaFoundation.ResultCode.InvalidMediaType)
            {
                // XXX: fall back to ARGB32
                inMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.Argb32);
                this.sinkWriter.SetInputMediaType(this.streamIdx, inMediaType, null);
            }
        }

        this.sinkWriter.BeginWriting();
    }
}
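// A minimal sketch of the 64-bit packing used above, factored into a helper
// (hypothetical name; the pattern matches the PackLong/MfTool.PackToLong helpers
// used by the other writers in this collection). Media Foundation stores FrameSize,
// FrameRate, and PixelAspectRatio as two 32-bit values in one 64-bit attribute,
// high word first.
private static long PackToLong(int high, int low)
{
    return ((long)high << 32) | (uint)low;
}
// e.g. outMediaType.Set(MediaTypeAttributeKeys.FrameSize, PackToLong(width, height));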
private SourceReader CreateSourceReader(Activate activate)
{
    SourceReader reader = null;
    using (var source = activate.ActivateObject<MediaSource>())
    using (var mediaAttributes = new MediaAttributes())
    {
        if (asyncMode)
        {
            sourceReaderCallback = new SourceReaderCallback();
            sourceReaderCallback.OnReadSample += SourceReaderCallback_OnReadSample;
            sourceReaderCallback.OnFlush += SourceReaderCallback_OnFlush;
            //sourceReaderCallback.OnEvent += SourceReaderCallback_OnEvent;

            var pUnk = Marshal.GetIUnknownForObject(sourceReaderCallback);
            try
            {
                using (var comObj = new SharpDX.ComObject(pUnk))
                {
                    mediaAttributes.Set(SourceReaderAttributeKeys.AsyncCallback, comObj);
                }
            }
            finally
            {
                if (pUnk != IntPtr.Zero)
                {
                    Marshal.Release(pUnk);
                }
            }
        }

        //mediaAttributes.Set(SourceReaderAttributeKeys.EnableVideoProcessing, 1);
        /* // Not all cameras support these!
         * mediaAttributes.Set(SinkWriterAttributeKeys.LowLatency, true);
         * mediaAttributes.Set(SourceReaderAttributeKeys.EnableAdvancedVideoProcessing, true);
         * mediaAttributes.Set(SinkWriterAttributeKeys.ReadwriteDisableConverters, 0);
         * mediaAttributes.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms, 1);
         * using (var devMan = new DXGIDeviceManager())
         * {
         *     devMan.ResetDevice(device);
         *     mediaAttributes.Set(SourceReaderAttributeKeys.D3DManager, devMan);
         * }
         */

        reader = new SourceReader(source, mediaAttributes);
    }
    return reader;
}
/// <inheritdoc />
/// <summary>
/// Begins encoding the video.
/// </summary>
/// <param name="frameSize">Frame size.</param>
/// <param name="stream">Output stream.</param>
public void Initialize(Size frameSize, Stream stream)
{
    MediaFactory.Startup(MediaFactory.Version, NoSocket);

    using (var attrs = new MediaAttributes())
    {
        attrs.Set(TranscodeAttributeKeys.TranscodeContainertype, ContainerType);
        attrs.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms, 1);
        attrs.Set(SinkWriterAttributeKeys.DisableThrottling, 1);
        attrs.Set(SinkWriterAttributeKeys.LowLatency, true);

        if (SurfacePointer != IntPtr.Zero)
        {
            // get the source surface
            this.surface = new Texture2D(SurfacePointer);

            // create and bind a DXGI device manager
            this.dxgiManager = new DXGIDeviceManager();
            this.dxgiManager.ResetDevice(this.surface.Device);
            attrs.Set(SinkWriterAttributeKeys.D3DManager, this.dxgiManager);
        }

        // create byte stream and sink writer
        this.byteStream = new ByteStream(stream);
        this.sinkWriter = MediaFactory.CreateSinkWriterFromURL(null, this.byteStream.NativePointer, attrs);

        // create output media type
        using (var outMediaType = new MediaType())
        {
            outMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
            outMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormat);
            outMediaType.Set(MediaTypeAttributeKeys.AvgBitrate, BitRate);
            outMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
            outMediaType.Set(MediaTypeAttributeKeys.FrameSize, ((long)frameSize.Width << 32) | (uint)frameSize.Height);
            outMediaType.Set(MediaTypeAttributeKeys.FrameRate, ((long)FrameRate << 32) | 1);
            // the shift must happen on a long: (1 << 32) on an int wraps to 1 in C#
            outMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, ((long)1 << 32) | 1);

            this.sinkWriter.AddStream(outMediaType, out this.streamIdx);
        }

        // create input media type
        using (var inMediaType = new MediaType())
        {
            inMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
            inMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.Rgb32);
            inMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
            inMediaType.Set(MediaTypeAttributeKeys.FrameSize, ((long)frameSize.Width << 32) | (uint)frameSize.Height);
            inMediaType.Set(MediaTypeAttributeKeys.FrameRate, ((long)FrameRate << 32) | 1);
            inMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, ((long)1 << 32) | 1);

            this.sinkWriter.SetInputMediaType(this.streamIdx, inMediaType, null);
            this.sinkWriter.BeginWriting();
        }
    }
}
public static bool CreateMediaSession(byte[] mediaData, out MediaSession? session, IntPtr? windowHandle = null, bool isStartUpMediaManager = false)
{
    PresentationDescriptor? pd = null;
    Topology? topology = null;
    try
    {
        if (isStartUpMediaManager)
        {
            MediaManager.Startup();
        }
        var attributes = new MediaAttributes(mediaData.Length);
        MediaFactory.CreateMediaSession(attributes, out session);

        var resolver = new SourceResolver();
        var byteStream = new ByteStream(mediaData);
        resolver.CreateObjectFromByteStream(byteStream, null, (int)SourceResolverFlags.ByteStream, null, out var objType, out var videoObject);
        GetMediaSource(videoObject, out var source);
        if (source != null)
        {
            MediaFactory.CreateTopology(out topology);
            source.CreatePresentationDescriptor(out pd);
            var r1 = CreatePlaybackTopology(source, pd, windowHandle ?? IntPtr.Zero, out topology);
            if (r1.Success)
            {
                session.SetTopology(0, topology);
                return true;
            }
        }
        session = null;
        topology = null;
        return false;
    }
    catch (SharpDXException ex)
    {
        Debug.Print(ex.ToString());
        session = null;
        return false;
    }
    finally
    {
        pd?.Dispose();
        topology?.Dispose();
    }
}
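// Usage sketch (hypothetical, not from the original source): build a session from
// an in-memory file. `mediaBytes` and `hwnd` are assumed inputs; starting playback
// (e.g. via the session's Start method) and closing the session are left to the caller.
if (CreateMediaSession(mediaBytes, out var session, hwnd, isStartUpMediaManager: true))
{
    // session is ready with its topology queued; start playback, then Close()/Dispose().
}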
private void PlatformInitialize()
{
    // The GUID is specified in a GuidAttribute attached to the class
    AudioStreamVolumeGuid = Guid.Parse(((GuidAttribute)typeof(AudioStreamVolume).GetCustomAttributes(typeof(GuidAttribute), false)[0]).Value);

    MediaAttributes attr = new MediaAttributes(0);

    //MediaManagerState.CheckStartup();
    MediaManager.Startup();

    MediaFactory.CreateMediaSession(attr, out _session);
}
private void newSourceReader()
{
    SharpDX.Utilities.Dispose(ref reader);
    Activate[] devices = null;
    try
    {
        using (MediaAttributes devAttr = new MediaAttributes())
        {
            devAttr.Set(CaptureDeviceAttributeKeys.SourceType.Guid, CaptureDeviceAttributeKeys.SourceTypeVideoCapture.Guid);
            devices = MediaFactory.EnumDeviceSources(devAttr);
            if (devices.Length <= DeviceID)
            {
                StringBuilder strException = new StringBuilder();
                strException.AppendLine("Device(s) found:");
                for (int n = 0; n < devices.Length; n++)
                {
                    strException.AppendLine($"{n} : {devices[n].Get(CaptureDeviceAttributeKeys.FriendlyName)}");
                }
                if (devices.Length == 0)
                {
                    strException.AppendLine("None");
                }
                throw new ArgumentOutOfRangeException("DeviceID", DeviceID, strException.ToString());
            }
            dev = devices[DeviceID].ActivateObject<MediaSource>();
            deviceName = devices[DeviceID].Get(CaptureDeviceAttributeKeys.FriendlyName);
            applyDevProperties();
        }

        using (MediaAttributes readerAttr = new MediaAttributes())
        {
            readerAttr.Set(SourceReaderAttributeKeys.EnableAdvancedVideoProcessing, EnableAdvancedVideoProcessing);
            if (EnableHardwareTransform)
            {
                if (dxman == null || dxman.IsDisposed)
                {
                    newDXDeviceForVideo();
                }
                readerAttr.Set(SourceReaderAttributeKeys.D3DManager, dxman);
            }
            reader = new SourceReader(dev, readerAttr);
        }
    }
    finally
    {
        // guard against EnumDeviceSources throwing before the array is assigned
        if (devices != null)
        {
            foreach (var d in devices)
            {
                d.Dispose();
            }
        }
    }
}
public static List<VideoCaptureDevice> GetVideoCaptureDevices()
{
    List<VideoCaptureDevice> devices = new List<VideoCaptureDevice>();
    Activate[] activates = null;
    try
    {
        using (var attributes = new MediaAttributes())
        {
            MediaFactory.CreateAttributes(attributes, 1);
            attributes.Set(CaptureDeviceAttributeKeys.SourceType, CaptureDeviceAttributeKeys.SourceTypeVideoCapture.Guid);
            activates = MediaFactory.EnumDeviceSources(attributes);

            foreach (var activate in activates)
            {
                var friendlyName = activate.Get(CaptureDeviceAttributeKeys.FriendlyName);
                var isHwSource = activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapHwSource);
                //var maxBuffers = activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapMaxBuffers);
                var symbolicLink = activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapSymbolicLink);

                devices.Add(new VideoCaptureDevice { Name = friendlyName, SymLink = symbolicLink });

                Console.WriteLine("FriendlyName " + friendlyName + "\r\n" +
                                  "isHwSource " + isHwSource + "\r\n" +
                                  //"maxBuffers " + maxBuffers +
                                  "symbolicLink " + symbolicLink);
            }
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }
    finally
    {
        if (activates != null)
        {
            foreach (var act in activates)
            {
                act.Dispose();
            }
        }
    }
    return devices;
}
public static IReadOnlyDictionary<Guid, object> GetAttributes(MediaAttributes atts)
{
    var dic = new Dictionary<Guid, object>();
    if (atts != null)
    {
        for (int i = 0; i < atts.Count; i++)
        {
            object value = atts.GetByIndex(i, out Guid guid);
            dic[guid] = value;
        }
    }
    return dic;
}
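// Usage sketch (hypothetical, not from the original source): snapshot and dump
// every GUID/value pair of an attribute store. `someMediaAttributes` stands in
// for any live MediaAttributes instance.
foreach (var pair in GetAttributes(someMediaAttributes))
{
    Console.WriteLine($"{pair.Key} = {pair.Value}");
}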
/// <summary>
/// Sets and initializes the target stream for the encoding process.
/// </summary>
/// <param name="stream">Stream which should be used as the target stream.</param>
/// <param name="inputMediaType">Media type of the raw input data to encode.</param>
/// <param name="targetMediaType">Media type of the encoded data.</param>
/// <param name="containerType">Container type which should be used.</param>
protected void SetTargetStream(Stream stream, MediaType inputMediaType, MediaType targetMediaType, Guid containerType)
{
    MediaAttributes attributes = null;
    try
    {
        // n.b. assumes the stream is positioned at 0 and a single Read fills the buffer
        var buffer = new byte[stream.Length];
        stream.Read(buffer, 0, buffer.Length);
        _targetStream = new ByteStream(buffer);

        attributes = new MediaAttributes(2);
        attributes.Set(MediaFoundationAttributes.MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, 1);
        attributes.Set(MediaFoundationAttributes.MF_TRANSCODE_CONTAINERTYPE, containerType);

        _sinkWriter = SinkWriter.Create(_targetStream, attributes);
        _streamIndex = _sinkWriter.AddStream(targetMediaType);
        _sinkWriter.SetInputMediaType(_streamIndex, inputMediaType, null);

        _targetMediaType = targetMediaType;
        _sourceBytesPerSecond = inputMediaType.AverageBytesPerSecond;

        //initialize the sinkwriter
        _sinkWriter.BeginWriting();
    }
    catch (Exception)
    {
        if (_sinkWriter != null)
        {
            _sinkWriter.Dispose();
            _sinkWriter = null;
        }
        if (_targetStream != null)
        {
            _targetStream.Dispose();
            _targetStream = null;
        }
        throw;
    }
    finally
    {
        if (attributes != null)
        {
            attributes.Dispose();
        }
    }
}
MediaAttributes GetSinkWriterAttributes(Device Device)
{
    var attr = new MediaAttributes(6);

    attr.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms, 1);
    attr.Set(SinkWriterAttributeKeys.ReadwriteDisableConverters, 0);
    attr.Set(TranscodeAttributeKeys.TranscodeContainertype, TranscodeContainerTypeGuids.Mpeg4);
    attr.Set(SinkWriterAttributeKeys.LowLatency, true);

    var devMan = new DXGIDeviceManager();
    devMan.ResetDevice(Device);
    attr.Set(SinkWriterAttributeKeys.D3DManager, devMan);

    return attr;
}
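// Usage sketch (hypothetical file name): the attribute store above is meant to be
// handed to a sink writer, as the MfWriter constructor further below does.
var attr = GetSinkWriterAttributes(device);
var writer = MediaFactory.CreateSinkWriterFromURL("output.mp4", null, attr);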
private static Activate GetActivateBySymLink(string symLink)
{
    Activate activate = null;
    Activate[] activates = null;
    using (var attributes = new MediaAttributes())
    {
        MediaFactory.CreateAttributes(attributes, 2);
        attributes.Set(CaptureDeviceAttributeKeys.SourceType, CaptureDeviceAttributeKeys.SourceTypeVideoCapture.Guid);
        //attributes.Set(CaptureDeviceAttributeKeys.SourceTypeVidcapSymbolicLink, symLink);
        activates = MediaFactory.EnumDeviceSources(attributes);
    }

    if (activates == null || activates.Length == 0)
    {
        logger.Error("SourceTypeVideoCapture not found");
        return null;
    }

    foreach (var _activate in activates)
    {
        Console.WriteLine("---------------------------------------------");
        var friendlyName = _activate.Get(CaptureDeviceAttributeKeys.FriendlyName);
        var isHwSource = _activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapHwSource);
        //var maxBuffers = _activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapMaxBuffers);
        var symbolicLink = _activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapSymbolicLink);

        logger.Info("FriendlyName " + friendlyName + "\r\n" +
                    "isHwSource " + isHwSource + "\r\n" +
                    //"maxBuffers " + maxBuffers +
                    "symbolicLink " + symbolicLink);

        if (symbolicLink == symLink)
        {
            // keep the matching activator alive; dispose all the others
            activate = _activate;
            continue;
        }
        _activate?.Dispose();
    }
    return activate;
}
partial void StartCaptureCore(SystemCamera camera, CancellationToken token)
{
    using (var deviceAttributes = new MediaAttributes(2))
    {
        deviceAttributes.Set(CaptureDeviceAttributeKeys.SourceType, CaptureDeviceAttributeKeys.SourceTypeVideoCapture.Guid);
        deviceAttributes.Set(CaptureDeviceAttributeKeys.SourceTypeVidcapSymbolicLink, camera.SystemIdentifier);

        MediaFactory.CreateDeviceSource(deviceAttributes, out MediaSource videoSource);
        using (videoSource)
        {
            var captureEngine = new CaptureEngine(captureFactory);
            captureEngine.CaptureEngineEvent += evt => OnEngineEvent(captureEngine, evt);
            captureEngine.Initialize(captureEngineAttributes, null, videoSource);
        }
    }
}
public static void EnumerateCaptureSources()
{
    Activate[] activates = null;
    using (var attributes = new MediaAttributes())
    {
        MediaFactory.CreateAttributes(attributes, 1);
        attributes.Set(CaptureDeviceAttributeKeys.SourceType, CaptureDeviceAttributeKeys.SourceTypeVideoCapture.Guid);
        activates = MediaFactory.EnumDeviceSources(attributes);
    }

    if (activates == null || activates.Length == 0)
    {
        Console.WriteLine("SourceTypeVideoCapture not found");
        return;
    }

    foreach (var _activate in activates)
    {
        Console.WriteLine("---------------------------------------------");
        var friendlyName = _activate.Get(CaptureDeviceAttributeKeys.FriendlyName);
        var isHwSource = _activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapHwSource);
        //var maxBuffers = _activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapMaxBuffers);
        var symbolicLink = _activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapSymbolicLink);

        Console.WriteLine("FriendlyName " + friendlyName + "\r\n" +
                          "isHwSource " + isHwSource + "\r\n" +
                          //"maxBuffers " + maxBuffers +
                          "symbolicLink " + symbolicLink);

        var mediaSource = _activate.ActivateObject<MediaSource>();
        var log = MfTool.LogMediaSource(mediaSource);
        Console.WriteLine(log);

        mediaSource?.Dispose();
        _activate?.Dispose();
    }
}
public static Activate[] EnumerateVideoDevices()
{
    var attributes = new MediaAttributes();
    MediaFactory.CreateAttributes(attributes, 1);
    attributes.Set(CaptureDeviceAttributeKeys.SourceType, CaptureDeviceAttributeKeys.SourceTypeVideoCapture.Guid);
    var mediaFoundationActivates = MediaFactory.EnumDeviceSources(attributes);

    Activate[] result = new Activate[mediaFoundationActivates.Length];
    Dictionary<string, int[]> order = new Dictionary<string, int[]>();
    DsDevice[] capDevicesDS = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);

    // tries to match the order of the devices found by DirectShow and by Media Foundation
    for (int i = 0; i < mediaFoundationActivates.Length; i++)
    {
        var friendlyName = mediaFoundationActivates[i].Get(CaptureDeviceAttributeKeys.FriendlyName);
        var suffix = ""; // used to handle multiple devices listed with the same name
        var counter = 1;
        for (int j = 0; j < capDevicesDS.Length; j++)
        {
            var friendlyNameDS = capDevicesDS[j].Name + suffix;
            if (friendlyName + suffix == friendlyNameDS)
            {
                if (!order.ContainsKey(friendlyName + suffix))
                {
                    order.Add(friendlyName + suffix, new int[] { i, j });
                    result[j] = mediaFoundationActivates[i];
                    suffix = "";
                    break;
                }
                else
                {
                    suffix = counter++.ToString();
                    continue;
                }
            }
        }
    }
    return result;
}
/// <summary>
/// Enumerates a list of audio or video capture devices.
/// </summary>
/// <param name="attributesRef">
/// An attribute store that contains search criteria. To create the attribute store, call
/// <see cref="SharpDX.MediaFoundation.MediaFactory.CreateAttributes"/>. Set one or more of the
/// following attributes on it:
/// <see cref="SharpDX.MediaFoundation.CaptureDeviceAttributeKeys.SourceType"/> specifies whether
/// to enumerate audio or video devices (required);
/// <see cref="SharpDX.MediaFoundation.CaptureDeviceAttributeKeys.SourceTypeAudcapRole"/> specifies
/// the device role for audio capture devices (optional);
/// <see cref="SharpDX.MediaFoundation.CaptureDeviceAttributeKeys.SourceTypeVidcapCategory"/>
/// specifies the device category for video capture devices (optional).
/// </param>
/// <returns>
/// An array of <see cref="SharpDX.MediaFoundation.Activate"/> references, each representing an
/// activation object for a media source. The caller must release the references in the array.
/// If no capture devices match the search criteria, the array is empty.
/// </returns>
/// <remarks>
/// Each returned <see cref="SharpDX.MediaFoundation.Activate"/> reference represents a capture
/// device and can be used to create a media source for that device, or to query attributes that
/// describe the device. The following attributes might be set: FriendlyName (the display name of
/// the device); MediaType (the major type and subtype GUIDs that describe the device's output
/// format); SourceType (audio or video); SourceTypeAudcapEndpointId (the audio endpoint ID
/// string, audio devices only); SourceTypeVidcapCategory (the device category, video devices
/// only); SourceTypeVidcapHwSource (whether the device is a hardware or software device, video
/// devices only); SourceTypeVidcapSymbolicLink (the symbolic link for the device driver, video
/// devices only). To create a media source from an Activate reference, call
/// <see cref="SharpDX.MediaFoundation.Activate.ActivateObject"/>.
/// </remarks>
/// <msdn-id>dd388503</msdn-id>
/// <unmanaged>HRESULT MFEnumDeviceSources([In] IMFAttributes* pAttributes,[Out, Buffer] IMFActivate*** pppSourceActivate,[Out] unsigned int* pcSourceActivate)</unmanaged>
/// <unmanaged-short>MFEnumDeviceSources</unmanaged-short>
public static Activate[] EnumDeviceSources(MediaAttributes attributesRef)
{
    IntPtr devicePtr;
    int devicesCount;
    EnumDeviceSources(attributesRef, out devicePtr, out devicesCount);

    var result = new Activate[devicesCount];
    unsafe
    {
        // n.b. per the native documentation, the array memory itself should be
        // freed with CoTaskMemFree once the pointers have been wrapped
        var address = (void**)devicePtr;
        for (var i = 0; i < devicesCount; i++)
            result[i] = new Activate(new IntPtr(address[i]));
    }
    return result;
}
public static MediaFoundationStreamingSources CreateFromFile(DXGIDeviceManager dxgiDeviceManager, VariablePath ファイルパス, WaveFormat soundDeviceFormat)
{
    var sources = new MediaFoundationStreamingSources();

    #region " Create a SourceReaderEx from the file. "
    //----------------
    using (var ビデオ属性 = new MediaAttributes())
    {
        // If the GPU supports DXVA, tell the reader to use it for decoding.
        ビデオ属性.Set(SourceReaderAttributeKeys.D3DManager, dxgiDeviceManager);

        // Enable advanced video processing. (Note the value is a bool here...)
        ビデオ属性.Set(SourceReaderAttributeKeys.EnableAdvancedVideoProcessing, true);

        // With advanced video processing enabled, disable this one. (...and an int here.)
        ビデオ属性.Set(SinkWriterAttributeKeys.ReadwriteDisableConverters, 0);

        // Create the SourceReaderEx with these attributes. The path is treated as a URI.
        using (var sourceReader = new SourceReader(ファイルパス.数なしパス, ビデオ属性))
            sources._SourceReaderEx = sourceReader.QueryInterface<SourceReaderEx>();
    }
    //----------------
    #endregion

    #region " Create the WaveFormat. "
    //----------------
    sources._Audioのフォーマット = new WaveFormat(
        soundDeviceFormat.SampleRate,
        32,
        soundDeviceFormat.Channels,
        AudioEncoding.IeeeFloat);
    //----------------
    #endregion

    sources._SourceReaderEx生成後の初期化();

    return sources;
}
private static SinkWriter CreateSinkWriter(string outputFile)
{
    SinkWriter writer;
    using (var attributes = new MediaAttributes())
    {
        MediaFactory.CreateAttributes(attributes, 1);
        attributes.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms.Guid, (UInt32)1);
        try
        {
            writer = MediaFactory.CreateSinkWriterFromURL(outputFile, IntPtr.Zero, attributes);
        }
        catch (COMException e)
        {
            if (e.ErrorCode == unchecked((int)0xC00D36D5)) // MF_E_NOT_FOUND
            {
                throw new ArgumentException("Was not able to create a sink writer for this file extension");
            }
            throw;
        }
    }
    return writer;
}
public MfWriter(VideoWriterArgs Args, Device Device)
{
    if (Args.ImageProvider.EditorType == typeof(Direct2DEditor))
    {
        _inputFormat = VideoFormatGuids.NV12;
    }
    else
    {
        _inputFormat = VideoFormatGuids.Rgb32;
    }

    _device = Device;
    _frameDuration = TenPower7 / Args.FrameRate;

    var attr = GetSinkWriterAttributes(Device);
    _writer = MediaFactory.CreateSinkWriterFromURL(Args.FileName, null, attr);

    var w = Args.ImageProvider.Width;
    var h = Args.ImageProvider.Height;
    _bufferSize = w * h * 4;

    using (var mediaTypeOut = new MediaType())
    {
        mediaTypeOut.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
        mediaTypeOut.Set(MediaTypeAttributeKeys.Subtype, _encodingFormat);
        mediaTypeOut.Set(MediaTypeAttributeKeys.AvgBitrate, BitRate);
        mediaTypeOut.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
        mediaTypeOut.Set(MediaTypeAttributeKeys.FrameSize, PackLong(w, h));
        mediaTypeOut.Set(MediaTypeAttributeKeys.FrameRate, PackLong(Args.FrameRate, 1));
        mediaTypeOut.Set(MediaTypeAttributeKeys.PixelAspectRatio, PackLong(1, 1));
        _writer.AddStream(mediaTypeOut, out _);
    }

    using (var mediaTypeIn = new MediaType())
    {
        mediaTypeIn.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
        mediaTypeIn.Set(MediaTypeAttributeKeys.Subtype, _inputFormat);
        mediaTypeIn.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
        mediaTypeIn.Set(MediaTypeAttributeKeys.FrameSize, PackLong(w, h));
        mediaTypeIn.Set(MediaTypeAttributeKeys.FrameRate, PackLong(Args.FrameRate, 1));
        mediaTypeIn.Set(MediaTypeAttributeKeys.PixelAspectRatio, PackLong(1, 1));
        mediaTypeIn.Set(MediaTypeAttributeKeys.AllSamplesIndependent, 1);

        var encoderParams = new MediaAttributes(2);
        encoderParams.Set(RateControlModeKey, RateControlMode.Quality);
        encoderParams.Set(QualityKey, Args.VideoQuality);
        _writer.SetInputMediaType(VideoStreamIndex, mediaTypeIn, encoderParams);
    }

    if (Args.AudioProvider != null)
    {
        var wf = Args.AudioProvider.WaveFormat;
        _audioInBytesPerSecond = wf.SampleRate * wf.Channels * wf.BitsPerSample / 8;

        using (var audioTypeOut = GetMediaType(wf))
        {
            audioTypeOut.Set(MediaTypeAttributeKeys.Subtype, _encodedAudioFormat);
            audioTypeOut.Set(MediaTypeAttributeKeys.AudioAvgBytesPerSecond, GetAacBitrate(Args.AudioQuality));
            _writer.AddStream(audioTypeOut, out _);
        }

        using (var audioTypeIn = GetMediaType(wf))
        {
            audioTypeIn.Set(MediaTypeAttributeKeys.Subtype, AudioFormatGuids.Pcm);
            _writer.SetInputMediaType(AudioStreamIndex, audioTypeIn, null);
        }
    }

    _writer.BeginWriting();

    _copyTexture = new Texture2D(Device, new Texture2DDescription
    {
        CpuAccessFlags = CpuAccessFlags.Read,
        BindFlags = BindFlags.None,
        Format = Format.B8G8R8A8_UNorm,
        Width = w,
        Height = h,
        OptionFlags = ResourceOptionFlags.None,
        MipLevels = 1,
        ArraySize = 1,
        SampleDescription = { Count = 1, Quality = 0 },
        Usage = ResourceUsage.Staging
    });

    _sample = MediaFactory.CreateVideoSampleFromSurface(null);

    // Create the media buffer from the texture
    MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, _copyTexture, 0, false, out _mediaBuffer);

    using (var buffer2D = _mediaBuffer.QueryInterface<Buffer2D>())
        _mediaBuffer.CurrentLength = buffer2D.ContiguousLength;

    // Attach the created buffer to the sample
    _sample.AddBuffer(_mediaBuffer);
}
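// The PackLong helper referenced above is not shown in this snippet; a minimal
// sketch, assuming it follows the standard Media Foundation packing seen in the
// other writers in this collection (high 32 bits | low 32 bits):
static long PackLong(int left, int right)
{
    return ((long)left << 32) | (uint)right;
}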
public void TestBasicTypes()
{
    MediaManager.Startup();
    var attributes = new MediaAttributes();

    // 1) Test int
    var guid1 = Guid.NewGuid();
    attributes.Set(guid1, 5);
    Assert.AreEqual(attributes.Get<int>(guid1), 5);

    // 2) Test short
    guid1 = Guid.NewGuid();
    attributes.Set(guid1, (short)5);
    Assert.AreEqual(attributes.Get<short>(guid1), 5);

    // 3) Test uint
    guid1 = Guid.NewGuid();
    attributes.Set(guid1, (uint)6);
    Assert.AreEqual(attributes.Get<uint>(guid1), (uint)6);

    // 4) Test double
    guid1 = Guid.NewGuid();
    attributes.Set(guid1, 5.5);
    Assert.AreEqual(attributes.Get<double>(guid1), 5.5);

    // 5) Test float
    guid1 = Guid.NewGuid();
    attributes.Set(guid1, 5.5f);
    Assert.AreEqual(attributes.Get<float>(guid1), 5.5f);

    // 6) Test Enum
    guid1 = Guid.NewGuid();
    attributes.Set(guid1, MediaEventTypes.BufferingStarted);
    Assert.AreEqual(attributes.Get<MediaEventTypes>(guid1), MediaEventTypes.BufferingStarted);

    // 7) Test long
    guid1 = Guid.NewGuid();
    attributes.Set(guid1, (long)6);
    Assert.AreEqual(attributes.Get<long>(guid1), (long)6);

    // 8) Test ulong
    guid1 = Guid.NewGuid();
    attributes.Set(guid1, (ulong)6);
    Assert.AreEqual(attributes.Get<ulong>(guid1), (ulong)6);

    // 9) Test IntPtr
    guid1 = Guid.NewGuid();
    attributes.Set(guid1, (IntPtr)6);
    Assert.AreEqual(attributes.Get<IntPtr>(guid1), new IntPtr(6));

    // 10) Test string
    guid1 = Guid.NewGuid();
    attributes.Set(guid1, "Toto");
    Assert.AreEqual(attributes.Get<string>(guid1), "Toto");

    // 11) Test guid
    guid1 = Guid.NewGuid();
    attributes.Set(guid1, guid1);
    Assert.AreEqual(attributes.Get<Guid>(guid1), guid1);

    // 12) Test ComObject
    guid1 = Guid.NewGuid();
    attributes.Set(guid1, attributes);
    Assert.AreEqual(attributes.Get<MediaAttributes>(guid1).NativePointer, attributes.NativePointer);

    // 13) Test byte[]
    guid1 = Guid.NewGuid();
    attributes.Set(guid1, new byte[] { 1, 2, 3, 4 });
    Assert.AreEqual(attributes.Get<byte[]>(guid1), new byte[] { 1, 2, 3, 4 });

    // 14) Test Vector4
    guid1 = Guid.NewGuid();
    attributes.Set(guid1, new Vector4(1, 2, 3, 4));
    Assert.AreEqual(attributes.Get<Vector4>(guid1), new Vector4(1, 2, 3, 4));

    // Check size of media attributes
    Assert.AreEqual(attributes.Count, 14);

    for (int i = 0; i < attributes.Count; i++)
    {
        object value = attributes.GetByIndex(i, out guid1);
        Console.WriteLine("{0}) {1} ({2})", i, value, value.GetType().Name);
    }
}
// Creation and shutdown
public MediaFoundationFileVideoSource(VariablePath ファイルパス, double 再生速度 = 1.0)
{
    using var _ = new LogBlock(Log.現在のメソッド名);

    this.再生速度 = Math.Max(0.01, Math.Min(10.0, 再生速度));

    #region " Create the frame queue. "
    //----------------
    // The queue size is three frames.
    this._FrameQueue = new BlockingQueue<VideoFrame>(3);
    //----------------
    #endregion

    #region " Create a SourceReaderEx from the file. "
    //----------------
    using (var ビデオ属性 = new MediaAttributes())
    {
        // If the GPU supports DXVA, tell the reader to use it for decoding.
        ビデオ属性.Set(SourceReaderAttributeKeys.D3DManager, Global.MFDXGIDeviceManager);

        // Enable advanced video processing. (Note the value is a bool here...)
        ビデオ属性.Set(SourceReaderAttributeKeys.EnableAdvancedVideoProcessing, true);

        // With advanced video processing enabled, disable this one. (...and an int here.)
        ビデオ属性.Set(SinkWriterAttributeKeys.ReadwriteDisableConverters, 0);

        // Create the SourceReaderEx with these attributes. The path is treated as a URI.
        using var sourceReader = new SourceReader(ファイルパス.数なしパス, ビデオ属性);
        this._SourceReaderEx = sourceReader.QueryInterface<SourceReaderEx>();
    }

    // Select only the first video stream.
    this._SourceReaderEx.SetStreamSelection(SourceReaderIndex.AllStreams, false);
    this._SourceReaderEx.SetStreamSelection(SourceReaderIndex.FirstVideoStream, true);
    //----------------
    #endregion

    #region " Get the video duration. "
    //----------------
    this.総演奏時間sec = (long)((this._SourceReaderEx.GetPresentationAttribute(SourceReaderIndex.MediaSource, PresentationDescriptionAttributeKeys.Duration) / this.再生速度) / 10_000_000.0);
    //----------------
    #endregion

    #region " Select a decoder and get the completed media type. "
    //----------------
    // Set up a partial media type.
    using (var 部分MediaType = new MediaType())
    {
        // The format is fixed to ARGB32. (Note that without SourceReaderEx, ARGB32 cannot be selected for H264.)
        部分MediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
        部分MediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.Argb32);

        // Set the partial media type on the SourceReaderEx; it will load whatever decoder it needs.
        this._SourceReaderEx.SetCurrentMediaType(SourceReaderIndex.FirstVideoStream, 部分MediaType);
    }

    // Get the completed media type.
    this._MediaType = this._SourceReaderEx.GetCurrentMediaType(SourceReaderIndex.FirstVideoStream);
    //----------------
    #endregion

    #region " Get the video frame size. (Mid-stream size changes are not handled.) "
    //----------------
    long packedFrameSize = this._MediaType.Get(MediaTypeAttributeKeys.FrameSize);
    this.フレームサイズ = new Size2F((packedFrameSize >> 32) & 0xFFFFFFFF, packedFrameSize & 0xFFFFFFFF);
    //----------------
    #endregion

    this._デコードキャンセル = new CancellationTokenSource();
    this._デコード起動完了通知 = new ManualResetEventSlim(false);
    this._一時停止解除通知 = new ManualResetEventSlim(true);
}
public static SourceReader CreateSourceReaderFromByteStream(ByteStream byteStream, MediaAttributes attributes)
{
    return new SourceReader(byteStream, attributes);
}
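// Usage sketch (hypothetical, not from the original source): wrap an in-memory
// file in a ByteStream and read it. `mp4Bytes` is assumed to hold a complete
// media file; the attributes argument may be null for default behavior.
var reader = CreateSourceReaderFromByteStream(new ByteStream(mp4Bytes), null);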
public MediaFoundationFileVideoSource(VariablePath ファイルパス, double 再生速度 = 1.0)
{
    this.再生速度 = Math.Max(0.01, Math.Min(10.0, 再生速度));

    #region " Create the frame queue. "
    //----------------
    // The queue size is three frames.
    this._FrameQueue = new BlockingQueue<VideoFrame>(3);
    //----------------
    #endregion

    #region " Create a SourceReaderEx from the file. "
    //----------------
    using (var ビデオ属性 = new MediaAttributes())
    {
        // If the GPU supports DXVA, tell the reader to use it for decoding.
        ビデオ属性.Set(SourceReaderAttributeKeys.D3DManager, グラフィックデバイス.Instance.DXGIDeviceManager);

        // Enable advanced video processing. (Note the value is a bool here...)
        ビデオ属性.Set(SourceReaderAttributeKeys.EnableAdvancedVideoProcessing, true);

        // With advanced video processing enabled, disable this one. (...and an int here.)
        ビデオ属性.Set(SinkWriterAttributeKeys.ReadwriteDisableConverters, 0);

        // Create the SourceReaderEx with these attributes. The path is treated as a URI.
        using (var sourceReader = new SourceReader(ファイルパス.数なしパス, ビデオ属性))
            this._SourceReaderEx = sourceReader.QueryInterface<SourceReaderEx>();
    }

    // Select only the first video stream.
    this._SourceReaderEx.SetStreamSelection(SourceReaderIndex.AllStreams, false);
    this._SourceReaderEx.SetStreamSelection(SourceReaderIndex.FirstVideoStream, true);
    //----------------
    #endregion

    #region " Get the video duration. "
    //----------------
    this.総演奏時間sec = FDKUtilities.換_100ns単位からsec単位へ(
        this._SourceReaderEx.GetPresentationAttribute(SourceReaderIndex.MediaSource, PresentationDescriptionAttributeKeys.Duration)) / this.再生速度;
    //----------------
    #endregion

    #region " Select a decoder and get the completed media type. "
    //----------------
    // Set up a partial media type.
    using (var videoMediaType = new MediaType())
    {
        // The format is fixed to ARGB32. (Note that without SourceReaderEx, ARGB32 cannot be selected for H264.)
        videoMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
        videoMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.Argb32);

        // Set the partial media type on the SourceReaderEx; it will load whatever decoder it needs.
        this._SourceReaderEx.SetCurrentMediaType(SourceReaderIndex.FirstVideoStream, videoMediaType);
    }

    // Get the completed media type.
    this._MediaType = this._SourceReaderEx.GetCurrentMediaType(SourceReaderIndex.FirstVideoStream);
    //----------------
    #endregion

    #region " Get the video frame size. "
    //----------------
    // Mid-stream size changes are not handled.
    long packedFrameSize = this._MediaType.Get(MediaTypeAttributeKeys.FrameSize);
    this.フレームサイズ = new Size2F((packedFrameSize >> 32) & 0xFFFFFFFF, packedFrameSize & 0xFFFFFFFF);
    //----------------
    #endregion
}
public void Setup(string fileName, MfVideoArgs Args)
{
    logger.Debug("MfWriter::Init(...)");

    var inputFormat = VideoFormatGuids.NV12;
    // var inputFormat = VideoFormatGuids.Rgb32;

    frameDuration = 10_000_000 / Args.FrameRate;
    var width = Args.Width;
    var height = Args.Height;
    var bufSize = width * height * 4;

    try
    {
        using (var attr = new MediaAttributes(6))
        {
            attr.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms, 1);
            attr.Set(SinkWriterAttributeKeys.ReadwriteDisableConverters, 0);
            attr.Set(TranscodeAttributeKeys.TranscodeContainertype, TranscodeContainerTypeGuids.Mpeg4);
            attr.Set(SinkWriterAttributeKeys.LowLatency, true);
            attr.Set(SinkWriterAttributeKeys.DisableThrottling, 1);

            using (var devMan = new DXGIDeviceManager())
            {
                devMan.ResetDevice(device);
                attr.Set(SinkWriterAttributeKeys.D3DManager, devMan);
            }

            sinkWriter = MediaFactory.CreateSinkWriterFromURL(fileName, null, attr);
        }

        using (var outputMediaType = new MediaType())
        {
            outputMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
            outputMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.H264);
            outputMediaType.Set(MediaTypeAttributeKeys.AvgBitrate, 8_000_000);
            outputMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
            outputMediaType.Set(MediaTypeAttributeKeys.FrameSize, MfTool.PackToLong(width, height));
            // FrameRate is a packed numerator/denominator ratio like the other 64-bit attributes
            outputMediaType.Set(MediaTypeAttributeKeys.FrameRate, MfTool.PackToLong(Args.FrameRate, 1));
            outputMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, MfTool.PackToLong(1, 1));

            sinkWriter.AddStream(outputMediaType, out videoStreamIndex);
            Debug.WriteLine("mediaTypeOut " + videoStreamIndex);
        }

        using (var inputMediaType = new MediaType())
        {
            inputMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
            inputMediaType.Set(MediaTypeAttributeKeys.Subtype, inputFormat);
            inputMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
            inputMediaType.Set(MediaTypeAttributeKeys.FrameSize, MfTool.PackToLong(width, height));
            inputMediaType.Set(MediaTypeAttributeKeys.FrameRate, MfTool.PackToLong(Args.FrameRate, 1));
            inputMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, MfTool.PackToLong(1, 1));
            inputMediaType.Set(MediaTypeAttributeKeys.AllSamplesIndependent, 1);

            using (var encoderParams = new MediaAttributes(2))
            {
                encoderParams.Set(CodecApiPropertyKeys.AVEncCommonRateControlMode, RateControlMode.Quality);
                encoderParams.Set(CodecApiPropertyKeys.AVEncCommonQuality, Args.Quality);
                // use the stream index returned by AddStream above
                sinkWriter.SetInputMediaType(videoStreamIndex, inputMediaType, encoderParams);
            }
        }

        bufTexture = new Texture2D(device, new Texture2DDescription
        {
            CpuAccessFlags = CpuAccessFlags.Read,
            BindFlags = BindFlags.None,
            Format = Format.B8G8R8A8_UNorm,
            Width = width,
            Height = height,
            OptionFlags = ResourceOptionFlags.None,
            MipLevels = 1,
            ArraySize = 1,
            SampleDescription = { Count = 1, Quality = 0 },
            Usage = ResourceUsage.Staging
        });

        videoSample = MediaFactory.CreateVideoSampleFromSurface(null);

        // Create the media buffer from the texture
        MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, bufTexture, 0, false, out mediaBuffer);
        using (var buffer2D = mediaBuffer.QueryInterface<Buffer2D>())
        {
            mediaBuffer.CurrentLength = buffer2D.ContiguousLength;
        }

        // Attach the created buffer to the sample
        videoSample.AddBuffer(mediaBuffer);
    }
    catch (Exception ex)
    {
        logger.Error(ex);
        Close();
        throw;
    }
}
public static MediaFoundationStreamingSources CreateFromニコ動(string user_id, string password, string video_id, WaveFormat soundDeviceFormat)
{
    var sources = new MediaFoundationStreamingSources();

    #region " Create a SourceReaderEx from Nico Nico Douga. "
    //----------------
    if (null == _HttpClient)
    {
        _HttpClient = new HttpClient();
    }

    // Log in.
    var content = new FormUrlEncodedContent(new Dictionary<string, string>
    {
        { "mail", user_id },
        { "password", password },
        { "next_url", string.Empty },
    });
    using (var responseLogin = _HttpClient.PostAsync("https://secure.nicovideo.jp/secure/login?site=niconico", content).Result)
    {
    }

    // Access the video page (before calling getflv).
    var responseWatch = _HttpClient.GetStringAsync($"http://www.nicovideo.jp/watch/{video_id}").Result;

    // Get the video information.
    var responseGetFlv = _HttpClient.GetStringAsync($"http://flapi.nicovideo.jp/api/getflv/{video_id}").Result;
    var flvmap = HttpUtility.ParseQueryString(responseGetFlv);
    var flvurl = flvmap["url"];

    // Get the length of the video.
    ulong 長さbyte = 0;
    string contentType = "";
    using (var requestMovie = new HttpRequestMessage(HttpMethod.Get, flvurl))
    using (var responseMovie = _HttpClient.SendAsync(requestMovie, HttpCompletionOption.ResponseHeadersRead).Result)
    {
        長さbyte = (ulong)(responseMovie.Content.Headers.ContentLength);
        contentType = responseMovie.Content.Headers.ContentType.MediaType;
    }

    // Create the IMFByteStream.
    sources._ByteStream = new ByteStream(IntPtr.Zero);
    sources._HttpRandomAccessStream = new HttpRandomAccessStream(_HttpClient, 長さbyte, flvurl);
    sources._unkHttpRandomAccessStream = new ComObject(Marshal.GetIUnknownForObject(sources._HttpRandomAccessStream));
    MediaFactory.CreateMFByteStreamOnStreamEx(sources._unkHttpRandomAccessStream, sources._ByteStream);
    using (var 属性 = sources._ByteStream.QueryInterfaceOrNull<MediaAttributes>())
    {
        // Set the content type.
        属性.Set(ByteStreamAttributeKeys.ContentType, contentType);
    }

    // Use a SourceResolver to get a MediaSource from the IMFByteStream.
    using (var sourceResolver = new SourceResolver())
    using (var unkMediaSource = sourceResolver.CreateObjectFromStream(sources._ByteStream, null, SourceResolverFlags.MediaSource))
    {
        sources._MediaSource = unkMediaSource.QueryInterface<MediaSource>();

        // Create the SourceReaderEx from the MediaSource.
        using (var 属性 = new MediaAttributes())
        {
            // If the GPU supports DXVA, tell the reader to use it for decoding.
            属性.Set(SourceReaderAttributeKeys.D3DManager, グラフィックデバイス.Instance.MFDXGIDeviceManager);

            // Enable advanced video processing. (Note the value is a bool here...)
            属性.Set(SourceReaderAttributeKeys.EnableAdvancedVideoProcessing, true);

            // With advanced video processing enabled, disable this one. (...and an int here.)
            属性.Set(SinkWriterAttributeKeys.ReadwriteDisableConverters, 0);

            // Create the SourceReaderEx with these attributes.
            using (var sourceReader = new SourceReader(sources._MediaSource, 属性))
            {
                sources._SourceReaderEx = sourceReader.QueryInterfaceOrNull<SourceReaderEx>();
            }
        }
    }
    //----------------
    #endregion

    #region " Create the WaveFormat. "
    //----------------
    sources._Audioのフォーマット = new WaveFormat(
        soundDeviceFormat.SampleRate,
        32,
        soundDeviceFormat.Channels,
        AudioEncoding.IeeeFloat);
    //----------------
    #endregion

    sources._SourceReaderEx生成後の初期化();

    return sources;
}
public void Setup(int deviceIndex = 0)
{
    logger.Debug("VideoCaptureSource::Setup()");

    Activate[] activates = null;
    using (var attributes = new MediaAttributes())
    {
        MediaFactory.CreateAttributes(attributes, 1);
        attributes.Set(CaptureDeviceAttributeKeys.SourceType, CaptureDeviceAttributeKeys.SourceTypeVideoCapture.Guid);
        activates = MediaFactory.EnumDeviceSources(attributes);
    }

    if (activates == null || activates.Length == 0)
    {
        logger.Error("SourceTypeVideoCapture not found");
        Console.ReadKey();
        return; // nothing to set up; continuing would index into an empty array
    }

    foreach (var activate in activates)
    {
        Console.WriteLine("---------------------------------------------");
        var friendlyName = activate.Get(CaptureDeviceAttributeKeys.FriendlyName);
        var isHwSource = activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapHwSource);
        //var maxBuffers = activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapMaxBuffers);
        var symbolicLink = activate.Get(CaptureDeviceAttributeKeys.SourceTypeVidcapSymbolicLink);

        logger.Info("FriendlyName " + friendlyName + "\r\n" +
                    "isHwSource " + isHwSource + "\r\n" +
                    //"maxBuffers " + maxBuffers +
                    "symbolicLink " + symbolicLink);
    }

    var currentActivator = activates[deviceIndex];
    mediaSource = currentActivator.ActivateObject<MediaSource>();
    foreach (var a in activates)
    {
        a.Dispose();
    }

    using (var mediaAttributes = new MediaAttributes(IntPtr.Zero))
    {
        MediaFactory.CreateAttributes(mediaAttributes, 2);
        mediaAttributes.Set(SourceReaderAttributeKeys.EnableVideoProcessing, 1);
        //var devMan = new DXGIDeviceManager();
        //devMan.ResetDevice(device);
        //mediaAttributes.Set(SourceReaderAttributeKeys.D3DManager, devMan);

        //MediaFactory.CreateSourceReaderFromMediaSource(mediaSource, mediaAttributes, sourceReader);
        sourceReader = new SourceReader(mediaSource, mediaAttributes);
    }

    Console.WriteLine("------------------CurrentMediaType-------------------");
    var mediaType = sourceReader.GetCurrentMediaType(SourceReaderIndex.FirstVideoStream);
    Console.WriteLine(MfTool.LogMediaType(mediaType));

    var frameSize = MfTool.GetFrameSize(mediaType);
    var subtype = mediaType.Get(MediaTypeAttributeKeys.Subtype);
    mediaType?.Dispose();

    int adapterIndex = 0;
    using (var dxgiFactory = new SharpDX.DXGI.Factory1())
    {
        var adapter = dxgiFactory.Adapters1[adapterIndex];
        device = new Device(adapter,
            //DeviceCreationFlags.Debug |
            DeviceCreationFlags.VideoSupport | DeviceCreationFlags.BgraSupport);
        using (var multiThread = device.QueryInterface<SharpDX.Direct3D11.Multithread>())
        {
            multiThread.SetMultithreadProtected(true);
        }
    }

    SharedTexture = new Texture2D(device, new Texture2DDescription
    {
        CpuAccessFlags = CpuAccessFlags.None,
        BindFlags = BindFlags.RenderTarget | BindFlags.ShaderResource,
        Format = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
        Width = frameSize.Width,
        Height = frameSize.Height,
        MipLevels = 1,
        ArraySize = 1,
        SampleDescription = { Count = 1, Quality = 0 },
        Usage = ResourceUsage.Default,
        //OptionFlags = ResourceOptionFlags.GdiCompatible,
        OptionFlags = ResourceOptionFlags.Shared,
    });

    texture = new Texture2D(device, new Texture2DDescription
    {
        CpuAccessFlags = CpuAccessFlags.Read,
        BindFlags = BindFlags.None,
        Format = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
        Width = frameSize.Width,
        Height = frameSize.Height,
        MipLevels = 1,
        ArraySize = 1,
        SampleDescription = { Count = 1, Quality = 0 },
        Usage = ResourceUsage.Staging,
        OptionFlags = ResourceOptionFlags.None,
    });

    processor = new MfVideoProcessor(null);
    var inProcArgs = new MfVideoArgs
    {
        Width = frameSize.Width,
        Height = frameSize.Height,
        Format = subtype, // e.g. VideoFormatGuids.NV12, as reported by the device
    };
    var outProcArgs = new MfVideoArgs
    {
        Width = frameSize.Width,
        Height = frameSize.Height,
        Format = VideoFormatGuids.Argb32,
    };
    processor.Setup(inProcArgs, outProcArgs);

    //processor.SetMirror(VideoProcessorMirror.MirrorHorizontal);
    processor.SetMirror(VideoProcessorMirror.MirrorVertical);
}
/// <summary>
/// Called by the ctor to configure the media playback component.
/// </summary>
private void InitializeMediaPipeline()
{
    MediaManager.Startup(false);
    MediaAttributes sourceReaderAttributes = new MediaAttributes();
    sourceReaderAttributes.Set(SourceReaderAttributeKeys.EnableAdvancedVideoProcessing, true);
    this.sourceReader = new SourceReader(this.filename, sourceReaderAttributes);
    this.sourceReader.SetStreamSelection(SourceReaderIndex.AllStreams, false);

    int streamIndex = 0;
    bool doneEnumerating = false;
    while (!doneEnumerating)
    {
        try
        {
            MediaType mediaType = this.sourceReader.GetCurrentMediaType(streamIndex);
            var subType = mediaType.Get(MediaTypeAttributeKeys.Subtype);
            DumpMediaType(mediaType);

            if (mediaType.MajorType == MediaTypeGuids.Video && this.imageStreamIndex == -1)
            {
                this.imageStreamIndex = streamIndex;

                // get the image size
                long frameSize = mediaType.Get(MediaTypeAttributeKeys.FrameSize);
                this.videoHeight = (short)frameSize;
                this.videoWidth = (short)(frameSize >> 32);

                // enable the stream and set the current media type
                this.sourceReader.SetStreamSelection(this.imageStreamIndex, true);
                mediaType = new MediaType();
                mediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
                mediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.Rgb24);
                mediaType.Set(MediaTypeAttributeKeys.FrameSize, frameSize);
                this.sourceReader.SetCurrentMediaType(this.imageStreamIndex, mediaType);
            }
            else if (mediaType.MajorType == MediaTypeGuids.Audio && this.audioStreamIndex == -1)
            {
                this.audioStreamIndex = streamIndex;

                // enable the stream and set the current media type to PCM
                this.sourceReader.SetStreamSelection(this.audioStreamIndex, true);
                mediaType = new MediaType();
                mediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Audio);
                mediaType.Set(MediaTypeAttributeKeys.Subtype, AudioFormatGuids.Pcm);
                this.sourceReader.SetCurrentMediaType(this.audioStreamIndex, mediaType);

                // get back all the media type details
                mediaType = this.sourceReader.GetCurrentMediaType(this.audioStreamIndex);
                int numberOfChannels = mediaType.Get(MediaTypeAttributeKeys.AudioNumChannels);
                int sampleRate = mediaType.Get(MediaTypeAttributeKeys.AudioSamplesPerSecond);
                int bitsPerSample = mediaType.Get(MediaTypeAttributeKeys.AudioBitsPerSample);

                // post our output audio format
                this.waveFormat = WaveFormat.CreatePcm(sampleRate, bitsPerSample, numberOfChannels);
            }
        }
        catch (Exception e)
        {
            Debug.Write(e.GetType());

            // expected thrown exception;
            // unfortunately no way to tell how many streams other than trying
            doneEnumerating = true;
        }
        streamIndex += 1;
    }
}
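// The FrameSize handling above packs width into the high 32 bits and height into
// the low 32 bits; a minimal unpacking sketch (hypothetical helper, not from the
// original source):
private static (int Width, int Height) UnpackFrameSize(long packed)
{
    return ((int)(packed >> 32), (int)(packed & 0xFFFFFFFF));
}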
public void Setup(string fileName, Direct3DDeviceManager devMan = null)
{
    logger.Debug("VideoFileSource::Setup()");

    using (var sourceResolver = new SourceResolver())
    {
        var unkObj = sourceResolver.CreateObjectFromURL(fileName, SourceResolverFlags.MediaSource);
        var guid = typeof(MediaSource).GUID;
        unkObj.QueryInterface(ref guid, out var pUnk);
        mediaSource = new MediaSource(pUnk);
    }

    using (var mediaAttributes = new MediaAttributes(IntPtr.Zero))
    {
        MediaFactory.CreateAttributes(mediaAttributes, 5);
        //mediaAttributes.Set(SourceReaderAttributeKeys.EnableVideoProcessing, 1);
        if (devMan != null)
        {
            //mediaAttributes.Set(SourceReaderAttributeKeys.DisableDxva, 0);
            mediaAttributes.Set(SourceReaderAttributeKeys.D3DManager, devMan);
        }
        //mediaAttributes.Set(CodecApiPropertyKeys.AVLowLatencyMode, false);

        sourceReader = new SourceReader(mediaSource, mediaAttributes);
    }

    var charact = mediaSource.Characteristics;
    Console.WriteLine(MfTool.LogEnumFlags((MediaSourceCharacteristics)charact));

    Console.WriteLine("------------------CurrentMediaType-------------------");
    int videoStreamIndex = (int)SourceReaderIndex.FirstVideoStream;
    using (var currentMediaType = sourceReader.GetCurrentMediaType(videoStreamIndex))
    {
        Console.WriteLine(MfTool.LogMediaType(currentMediaType));

        var frameSize = currentMediaType.Get(MediaTypeAttributeKeys.FrameSize);
        var frameRate = currentMediaType.Get(MediaTypeAttributeKeys.FrameRate);

        OutputMediaType = new MediaType();
        OutputMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
        OutputMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.NV12); // VideoFormatGuids.Yv12
        OutputMediaType.Set(MediaTypeAttributeKeys.FrameSize, frameSize);
        OutputMediaType.Set(MediaTypeAttributeKeys.FrameRate, frameRate);
        OutputMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
        OutputMediaType.Set(MediaTypeAttributeKeys.AllSamplesIndependent, 1);
        sourceReader.SetCurrentMediaType(videoStreamIndex, OutputMediaType);

        Console.WriteLine("------------------NEW MediaType-------------------");
        Console.WriteLine(MfTool.LogMediaType(OutputMediaType));
    }
}