/// <summary>
/// Adds an output stream encoded as <paramref name="encoding"/> to the writer
/// and tells the new stream which input media type it will be fed.
/// </summary>
/// <param name="sinkWriter">Writer that receives the new stream.</param>
/// <param name="input">Media type of the raw samples pushed into the stream.</param>
/// <param name="encoding">Media type the stream encodes to.</param>
/// <returns>The newly created sink stream.</returns>
SinkStream AddStream(SinkWriter sinkWriter, MediaType input, MediaType encoding)
{
    SinkStream stream = sinkWriter.AddStream(encoding);
    stream.InputMediaType = input;
    return stream;
}
/// <summary>
/// Disposes the <see cref="MediaFoundationEncoder" />.
/// </summary>
/// <param name="disposing">
/// True to release both managed and unmanaged resources; false to release only unmanaged
/// resources.
/// </param>
/// <remarks>
/// NOTE(review): the <paramref name="disposing"/> flag is currently not consulted —
/// managed members are released on every call. Confirm whether a finalizer path
/// exists that should skip the managed cleanup.
/// </remarks>
protected virtual void Dispose(bool disposing)
{
    if (!_disposed)
    {
        if (_sinkWriter != null && !_sinkWriter.IsDisposed)
        {
            //thanks to martin48 (and naudio??) for providing the following source code (see http://cscore.codeplex.com/discussions/574280):
            // Only finalize the writer when it actually queued or received data —
            // presumably finalizing an empty writer fails (see the
            // MF_E_SINK_NO_SAMPLES_PROCESSED handling elsewhere in this codebase).
            SinkWriterStatistics statistics = _sinkWriter.GetStatistics(_streamIndex);
            if (statistics.DwByteCountQueued > 0 || statistics.QwNumSamplesReceived > 0)
            {
                _sinkWriter.Finalize();
            }
            _sinkWriter.Dispose();
            _sinkWriter = null;
        }
        if (_targetStream != null)
        {
            // Push buffered bytes to the underlying stream before disposing it.
            _targetStream.Flush();
            _targetStream.Dispose();
            _targetStream = null;
        }
    }
    // Mark disposed even if we were already disposed; repeated calls are no-ops.
    _disposed = true;
}
/// <summary>
/// Creates a sink writer for <paramref name="FileName"/> and configures one audio
/// stream: encoded output in <paramref name="MediaSubtype"/>, PCM input matching
/// <paramref name="Wf"/>. Ends by putting the writer into the writing state.
/// </summary>
public MfAudioWriter(string FileName, Guid MediaSubtype, WaveFormat Wf, int AudioQuality)
{
    _writer = MediaFactory.CreateSinkWriterFromURL(FileName, null, null);

    // Raw PCM throughput of the input, in bytes per second.
    _audioInBytesPerSecond = Wf.SampleRate * Wf.Channels * Wf.BitsPerSample / 8;

    using (var outputType = MfWriter.GetMediaType(Wf))
    {
        outputType.Set(MediaTypeAttributeKeys.Subtype, MediaSubtype);

        // AAC wants an explicit average-bytes-per-second hint derived from the
        // requested quality level.
        if (MediaSubtype == AudioFormatGuids.Aac)
        {
            outputType.Set(MediaTypeAttributeKeys.AudioAvgBytesPerSecond, MfWriter.GetAacBitrate(AudioQuality));
        }

        _writer.AddStream(outputType, out _);
    }

    using (var inputType = MfWriter.GetMediaType(Wf))
    {
        inputType.Set(MediaTypeAttributeKeys.Subtype, AudioFormatGuids.Pcm);
        _writer.SetInputMediaType(StreamIndex, inputType, null);
    }

    _writer.BeginWriting();
}
/// <summary>
/// Marks the writer closed and releases the sink writer, staging texture,
/// video sample and media buffer. Safe to call when already closed.
/// </summary>
public void Close()
{
    logger.Debug("MfWriter::Close()");
    closed = true;

    sinkWriter?.Dispose();
    sinkWriter = null;

    bufTexture?.Dispose();
    bufTexture = null;

    videoSample?.Dispose();
    videoSample = null;

    mediaBuffer?.Dispose();
    mediaBuffer = null;
}
/// <summary>
/// Reads one chunk of audio from <paramref name="inputProvider"/>, copies it into
/// a Media Foundation sample and writes it to the sink writer.
/// </summary>
/// <param name="writer">Sink writer receiving the sample.</param>
/// <param name="streamIndex">Target stream index on the writer.</param>
/// <param name="inputProvider">Source of raw audio bytes.</param>
/// <param name="position">Sample time (100-ns units) of this chunk.</param>
/// <param name="managedBuffer">Reusable managed staging buffer.</param>
/// <returns>The duration written (100-ns units), or 0 when the source is exhausted.</returns>
private long ConvertOneBuffer(SinkWriter writer, int streamIndex, IWaveProvider inputProvider, long position, byte[] managedBuffer)
{
    long durationConverted = 0;

    using var buffer = MediaFactory.CreateMemoryBuffer(managedBuffer.Length);
    using var sample = MediaFactory.CreateSample();
    sample.AddBuffer(buffer);

    var ptr = buffer.Lock(out int maxLength, out int currentLength);
    try
    {
        // Clamp to the managed buffer's size: if the native buffer ever reports
        // more capacity than managedBuffer.Length, the original Read call could
        // have thrown on an out-of-range count.
        int bytesToRead = Math.Min(maxLength, managedBuffer.Length);
        int read = inputProvider.Read(managedBuffer, 0, bytesToRead);
        if (read > 0)
        {
            durationConverted = BytesToNsPosition(read, inputProvider.WaveFormat);
            Marshal.Copy(managedBuffer, 0, ptr, read);
            buffer.CurrentLength = read;
        }
    }
    finally
    {
        // Always unlock — the original skipped Unlock if Read or Copy threw.
        buffer.Unlock();
    }

    if (durationConverted > 0)
    {
        sample.SampleTime = position;
        sample.SampleDuration = durationConverted;
        writer.WriteSample(streamIndex, sample);
        //writer.Flush(streamIndex);
    }

    return durationConverted;
}
/// <inheritdoc />
/// <summary>
/// Starts the encoder after all properties have been initialized: sets up the
/// byte stream, sink writer, output media type and input media type (NV12 with
/// ARGB32 fallback), then begins writing.
/// </summary>
public override void Start()
{
    MediaFactory.Startup(MediaFactory.Version, MFSTARTUP_NOSOCKET);

    using (var attrs = new MediaAttributes())
    {
        attrs.Set(TranscodeAttributeKeys.TranscodeContainertype, this.containerType);
        attrs.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms, 1);
        attrs.Set(SinkWriterAttributeKeys.LowLatency, true);
        if (this.dxgiManager != null)
        {
            attrs.Set(SinkWriterAttributeKeys.D3DManager, this.dxgiManager);
        }

        // create byte stream and sink writer
        this.byteStream = new ByteStream(DestinationStream);
        this.sinkWriter = MediaFactory.CreateSinkWriterFromURL(null, this.byteStream, attrs);

        // BUG FIX: the original wrote "(1 << 32) | 1". Shifting an Int32 by 32
        // is masked to a shift of 0 by the C# spec, so the expression evaluated
        // to 1 instead of the packed 1:1 ratio (numerator in the high dword).
        long pixelAspectRatio = ((long)1 << 32) | 1;

        // create output media type
        using (var outMediaType = new SharpDX.MediaFoundation.MediaType())
        {
            outMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
            outMediaType.Set(MediaTypeAttributeKeys.Subtype, this.videoFormat);
            outMediaType.Set(MediaTypeAttributeKeys.AvgBitrate, this.bitRate);
            outMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
            outMediaType.Set(MediaTypeAttributeKeys.FrameSize, ((long)FrameSize.Width << 32) | (uint)FrameSize.Height);
            outMediaType.Set(MediaTypeAttributeKeys.FrameRate, ((long)this.frameRate << 32) | 1);
            outMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, pixelAspectRatio);
            this.sinkWriter.AddStream(outMediaType, out this.streamIdx);
        }

        // create input media type
        using (var inMediaType = new SharpDX.MediaFoundation.MediaType())
        {
            inMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
            inMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
            inMediaType.Set(MediaTypeAttributeKeys.FrameSize, ((long)FrameSize.Width << 32) | (uint)FrameSize.Height);
            inMediaType.Set(MediaTypeAttributeKeys.FrameRate, ((long)this.frameRate << 32) | 1);
            inMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, pixelAspectRatio);

            try
            {
                // use NV12 YUV encoding
                inMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.NV12);
                this.sinkWriter.SetInputMediaType(this.streamIdx, inMediaType, null);
            }
            catch (SharpDXException exception) when (exception.ResultCode == SharpDX.MediaFoundation.ResultCode.InvalidMediaType)
            {
                // XXX: fall back to ARGB32 when the encoder rejects NV12
                inMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.Argb32);
                this.sinkWriter.SetInputMediaType(this.streamIdx, inMediaType, null);
            }
        }

        this.sinkWriter.BeginWriting();
    }
}
/// <inheritdoc />
/// <summary>
/// Begins encoding the video.
/// </summary>
/// <param name="frameSize">Frame size.</param>
/// <param name="stream">Output stream.</param>
public void Initialize(Size frameSize, Stream stream)
{
    MediaFactory.Startup(MediaFactory.Version, NoSocket);

    using (var attrs = new MediaAttributes())
    {
        attrs.Set(TranscodeAttributeKeys.TranscodeContainertype, ContainerType);
        attrs.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms, 1);
        attrs.Set(SinkWriterAttributeKeys.DisableThrottling, 1);
        attrs.Set(SinkWriterAttributeKeys.LowLatency, true);

        if (SurfacePointer != IntPtr.Zero)
        {
            // get the source surface
            this.surface = new Texture2D(SurfacePointer);

            // create and bind a DXGI device manager
            this.dxgiManager = new DXGIDeviceManager();
            this.dxgiManager.ResetDevice(this.surface.Device);
            attrs.Set(SinkWriterAttributeKeys.D3DManager, this.dxgiManager);
        }

        // create byte stream and sink writer
        this.byteStream = new ByteStream(stream);
        this.sinkWriter = MediaFactory.CreateSinkWriterFromURL(null, this.byteStream.NativePointer, attrs);

        // BUG FIX: the original wrote "(1 << 32) | (uint)1". The Int32 shift by
        // 32 is masked to a shift of 0 by the C# spec, so the whole expression
        // evaluated to 1 instead of the packed 1:1 pixel-aspect ratio.
        long pixelAspectRatio = ((long)1 << 32) | 1;

        // create output media type
        using (var outMediaType = new MediaType())
        {
            outMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
            outMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormat);
            outMediaType.Set(MediaTypeAttributeKeys.AvgBitrate, BitRate);
            outMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
            outMediaType.Set(MediaTypeAttributeKeys.FrameSize, ((long)frameSize.Width << 32) | (uint)frameSize.Height);
            outMediaType.Set(MediaTypeAttributeKeys.FrameRate, ((long)FrameRate << 32) | 1);
            outMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, pixelAspectRatio);
            this.sinkWriter.AddStream(outMediaType, out this.streamIdx);
        }

        // create input media type (RGB32 frames from the captured surface)
        using (var inMediaType = new MediaType())
        {
            inMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
            inMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.Rgb32);
            inMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
            inMediaType.Set(MediaTypeAttributeKeys.FrameSize, ((long)frameSize.Width << 32) | (uint)frameSize.Height);
            inMediaType.Set(MediaTypeAttributeKeys.FrameRate, ((long)FrameRate << 32) | 1);
            inMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, pixelAspectRatio);
            this.sinkWriter.SetInputMediaType(this.streamIdx, inMediaType, null);

            this.sinkWriter.BeginWriting();
        }
    }
}
/// <summary>
/// Determines whether the encoder bound to the given stream is a built-in
/// (OS-provided) Media Foundation transform, detected by it implementing
/// <c>IMFObjectInformation</c>.
/// </summary>
/// <param name="writer">Sink writer that owns the stream.</param>
/// <param name="streamIndex">Stream whose encoder transform is inspected.</param>
/// <returns>True when the transform exposes <c>IMFObjectInformation</c>.</returns>
public static bool IsBuiltinEncoder(SinkWriter writer, int streamIndex)
{
    var ptr = GetTransformPtr(writer, streamIndex);
    if (ptr == IntPtr.Zero)
    {
        return false;
    }

    try
    {
        return Marshal.GetObjectForIUnknown(ptr) is IMFObjectInformation;
    }
    finally
    {
        // FIX: GetTransformPtr hands back an AddRef'ed COM pointer; the
        // original never released it, leaking one reference per call.
        Marshal.Release(ptr);
    }
}
/// <inheritdoc />
/// <summary>
/// Releases the byte stream, sink writer and DXGI manager, then shuts down
/// Media Foundation and clears the fields.
/// </summary>
public void Dispose()
{
    this.byteStream?.Dispose();
    this.sinkWriter?.Dispose();
    this.dxgiManager?.Dispose();

    MediaFactory.Shutdown();

    this.byteStream = null;
    this.sinkWriter = null;
    this.dxgiManager = null;
}
/// <summary>
/// Determines whether the encoder transform bound to the given sink-writer
/// stream is hardware based. Returns false when no transform is available.
/// </summary>
/// <param name="writer">Sink writer that owns the stream.</param>
/// <param name="streamIndex">Stream whose encoder transform is inspected.</param>
public static bool IsHardwareBasedEncoder(SinkWriter writer, int streamIndex)
{
    using (var transform = GetTransform(writer, streamIndex))
    {
        return transform != null && IsHardwareBasedEncoder(transform);
    }
}
/// <summary>
/// Queries the output-stream information of the encoder transform bound to the
/// given sink-writer stream. Returns an empty structure when no transform exists.
/// </summary>
/// <param name="writer">Sink writer that owns the stream.</param>
/// <param name="streamIndex">Stream whose transform is queried.</param>
public static TOutputStreamInformation GetOutputStreamInfo(SinkWriter writer, int streamIndex)
{
    using (var transform = GetTransform(writer, streamIndex))
    {
        if (transform == null)
        {
            // No encoder transform for this stream: report an empty info struct.
            return new TOutputStreamInformation();
        }

        transform.GetOutputStreamInfo(streamIndex, out var info);
        return info;
    }
}
/// <summary>
/// Adds the encoded output stream (VIDEO_ENCODING_FORMAT) to the sink writer
/// and returns its index through <paramref name="streamIndex"/>.
/// </summary>
/// <param name="sinkWriter">Writer that receives the output stream.</param>
/// <param name="videoPixelSize">Frame size of the encoded video.</param>
/// <param name="streamIndex">Receives the new stream's index.</param>
protected override void CreateMediaTarget(SinkWriter sinkWriter, Size2 videoPixelSize, out int streamIndex)
{
    using (var targetType = new MF.MediaType())
    {
        targetType.Set<Guid>(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
        targetType.Set<Guid>(MF.MediaTypeAttributeKeys.Subtype, VIDEO_ENCODING_FORMAT);
        targetType.Set<int>(MF.MediaTypeAttributeKeys.AvgBitrate, Bitrate);
        targetType.Set<int>(MF.MediaTypeAttributeKeys.InterlaceMode, (int)MF.VideoInterlaceMode.Progressive);

        // Width/height and framerate ratios are packed into single 64-bit values.
        targetType.Set<long>(MF.MediaTypeAttributeKeys.FrameSize, MFHelper.GetMFEncodedIntsByValues(videoPixelSize.Width, videoPixelSize.Height));
        targetType.Set<long>(MF.MediaTypeAttributeKeys.FrameRate, MFHelper.GetMFEncodedIntsByValues(Framerate, 1));

        sinkWriter.AddStream(targetType, out streamIndex);
    }
}
/// <summary>
/// Configures the sink writer's encoded output stream and reports its index
/// via <paramref name="streamIndex"/>.
/// </summary>
/// <param name="sinkWriter">Writer that receives the output stream.</param>
/// <param name="videoPixelSize">Frame size of the encoded video.</param>
/// <param name="streamIndex">Receives the new stream's index.</param>
protected override void CreateMediaTarget(SinkWriter sinkWriter, Size2 videoPixelSize, out int streamIndex)
{
    using (var outputType = new MF.MediaType())
    {
        outputType.Set<Guid>(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
        outputType.Set<Guid>(MF.MediaTypeAttributeKeys.Subtype, VIDEO_ENCODING_FORMAT);
        outputType.Set<int>(MF.MediaTypeAttributeKeys.AvgBitrate, Bitrate);
        outputType.Set<int>(MF.MediaTypeAttributeKeys.InterlaceMode, (int)MF.VideoInterlaceMode.Progressive);

        // Frame size and framerate are ratio values packed into 64-bit fields.
        outputType.Set<long>(MF.MediaTypeAttributeKeys.FrameSize, MFHelper.GetMFEncodedIntsByValues(videoPixelSize.Width, videoPixelSize.Height));
        outputType.Set<long>(MF.MediaTypeAttributeKeys.FrameRate, MFHelper.GetMFEncodedIntsByValues(Framerate, 1));

        sinkWriter.AddStream(outputType, out streamIndex);
    }
}
/// <summary>
/// Sets and initializes the targetstream for the encoding process.
/// </summary>
/// <param name="stream">Stream which should be used as the targetstream.</param>
/// <param name="inputMediaType">Mediatype of the raw input data to encode.</param>
/// <param name="targetMediaType">Mediatype of the encoded data.</param>
/// <param name="containerType">Container type which should be used.</param>
protected void SetTargetStream(Stream stream, MediaType inputMediaType, MediaType targetMediaType, Guid containerType)
{
    MediaAttributes attributes = null;
    try
    {
        // FIX: the original issued a single stream.Read(...) and ignored its
        // return value; Stream.Read may return fewer bytes than requested,
        // which would silently truncate the copied data. Loop until the buffer
        // is full or the stream ends.
        var buffer = new byte[stream.Length];
        int offset = 0;
        while (offset < buffer.Length)
        {
            int read = stream.Read(buffer, offset, buffer.Length - offset);
            if (read == 0)
            {
                break; // end of stream reached early
            }
            offset += read;
        }
        _targetStream = new ByteStream(buffer);

        attributes = new MediaAttributes(2);
        attributes.Set(MediaFoundationAttributes.MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, 1);
        attributes.Set(MediaFoundationAttributes.MF_TRANSCODE_CONTAINERTYPE, containerType);

        _sinkWriter = SinkWriter.Create(_targetStream, attributes);
        _streamIndex = _sinkWriter.AddStream(targetMediaType);
        _sinkWriter.SetInputMediaType(_streamIndex, inputMediaType, null);

        _targetMediaType = targetMediaType;
        _sourceBytesPerSecond = inputMediaType.AverageBytesPerSecond;

        //initialize the sinkwriter
        _sinkWriter.BeginWriting();
    }
    catch (Exception)
    {
        // Roll back partially constructed state before rethrowing.
        if (_sinkWriter != null)
        {
            _sinkWriter.Dispose();
            _sinkWriter = null;
        }
        if (_targetStream != null)
        {
            _targetStream.Dispose();
            _targetStream = null;
        }
        throw;
    }
    finally
    {
        if (attributes != null)
        {
            attributes.Dispose();
        }
    }
}
/// <summary>
/// Pulls audio from <paramref name="inputProvider"/> in chunks and writes it to
/// the sink writer until the source is exhausted, then finalizes the writer.
/// </summary>
/// <param name="writer">Sink writer receiving the samples.</param>
/// <param name="streamIndex">Target stream index on the writer.</param>
/// <param name="inputProvider">Source of raw audio bytes.</param>
private void PerformEncode(SinkWriter writer, int streamIndex, IWaveProvider inputProvider)
{
    // Stage up to four seconds of audio per conversion pass.
    var managedBuffer = new byte[inputProvider.WaveFormat.AverageBytesPerSecond * 4];

    writer.BeginWriting();

    long position = 0;
    while (true)
    {
        long duration = ConvertOneBuffer(writer, streamIndex, inputProvider, position, managedBuffer);
        if (duration <= 0)
        {
            break; // source exhausted
        }
        position += duration;
    }

    writer.Finalize();
}
/// <summary>
/// Applies audio/video media types to every reader, creates matching sink
/// streams from the first reader's types, and returns a sample processor that
/// routes audio and video samples to their sink streams.
/// </summary>
/// <param name="readers">Source readers to configure; the first defines the sink formats.</param>
/// <param name="sinkWriter">Writer that receives the audio and video streams.</param>
private ProcessSample ConnectStreams(IEnumerable<SourceReaderExtra> readers, SinkWriter sinkWriter)
{
    // FIX: materialize once — the original enumerated "readers" three times
    // (foreach plus two First() calls), re-running any deferred query.
    var readerList = readers.ToList();

    foreach (var r in readerList)
    {
        SetAudioMediaType(r.SourceReader);
        SetVideoMediaType(r.SourceReader);
    }

    var firstReader = readerList.First().SourceReader;
    var sourceAudioStream = SetAudioMediaType(firstReader);
    var sourceVideoStream = SetVideoMediaType(firstReader);

    var sinkAudioStream = AddStream(sinkWriter, sourceAudioStream.CurrentMediaType, CreateTargetAudioMediaType(sourceAudioStream.NativeMediaType));
    var sinkVideoStream = AddStream(sinkWriter, sourceVideoStream.CurrentMediaType, CreateTargetVideoMediaType(sourceVideoStream.NativeMediaType));

    var saveAudio = AVOperations.MediaTypeChange(sinkAudioStream, AVOperations.SaveTo(sinkAudioStream));
    var saveVideo = AVOperations.MediaTypeChange(sinkVideoStream, AVOperations.SaveTo(sinkVideoStream));

    return AVOperations.SeperateAudioVideo(saveAudio, saveVideo);
}
/// <summary>
/// Asks the sink writer for the encoder transform service of a stream and
/// returns the raw COM pointer, or <see cref="IntPtr.Zero"/> when the service
/// is unavailable.
/// </summary>
/// <param name="writer">Sink writer to query; must not be null.</param>
/// <param name="streamIndex">Stream whose transform service is requested.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="writer"/> is null.</exception>
private static IntPtr GetTransformPtr(SinkWriter writer, int streamIndex)
{
    if (writer == null)
    {
        throw new ArgumentNullException(nameof(writer));
    }

    var transformPtr = IntPtr.Zero;
    try
    {
        writer.GetServiceForStream(streamIndex, Guid.Empty, typeof(Transform).GUID, out transformPtr);
    }
    catch
    {
        // Best effort: no transform service for this stream — return Zero.
    }

    return transformPtr;
}
/// <inheritdoc />
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources
/// </summary>
public override void Dispose()
{
    try
    {
        // Flush pending samples and finish the output container.
        this.sinkWriter.Finalize();
    }
    catch (SharpDXException exception) when(exception.ResultCode.Code == MF_E_SINK_NO_SAMPLES_PROCESSED)
    {
        // No samples were ever written, so the output would be malformed:
        // truncate the byte stream rather than leave a partial header behind.
        this.byteStream.Length = 0;
    }
    // NOTE(review): other exceptions from Finalize propagate out of Dispose —
    // confirm callers tolerate that.
    this.byteStream?.Dispose();
    this.sinkWriter?.Dispose();
    this.dxgiManager?.Dispose();
    MediaFactory.Shutdown();
    this.sinkWriter = null;
    this.byteStream = null;
    this.dxgiManager = null;
}
/// <summary>
/// Resolves a human-readable name for the encoder bound to the given stream:
/// the registered built-in encoder's name, or the name matching the transform's
/// CLSID. Returns "Unknown" when the lookup fails for any reason.
/// </summary>
/// <param name="writer">Sink writer that owns the stream.</param>
/// <param name="streamIndex">Stream whose encoder is identified.</param>
public static string GetEncoderFriendlyName(SinkWriter writer, int streamIndex)
{
    try
    {
        using (var transform = GetTransform(writer, streamIndex))
        {
            if (transform != null)
            {
                if (IsBuiltinEncoder(transform))
                {
                    return Enumerate().First(e => e.IsBuiltin).FriendlyName;
                }

                var clsid = transform.Attributes.Get(TransformAttributeKeys.MftTransformClsidAttribute);
                return Enumerate().First(e => e.Clsid == clsid).FriendlyName;
            }
        }
    }
    catch
    {
        // Lookup failed — fall through to the generic name.
    }

    return "Unknown";
}
/// <summary>
/// Creates a Media Foundation file writer: configures a video stream (NV12
/// input for Direct2D editors, RGB32 otherwise), an optional AAC audio stream,
/// and the staging texture/sample/buffer used to submit frames.
/// </summary>
/// <param name="Args">Video writer settings (file name, sizes, rates, providers).</param>
/// <param name="Device">Direct3D device used for the staging texture.</param>
public MfWriter(VideoWriterArgs Args, Device Device)
{
    // Direct2D editors produce NV12 frames; everything else provides RGB32.
    if (Args.ImageProvider.EditorType == typeof(Direct2DEditor))
    {
        _inputFormat = VideoFormatGuids.NV12;
    }
    else
    {
        _inputFormat = VideoFormatGuids.Rgb32;
    }

    _device = Device;

    _frameDuration = TenPower7 / Args.FrameRate;

    // FIX: dispose the sink-writer attributes once the writer is created
    // (the original leaked this COM wrapper).
    using (var attr = GetSinkWriterAttributes(Device))
    {
        _writer = MediaFactory.CreateSinkWriterFromURL(Args.FileName, null, attr);
    }

    var w = Args.ImageProvider.Width;
    var h = Args.ImageProvider.Height;
    _bufferSize = w * h * 4;

    using (var mediaTypeOut = new MediaType())
    {
        mediaTypeOut.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
        mediaTypeOut.Set(MediaTypeAttributeKeys.Subtype, _encodingFormat);
        mediaTypeOut.Set(MediaTypeAttributeKeys.AvgBitrate, BitRate);
        mediaTypeOut.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
        mediaTypeOut.Set(MediaTypeAttributeKeys.FrameSize, PackLong(w, h));
        mediaTypeOut.Set(MediaTypeAttributeKeys.FrameRate, PackLong(Args.FrameRate, 1));
        mediaTypeOut.Set(MediaTypeAttributeKeys.PixelAspectRatio, PackLong(1, 1));
        _writer.AddStream(mediaTypeOut, out _);
    }

    using (var mediaTypeIn = new MediaType())
    {
        mediaTypeIn.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
        mediaTypeIn.Set(MediaTypeAttributeKeys.Subtype, _inputFormat);
        mediaTypeIn.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
        mediaTypeIn.Set(MediaTypeAttributeKeys.FrameSize, PackLong(w, h));
        mediaTypeIn.Set(MediaTypeAttributeKeys.FrameRate, PackLong(Args.FrameRate, 1));
        mediaTypeIn.Set(MediaTypeAttributeKeys.PixelAspectRatio, PackLong(1, 1));
        mediaTypeIn.Set(MediaTypeAttributeKeys.AllSamplesIndependent, 1);

        // FIX: dispose the encoder parameter attributes (the original leaked
        // them; the Setup() method elsewhere in this file already uses using).
        using (var encoderParams = new MediaAttributes(2))
        {
            encoderParams.Set(RateControlModeKey, RateControlMode.Quality);
            encoderParams.Set(QualityKey, Args.VideoQuality);
            _writer.SetInputMediaType(VideoStreamIndex, mediaTypeIn, encoderParams);
        }
    }

    if (Args.AudioProvider != null)
    {
        var wf = Args.AudioProvider.WaveFormat;
        _audioInBytesPerSecond = wf.SampleRate * wf.Channels * wf.BitsPerSample / 8;

        using (var audioTypeOut = GetMediaType(wf))
        {
            audioTypeOut.Set(MediaTypeAttributeKeys.Subtype, _encodedAudioFormat);
            audioTypeOut.Set(MediaTypeAttributeKeys.AudioAvgBytesPerSecond, GetAacBitrate(Args.AudioQuality));
            _writer.AddStream(audioTypeOut, out _);
        }

        using (var audioTypeIn = GetMediaType(wf))
        {
            audioTypeIn.Set(MediaTypeAttributeKeys.Subtype, AudioFormatGuids.Pcm);
            _writer.SetInputMediaType(AudioStreamIndex, audioTypeIn, null);
        }
    }

    _writer.BeginWriting();

    // CPU-readable staging texture used to copy frames off the GPU.
    _copyTexture = new Texture2D(Device, new Texture2DDescription
    {
        CpuAccessFlags = CpuAccessFlags.Read,
        BindFlags = BindFlags.None,
        Format = Format.B8G8R8A8_UNorm,
        Width = w,
        Height = h,
        OptionFlags = ResourceOptionFlags.None,
        MipLevels = 1,
        ArraySize = 1,
        SampleDescription = { Count = 1, Quality = 0 },
        Usage = ResourceUsage.Staging
    });

    _sample = MediaFactory.CreateVideoSampleFromSurface(null);

    // Create the media buffer from the texture
    MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, _copyTexture, 0, false, out _mediaBuffer);

    using (var buffer2D = _mediaBuffer.QueryInterface<Buffer2D>())
        _mediaBuffer.CurrentLength = buffer2D.ContiguousLength;

    // Attach the created buffer to the sample
    _sample.AddBuffer(_mediaBuffer);
}
/// <summary>
/// Wraps an existing sink writer and immediately puts it into the writing state.
/// </summary>
/// <param name="sinkWriter">Writer to manage for the lifetime of this scope.</param>
public SinkWriterScope(SinkWriter sinkWriter)
{
    this.sinkWriter = sinkWriter;

    // Enter the writing state up front; Hr() surfaces a failed HRESULT.
    sinkWriter.instance.BeginWriting().Hr();
}
/// <summary>
/// Wraps the encoder transform of a sink-writer stream in a
/// <see cref="Transform"/>, or returns null when the stream has none.
/// </summary>
/// <param name="writer">Sink writer that owns the stream.</param>
/// <param name="streamIndex">Stream whose transform is requested.</param>
public static Transform GetTransform(SinkWriter writer, int streamIndex)
{
    var ptr = GetTransformPtr(writer, streamIndex);
    if (ptr == IntPtr.Zero)
    {
        return null;
    }

    return new Transform(ptr);
}
/// <summary>
/// Applies audio/video media types to every reader, creates sink streams from
/// the first reader's media types, and returns a processor that saves audio and
/// video samples to their respective sink streams.
/// </summary>
/// <param name="readers">Source readers to configure; the first defines the sink formats.</param>
/// <param name="sinkWriter">Writer that receives the audio and video streams.</param>
private ProcessSample ConnectStreams(IEnumerable<SourceReaderExtra> readers, SinkWriter sinkWriter)
{
    // FIX: materialize once — the original enumerated "readers" three times
    // (foreach plus two First() calls), re-running any deferred query.
    var readerList = readers.ToList();

    foreach (var r in readerList)
    {
        SetAudioMediaType(r.SourceReader);
        SetVideoMediaType(r.SourceReader);
    }

    var firstReader = readerList.First().SourceReader;
    var sourceAudioStream = SetAudioMediaType(firstReader);
    var sourceVideoStream = SetVideoMediaType(firstReader);

    var sinkAudioStream = AddStream(sinkWriter, sourceAudioStream.CurrentMediaType, CreateTargetAudioMediaType(sourceAudioStream.NativeMediaType));
    var sinkVideoStream = AddStream(sinkWriter, sourceVideoStream.CurrentMediaType, CreateTargetVideoMediaType(sourceVideoStream.NativeMediaType));

    var saveAudio = AVOperations.MediaTypeChange(sinkAudioStream, AVOperations.SaveTo(sinkAudioStream));
    var saveVideo = AVOperations.MediaTypeChange(sinkVideoStream, AVOperations.SaveTo(sinkVideoStream));

    return AVOperations.SeperateAudioVideo(saveAudio, saveVideo);
}
/// <summary>
/// Binds this sink stream to its owning writer and its stream index.
/// </summary>
/// <param name="sinkWriter">Writer that owns the stream.</param>
/// <param name="streamIndex">Index of the stream within the writer.</param>
public SinkStream(SinkWriter sinkWriter, int streamIndex)
{
    this.index = streamIndex;
    this.sinkWriter = sinkWriter;
}
/// <summary>
/// Initializes the H.264 file writer: creates the sink writer with hardware
/// transforms enabled, configures output (H264) and input (NV12) media types,
/// and allocates the staging texture/sample/buffer for frame submission.
/// On any failure the partially created resources are released via Close().
/// </summary>
/// <param name="fileName">Destination MP4 file.</param>
/// <param name="Args">Video parameters (size, frame rate, quality).</param>
public void Setup(string fileName, MfVideoArgs Args)
{
    logger.Debug("MfWriter::Init(...)");

    var inputFormat = VideoFormatGuids.NV12;
    // var inputFormat = VideoFormatGuids.Rgb32;

    frameDuration = 10_000_000 / Args.FrameRate;

    var width = Args.Width;
    var height = Args.Height;
    var bufSize = width * height * 4;

    try
    {
        using (var attr = new MediaAttributes(6))
        {
            attr.Set(SinkWriterAttributeKeys.ReadwriteEnableHardwareTransforms, 1);
            attr.Set(SinkWriterAttributeKeys.ReadwriteDisableConverters, 0);
            attr.Set(TranscodeAttributeKeys.TranscodeContainertype, TranscodeContainerTypeGuids.Mpeg4);
            attr.Set(SinkWriterAttributeKeys.LowLatency, true);
            attr.Set(SinkWriterAttributeKeys.DisableThrottling, 1);

            using (var devMan = new DXGIDeviceManager())
            {
                devMan.ResetDevice(device);
                // NOTE(review): devMan is disposed right after being stored in
                // the attribute store; the store holds its own COM reference,
                // but confirm the writer keeps the manager alive.
                attr.Set(SinkWriterAttributeKeys.D3DManager, devMan);
            }

            sinkWriter = MediaFactory.CreateSinkWriterFromURL(fileName, null, attr);
        }

        using (var outputMediaType = new MediaType())
        {
            outputMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
            outputMediaType.Set(MediaTypeAttributeKeys.Subtype, VideoFormatGuids.H264);
            outputMediaType.Set(MediaTypeAttributeKeys.AvgBitrate, 8_000_000);
            outputMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
            outputMediaType.Set(MediaTypeAttributeKeys.FrameSize, MfTool.PackToLong(width, height));
            // FIX: MF_MT_FRAME_RATE is a numerator/denominator ratio packed
            // into a UINT64; the original set the plain fps integer, which MF
            // reads as numerator 0. Pack it like FrameSize/PixelAspectRatio.
            outputMediaType.Set(MediaTypeAttributeKeys.FrameRate, MfTool.PackToLong(Args.FrameRate, 1));
            outputMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, MfTool.PackToLong(1, 1));

            sinkWriter.AddStream(outputMediaType, out videoStreamIndex);
            Debug.WriteLine("mediaTypeOut " + videoStreamIndex);
        }

        using (var inputMediaType = new MediaType())
        {
            inputMediaType.Set(MediaTypeAttributeKeys.MajorType, MediaTypeGuids.Video);
            inputMediaType.Set(MediaTypeAttributeKeys.Subtype, inputFormat);
            inputMediaType.Set(MediaTypeAttributeKeys.InterlaceMode, (int)VideoInterlaceMode.Progressive);
            inputMediaType.Set(MediaTypeAttributeKeys.FrameSize, MfTool.PackToLong(width, height));
            // FIX: packed ratio, see output media type above.
            inputMediaType.Set(MediaTypeAttributeKeys.FrameRate, MfTool.PackToLong(Args.FrameRate, 1));
            inputMediaType.Set(MediaTypeAttributeKeys.PixelAspectRatio, MfTool.PackToLong(1, 1));
            inputMediaType.Set(MediaTypeAttributeKeys.AllSamplesIndependent, 1);

            using (var encoderParams = new MediaAttributes(2))
            {
                encoderParams.Set(CodecApiPropertyKeys.AVEncCommonRateControlMode, RateControlMode.Quality);
                encoderParams.Set(CodecApiPropertyKeys.AVEncCommonQuality, Args.Quality);

                // FIX: use the index returned by AddStream instead of a
                // hard-coded 0 so the call stays correct if more streams are
                // ever added before the video stream.
                sinkWriter.SetInputMediaType(videoStreamIndex, inputMediaType, encoderParams);
            }
        }

        // CPU-readable staging texture used to copy frames off the GPU.
        bufTexture = new Texture2D(device, new Texture2DDescription
        {
            CpuAccessFlags = CpuAccessFlags.Read,
            BindFlags = BindFlags.None,
            Format = Format.B8G8R8A8_UNorm,
            Width = width,
            Height = height,
            OptionFlags = ResourceOptionFlags.None,
            MipLevels = 1,
            ArraySize = 1,
            SampleDescription = { Count = 1, Quality = 0 },
            Usage = ResourceUsage.Staging
        });

        videoSample = MediaFactory.CreateVideoSampleFromSurface(null);

        // Create the media buffer from the texture
        MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, bufTexture, 0, false, out mediaBuffer);

        using (var buffer2D = mediaBuffer.QueryInterface<Buffer2D>())
        {
            mediaBuffer.CurrentLength = buffer2D.ContiguousLength;
        }

        // Attach the created buffer to the sample
        videoSample.AddBuffer(mediaBuffer);
    }
    catch (Exception ex)
    {
        logger.Error(ex);

        // Release anything partially constructed before propagating.
        Close();
        throw;
    }
}
/// <summary>
/// Applies audio/video media types to both short readers, then connects the
/// main reader's audio and video streams to the sink writer and returns the
/// combined sample processor.
/// </summary>
private ProcessSample ConnectStreams(SourceReader shortSourceReader, SourceReader shortSourceReader2, SourceReader sourceReader, SinkWriter sinkWriter)
{
    // Configure the short readers first; only the main reader feeds the sink.
    SetAudioMediaType(shortSourceReader);
    SetVideoMediaType(shortSourceReader);
    SetAudioMediaType(shortSourceReader2);
    SetVideoMediaType(shortSourceReader2);

    var audio = ConnectAudioStreams(sourceReader, sinkWriter);
    var video = ConnectVideoStreams(sourceReader, sinkWriter);
    return AVOperations.SeperateAudioVideo(audio, video);
}
/// <summary>
/// Connects the reader's audio and video streams to the sink writer and
/// returns a processor that dispatches samples to the right stream.
/// </summary>
private ProcessSample ConnectStreams(SourceReader sourceReader, SinkWriter sinkWriter)
{
    var audio = ConnectAudioStreams(sourceReader, sinkWriter);
    var video = ConnectVideoStreams(sourceReader, sinkWriter);
    return AVOperations.SeperateAudioVideo(audio, video);
}
/// <summary>
/// Creates a sink stream for the given encoding on the writer and assigns the
/// input media type it will receive.
/// </summary>
/// <param name="sinkWriter">Writer that receives the new stream.</param>
/// <param name="input">Media type of the raw samples pushed into the stream.</param>
/// <param name="encoding">Media type the stream encodes to.</param>
/// <returns>The newly created sink stream.</returns>
SinkStream AddStream(SinkWriter sinkWriter, MediaType input, MediaType encoding)
{
    SinkStream stream = sinkWriter.AddStream(encoding);
    stream.InputMediaType = input;
    return stream;
}
/// <summary>
/// Configures the reader's video stream, adds a matching sink stream to the
/// writer, and returns a processor that saves video samples to that stream.
/// </summary>
private ProcessSample ConnectVideoStreams(SourceReader sourceReader, SinkWriter sinkWriter)
{
    var sourceVideoStream = SetVideoMediaType(sourceReader);

    var targetType = CreateTargetVideoMediaType(sourceVideoStream.NativeMediaType);
    var sinkVideoStream = AddStream(sinkWriter, sourceVideoStream.CurrentMediaType, targetType);

    return AVOperations.MediaTypeChange(sinkVideoStream, AVOperations.SaveTo(sinkVideoStream));
}