public void Disposes_Twice()
{
    // Arrange: a mocked FFmpeg client whose CloseFormatContext call is verifiable.
    var clientMock = new Mock<FFmpegClient>();
    clientMock
        .Setup(c => c.CloseFormatContext(It.IsAny<AVFormatContext>()))
        .Verifiable();
    var client = clientMock.Object;

    var nativeFormatContext = new NativeAVFormatContext { nb_streams = 1, };
    var nativeIOContext = new NativeAVIOContext { };

    using (var handle = new AVFormatContextHandle(client, &nativeFormatContext))
    {
        var ioContextMock = new Mock<AVIOContext>(client, new AVIOContextHandle(client, &nativeIOContext))
        {
            CallBase = true,
        };

        var formatContext = new AVFormatContext(client, ioContextMock.Object, handle);

        // Act: dispose twice in a row; the second call must be a no-op.
        formatContext.Dispose();
        formatContext.Dispose();

        // Assert: the inner IO context was disposed exactly once.
        ioContextMock.Verify(c => c.Dispose(), Times.Once);
    }
}
public void GetVideoStream_ThrowsOnMultipleStreams()
{
    var clientMock = new Mock<FFmpegClient>();
    clientMock
        .Setup(c => c.FreeAVFormatContext(It.IsAny<IntPtr>()))
        .Verifiable();
    var client = clientMock.Object;

    // Two streams make the single-video-stream lookup ambiguous.
    var nativeFormatContext = new NativeAVFormatContext { nb_streams = 2, };
    var nativeIOContext = new NativeAVIOContext { };

    using (var handle = new AVFormatContextHandle(client, &nativeFormatContext))
    using (var ioContext = new AVIOContext(client, new AVIOContextHandle(client, &nativeIOContext)))
    using (var formatContext = new AVFormatContext(client, ioContext, handle))
    {
        Assert.Throws<InvalidOperationException>(() => formatContext.GetVideoStream());
    }

    clientMock.Verify();
}
public void Close_Closes()
{
    var clientMock = new Mock<FFmpegClient>();
    clientMock
        .Setup(c => c.CloseFormatContext(It.IsAny<AVFormatContext>()))
        .Verifiable();
    var client = clientMock.Object;

    var nativeFormatContext = new NativeAVFormatContext { nb_streams = 1, };
    var nativeIOContext = new NativeAVIOContext { };

    using (var handle = new AVFormatContextHandle(client, &nativeFormatContext))
    using (var ioContext = new AVIOContext(client, new AVIOContextHandle(client, &nativeIOContext)))
    using (var formatContext = new AVFormatContext(client, ioContext, handle))
    {
        // Closing must route through the client's CloseFormatContext.
        formatContext.Close();
    }

    clientMock.Verify();
}
public void GetStream_ThrowsOnIllegalIndex()
{
    var clientMock = new Mock<FFmpegClient>();
    clientMock
        .Setup(c => c.FreeAVFormatContext(It.IsAny<IntPtr>()))
        .Verifiable();
    var client = clientMock.Object;

    var nativeFormatContext = new NativeAVFormatContext { nb_streams = 1, };
    var nativeIOContext = new NativeAVIOContext { };

    using (var handle = new AVFormatContextHandle(client, &nativeFormatContext))
    using (var ioContext = new AVIOContext(client, new AVIOContextHandle(client, &nativeIOContext)))
    using (var formatContext = new AVFormatContext(client, ioContext, handle))
    {
        // Both out-of-range directions must be rejected with the index parameter name.
        Assert.Throws<ArgumentOutOfRangeException>("index", () => formatContext.GetStream(20));
        Assert.Throws<ArgumentOutOfRangeException>("index", () => formatContext.GetStream(-1));
    }

    clientMock.Verify();
}
/// <summary>
/// Find streams from here.
/// </summary>
/// <param name="AOptions">The options <see cref="Dictionary"/>.</param>
/// <exception cref="ObjectDisposedException">This instance is disposed.</exception>
/// <exception cref="InvalidOperationException">Stream is closed.</exception>
/// <exception cref="FFmpegError">Error finding stream info.</exception>
public void FindStreamInfo([CanBeNull] Dictionary AOptions = null)
{
    ThrowIfDisposed();

    if (Mode != StreamOpenMode.Input)
    {
        throw new InvalidOperationException("Stream is closed.");
    }

    // Hand any caller-supplied options dictionary to the native call.
    Ref<Unsafe.AVDictionary> optionsRef = null;
    if (AOptions != null)
    {
        optionsRef = AOptions.Ref;
    }

    try
    {
        AVFormatContext.FindStreamInfo(Ref, ref optionsRef);
    }
    finally
    {
        if (AOptions != null)
        {
            // The native call may replace the dictionary; propagate it back.
            AOptions.Ref = optionsRef;
        }
        else
        {
            // No caller dictionary: release whatever the native call produced.
            AVDictionary.Free(ref optionsRef);
        }
    }
}
/// <inheritdoc />
protected override unsafe void Dispose(bool ADisposing)
{
    if (Mode == StreamOpenMode.Input)
    {
        // Input contexts are torn down via the close-input path.
        AVFormatContext.CloseInput(ref Ref);
    }
    else if (Mode == StreamOpenMode.Output)
    {
        // Detach and close the output AVIOContext before freeing the context.
        Ref<Unsafe.AVIOContext> ioRef = Ref.Ptr->pb;
        Ref.Ptr->pb = null;
        AVIO.Close(ref ioRef);

        AVFormatContext.Free(ref Ref);
    }
    else if (Mode == StreamOpenMode.Closed)
    {
        AVFormatContext.Free(ref Ref);
    }

    Mode = StreamOpenMode.Closed;
    base.Dispose(ADisposing);
}
public static unsafe void avformat_close_input(ref AVFormatContext s)
{
    // Pin the managed struct and forward its address to the IntPtr overload.
    fixed (AVFormatContext* context = &s)
    {
        var contextPtr = (IntPtr)context;
        avformat_close_input(ref contextPtr);
    }
}
/// <summary>
/// Allocates an empty AVFormatContext with its default values set.
/// </summary>
public static AVFormatContext av_alloc_format_context()
{
    AVFormatContext* native = av_alloc_format_context_internal();

    // Copy the defaulted struct out, then release the native allocation.
    // NOTE(review): this is a shallow copy — any pointer fields inside the
    // returned struct refer to memory freed below; confirm callers never
    // dereference them before reinitialising.
    AVFormatContext managedCopy = *native;

    IntPtr nativePtr = new IntPtr(native);
    FFmpeg.av_freep(ref nativePtr);

    return managedCopy;
}
/// <summary>
/// Allocates all the structures needed to read an input stream.
/// This does not open the needed codecs for decoding the stream[s].
/// </summary>
/// <param name="ctx">receives a copy of the opened input stream context</param>
/// <param name="pb">the byte IO context to read from</param>
/// <param name="filename">filename used by the native open call</param>
/// <param name="fmt">the input format to use</param>
/// <param name="ap">additional format parameters</param>
/// <returns>AVError</returns>
private static AVError av_open_input_stream(out AVFormatContext ctx, ref ByteIOContext pb, string filename, ref AVInputFormat fmt, ref AVFormatParameters ap)
{
    AVFormatContext* ptr;
    AVError err = av_open_input_stream(out ptr, ref pb, filename, ref fmt, ref ap);

    // BUG FIX: on failure the native call can leave ptr null; dereferencing
    // it crashed. Mirror the guard used by avformat_open_input_file and
    // av_open_input_file elsewhere in this binding.
    if (ptr == null)
    {
        ctx = new AVFormatContext();
        return(err);
    }

    ctx = *ptr;
    FFmpeg.av_free(ptr);
    return(err);
}
/// <summary>
/// Finish writing stream data and finalize output.
/// </summary>
/// <exception cref="ObjectDisposedException">This instance is disposed.</exception>
/// <exception cref="InvalidOperationException">
/// Stream is not in the Ready state.
/// </exception>
/// <exception cref="FFmpegError">Error writing trailer.</exception>
public void EndData()
{
    ThrowIfDisposed();

    if (State != OutputState.Ready)
    {
        throw new InvalidOperationException("Stream is not in the Ready state.");
    }

    // Flush the container trailer, then mark the stream finished.
    AVFormatContext.WriteTrailer(Ref);
    State = OutputState.Finalized;
}
/// <summary>
/// Open the input context.
/// </summary>
/// <param name="AUrl">The input URL.</param>
/// <param name="ADemuxer">The <see cref="Demuxer"/>.</param>
/// <param name="AOptions">The options <see cref="Dictionary"/>.</param>
/// <exception cref="ObjectDisposedException">This instance is disposed.</exception>
/// <exception cref="InvalidOperationException">Stream is already open.</exception>
/// <exception cref="ArgumentNullException">
/// <paramref name="AUrl"/> is <see langword="null"/>.
/// </exception>
/// <exception cref="FFmpegError">Error opening context.</exception>
public void Open(
    string AUrl,
    [CanBeNull] Demuxer ADemuxer = null,
    [CanBeNull] Dictionary AOptions = null
)
{
    ThrowIfDisposed();

    if (Mode != StreamOpenMode.Closed)
    {
        throw new InvalidOperationException("Stream is already open.");
    }

    if (AUrl == null)
    {
        throw new ArgumentNullException(nameof(AUrl));
    }

    // Hand any caller-supplied options dictionary to the native open call.
    Ref<Unsafe.AVDictionary> optionsRef = null;
    if (AOptions != null)
    {
        optionsRef = AOptions.Ref;
    }

    try
    {
        var demuxerRef = ADemuxer != null ? ADemuxer.Ref : null;
        AVFormatContext.OpenInput(ref Ref, AUrl, demuxerRef, ref optionsRef);
        Mode = StreamOpenMode.Input;
    }
    finally
    {
        if (AOptions != null)
        {
            // The native call may replace the dictionary; propagate it back.
            AOptions.Ref = optionsRef;
        }
        else
        {
            // No caller dictionary: release whatever the native call produced.
            AVDictionary.Free(ref optionsRef);
        }
    }
}
/// <summary>
/// Opens a media file as input. The codecs are not opened. Only the file
/// header (if present) is read.
/// </summary>
/// <param name="pFormatContext">the opened media file handle is put here</param>
/// <param name="filename">filename to open</param>
/// <returns>AVError</returns>
public static AVError avformat_open_input_file(out AVFormatContext pFormatContext, string filename)
{
    IntPtr ptr;
    AVError err = avformat_open_input(out ptr, filename, null, null);

    // Open failed without allocating a context: hand back an empty struct.
    if (ptr == IntPtr.Zero)
    {
        pFormatContext = new AVFormatContext();
        return(err);
    }

    // Copy the native struct out to the caller.
    // NOTE(review): unlike av_open_input_file in this file, the native
    // allocation is never released here (no av_freep / avformat_close_input)
    // — this looks like a leak; confirm intent and the correct free routine
    // before changing.
    pFormatContext = *(AVFormatContext *)ptr.ToPointer();
    return(err);
}
public void Constructor_InitializesInstance()
{
    var ffmpegMock = new Mock<FFmpegClient>();

    var codecParameters = new NativeAVCodecParameters
    {
        codec_type = NativeAVMediaType.AVMEDIA_TYPE_VIDEO,
    };

    var nativeStream = new NativeAVStream
    {
        codecpar = &codecParameters,
    };

    var streamPtr = new IntPtr(&nativeStream);

    var nativeAVFormatContext = new NativeAVFormatContext
    {
        duration = 10,
        nb_streams = 1,
        event_flags = (int)AVFormatContextEventFlags.MetadataUpdated,
        ctx_flags = (int)AVFormatContextFlags.NoHeader,
        // BUG FIX: streams must point at the stream-pointer array. Casting
        // streamPtr itself yields a NativeAVStream* masquerading as a
        // NativeAVStream**; taking &streamPtr gives the correct double
        // indirection and matches the other tests in this file.
        streams = (NativeAVStream **)&streamPtr,
    };

    var nativeIOContext = new NativeAVIOContext { };

    ffmpegMock
        .Setup(c => c.FreeAVFormatContext(It.IsAny<IntPtr>()))
        .Verifiable();
    var ffmpeg = ffmpegMock.Object;

    using (var handle = new AVFormatContextHandle(ffmpeg, &nativeAVFormatContext))
    using (var ioContext = new AVIOContext(ffmpeg, new AVIOContextHandle(ffmpeg, &nativeIOContext)))
    using (var formatContext = new AVFormatContext(ffmpeg, ioContext, handle))
    {
        Assert.Equal(handle, formatContext.Handle);
        Assert.Equal<uint>(1, formatContext.StreamCount);
        Assert.False(formatContext.IsClosed);
        Assert.False(handle.IsClosed);
        Assert.Equal((int)AVFormatContextEventFlags.MetadataUpdated, (int)formatContext.EventFlags);
        Assert.Equal((int)AVFormatContextFlags.NoHeader, (int)formatContext.Flags);
    }

    ffmpegMock.Verify();
}
public void TestMetadata()
{
    _context = AVFormat.OpenInput("test.mp3");

    // The fixture file carries exactly five metadata entries.
    Assert.AreEqual(5, _context.Metadata.Count());

    foreach (var tag in _context.Metadata)
    {
        if (tag.Key == "genre")
        {
            Assert.AreEqual("Acid", tag.Value);
        }
        else
        {
            // Every non-genre tag in the fixture contains the word "Test".
            Assert.IsTrue(tag.Value.Contains("Test"));
        }
    }
}
/// <summary>
/// Opens a media file as input. The codecs are not opened. Only the file
/// header (if present) is read.
/// </summary>
/// <param name="pFormatContext">the opened media file handle is put here</param>
/// <param name="filename">filename to open</param>
/// <param name="fmt">if non null, force the file format to use</param>
/// <param name="buf_size">optional buffer size (zero as default is OK)</param>
/// <param name="ap">Additional parameters needed when open the file (Null if default)</param>
/// <returns>AVError</returns>
public static AVError av_open_input_file(out AVFormatContext pFormatContext, string filename, ref AVInputFormat fmt, int buf_size, ref AVFormatParameters ap)
{
    IntPtr ptr;

    // BUG FIX: a hard-coded 0 was forwarded instead of the caller's
    // buf_size, silently ignoring the requested buffer size.
    AVError err = av_open_input_file(out ptr, filename, ref fmt, buf_size, ref ap);

    if (ptr == IntPtr.Zero)
    {
        pFormatContext = new AVFormatContext();
        return(err);
    }

    // Copy the native struct out, then release the native allocation.
    pFormatContext = *(AVFormatContext *)ptr.ToPointer();
    FFmpeg.av_freep(ref ptr);
    return(err);
}
public void GetVideoStream_ThrowsOnWrongType()
{
    var clientMock = new Mock<FFmpegClient>();
    clientMock
        .Setup(c => c.FreeAVFormatContext(It.IsAny<IntPtr>()))
        .Verifiable();
    var client = clientMock.Object;

    // A single stream, but of audio type: GetVideoStream must reject it.
    var codecParameters = new NativeAVCodecParameters
    {
        format = 12346,
        codec_type = NativeAVMediaType.AVMEDIA_TYPE_AUDIO,
    };
    var nativeStream = new NativeAVStream { codecpar = &codecParameters, };
    var streamPtr = new IntPtr(&nativeStream);

    var nativeFormatContext = new NativeAVFormatContext
    {
        nb_streams = 1,
        streams = (NativeAVStream **)&streamPtr,
    };
    var nativeIOContext = new NativeAVIOContext { };

    using (var handle = new AVFormatContextHandle(client, &nativeFormatContext))
    using (var ioContext = new AVIOContext(client, new AVIOContextHandle(client, &nativeIOContext)))
    using (var formatContext = new AVFormatContext(client, ioContext, handle))
    {
        Assert.Throws<InvalidOperationException>(() => formatContext.GetVideoStream());
    }

    clientMock.Verify();
}
public void GetStream_ReturnsStream()
{
    var clientMock = new Mock<FFmpegClient>();
    clientMock
        .Setup(c => c.FreeAVFormatContext(It.IsAny<IntPtr>()))
        .Verifiable();
    var client = clientMock.Object;

    // One stream whose codec parameters carry a recognizable format value.
    var codecParameters = new NativeAVCodecParameters { format = 12346, };
    var nativeStream = new NativeAVStream { codecpar = &codecParameters, };
    var streamPtr = new IntPtr(&nativeStream);

    var nativeFormatContext = new NativeAVFormatContext
    {
        nb_streams = 1,
        streams = (NativeAVStream **)&streamPtr,
    };
    var nativeIOContext = new NativeAVIOContext { };

    using (var handle = new AVFormatContextHandle(client, &nativeFormatContext))
    using (var ioContext = new AVIOContext(client, new AVIOContextHandle(client, &nativeIOContext)))
    using (var formatContext = new AVFormatContext(client, ioContext, handle))
    {
        var stream = formatContext.GetStream(0);
        Assert.Equal(12346, stream.CodecParameters.Format);
    }

    clientMock.Verify();
}
public void IOContext_Get()
{
    var clientMock = new Mock<FFmpegClient>();
    clientMock
        .Setup(c => c.FreeAVFormatContext(It.IsAny<IntPtr>()))
        .Verifiable();
    var client = clientMock.Object;

    // Back the native IO context with a small unmanaged buffer.
    var bytes = new byte[] { (byte)'t', (byte)'e', (byte)'s', (byte)'t' };
    var bytesHandle = Marshal.AllocHGlobal(bytes.Length);
    Marshal.Copy(bytes, 0, bytesHandle, bytes.Length);

    var nativeIOContext = new NativeAVIOContext
    {
        buffer = (byte *)bytesHandle,
        pos = 5,
    };
    var nativeFormatContext = new NativeAVFormatContext { pb = &nativeIOContext, };

    using (var handle = new AVFormatContextHandle(client, &nativeFormatContext))
    using (var ioContext = new AVIOContext(client, new AVIOContextHandle(client, &nativeIOContext)))
    using (var formatContext = new AVFormatContext(client, ioContext, handle))
    {
        // The IO context exposed by the format context must see the same buffer.
        var actual = new byte[4];
        Marshal.Copy((IntPtr)formatContext.IOContext.Buffer.NativeObject, actual, 0, bytes.Length);
        Assert.Equal(bytes, actual);
    }

    clientMock.Verify();
    Marshal.FreeHGlobal(bytesHandle);
}
/// <summary>
/// Begin writing stream data.
/// </summary>
/// <param name="AOptions">Optional muxer options <see cref="Dictionary"/>.</param>
/// <exception cref="ObjectDisposedException">This instance is disposed.</exception>
/// <exception cref="InvalidOperationException">
/// Stream is not in the Opened state.
/// </exception>
/// <exception cref="FFmpegError">Error writing header.</exception>
public void BeginData([CanBeNull] Dictionary AOptions = null)
{
    ThrowIfDisposed();

    if (State != OutputState.Opened)
    {
        throw new InvalidOperationException("Stream is not in the Opened state.");
    }

    // TODO : Implement stream check.

    // Hand any caller-supplied options dictionary to the native header write.
    Ref<Unsafe.AVDictionary> optionsRef = null;
    if (AOptions != null)
    {
        optionsRef = AOptions.Ref;
    }

    try
    {
        AVFormatContext.WriteHeader(Ref, ref optionsRef);
        State = OutputState.Ready;
    }
    finally
    {
        if (AOptions != null)
        {
            // The native call may replace the dictionary; propagate it back.
            AOptions.Ref = optionsRef;
        }
        else
        {
            // No caller dictionary: release whatever the native call produced.
            AVDictionary.Free(ref optionsRef);
        }
    }
}
/// <summary>
/// Add a <see cref="Stream"/> to the container.
/// </summary>
/// <param name="ACodec">The encoder <see cref="Codec"/> for the new stream.</param>
public Stream AddStream(Codec ACodec)
{
    ThrowIfDisposed();

    if (State != OutputState.Opened)
    {
        throw new InvalidOperationException("Stream is not in the Opened state.");
    }

    if (ACodec == null)
    {
        throw new ArgumentNullException(nameof(ACodec));
    }

    if (!ACodec.IsEncoder)
    {
        throw new ArgumentException("Not an encoder.", nameof(ACodec));
    }

    // The native stream is created, but the managed wrapper does not exist yet.
    var newStream = AVFormatContext.NewStream(Ref, ACodec.Ref);

    // TODO : Wrap AVStream.
    throw new NotImplementedException();
}
public void Setup()
{
    // Register all formats once, then open the transport-stream fixture.
    AVFormat.RegisterAll();
    _context = AVFormat.OpenInput("test.ts");
}
/// <summary>
/// Binding for the native av_dump_format, which logs human-readable
/// information about the given format context.
/// </summary>
/// <param name="pAVFormatContext">the context to describe</param>
/// <param name="index">index of the stream to dump information about</param>
/// <param name="url">the URL to print (e.g. source or destination file)</param>
/// <param name="is_output">non-zero if the context is an output context</param>
public static extern void av_dump_format(ref AVFormatContext pAVFormatContext, int index, string url, int is_output);
/// <summary>
/// Binding for the native av_write_trailer, which writes the stream trailer
/// to an output media file.
/// </summary>
/// <param name="pAVFormatContext">the output context</param>
/// <returns>0 on success, negative on error (FFmpeg convention)</returns>
public static extern int av_write_trailer(ref AVFormatContext pAVFormatContext);
/// <summary>
/// Binding for the native av_interleave_packet_per_dts (packet interleaving
/// by decoding timestamp).
/// </summary>
/// <param name="pAVFormatContext">the muxer context</param>
/// <param name="p_out_AVPacket">receives the next interleaved packet, if any</param>
/// <param name="pAVPacket">the packet to add to the interleaving queue</param>
/// <param name="flush">true to drain the remaining queued packets</param>
/// <returns>NOTE(review): return semantics taken from the native FFmpeg
/// function of the same name — verify against the bound FFmpeg version.</returns>
public static extern int av_interleave_packet_per_dts(ref AVFormatContext pAVFormatContext, ref AVPacket p_out_AVPacket, ref AVPacket pAVPacket, [MarshalAs(UnmanagedType.Bool)] bool flush);
/// <summary>
/// Reads the next frame of a stream into <paramref name="packet"/>.
/// </summary>
/// <param name="context">the opened input context to read from</param>
/// <param name="packet">receives the packet data</param>
/// <returns>true when a frame was read (native call returned >= 0)</returns>
public static bool ReadFrame(AVFormatContext context, AVPacket packet)
{
    // av_read_frame reports success with a non-negative return code.
    var result = FFmpegInvoke.av_read_frame(context.NativeObj, packet.NativeObj);
    return result >= 0;
}
/// <summary>
/// Binding for the native av_update_cur_dts, which updates the current
/// decoding timestamp for all streams relative to a reference stream.
/// </summary>
/// <param name="pAVFormatContext">the demuxer context</param>
/// <param name="pAVStream">the reference stream</param>
/// <param name="timestamp">new timestamp — presumably in the reference
/// stream's time base; TODO confirm against the bound FFmpeg version</param>
public static extern void av_update_cur_dts(ref AVFormatContext pAVFormatContext, ref AVStream pAVStream, long timestamp);
/// <summary>
/// Binding for the native av_find_default_stream_index.
/// </summary>
/// <param name="pAVFormatContext">the context to search</param>
/// <returns>
/// NOTE(review): the native function returns a stream index (negative on
/// failure); the AVError return type here looks like a mis-binding that
/// happens to work because both are int-sized — confirm before relying on it.
/// </returns>
public static extern AVError av_find_default_stream_index(ref AVFormatContext pAVFormatContext);
/// <summary>
/// Binding for the native av_new_stream, which adds a new stream to the
/// media file context.
/// </summary>
/// <param name="pAVFormatContext">the context the stream is added to</param>
/// <param name="id">file-format-dependent stream id</param>
/// <returns>pointer to the newly created stream, or null on failure</returns>
public static extern AVStream *av_new_stream(ref AVFormatContext pAVFormatContext, int id);
/// <summary>
/// Binding for the native avformat_write_header, which writes the stream
/// header to the output file.
/// </summary>
/// <param name="pAVFormatContext">the output context</param>
/// <param name="options">optional muxer options; may be updated by the call</param>
/// <returns>AVError result of the native call</returns>
public static extern AVError avformat_write_header(ref AVFormatContext pAVFormatContext, AVDictionary **options);
/// <summary>
/// Binding for the native av_interleaved_write_frame, which writes a packet
/// to an output media file ensuring correct interleaving.
/// </summary>
/// <param name="pAVFormatContext">the output context</param>
/// <param name="pAVPacket">the packet to write</param>
/// <returns>0 on success, negative on error (FFmpeg convention)</returns>
public static extern int av_interleaved_write_frame(ref AVFormatContext pAVFormatContext, ref AVPacket pAVPacket);
/// <summary>
/// Opens an FFmpeg audio encoder stream that writes encoded audio to
/// <paramref name="Filename"/>.
/// </summary>
/// <param name="Filename">path of the output file to create</param>
/// <param name="EncoderInfo">codec, sample format and bitrate settings</param>
/// <exception cref="EncoderException">
/// The output format or encoder cannot be found/opened, or the output file
/// cannot be created.
/// </exception>
public AudioEncoderStream(string Filename, EncoderInformation EncoderInfo)
{
    // Initialize instance variables
    m_filename = Filename;
    m_disposed = m_fileOpen = false;
    m_buffer = new FifoMemoryStream();

    // Open FFmpeg
    FFmpeg.av_register_all();

    // Initialize the output format context
    m_avFormatCtx = FFmpeg.av_alloc_format_context();

    // Get output format
    m_avFormatCtx.oformat = FFmpeg.guess_format(EncoderInfo.Codec.ShortName, null, null);

    // BUG FIX: the guard was inverted — it threw when a format WAS found.
    // A null oformat means guess_format failed to locate the output format.
    if (m_avFormatCtx.oformat == null)
    {
        throw new EncoderException("Could not find output format.");
    }

    FFmpeg.av_set_parameters(ref m_avFormatCtx, null);

    // Initialize the new output stream
    AVStream *stream = FFmpeg.av_new_stream(ref m_avFormatCtx, 1);
    if (stream == null)
    {
        throw new EncoderException("Could not alloc output audio stream");
    }

    m_avStream = *stream;

    // Initialize output codec context from the encoder settings.
    m_avCodecCtx = *m_avStream.codec;
    m_avCodecCtx.codec_id = EncoderInfo.Codec.CodecID;
    m_avCodecCtx.codec_type = CodecType.CODEC_TYPE_AUDIO;
    m_avCodecCtx.sample_rate = EncoderInfo.SampleRate;
    m_avCodecCtx.channels = EncoderInfo.Channels;
    m_avCodecCtx.bits_per_sample = EncoderInfo.SampleSize;
    m_avCodecCtx.bit_rate = EncoderInfo.Bitrate;

    if (EncoderInfo.VBR)
    {
        // Variable bitrate: switch the codec to quality-scale mode.
        m_avCodecCtx.flags |= FFmpeg.CODEC_FLAG_QSCALE;
        m_avCodecCtx.global_quality = EncoderInfo.FFmpegQualityScale;
    }

    // Open codec
    AVCodec *outCodec = FFmpeg.avcodec_find_encoder(m_avCodecCtx.codec_id);
    if (outCodec == null)
    {
        throw new EncoderException("Could not find encoder");
    }

    if (FFmpeg.avcodec_open(ref m_avCodecCtx, outCodec) < 0)
    {
        throw new EncoderException("Could not open codec.");
    }

    // Open and prep file
    if (FFmpeg.url_fopen(ref m_avFormatCtx.pb, m_filename, FFmpeg.URL_WRONLY) < 0)
    {
        throw new EncoderException("Could not open output file.");
    }

    m_fileOpen = true;

    FFmpeg.av_write_header(ref m_avFormatCtx);
}
/// <summary>
/// Reads packets of the opened media file to populate stream information.
/// </summary>
/// <param name="context">the opened input context to probe</param>
/// <returns>true when stream info was found, false on error</returns>
public static bool FindStreamInfo(AVFormatContext context)
{
    // BUG FIX: avformat_find_stream_info reports success with a return value
    // >= 0. The previous "< 0" comparison made this method return true on
    // FAILURE — the opposite of ReadFrame's ">= 0" convention in this binding.
    return FFmpegInvoke.avformat_find_stream_info(context.NativeObj, null) >= 0;
}