/// <summary>
/// Transcode an audio file into a new container with the given channel count
/// and sample rate (decode -> resample -> encode).
/// </summary>
/// <param name="input">input audio file</param>
/// <param name="output">output audio file</param>
/// <param name="outChannels">output audio file channels</param>
/// <param name="outSampleRate">output audio file sample rate</param>
public AudioTranscode(string input, string output, int outChannels = 2, int outSampleRate = 44100)
{
    using (MediaWriter writer = new MediaWriter(output))
    using (MediaReader reader = new MediaReader(input))
    {
        int audioIndex = reader.First(_ => _.Codec.Type == AVMediaType.AVMEDIA_TYPE_AUDIO).Index;

        // output stream: encoder chosen from the output container's default audio codec
        writer.AddStream(MediaEncoder.CreateAudioEncode(writer.Format, outChannels, outSampleRate));
        writer.Initialize();

        // template frame matching the encoder's format; the converter resamples into it
        AudioFrame dst = AudioFrame.CreateFrameByCodec(writer[0].Codec);
        SampleConverter converter = new SampleConverter(dst);

        long pts = 0;
        foreach (var packet in reader.ReadPacket())
        {
            foreach (var srcframe in reader[audioIndex].ReadFrame(packet))
            {
                foreach (var dstframe in converter.Convert(srcframe))
                {
                    // Audio pts counts total samples and can only increase.
                    // BUGFIX: stamp the frame with the samples written so far, THEN
                    // advance — previously pts was incremented first, so the first
                    // frame started at nb_samples instead of 0 (one-frame audio shift).
                    dstframe.Pts = pts;
                    pts += dstframe.AVFrame.nb_samples;
                    foreach (var outpacket in writer[0].WriteFrame(dstframe))
                    {
                        writer.WritePacket(outpacket);
                    }
                }
            }
        }
        writer.FlushMuxer();
    }
}
/// <summary>
/// Re-encode the first video stream of <paramref name="inputFile"/> frame by
/// frame into <paramref name="outputFile"/> (decode -> pixel convert -> encode).
/// </summary>
/// <param name="inputFile">source video file</param>
/// <param name="outputFile">destination video file</param>
public Video2Frame2Video(string inputFile, string outputFile)
{
    using (MediaReader reader = new MediaReader(inputFile))
    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        var videoIndex = reader.First(stream => stream.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).Index;

        // mirror the input stream's parameters onto the writer
        writer.AddStream(reader[videoIndex]);
        writer.Initialize();

        // converts each decoded frame to the encoder's pixel format
        PixelConverter pixelConverter = new PixelConverter(writer.First().Codec);

        foreach (var packet in reader.ReadPacket())
        {
            foreach (var srcFrame in reader[videoIndex].ReadFrame(packet))
            {
                foreach (var dstFrame in pixelConverter.Convert(srcFrame))
                {
                    foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                    {
                        writer.WritePacket(dstPacket);
                    }
                }
            }
        }
        writer.FlushMuxer();
    }
}
/// <summary>
/// Remux every stream of <paramref name="inputFile"/> into a sibling file named
/// "&lt;name&gt;_remuxing&lt;ext&gt;" without re-encoding; only packet timestamps
/// are rescaled to the output streams' time bases.
/// </summary>
/// <param name="inputFile">source media file</param>
public unsafe Remuxing(string inputFile)
{
    string outputFile = Path.GetFileNameWithoutExtension(inputFile) + "_remuxing" + Path.GetExtension(inputFile);
    using (MediaReader reader = new MediaReader(inputFile))
    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        // mirror every input stream (codec parameters are copied, not re-encoded)
        for (int i = 0; i < reader.Count; i++)
        {
            writer.AddStream(reader[i], writer.Format.Flags);
        }
        writer.Initialize();

        const AVRounding rounding = AVRounding.AV_ROUND_NEAR_INF | AVRounding.AV_ROUND_PASS_MINMAX;

        // copy packets, rescaling pts/dts/duration into the output time base
        foreach (var packet in reader.ReadPacket())
        {
            int index = packet.StreamIndex;
            AVRational srcTimeBase = reader[index].TimeBase;
            AVRational dstTimeBase = writer[index].TimeBase;
            packet.Pts = ffmpeg.av_rescale_q_rnd(packet.Pts, srcTimeBase, dstTimeBase, rounding);
            packet.Dts = ffmpeg.av_rescale_q_rnd(packet.Dts, srcTimeBase, dstTimeBase, rounding);
            packet.Duration = ffmpeg.av_rescale_q(packet.Duration, srcTimeBase, dstTimeBase);
            packet.Pos = -1; // byte position is unknown in the new container
            writer.WritePacket(packet);
        }
        writer.FlushMuxer();
    }
}
/// <summary>
/// A red chromakey filter example for .png images.
/// <para>
/// Equivalent command line:
/// ffmpeg -i <paramref name="input"/> -vf chromakey=red:0.1:0.0 <paramref name="output"/>
/// </para>
/// </summary>
/// <param name="input">input image file</param>
/// <param name="output">output image file</param>
public unsafe PngChromekeyFilter(string input, string output)
{
    using (MediaReader reader = new MediaReader(input))
    using (MediaWriter writer = new MediaWriter(output))
    {
        var videoIndex = reader.First(stream => stream.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).Index;

        // source parameters for the buffer filter
        int height = reader[videoIndex].Codec.AVCodecContext.height;
        int width = reader[videoIndex].Codec.AVCodecContext.width;
        int format = (int)reader[videoIndex].Codec.AVCodecContext.pix_fmt;
        AVRational time_base = reader[videoIndex].TimeBase;
        AVRational sample_aspect_ratio = reader[videoIndex].Codec.AVCodecContext.sample_aspect_ratio;

        // graph: buffer -> chromakey=red:0.1:0.0 -> buffersink
        MediaFilterGraph filterGraph = new MediaFilterGraph();
        filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width, height, (AVPixelFormat)format, time_base, sample_aspect_ratio)
            .LinkTo(0, filterGraph.AddFilter(new MediaFilter("chromakey"), "red:0.1:0.0"))
            .LinkTo(0, filterGraph.AddVideoSinkFilter(new MediaFilter(MediaFilter.VideoSinks.Buffersink)));
        filterGraph.Initialize();

        // mirror the input stream on the writer and initialize it
        writer.AddStream(reader[videoIndex]);
        writer.Initialize();

        // converts filtered frames to the destination codec's pixel format
        PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);

        foreach (var srcPacket in reader.ReadPacket())
        {
            foreach (var srcFrame in reader[videoIndex].ReadFrame(srcPacket))
            {
                filterGraph.Inputs.First().WriteFrame(srcFrame);
                foreach (var filterFrame in filterGraph.Outputs.First().ReadFrame())
                {
                    // NOTE: filterFrame.ToMat() (EmguFFmpeg.EmguCV) could save the
                    // output image directly without a writer:
                    //   using (var mat = filterFrame.ToMat()) { mat.Save(output); }
                    foreach (var dstFrame in pixelConverter.Convert(filterFrame))
                    {
                        foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                        {
                            writer.WritePacket(dstPacket);
                        }
                    }
                }
            }
        }
        // flush codec cache
        writer.FlushMuxer();
    }
}
/// <summary>
/// Remux the video stream of an mp4 file into a raw .h264 elementary stream.
/// Only packets belonging to the first video decoder's stream are read.
/// </summary>
static void Main(string[] args)
{
    // BUGFIX: the reader was never disposed (the sibling example wraps it in
    // a using block) — wrap it here too so the input file handle is released.
    using (var reader = new MediaReader(@"Z:\output.mp4"))
    {
        var decoder = reader.Decoders.OfType<VideoDecoder>().First();
        var packet = new Packet();
        using (var writer = new MediaRemuxer(@"Z:\output.h264", decoder))
        {
            // read only the decoder's stream and forward each packet unchanged
            while (reader.ReadPacket(packet, decoder.StreamIndex))
            {
                writer.Write(packet);
            }
        }
    }
}
/// <summary>
/// Remux a .wmv file into an .mkv container: every packet is read and
/// retargeted at the single output stream created from the video decoder.
/// </summary>
static void Main(string[] args)
{
    using (var reader = new MediaReader(@"Z:\game.dat_000004.wmv"))
    {
        // first video decoder found in the input
        var decoder = reader.Decoders.OfType<VideoDecoder>().First();
        using (var writer = new MediaRemuxer(@"Z:\test.mkv", decoder))
        {
            var packet = new Packet();
            while (reader.ReadPacket(packet))
            {
                // the writer has one stream; point every packet at it
                packet.StreamIndex = decoder.StreamIndex;
                writer.Write(packet);
            }
        }
    }
}
/// <summary>
/// decode video to image using a custom (CUDA hardware) decoder
/// filter graph:
/// ┌──────┐     ┌──────┐     ┌─────┐     ┌──────────┐     ┌──────┐
/// │input0│---->│buffer│---->│scale│---->│buffersink│---->│output│
/// └──────┘     └──────┘     └─────┘     └──────────┘     └──────┘
/// </summary>
/// <param name="inputFile">input video file</param>
/// <param name="outDirectory">folder for output image files</param>
/// <param name="scaleOptions">scale options <see cref="http://ffmpeg.org/ffmpeg-filters.html#scale-1"/></param>
public DecodeVideoWithCustomCodecScaledToMat(string inputFile, string outDirectory, string scaleOptions = "512:288")
{
    using (MediaReader reader = new MediaReader(inputFile, null, null))
    {
        var videoIndex = reader.First(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).Index;
        unsafe
        {
            // replace the default video decoder with NVIDIA's CUDA-accelerated one
            // !!! IMPORTANT NOTE: This sample won't work, if you haven't downloaded ffmpeg (GPL license, as it is more complete), and you don't have NVIDIA hardware (CUDA) !!!
            reader[videoIndex].Codec = MediaDecode.CreateDecode("h264_cuvid", _ => ffmpeg.avcodec_parameters_to_context(_, reader[videoIndex].Stream.codecpar));
        }
        int height = reader[videoIndex].Codec.AVCodecContext.height;
        int width = reader[videoIndex].Codec.AVCodecContext.width;
        int format = (int)reader[videoIndex].Codec.AVCodecContext.pix_fmt;
        AVRational time_base = reader[videoIndex].TimeBase;
        AVRational sample_aspect_ratio = reader[videoIndex].Codec.AVCodecContext.sample_aspect_ratio;
        /* We are moving the packet to CUDA to perform the scaling.
         * We can then:
         * - remove hwdownload and format to leave it in CUDA, and forward the pointer to any other function, or write the frame to the output video
         * - convert it to MAT whereas converting speed depends on the size of the scaled frame.
         */
        MediaFilterGraph filterGraph = new MediaFilterGraph();
        filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width, height, (AVPixelFormat)format, time_base, sample_aspect_ratio)
            .LinkTo(0, filterGraph.AddFilter(new MediaFilter("scale"), scaleOptions))
            .LinkTo(0, filterGraph.AddVideoSinkFilter(new MediaFilter(MediaFilter.VideoSinks.Buffersink)));
        filterGraph.Initialize();

        // PERF: create the output directory once, not per decoded frame
        // (previously Directory.CreateDirectory ran inside the innermost loop)
        string outPath = Directory.CreateDirectory(outDirectory).FullName;

        var sw = Stopwatch.StartNew();
        foreach (var packet in reader.ReadPacket())
        {
            foreach (var frame in reader[videoIndex].ReadFrame(packet))
            {
                filterGraph.Inputs.First().WriteFrame(frame);
                foreach (var filterFrame in filterGraph.Outputs.First().ReadFrame())
                {
                    using (var image = filterFrame.ToMat())
                    {
                        image.Save(Path.Combine(outPath, $"{DateTime.Now.Ticks}.jpg"));
                    }
                }
            }
        }
        Console.WriteLine($"Converting to MAT [ processed in {sw.Elapsed.TotalMilliseconds:0} ms ]");
    }
}
/// <summary>
/// recording audio.
/// <para>
/// first set inputDeviceName = null, you will get inputDeviceName list in vs output,
/// </para>
/// <para>
/// then set inputDeviceName to your real device name and run again, you will get an audio output.
/// </para>
/// <para>
/// if you want to stop recording, exit the console;
/// </para>
/// </summary>
/// <param name="outputFile">output audio file</param>
/// <param name="inputDeviceName">dshow audio device name, or null to only list devices</param>
public RecordingAudio(string outputFile, string inputDeviceName = null)
{
    // console output
    FFmpegHelper.SetupLogging(logWrite: _ => Console.Write(_));
    // register all devices
    FFmpegHelper.RegisterDevice();
    var dshowInput = new InFormat("dshow");
    // list all "dshow" devices at console output, ffmpeg does not support direct reading of device names
    MediaDevice.PrintDeviceInfos(dshowInput, "list", MediaDevice.ListDevicesOptions);
    if (string.IsNullOrWhiteSpace(inputDeviceName)) { return; }
    // get your audio input device name from console output
    // NOTE: DO NOT delete "audio="
    using (MediaReader reader = new MediaReader($"audio={inputDeviceName}", dshowInput))
    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        var stream = reader.Where(_ => _.Codec.Type == AVMediaType.AVMEDIA_TYPE_AUDIO).First();
        writer.AddStream(MediaEncode.CreateAudioEncode(writer.Format, stream.Codec.AVCodecContext.channels, stream.Codec.AVCodecContext.sample_rate));
        writer.Initialize();
        // template frame matching the encoder's format; the converter resamples into it
        AudioFrame dstFrame = AudioFrame.CreateFrameByCodec(writer[0].Codec);
        SampleConverter converter = new SampleConverter(dstFrame);
        long pts = 0;
        foreach (var packet in reader.ReadPacket())
        {
            foreach (var frame in stream.ReadFrame(packet))
            {
                foreach (var dstframe in converter.Convert(frame))
                {
                    // BUGFIX: the loop previously stamped and wrote the template
                    // frame (dstFrame) instead of each converted frame (dstframe).
                    // Also stamp pts BEFORE advancing so audio starts at pts 0;
                    // audio pts counts total samples and can only increase.
                    dstframe.Pts = pts;
                    pts += dstframe.AVFrame.nb_samples;
                    foreach (var dstpacket in writer[0].WriteFrame(dstframe))
                    {
                        writer.WritePacket(dstpacket);
                    }
                }
            }
        }
        writer.FlushMuxer();
    }
}
/// <summary>
/// decode audio frames from a file (each decoded frame can be converted to a Mat).
/// </summary>
/// <param name="inputFile">input audio file</param>
public DecodeAudioToMat(string inputFile)
{
    using (MediaReader reader = new MediaReader(inputFile))
    {
        // audio files may have more than one stream, e.g. 0 is mp3 audio, 1 is an mpeg cover image
        var audioIndex = reader.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO).First().Index;
        // PERF: hoisted out of the packet loop — these are loop-invariant and
        // were previously re-allocated for every packet.
        AudioFrame audioFrame = new AudioFrame(AVSampleFormat.AV_SAMPLE_FMT_S16P, 2, 1024, 44100);
        SampleConverter converter = new SampleConverter(audioFrame);
        foreach (var packet in reader.ReadPacket())
        {
            foreach (var frame in reader[audioIndex].ReadFrame(packet))
            {
                Mat mat = frame.ToMat();
            }
        }
    }
}
/// <summary>
/// Decode an mp3 file, resampling output to 48 kHz stereo 32-bit, printing the
/// packet presentation timestamp as it goes.
/// </summary>
static void Main(string[] args)
{
    const string TestFile = @"Z:\Halozy-厄神様のジレンマ.mp3";
    // BUGFIX: the reader was never disposed — wrap it in a using block so the
    // input file handle is released (matches the other examples in this file).
    using (MediaReader media = new MediaReader(TestFile))
    {
        var decoder = media.Decoders[0] as AudioDecoder;
        // resample everything the decoder produces to 48 kHz / 2ch / 32-bit
        decoder.OutFormat = new AudioFormat(48000, 2, 32);
        var packet = new Packet();
        while (media.ReadPacket(packet, decoder.StreamIndex))
        {
            Console.Write($"\r{packet.PresentTimestamp}");
            using (var frame = new AudioFrame())
            {
                decoder.Decode(packet, frame);
            }
        }
        Console.WriteLine($"\r{packet.PresentTimestamp}");
        Console.ReadKey();
    }
}
/// <summary>
/// Pull a network (e.g. RTMP) stream and decode its packets.
/// </summary>
/// <param name="input">stream url</param>
public RtmpPull(string input)
{
    MediaDictionary options = new MediaDictionary();
    options.Add("stimeout", "30000000"); // set connect timeout 30s
    using (MediaReader reader = new MediaReader(input, null, options))
    {
        // first video stream's codec context, for the converter's dimensions
        var codecContext = reader.First(stream => stream.Codec.Type == AVMediaType.AVMEDIA_TYPE_VIDEO).Codec.AVCodecContext;
        // converts decoded frames to BGR24 at the source resolution
        PixelConverter videoFrameConverter = new PixelConverter(AVPixelFormat.AV_PIX_FMT_BGR24, codecContext.width, codecContext.height);
        foreach (var packet in reader.ReadPacket())
        {
            foreach (var frame in reader[packet.StreamIndex].ReadFrame(packet))
            {
                // TODO
            }
        }
    }
}
/// <summary>
/// decode video to image files: one .bmp per frame, named by the frame's pts
/// </summary>
/// <param name="inputFile">input video file</param>
/// <param name="outDirectory">folder for output image files</param>
public DecodeVideoToMat(string inputFile, string outDirectory)
{
    string outputdir = Directory.CreateDirectory(outDirectory).FullName;
    using (MediaReader reader = new MediaReader(inputFile))
    {
        var videoIndex = reader.First(stream => stream.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).Index;
        foreach (var packet in reader.ReadPacket())
        {
            foreach (var frame in reader[videoIndex].ReadFrame(packet))
            {
                // dispose each frame's Mat as soon as the image is written
                using (var image = frame.ToMat())
                {
                    image.Save(Path.Combine(outputdir, $"{frame.Pts}.bmp"));
                }
            }
        }
    }
}