/// <summary>
/// transcode audio
/// </summary>
/// <param name="input">input audio file</param>
/// <param name="output">output audio file</param>
/// <param name="outChannels">output audio file channels</param>
/// <param name="outSampleRate">output audio file sample rate</param>
public AudioTranscode(string input, string output, int outChannels = 2, int outSampleRate = 44100)
{
    using (MediaWriter writer = new MediaWriter(output))
    using (MediaReader reader = new MediaReader(input))
    {
        int audioIndex = reader.First(_ => _.Codec.Type == AVMediaType.AVMEDIA_TYPE_AUDIO).Index;

        writer.AddStream(MediaEncoder.CreateAudioEncode(writer.Format, outChannels, outSampleRate));
        writer.Initialize();

        AudioFrame dst = AudioFrame.CreateFrameByCodec(writer[0].Codec);
        SampleConverter converter = new SampleConverter(dst);

        long pts = 0;
        foreach (var packet in reader.ReadPacket())
        {
            foreach (var srcframe in reader[audioIndex].ReadFrame(packet))
            {
                foreach (var dstframe in converter.Convert(srcframe))
                {
                    pts += dstframe.AVFrame.nb_samples;
                    dstframe.Pts = pts; // audio's pts is total samples, pts can only increase.
                    foreach (var outpacket in writer[0].WriteFrame(dstframe))
                    {
                        writer.WritePacket(outpacket);
                    }
                }
            }
        }
        writer.FlushMuxer();
    }
}
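// Usage sketch (hypothetical file names, not part of the original samples): transcode a readable
// audio file to stereo 48 kHz, assuming MediaWriter infers the output container from the file extension.
new AudioTranscode("input.mp3", "output.aac", outChannels: 2, outSampleRate: 48000);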
public Video2Frame2Video(string inputFile, string outputFile)
{
    using (MediaReader reader = new MediaReader(inputFile))
    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        var videoIndex = reader.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;

        writer.AddStream(reader[videoIndex]);
        writer.Initialize();

        PixelConverter pixelConverter = new PixelConverter(writer.First().Codec);

        foreach (var packet in reader.ReadPacket())
        {
            foreach (var frame in reader[videoIndex].ReadFrame(packet))
            {
                foreach (var dstFrame in pixelConverter.Convert(frame))
                {
                    foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                    {
                        writer.WritePacket(dstPacket);
                    }
                }
            }
        }
        writer.FlushMuxer();
    }
}
public unsafe Remuxing(string inputFile)
{
    string outputFile = Path.GetFileNameWithoutExtension(inputFile) + "_remuxing" + Path.GetExtension(inputFile);

    using (MediaReader reader = new MediaReader(inputFile))
    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        // add stream with reader's codec_id
        for (int i = 0; i < reader.Count; i++)
        {
            writer.AddStream(reader[i], writer.Format.Flags);
        }
        writer.Initialize();

        // read and write packet
        foreach (var packet in reader.ReadPacket())
        {
            int index = packet.StreamIndex;
            AVRounding rounding = AVRounding.AV_ROUND_NEAR_INF | AVRounding.AV_ROUND_PASS_MINMAX;
            AVRational inTimeBase = reader[index].TimeBase;
            AVRational outTimeBase = writer[index].TimeBase;
            packet.Pts = ffmpeg.av_rescale_q_rnd(packet.Pts, inTimeBase, outTimeBase, rounding);
            packet.Dts = ffmpeg.av_rescale_q_rnd(packet.Dts, inTimeBase, outTimeBase, rounding);
            packet.Duration = ffmpeg.av_rescale_q(packet.Duration, inTimeBase, outTimeBase);
            packet.Pos = -1;
            writer.WritePacket(packet);
        }
        writer.FlushMuxer();
    }
}
public EncodeAudioByMat(string output)
{
    using (MediaWriter writer = new MediaWriter(output))
    {
        writer.AddStream(MediaEncoder.CreateAudioEncode(writer.Format, 2, 44100));
        writer.Initialize();

        AudioFrame dstFrame = AudioFrame.CreateFrameByCodec(writer[0].Codec);
        SampleConverter converter = new SampleConverter(dstFrame);

        using (Mat mat = CreateMat(writer[0].Codec.AVCodecContext.channels))
        {
            long pts = 0;
            for (int i = 0; i < 1000; i++)
            {
                foreach (var item in converter.Convert(mat.ToAudioFrame(dstSampleRate: writer[0].Codec.AVCodecContext.sample_rate)))
                {
                    pts += item.NbSamples;
                    item.Pts = pts;
                    foreach (var packet in writer[0].WriteFrame(item))
                    {
                        writer.WritePacket(packet);
                    }
                }
            }
        }
        writer.FlushMuxer();
    }
}
/// <summary>
/// Yuv420P sample
/// </summary>
/// <param name="outputFile">output file</param>
/// <param name="width">video width</param>
/// <param name="height">video height</param>
/// <param name="fps">video fps</param>
public FillYuv420PSample(string outputFile, int width, int height, int fps)
{
    var dir = Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(outputFile), Path.GetFileNameWithoutExtension(outputFile))).FullName;

    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        writer.AddStream(MediaEncoder.CreateVideoEncode(writer.Format, width, height, fps));
        writer.Initialize();

        VideoFrame srcframe = new VideoFrame(width, height, FFmpeg.AutoGen.AVPixelFormat.AV_PIX_FMT_YUV420P);
        PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);

        Random random = new Random();
        for (int i = 0; i < fps * 10; i++)
        {
            // fill video frame
            FillYuv420P(srcframe, i);

            foreach (var dstframe in pixelConverter.Convert(srcframe))
            {
                dstframe.Pts = i;
                SaveFrame(dstframe, Path.Combine(dir, $"{i}.bmp"));
                foreach (var packet in writer[0].WriteFrame(dstframe))
                {
                    writer.WritePacket(packet);
                }
            }
        }

        // flush cache
        writer.FlushMuxer();
    }
}
/// <summary>
/// A red chromakey filter example for a .png image.
/// <para>
/// ffmpeg -i <paramref name="input"/> -vf chromakey=red:0.1:0.0 <paramref name="output"/>
/// </para>
/// </summary>
/// <param name="input"></param>
/// <param name="output"></param>
public unsafe PngChromekeyFilter(string input, string output)
{
    using (MediaReader reader = new MediaReader(input))
    using (MediaWriter writer = new MediaWriter(output))
    {
        var videoIndex = reader.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;

        // init filter
        int height = reader[videoIndex].Codec.AVCodecContext.height;
        int width = reader[videoIndex].Codec.AVCodecContext.width;
        int format = (int)reader[videoIndex].Codec.AVCodecContext.pix_fmt;
        AVRational time_base = reader[videoIndex].TimeBase;
        AVRational sample_aspect_ratio = reader[videoIndex].Codec.AVCodecContext.sample_aspect_ratio;

        MediaFilterGraph filterGraph = new MediaFilterGraph();
        filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width, height, (AVPixelFormat)format, time_base, sample_aspect_ratio)
            .LinkTo(0, filterGraph.AddFilter(new MediaFilter("chromakey"), "red:0.1:0.0"))
            .LinkTo(0, filterGraph.AddVideoSinkFilter(new MediaFilter(MediaFilter.VideoSinks.Buffersink)));
        filterGraph.Initialize();

        // add stream by reader and init writer
        writer.AddStream(reader[videoIndex]);
        writer.Initialize();

        // init video frame format converter by dstcodec
        PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);

        foreach (var srcPacket in reader.ReadPacket())
        {
            foreach (var srcFrame in reader[videoIndex].ReadFrame(srcPacket))
            {
                filterGraph.Inputs.First().WriteFrame(srcFrame);
                foreach (var filterFrame in filterGraph.Outputs.First().ReadFrame())
                {
                    // filterFrame.ToMat() can also get the output image directly, without the need for a writer:
                    //using EmguFFmpeg.EmguCV;
                    //using (var mat = filterFrame.ToMat())
                    //{
                    //    mat.Save(output);
                    //}
                    foreach (var dstFrame in pixelConverter.Convert(filterFrame))
                    {
                        foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                        {
                            writer.WritePacket(dstPacket);
                        }
                    }
                }
            }
        }
        // flush codec cache
        writer.FlushMuxer();
    }
}
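// Usage sketch (hypothetical file names): the managed equivalent of
//   ffmpeg -i input.png -vf chromakey=red:0.1:0.0 output.png
new PngChromekeyFilter("input.png", "output.png");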
/// <summary>
/// recording audio.
/// <para>
/// first set inputDeviceName = null and run; you will get the input device list in the console output,
/// </para>
/// <para>
/// then set inputDeviceName to your real device name and run again; you will get an audio output file.
/// </para>
/// <para>
/// to stop recording, exit the console.
/// </para>
/// <para>ffmpeg </para>
/// </summary>
/// <param name="outputFile"></param>
public RecordingAudio(string outputFile, string inputDeviceName = null)
{
    // console output
    FFmpegHelper.SetupLogging(logWrite: _ => Console.Write(_));
    // register all devices
    FFmpegHelper.RegisterDevice();

    var dshowInput = new InFormat("dshow");
    // list all "dshow" devices in the console output; ffmpeg does not support direct reading of device names
    MediaDevice.PrintDeviceInfos(dshowInput, "list", MediaDevice.ListDevicesOptions);

    if (string.IsNullOrWhiteSpace(inputDeviceName))
    {
        return; // get your audio input device name from the console output
    }

    // NOTE: DO NOT delete "audio="
    using (MediaReader reader = new MediaReader($"audio={inputDeviceName}", dshowInput))
    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        var stream = reader.Where(_ => _.Codec.Type == AVMediaType.AVMEDIA_TYPE_AUDIO).First();

        writer.AddStream(MediaEncoder.CreateAudioEncode(writer.Format, stream.Codec.AVCodecContext.channels, stream.Codec.AVCodecContext.sample_rate));
        writer.Initialize();

        AudioFrame dstFrame = AudioFrame.CreateFrameByCodec(writer[0].Codec);
        SampleConverter converter = new SampleConverter(dstFrame);

        long pts = 0;
        foreach (var packet in reader.ReadPacket())
        {
            foreach (var frame in stream.ReadFrame(packet))
            {
                foreach (var dstframe in converter.Convert(frame))
                {
                    pts += dstframe.AVFrame.nb_samples;
                    dstframe.Pts = pts;
                    foreach (var dstpacket in writer[0].WriteFrame(dstframe))
                    {
                        writer.WritePacket(dstpacket);
                    }
                }
            }
        }
        writer.FlushMuxer();
    }
}
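// Usage sketch (hypothetical device name): run once with null to print the dshow device list,
// then run again with the exact name copied from that output.
new RecordingAudio("record.mp3", null);
new RecordingAudio("record.mp3", "Microphone (Realtek High Definition Audio)");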
public override void OnStart(EncoderOption option)
{
    this.option = option;

    var audio_file = StoryboardInstanceManager.ActivityInstance?.Info?.audio_file_path;
    audio_reader = new MediaReader(audio_file);
    audio_decoder = audio_reader.Decoders.OfType<AudioDecoder>().FirstOrDefault();

    #region Video Init

    var video_format = new VideoFormat(option.Width, option.Height, AVPixelFormat.Bgr24);
    var video_param = new VideoEncoderParameters() { FrameRate = new Fraction(option.FPS), BitRate = option.BitRate };

    video_encoder = new VideoEncoder(option.EncoderName, video_format, video_param);

    #endregion Video Init

    writer = new MediaWriter(option.OutputPath, false).AddEncoder(video_encoder);

    if (audio_decoder != null)
    {
        audio_encoder = new AudioEncoder(audio_decoder.ID, audio_decoder.OutFormat, BitRate._192Kbps);
        writer.AddEncoder(audio_encoder);
    }

    writer.Initialize();

    Log.User($"Format :{video_format.ToString()}\nVideo Encoder :{video_encoder.ToString()}");

    video_frame = new VideoFrame(video_format);
    audio_frame = new AudioFrame(audio_decoder.OutFormat);

    audio_encoding_thread = new Thread(AudioEncoding);
    audio_encoding_thread.Name = "Audio Encoder Thread";
    audio_encoding_thread.Start();
}
/// <summary>
/// create a 60 / <paramref name="fps"/> second video
/// </summary>
/// <param name="outputFile">video output</param>
/// <param name="width">video width</param>
/// <param name="height">video height</param>
/// <param name="fps">video fps</param>
public EncodeVideoByMat(string outputFile, int width, int height, int fps)
{
    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        writer.AddStream(MediaEncoder.CreateVideoEncode(writer.Format, width, height, fps));
        writer.Initialize();

        VideoFrame dstframe = VideoFrame.CreateFrameByCodec(writer[0].Codec);

        Random random = new Random();
        for (int i = 0; i < 61; i++)
        {
            // create a video frame by Mat
            byte b = (byte)random.Next(0, 255);
            byte g = (byte)random.Next(0, 255);
            byte r = (byte)random.Next(0, 255);
            using (Image<Bgr, byte> image = new Image<Bgr, byte>(width, height, new Bgr(b, g, r)))
            {
                string line1 = $"pts = {i}, color = [{b,3},{g,3},{r,3}]";
                string line2 = $"time = {DateTime.Now:HH:mm:ss.fff}";
                image.Draw(line1, new System.Drawing.Point(30, 50), Emgu.CV.CvEnum.FontFace.HersheyDuplex, 1, new Bgr(255 - b, 255 - g, 255 - r));
                image.Draw(line2, new System.Drawing.Point(30, 100), Emgu.CV.CvEnum.FontFace.HersheyDuplex, 1, new Bgr(255 - b, 255 - g, 255 - r));
                dstframe = image.Mat.ToVideoFrame(AVPixelFormat.AV_PIX_FMT_YUV420P);
            }

            dstframe.Pts = i; // video pts = seconds from video start * fps (pts can only increase).

            // writing one frame may produce zero, one, or more packets (e.g. the first frames may produce none),
            // so WriteFrame returns an IEnumerable.
            foreach (var packet in writer[0].WriteFrame(dstframe))
            {
                writer.WritePacket(packet);
            }
        }

        // flush cache
        writer.FlushMuxer();
    }
}
public Mp4VideoWriter Init()
{
    writer.Initialize();
    return this;
}
/// <summary>
/// Make the specified color of <paramref name="input0"/> transparent and overlay it on the <paramref name="input1"/> video to <paramref name="output"/>
/// <para>
/// NOTE: green [R:0 G:128 B:0]
/// </para>
/// <para>
/// ffmpeg -i <paramref name="input0"/> -i <paramref name="input1"/> -filter_complex "[1:v]chromakey=green:0.1:0.0[ckout];[0:v][ckout]overlay[out]" -map "[out]" <paramref name="output"/>
/// </para>
/// filter graph:
/// ┌──────┐     ┌──────┐     ┌─────────┐     ┌─────────┐
/// │input0│---->│buffer│---->│chromakey│---->│         │
/// └──────┘     └──────┘     └─────────┘     │         │     ┌──────────┐     ┌──────┐
///                                           │ overlay │---->│buffersink│---->│output│
/// ┌──────┐     ┌──────┐                     │         │     └──────────┘     └──────┘
/// │input1│-----│buffer│-------------------->│         │
/// └──────┘     └──────┘                     └─────────┘
/// </summary>
/// <param name="input0">foreground</param>
/// <param name="input1">background</param>
/// <param name="output">output</param>
/// <param name="chromakeyOptions">rgb(green or 0x008000):similarity:blend, see http://ffmpeg.org/ffmpeg-filters.html#chromakey </param>
public VideoChromekeyFilter(string input0, string input1, string output, string chromakeyOptions = "green:0.1:0.0")
{
    using (MediaReader reader0 = new MediaReader(input0))
    using (MediaReader reader1 = new MediaReader(input1))
    using (MediaWriter writer = new MediaWriter(output))
    {
        var videoIndex0 = reader0.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;
        var videoIndex1 = reader1.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;

        // init complex filter graph
        int height0 = reader0[videoIndex0].Codec.AVCodecContext.height;
        int width0 = reader0[videoIndex0].Codec.AVCodecContext.width;
        int format0 = (int)reader0[videoIndex0].Codec.AVCodecContext.pix_fmt;
        AVRational time_base0 = reader0[videoIndex0].TimeBase;
        AVRational sample_aspect_ratio0 = reader0[videoIndex0].Codec.AVCodecContext.sample_aspect_ratio;

        int height1 = reader1[videoIndex1].Codec.AVCodecContext.height;
        int width1 = reader1[videoIndex1].Codec.AVCodecContext.width;
        int format1 = (int)reader1[videoIndex1].Codec.AVCodecContext.pix_fmt;
        AVRational time_base1 = reader1[videoIndex1].TimeBase;
        AVRational sample_aspect_ratio1 = reader1[videoIndex1].Codec.AVCodecContext.sample_aspect_ratio;

        MediaFilterGraph filterGraph = new MediaFilterGraph();
        var in0 = filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width0, height0, (AVPixelFormat)format0, time_base0, sample_aspect_ratio0);
        var in1 = filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width1, height1, (AVPixelFormat)format1, time_base1, sample_aspect_ratio1);
        var chromakey = filterGraph.AddFilter(new MediaFilter("chromakey"), chromakeyOptions);
        var overlay = filterGraph.AddFilter(new MediaFilter("overlay"));
        var out0 = filterGraph.AddVideoSinkFilter(new MediaFilter(MediaFilter.VideoSinks.Buffersink));
        in0.LinkTo(0, chromakey, 0).LinkTo(0, overlay, 1).LinkTo(0, out0, 0);
        in1.LinkTo(0, overlay, 0);
        filterGraph.Initialize();

        // add stream by reader and init writer
        writer.AddStream(reader0[videoIndex0]);
        writer.Initialize();

        // init video frame format converter by dstcodec
        PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);

        long pts = 0;
        MediaReader[] readers = new MediaReader[] { reader0, reader1 };
        int[] index = new int[] { videoIndex0, videoIndex1 };
        for (int i = 0; i < readers.Length; i++)
        {
            var reader = readers[i];
            foreach (var srcPacket in reader.ReadPacket())
            {
                foreach (var srcFrame in reader[index[i]].ReadFrame(srcPacket))
                {
                    filterGraph.Inputs[i].WriteFrame(srcFrame);
                    foreach (var filterFrame in filterGraph.Outputs.First().ReadFrame())
                    {
                        foreach (var dstFrame in pixelConverter.Convert(filterFrame))
                        {
                            dstFrame.Pts = pts++;
                            foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                            {
                                writer.WritePacket(dstPacket);
                            }
                        }
                    }
                }
            }
        }
        // flush codec cache
        writer.FlushMuxer();
    }
}
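// Usage sketch (hypothetical file names): make green pixels of the foreground transparent and
// overlay it on the background, matching the ffmpeg -filter_complex command in the summary above.
new VideoChromekeyFilter("foreground.mp4", "background.mp4", "output.mp4", "green:0.1:0.0");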