/// <summary>
/// Remuxes <paramref name="inputFile"/> into a new container without re-encoding:
/// every stream is copied and only packet timestamps are rescaled.
/// Output is written next to the working directory as "&lt;name&gt;_remuxing&lt;ext&gt;".
/// </summary>
/// <param name="inputFile">source media file</param>
public unsafe Remuxing(string inputFile)
{
    string outputFile = Path.GetFileNameWithoutExtension(inputFile) + "_remuxing" + Path.GetExtension(inputFile);
    using (MediaReader reader = new MediaReader(inputFile))
    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        // add stream with reader's codec_id
        for (int i = 0; i < reader.Count; i++)
        {
            writer.AddStream(reader[i], writer.Format.Flags);
        }
        writer.Initialize();

        // read and write packet
        foreach (var packet in reader.ReadPacket())
        {
            int index = packet.StreamIndex;
            // NEAR_INF + PASS_MINMAX matches ffmpeg's own remux example rounding
            AVRounding rounding = AVRounding.AV_ROUND_NEAR_INF | AVRounding.AV_ROUND_PASS_MINMAX;
            AVRational inTimeBase = reader[index].TimeBase;
            AVRational outTimeBase = writer[index].TimeBase;
            // rescale pts/dts/duration from the input stream's time base to the output's
            packet.Pts = ffmpeg.av_rescale_q_rnd(packet.Pts, inTimeBase, outTimeBase, rounding);
            packet.Dts = ffmpeg.av_rescale_q_rnd(packet.Dts, inTimeBase, outTimeBase, rounding);
            packet.Duration = ffmpeg.av_rescale_q(packet.Duration, inTimeBase, outTimeBase);
            packet.Pos = -1; // -1 = byte position unknown; the muxer fills it in
            writer.WritePacket(packet);
        }
        writer.FlushMuxer();
    }
}
/// <summary>
/// Yuv420P sample: encodes a 10-second clip of hand-filled YUV420P frames into
/// <paramref name="outputFile"/>, saving each converted frame as a .bmp as well.
/// </summary>
/// <param name="outputFile">output file</param>
/// <param name="width">video width</param>
/// <param name="height">video height</param>
/// <param name="fps">video fps</param>
public FillYuv420PSample(string outputFile, int width, int height, int fps)
{
    // a sibling directory named after the output file receives one .bmp per frame
    var dir = Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(outputFile), Path.GetFileNameWithoutExtension(outputFile))).FullName;
    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        writer.AddStream(MediaEncoder.CreateVideoEncode(writer.Format, width, height, fps));
        writer.Initialize();

        VideoFrame srcframe = new VideoFrame(width, height, FFmpeg.AutoGen.AVPixelFormat.AV_PIX_FMT_YUV420P);
        // converts the hand-filled YUV420P frame into the encoder's pixel format
        PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);

        // FIX: removed unused local `Random random` — nothing in this method used it.
        // fps * 10 frames => a 10 second clip
        for (int i = 0; i < fps * 10; i++)
        {
            // fill video frame
            FillYuv420P(srcframe, i);
            foreach (var dstframe in pixelConverter.Convert(srcframe))
            {
                dstframe.Pts = i;
                SaveFrame(dstframe, Path.Combine(dir, $"{i}.bmp"));
                foreach (var packet in writer[0].WriteFrame(dstframe))
                {
                    writer.WritePacket(packet);
                }
            }
        }
        // flush cache
        writer.FlushMuxer();
    }
}
/// <summary>
/// Encodes 1000 chunks of audio samples, produced from a Mat, into
/// <paramref name="output"/> as stereo 44.1 kHz audio.
/// </summary>
/// <param name="output">output audio file</param>
public EncodeAudioByMat(string output)
{
    using (MediaWriter writer = new MediaWriter(output))
    {
        writer.AddStream(MediaEncoder.CreateAudioEncode(writer.Format, 2, 44100));
        writer.Initialize();

        // dstFrame matches the encoder's sample format; the converter fills it
        AudioFrame dstFrame = AudioFrame.CreateFrameByCodec(writer[0].Codec);
        SampleConverter converter = new SampleConverter(dstFrame);
        using (Mat mat = CreateMat(writer[0].Codec.AVCodecContext.channels))
        {
            long pts = 0;
            for (int i = 0; i < 1000; i++)
            {
                foreach (var item in converter.Convert(mat.ToAudioFrame(dstSampleRate: writer[0].Codec.AVCodecContext.sample_rate)))
                {
                    // audio pts counts total samples written; it only increases
                    pts += item.NbSamples;
                    item.Pts = pts;
                    foreach (var packet in writer[0].WriteFrame(item))
                    {
                        writer.WritePacket(packet);
                    }
                }
            }
        }
        writer.FlushMuxer();
    }
}
/// <summary>
/// transcode audio
/// </summary>
/// <param name="input">input audio file</param>
/// <param name="output">output audio file</param>
/// <param name="outChannels">output audio file channels</param>
/// <param name="outSampleRate">output audio file sample rate</param>
public AudioTranscode(string input, string output, int outChannels = 2, int outSampleRate = 44100)
{
    using (MediaWriter writer = new MediaWriter(output))
    using (MediaReader reader = new MediaReader(input))
    {
        // first audio stream in the input
        int audioIndex = reader.First(_ => _.Codec.Type == AVMediaType.AVMEDIA_TYPE_AUDIO).Index;

        writer.AddStream(MediaEncoder.CreateAudioEncode(writer.Format, outChannels, outSampleRate));
        writer.Initialize();

        // dst matches the encoder's format; converter resamples decoded frames into it
        AudioFrame dst = AudioFrame.CreateFrameByCodec(writer[0].Codec);
        SampleConverter converter = new SampleConverter(dst);

        long pts = 0;
        foreach (var packet in reader.ReadPacket())
        {
            foreach (var srcframe in reader[audioIndex].ReadFrame(packet))
            {
                foreach (var dstframe in converter.Convert(srcframe))
                {
                    pts += dstframe.AVFrame.nb_samples;
                    dstframe.Pts = pts; // audio's pts is total samples, pts can only increase.
                    foreach (var outpacket in writer[0].WriteFrame(dstframe))
                    {
                        writer.WritePacket(outpacket);
                    }
                }
            }
        }
        writer.FlushMuxer();
    }
}
/// <summary>
/// Decodes the first video stream of <paramref name="inputFile"/> frame by frame
/// and re-encodes each frame into <paramref name="outputFile"/>.
/// </summary>
/// <param name="inputFile">source video file</param>
/// <param name="outputFile">destination video file</param>
public Video2Frame2Video(string inputFile, string outputFile)
{
    using (MediaReader reader = new MediaReader(inputFile))
    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        // locate the first video stream in the input
        var videoIndex = reader.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;

        // mirror the source stream's parameters on the writer
        writer.AddStream(reader[videoIndex]);
        writer.Initialize();

        // converts decoded frames into the destination codec's pixel format
        PixelConverter converter = new PixelConverter(writer.First().Codec);

        foreach (var srcPacket in reader.ReadPacket())
        {
            foreach (var decodedFrame in reader[videoIndex].ReadFrame(srcPacket))
            {
                foreach (var convertedFrame in converter.Convert(decodedFrame))
                {
                    foreach (var encodedPacket in writer[0].WriteFrame(convertedFrame))
                    {
                        writer.WritePacket(encodedPacket);
                    }
                }
            }
        }
        writer.FlushMuxer();
    }
}
/// <summary>
/// Disposes the writer, if any, and clears the reference so it is not reused.
/// </summary>
private void Clean()
{
    writer?.Dispose();
    writer = null;
}
/// <summary>
/// a red chromakey filter for .png image example.
/// <para>
/// ffmpeg -i <paramref name="input"/> -vf chromakey=red:0.1:0.0 <paramref name="output"/>
/// </para>
/// </summary>
/// <param name="input">input file containing a video stream</param>
/// <param name="output">output file</param>
public unsafe PngChromekeyFilter(string input, string output)
{
    using (MediaReader reader = new MediaReader(input))
    using (MediaWriter writer = new MediaWriter(output))
    {
        var videoIndex = reader.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;

        // init filter: the buffer source must match the decoder's geometry/format/time base
        int height = reader[videoIndex].Codec.AVCodecContext.height;
        int width = reader[videoIndex].Codec.AVCodecContext.width;
        int format = (int)reader[videoIndex].Codec.AVCodecContext.pix_fmt;
        AVRational time_base = reader[videoIndex].TimeBase;
        AVRational sample_aspect_ratio = reader[videoIndex].Codec.AVCodecContext.sample_aspect_ratio;

        // graph: buffer -> chromakey(red:0.1:0.0) -> buffersink
        MediaFilterGraph filterGraph = new MediaFilterGraph();
        filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width, height, (AVPixelFormat)format, time_base, sample_aspect_ratio).LinkTo(0, filterGraph.AddFilter(new MediaFilter("chromakey"), "red:0.1:0.0")).LinkTo(0, filterGraph.AddVideoSinkFilter(new MediaFilter(MediaFilter.VideoSinks.Buffersink)));
        filterGraph.Initialize();

        // add stream by reader and init writer
        writer.AddStream(reader[videoIndex]);
        writer.Initialize();

        // init video frame format converter by dstcodec
        PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);

        foreach (var srcPacket in reader.ReadPacket())
        {
            foreach (var srcFrame in reader[videoIndex].ReadFrame(srcPacket))
            {
                // push the decoded frame through the graph, then drain filtered frames
                filterGraph.Inputs.First().WriteFrame(srcFrame);
                foreach (var filterFrame in filterGraph.Outputs.First().ReadFrame())
                {
                    // can use filterFrame.ToMat() gets the output image directly without the need for a writer.
                    //using EmguFFmpeg.EmguCV;
                    //using (var mat = filterFrame.ToMat())
                    //{
                    //    mat.Save(output);
                    //}
                    foreach (var dstFrame in pixelConverter.Convert(filterFrame))
                    {
                        foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                        {
                            writer.WritePacket(dstPacket);
                        }
                    }
                }
            }
        }
        // flush codec cache
        writer.FlushMuxer();
    }
}
/// <summary>
/// Builds an uncompressed-RGB VideoInfoHeader/AMMediaType for the given frame
/// geometry and applies it to <c>MediaProperties</c>, then binds those input
/// properties to the writer's video channel.
/// </summary>
/// <param name="width">frame width in pixels</param>
/// <param name="height">frame height in pixels</param>
/// <param name="mediaSubType">media subtype GUID matching the bitmap pixel format</param>
/// <param name="bitCount">bits per pixel of the source bitmap</param>
private void ConfigureMediaWriter(int width, int height, Guid mediaSubType, short bitCount)
{
    AMMediaType mediaType = new AMMediaType();
    VideoInfoHeader videoInfo = new VideoInfoHeader();

    // Create the VideoInfoHeader using info from the bitmap
    videoInfo.BmiHeader.Size = Marshal.SizeOf(typeof(BitmapInfoHeader));
    videoInfo.BmiHeader.Width = width;
    videoInfo.BmiHeader.Height = height;
    videoInfo.BmiHeader.Planes = 1;

    // compression thru clrimportant don't seem to be used. Init them anyway
    videoInfo.BmiHeader.Compression = 0;
    videoInfo.BmiHeader.ImageSize = 0;
    videoInfo.BmiHeader.XPelsPerMeter = 0;
    videoInfo.BmiHeader.YPelsPerMeter = 0;
    videoInfo.BmiHeader.ClrUsed = 0;
    videoInfo.BmiHeader.ClrImportant = 0;

    mediaType.subType = mediaSubType;
    videoInfo.BmiHeader.BitCount = bitCount;
    videoInfo.SrcRect = new Rectangle(0, 0, width, height);
    videoInfo.TargetRect = videoInfo.SrcRect;
    // uncompressed frame size in bytes
    videoInfo.BmiHeader.ImageSize = width * height * (videoInfo.BmiHeader.BitCount / 8);
    videoInfo.BitRate = videoInfo.BmiHeader.ImageSize * Constants.VideoFrameRate;
    videoInfo.BitErrorRate = 0;
    // AvgTimePerFrame is in 100-nanosecond units
    videoInfo.AvgTimePerFrame = 10000 * 1000 / Constants.VideoFrameRate;
    mediaType.majorType = MediaType.Video;
    mediaType.fixedSizeSamples = true;
    mediaType.temporalCompression = false;
    mediaType.sampleSize = videoInfo.BmiHeader.ImageSize;
    mediaType.formatType = FormatType.VideoInfo;
    mediaType.unkPtr = IntPtr.Zero;
    mediaType.formatSize = Marshal.SizeOf(typeof(VideoInfoHeader));

    // Lock the videoInfo structure, and put the pointer
    // into the mediatype structure
    GCHandle handle = GCHandle.Alloc(videoInfo, GCHandleType.Pinned);
    try
    {
        // Set the inputprops using the structures
        mediaType.formatPtr = handle.AddrOfPinnedObject();
        MediaProperties.SetMediaType(mediaType);
    }
    finally
    {
        // unpin and clear the pointer so the dead address cannot be reused
        handle.Free();
        mediaType.formatPtr = IntPtr.Zero;
    }

    // Now take the inputprops, and set them on the file writer
    MediaWriter.SetInputProps(VideoChannelIndex, MediaProperties);
}
/// <summary>
/// Returns true when the uploaded file's bytes are recognized as a known media format.
/// </summary>
/// <param name="image">uploaded form file to inspect</param>
private bool CheckIfImageFile(IFormFile image)
{
    byte[] fileBytes;
    using (var buffer = new MemoryStream())
    {
        image.CopyTo(buffer);
        fileBytes = buffer.ToArray();
    }

    var detectedFormat = MediaWriter.GetMediaFormat(fileBytes);
    return detectedFormat != MediaWriter.MediaFormat.unknown;
}
/// <summary>
/// recording audio.
/// <para>
/// first set inputDeviceName = null, you will get inputDeviceName list in vs output,
/// </para>
/// <para>
/// then set inputDeviceName to your real device name and run again,you will get a audio output.
/// </para>
/// <para>
/// if you want stop record, exit console;
/// </para>
/// <para>ffmpeg </para>
/// </summary>
/// <param name="outputFile">destination audio file</param>
/// <param name="inputDeviceName">dshow audio device name; null to only list devices</param>
public RecordingAudio(string outputFile, string inputDeviceName = null)
{
    // console output
    FFmpegHelper.SetupLogging(logWrite: _ => Console.Write(_));
    // register all device
    FFmpegHelper.RegisterDevice();

    var dshowInput = new InFormat("dshow");
    // list all "dshow" device at console output, ffmpeg does not support direct reading of device names
    MediaDevice.PrintDeviceInfos(dshowInput, "list", MediaDevice.ListDevicesOptions);

    if (string.IsNullOrWhiteSpace(inputDeviceName))
    {
        return;
    }

    // get your audio input device name from console output
    // NOTE: DO NOT delete "audio="
    using (MediaReader reader = new MediaReader($"audio={inputDeviceName}", dshowInput))
    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        var stream = reader.Where(_ => _.Codec.Type == AVMediaType.AVMEDIA_TYPE_AUDIO).First();

        writer.AddStream(MediaEncode.CreateAudioEncode(writer.Format, stream.Codec.AVCodecContext.channels, stream.Codec.AVCodecContext.sample_rate));
        writer.Initialize();

        AudioFrame dstFrame = AudioFrame.CreateFrameByCodec(writer[0].Codec);
        SampleConverter converter = new SampleConverter(dstFrame);

        long pts = 0;
        foreach (var packet in reader.ReadPacket())
        {
            foreach (var frame in stream.ReadFrame(packet))
            {
                foreach (var dstframe in converter.Convert(frame))
                {
                    // FIX: use the yielded frame `dstframe` (not the captured `dstFrame`)
                    // so pts and data stay correct even if Convert yields more than one
                    // frame per input — matches the AudioTranscode sample.
                    pts += dstframe.AVFrame.nb_samples;
                    dstframe.Pts = pts;
                    foreach (var dstpacket in writer[0].WriteFrame(dstframe))
                    {
                        writer.WritePacket(dstpacket);
                    }
                }
            }
        }
        writer.FlushMuxer();
    }
}
/// <summary>
/// Minimal example: build an H.264 encoder and initialize a writer over an
/// in-memory stream, then dispose it immediately.
/// </summary>
static void Main(string[] args)
{
    // H.264, 100x100 YUV420P, 15 fps, 1080 kbit/s, GOP of 15
    var parameters = new VideoEncoderParameters
    {
        BitRate = BitRate.FromKBitPerSecond(1080),
        FrameRate = new Fraction(15),
        GopSize = 15,
        ResampleFlags = SwsFlags.FastBilinear
    };
    var format = new VideoFormat(100, 100, AVPixelFormat.Yuv420p, 16);
    var encoder = new VideoEncoder(AVCodecID.H264, format, parameters);

    var memory = new System.IO.MemoryStream();
    using (var writer = new MediaWriter(memory).AddEncoder(encoder).Initialize())
    {
    }
}
/// <summary>
/// Decodes the audio stream of an .m4a file and re-encodes it as FLAC,
/// printing encoder progress on the console.
/// </summary>
unsafe static void Main(string[] args)
{
    using (var reader = new MediaReader(@"D:\MyDocuments\Music\HEALTH\04 21世紀難民 feat. れをる(from REOL).m4a"))
    {
        var decoder = reader.Decoders.OfType<AudioDecoder>().First();
        var frame = new AudioFrame();
        // BitRate.Zero lets the encoder choose its own rate
        using (var writer = new MediaWriter(@"D:\test.flac").AddAudio(decoder.OutFormat, BitRate.Zero).Initialize())
        {
            var enc = writer.Encoders[0] as AudioEncoder;
            while (reader.NextFrame(frame, decoder.StreamIndex))
            {
                // FIX: removed unused local `pos` (reader.Position was read and discarded)
                writer.Write(frame);
                Console.Write($"\rframes: {enc.InputFrames}, time: {enc.InputTimestamp}");
            }
            writer.Flush();
            Console.WriteLine($"\rframes: {enc.InputFrames}, time: {enc.InputTimestamp}");
        }
    }
}
/// <summary>
/// Re-encodes an mp3 file to a new mp3 at 320 kbit/s, printing encoder
/// progress on the console.
/// </summary>
unsafe static void Main(string[] args)
{
    using (var reader = new MediaReader(@"D:\MyDocuments\Music\夕立のりぼん+inst(NoMastering)_island__201411091428.mp3"))
    {
        var decoder = reader.Decoders.OfType<AudioDecoder>().First();
        var frame = new AudioFrame();
        using (var writer = new MediaWriter(@"D:\MyDocuments\Music\夕立のりぼん+inst(NoMastering)_island__201411091428-output.mp3").AddAudio(decoder.OutFormat, BitRate._320Kbps).Initialize())
        {
            var enc = writer.Encoders[0] as AudioEncoder;
            while (reader.NextFrame(frame, decoder.StreamIndex))
            {
                // FIX: removed unused local `pos` (reader.Position was read and discarded)
                writer.Write(frame);
                Console.Write($"\rframes: {enc.InputFrames}, time: {enc.InputTimestamp}");
            }
            writer.Flush();
            Console.WriteLine($"\rframes: {enc.InputFrames}, time: {enc.InputTimestamp}");
        }
    }
}
// Test fixture setup: builds an Address projection, XML media document,
// self/change/delete link sources, and writes one Address to an in-memory
// output so the tests can assert on the rendered document.
public void SetUp()
{
    // project the four address fields; StateOrProvince is renamed to "State"
    var projection = new Projection<Address>(DisplayFormatting.RawValues);
    projection.Value(x => x.Address1);
    projection.Value(x => x.Address2);
    projection.Value(x => x.City);
    projection.Value(x => x.StateOrProvince).Name("State");

    theXmlMediaOptions = new XmlMediaOptions()
    {
        Root = "Address"
    };
    theDocument = new XmlMediaDocument(theXmlMediaOptions);

    var urls = new StubUrlRegistry();

    // links rendered alongside the document body
    var linkSource = new LinksSource<Address>();
    linkSource.ToSubject().Rel("self");
    linkSource.To(a => new AddressAction("change")).Rel("change");
    linkSource.To(a => new AddressAction("delete")).Rel("delete");

    theOutput = new InMemoryOutputWriter();
    var media = new MediaWriter<Address>(theDocument, linkSource, urls, projection, null, theOutput);

    theAddress = new Address()
    {
        Address1 = "22 Cherry Lane",
        Address2 = "Apt A",
        City = "Austin",
        StateOrProvince = "Texas"
    };
    media.Write("text/plain", theAddress);
    Debug.WriteLine(theOutput);
}
/// <summary>
/// Standard Dispose(bool) worker: finishes any in-progress write and releases
/// the COM references held by <c>MediaProperties</c> and <c>MediaWriter</c>.
/// Safe to call more than once (guarded by <c>disposed</c>).
/// </summary>
/// <param name="disposing">true when called from Dispose(); false from a finalizer</param>
internal void Dispose(bool disposing)
{
    if (!disposed)
    {
        if (disposing)
        {
            //No managed resources to expose.
        }

        #region Dispose MediaWriter
        if (MediaWriterIsWriting) //We are currently writing
        {
            if (MediaWriter != null)
            {
                // Close the file; best-effort — failures here must not abort disposal
                try
                {
                    MediaWriter.EndWriting();
                }
                catch
                {
                }
            }
            MediaWriterIsWriting = false;
        }
        if (MediaProperties != null)
        {
            Marshal.ReleaseComObject(MediaProperties);
            MediaProperties = null;
        }
        if (MediaWriter != null)
        {
            Marshal.ReleaseComObject(MediaWriter);
            MediaWriter = null;
        }
        #endregion

        disposed = true;
    }
}
/// <summary>
/// Prepares the encoding pipeline: opens the storyboard's audio (if any),
/// creates the video encoder from <paramref name="option"/>, initializes the
/// writer, and starts the audio-encoding thread when an audio stream exists.
/// </summary>
/// <param name="option">encoder settings (size, fps, bitrate, codec, output path)</param>
public override void OnStart(EncoderOption option)
{
    this.option = option;

    var audio_file = StoryboardInstanceManager.ActivityInstance?.Info?.audio_file_path;
    audio_reader = new MediaReader(audio_file);
    // FirstOrDefault: a storyboard without audio leaves audio_decoder null
    audio_decoder = audio_reader.Decoders.OfType<AudioDecoder>().FirstOrDefault();

    #region Video Init
    var video_format = new VideoFormat(option.Width, option.Height, AVPixelFormat.Bgr24);
    var video_param = new VideoEncoderParameters() { FrameRate = new Fraction(option.FPS), BitRate = option.BitRate };
    video_encoder = new VideoEncoder(option.EncoderName, video_format, video_param);
    #endregion Video Init

    writer = new MediaWriter(option.OutputPath, false).AddEncoder(video_encoder);

    if (audio_decoder != null)
    {
        audio_encoder = new AudioEncoder(audio_decoder.ID, audio_decoder.OutFormat, BitRate._192Kbps);
        writer.AddEncoder(audio_encoder);
    }

    writer.Initialize();

    Log.User($"Format :{video_format.ToString()}\nVideo Encoder :{video_encoder.ToString()}");

    video_frame = new VideoFrame(video_format);

    // FIX: the audio frame and audio thread were previously set up unconditionally,
    // which threw NullReferenceException (audio_decoder.OutFormat) when the
    // storyboard had no audio stream. Guard the whole audio-side setup instead.
    if (audio_decoder != null)
    {
        audio_frame = new AudioFrame(audio_decoder.OutFormat);

        audio_encoding_thread = new Thread(AudioEncoding);
        audio_encoding_thread.Name = "Audio Encoder Thread";
        audio_encoding_thread.Start();
    }
}
/// <summary>
/// Writes a test .mkv containing a 440 Hz sine tone (stereo float planar) and a
/// blank 1280x720 BGR24 video track, using the writer's pull-callback API.
/// </summary>
unsafe static void Main(string[] args)
{
    var audioFormat = new AudioFormat(44100, AVChannelLayout.LayoutStereo, AVSampleFormat.FloatPlanar);
    var videoFormat = new VideoFormat(1280, 720, AVPixelFormat.Bgr24);
    using (var writer = new MediaWriter(@"Z:\test.mkv").AddVideo(videoFormat, new VideoEncoderParameters { FrameRate = new Fraction(25) }).AddAudio(audioFormat).Initialize())
    {
        var aframe = new AudioFrame(audioFormat);
        var vframe = new VideoFrame(videoFormat);
        float[] left = new float[1024]; // one block of samples, reused for both channels
        byte[] bitmap = new byte[videoFormat.Bytes]; // all-zero pixels => black frame
        vframe.Update(bitmap);
        for (int j = 0; j < 1000; j++)
        {
            // the writer asks which encoder needs data; return the matching frame
            writer.Write(encoder =>
            {
                if (encoder is AudioEncoder)
                {
                    int samples = left.Length;
                    // InputFrames = samples already consumed; keeps the sine phase continuous
                    long offset = encoder.InputFrames;
                    for (int i = 0; i < samples; i++)
                    {
                        float value = (float)Math.Sin(440d / 44100 * Math.PI * 2 * (i + offset));
                        left[i] = value;
                    }
                    // same buffer for both planar channels
                    aframe.Update<float>(samples, left, left);
                    return (aframe);
                }
                else
                {
                    Console.WriteLine($"{encoder.FullName} ---> ({encoder.InputFrames}) {encoder.InputTimestamp}");
                    return (vframe);
                }
            });
        }
    }
}
// Test fixture setup: builds an Address projection, XML media document,
// self/change/delete link sources, and writes one Address to an in-memory
// output so the tests can assert on the rendered document.
public void SetUp()
{
    // project the four address fields; StateOrProvince is renamed to "State"
    var projection = new Projection<Address>(DisplayFormatting.RawValues);
    projection.Value(x => x.Address1);
    projection.Value(x => x.Address2);
    projection.Value(x => x.City);
    projection.Value(x => x.StateOrProvince).Name("State");

    theXmlMediaOptions = new XmlMediaOptions(){
        Root = "Address"
    };
    theDocument = new XmlMediaDocument(theXmlMediaOptions);

    var urls = new StubUrlRegistry();

    // links rendered alongside the document body
    var linkSource = new LinksSource<Address>();
    linkSource.ToSubject().Rel("self");
    linkSource.To(a => new AddressAction("change")).Rel("change");
    linkSource.To(a => new AddressAction("delete")).Rel("delete");

    theOutput = new InMemoryOutputWriter();
    var media = new MediaWriter<Address>(theDocument, linkSource, urls, projection, null, theOutput);

    theAddress = new Address(){
        Address1 = "22 Cherry Lane",
        Address2 = "Apt A",
        City = "Austin",
        StateOrProvince = "Texas"
    };
    media.Write("text/plain", theAddress);
    Debug.WriteLine(theOutput);
}
/// <summary>
/// create a 60 / <paramref name="fps"/> second video
/// </summary>
/// <param name="outputFile">video output</param>
/// <param name="width">video width</param>
/// <param name="height">video height</param>
/// <param name="fps">video fps</param>
public EncodeVideoByMat(string outputFile, int width, int height, int fps)
{
    using (MediaWriter writer = new MediaWriter(outputFile))
    {
        writer.AddStream(MediaEncoder.CreateVideoEncode(writer.Format, width, height, fps));
        writer.Initialize();

        VideoFrame dstframe = VideoFrame.CreateFrameByCodec(writer[0].Codec);
        Random random = new Random();
        // 61 frames total (0..60)
        for (int i = 0; i < 61; i++)
        {
            // create a video frame by Mat: solid random color with two text overlays
            byte b = (byte)random.Next(0, 255);
            byte g = (byte)random.Next(0, 255);
            byte r = (byte)random.Next(0, 255);
            using (Image<Bgr, byte> image = new Image<Bgr, byte>(width, height, new Bgr(b, g, r)))
            {
                string line1 = $"pts = {i}, color = [{b,3},{g,3},{r,3}]";
                string line2 = $"time = {DateTime.Now:HH:mm:ss.fff}";
                // inverse color keeps the text readable on any background
                image.Draw(line1, new System.Drawing.Point(30, 50), Emgu.CV.CvEnum.FontFace.HersheyDuplex, 1, new Bgr(255 - b, 255 - g, 255 - r));
                image.Draw(line2, new System.Drawing.Point(30, 100), Emgu.CV.CvEnum.FontFace.HersheyDuplex, 1, new Bgr(255 - b, 255 - g, 255 - r));
                dstframe = image.Mat.ToVideoFrame(AVPixelFormat.AV_PIX_FMT_YUV420P);
            }
            dstframe.Pts = i;
            // video pts = seconds from video start * fps (pts can only increase).
            // write video frame, many cases: one frame more out packet, first frame no out packet, etc.
            // so use IEnumerable.
            foreach (var packet in writer[0].WriteFrame(dstframe))
            {
                writer.WritePacket(packet);
            }
        }
        // flush cache
        writer.FlushMuxer();
    }
}
/// <summary>
/// Form load: creates the media writer, populates drive/speed lists, and seeds
/// the UI defaults from the current burner drive.
/// </summary>
private void MainForm_Load(object sender, EventArgs e)
{
    // FIX: the MediaWriter construction was wrapped in an empty catch that
    // swallowed the exception, after which gLMediaWriter.CurrentDrive threw a
    // misleading NullReferenceException. Let the original failure propagate so
    // the real cause is visible at the point it happened.
    gLMediaWriter = new MediaWriter();

    progressBar1.Minimum = 0;
    progressBar1.Maximum = 10000;
    BuildDriveList();
    BuildWriteSpeedList();
    gBurnerDrive = gLMediaWriter.CurrentDrive;
    _txtVolumeName.Text = "LEAD-IMAGES";
    _txtInputPath.Text = @"C:\InputFiles";
    _chkAutoEject.Checked = gBurnerDrive.AutoEject;
    _chkReserveCDTrackOnWriting.Checked = gBurnerDrive.ReserveCDTrackOnWriting;
    EnableCtrls();
}
/// <summary>
/// Creates an MP4 writer that muxes into the caller-supplied stream.
/// </summary>
/// <param name="output">destination stream; ownership stays with the caller</param>
public Mp4VideoWriter(Stream output)
{
    var mp4Container = OutFormat.Get("mp4");
    writer = new MediaWriter(output, mp4Container);
}
/// <summary>
/// Make the specified color of <paramref name="input0"/> transparent and overlay it on the <paramref name="input1"/> video to <paramref name="output"/>
/// <para>
/// NOTE: green [R:0 G:128 B:0]
/// </para>
/// <para>
/// ffmpeg -i <paramref name="input0"/> -i <paramref name="input1"/> -filter_complex "[1:v]chromakey=green:0.1:0.0[ckout];[0:v][ckout]overlay[out]" -map "[out]" <paramref name="output"/>
/// </para>
/// filter graph:
/// ┌──────┐     ┌──────┐     ┌─────────┐     ┌─────────┐
/// │input0│----&gt;│buffer│----&gt;│chromakey│----&gt;│         │
/// └──────┘     └──────┘     └─────────┘     │         │     ┌──────────┐     ┌──────┐
///                                           │ overlay │----&gt;│buffersink│----&gt;│output│
/// ┌──────┐     ┌──────┐                     │         │     └──────────┘     └──────┘
/// │input1│----&gt;│buffer│--------------------&gt;│         │
/// └──────┘     └──────┘                     └─────────┘
/// </summary>
/// <param name="input0">foreground</param>
/// <param name="input1">background</param>
/// <param name="output">output</param>
/// <param name="chromakeyOptions">rgb(green or 0x008000):similarity:blend, see http://ffmpeg.org/ffmpeg-filters.html#chromakey </param>
public VideoChromekeyFilter(string input0, string input1, string output, string chromakeyOptions = "green:0.1:0.0")
{
    using (MediaReader reader0 = new MediaReader(input0))
    using (MediaReader reader1 = new MediaReader(input1))
    using (MediaWriter writer = new MediaWriter(output))
    {
        var videoIndex0 = reader0.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;
        var videoIndex1 = reader1.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;

        // init complex filter graph: each buffer source must match its decoder
        int height0 = reader0[videoIndex0].Codec.AVCodecContext.height;
        int width0 = reader0[videoIndex0].Codec.AVCodecContext.width;
        int format0 = (int)reader0[videoIndex0].Codec.AVCodecContext.pix_fmt;
        AVRational time_base0 = reader0[videoIndex0].TimeBase;
        AVRational sample_aspect_ratio0 = reader0[videoIndex0].Codec.AVCodecContext.sample_aspect_ratio;
        int height1 = reader1[videoIndex1].Codec.AVCodecContext.height;
        int width1 = reader1[videoIndex1].Codec.AVCodecContext.width;
        int format1 = (int)reader1[videoIndex1].Codec.AVCodecContext.pix_fmt;
        AVRational time_base1 = reader1[videoIndex1].TimeBase;
        AVRational sample_aspect_ratio1 = reader1[videoIndex1].Codec.AVCodecContext.sample_aspect_ratio;

        MediaFilterGraph filterGraph = new MediaFilterGraph();
        var in0 = filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width0, height0, (AVPixelFormat)format0, time_base0, sample_aspect_ratio0);
        var in1 = filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width1, height1, (AVPixelFormat)format1, time_base1, sample_aspect_ratio1);
        var chromakey = filterGraph.AddFilter(new MediaFilter("chromakey"), chromakeyOptions);
        var overlay = filterGraph.AddFilter(new MediaFilter("overlay"));
        var out0 = filterGraph.AddVideoSinkFilter(new MediaFilter(MediaFilter.VideoSinks.Buffersink));
        // input0 is keyed and becomes the overlay's top layer (pad 1);
        // input1 feeds the overlay's base layer (pad 0)
        in0.LinkTo(0, chromakey, 0).LinkTo(0, overlay, 1).LinkTo(0, out0, 0);
        in1.LinkTo(0, overlay, 0);
        filterGraph.Initialize();

        // add stream by reader and init writer
        writer.AddStream(reader0[videoIndex0]);
        writer.Initialize();

        // init video frame format converter by dstcodec
        PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);

        long pts = 0;
        MediaReader[] readers = new MediaReader[] { reader0, reader1 };
        int[] index = new int[] { videoIndex0, videoIndex1 };
        // NOTE(review): the two inputs are drained sequentially rather than
        // interleaved; the overlay filter buffers until both pads have frames.
        for (int i = 0; i < readers.Length; i++)
        {
            var reader = readers[i];
            foreach (var srcPacket in reader.ReadPacket())
            {
                foreach (var srcFrame in reader[index[i]].ReadFrame(srcPacket))
                {
                    filterGraph.Inputs[i].WriteFrame(srcFrame);
                    foreach (var filterFrame in filterGraph.Outputs.First().ReadFrame())
                    {
                        foreach (var dstFrame in pixelConverter.Convert(filterFrame))
                        {
                            dstFrame.Pts = pts++;
                            foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                            {
                                writer.WritePacket(dstPacket);
                            }
                        }
                    }
                }
            }
        }
        // flush codec cache
        writer.FlushMuxer();
    }
}
/// <summary>
/// Creates and configures the Windows Media writer: loads the profile that
/// matches <paramref name="videoQuality"/>, finds the first video input
/// channel, and sets the output file name.
/// </summary>
/// <param name="destinationFileName">path of the file the writer will produce</param>
/// <param name="videoQuality">which embedded profile resource to load</param>
/// <returns>true when configuration completed</returns>
/// <exception cref="ArgumentOutOfRangeException">unsupported <paramref name="videoQuality"/></exception>
/// <exception cref="Exception">the loaded profile has no video input channel</exception>
private bool Initialize(string destinationFileName, VideoQuality videoQuality)
{
    IWMProfileManager profileManagerTemp = null;
    IWMProfile profile = null;
    int inputCount = 0;
    Guid inputTypeId;
    bool success = false;

    // FIX: removed the surrounding `try { ... } catch { throw; }` — a bare
    // rethrow with no handling is a no-op and only obscured the control flow.

    #region Initialize Properties
    CurrentSampleIndex = 0;
    CurrentSampleTimeStamp = 0;
    VideoChannelIndex = -1;
    MediaWriterConfigured = false;
    MediaWriterIsWriting = false;
    #endregion

    #region Create ProfileManager
    WMUtils.WMCreateProfileManager(out profileManagerTemp);
    IWMProfileManager2 profileManager = (IWMProfileManager2)profileManagerTemp;
    #endregion

    #region Configure ProfileManager
    profileManager.SetSystemProfileVersion(WMVersion.V8_0);
    #endregion

    #region Load Profile
    switch (videoQuality)
    {
        case VideoQuality.Kbps128:
            profileManager.LoadProfileByData(Wilke.Interactive.Drone.Control.Properties.Resources.VideoProfile128kbps, out profile);
            break;

        case VideoQuality.Kbps256:
            profileManager.LoadProfileByData(Wilke.Interactive.Drone.Control.Properties.Resources.VideoProfile256kbps, out profile);
            break;

        default:
            // FIX: previously an unhandled quality value left `profile` null and
            // crashed later inside SetProfile with an opaque COM error.
            throw new ArgumentOutOfRangeException("videoQuality", "Unsupported video quality: " + videoQuality);
    }
    #endregion

    #region Create Writer
    WMUtils.WMCreateWriter(IntPtr.Zero, out mediaWriter);
    #endregion

    #region Configure Writer
    MediaWriter.SetProfile(profile);
    MediaWriter.GetInputCount(out inputCount);

    // Find the first video input on the writer
    for (int index = 0; index < inputCount; index++)
    {
        // Get the properties of channel #index
        MediaWriter.GetInputProps(index, out mediaProperties);

        // Read the type of the channel
        MediaProperties.GetType(out inputTypeId);

        // If it is video, we are done
        if (inputTypeId == MediaType.Video)
        {
            VideoChannelIndex = index;
            break;
        }
    }

    // Didn't find a video channel
    if (VideoChannelIndex == -1)
    {
        throw new Exception("Profile does not accept video input");
    }

    MediaWriter.SetOutputFilename(destinationFileName);
    #endregion

    success = true;
    return (success);
}
/// <summary>
/// Creates an MP4 writer that muxes into the given output file path.
/// </summary>
/// <param name="output">destination file path</param>
public Mp4VideoWriter(string output)
{
    var mp4Container = new OutFormat("mp4");
    writer = new MediaWriter(output, mp4Container);
}
/// <summary>
/// Renders a scrolling FFT spectrogram (left channel top half, right channel
/// bottom half) of an mp3 into an .mkv, re-encoding the audio alongside it.
/// </summary>
unsafe static void Main(string[] args)
{
    const double MinimumFrequency = 10;
    const double MaximumFrequency = 20000;
    const double MaxDB = 65;
    const int fftSize = 4192 * 6;

    var reader = new MediaReader(@"D:\CloudMusic\MAN WITH A MISSION - My Hero.mp3");
    var decoder = reader.Decoders.OfType<AudioDecoder>().First();
    var videoFormat = new VideoFormat(1280, 720, AVPixelFormat.Rgb0);
    var writer = new MediaWriter(@"D:\CloudMusic\MAN WITH A MISSION - My Hero-fft.mkv")
        .AddEncoder(new VideoEncoder(AVCodecID.H264, videoFormat, new VideoEncoderParameters { FrameRate = new Fraction(30), GopSize = 10 }))
        .AddEncoder(new AudioEncoder(AVCodecID.Mp3, decoder.InFormat))
        //.AddVideo(videoFormat, new VideoEncoderParameters { FrameRate = new Fraction(30), GopSize = 10 })
        //.AddAudio(decoder.InFormat)
        .Initialize();
    int sampleRate = decoder.InFormat.SampleRate;
    int channels = decoder.InFormat.Channels;
    // resample decoded audio to 64-bit samples for the FFT pipeline
    var resampler = new AudioResampler(decoder.InFormat, new AudioFormat(sampleRate, channels, 64));
    var inFrame = new AudioFrame();
    var outFrame = new AudioFrame();
    var image = new VideoFrame(videoFormat);
    var viewHeight = videoFormat.Height / 2; // one channel per half of the image
    var observer = new StreamObserver<double>(fftSize * 2, fftSize / 6, 2);
    var fft = DoubleFFT.Create(fftSize);
    var inFFT = fft.AllocInput();
    var outFFT = fft.AllocOutput();
    var cutLength = FFTTools.CutFrequencyLength(fftSize, MinimumFrequency, MaximumFrequency, sampleRate, fft.FFTComplexCount);
    // unmanaged scratch buffers for the FFT post-processing stages
    var cutFFT = Marshal.AllocHGlobal(cutLength * sizeof(double));
    var outFFT2 = Marshal.AllocHGlobal(fft.FFTComplexCount * sizeof(double));
    var outFFTFinal = Marshal.AllocHGlobal(viewHeight * sizeof(double));
    var window = new BlackmanHarrisWindow(fftSize);
    var log = new Spectrum3DLog();

    // window -> FFT -> magnitude -> frequency cut -> log scale -> dB -> normalize to [0,1]
    void FFT()
    {
        window.Apply((double*)inFFT, (double*)inFFT);
        fft.Execute(inFFT, outFFT);
        FFTTools.Abs(fftSize, (double*)outFFT, (double*)outFFT2);
        FFTTools.CutFrequency(fftSize, (double*)outFFT2, fft.FFTComplexCount, MinimumFrequency, MaximumFrequency, sampleRate, (double*)cutFFT, cutLength);
        FFTTools.Logarithm((double*)cutFFT, cutLength, MinimumFrequency, MaximumFrequency, (double*)outFFTFinal, viewHeight, log);
        FFTTools.ToDB((double*)outFFTFinal, viewHeight, MaxDB);
        FFTTools.Scale((double*)outFFTFinal, viewHeight, 1 / MaxDB);
    }

    // scrolls every pixel row one column to the left, freeing the rightmost column
    void LeftShiftImage()
    {
        int w = image.Format.Width - 1;
        int h = image.Format.Height;
        for (int y = 0; y < h; y++)
        {
            var p = (uint*)(image.Data[0] + image.Format.Strides[0] * y);
            for (int x = 0; x < w; x++)
            {
                p[x] = p[x + 1];
            }
        }
    }

    // fired each time the observer has accumulated a full FFT window of
    // interleaved stereo samples; paints one new column of the spectrogram
    observer.Completed += data =>
    {
        LeftShiftImage();
        int w = image.Format.Width - 1;
        int h = image.Format.Height;
        var p = (byte*)((uint*)image.Data[0] + w); // rightmost column
        var stride = image.Format.Strides[0];
        // de-interleave the left channel (even indices) and paint the top half
        for (int i = 0; i < fftSize; i++)
        {
            ((double*)inFFT)[i] = ((double*)data)[2 * i];
        }
        FFT();
        for (int y = 0; y < viewHeight; y++, p += stride)
        {
            var val = ((double*)outFFTFinal)[viewHeight - y - 1] * 256;
            if (val < 0)
            {
                val = 0;
            }
            else if (val > 255)
            {
                val = 255;
            }
            p[0] = p[1] = p[2] = (byte)val; // grayscale pixel
        }
        // right channel (odd indices) fills the bottom half
        for (int i = 0; i < fftSize; i++)
        {
            ((double*)inFFT)[i] = ((double*)data)[2 * i + 1];
        }
        FFT();
        for (int y = 0; y < viewHeight; y++, p += stride)
        {
            var val = ((double*)outFFTFinal)[viewHeight - y - 1] * 256;
            if (val < 0)
            {
                val = 0;
            }
            else if (val > 255)
            {
                val = 255;
            }
            p[0] = p[1] = p[2] = (byte)val;
        }
    };

    bool end = false, run = true;
    while (run)
    {
        // the writer pulls a frame for whichever encoder needs data next
        writer.Write(encoder =>
        {
            switch (encoder)
            {
                case AudioEncoder audioEncoder:
                    Console.Write($"\r{audioEncoder.InputTimestamp}");
                    if (reader.NextFrame(inFrame, decoder.StreamIndex))
                    {
                        resampler.Resample(inFrame, outFrame);
                        observer.Write(outFrame.Data[0], outFrame.SampleCount * channels);
                        return (inFrame);
                    }
                    else
                    {
                        // input exhausted: drain the resampler, then signal the end
                        resampler.ResampleFinal(outFrame);
                        observer.Write(outFrame.Data[0], outFrame.SampleCount * channels);
                        end = true;
                        Console.WriteLine($"\r{audioEncoder.InputTimestamp}");
                        return (null);
                    }

                case VideoEncoder videoEncoder:
                    if (end)
                    {
                        run = false;
                    }
                    return (image);

                default:
                    throw new NotImplementedException();
            }
        });
    }
    writer.Dispose();
}
/// <summary>
/// Appends one bitmap as a video sample. Reconfigures the writer (and begins
/// the writing session) whenever the incoming bitmap width differs from the
/// last configured bounds, then copies the pixels into a WM sample and writes
/// it with a small retry loop.
/// </summary>
/// <param name="bitmap">frame to append; expected to keep the same size between reconfigurations</param>
internal void AddFrame(WriteableBitmap bitmap)
{
    INSSBuffer sampleBuffer = null;

    // reconfigure when the frame size changes (also covers the very first frame)
    if (BitmapBounds.Width != bitmap.Width)
    {
        Guid mediaSubType;
        short bitCount;

        BitmapBounds = new Rectangle(0, 0, (int)bitmap.Width, (int)bitmap.Height);
        GetMediaType(bitmap.Format, out mediaSubType, out bitCount);
        ConfigureMediaWriter(BitmapBounds.Width, BitmapBounds.Height, mediaSubType, bitCount);
        MediaWriterConfigured = true;
        if (!MediaWriterIsWriting)
        {
            MediaWriter.BeginWriting();
            MediaWriterIsWriting = true;
        }
    }

    try
    {
        // Compute size of bitmap in bytes. Strides may be negative.
        int bitmapSize = Math.Abs(bitmap.BackBufferStride * (int)bitmap.Height);
        IntPtr framePointer;

        // Get a sample interface
        MediaWriter.AllocateSample(bitmapSize, out sampleBuffer);

        // Get the buffer from the sample interface. This is
        // where we copy the bitmap data to
        sampleBuffer.GetBuffer(out framePointer);

        // Copy the bitmap data into the sample buffer
        CopyFrame(bitmap.BackBuffer, framePointer, bitmapSize);

        // Write the sample to the output - Sometimes, for reasons I can't explain,
        // writing a sample fails. However, writing the same sample again
        // works. Go figure.
        int iRetry = 0;
        do
        {
            try
            {
                // timestamp is in 100-ns units (ms * 10000)
                MediaWriter.WriteSample(VideoChannelIndex, 10000 * CurrentSampleTimeStamp, SampleFlag.CleanPoint, sampleBuffer);
                break;
            }
            catch (COMException e)
            {
                // retry up to 3 times, but never on E_INVALID_DATA
                if ((iRetry++ < 3) && (e.ErrorCode != NSResults.E_INVALID_DATA))
                {
                    continue;
                }
                else
                {
                    throw;
                }
            }
        } while (true);

        // update the time based on the framerate (milliseconds since start)
        CurrentSampleTimeStamp = (++CurrentSampleIndex * 1000) / Constants.VideoFrameRate;
    }
    finally
    {
        // Release the locals
        if (sampleBuffer != null)
        {
            Marshal.ReleaseComObject(sampleBuffer);
            sampleBuffer = null;
        }
    }
}