Example #1
        public Video2Frame2Video(string inputFile, string outputFile)
        {
            using (MediaReader reader = new MediaReader(inputFile))
                using (MediaWriter writer = new MediaWriter(outputFile))
                {
                    var videoIndex = reader.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;
                    writer.AddStream(reader[videoIndex]);
                    writer.Initialize();

                    PixelConverter pixelConverter = new PixelConverter(writer.First().Codec);

                    foreach (var packet in reader.ReadPacket())
                    {
                        foreach (var frame in reader[videoIndex].ReadFrame(packet))
                        {
                            foreach (var dstFrame in pixelConverter.Convert(frame))
                            {
                                foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                                {
                                    writer.WritePacket(dstPacket);
                                }
                            }
                        }
                    }
                    writer.FlushMuxer();
                }
        }
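A minimal usage sketch for the class above (file names are placeholders, assuming the EmguFFmpeg-style API shown in these examples):

        // Decode input.mp4, convert each frame to the encoder's pixel format, and re-encode to output.mp4.
        new Video2Frame2Video("input.mp4", "output.mp4");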
Example #2
        public async Task CS_W_MediaReader_SaveAllFrameAsJpeg()
        {
            var folder = await KnownFolders.PicturesLibrary.CreateFolderAsync("MediaCaptureReaderTests", CreationCollisionOption.OpenIfExists);

            folder = await folder.CreateFolderAsync("CS_W_MediaReader_SaveAllFrameAsJpeg", CreationCollisionOption.ReplaceExisting);

            using (var reader = await MediaReader.CreateFromPathAsync("ms-appx:///car.mp4"))
            {
                while (true)
                {
                    using (var result = await reader.VideoStream.ReadAsync())
                    {
                        if (result.EndOfStream || result.Error)
                        {
                            break;
                        }

                        var sample = (MediaSample2D)result.Sample;
                        if (sample == null)
                        {
                            continue;
                        }

                        var file = await folder.CreateFileAsync(((int)sample.Timestamp.TotalMilliseconds).ToString("D6") + ".jpg");

                        await sample.SaveToFileAsync(file, ImageCompression.Jpeg);
                    }
                }
            }
        }
Example #3
        /// <summary>
        /// transcode audio
        /// </summary>
        /// <param name="input">input audio file</param>
        /// <param name="output">output audio file</param>
        /// <param name="outChannels">output audio file channels</param>
        /// <param name="outSampleRate">output audio file sample rate</param>
        public AudioTranscode(string input, string output, int outChannels = 2, int outSampleRate = 44100)
        {
            using (MediaWriter writer = new MediaWriter(output))
                using (MediaReader reader = new MediaReader(input))
                {
                    int audioIndex = reader.First(_ => _.Codec.Type == AVMediaType.AVMEDIA_TYPE_AUDIO).Index;

                    writer.AddStream(MediaEncoder.CreateAudioEncode(writer.Format, outChannels, outSampleRate));
                    writer.Initialize();

                    AudioFrame      dst       = AudioFrame.CreateFrameByCodec(writer[0].Codec);
                    SampleConverter converter = new SampleConverter(dst);
                    long            pts       = 0;
                    foreach (var packet in reader.ReadPacket())
                    {
                        foreach (var srcframe in reader[audioIndex].ReadFrame(packet))
                        {
                            foreach (var dstframe in converter.Convert(srcframe))
                            {
                                pts         += dstframe.AVFrame.nb_samples;
                                dstframe.Pts = pts; // audio pts is the running total of samples and must only increase
                                foreach (var outpacket in writer[0].WriteFrame(dstframe))
                                {
                                    writer.WritePacket(outpacket);
                                }
                            }
                        }
                    }
                    writer.FlushMuxer();
                }
        }
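A hedged usage sketch for the transcoder above (paths are placeholders):

        // Transcode any readable audio file to stereo 44.1 kHz; the container is implied by the output extension.
        new AudioTranscode("input.mp3", "output.aac", outChannels: 2, outSampleRate: 44100);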
Example #4
        public unsafe Remuxing(string inputFile)
        {
            string outputFile = Path.GetFileNameWithoutExtension(inputFile) + "_remuxing" + Path.GetExtension(inputFile);

            using (MediaReader reader = new MediaReader(inputFile))
                using (MediaWriter writer = new MediaWriter(outputFile))
                {
                    // add stream with reader's codec_id
                    for (int i = 0; i < reader.Count; i++)
                    {
                        writer.AddStream(reader[i], writer.Format.Flags);
                    }
                    writer.Initialize();

                    // read and write packet
                    foreach (var packet in reader.ReadPacket())
                    {
                        int        index       = packet.StreamIndex;
                        AVRounding rounding    = AVRounding.AV_ROUND_NEAR_INF | AVRounding.AV_ROUND_PASS_MINMAX;
                        AVRational inTimeBase  = reader[index].TimeBase;
                        AVRational outTimeBase = writer[index].TimeBase;
                        packet.Pts      = ffmpeg.av_rescale_q_rnd(packet.Pts, inTimeBase, outTimeBase, rounding);
                        packet.Dts      = ffmpeg.av_rescale_q_rnd(packet.Dts, inTimeBase, outTimeBase, rounding);
                        packet.Duration = ffmpeg.av_rescale_q(packet.Duration, inTimeBase, outTimeBase);
                        packet.Pos      = -1;
                        writer.WritePacket(packet);
                    }
                    writer.FlushMuxer();
                }
        }
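The rescaling above is plain timebase arithmetic. As a worked example (values chosen for illustration only), a pts of 3000 in a 1/1000 timebase is 3 seconds, which becomes 270000 in a 1/90000 timebase; av_rescale_q computes exactly this, with overflow-safe rounding:

        // Illustration of av_rescale_q(3000, 1/1000, 1/90000):
        long pts      = 3000;
        long rescaled = pts * 90000L / 1000L; // 270000, i.e. the same 3 seconds in the new timebase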
Example #5
        public async Task CS_W_MediaReader_ZXing()
        {
            var barcodeReader = new BarcodeReader
            {
                PossibleFormats = new BarcodeFormat[] { BarcodeFormat.QR_CODE }
            };

            using (var mediaReader = await MediaReader.CreateFromPathAsync("ms-appx:///QR_12345678.mp4", AudioInitialization.Deselected, VideoInitialization.Bgra8))
                using (var mediaResult = await mediaReader.VideoStream.ReadAsync())
                {
                    var sample = (MediaSample2D)mediaResult.Sample;
                    Assert.AreEqual(MediaSample2DFormat.Bgra8, sample.Format);
                    Assert.AreEqual(320, sample.Width);
                    Assert.AreEqual(180, sample.Height);

                    using (var buffer = sample.LockBuffer(BufferAccessMode.Read))
                    {
                        var barcodeResult = barcodeReader.Decode(
                            buffer.Planes[0].Buffer.ToArray(),
                            buffer.Width,
                            buffer.Height,
                            BitmapFormat.BGR32
                            );

                        Assert.IsNotNull(barcodeResult);
                        Assert.AreEqual("12345678", barcodeResult.Text);
                    }
                }
        }
Example #6
        public async Task CS_W_MediaReader_LumiaEffect()
        {
            using (var mediaReader = await MediaReader.CreateFromPathAsync("ms-appx:///car.mp4", AudioInitialization.Deselected, VideoInitialization.Nv12))
                using (var mediaResult = await mediaReader.VideoStream.ReadAsync())
                {
                    var streamProperties = mediaReader.VideoStream.GetCurrentStreamProperties();
                    int width            = (int)streamProperties.Width;
                    int height           = (int)streamProperties.Height;
                    Assert.AreEqual(320, width);
                    Assert.AreEqual(240, height);

                    var inputSample = (MediaSample2D)mediaResult.Sample;
                    Assert.AreEqual(MediaSample2DFormat.Nv12, inputSample.Format);
                    Assert.AreEqual(320, inputSample.Width);
                    Assert.AreEqual(240, inputSample.Height);

                    using (var outputSample = new MediaSample2D(MediaSample2DFormat.Nv12, width, height))
                    {
                        Assert.AreEqual(MediaSample2DFormat.Nv12, outputSample.Format);
                        Assert.AreEqual(320, outputSample.Width);
                        Assert.AreEqual(240, outputSample.Height);

                        using (var inputBuffer = inputSample.LockBuffer(BufferAccessMode.Read))
                            using (var outputBuffer = outputSample.LockBuffer(BufferAccessMode.Write))
                            {
                                // Wrap MediaBuffer2D in Bitmap
                                var inputBitmap = new Bitmap(
                                    new Size(width, height),
                                    ColorMode.Yuv420Sp,
                                    new uint[] { inputBuffer.Planes[0].Pitch, inputBuffer.Planes[1].Pitch },
                                    new IBuffer[] { inputBuffer.Planes[0].Buffer, inputBuffer.Planes[1].Buffer }
                                    );
                                var outputBitmap = new Bitmap(
                                    new Size(width, height),
                                    ColorMode.Yuv420Sp,
                                    new uint[] { outputBuffer.Planes[0].Pitch, outputBuffer.Planes[1].Pitch },
                                    new IBuffer[] { outputBuffer.Planes[0].Buffer, outputBuffer.Planes[1].Buffer }
                                    );

                                // Apply effect
                                var effect = new FilterEffect();
                                effect.Filters = new IFilter[] { new WatercolorFilter() };
                                effect.Source  = new BitmapImageSource(inputBitmap);
                                var renderer = new BitmapRenderer(effect, outputBitmap);
                                await renderer.RenderAsync();
                            }

                        // Save the file
                        var folder = await KnownFolders.PicturesLibrary.CreateFolderAsync("MediaCaptureReaderTests", CreationCollisionOption.OpenIfExists);

                        var file = await folder.CreateFileAsync("CS_W_MediaReader_TestLumiaEffect.jpg", CreationCollisionOption.ReplaceExisting);

                        await outputSample.SaveToFileAsync(file, ImageCompression.Jpeg);

                        Logger.LogMessage("Saved {0}", file.Path);
                    }
                }
        }
Example #7
        /// <summary>
        /// A red chromakey filter example for .png images.
        /// <para>
        /// ffmpeg -i <paramref name="input"/> -vf chromakey=red:0.1:0.0 <paramref name="output"/>
        /// </para>
        /// </summary>
        /// <param name="input"></param>
        /// <param name="output"></param>
        public unsafe PngChromekeyFilter(string input, string output)
        {
            using (MediaReader reader = new MediaReader(input))
                using (MediaWriter writer = new MediaWriter(output))
                {
                    var videoIndex = reader.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;

                    // init filter
                    int        height              = reader[videoIndex].Codec.AVCodecContext.height;
                    int        width               = reader[videoIndex].Codec.AVCodecContext.width;
                    int        format              = (int)reader[videoIndex].Codec.AVCodecContext.pix_fmt;
                    AVRational time_base           = reader[videoIndex].TimeBase;
                    AVRational sample_aspect_ratio = reader[videoIndex].Codec.AVCodecContext.sample_aspect_ratio;

                    MediaFilterGraph filterGraph = new MediaFilterGraph();
                    filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width, height, (AVPixelFormat)format, time_base, sample_aspect_ratio)
                    .LinkTo(0, filterGraph.AddFilter(new MediaFilter("chromakey"), "red:0.1:0.0"))
                    .LinkTo(0, filterGraph.AddVideoSinkFilter(new MediaFilter(MediaFilter.VideoSinks.Buffersink)));
                    filterGraph.Initialize();

                    // add stream by reader and init writer
                    writer.AddStream(reader[videoIndex]);
                    writer.Initialize();

                    // init video frame format converter by dstcodec
                    PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);


                    foreach (var srcPacket in reader.ReadPacket())
                    {
                        foreach (var srcFrame in reader[videoIndex].ReadFrame(srcPacket))
                        {
                            filterGraph.Inputs.First().WriteFrame(srcFrame);
                            foreach (var filterFrame in filterGraph.Outputs.First().ReadFrame())
                            {
                                // filterFrame.ToMat() can get the output image directly, without the need for a writer.
                                //using EmguFFmpeg.EmguCV;
                                //using (var mat = filterFrame.ToMat())
                                //{
                                //    mat.Save(output);
                                //}

                                foreach (var dstFrame in pixelConverter.Convert(filterFrame))
                                {
                                    foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                                    {
                                        writer.WritePacket(dstPacket);
                                    }
                                }
                            }
                        }
                    }

                    // flush codec cache
                    writer.FlushMuxer();
                }
        }
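A hedged usage sketch (file names are placeholders; the input is expected to have a red background):

        // Write a copy of in.png with red pixels keyed out to transparency.
        new PngChromekeyFilter("in.png", "out.png");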
Example #8
 public async Task CS_W_MediaReader_PixelDataAccess()
 {
     using (var reader = await MediaReader.CreateFromPathAsync("ms-appx:///car.mp4", AudioInitialization.Deselected, VideoInitialization.Bgra8))
         using (var result = await reader.VideoStream.ReadAsync())
         {
             var sample = (MediaSample2D)result.Sample;
             ProcessSample(sample);
         }
 }
Example #9
        protected override async void OnNavigatedTo(NavigationEventArgs e)
        {
            DisplayInformation.AutoRotationPreferences = DisplayOrientations.Landscape;

            //
            // Doing all that video processing is too much for low-end phones like the Lumia 520,
            // so pick and choose which pieces should run.
            //

            VideoPreview.MediaFailed += VideoPreview_MediaFailed;
            //var file = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/video.cvmpilj.mjpg"));
            //var stream = await file.OpenAsync(FileAccessMode.Read);
            //var source = await HttpMjpegCaptureSource.CreateFromStreamAsync(stream, "myboundary");
            var source = await HttpMjpegCaptureSource.CreateFromUriAsync("http://216.123.238.208/axis-cgi/mjpg/video.cgi?camera&resolution=640x480");

            VideoPreview.SetMediaStreamSource(source.Source);

            var settings = new MediaCaptureInitializationSettings
            {
                StreamingCaptureMode = StreamingCaptureMode.Video
            };
            await settings.SelectVideoDeviceAsync(VideoDeviceSelection.BackOrFirst);

            _capture = new MediaCapture();
            await _capture.InitializeAsync(settings);

            var graphicsDevice = MediaGraphicsDevice.CreateFromMediaCapture(_capture);

            var previewProps = (VideoEncodingProperties)_capture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview);

            TextLog.Text += String.Format("Preview: {0} {1}x{2} {3}fps\n", previewProps.Subtype, previewProps.Width, previewProps.Height, previewProps.FrameRate.Numerator / (float)previewProps.FrameRate.Denominator);

            TextLog.Text += "Creating MediaSamplePresenter from SurfaceImageSource\n";

            var image = new SurfaceImageSource((int)previewProps.Width, (int)previewProps.Height);

            ImagePreview.Source = image;
            _imagePresenter     = ImagePresenter.CreateFromSurfaceImageSource(image, graphicsDevice, (int)previewProps.Width, (int)previewProps.Height);

            TextLog.Text += "Creating MediaSamplePresenter from SwapChainPanel\n";

            _swapChainPresenter = ImagePresenter.CreateFromSwapChainPanel(
                SwapChainPreview,
                graphicsDevice,
                (int)previewProps.Width,
                (int)previewProps.Height
                );

            TextLog.Text += "Creating MediaReader\n";

            _mediaReader = await MediaReader.CreateFromMediaCaptureAsync(_capture, AudioInitialization.Deselected, VideoInitialization.Bgra8);

            TextLog.Text += "Starting video loop\n";

            var ignore = Task.Run(() => VideoLoop());
        }
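VideoLoop() is referenced but not shown. Below is a hedged reconstruction built only from calls that appear elsewhere in these samples; the Present calls are an assumption about the ImagePresenter API, so adjust them to the real method names:

        async Task VideoLoop()
        {
            while (true)
            {
                using (var result = await _mediaReader.VideoStream.ReadAsync())
                {
                    if (result.EndOfStream || result.Error)
                    {
                        break;
                    }

                    var sample = (MediaSample2D)result.Sample;
                    if (sample == null)
                    {
                        continue;
                    }

                    // Assumed API: hand the decoded Bgra8 sample to both presenters.
                    _imagePresenter.Present(sample);
                    _swapChainPresenter.Present(sample);
                }
            }
        }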
Example #10
        static void Main(string[] args)
        {
            var reader  = new MediaReader(@"Z:\output.mp4");
            var decoder = reader.Decoders.OfType <VideoDecoder>().First();
            var packet  = new Packet();

            using (var writer = new MediaRemuxer(@"Z:\output.h264", decoder)) {
                while (reader.ReadPacket(packet, decoder.StreamIndex))
                {
                    writer.Write(packet);
                }
            }
        }
Example #11
 static void Main(string[] args)
 {
     using (var reader = new MediaReader(@"Z:\game.dat_000004.wmv")) {
         var decoder = reader.Decoders.OfType <VideoDecoder>().First();
         using (var writer = new MediaRemuxer(@"Z:\test.mkv", decoder)) {
             Packet packet = new Packet();
             while (reader.ReadPacket(packet))
             {
                 packet.StreamIndex = decoder.StreamIndex;
                 writer.Write(packet);
             }
         }
     }
 }
Example #12
        public async Task CS_W_MediaReader_IpCam()
        {
            using (var source = await HttpMjpegCaptureSource.CreateFromUriAsync("http://216.123.238.208/axis-cgi/mjpg/video.cgi?camera&resolution=640x480"))
                using (var mediaReader = await MediaReader.CreateFromMediaSourceAsync(source.Source))
                    using (var result = await mediaReader.VideoStream.ReadAsync())
                    {
                        // Save the file
                        var folder = await KnownFolders.PicturesLibrary.CreateFolderAsync("MediaCaptureReaderTests", CreationCollisionOption.OpenIfExists);

                        var file = await folder.CreateFileAsync("CS_W_MediaReader_IpCam.jpg", CreationCollisionOption.ReplaceExisting);

                        await ((MediaSample2D)result.Sample).SaveToFileAsync(file, ImageCompression.Jpeg);
                        Logger.LogMessage("Saved {0}", file.Path);
                    }
        }
Example #13
        /// <summary>
        /// decode video to image
        /// filter graph:
        /// ┌──────┐     ┌──────┐     ┌─────┐     ┌──────────┐     ┌──────┐
        /// │input0│---->│buffer│---->│scale│---->│buffersink│---->│output│
        /// └──────┘     └──────┘     └─────┘     └──────────┘     └──────┘
        /// </summary>
        /// <param name="inputFile">input video file</param>
        /// <param name="outDirectory">folder for output image files</param>
        /// <param name="scaleOptions">scale options <see cref="http://ffmpeg.org/ffmpeg-filters.html#scale-1"/></param>
        public DecodeVideoWithCustomCodecScaledToMat(string inputFile, string outDirectory, string scaleOptions = "512:288")
        {
            using (MediaReader reader = new MediaReader(inputFile, null, null))
            {
                var videoIndex = reader.First(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).Index;

                unsafe
                {
                    // replace the default video decoder
                    // !!! IMPORTANT NOTE: this sample won't work unless you have downloaded ffmpeg (the GPL build, as it is more complete) and you have NVIDIA (CUDA) hardware !!!
                    reader[videoIndex].Codec = MediaDecode.CreateDecode("h264_cuvid", _ => ffmpeg.avcodec_parameters_to_context(_, reader[videoIndex].Stream.codecpar));
                }

                int        height              = reader[videoIndex].Codec.AVCodecContext.height;
                int        width               = reader[videoIndex].Codec.AVCodecContext.width;
                int        format              = (int)reader[videoIndex].Codec.AVCodecContext.pix_fmt;
                AVRational time_base           = reader[videoIndex].TimeBase;
                AVRational sample_aspect_ratio = reader[videoIndex].Codec.AVCodecContext.sample_aspect_ratio;

                /* We are moving the packet to CUDA to perform the scaling.
                 * We can then either:
                 * - remove hwdownload and format to leave the frame in CUDA and forward the pointer to any other function, or write the frame to the output video, or
                 * - convert it to a Mat, where the conversion speed depends on the size of the scaled frame.
                 */
                MediaFilterGraph filterGraph = new MediaFilterGraph();
                filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width, height, (AVPixelFormat)format, time_base, sample_aspect_ratio)
                .LinkTo(0, filterGraph.AddFilter(new MediaFilter("scale"), scaleOptions))
                .LinkTo(0, filterGraph.AddVideoSinkFilter(new MediaFilter(MediaFilter.VideoSinks.Buffersink)));
                filterGraph.Initialize();

                var sw = Stopwatch.StartNew();
                foreach (var packet in reader.ReadPacket())
                {
                    foreach (var frame in reader[videoIndex].ReadFrame(packet))
                    {
                        filterGraph.Inputs.First().WriteFrame(frame);
                        foreach (var filterFrame in filterGraph.Outputs.First().ReadFrame())
                        {
                            using (var image = filterFrame.ToMat())
                            {
                                image.Save(Path.Combine(Directory.CreateDirectory(outDirectory).FullName, $"{DateTime.Now.Ticks}.jpg"));
                            }
                        }
                    }
                }
                Console.WriteLine($"Converting to MAT [ processed in {sw.Elapsed.TotalMilliseconds:0} ms ]");
            }
        }
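A hedged usage sketch (per the note above, this requires a GPL ffmpeg build with h264_cuvid and NVIDIA CUDA hardware; paths are placeholders):

        // Decode on the GPU, scale each frame to 512x288, and dump JPEGs into ./frames.
        new DecodeVideoWithCustomCodecScaledToMat("input.mp4", "frames", "512:288");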
Example #14
        /// <summary>
        /// Record audio from an input device.
        /// <para>
        /// First run with inputDeviceName = null; the available input device names are listed in the VS output window.
        /// </para>
        /// <para>
        /// Then set inputDeviceName to your real device name and run again to get an audio output file.
        /// </para>
        /// <para>
        /// To stop recording, exit the console.
        /// </para>
        /// </summary>
        /// <param name="outputFile"></param>
        public RecordingAudio(string outputFile, string inputDeviceName = null)
        {
            // console output
            FFmpegHelper.SetupLogging(logWrite: _ => Console.Write(_));
            // register all device
            FFmpegHelper.RegisterDevice();

            var dshowInput = new InFormat("dshow");

            // list all "dshow" device at console output, ffmpeg does not support direct reading of device names
            MediaDevice.PrintDeviceInfos(dshowInput, "list", MediaDevice.ListDevicesOptions);

            if (string.IsNullOrWhiteSpace(inputDeviceName))
            {
                return;
            }
            // get your audio input device name from console output
            // NOTE: DO NOT delete "audio="
            using (MediaReader reader = new MediaReader($"audio={inputDeviceName}", dshowInput))
                using (MediaWriter writer = new MediaWriter(outputFile))
                {
                    var stream = reader.Where(_ => _.Codec.Type == AVMediaType.AVMEDIA_TYPE_AUDIO).First();

                    writer.AddStream(MediaEncode.CreateAudioEncode(writer.Format, stream.Codec.AVCodecContext.channels, stream.Codec.AVCodecContext.sample_rate));
                    writer.Initialize();

                    AudioFrame      dstFrame  = AudioFrame.CreateFrameByCodec(writer[0].Codec);
                    SampleConverter converter = new SampleConverter(dstFrame);
                    long            pts       = 0;
                    foreach (var packet in reader.ReadPacket())
                    {
                        foreach (var frame in stream.ReadFrame(packet))
                        {
                            foreach (var dstframe in converter.Convert(frame))
                            {
                                pts         += dstframe.AVFrame.nb_samples;
                                dstframe.Pts = pts; // audio pts is the running total of samples and must only increase
                                foreach (var dstpacket in writer[0].WriteFrame(dstframe))
                                {
                                    writer.WritePacket(dstpacket);
                                }
                            }
                        }
                    }
                    writer.FlushMuxer();
                }
        }
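A hedged sketch of the two-step workflow the summary describes (the device name below is a placeholder; copy the real one from the console listing):

        // Step 1: list the available "dshow" devices; nothing is recorded.
        new RecordingAudio("output.mp3");
        // Step 2: record from the named device until the console exits.
        new RecordingAudio("output.mp3", "Microphone (Realtek High Definition Audio)");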
Example #15
        static void Main(string[] args)
        {
            var media   = new MediaReader(@"Z:\【神之超赛】来自深渊+01话.mp4");
            var decoder = media.Decoders.OfType <VideoDecoder>().First();

            decoder.OutFormat = new VideoFormat(decoder.InFormat.Width, decoder.InFormat.Height, AVPixelFormat.Bgr24, 4);
            VideoFrame frame = new VideoFrame();

            for (int i = 0; i < 10; i++)
            {
                if (media.NextFrame(frame, decoder.StreamIndex))
                {
                    Bitmap image = new Bitmap(frame.Format.Width, frame.Format.Height, frame.Format.Strides[0], PixelFormat.Format24bppRgb, frame.Scan0);
                    image.Save($@"Z:\{i}.png");
                }
            }
        }
Example #16
        public override void Execute(byte[] bytes)
        {
            if (bytes.Length == 0)
            {
                return;
            }
            var threadId = Thread.CurrentThread.ManagedThreadId.ToString();

            Console.WriteLine($"DecodeDataImpl {threadId} ==> {bytes.Length}");
            if (null == stream)
            {
                stream = new MemoryStream();
            }
            else
            {
                try
                {
                    stream.Seek(0, SeekOrigin.Begin);
                }
                catch (Exception ex)
                {
                    stream = new MemoryStream();
                    Console.WriteLine($"Execute   {ex.ToString()}");
                }
            }
            stream.Write(bytes, 0, bytes.Length);
            stream.Seek(0, SeekOrigin.Begin);

            if (null == media)
            {
                media             = new MediaReader(stream);
                decoder           = media.Decoders.OfType <VideoDecoder>().First();
                decoder.OutFormat = new VideoFormat(decoder.InFormat.Width, decoder.InFormat.Height, AVPixelFormat.Bgr24, 4);
            }

            if (null == frame)
            {
                frame = new VideoFrame();
            }
            if (media.NextFrame(frame, decoder.StreamIndex))
            {
                Bitmap image = new Bitmap(frame.Format.Width, frame.Format.Height, frame.Format.Strides[0], PixelFormat.Format24bppRgb, frame.Scan0);
                ImgMgr.Get().SetImg("live.png", image);
            }
        }
Example #17
 unsafe static void Main(string[] args)
 {
     using (var reader = new MediaReader(@"D:\MyDocuments\Music\HEALTH\04 21世紀難民 feat. れをる(from REOL).m4a")) {
         var decoder = reader.Decoders.OfType <AudioDecoder>().First();
         var frame   = new AudioFrame();
         using (var writer = new MediaWriter(@"D:\test.flac").AddAudio(decoder.OutFormat, BitRate.Zero).Initialize()) {
             var enc = writer.Encoders[0] as AudioEncoder;
             while (reader.NextFrame(frame, decoder.StreamIndex))
             {
                 var pos = reader.Position;
                 writer.Write(frame);
                 Console.Write($"\rframes: {enc.InputFrames}, time: {enc.InputTimestamp}");
             }
             writer.Flush();
             Console.WriteLine($"\rframes: {enc.InputFrames}, time: {enc.InputTimestamp}");
         }
     }
 }
Example #18
 unsafe static void Main(string[] args)
 {
     using (var reader = new MediaReader(@"D:\MyDocuments\Music\夕立のりぼん+inst(NoMastering)_island__201411091428.mp3")) {
         var decoder = reader.Decoders.OfType <AudioDecoder>().First();
         var frame   = new AudioFrame();
         using (var writer = new MediaWriter(@"D:\MyDocuments\Music\夕立のりぼん+inst(NoMastering)_island__201411091428-output.mp3").AddAudio(decoder.OutFormat, BitRate._320Kbps).Initialize()) {
             var enc = writer.Encoders[0] as AudioEncoder;
             while (reader.NextFrame(frame, decoder.StreamIndex))
             {
                 var pos = reader.Position;
                 writer.Write(frame);
                 Console.Write($"\rframes: {enc.InputFrames}, time: {enc.InputTimestamp}");
             }
             writer.Flush();
             Console.WriteLine($"\rframes: {enc.InputFrames}, time: {enc.InputTimestamp}");
         }
     }
 }
Example #19
        static void Main(string[] args)
        {
            var httpRequest = WebRequest.CreateHttp("https://txy.live-play.acgvideo.com/live-txy/148029/live_16159326_5264177.flv?wsSecret=f120730be328d6c1bdbe50dc42f1fc65&wsTime=1510802458");

            httpRequest.Method = "GET";
            var httpResponse = httpRequest.GetResponse() as HttpWebResponse;
            var reader       = new MediaReader(httpResponse.GetResponseStream());
            var decoder      = reader.Decoders.OfType <VideoDecoder>().First();

            decoder.OutFormat = new VideoFormat(decoder.InFormat.Width, decoder.InFormat.Height, AVPixelFormat.Bgr24, 4);
            var frame = new VideoFrame();

            for (int i = 0; i < 100; i++)
            {
                reader.NextFrame(frame, decoder.StreamIndex);
                var bitmap = new Bitmap(frame.Format.Width, frame.Format.Height, frame.Format.Strides[0], System.Drawing.Imaging.PixelFormat.Format24bppRgb, frame.Scan0);
                bitmap.Save($@"Z:\{i}.png");
            }
        }
Example #20
        static void Main(string[] args)
        {
            const string TestFile = @"Z:\Halozy-厄神様のジレンマ.mp3";
            MediaReader  media    = new MediaReader(TestFile);
            var          decoder  = media.Decoders[0] as AudioDecoder;

            decoder.OutFormat = new AudioFormat(48000, 2, 32);
            var packet = new Packet();

            while (media.ReadPacket(packet, decoder.StreamIndex))
            {
                Console.Write($"\r{packet.PresentTimestamp}");
                using (var frame = new AudioFrame()) {
                    decoder.Decode(packet, frame);
                }
            }
            Console.WriteLine($"\r{packet.PresentTimestamp}");
            Console.ReadKey();
        }
Example #21
        public DecodeAudioToMat(string inputFile)
        {
            using (MediaReader reader = new MediaReader(inputFile))
            {
                // an audio file may have more than one stream, e.g. stream 0 is the mp3 audio and stream 1 is an attached cover image
                var audioIndex = reader.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO).First().Index;

                // the frame/converter pair is only needed if the decoded samples must be resampled before use
                AudioFrame      audioFrame = new AudioFrame(AVSampleFormat.AV_SAMPLE_FMT_S16P, 2, 1024, 44100);
                SampleConverter converter  = new SampleConverter(audioFrame);

                foreach (var packet in reader.ReadPacket())
                {
                    foreach (var frame in reader[audioIndex].ReadFrame(packet))
                    {
                        Mat mat = frame.ToMat();
                    }
                }
            }
        }
Example #22
        public RtmpPull(string input)
        {
            MediaDictionary options = new MediaDictionary();

            options.Add("stimeout", "30000000"); // set connect timeout 30s

            using (MediaReader reader = new MediaReader(input, null, options))
            {
                var codecContext = reader.Where(_ => _.Codec.Type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Codec.AVCodecContext;

                PixelConverter videoFrameConverter = new PixelConverter(AVPixelFormat.AV_PIX_FMT_BGR24, codecContext.width, codecContext.height);
                foreach (var packet in reader.ReadPacket())
                {
                    foreach (var frame in reader[packet.StreamIndex].ReadFrame(packet))
                    {
                        // TODO
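                        // One possible completion (a sketch, not part of the original example):
                        // convert the decoded frame to BGR24 with the converter created above,
                        // following the PixelConverter.Convert pattern used in the other examples.
                        foreach (var bgrFrame in videoFrameConverter.Convert(frame))
                        {
                            // consume bgrFrame here, e.g. bgrFrame.ToMat()
                        }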
                    }
                }
            }
        }
Example #23
        /// <summary>
        /// decode video to image
        /// </summary>
        /// <param name="inputFile">input video file</param>
        /// <param name="outDirectory">folder for output image files</param>
        public DecodeVideoToMat(string inputFile, string outDirectory)
        {
            string outputdir = Directory.CreateDirectory(outDirectory).FullName;

            using (MediaReader reader = new MediaReader(inputFile))
            {
                var videoIndex = reader.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;

                foreach (var packet in reader.ReadPacket())
                {
                    foreach (var frame in reader[videoIndex].ReadFrame(packet))
                    {
                        using (var image = frame.ToMat())
                        {
                            image.Save(Path.Combine(outputdir, $"{frame.Pts}.bmp"));
                        }
                    }
                }
            }
        }
Example #24
        public override void OnStart(EncoderOption option)
        {
            this.option = option;

            var audio_file = StoryboardInstanceManager.ActivityInstance?.Info?.audio_file_path;

            audio_reader = new MediaReader(audio_file);

            audio_decoder = audio_reader.Decoders.OfType <AudioDecoder>().FirstOrDefault();

            #region Video Init

            var video_format = new VideoFormat(option.Width, option.Height, AVPixelFormat.Bgr24);
            var video_param  = new VideoEncoderParameters()
            {
                FrameRate = new Fraction(option.FPS), BitRate = option.BitRate
            };

            video_encoder = new VideoEncoder(option.EncoderName, video_format, video_param);

            #endregion Video Init

            writer = new MediaWriter(option.OutputPath, false).AddEncoder(video_encoder);

            if (audio_decoder != null)
            {
                audio_encoder = new AudioEncoder(audio_decoder.ID, audio_decoder.OutFormat, BitRate._192Kbps);
                writer.AddEncoder(audio_encoder);
            }

            writer.Initialize();

            Log.User($"Format :{video_format.ToString()}\nVideo Encoder :{video_encoder.ToString()}");

            video_frame = new VideoFrame(video_format);

            // guard against sources without an audio track (audio_decoder may be null, see above)
            if (audio_decoder != null)
            {
                audio_frame = new AudioFrame(audio_decoder.OutFormat);

                audio_encoding_thread      = new Thread(AudioEncoding);
                audio_encoding_thread.Name = "Audio Encoder Thread";
                audio_encoding_thread.Start();
            }
        }
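AudioEncoding() is referenced but not shown. Below is a hedged reconstruction from the reader/writer calls used elsewhere in these examples (the exact signatures may differ in the real project):

        private void AudioEncoding()
        {
            // Pull decoded audio frames and feed them to the writer until the source is exhausted.
            while (audio_reader.NextFrame(audio_frame, audio_decoder.StreamIndex))
            {
                writer.Write(audio_frame);
            }
        }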
Example #25
        static void Main(string[] args)
        {
            var reader  = new MediaReader(@"G:\请问您今天来点兔子吗\[Hakugetsu&VCB-Studio]Gochuumon wa Usagi Desuka[1080p]\[Hakugetsu&VCB-Studio]Gochuumon wa Usagi Desuka[01][Hi10p_1080p][x264_2flac].mkv");
            var decoder = reader.Decoders.OfType <VideoDecoder>().First();

            decoder.OutFormat = new VideoFormat(
                decoder.InFormat.Width,
                decoder.InFormat.Height,
                AVPixelFormat.Yuv420p);
            VideoFrame frame = new VideoFrame();

            for (int i = 0; i < 10; i++)
            {
                if (reader.NextFrame(frame, decoder.StreamIndex))
                {
                    var writer = File.OpenWrite($@"Z:\{i}.yuv");
                    Write(writer, frame.Data[0], frame.Format.Strides[0] * frame.Format.Height);
                    Write(writer, frame.Data[1], frame.Format.Strides[1] * frame.Format.Height / 2);
                    Write(writer, frame.Data[2], frame.Format.Strides[2] * frame.Format.Height / 2);
                    writer.Close();
                }
            }
        }
Example #26
        public async Task CS_W_MediaReader_ReadAudio()
        {
            using (var reader = await MediaReader.CreateFromPathAsync("ms-appx:///Recording.m4a"))
            {
                while (true)
                {
                    using (var result = await reader.AudioStream.ReadAsync())
                    {
                        if (result.EndOfStream || result.Error)
                        {
                            break;
                        }

                        var sample = (MediaSample1D)result.Sample;
                        if (sample == null)
                        {
                            continue;
                        }

                        // Use audio data here
                    }
                }
            }
        }
Example #27
        /// <summary>
        /// Make the specified color of <paramref name="input0"/> transparent and overlay it on the <paramref name="input1"/> video to <paramref name="output"/>
        /// <para>
        /// NOTE: green [R:0 G:128 B:0]
        /// </para>
        /// <para>
        /// ffmpeg -i <paramref name="input0"/> -i <paramref name="input1"/> -filter_complex "[0:v]chromakey=green:0.1:0.0[ckout];[1:v][ckout]overlay[out]" -map "[out]" <paramref name="output"/>
        /// </para>
        /// filter graph:
        /// ┌──────┐     ┌──────┐     ┌─────────┐     ┌─────────┐
        /// │input0│---->│buffer│---->│chromakey│---->│         │
        /// └──────┘     └──────┘     └─────────┘     │         │     ┌──────────┐     ┌──────┐
        ///                                           │ overlay │---->│buffersink│---->│output│
        /// ┌──────┐     ┌──────┐                     │         │     └──────────┘     └──────┘
        /// │input1│-----│buffer│-------------------->│         │
        /// └──────┘     └──────┘                     └─────────┘
        /// </summary>
        /// <param name="input0">foreground</param>
        /// <param name="input1">background</param>
        /// <param name="output">output</param>
        /// <param name="chromakeyOptions">rgb(green or 0x008000):similarity:blend, see http://ffmpeg.org/ffmpeg-filters.html#chromakey </param>
        public VideoChromekeyFilter(string input0, string input1, string output, string chromakeyOptions = "green:0.1:0.0")
        {
            using (MediaReader reader0 = new MediaReader(input0))
                using (MediaReader reader1 = new MediaReader(input1))
                    using (MediaWriter writer = new MediaWriter(output))
                    {
                        var videoIndex0 = reader0.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;
                        var videoIndex1 = reader1.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;

                        // init complex filter graph
                        int        height0              = reader0[videoIndex0].Codec.AVCodecContext.height;
                        int        width0               = reader0[videoIndex0].Codec.AVCodecContext.width;
                        int        format0              = (int)reader0[videoIndex0].Codec.AVCodecContext.pix_fmt;
                        AVRational time_base0           = reader0[videoIndex0].TimeBase;
                        AVRational sample_aspect_ratio0 = reader0[videoIndex0].Codec.AVCodecContext.sample_aspect_ratio;

                        int        height1              = reader1[videoIndex1].Codec.AVCodecContext.height;
                        int        width1               = reader1[videoIndex1].Codec.AVCodecContext.width;
                        int        format1              = (int)reader1[videoIndex1].Codec.AVCodecContext.pix_fmt;
                        AVRational time_base1           = reader1[videoIndex1].TimeBase;
                        AVRational sample_aspect_ratio1 = reader1[videoIndex1].Codec.AVCodecContext.sample_aspect_ratio;

                        MediaFilterGraph filterGraph = new MediaFilterGraph();
                        var in0       = filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width0, height0, (AVPixelFormat)format0, time_base0, sample_aspect_ratio0);
                        var in1       = filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width1, height1, (AVPixelFormat)format1, time_base1, sample_aspect_ratio1);
                        var chromakey = filterGraph.AddFilter(new MediaFilter("chromakey"), chromakeyOptions);
                        var overlay   = filterGraph.AddFilter(new MediaFilter("overlay"));
                        var out0      = filterGraph.AddVideoSinkFilter(new MediaFilter(MediaFilter.VideoSinks.Buffersink));
                        in0.LinkTo(0, chromakey, 0).LinkTo(0, overlay, 1).LinkTo(0, out0, 0);
                        in1.LinkTo(0, overlay, 0);
                        filterGraph.Initialize();

                        // add stream by reader and init writer
                        writer.AddStream(reader0[videoIndex0]);
                        writer.Initialize();

                        // init video frame format converter by dstcodec
                        PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);

                        long          pts     = 0;
                        MediaReader[] readers = new MediaReader[] { reader0, reader1 };
                        int[]         index   = new int[] { videoIndex0, videoIndex1 };
                        for (int i = 0; i < readers.Length; i++)
                        {
                            var reader = readers[i];
                            foreach (var srcPacket in reader.ReadPacket())
                            {
                                foreach (var srcFrame in reader[index[i]].ReadFrame(srcPacket))
                                {
                                    filterGraph.Inputs[i].WriteFrame(srcFrame);
                                    foreach (var filterFrame in filterGraph.Outputs.First().ReadFrame())
                                    {
                                        foreach (var dstFrame in pixelConverter.Convert(filterFrame))
                                        {
                                            dstFrame.Pts = pts++;
                                            foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                                            {
                                                writer.WritePacket(dstPacket);
                                            }
                                        }
                                    }
                                }
                            }
                        }

                        // flush codec cache
                        writer.FlushMuxer();
                    }
        }
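A hedged usage sketch (a green-screen foreground composited over a background clip; paths are placeholders):

        // Key green out of fg.mp4 and overlay it on bg.mp4.
        new VideoChromekeyFilter("fg.mp4", "bg.mp4", "out.mp4", "green:0.1:0.0");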
Example #28
        static void Main(string[] args)
        {
            var media   = new MediaReader(@"D:\MyDocuments\Videos\bandicam 2018-05-07 20-38-22-722.mp4");
            var decoder = media.Decoders.OfType <VideoDecoder>().First();

            decoder.OutFormat = new VideoFormat(decoder.InFormat.Width, decoder.InFormat.Height, AVPixelFormat.Bgr24, 4);
            VideoFrame frame = new VideoFrame();

            for (int i = 0; i < 100; i++)
            {
                if (media.NextFrame(frame, decoder.StreamIndex))
                {
                    Bitmap image = new Bitmap(frame.Format.Width, frame.Format.Height, frame.Format.Strides[0], PixelFormat.Format24bppRgb, frame.Scan0);
                    image.Save($@"D:\MyDocuments\Videos\{i}.png");
                }
            }

            //AVFormatContext* formatCtx = null;
            //const string filename = @"D:\MyDocuments\Music\虾米音乐\Cyua-Blumenkranz.mp3";
            //FF.avformat_open_input(&formatCtx, filename, null, null).CheckFFmpegCode();
            //FF.avformat_find_stream_info(formatCtx, null);

            //AVCodec* codec;
            //int index = FF.av_find_best_stream(formatCtx, AVMediaType.Audio, -1, -1, &codec, 0).CheckFFmpegCode();
            //AVStream* stream = formatCtx->Streams[index];

            //// var codec = FF.avcodec_find_decoder(stream->Codecpar->CodecId);
            //var parser = FF.av_parser_init(codec->Id);
            //var codecCtx = FF.avcodec_alloc_context3(codec);
            //FF.avcodec_parameters_to_context(codecCtx, stream->Codecpar);
            //Debug.Print(*stream->Codecpar);
            //FF.avcodec_open2(codecCtx, codec, null).CheckFFmpegCode();

            //// FF.av_dump_format(formatCtx, 0, filename, false);

            //var frame = FF.av_frame_alloc();
            //AVPacket packet;
            //FF.av_init_packet(&packet);
            //packet.Data = null;
            //packet.Size = 0;

            //void decode_packet(AVPacket* _packet) {
            //	int r = FF.avcodec_send_packet(codecCtx, _packet).CheckFFmpegCode();
            //	AVRational rational = codecCtx->PktTimebase;
            //	while (r >= 0) {
            //		r = FF.avcodec_receive_frame(codecCtx, frame);
            //		switch (r) {
            //			case var _ when r == Error.EAGAIN.AVError():
            //				Console.Write('.');
            //				return;
            //			case (int)AVError.Eof:
            //				Console.WriteLine("EOF");
            //				return;
            //			case var _ when r < 0:
            //				throw new FFmpegException(r);
            //		}
            //		Console.Write('!');
            //	}
            //}

            //while (FF.av_read_frame(formatCtx, &packet) >= 0) {
            //	if (packet.StreamIndex == index) {
            //		decode_packet(&packet);
            //	}
            //	FF.av_packet_unref(&packet);
            //}
            //decode_packet(null);
        }
Example #29
        unsafe static void Main(string[] args)
        {
            const double MinimumFrequency = 10;
            const double MaximumFrequency = 20000;
            const double MaxDB            = 65;
            const int    fftSize          = 4192 * 6;

            var reader      = new MediaReader(@"D:\CloudMusic\MAN WITH A MISSION - My Hero.mp3");
            var decoder     = reader.Decoders.OfType <AudioDecoder>().First();
            var videoFormat = new VideoFormat(1280, 720, AVPixelFormat.Rgb0);
            var writer      = new MediaWriter(@"D:\CloudMusic\MAN WITH A MISSION - My Hero-fft.mkv")
                              .AddEncoder(new VideoEncoder(AVCodecID.H264, videoFormat, new VideoEncoderParameters {
                FrameRate = new Fraction(30), GopSize = 10
            }))
                              .AddEncoder(new AudioEncoder(AVCodecID.Mp3, decoder.InFormat))
                              //.AddVideo(videoFormat, new VideoEncoderParameters { FrameRate = new Fraction(30), GopSize = 10 })
                              //.AddAudio(decoder.InFormat)
                              .Initialize();

            int sampleRate = decoder.InFormat.SampleRate;
            int channels   = decoder.InFormat.Channels;
            var resampler  = new AudioResampler(decoder.InFormat, new AudioFormat(sampleRate, channels, 64));
            var inFrame    = new AudioFrame();
            var outFrame   = new AudioFrame();
            var image      = new VideoFrame(videoFormat);

            var viewHeight  = videoFormat.Height / 2;
            var observer    = new StreamObserver <double>(fftSize * 2, fftSize / 6, 2);
            var fft         = DoubleFFT.Create(fftSize);
            var inFFT       = fft.AllocInput();
            var outFFT      = fft.AllocOutput();
            var cutLength   = FFTTools.CutFrequencyLength(fftSize, MinimumFrequency, MaximumFrequency, sampleRate, fft.FFTComplexCount);
            var cutFFT      = Marshal.AllocHGlobal(cutLength * sizeof(double));
            var outFFT2     = Marshal.AllocHGlobal(fft.FFTComplexCount * sizeof(double));
            var outFFTFinal = Marshal.AllocHGlobal(viewHeight * sizeof(double));
            var window      = new BlackmanHarrisWindow(fftSize);
            var log         = new Spectrum3DLog();

            void FFT()
            {
                window.Apply((double *)inFFT, (double *)inFFT);
                fft.Execute(inFFT, outFFT);
                FFTTools.Abs(fftSize, (double *)outFFT, (double *)outFFT2);
                FFTTools.CutFrequency(fftSize, (double *)outFFT2, fft.FFTComplexCount, MinimumFrequency, MaximumFrequency, sampleRate, (double *)cutFFT, cutLength);
                FFTTools.Logarithm((double *)cutFFT, cutLength, MinimumFrequency, MaximumFrequency, (double *)outFFTFinal, viewHeight, log);
                FFTTools.ToDB((double *)outFFTFinal, viewHeight, MaxDB);
                FFTTools.Scale((double *)outFFTFinal, viewHeight, 1 / MaxDB);
            }

            void LeftShiftImage()
            {
                int w = image.Format.Width - 1;
                int h = image.Format.Height;

                for (int y = 0; y < h; y++)
                {
                    var p = (uint *)(image.Data[0] + image.Format.Strides[0] * y);
                    for (int x = 0; x < w; x++)
                    {
                        p[x] = p[x + 1];
                    }
                }
            }

            observer.Completed += data => {
                LeftShiftImage();

                int w      = image.Format.Width - 1;
                int h      = image.Format.Height;
                var p      = (byte *)((uint *)image.Data[0] + w);
                var stride = image.Format.Strides[0];

                for (int i = 0; i < fftSize; i++)
                {
                    ((double *)inFFT)[i] = ((double *)data)[2 * i];
                }
                FFT();
                for (int y = 0; y < viewHeight; y++, p += stride)
                {
                    var val = ((double *)outFFTFinal)[viewHeight - y - 1] * 256;
                    if (val < 0)
                    {
                        val = 0;
                    }
                    else if (val > 255)
                    {
                        val = 255;
                    }
                    p[0] = p[1] = p[2] = (byte)val;
                }

                for (int i = 0; i < fftSize; i++)
                {
                    ((double *)inFFT)[i] = ((double *)data)[2 * i + 1];
                }
                FFT();
                for (int y = 0; y < viewHeight; y++, p += stride)
                {
                    var val = ((double *)outFFTFinal)[viewHeight - y - 1] * 256;
                    if (val < 0)
                    {
                        val = 0;
                    }
                    else if (val > 255)
                    {
                        val = 255;
                    }
                    p[0] = p[1] = p[2] = (byte)val;
                }
            };

            bool end = false, run = true;

            while (run)
            {
                writer.Write(encoder => {
                    switch (encoder)
                    {
                    case AudioEncoder audioEncoder:
                        Console.Write($"\r{audioEncoder.InputTimestamp}");
                        if (reader.NextFrame(inFrame, decoder.StreamIndex))
                        {
                            resampler.Resample(inFrame, outFrame);
                            observer.Write(outFrame.Data[0], outFrame.SampleCount * channels);
                            return(inFrame);
                        }
                        else
                        {
                            resampler.ResampleFinal(outFrame);
                            observer.Write(outFrame.Data[0], outFrame.SampleCount * channels);
                            end = true;
                            Console.WriteLine($"\r{audioEncoder.InputTimestamp}");
                            return(null);
                        }

                    case VideoEncoder videoEncoder:
                        if (end)
                        {
                            run = false;
                        }
                        return(image);

                    default:
                        throw new NotImplementedException();
                    }
                });
            }
            writer.Dispose();
        }
Example #30
 public AudioStream(Stream stream)
 {
     media   = new MediaReader(stream);
     decoder = media.Decoders.OfType <AudioDecoder>().First();
 }
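The fragment above only opens the stream and picks a decoder. A hedged continuation using the NextFrame pattern from the other examples (the method shape is an assumption, not part of the original):

 // Read the next decoded audio frame into 'frame'; returns false at end of stream.
 public bool Read(AudioFrame frame)
 {
     return media.NextFrame(frame, decoder.StreamIndex);
 }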