Code example #1
        /// <summary>
        /// Yuv420P sample
        /// </summary>
        /// <param name="outputFile">output file</param>
        /// <param name="width">video width</param>
        /// <param name="height">video height</param>
        /// <param name="fps">video fps</param>
        public FillYuv420PSample(string outputFile, int width, int height, int fps)
        {
            // create a sibling directory (named after the output file) for the .bmp dumps
            var dir = Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(outputFile), Path.GetFileNameWithoutExtension(outputFile))).FullName;

            using (MediaWriter writer = new MediaWriter(outputFile))
            {
                writer.AddStream(MediaEncoder.CreateVideoEncode(writer.Format, width, height, fps));
                writer.Initialize();

                VideoFrame     srcframe       = new VideoFrame(width, height, FFmpeg.AutoGen.AVPixelFormat.AV_PIX_FMT_YUV420P);
                PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);

                Random random = new Random();
                for (int i = 0; i < fps * 10; i++)
                {
                    // fill video frame
                    FillYuv420P(srcframe, i);

                    foreach (var dstframe in pixelConverter.Convert(srcframe))
                    {
                        dstframe.Pts = i;
                        SaveFrame(dstframe, Path.Combine(dir, $"{i}.bmp"));
                        foreach (var packet in writer[0].WriteFrame(dstframe))
                        {
                            writer.WritePacket(packet);
                        }
                    }
                }

                // flush cache
                writer.FlushMuxer();
            }
        }
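The FillYuv420P helper called in the loop above (and the SaveFrame helper that dumps each frame to a .bmp) is not shown on this page. Below is a minimal sketch of the fill routine, following the moving-gradient pattern from FFmpeg's own muxing example; how the native frame is reached is an assumption, written here as a hypothetical implicit conversion from VideoFrame to AVFrame*.

        // Sketch only: fills Y with x + y + i*3, U with 128 + y + i*2, and V with
        // 64 + x + i*5, as in FFmpeg's muxing example. The implicit conversion
        // from VideoFrame to AVFrame* is an assumed wrapper accessor.
        private static unsafe void FillYuv420P(VideoFrame frame, int i)
        {
            FFmpeg.AutoGen.AVFrame* p = frame; // hypothetical accessor
            int width  = p->width;
            int height = p->height;

            // Y plane: one luma byte per pixel.
            for (int y = 0; y < height; y++)
                for (int x = 0; x < width; x++)
                    p->data[0][y * p->linesize[0] + x] = (byte)(x + y + i * 3);

            // U and V planes are subsampled 2x2 in yuv420p.
            for (int y = 0; y < height / 2; y++)
                for (int x = 0; x < width / 2; x++)
                {
                    p->data[1][y * p->linesize[1] + x] = (byte)(128 + y + i * 2);
                    p->data[2][y * p->linesize[2] + x] = (byte)(64 + x + i * 5);
                }
        }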
Code example #2
        public Video2Frame2Video(string inputFile, string outputFile)
        {
            using (MediaReader reader = new MediaReader(inputFile))
                using (MediaWriter writer = new MediaWriter(outputFile))
                {
                    var videoIndex = reader.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;
                    writer.AddStream(reader[videoIndex]);
                    writer.Initialize();

                    PixelConverter pixelConverter = new PixelConverter(writer.First().Codec);

                    foreach (var packet in reader.ReadPacket())
                    {
                        foreach (var frame in reader[videoIndex].ReadFrame(packet))
                        {
                            foreach (var dstFrame in pixelConverter.Convert(frame))
                            {
                                foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                                {
                                    writer.WritePacket(dstPacket);
                                }
                            }
                        }
                    }
                    writer.FlushMuxer();
                }
        }
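The constructor does all the work: demux the input, decode each video frame, convert its pixel format to match the encoder, re-encode, and mux into the output. A hypothetical invocation, with placeholder file names:

            new Video2Frame2Video("input.mp4", "output.mp4");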
Code example #3
        /// <summary>
        /// A red chromakey filter example for a .png image.
        /// <para>
        /// ffmpeg -i <paramref name="input"/> -vf chromakey=red:0.1:0.0 <paramref name="output"/>
        /// </para>
        /// </summary>
        /// <param name="input"></param>
        /// <param name="output"></param>
        public unsafe PngChromekeyFilter(string input, string output)
        {
            using (MediaReader reader = new MediaReader(input))
                using (MediaWriter writer = new MediaWriter(output))
                {
                    var videoIndex = reader.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;

                    // init filter
                    int        height              = reader[videoIndex].Codec.AVCodecContext.height;
                    int        width               = reader[videoIndex].Codec.AVCodecContext.width;
                    int        format              = (int)reader[videoIndex].Codec.AVCodecContext.pix_fmt;
                    AVRational time_base           = reader[videoIndex].TimeBase;
                    AVRational sample_aspect_ratio = reader[videoIndex].Codec.AVCodecContext.sample_aspect_ratio;

                    MediaFilterGraph filterGraph = new MediaFilterGraph();
                    var src       = filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width, height, (AVPixelFormat)format, time_base, sample_aspect_ratio);
                    var chromakey = filterGraph.AddFilter(new MediaFilter("chromakey"), "red:0.1:0.0");
                    var sink      = filterGraph.AddVideoSinkFilter(new MediaFilter(MediaFilter.VideoSinks.Buffersink));
                    src.LinkTo(0, chromakey).LinkTo(0, sink);
                    filterGraph.Initialize();

                    // add stream by reader and init writer
                    writer.AddStream(reader[videoIndex]);
                    writer.Initialize();

                    // init video frame format converter by dstcodec
                    PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);


                    foreach (var srcPacket in reader.ReadPacket())
                    {
                        foreach (var srcFrame in reader[videoIndex].ReadFrame(srcPacket))
                        {
                            filterGraph.Inputs.First().WriteFrame(srcFrame);
                            foreach (var filterFrame in filterGraph.Outputs.First().ReadFrame())
                            {
                                // filterFrame.ToMat() can be used to get the output image
                                // directly, without a writer:
                                //using EmguFFmpeg.EmguCV;
                                //using (var mat = filterFrame.ToMat())
                                //{
                                //    mat.Save(output);
                                //}

                                foreach (var dstFrame in pixelConverter.Convert(filterFrame))
                                {
                                    foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                                    {
                                        writer.WritePacket(dstPacket);
                                    }
                                }
                            }
                        }
                    }

                    // flush codec cache
                    writer.FlushMuxer();
                }
        }
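A hypothetical invocation with placeholder paths; the option string is color:similarity:blend, per the ffmpeg chromakey documentation linked in code example #6:

            new PngChromekeyFilter("logo.png", "logo_keyed.png");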
Code example #4
 public void WriteVideoFrame(VideoFrame videoFrame)
 {
     if (videoIndex < 0)
     {
         throw new NotSupportedException();
     }
     foreach (var dstframe in pixelConverter.Convert(videoFrame))
     {
         dstframe.Pts = videoFrame.Pts;
         foreach (var packet in writer[videoIndex].WriteFrame(dstframe))
         {
             writer.WritePacket(packet);
         }
     }
 }
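This method relies on writer, videoIndex, and pixelConverter fields that are not shown. A minimal sketch of how they might be prepared, assembled from the calls in code example #1; the Open method and the field layout are assumptions:

 private MediaWriter    writer;
 private PixelConverter pixelConverter;
 private int            videoIndex = -1;

 // Hypothetical setup mirroring the writer initialization in code example #1.
 public void Open(string outputFile, int width, int height, int fps)
 {
     writer = new MediaWriter(outputFile);
     writer.AddStream(MediaEncoder.CreateVideoEncode(writer.Format, width, height, fps));
     writer.Initialize();
     videoIndex     = 0;
     pixelConverter = new PixelConverter(writer[videoIndex].Codec);
 }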
Code example #5
File: TestNCB.xaml.cs Project: marzlia/CXPortal
        private void XRayDataStateStopBtn_Checked(object sender, RoutedEventArgs eventArguments)
        {
            try { AccessDetectorsData.Detectors.SetDataTransferMode(dataTransferMode.Stop); }
            catch (Exception ex) { AnomalyShow(ex); }

            try
            {
                PxeWriteAccess pxeAccess = new PxeWriteAccess();
                pxeAccess.CreatePXE("Test" + DateTime.Now.Ticks.ToString() + ".pxe");
                pxeAccess.CreatePXEHeader(1, (uint)AccessDetectorsData.Detectors.RawDataCollection.Count, (uint)AccessDetectorsData.Detectors.PixelsPerColumn);
                while (AccessDetectorsData.Detectors.RawDataCollection.Count > 0)
                {
                    DataInfo information = AccessDetectorsData.Detectors.RawDataCollection.Take();
                    float[]  data        = PixelConverter.Convert(information.LineData);
                    pxeAccess.WriteDataLines(1, data, 1);
                }
                pxeAccess.ClosePXEWrite();
            }
            catch (Exception ex) { AnomalyShow(ex); }
        }
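Note that PixelConverter here (and in code example #7) is a static detector-data helper from the CXPortal project, unrelated to the EmguFFmpeg PixelConverter used in the examples above. Its Pixel type is not shown on this page, so the following converter is a purely hypothetical sketch:

        // Hypothetical: maps one detector line of Pixel values to the float
        // samples the PXE writer expects. Pixel.Value is an assumed accessor.
        public static float[] Convert(Pixel[] line)
        {
            float[] result = new float[line.Length];
            for (int i = 0; i < line.Length; i++)
            {
                result[i] = line[i].Value;
            }
            return result;
        }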
Code example #6
        /// <summary>
        /// Make the specified color of <paramref name="input0"/> transparent and overlay the result on the <paramref name="input1"/> video, writing to <paramref name="output"/>
        /// <para>
        /// NOTE: green [R:0 G:128 B:0]
        /// </para>
        /// <para>
        /// ffmpeg -i <paramref name="input0"/> -i <paramref name="input1"/> -filter_complex "[1:v]chromakey=green:0.1:0.0[ckout];[0:v][ckout]overlay[out]" -map "[out]" <paramref name="output"/>
        /// </para>
        /// filter graph:
        /// ┌──────┐     ┌──────┐     ┌─────────┐     ┌─────────┐
        /// │input0│---->│buffer│---->│chromakey│---->│         │
        /// └──────┘     └──────┘     └─────────┘     │         │     ┌──────────┐     ┌──────┐
        ///                                           │ overlay │---->│buffersink│---->│output│
        /// ┌──────┐     ┌──────┐                     │         │     └──────────┘     └──────┘
        /// │input1│---->│buffer│-------------------->│         │
        /// └──────┘     └──────┘                     └─────────┘
        /// </summary>
        /// <param name="input0">foreground</param>
        /// <param name="input1">background</param>
        /// <param name="output">output</param>
        /// <param name="chromakeyOptions">rgb(green or 0x008000):similarity:blend, see http://ffmpeg.org/ffmpeg-filters.html#chromakey </param>
        public VideoChromekeyFilter(string input0, string input1, string output, string chromakeyOptions = "green:0.1:0.0")
        {
            using (MediaReader reader0 = new MediaReader(input0))
                using (MediaReader reader1 = new MediaReader(input1))
                    using (MediaWriter writer = new MediaWriter(output))
                    {
                        var videoIndex0 = reader0.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;
                        var videoIndex1 = reader1.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;

                        // init complex filter graph
                        int        height0              = reader0[videoIndex0].Codec.AVCodecContext.height;
                        int        width0               = reader0[videoIndex0].Codec.AVCodecContext.width;
                        int        format0              = (int)reader0[videoIndex0].Codec.AVCodecContext.pix_fmt;
                        AVRational time_base0           = reader0[videoIndex0].TimeBase;
                        AVRational sample_aspect_ratio0 = reader0[videoIndex0].Codec.AVCodecContext.sample_aspect_ratio;

                        int        height1              = reader1[videoIndex1].Codec.AVCodecContext.height;
                        int        width1               = reader1[videoIndex1].Codec.AVCodecContext.width;
                        int        format1              = (int)reader1[videoIndex1].Codec.AVCodecContext.pix_fmt;
                        AVRational time_base1           = reader1[videoIndex1].TimeBase;
                        AVRational sample_aspect_ratio1 = reader1[videoIndex1].Codec.AVCodecContext.sample_aspect_ratio;

                        MediaFilterGraph filterGraph = new MediaFilterGraph();
                        var in0       = filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width0, height0, (AVPixelFormat)format0, time_base0, sample_aspect_ratio0);
                        var in1       = filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width1, height1, (AVPixelFormat)format1, time_base1, sample_aspect_ratio1);
                        var chromakey = filterGraph.AddFilter(new MediaFilter("chromakey"), chromakeyOptions);
                        var overlay   = filterGraph.AddFilter(new MediaFilter("overlay"));
                        var out0      = filterGraph.AddVideoSinkFilter(new MediaFilter(MediaFilter.VideoSinks.Buffersink));
                        in0.LinkTo(0, chromakey, 0).LinkTo(0, overlay, 1).LinkTo(0, out0, 0);
                        in1.LinkTo(0, overlay, 0);
                        filterGraph.Initialize();

                        // add stream by reader and init writer
                        writer.AddStream(reader0[videoIndex0]);
                        writer.Initialize();

                        // init video frame format converter by dstcodec
                        PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);

                        long          pts     = 0;
                        MediaReader[] readers = new MediaReader[] { reader0, reader1 };
                        int[]         index   = new int[] { videoIndex0, videoIndex1 };
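                        // NOTE: the readers are drained one after the other, so every
                        // frame of input0 is queued into the filter graph before input1
                        // is read; overlay only emits once both inputs have frames, so
                        // the whole first clip is buffered in memory. Interleaving the
                        // reads would bound that buffering.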
                        for (int i = 0; i < readers.Length; i++)
                        {
                            var reader = readers[i];
                            foreach (var srcPacket in reader.ReadPacket())
                            {
                                foreach (var srcFrame in reader[index[i]].ReadFrame(srcPacket))
                                {
                                    filterGraph.Inputs[i].WriteFrame(srcFrame);
                                    foreach (var filterFrame in filterGraph.Outputs.First().ReadFrame())
                                    {
                                        foreach (var dstFrame in pixelConverter.Convert(filterFrame))
                                        {
                                            dstFrame.Pts = pts++;
                                            foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                                            {
                                                writer.WritePacket(dstPacket);
                                            }
                                        }
                                    }
                                }
                            }
                        }

                        // flush codec cache
                        writer.FlushMuxer();
                    }
        }
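A hypothetical invocation with placeholder paths; the default chromakeyOptions keys pure green [R:0 G:128 B:0], as noted in the doc comment:

            new VideoChromekeyFilter("foreground.mp4", "background.mp4", "output.mp4");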
Code example #7
File: ArchiveData.cs Project: marzlia/CXPortal
        public string CreatePXEFile(LINAC_ENERGY_TYPE_VALUE energy, PulseWidth pulseWidth, List<DataInfo> ObjectLines)
        {
            lock (_writeLock)
            {
                string pxeFile = Path.Combine(AppConfiguration.HostTempFileLocation, DateTime.Now.ToString(_dateFormat) + _pxeExtension);
                _pxeWriteAccess.CreatePXE(pxeFile);

                try
                {
                    List<Pixel[]>    highEnergyLines = new List<Pixel[]>();
                    List<Pixel[]>    lowEnergyLines  = new List<Pixel[]>();
                    XRayInfoIDStruct highEnergyInfo  = default(XRayInfoIDStruct);
                    XRayInfoIDStruct lowEnergyInfo   = default(XRayInfoIDStruct);

                    if (energy == LINAC_ENERGY_TYPE_VALUE.Dual)
                    {
                        if (ObjectLines[0].XRayInfo.Energy == XRayEnergyEnum.HighEnergy)
                        {
                            ObjectLines.RemoveAt(0);
                        }
                    }

                    //store object line data
                    foreach (DataInfo dataLine in ObjectLines)
                    {
                        if (dataLine.XRayInfo.Energy == XRayEnergyEnum.HighEnergy)
                        {
                            highEnergyLines.Add(dataLine.LineData);
                            highEnergyInfo = dataLine.XRayInfo;
                        }
                        else //low energy
                        {
                            lowEnergyLines.Add(dataLine.LineData);
                            lowEnergyInfo = dataLine.XRayInfo;
                        }
                    }

                    // Use the number of paired high/low lines when both energies are
                    // present; otherwise fall back to whichever side has data.
                    int maxLength = Math.Min(lowEnergyLines.Count, highEnergyLines.Count);

                    if (maxLength == 0)
                    {
                        maxLength = Math.Max(lowEnergyLines.Count, highEnergyLines.Count);
                    }

                    if (maxLength > AppConfiguration.MaxPXEWidth)
                    {
                        maxLength = AppConfiguration.MaxPXEWidth;
                    }

                    if (energy != LINAC_ENERGY_TYPE_VALUE.Low && highEnergyLines.Count > 0)
                    {
                        int bufferSize = maxLength * highEnergyLines[0].Length;

                        if (bufferSize > 0)
                        {
                            float[] buffer = new float[bufferSize];
                            // flatten the first maxLength high-energy lines into the buffer
                            Parallel.For(0, maxLength, index =>
                            {
                                float[] pixelArray = PixelConverter.Convert(highEnergyLines[index]);
                                int length         = Buffer.ByteLength(pixelArray);
                                Buffer.BlockCopy(pixelArray, 0, buffer, index * length, length);
                            });


                            // Dual and single-energy modes write the same high-energy
                            // header, data lines, and calibration samples, so no branch
                            // on the energy mode is needed here.
                            _pxeWriteAccess.CreateHiPXEHeader((uint)maxLength, (uint)highEnergyLines[0].Length);
                            _pxeWriteAccess.WriteHiDataLines(buffer, (uint)maxLength);
                            _pxeWriteAccess.WriteHighEngDarkSample(PixelConverter.Convert(_calibration.GetDarkData(highEnergyInfo)));
                            _pxeWriteAccess.WriteHighEngAirSample(PixelConverter.Convert(_calibration.GetAirData(highEnergyInfo)));

                            if (AppConfiguration.StoreReferenceCorrection)
                            {
                                _pxeWriteAccess.WriteHiRef(_calibration.GetReferenceCorrections(highEnergyInfo));
                            }

                            if (AppConfiguration.StoreScaleFactor)
                            {
                                _pxeWriteAccess.WriteHiScaleFactor(_calibration.GetScaleFactor(highEnergyInfo));
                            }

                            if (AppConfiguration.StoreAirDarkSamples)
                            {
                                _pxeWriteAccess.WriteHiFullAirData(PixelConverter.Convert(_calibration.GetAirDataCollection(highEnergyInfo)), Convert.ToUInt32(AppConfiguration.CalibrationDataLines));
                                _pxeWriteAccess.WriteHiFullDarkData(PixelConverter.Convert(_calibration.GetDarkDataCollection(highEnergyInfo)), Convert.ToUInt32(AppConfiguration.CalibrationDataLines));
                            }
                        }

                        highEnergyLines.Clear();
                    }

                    if (energy != LINAC_ENERGY_TYPE_VALUE.High && lowEnergyLines.Count > 0)
                    {
                        int bufferSize = maxLength * lowEnergyLines[0].Length;

                        if (bufferSize > 0)
                        {
                            float[] buffer = new float[bufferSize];
                            // flatten the first maxLength low-energy lines into the buffer
                            Parallel.For(0, maxLength, index =>
                            {
                                float[] pixelArray = PixelConverter.Convert(lowEnergyLines[index]);
                                int length         = Buffer.ByteLength(pixelArray);
                                Buffer.BlockCopy(pixelArray, 0, buffer, index * length, length);
                            });

                            // As above, both energy modes write the same low-energy
                            // header, data lines, and calibration samples.
                            _pxeWriteAccess.CreateLoPXEHeader((uint)maxLength, (uint)lowEnergyLines[0].Length);
                            _pxeWriteAccess.WriteLoDataLines(buffer, (uint)maxLength);
                            _pxeWriteAccess.WriteLowEngDarkSample(PixelConverter.Convert(_calibration.GetDarkData(lowEnergyInfo)));
                            _pxeWriteAccess.WriteLowEngAirSample(PixelConverter.Convert(_calibration.GetAirData(lowEnergyInfo)));

                            if (AppConfiguration.StoreReferenceCorrection)
                            {
                                _pxeWriteAccess.WriteLoRef(_calibration.GetReferenceCorrections(lowEnergyInfo));
                            }

                            if (AppConfiguration.StoreScaleFactor)
                            {
                                _pxeWriteAccess.WriteLoScaleFactor(_calibration.GetScaleFactor(lowEnergyInfo));
                            }

                            if (AppConfiguration.StoreAirDarkSamples)
                            {
                                _pxeWriteAccess.WriteLoFullAirData(PixelConverter.Convert(_calibration.GetAirDataCollection(lowEnergyInfo)), Convert.ToUInt32(AppConfiguration.CalibrationDataLines));
                                _pxeWriteAccess.WriteLoFullDarkData(PixelConverter.Convert(_calibration.GetDarkDataCollection(lowEnergyInfo)), Convert.ToUInt32(AppConfiguration.CalibrationDataLines));
                            }
                        }

                        lowEnergyLines.Clear();
                    }
                }
                catch (Exception e)
                {
                    _log.LogError("Exception writing PXE data.");
                    _log.LogError(e.GetType().ToString() + ": " + e.Message);
                    _log.LogError(e.StackTrace);
                }

                try
                {
                    _pxeWriteAccess.ClosePXEWrite();
                }
                catch (Exception e)
                {
                    _log.LogError("Exception closing PXEWrite.");
                    _log.LogError(e.GetType().ToString() + ": " + e.Message);
                    _log.LogError(e.StackTrace);
                }
                return pxeFile;
            }
        }