Example #1
        private static void RoundTripTestPattern()
        {
            var stampedTestPattern = _testPattern.Clone() as System.Drawing.Image;
            int tWidth             = _testPattern.Width;
            int tHeight            = _testPattern.Height;

            AddTimeStampAndLocation(stampedTestPattern, DateTime.UtcNow.ToString("dd MMM yyyy HH:mm:ss:fff"), "Test Pattern");
            var sampleBuffer = PixelConverter.BitmapToRGBA(stampedTestPattern as System.Drawing.Bitmap, _testPattern.Width, _testPattern.Height);

            byte[] i420 = PixelConverter.RGBAtoYUV420Planar(sampleBuffer, _testPattern.Width, _testPattern.Height);

            var encodedBuffer = _vp8Encoder.Encode(i420, false);

            Console.WriteLine($"VP8 encoded buffer length {encodedBuffer.Length}.");

            List <byte[]> i420Frames = _vp8Decoder.Decode(encodedBuffer, encodedBuffer.Length, out var dWidth, out var dHeight);

            Console.WriteLine($"VP8 decoded frames count {i420Frames.Count}, first frame length {i420Frames.First().Length}, width {dWidth}, height {dHeight}.");

            byte[] rgb = i420Frames.First();

            unsafe
            {
                fixed(byte *s = rgb)
                {
                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)dWidth, (int)dHeight, rgb.Length / (int)dHeight, PixelFormat.Format24bppRgb, (IntPtr)s);
                    bmpImage.Save("encodedroundtrip.bmp");
                    bmpImage.Dispose();
                }
            }
        }
Example #2
 public Rgba64(Argb32 source)
 {
     this.R = PixelConverter.UpscaleFrom8BitTo16Bit(source.R);
     this.G = PixelConverter.UpscaleFrom8BitTo16Bit(source.G);
     this.B = PixelConverter.UpscaleFrom8BitTo16Bit(source.B);
     this.A = PixelConverter.UpscaleFrom8BitTo16Bit(source.A);
 }
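PixelConverter.UpscaleFrom8BitTo16Bit is not shown on this page. A common way to widen an 8-bit channel to 16 bits while preserving the full range (so 0 maps to 0 and 255 maps to 65535) is to replicate the byte into both halves of the result, which is the same as multiplying by 257. A minimal sketch, assuming that behaviour rather than the library's actual code:

 // Hypothetical stand-in for PixelConverter.UpscaleFrom8BitTo16Bit.
 // Replicating the byte maps 0 -> 0 and 255 -> 65535 exactly (equivalent to component * 257).
 private static ushort UpscaleFrom8BitTo16Bit(byte component)
 {
     return (ushort)((component << 8) | component);
 }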
Example #3
        public void ExternalVideoSourceRawSample(uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat)
        {
            if (!_isClosed)
            {
                if (OnVideoSourceEncodedSample != null)
                {
                    uint fps = (durationMilliseconds > 0) ? 1000 / durationMilliseconds : DEFAULT_FRAMES_PER_SECOND;
                    if (fps == 0)
                    {
                        fps = 1;
                    }

                    int stride     = (pixelFormat == VideoPixelFormatsEnum.Bgra) ? 4 * width : 3 * width;
                    var i420Buffer = PixelConverter.ToI420(width, height, stride, sample, pixelFormat);
                    byte[]? encodedBuffer = _ffmpegEncoder.Encode(FFmpegConvert.GetAVCodecID(_videoFormatManager.SelectedFormat.Codec), i420Buffer, width, height, (int)fps, _forceKeyFrame);

                    if (encodedBuffer != null)
                    {
                        //Console.WriteLine($"encoded buffer: {encodedBuffer.HexStr()}");
                        uint durationRtpTS = VIDEO_SAMPLING_RATE / fps;

                        // Note the event handler can be removed while the encoding is in progress.
                        OnVideoSourceEncodedSample?.Invoke(durationRtpTS, encodedBuffer);
                    }

                    if (_forceKeyFrame)
                    {
                        _forceKeyFrame = false;
                    }
                }
            }
        }
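The constants DEFAULT_FRAMES_PER_SECOND and VIDEO_SAMPLING_RATE are defined elsewhere in the class. Assuming the standard 90 kHz RTP video clock, the duration-to-RTP-timestamp arithmetic above works out as in this sketch (the helper name and the 90000 constant are assumptions, not the class's actual members):

        // A minimal sketch of the fps / RTP duration arithmetic used above, assuming a 90 kHz clock.
        private static uint GetRtpDuration(uint durationMilliseconds, uint defaultFps = 30)
        {
            uint fps = (durationMilliseconds > 0) ? 1000 / durationMilliseconds : defaultFps;
            if (fps == 0)
            {
                fps = 1; // guard against frame durations longer than one second
            }
            return 90000 / fps; // e.g. 33 ms -> 30 fps -> 3000 RTP ticks per frame
        }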
Example #4
        static void Main(string[] args)
        {
            Console.WriteLine("Pixel Convert Test Console:");

            //StreamReader sr = new StreamReader("ref-bgra32.bmp");
            var img    = Image.Load("ref-bgra32.bmp");
            int width  = img.Width;
            int height = img.Height;

            var refImage = img.CloneAs <Rgba32>();

            //var refImage = img.CloneAs<Rgb24>();
            //bgra32Img.SaveAsPng("ref-bgra32.png");

            if (refImage.TryGetSinglePixelSpan(out var pixelSpan))
            {
                byte[] rgba = MemoryMarshal.AsBytes(pixelSpan).ToArray();

                //var i420 = PixelConverter.RGBAtoYUV420Planar(rgba, width, height);
                var i420 = PixelConverter.RGBAtoI420(rgba, width, height);
                //var i420 = PixelConverter.RGBtoI420(rgb, width, height);
                var rgbRndTrip = PixelConverter.I420toRGB(i420, width, height);

                using (var imgRndTrip = Image.LoadPixelData <Rgb24>(rgbRndTrip, width, height))
                {
                    imgRndTrip.SaveAsPng("rndtrip-rgb24.png");
                }
            }

            Console.WriteLine("Finished.");
        }
Example #5
        protected override void OnCreate(Bundle savedInstanceState)
        {
            base.OnCreate(savedInstanceState);
            SetContentView(Resource.Layout.activity_images_repeat);

            var toolbarMain = FindViewById <Toolbar>(Resource.Id.toolbar_images_repeat);

            SetSupportActionBar(toolbarMain);
            SupportActionBar.SetDisplayHomeAsUpEnabled(true);

            using (var background = BitmapHelper.GetBackgroung(Resources, _displayWidth - PixelConverter.DpToPX(20),
                                                               PixelConverter.DpToPX(190)))
            {
                FindViewById <LinearLayout>(Resource.Id.repeat_background).Background = background;
            }

            Buttons = new List <Button>
            {
                FindViewById <Button>(Resource.Id.button_I_choice1),
                FindViewById <Button>(Resource.Id.button_I_choice2),
                FindViewById <Button>(Resource.Id.button_I_choice3),
                FindViewById <Button>(Resource.Id.button_I_choice4)
            };
            ButtonNext = new ButtonNext
            {
                button = FindViewById <Button>(Resource.Id.button_I_Next),
                State  = StateButton.Next
            };
            Button_Images_Next_Click(null);
        }
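PixelConverter.DpToPX is an app-specific helper rather than a framework API. A typical Android dp-to-pixel conversion scales by the display density, roughly as in this sketch (an assumption about the helper, not the project's actual implementation):

            // Hypothetical dp-to-pixel helper: scale by the device density and round to the nearest pixel.
            public static int DpToPX(int dp)
            {
                float density = Android.App.Application.Context.Resources.DisplayMetrics.Density;
                return (int)System.Math.Round(dp * density);
            }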
Example #6
        public void Blit(ref PixelBox src, BasicBox dstBox)
        {
            var srcBox = src;

            if (srcBox.Width != dstBox.Width || srcBox.Height != dstBox.Height)
            {
                // we need to rescale src to dst size first (also convert format)
                var tmpData = new float[dstBox.Width * dstBox.Height];
                srcBox = new PixelBox(dstBox.Width, dstBox.Height, 1, PixelFormat.L8, BufferBase.Wrap(tmpData, tmpData.Length * sizeof(float)));
                Image.Scale(src, srcBox);
            }

            //pixel conversion
            var dstMemBox = new PixelBox(dstBox.Width, dstBox.Height, dstBox.Depth, PixelFormat.L8, BufferBase.Wrap(this.mData, mData.Length * sizeof(float)));

            // use srcBox here so the rescaled copy (when one was made) is the conversion source
            PixelConverter.BulkPixelConversion(srcBox, dstMemBox);

            if (srcBox != src)
            {
                // free temp
                srcBox = null;
            }
            var dRect = new Rectangle(dstBox.Left, dstBox.Top, dstBox.Right, dstBox.Bottom);

            DirtyRect(dRect);
        }
Example #7
        protected override void OnCreate(Bundle savedInstanceState)
        {
            base.OnCreate(savedInstanceState);
            SetContentView(Resource.Layout.activity_languages_blitz_poll);
            var toolbarMain = FindViewById <Toolbar>(Resource.Id.toolbar_languages_blitz_poll);

            SetSupportActionBar(toolbarMain);
            SupportActionBar.SetDisplayHomeAsUpEnabled(true); // show the Home button

            var displayMetrics = new DisplayMetrics();

            WindowManager.DefaultDisplay.GetRealMetrics(displayMetrics);

            BackgroundWord = BitmapHelper.GetBackgroung(Resources,
                                                        displayMetrics.WidthPixels - PixelConverter.DpToPX(50), PixelConverter.DpToPX(300));
            using (var background = BitmapHelper.GetBackgroung(Resources,
                                                               displayMetrics.WidthPixels - PixelConverter.DpToPX(200), PixelConverter.DpToPX(50)))
            {
                FindViewById <TextView>(Resource.Id.textView_Timer_language).Background = background;
            }

            FindViewById <RelativeLayout>(Resource.Id.RelativeLayoutLanguagesBlitzPoll).Touch += Swipes;

            ViewCurrent = GetTextView();
            FindViewById <RelativeLayout>(Resource.Id.RelativeLayoutLanguagesBlitzPoll).AddView(ViewCurrent, 1);
            ViewModel.TimerStart();
        }
Example #8
 public Rgba64(Bgr24 source)
 {
     this.R = PixelConverter.UpscaleFrom8BitTo16Bit(source.R);
     this.G = PixelConverter.UpscaleFrom8BitTo16Bit(source.G);
     this.B = PixelConverter.UpscaleFrom8BitTo16Bit(source.B);
     this.A = ushort.MaxValue;
 }
Example #9
        private TextView GetTextView()
        {
            var param = PixelConverter.GetParamsRelative(ViewGroup.LayoutParams.MatchParent, PixelConverter.DpToPX(320),
                                                         10, 160, 10, 10);

            ViewModel.CurrentNumber = new Random(unchecked ((int)DateTime.Now.Ticks)).Next(ViewModel.Database.Count);
            var randIndex =
                (ViewModel.CurrentNumber +
                 new Random(unchecked ((int)DateTime.Now.Ticks)).Next(1, ViewModel.Database.Count)) %
                ViewModel.Database.Count;

            ViewModel.Answer = new Random(unchecked((int)DateTime.Now.Ticks)).Next(2) == 1;
            var translationWord = ViewModel.Database[ViewModel.Answer ? ViewModel.CurrentNumber : randIndex]
                                  .TranslationWord;
            var textView = new TextView(this)
            {
                TextSize         = 30,
                Elevation        = PixelConverter.DpToPX(10),
                LayoutParameters = param,
                Text             = $"{ViewModel.Database[ViewModel.CurrentNumber].Word}\n\n{translationWord}",
                Gravity          = GravityFlags.CenterHorizontal | GravityFlags.Center
            };

            textView.Background = BackgroundWord;
            textView.SetTextColor(Colors.White);
            return(textView);
        }
Example #10
        public override async Task Answer(bool userAnswer)
        {
            await API.Statistics.Add(ViewModel.Database, ViewModel.CurrentNumber,
                                     !(ViewModel.Answer ^ userAnswer)? -1 : 1);

            if (!(ViewModel.Answer ^ userAnswer))
            {
                ViewModel.True++;
            }
            else
            {
                ViewModel.False++;
            }

            if (ViewPrev != null)
            {
                FindViewById <RelativeLayout>(Resource.Id.RelativeLayoutLanguagesBlitzPoll).RemoveView(ViewPrev);
            }

            ViewCurrent.Background = GetDrawable(!(ViewModel.Answer ^ userAnswer)
                ? Resource.Drawable.view_true
                : Resource.Drawable.view_false);
            RunAnimation(ViewCurrent, (userAnswer ? 1 : -1) * PixelConverter.DpToPX(5000));
            ViewPrev    = ViewCurrent;
            ViewCurrent = GetTextView();
            FindViewById <RelativeLayout>(Resource.Id.RelativeLayoutLanguagesBlitzPoll).AddView(ViewCurrent, 0);
            ViewModel.TitleCount = $"{ViewModel.True + ViewModel.False + 1}";
        }
Example #11
        /// <summary>
        /// Yuv420P sample
        /// </summary>
        /// <param name="outputFile">output file</param>
        /// <param name="width">video width</param>
        /// <param name="height">video height</param>
        /// <param name="fps">video fps</param>
        public FillYuv420PSample(string outputFile, int width, int height, int fps)
        {
            var dir = Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(outputFile), Path.GetFileNameWithoutExtension(outputFile))).FullName;

            using (MediaWriter writer = new MediaWriter(outputFile))
            {
                writer.AddStream(MediaEncoder.CreateVideoEncode(writer.Format, width, height, fps));
                writer.Initialize();

                VideoFrame     srcframe       = new VideoFrame(width, height, FFmpeg.AutoGen.AVPixelFormat.AV_PIX_FMT_YUV420P);
                PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);

                Random random = new Random();
                for (int i = 0; i < fps * 10; i++)
                {
                    // fill video frame
                    FillYuv420P(srcframe, i);

                    foreach (var dstframe in pixelConverter.Convert(srcframe))
                    {
                        dstframe.Pts = i;
                        SaveFrame(dstframe, Path.Combine(dir, $"{i}.bmp"));
                        foreach (var packet in writer[0].WriteFrame(dstframe))
                        {
                            writer.WritePacket(packet);
                        }
                    }
                }

                // flush cache
                writer.FlushMuxer();
            }
        }
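FillYuv420P is defined elsewhere in the sample. The classic FFmpeg test pattern fills the three planes from the pixel coordinates and the frame index; a sketch of that fill over raw planes is below (plane access on the actual VideoFrame type is omitted here, so the helper name and signature are assumptions):

        // Sketch of a YUV420P test-pattern fill over raw planes: Y is width x height,
        // U and V are each width/2 x height/2. Mirrors the classic FFmpeg muxing example.
        static void FillYuv420PPlanes(byte[] y, byte[] u, byte[] v, int width, int height, int frameIndex)
        {
            for (int row = 0; row < height; row++)
                for (int col = 0; col < width; col++)
                    y[row * width + col] = (byte)(col + row + frameIndex * 3);

            int chromaWidth = width / 2, chromaHeight = height / 2;
            for (int row = 0; row < chromaHeight; row++)
                for (int col = 0; col < chromaWidth; col++)
                {
                    u[row * chromaWidth + col] = (byte)(128 + row + frameIndex * 2);
                    v[row * chromaWidth + col] = (byte)(64 + col + frameIndex * 5);
                }
        }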
Example #12
        private static void _packI <T>(T r, T g, T b, T a, PixelFormat pf, BufferBase dest)
        {
            var destType = typeof(T);

            if (destType == typeof(byte))
            {
                PixelConverter.PackColor((uint)System.Convert.ChangeType(r, typeof(uint)),
                                         (uint)System.Convert.ChangeType(g, typeof(uint)),
                                         (uint)System.Convert.ChangeType(b, typeof(uint)),
                                         (uint)System.Convert.ChangeType(a, typeof(uint)), pf, dest);
            }
            else if (destType == typeof(ushort))
            {
                PixelConverter.PackColor((float)System.Convert.ChangeType(r, destType) / 65535.0f,
                                         (float)System.Convert.ChangeType(g, destType) / 65535.0f,
                                         (float)System.Convert.ChangeType(b, destType) / 65535.0f,
                                         (float)System.Convert.ChangeType(a, destType) / 65535.0f, pf, dest);
            }
            else if (destType == typeof(float))
            {
                PixelConverter.PackColor((float)System.Convert.ChangeType(r, destType),
                                         (float)System.Convert.ChangeType(g, destType),
                                         (float)System.Convert.ChangeType(b, destType),
                                         (float)System.Convert.ChangeType(a, destType), pf, dest);
            }
            else
            {
                throw new AxiomException("Unsupported type!");
            }
        }
Example #13
        public unsafe void Roundtrip_Bitmap_719x405()
        {
            int width  = 719;
            int height = 405;

            using (Bitmap bmp = new Bitmap($"img/testpattern_{width}x{height}.bmp"))
            {
                byte[] bgr = BitmapToBuffer(bmp, out int stride);

                byte[] i420 = PixelConverter.BGRtoI420(bgr, width, height, stride);

                Assert.NotNull(i420);

                byte[] rtBgr = PixelConverter.I420toBGR(i420, width, height, out int rtStride);

                Assert.NotNull(rtBgr);

                fixed(byte *pBgr = rtBgr)
                {
                    Bitmap rtBmp = new Bitmap(width, height, rtStride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, new IntPtr(pBgr));

                    rtBmp.Save($"roundtrip_bitmap_{width}x{height}.bmp");
                    rtBmp.Dispose();
                }
            }
        }
Example #14
        public Video2Frame2Video(string inputFile, string outputFile)
        {
            using (MediaReader reader = new MediaReader(inputFile))
                using (MediaWriter writer = new MediaWriter(outputFile))
                {
                    var videoIndex = reader.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;
                    writer.AddStream(reader[videoIndex]);
                    writer.Initialize();

                    PixelConverter pixelConverter = new PixelConverter(writer.First().Codec);

                    foreach (var packet in reader.ReadPacket())
                    {
                        foreach (var frame in reader[videoIndex].ReadFrame(packet))
                        {
                            foreach (var dstFrame in pixelConverter.Convert(frame))
                            {
                                foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                                {
                                    writer.WritePacket(dstPacket);
                                }
                            }
                        }
                    }
                    writer.FlushMuxer();
                }
        }
Example #15
        /// <summary>
        /// </summary>
        /// <param name="srcBox"> </param>
        /// <param name="dst"> </param>
        public override void BlitToMemory(BasicBox srcBox, PixelBox dst)
        {
            if (!this._buffer.Contains(srcBox))
            {
                throw new ArgumentOutOfRangeException("source box out of range");
            }

            if (srcBox.Left == 0 && srcBox.Right == Width && srcBox.Top == 0 && srcBox.Bottom == Height && srcBox.Front == 0 && srcBox.Back == Depth && dst.Width == Width && dst.Height == Height && dst.Depth == Depth && GLESPixelUtil.GetGLOriginFormat(dst.Format) != 0)
            {
                // The direct case: the user wants the entire texture in a format supported by GL
                // so we don't need an intermediate buffer
                Download(dst);
            }
            else
            {
                // Use buffer for intermediate copy
                AllocateBuffer();
                //download entire buffer
                Download(this._buffer);
                if (srcBox.Width != dst.Width || srcBox.Height != dst.Height || srcBox.Depth != dst.Depth)
                {
                    // we need scaling
                    Image.Scale(this._buffer.GetSubVolume(srcBox), dst, ImageFilter.Bilinear);
                }
                else
                {
                    // Just copy the bit that we need
                    PixelConverter.BulkPixelConversion(this._buffer.GetSubVolume(srcBox), dst);
                }
                FreeBuffer();
            }
        }
Example #16
            public void CopyToImage(X11Image image)
            {
                if (image.Height != height || image.Width != width)
                {
                    throw new InvalidOperationException($"Source size ({width} x {height}) does not match image size ({image.Width} x {image.Height}).");
                }

                using (X11Bitmap xBitmap = X11Bitmap.Create(image.Display, image.Visual, width, height))
                {
                    if (pixelFormat == PixelFormat.RGBA_32)
                    {
                        PixelConverter.Convert_RGBA_32BE_To_PARGB_32(dataPtr, stride, xBitmap.ImageData, 4 * width, width, height);
                    }
                    else if (pixelFormat == PixelFormat.RGB_24)
                    {
                        PixelConverter.Convert_RGB_24BE_To_ARGB_32(dataPtr, stride, xBitmap.ImageData, 4 * width, width, height);
                    }
                    else
                    {
                        throw new InvalidOperationException($"Unexpected pixel format: {pixelFormat}.");
                    }

                    var gcValues = new XGCValues();
                    var gc       = LibX11.XCreateGC(image.Display, image.PixmapId, 0, ref gcValues);
                    try
                    {
                        LibX11.XPutImage(image.Display, image.PixmapId, gc, xBitmap.XImage, 0, 0, 0, 0, (uint)width, (uint)height);
                    }
                    finally
                    {
                        LibX11.XFreeGC(image.Display, gc);
                    }
                }
            }
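The converter names suggest the target format is premultiplied ARGB (PARGB), which X11 compositing paths typically expect. Per pixel that means scaling each colour channel by the alpha value, roughly as in this sketch (an inference from the converter name, not the library's actual code):

            // Per-channel premultiply implied by a PARGB target: approximately round(channel * alpha / 255).
            static byte Premultiply(byte channel, byte alpha)
            {
                return (byte)((channel * alpha + 127) / 255);
            }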
Example #17
        private static void SendTestPattern(object state)
        {
            lock (_sendTestPatternTimer)
            {
                unsafe
                {
                    if (OnTestPatternSampleReady != null)
                    {
                        var stampedTestPattern = _testPattern.Clone() as System.Drawing.Image;
                        AddTimeStampAndLocation(stampedTestPattern, DateTime.UtcNow.ToString("dd MMM yyyy HH:mm:ss:fff"), "Test Pattern");
                        var sampleBuffer = PixelConverter.BitmapToRGBA(stampedTestPattern as System.Drawing.Bitmap, _testPattern.Width, _testPattern.Height);

                        byte[] i420Buffer    = PixelConverter.RGBAtoYUV420Planar(sampleBuffer, _testPattern.Width, _testPattern.Height);
                        var    encodedBuffer = _vp8Encoder.Encode(i420Buffer, false);

                        _presentationTimestamp += VIDEO_TIMESTAMP_SPACING;

                        if (encodedBuffer != null)
                        {
                            OnTestPatternSampleReady?.Invoke(SDPMediaTypesEnum.video, VIDEO_TIMESTAMP_SPACING, encodedBuffer);
                        }

                        stampedTestPattern.Dispose();
                    }
                }
            }
        }
Example #18
        /// <summary>
        /// Get an appropriately defined 'null' texture, i.e. one which will always result in no shadows.
        /// </summary>
        public Texture GetNullShadowTexture(PixelFormat format)
        {
            foreach (var wr in this.nullTextureList)
            {
                if (wr.IsAlive)
                {
                    var tex = (Texture)wr.Target;
                    if (format == tex.Format)
                    {
                        // Ok, a match
                        return(tex);
                    }
                }
            }

            // not found, create a new one
            // A 1x1 texture of the correct format, not a render target
            var baseName  = "Axiom/ShadowTextureNull";
            var targName  = baseName + this.count++;
            var shadowTex = TextureManager.Instance.CreateManual(targName, "", TextureType.TwoD, 1, 1, 1, 0,
                                                                 TextureUsage.Default);

            this.nullTextureList.Add(new WeakReference(shadowTex));

            // Populate the texture based on format
            shadowTex.GetBuffer().Lock(BufferLocking.Discard);
            var box = shadowTex.GetBuffer().CurrentLock;

            //set high values across all bytes of the format
            PixelConverter.PackColor(1.0f, 1.0f, 1.0f, 1.0f, format, box.Data);

            shadowTex.GetBuffer().Unlock();

            return(shadowTex);
        }
Example #19
        public unsafe void WrongSizeI420ToBGRTest()
        {
            int width  = 720;
            int height = 405;

            byte[] i420 = new byte[width * height * 3 / 2];
            Assert.Throws <ApplicationException>(() => PixelConverter.I420toBGR(i420, width, height, out _));
        }
Example #20
        /// <summary>
        /// A red chromakey filter example for .png images.
        /// <para>
        /// ffmpeg -i <paramref name="input"/> -vf chromakey=red:0.1:0.0 <paramref name="output"/>
        /// </para>
        /// </summary>
        /// <param name="input"></param>
        /// <param name="output"></param>
        public unsafe PngChromekeyFilter(string input, string output)
        {
            using (MediaReader reader = new MediaReader(input))
                using (MediaWriter writer = new MediaWriter(output))
                {
                    var videoIndex = reader.Where(_ => _.Codec.AVCodecContext.codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO).First().Index;

                    // init filter
                    int        height              = reader[videoIndex].Codec.AVCodecContext.height;
                    int        width               = reader[videoIndex].Codec.AVCodecContext.width;
                    int        format              = (int)reader[videoIndex].Codec.AVCodecContext.pix_fmt;
                    AVRational time_base           = reader[videoIndex].TimeBase;
                    AVRational sample_aspect_ratio = reader[videoIndex].Codec.AVCodecContext.sample_aspect_ratio;

                    MediaFilterGraph filterGraph = new MediaFilterGraph();
                    filterGraph.AddVideoSrcFilter(new MediaFilter(MediaFilter.VideoSources.Buffer), width, height, (AVPixelFormat)format, time_base, sample_aspect_ratio)
                        .LinkTo(0, filterGraph.AddFilter(new MediaFilter("chromakey"), "red:0.1:0.0"))
                        .LinkTo(0, filterGraph.AddVideoSinkFilter(new MediaFilter(MediaFilter.VideoSinks.Buffersink)));
                    filterGraph.Initialize();

                    // add stream by reader and init writer
                    writer.AddStream(reader[videoIndex]);
                    writer.Initialize();

                    // init video frame format converter by dstcodec
                    PixelConverter pixelConverter = new PixelConverter(writer[0].Codec);


                    foreach (var srcPacket in reader.ReadPacket())
                    {
                        foreach (var srcFrame in reader[videoIndex].ReadFrame(srcPacket))
                        {
                            filterGraph.Inputs.First().WriteFrame(srcFrame);
                            foreach (var filterFrame in filterGraph.Outputs.First().ReadFrame())
                            {
                                // filterFrame.ToMat() can be used to get the output image directly, without the need for a writer.
                                //using EmguFFmpeg.EmguCV;
                                //using (var mat = filterFrame.ToMat())
                                //{
                                //    mat.Save(output);
                                //}

                                foreach (var dstFrame in pixelConverter.Convert(filterFrame))
                                {
                                    foreach (var dstPacket in writer[0].WriteFrame(dstFrame))
                                    {
                                        writer.WritePacket(dstPacket);
                                    }
                                }
                            }
                        }
                    }

                    // flush codec cache
                    writer.FlushMuxer();
                }
        }
Example #21
        public unsafe void ConvertOddDimensionI420ToBGRTest()
        {
            int width  = 4;
            int height = 3;

            byte[] i420 = new byte[20];
            byte[] bgr  = PixelConverter.I420toBGR(i420, width, height, out _);

            Assert.NotNull(bgr);
            Assert.Equal(36, bgr.Length);
        }
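The 20-byte input and 36-byte output follow from the I420 layout: a full-resolution luma plane plus two chroma planes whose dimensions are the width and height rounded up to even and halved, while the 24bpp BGR output is simply 3 bytes per pixel. The sketch below shows that arithmetic (inferred from these two tests, not taken from the library itself) and why the width * height * 3 / 2 shortcut in the earlier WrongSizeI420ToBGRTest is the wrong size once a dimension is odd:

        // I420 buffer size: full-resolution Y plane plus two quarter-resolution chroma planes,
        // with odd dimensions rounded up for the chroma planes.
        static int I420BufferSize(int width, int height)
        {
            int chromaWidth  = (width + 1) / 2;
            int chromaHeight = (height + 1) / 2;
            return width * height + 2 * (chromaWidth * chromaHeight);
        }

        // I420BufferSize(4, 3)     == 12 + 2 * (2 * 2)       == 20      (matches the test input above)
        // 4 * 3 * 3                                           == 36      (24bpp BGR output length)
        // I420BufferSize(720, 405) == 291600 + 2 * 73080      == 437760, not 720 * 405 * 3 / 2 == 437400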
Example #22
 /// <summary>
 /// Convert a Bgr24 mat to a video frame in <paramref name="dstFormat"/>.
 /// <para>
 /// NOTE: only CV_8UC3 (Bgr24) Mat input is supported!
 /// </para>
 /// </summary>
 /// <param name="mat">source mat; must be in Bgr24 format</param>
 /// <param name="dstFormat">destination video frame pixel format</param>
 /// <returns></returns>
 public static VideoFrame ToVideoFrame(this Mat mat, AVPixelFormat dstFormat = AVPixelFormat.AV_PIX_FMT_BGR24)
 {
     if (dstFormat != AVPixelFormat.AV_PIX_FMT_BGR24)
     {
         using (PixelConverter converter = new PixelConverter(dstFormat, mat.Width, mat.Height))
         {
             return(converter.ConvertFrame(MatToVideoFrame(mat)));
         }
     }
     return(MatToVideoFrame(mat));
 }
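A hedged usage sketch for the extension above, assuming a Bgr24 (CV_8UC3) Mat loaded with Emgu.CV; the file name and the load call are illustrative assumptions:

 // Illustrative only: load an image as a Bgr24 Mat and convert it to a YUV420P video frame.
 var mat = Emgu.CV.CvInvoke.Imread("input.png", Emgu.CV.CvEnum.ImreadModes.Color);
 var yuvFrame = mat.ToVideoFrame(AVPixelFormat.AV_PIX_FMT_YUV420P);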
Example #23
        Control createControl(Composite parent)
        {
            PixelConverter converter = new PixelConverter(parent);

            var composite = new Composite(parent, SWT.NONE);

            composite.setFont(parent.getFont());

            var layout = new GridLayout();

            layout.numColumns = 2;
            composite.setLayout(layout);

            //
            // Label
            //
            var label = new Label(composite, SWT.LEFT | SWT.WRAP);

            label.setFont(composite.getFont());
            label.setText(Messages.librariesPreferencesLabelText);

            var gd = new GridData(GridData.HORIZONTAL_ALIGN_FILL);

            gd.horizontalSpan    = 2;
            gd.verticalAlignment = GridData.BEGINNING;
            label.setLayoutData(gd);

            //
            // Table
            //
            var tableComposite = new Composite(composite, SWT.NONE);

            tableComposite.setFont(composite.getFont());
            var tableColumnLayout = new TableColumnLayout();

            tableComposite.setLayout(tableColumnLayout);

            var table = new Table(tableComposite, SWT.BORDER | SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL | SWT.CHECK);

            table.setFont(composite.getFont());
            table.setHeaderVisible(false);
            table.setLinesVisible(false);
            var column = new TableColumn(table, SWT.NONE);

            tableColumnLayout.setColumnData(column, new ColumnWeightData(100, false));

            // Table viewer
            tableViewer = new CheckboxTableViewer(table);
            tableViewer.setContentProvider(tableContentProvider);
            tableViewer.setCheckStateProvider(tableCheckStateProvider);
            tableViewer.setLabelProvider(tableLabelProvider);
            tableViewer.addCheckStateListener(tableCheckStateListener);
            tableViewer.addSelectionChangedListener(event => {
Example #24
 private static Mat VideoFrameToMat(VideoFrame frame)
 {
     if ((AVPixelFormat)frame.AVFrame.format != AVPixelFormat.AV_PIX_FMT_BGRA)
     {
         using (VideoFrame dstFrame = new VideoFrame(AVPixelFormat.AV_PIX_FMT_BGRA, frame.AVFrame.width, frame.AVFrame.height))
             using (PixelConverter converter = new PixelConverter(dstFrame))
             {
                 return(BgraToMat(converter.ConvertFrame(frame)));
             }
     }
     return(BgraToMat(frame));
 }
Example #25
        protected void Generate()
        {
            var   julia  = new Julia(this.globalReal, this.globalImag, this.globalTheta);
            float scale  = 2.5f;
            float vcut   = 29.0f;
            float vscale = 1.0f / vcut;

            HardwarePixelBuffer buffer = this.ptex.GetBuffer(0, 0);

            LogManager.Instance.Write("Volume Texture Sample [info]: HardwarePixelBuffer " + buffer.Width + "x" + buffer.Height);

            buffer.Lock(BufferLocking.Normal);
            PixelBox pb = buffer.CurrentLock;

            LogManager.Instance.Write("Volume Texture Sample [info]: PixelBox " + pb.Width + "x" + pb.Height + "x" + pb.Depth);

            unsafe
            {
                var pbptr = (BufferBase)pb.Data.Clone();
                for (int z = pb.Front; z < pb.Back; z++)
                {
                    for (int y = pb.Top; y < pb.Bottom; y++)
                    {
                        pbptr += pb.Left * sizeof(uint);
                        for (int x = pb.Left; x < pb.Right; x++)
                        {
                            if (z == pb.Front || z == (pb.Back - 1) || y == pb.Top || y == (pb.Bottom - 1) || x == pb.Left ||
                                x == (pb.Right - 1))
                            {
                                pbptr.ToUIntPointer()[0] = 0;
                            }
                            else
                            {
                                float val = julia.Eval(((float)x / pb.Width - 0.5f) * scale, ((float)y / pb.Height - 0.5f) * scale,
                                                       ((float)z / pb.Depth - 0.5f) * scale);
                                if (val > vcut)
                                {
                                    val = vcut;
                                }

                                PixelConverter.PackColor((float)x / pb.Width, (float)y / pb.Height, (float)z / pb.Depth,
                                                         (1.0f - (val * vscale)) * 0.7f, PixelFormat.A8R8G8B8, pbptr);
                            }
                            pbptr++;
                        }
                        pbptr += (pb.RowPitch - pb.Right) * sizeof(uint);
                    }
                    pbptr += pb.SliceSkip * sizeof(uint);
                }
                buffer.Unlock();
            }
        }
Example #26
        /// <summary>
        /// </summary>
        /// <param name="src"> </param>
        /// <param name="dstBox"> </param>
        public override void BlitFromMemory(PixelBox src, Media.BasicBox dstBox)
        {
            if (!this._buffer.Contains(dstBox))
            {
                throw new ArgumentOutOfRangeException("Destination box out of range, GLESHardwarePixelBuffer.BlitFromMemory");
            }

            PixelBox scaled;

            if (src.Width != dstBox.Width || src.Height != dstBox.Height || src.Depth != dstBox.Depth)
            {
                LogManager.Instance.Write("[GLESHardwarePixelBuffer] Scale to destination size.");
                // Scale to destination size. Use DevIL and not iluScale because ILU screws up for
                // floating point textures and cannot cope with 3D images.
                // This also does pixel format conversion if needed
                AllocateBuffer();
                scaled = this._buffer.GetSubVolume(dstBox);
                Image.Scale(src, scaled, ImageFilter.Bilinear);
            }
            else if ((src.Format != Format) || ((GLESPixelUtil.GetGLOriginFormat(src.Format) == 0) && (src.Format != PixelFormat.R8G8B8)))
            {
                LogManager.Instance.Write("[GLESHardwarePixelBuffer] Extents match, but format is not accepted as valid source format for GL.");
                LogManager.Instance.Write("[GLESHardwarePixelBuffer] Source.Format = {0}, Format = {1}, GLOriginFormat = {2}", src.Format, Format, GLESPixelUtil.GetGLOriginFormat(src.Format));
                // Extents match, but format is not accepted as valid source format for GL
                // do conversion in temporary buffer
                AllocateBuffer();
                scaled = this._buffer.GetSubVolume(dstBox);

                PixelConverter.BulkPixelConversion(src, scaled);
            }
            else
            {
                LogManager.Instance.Write("[GLESHardwarePixelBuffer] No scaling or conversion needed.");
                scaled = src;
                if (src.Format == PixelFormat.R8G8B8)
                {
                    scaled.Format = PixelFormat.R8G8B8;
                    PixelConverter.BulkPixelConversion(src, scaled);
                }
                // No scaling or conversion needed
                // Set extents for upload
                scaled.Left   = dstBox.Left;
                scaled.Right  = dstBox.Right;
                scaled.Top    = dstBox.Top;
                scaled.Bottom = dstBox.Bottom;
                scaled.Front  = dstBox.Front;
                scaled.Back   = dstBox.Back;
            }

            Upload(scaled, dstBox);
            FreeBuffer();
        }
Example #27
        private static void RoundTripNoEncoding()
        {
            int width  = 32;
            int height = 32;

            // Create dummy bitmap.
            byte[] srcRgb = new byte[width * height * 3];
            for (int row = 0; row < 32; row++)
            {
                for (int col = 0; col < 32; col++)
                {
                    int index = row * width * 3 + col * 3;

                    int red   = (row < 16 && col < 16) ? 255 : 0;
                    int green = (row < 16 && col > 16) ? 255 : 0;
                    int blue  = (row > 16 && col < 16) ? 255 : 0;

                    srcRgb[index]     = (byte)red;
                    srcRgb[index + 1] = (byte)green;
                    srcRgb[index + 2] = (byte)blue;
                }
            }

            //Console.WriteLine(srcRgb.HexStr());

            unsafe
            {
                fixed(byte *src = srcRgb)
                {
                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap(width, height, srcRgb.Length / height, PixelFormat.Format24bppRgb, (IntPtr)src);
                    bmpImage.Save("test-source.bmp");
                    bmpImage.Dispose();
                }
            }

            // Convert bitmap to i420.
            byte[] i420Buffer = PixelConverter.RGBtoI420(srcRgb, width, height);

            Console.WriteLine($"Converted rgb to i420.");

            byte[] rgbResult = PixelConverter.I420toRGB(i420Buffer, width, height);

            unsafe
            {
                fixed(byte *s = rgbResult)
                {
                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap(width, height, rgbResult.Length / height, PixelFormat.Format24bppRgb, (IntPtr)s);
                    bmpImage.Save("test-result.bmp");
                    bmpImage.Dispose();
                }
            }
        }
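Constructing the Bitmap with stride srcRgb.Length / height only works here because 32 * 3 = 96 bytes is already a multiple of 4; GDI+ requires 24bpp scanlines to be 4-byte aligned, so for arbitrary widths the stride needs padding. A small sketch of that padding calculation (a general note, not part of the sample above):

        // Stride for a 24bpp GDI+ bitmap: round 3 * width up to the next multiple of 4.
        static int PaddedStride24bpp(int width)
        {
            return (3 * width + 3) & ~3; // e.g. width 32 -> 96, width 30 -> 92 (90 rounded up)
        }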
Example #28
 private TextView GetTextView()
 {
     var param = PixelConverter.GetParams(ViewGroup.LayoutParams.MatchParent, ViewGroup.LayoutParams.MatchParent,
                                          10, 0, 10, 20);
     var randIndex =
         (ViewModel.CurrentNumber +
          new Random(unchecked ((int)DateTime.Now.Ticks)).Next(1, ViewModel.Database.Count)) %
         ViewModel.Database.Count;
     var textView = new TextView(this)
     {
         TextSize         = 20,
         LayoutParameters = param,
         Text             = $"{ViewModel.Database[ViewModel.Answer ? ViewModel.CurrentNumber : randIndex].ImageName}",
Example #29
 public Mp4VideoWriter AddVideo(int width, int height, int fps)
 {
     if (writer.Where(_ => _.Codec.Type == AVMediaType.AVMEDIA_TYPE_VIDEO).Count() == 0)
     {
         Height = height;
         Width  = width;
         FPS    = fps;
         var st = writer.AddStream(MediaEncoder.CreateVideoEncode(writer.Format, width, height, fps));
         videoIndex     = writer.Count() - 1;
         pixelConverter = new PixelConverter(st.Codec);
     }
     return(this);
 }
Example #30
        private void CreateLastStat(LinearLayout viewLastStat)
        {
            using var bitmapLastStat = Bitmap.CreateBitmap(
                      Resources.DisplayMetrics.WidthPixels - PixelConverter.DpToPX(20),
                      PixelConverter.DpToPX(70), Bitmap.Config.Argb4444);
            using var canvasLastStat = new Canvas(bitmapLastStat);
            var lastStat = new DrawStatistics(canvasLastStat);

            lastStat.DrawBackground(6, 6, Paints.Background, Paints.Border, Paints.Gradient);
            lastStat.ProgressLine(ViewModel.True ?? 0, ViewModel.False ?? 1, StatisticsFragment.LightColor,
                                  StatisticsFragment.DarkColor, Paints.BackgroundLine);
            viewLastStat.Background = new BitmapDrawable(Resources, bitmapLastStat);
        }