Crop an image.

The filter crops an image, producing a new image that contains only the specified rectangle of the original image.

The filter accepts 8 and 16 bpp grayscale images and 24, 32, 48 and 64 bpp color images for processing.

Sample usage:

// create filter
Crop filter = new Crop( new Rectangle( 75, 75, 320, 240 ) );

// apply the filter
Bitmap newImage = filter.Apply( image );

Initial image:

Result image:

Inheritance: BaseTransformationFilter
Example #1
0
        /// <summary>
        ///   Rebuilds <c>imageFilter</c> from the current settings: a canvas crop
        ///   (which keeps the original image size and fills the outside area) when
        ///   <c>originalSize</c> is set, otherwise a plain crop to <c>region</c>.
        /// </summary>
        private void SetFilter()
        {
            ImageType = ImageTypes.ARgb32bpp;

            if (!originalSize)
            {
                // Plain crop: the result is exactly the selected region.
                imageFilter = new Af.Crop(region);
                return;
            }

            // Canvas crop: result keeps the source dimensions; pixels outside
            // the region are filled with the configured color.
            Af.CanvasCrop canvasCrop = new Af.CanvasCrop(region);
            canvasCrop.FillColorRGB = color;
            imageFilter = canvasCrop;
        }
        /// <summary>
        ///   Demonstrates applying a Haar wavelet transform to an image and then
        ///   cropping out the top-left quadrant of the transformed result.
        /// </summary>
        public void Example1()
        {
            // Obtain a working copy of the Lena test image
            Bitmap image = Accord.Imaging.Image.Clone(Properties.Resources.lena512);

            // Set up a one-level Haar Wavelet transform filter
            var wavelet = new WaveletTransform(new Haar(1));

            // Run the wavelet transformation over the image
            Bitmap transformed = wavelet.Apply(image);

            // Show on the screen
            //ImageBox.Show(transformed);
            Assert.IsNotNull(transformed);

            // Keep only the top-left quadrant of the transformed image
            var quadrant = new Rectangle(0, 0, image.Width / 2, image.Height / 2);
            var cropFilter = new Crop(quadrant);

            Bitmap cropped = cropFilter.Apply(transformed);

            // Show on the screen
            //ImageBox.Show(cropped);
            Assert.IsNotNull(cropped);
        }
 /// <summary>
 ///   Constructs a new <see cref="MatchingTracker"/> object tracker.
 /// </summary>
 /// 
 public MatchingTracker()
 {
     // 0 is the similarity threshold passed to the exhaustive template
     // matcher — presumably "accept any match"; confirm against AForge docs.
     matcher = new ExhaustiveTemplateMatching(0);
     // Start with an empty crop window; the actual region is expected to be
     // assigned once a tracking window is known.
     crop = new Crop(Rectangle.Empty);
     trackingObject = new TrackingObject();
 }
Example #4
0
        /// <summary>
        ///   Camshift algorithm: runs one mean-shift step over the given frame,
        ///   derives the tracked object's position, size and angle from the
        ///   resulting central moments, and adapts the search window for the
        ///   next frame.
        /// </summary>
        /// 
        private void camshift(UnmanagedImage frame)
        {
            int width = frame.Width;
            int height = frame.Height;

            // Full-frame bounds, used later to clamp the extraction rectangle.
            Rectangle area = new Rectangle(0, 0, width, height);

            // Compute tracking object center (search-window midpoint),
            // clamped to lie inside the frame.
            float objX = Math.Max(0, Math.Min(searchWindow.X + searchWindow.Width * 0.5f, width));
            float objY = Math.Max(0, Math.Min(searchWindow.Y + searchWindow.Height * 0.5f, height));
            float objAngle;


            // Compute mean shift; this is expected to also move searchWindow —
            // confirm in meanShift's implementation.
            CentralMoments moments = meanShift(frame);

            // Object size and orientation derived from the central moments.
            SizeF objSize = moments.GetSizeAndOrientation(out objAngle);


            // Degenerate result (NaN or sub-pixel size) means tracking was
            // lost — reset the tracker and bail out for this frame.
            if (Single.IsNaN(objSize.Width) || Single.IsNaN(objSize.Height) ||
                Single.IsNaN(objAngle) || objSize.Width < 1 || objSize.Height < 1)
            {
                Reset();
                return;
            }

            // Truncate to integer coordinates
            IntPoint center = new IntPoint((int)objX, (int)objY);

            // Axis-aligned rectangle centered on the object.
            Rectangle rec = new Rectangle((int)(objX - objSize.Width * 0.5f),
                                          (int)(objY - objSize.Height * 0.5f),
                                          (int)objSize.Width, (int)objSize.Height);

            angleHistory.Push(objAngle);

            // Create tracking object; angle is optionally smoothed over the
            // recorded angle history.
            IsSteady = checkSteadiness();
            trackingObject.Rectangle = rec;
            trackingObject.Center = center;
            trackingObject.Angle = smooth ? (float)angleHistory.Mean : objAngle;

            if (extract)
            {
                Rectangle inner = rec;

                // Smooth the extraction rectangle using running means of the
                // recent positions and sizes to reduce jitter.
                xHistory.Push(rec.X);
                yHistory.Push(rec.Y);
                widthHistory.Push(rec.Width);
                heightHistory.Push(rec.Height);

                inner.X = (int)xHistory.Mean;
                inner.Y = (int)yHistory.Mean;
                inner.Width = (int)widthHistory.Mean;
                inner.Height = (int)heightHistory.Mean;

                // Clamp the rectangle to the frame before cropping.
                inner.Intersect(area);

                Crop crop = new Crop(inner);

                // TODO: Perform rotation of the extracted object
                //RotateNearestNeighbor rotate = new RotateNearestNeighbor((objAngle - Math.PI / 2) * 180f / Math.PI, true);
                //trackingObject.Image = rotate.Apply(crop.Apply(frame));

                trackingObject.Image = crop.Apply(frame);
            }

            // Compute a new search window size: slightly wider than the object;
            // height either follows a fixed aspect ratio or is scaled the same way.
            searchWindow.Width = (int)(1.1f * objSize.Width);
            searchWindow.Height = (int)((aspectRatio != 0) ?
                (aspectRatio * objSize.Width) : (1.1f * objSize.Height));
        }
Example #5
0
        /// <summary>
        ///   Extracts a hand region from a 16bpp mask: crops the candidate
        ///   rectangle, stretches its intensity range, thresholds it, keeps the
        ///   largest blob, and shows a 25x25 8bpp version in the hand picture box.
        /// </summary>
        private void captureHand(UnmanagedImage mask, Rectangle rect, PictureBox pbArm, PictureBox pbHand)
        {
            // Cut the candidate region out of the mask.
            var handImage = new Crop(rect).Apply(mask);

            var pixelValues = handImage.Collect16bppPixelValues(handImage.CollectActivePixels());
            if (pixelValues.Length == 0)
                return;

            // Stretch the active intensity range to the full 16-bit scale.
            ushort maxValue = Matrix.Max(pixelValues);
            var levels = new LevelsLinear16bpp
            {
                InGray = new IntRange(0, maxValue),
                OutGray = new IntRange(0, 65535)
            };
            levels.ApplyInPlace(handImage);


            // pbArm.Image = handImage.ToManagedImage();


            // Binarize at a fixed cutoff to isolate the brightest region.
            var handMask = new Threshold(30000).Apply(handImage);

            var handMask8bit = Accord.Imaging.Image.Convert16bppTo8bpp(handMask.ToManagedImage());

            // Find the blobs of the thresholded mask, largest first.
            var blobCounter = new BlobCounter { ObjectsOrder = ObjectsOrder.Area };
            blobCounter.ProcessImage(handMask8bit);
            var blobs = blobCounter.GetObjectsInformation();
            if (blobs.Length == 0)
                return;

            // Keep only the pixels that survived the threshold.
            var intersect = new Intersect { UnmanagedOverlayImage = handMask };
            intersect.ApplyInPlace(handImage);

            // Crop to the largest blob and normalize its size to 25x25.
            handImage = new Crop(blobs[0].Rectangle).Apply(handImage);
            handImage = new ResizeNearestNeighbor(25, 25).Apply(handImage);

            var handBitmap = Accord.Imaging.Image.Convert16bppTo8bpp(handImage.ToManagedImage());
            pbHand.Image = handBitmap;
        }
Example #6
0
        /// <summary>
        ///   New-frame handler: inverts the incoming frame, segments candidate
        ///   arm regions near the tracked head via thresholding and blob
        ///   analysis, forwards the two best regions to captureHand, and
        ///   replaces the displayed frame with the resulting mask.
        ///   NOTE(review): frames appear to be 16bpp grayscale
        ///   (Collect16bppPixelValues is used) — confirm with the video source.
        /// </summary>
        private void videoSourcePlayer1_NewFrame(object sender, ref Bitmap image)
        {
            // Invert the frame in place; all following processing works on the
            // inverted copy held in 'ui'.
            Invert inv = new Invert();
            inv.ApplyInPlace(image);

            UnmanagedImage ui = UnmanagedImage.FromManagedImage(image);

            pictureBox1.Image = image;


            // Nothing to do until the tracker has locked onto an object.
            if (controller.Tracker.TrackingObject == null)
                return;

            if (controller.Tracker.TrackingObject.IsEmpty)
                return;

            // Crop the tracked (head) region out of the inverted frame.
            var rect = controller.Tracker.TrackingObject.Rectangle;
            Crop crop = new Crop(rect);

            UnmanagedImage head = crop.Apply(ui);

            // Sample the single center pixel of the head crop to estimate a
            // reference intensity for thresholding.
            var points = new List<IntPoint>() { new IntPoint(head.Width / 2, head.Height / 2) };
            var pps = head.Collect16bppPixelValues(points);

            double mean = pps.Mean();

            // Threshold slightly above the head intensity to build a binary mask.
            double cutoff = mean + 15;
            Threshold t = new Threshold((int)cutoff);
            var mask = t.Apply(ui);



            // Stretch everything above the cutoff to the full 16-bit range
            // (applied after the mask was taken, so the mask is unaffected).
            LevelsLinear16bpp levels = new LevelsLinear16bpp();
            levels.InGray = new IntRange((int)cutoff, 65535);
            levels.OutGray = new IntRange(0, 65535);
            levels.ApplyInPlace(ui);


            var mask8bit = Accord.Imaging.Image.Convert16bppTo8bpp(mask.ToManagedImage());



            // Locate connected regions in the mask, largest first.
            BlobCounter bc = new BlobCounter();
            bc.ObjectsOrder = ObjectsOrder.Area;
            bc.ProcessImage(mask8bit);
            var blobs = bc.GetObjectsInformation();

            // Undo the earlier inversion of the managed frame, then combine the
            // level-stretched image with the binary mask.
            inv.ApplyInPlace(image);
            Intersect intersect = new Intersect();
            intersect.UnmanagedOverlayImage = mask;
            mask = intersect.Apply(ui);

            List<Rectangle> rects = new List<Rectangle>();

            // Extract the uppermost largest blobs: keep blobs whose top edge is
            // vertically close to the tracked center (normalized by the tracked
            // object's area) and that are big enough to be an arm.
            for (int i = 0; i < blobs.Length; i++)
            {
                double dx = (blobs[i].Rectangle.Top - controller.Tracker.TrackingObject.Center.Y);
                double d = (dx * dx) / controller.Tracker.TrackingObject.Area;
                if (d < 2 && blobs[i].Area > 1000)
                    rects.Add(blobs[i].Rectangle);
            }

            // Order via the 'compare' comparison — presumably left-to-right so the
            // first rectangle is the left hand; verify against 'compare'.
            rects.Sort(compare);

            if (rects.Count > 0)
            {
                captureHand(mask, rects[0], pbLeftArm, pbLeftHand);
            }
            if (rects.Count > 1)
            {
                captureHand(mask, rects[1], pbRightArm, pbRightHand);

            }

            // Outline the detected regions (on the 8bpp preview mask only).
            RectanglesMarker marker = new RectanglesMarker(rects);
            marker.MarkerColor = Color.White;
            marker.ApplyInPlace(mask8bit);

            // Replace the displayed frame with the combined mask image.
            image = mask.ToManagedImage();
        }