Example #1
File: img.cs Project: ttdoucet/image
    // magnitude of the complex field value at (r, c)
    double mag(OrientationField f, int r, int c)
    {
        double a = f.Re[r, c];
        double b = f.Im[r, c];

        return(Math.Sqrt(a * a + b * b));
    }
Example #2
        public void CreatTemplateTest()
        {
            /* double[,] img1 = ImageHelper.LoadImage<double>(Resources._1_1);
             * double[,] img2 = ImageHelper.LoadImage<double>(Resources._1_2*/
            // load the sample image as doubles and normalize it
            double[,] imgDoubles = ImageHelper.LoadImage<double>(Resources._1_1);

            imgDoubles.DoNormalization(100, 100);

            // build the block-wise orientation field (block size 16) and
            // expand it to an orientation matrix of the image dimensions
            int[,] imgInts = imgDoubles.Select2D((x => (int)x));
            OrientationField orf = new OrientationField(imgInts, 16);

            double[,] orient = orf.GetOrientationMatrix(imgInts.GetLength(0), imgInts.GetLength(1));

            // estimate local ridge frequencies and run the enhancement filter
            var freqMatrx = LocalRidgeFrequency.GetFrequencies(imgDoubles, orient);

            var res = ImageEnhancement.Enhance(imgDoubles, orient, freqMatrx, 32, 8);


            /* var img = ImageHelper.LoadImage<int>(Resources._1_1);
             *
             * int h = img.GetLength(0);
             * int w = img.GetLength(1);
             *
             * int[] withoutHolesAndIslands = HolesAndIslandsResolver.ResolveHolesAndIslands(
             *   Array2Dto1D(ImageProcessing.Binarization.ImageBinarization.Binarize2D(img, 128)),
             *   16,
             *   9,
             *   w, h);*/
        }
Example #3
File: img.cs Project: ttdoucet/image
    private OrientationField orientationField(Matrix image)
    {
        OrientationField j = new OrientationField();

        Matrix vDiff = new float[,] {
            { 0 },
            { -1 },
            { 1 }
        };

        Matrix hDiff = new float[,] {
            { 0, -1, 1 }
        };

        // horizontal component of gradient
        Matrix gX = ImageUtils.Convolve(image, hDiff);

        // vertical component of gradient
        Matrix gY = ImageUtils.Convolve(image, vDiff);

        // The complex quantity gX - i gY is the gradient matrix. (The
        // imaginary part is negative because the y axis increases
        // downward, yet we still measure angles positive ccw.) For
        // averaging directions, we create a complex number j which is
        // the square of g.  This effectively doubles the angles and
        // makes 180-degree differing angles the same.

        j.Im = -2 * termwiseMultiply(gX, gY);
        j.Re = termwiseMultiply(gX, gX) - termwiseMultiply(gY, gY);
        return(j);
    }
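
The squared-gradient trick used in orientationField above is worth spelling out: with g = gX - i gY, squaring gives g^2 = (gX^2 - gY^2) - 2i gX gY, which is exactly what the code stores in j.Re and j.Im. Squaring doubles the angle, so a gradient and its 180-degree opposite map to the same complex value and can be averaged without cancelling. A minimal self-contained sketch of that property, using System.Numerics.Complex and made-up sample values rather than the project's Matrix type:

    using System;
    using System.Numerics;

    class DoubledAngleDemo
    {
        static void Main()
        {
            // two gradients pointing in opposite directions (180 degrees apart)
            Complex g1 = new Complex(3.0, -4.0);
            Complex g2 = -g1;

            // squaring doubles the argument, so both land on the same value
            Complex j1 = g1 * g1;          // (-7, -24)
            Complex j2 = g2 * g2;          // (-7, -24)
            Console.WriteLine(j1 == j2);   // True

            // halving the angle of the square recovers the direction modulo
            // 180 degrees, which is what radians() in Example #12 does
            double orientation = Math.Atan2(j1.Imaginary, j1.Real) / 2.0;
            Console.WriteLine(orientation * 180.0 / Math.PI);   // about -53.13
        }
    }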
Example #4
File: img.cs Project: ttdoucet/image
    double report(string tag, OrientationField f, int x, int y)
    {
        // degrees is along the normal, so we add 90 degrees to get the
        // orientation.
        double deg = degrees(f, y, x) + 90;

        Console.WriteLine("{4}: ({0}, {1}): {2:0.00} degrees, abs={3:0.00}",
                          x, y, deg, mag(f, y, x), tag);
        return(deg);
    }
Example #5
File: img.cs Project: ttdoucet/image
    private Matrix doImage(Matrix image)
    {
        originalImage  = image;
        displayedImage = image;

        // orientation field of the raw image, then smoothed at three radii
        j      = orientationField(image);
        jAve4  = filterField(j, 4);
        jAve8  = filterField(j, 8);
        jAve16 = filterField(j, 16);

        Matrix result = fingerprintEnhance(image);

        return(result);
    }
Example #6
File: img.cs Project: ttdoucet/image
    OrientationField filterField(OrientationField f, int radius)
    {
// why does this affect the speed?  very strange.

//      const float variance = 1000.0f;
        const float variance = 100.0f;
//        const float variance = 3.0f;
//        const float variance = 0.33f;
        OrientationField jAve = new OrientationField();

        jAve.Re = filterGaussian(f.Re, variance, radius);
        jAve.Im = filterGaussian(f.Im, variance, radius);
        return(jAve);
    }
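
filterGaussian itself is not among these results; judging from the call, it low-pass filters one field component with the given variance and kernel radius. A hypothetical stand-in (an assumption, not the project's implementation) working on plain float[,] arrays with a separable, border-clamped Gaussian kernel:

    using System;

    static class GaussianFilterSketch
    {
        // Hypothetical replacement for filterGaussian: separable Gaussian
        // smoothing of a 2-D array, kernel width = 2 * radius + 1.
        public static float[,] Smooth(float[,] src, float variance, int radius)
        {
            // build and normalize the 1-D Gaussian kernel
            var kernel = new float[2 * radius + 1];
            float sum = 0f;
            for (int k = -radius; k <= radius; k++)
            {
                kernel[k + radius] = (float)Math.Exp(-k * k / (2.0 * variance));
                sum += kernel[k + radius];
            }
            for (int k = 0; k < kernel.Length; k++)
                kernel[k] /= sum;

            int rows = src.GetLength(0), cols = src.GetLength(1);
            var tmp = new float[rows, cols];
            var dst = new float[rows, cols];

            // horizontal pass, clamping indices at the borders
            for (int r = 0; r < rows; r++)
                for (int c = 0; c < cols; c++)
                {
                    float acc = 0f;
                    for (int k = -radius; k <= radius; k++)
                        acc += kernel[k + radius] * src[r, Math.Min(cols - 1, Math.Max(0, c + k))];
                    tmp[r, c] = acc;
                }

            // vertical pass
            for (int r = 0; r < rows; r++)
                for (int c = 0; c < cols; c++)
                {
                    float acc = 0f;
                    for (int k = -radius; k <= radius; k++)
                        acc += kernel[k + radius] * tmp[Math.Min(rows - 1, Math.Max(0, r + k)), c];
                    dst[r, c] = acc;
                }

            return dst;
        }
    }

Smoothing Re and Im separately is what makes the doubled-angle representation pay off: nearly opposite gradients reinforce instead of cancelling when their squares are averaged.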
Example #7
        public static double[,] CreatTemplateTest(double[,] imgDoubles)
        {
            imgDoubles.DoNormalization(100, 100);

            int[,] imgInts = imgDoubles.Select2D((x => (int)x));
            OrientationField orf = new OrientationField(imgInts, 16);

            double[,] orient = orf.GetOrientationMatrix(imgInts.GetLength(0), imgInts.GetLength(1));

            var freqMatrx = LocalRidgeFrequency.GetFrequencies(imgDoubles, orient);

            var res = ImageEnhancement.Enhance(imgDoubles, orient, freqMatrx, 32, 8);

            return(ImageBinarization.Binarize2D(res, 128));
        }
Example #8
        public static int[] ProjectionX(int xCentre, int yCentre, int[,] arr)
        {
            int fieldSizey = FieldSizex / 2;

            int[]            projX = new int[FieldSizex];
            OrientationField img   = new OrientationField(arr);
            var    angleOfX        = img.GetOrientation(xCentre, yCentre) - Math.PI / 2.0;
            Point  tmpPoint;
            double angleSin = Math.Sin(angleOfX);
            double angleCos = Math.Cos(angleOfX);

            for (int i = -FieldSizex / 2; i < FieldSizex / 2; i++)
            {
                projX[i + FieldSizex / 2] = 255;
                for (int j = -fieldSizey / 2; j < fieldSizey / 2; j++) // find the darkest
                {
                    double localSegment = Math.Sqrt(i * i + j * j);
                    if (Math.Abs(localSegment) > 0.000001)  //  double tolerance
                    {
                        angleSin = Math.Sin(angleOfX + Math.Asin(find_sin(1.0, 0.0, i, j)));
                        angleCos = Math.Cos(angleOfX + Math.Acos(find_cos(1.0, 0.0, i, j)));
                        tmpPoint = Turn(0, (int)Math.Round(localSegment), 0, 0, angleCos, angleSin);
                    }
                    else
                    {
                        tmpPoint.X = 0;
                        tmpPoint.Y = 0;
                    }

                    if (tmpPoint.X + xCentre < 0 || tmpPoint.X + xCentre >= arr.GetLength(0) || tmpPoint.Y + yCentre < 0 ||
                        tmpPoint.Y + yCentre >= arr.GetLength(1))
                    {
                        continue;
                    }

                    if (projX[i + FieldSizex / 2] >= arr[tmpPoint.X + xCentre, tmpPoint.Y + yCentre])
                    {
                        projX[i + FieldSizex / 2] = arr[tmpPoint.X + xCentre, tmpPoint.Y + yCentre];
                    }
                }
            }
            return(projX);
        }
Example #9
File: img.cs Project: ttdoucet/image
    protected Matrix markupImage(Matrix image, OrientationField narrow, OrientationField wide)
    {
        Matrix result = image.Clone();

        image.Apply(delegate(int r, int c){
            double n = mag(narrow, r, c);
            double w = mag(wide, r, c);
            // keep the pixel only where the narrow-scale response is strong
            // (> 50) yet clearly weaker than the wide-scale response
            if ((n > 50) && (n < 0.7 * w))
            {
                result[r, c] = image[r, c];
            }
            else
            {
                result[r, c] = 0;
            }
        }
                    );
        return(result);
    }
Example #10
        public void ImageEnhancmentTest()
        {
            var bmp = Resources.SampleFinger;

            double[,] imgDoubles = ImageHelper.LoadImage<double>(bmp);

            imgDoubles.DoNormalization(100, 100);

            int[,] imgInts = imgDoubles.Select2D((x => (int)x));
            OrientationField orf = new OrientationField(imgInts, 16);

            double[,] orient = orf.GetOrientationMatrix(imgInts.GetLength(0), imgInts.GetLength(1));

            var freqMatrx = LocalRidgeFrequency.GetFrequencies(imgDoubles, orient);

            var res  = ImageEnhancement.Enhance(imgDoubles, orient, freqMatrx, 32, 8);
            var bmp2 = ImageHelper.SaveArrayToBitmap(res);

            bmp2.Save("001.bmp", ImageHelper.GetImageFormatFromExtension("009.bmp"));
        }
Example #11
File: img.cs Project: ttdoucet/image
 double degrees(OrientationField f, int r, int c)
 {
     return(radians(f, r, c) * 180.0 / Math.PI);
 }
Example #12
File: img.cs Project: ttdoucet/image
 double radians(OrientationField f, int r, int c)
 {
     // the field stores doubled angles (the squared gradient), so halve to recover the angle
     return(Math.Atan2(f.Im[r, c], f.Re[r, c]) / 2);
 }
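
Putting the small helpers together: radians halves the Atan2 result because the field stores doubled angles, degrees converts to degrees, mag gives the strength of the local response, and report adds 90 degrees to turn the normal direction into the ridge orientation. A self-contained sketch that inlines those steps for a single, made-up field cell:

    using System;

    class OrientationReadout
    {
        static void Main()
        {
            // one cell of the doubled-angle field, e.g. the square of gradient (3, -4)
            double re = -7.0, im = -24.0;

            double strength = Math.Sqrt(re * re + im * im);          // mag()
            double normalDeg = Math.Atan2(im, re) / 2.0               // radians()
                               * 180.0 / Math.PI;                     // degrees()
            double orientationDeg = normalDeg + 90.0;                 // report() adds 90

            Console.WriteLine("strength={0:0.00}, orientation={1:0.00} degrees",
                              strength, orientationDeg);              // 25.00, 36.87
        }
    }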