Example #1
0
        public ImageDescription filter(ImageDescription inputImage)
        {
            // Resizes every selected channel independently by round-tripping
            // the channel through a grayscale bitmap and the GDI resizer.
            ImageDescription outputImage = new ImageDescription();
            outputImage.sizeX = newSizeX;
            outputImage.sizeY = newSizeY;

            foreach (ColorChannelEnum colorChannel in colorChannelsToFilter)
            {
                // Wrap the single channel as a standalone grayscale image.
                ImageDescription channelImage = new ImageDescription();
                channelImage.sizeX     = inputImage.sizeX;
                channelImage.sizeY     = inputImage.sizeY;
                channelImage.grayscale = true;
                channelImage.setColorChannel(ColorChannelEnum.Gray, inputImage.getColorChannel(colorChannel));

                // Resize via bitmap, then read the scaled pixels back out.
                Bitmap sourceBitmap  = ImageDescriptionUtil.convertToBitmap(channelImage);
                Bitmap resizedBitmap = ImageDescriptionUtil.resizeImage(sourceBitmap, newSizeX, newSizeY);
                ImageDescription resized = ImageDescriptionUtil.fromBitmap(resizedBitmap);
                resized.computeGrayscale();
                outputImage.setColorChannel(colorChannel, resized.gray);
            }

            // The output counts as grayscale only when the sole channel
            // requested was the gray channel itself.
            bool grayOnly = colorChannelsToFilter.Count == 1 && colorChannelsToFilter.Contains(ColorChannelEnum.Gray);
            if (grayOnly)
            {
                outputImage.grayscale = true;
            }
            return outputImage;
        }
Example #2
0
        public virtual ImageDescription filter(ImageDescription inputImage)
        {
            // Binarizes the grayscale channel: pixels at or above `threshold`
            // become 255, everything below becomes 0.
            inputImage.computeGrayscale();

            int sizeX = inputImage.sizeX;
            int sizeY = inputImage.sizeY;

            ImageDescription outputImage = new ImageDescription();
            outputImage.sizeX     = sizeX;
            outputImage.sizeY     = sizeY;
            outputImage.grayscale = true;

            byte[,] source = inputImage.gray;
            byte[,] result = new byte[sizeY, sizeX];
            outputImage.gray = result;

            for (int row = 0; row < sizeY; row++)
            {
                for (int col = 0; col < sizeX; col++)
                {
                    result[row, col] = source[row, col] >= threshold ? (byte)255 : (byte)0;
                }
            }
            return outputImage;
        }
Example #3
0
 private void setInputImageToContexts(ImageDescription inputImage)
 {
     // Use the raw input unless a pre-filter chain is configured, in which
     // case the chain's output becomes the current working image.
     currentInputImage = inputImageFilterChain == null
         ? inputImage
         : inputImageFilterChain.applyFiltering(inputImage);
     // Downstream consumers expect the grayscale channel to be populated.
     currentInputImage.computeGrayscale();
 }
Example #4
0
        public virtual ImageDescription filter(ImageDescription inputImage)
        {
            // Sobel edge detector: Gaussian smoothing followed by gradient
            // magnitude. The result goes into the Sobel channel; all input
            // channels are copied through unchanged.
            inputImage.computeGrayscale();

            int width  = inputImage.sizeX;
            int height = inputImage.sizeY;

            // 1. Smooth with a normalized 5x5 Gaussian kernel.
            float[,] gaussKernel = FilterBankUtil.generateNormalizedGaussConvolutionMatrix(sigma, 5);
            float[,] smoothed    = ImageDescriptionUtil.mirroredMarginConvolution(inputImage.gray, gaussKernel);

            // 2. Horizontal and vertical derivatives via Sobel kernels.
            float[,] gradX = ImageDescriptionUtil.mirroredMarginConvolution(smoothed, FilterBankUtil.normalizedSobelX);
            float[,] gradY = ImageDescriptionUtil.mirroredMarginConvolution(smoothed, FilterBankUtil.normalizedSobelY);

            // 3. Gradient magnitude, rounded to nearest and clamped to 255.
            byte[,] edgeChannel = new byte[height, width];
            for (int row = 0; row < height; row++)
            {
                for (int col = 0; col < width; col++)
                {
                    float magnitude = (float)Math.Sqrt(gradX[row, col] * gradX[row, col] + gradY[row, col] * gradY[row, col]);
                    edgeChannel[row, col] = magnitude < 255 ? (byte)(magnitude + 0.5f) : (byte)255;
                }
            }

            // Copy every channel from the input, then attach the Sobel result.
            ImageDescription outputImage = new ImageDescription();
            outputImage.sizeX = width;
            outputImage.sizeY = height;
            foreach (ColorChannelEnum colorChannel in Enum.GetValues(typeof(ColorChannelEnum)))
            {
                outputImage.setColorChannel(colorChannel, inputImage.getColorChannel(colorChannel));
            }
            outputImage.setColorChannel(ColorChannelEnum.Sobel, edgeChannel);

            return outputImage;
        }
Example #5
0
        public void trainWithBaseAlgorithm(EdgeDetectionAlgorithm algorithm, EdgeDetectionAlgorithm baseAlgorithm, int resizeFactor)
        {
            // Runs numberOfTrainingSetPasses passes over the training set. For each
            // file, baseAlgorithm's edge map is resized and attached to the
            // (resized) input image as an extra Layer channel before training
            // `algorithm` against the correspondingly resized ground truth.
            // Per-file loss, per-pass loss, timing and an ETA are logged.
            DateTime      trainingStart = DateTime.Now;
            float         totalLoss     = 0;
            List <String> fileList      = new List <string>(benchmark.getTrainingFilesPathList());

            int totalNumberOfFiles = numberOfTrainingSetPasses * fileList.Count;
            int totalIndex         = 0;

            for (int pass = 0; pass < numberOfTrainingSetPasses; pass++)
            {
                // Fresh shuffle each pass so the algorithm never sees a fixed order.
                ListUtils.Shuffle(fileList);
                int      index             = 1;
                float    totalPassLoss     = 0;
                DateTime trainingPassStart = DateTime.Now;
                foreach (string trainingFileName in fileList)
                {
                    DateTime start = DateTime.Now;

                    // BUG FIX: the progress line contained a corrupted token
                    // ("Pass: "******"/") that does not compile — apparently a
                    // secret-scrubber artifact triggered by the variable name
                    // `pass`. Restored the intended string concatenation.
                    Console.WriteLine("Pass: " + pass + "/" + numberOfTrainingSetPasses + ", " + index + "/" + fileList.Count + " Training file: " + Path.GetFileName(trainingFileName));
                    ImageDescription inputImage    = ImageFileHandler.loadFromPath(trainingFileName);
                    ImageDescription computedImage = baseAlgorithm.test(inputImage);

                    // Downscale the color input by resizeFactor.
                    ResizeFilter     resizeColor   = new ResizeFilter(inputImage.sizeX / resizeFactor, inputImage.sizeY / resizeFactor, ImageDescriptionUtil.colorChannels);
                    ImageDescription newInputImage = resizeColor.filter(inputImage);

                    // Ground truth is grayscale; resize it with a grayscale-only filter.
                    ImageDescription inputImageGroundTruth = ImageFileHandler.loadFromPath(benchmark.getTrainingFileGroundTruth(trainingFileName));
                    inputImageGroundTruth.computeGrayscale();
                    ResizeFilter     resizeGrayscale          = new ResizeFilter(inputImage.sizeX / resizeFactor, inputImage.sizeY / resizeFactor, ImageDescriptionUtil.grayscaleChannel);
                    ImageDescription newInputImageGroundTruth = resizeGrayscale.filter(inputImageGroundTruth);

                    // Feed the base algorithm's (resized) output as a Layer channel.
                    ImageDescription resizedComputed = resizeGrayscale.filter(computedImage);
                    newInputImage.setColorChannel(ColorChannelEnum.Layer, resizedComputed.gray);

                    float loss = algorithm.train(newInputImage, newInputImageGroundTruth);
                    totalLoss     += loss;
                    totalPassLoss += loss;
                    index++;
                    totalIndex++;

                    // ETA is a simple linear extrapolation from files processed so far.
                    double timeElapsed      = (DateTime.Now - start).TotalSeconds;
                    double timeElapsedSoFar = (DateTime.Now - trainingStart).TotalSeconds;
                    double estimatedTime    = (timeElapsedSoFar / totalIndex) * (totalNumberOfFiles - totalIndex);
                    Console.WriteLine("Loss: " + loss.ToString("0.00") + " Time: " + timeElapsed.ToString("0.00") + "s Time elapsed: "
                                      + timeElapsedSoFar.ToString("0.00") + "s ETA: " + estimatedTime.ToString("0.00") + "s");
                }
                // Typo fix: "tarining" -> "training" (local variable only).
                double trainingPassTimeElapsed = (DateTime.Now - trainingPassStart).TotalSeconds;
                Console.WriteLine("Pass took " + trainingPassTimeElapsed.ToString("0.00") + " sec. Pass loss: " + totalPassLoss.ToString("0.00")
                                  + " Avg loss: " + (totalPassLoss / (fileList.Count)).ToString("0.00"));
            }
            double totalTimeElapsed = (DateTime.Now - trainingStart).TotalSeconds;

            Console.WriteLine("Training took " + totalTimeElapsed.ToString("0.00") + " sec. Total loss: " + totalLoss.ToString("0.00")
                              + " Avg loss: " + (totalLoss / (totalNumberOfFiles)).ToString("0.00"));
        }
Example #6
0
        public void train(EdgeDetectionAlgorithm algorithm)
        {
            // Runs numberOfTrainingSetPasses passes over the training set,
            // training `algorithm` on each image against its black-and-white
            // converted ground truth. Per-file loss, per-pass loss, timing and
            // an ETA are logged to the console.
            BlackAndWhiteConverter blackAndWhiteConverter = new BlackAndWhiteConverter(1);

            DateTime      trainingStart = DateTime.Now;
            float         totalLoss     = 0;
            List <String> fileList      = new List <string>(benchmark.getTrainingFilesPathList());

            int totalNumberOfFiles = numberOfTrainingSetPasses * fileList.Count;
            int totalIndex         = 0;

            for (int pass = 0; pass < numberOfTrainingSetPasses; pass++)
            {
                // Fresh shuffle each pass so the algorithm never sees a fixed order.
                ListUtils.Shuffle(fileList);
                int      index             = 1;
                float    totalPassLoss     = 0;
                DateTime trainingPassStart = DateTime.Now;
                foreach (string trainingFileName in fileList)
                {
                    DateTime start = DateTime.Now;

                    // BUG FIX: the progress line contained a corrupted token
                    // ("Pass: "******"/") that does not compile — apparently a
                    // secret-scrubber artifact triggered by the variable name
                    // `pass`. Restored the intended string concatenation.
                    Console.WriteLine("Pass: " + pass + "/" + numberOfTrainingSetPasses + ", " + index + "/" + fileList.Count + " Training file: " + Path.GetFileName(trainingFileName));
                    ImageDescription inputImage            = ImageFileHandler.loadFromPath(trainingFileName);
                    ImageDescription inputImageGroundTruth = ImageFileHandler.loadFromPath(benchmark.getTrainingFileGroundTruth(trainingFileName));
                    inputImageGroundTruth.computeGrayscale();
                    // Ground truth is binarized (threshold 1) so every non-zero
                    // pixel counts as an edge.
                    inputImageGroundTruth = blackAndWhiteConverter.filter(inputImageGroundTruth);
                    float loss = algorithm.train(inputImage, inputImageGroundTruth);
                    totalLoss     += loss;
                    totalPassLoss += loss;
                    index++;
                    totalIndex++;

                    // ETA is a simple linear extrapolation from files processed so far.
                    double timeElapsed      = (DateTime.Now - start).TotalSeconds;
                    double timeElapsedSoFar = (DateTime.Now - trainingStart).TotalSeconds;
                    double estimatedTime    = (timeElapsedSoFar / totalIndex) * (totalNumberOfFiles - totalIndex);
                    Console.WriteLine("Loss: " + loss.ToString("0.00") + " Time: " + timeElapsed.ToString("0.00") + "s Time elapsed: "
                                      + timeElapsedSoFar.ToString("0.00") + "s ETA: " + estimatedTime.ToString("0.00") + "s");
                }
                // Typo fix: "tarining" -> "training" (local variable only).
                double trainingPassTimeElapsed = (DateTime.Now - trainingPassStart).TotalSeconds;
                Console.WriteLine("Pass took " + trainingPassTimeElapsed.ToString("0.00") + " sec. Pass loss: " + totalPassLoss.ToString("0.00")
                                  + " Avg loss: " + (totalPassLoss / (fileList.Count)).ToString("0.00"));
            }
            double totalTimeElapsed = (DateTime.Now - trainingStart).TotalSeconds;

            Console.WriteLine("Training took " + totalTimeElapsed.ToString("0.00") + " sec. Total loss: " + totalLoss.ToString("0.00")
                              + " Avg loss: " + (totalLoss / (totalNumberOfFiles)).ToString("0.00"));
        }
Example #7
0
        public void validate()
        {
            // Computes a cross-entropy score between each test file's generated
            // output image and its ground truth, logging per-file and total
            // scores plus the elapsed time.
            DateTime validateStart = DateTime.Now;

            List <String> fileList = benchmark.getTestFilesPathList();

            float totalCrossEntropy = 0;

            foreach (string testFilePath in fileList)
            {
                DateTime start           = DateTime.Now;
                string   outputFilePath  = Path.ChangeExtension(benchmark.getTestFileOutputPathWithoutExtension(testFilePath), outputFileExtension);
                string   groundTruthPath = benchmark.getTestingFileGroundTruth(testFilePath);

                ImageDescription outputImage      = ImageFileHandler.loadFromPath(outputFilePath);
                ImageDescription groundTruthImage = ImageFileHandler.loadFromPath(groundTruthPath);

                byte[,] outputGray      = outputImage.getColorChannel(ColorChannelEnum.Gray);
                byte[,] groundTruthGray = groundTruthImage.getColorChannel(ColorChannelEnum.Gray);
                // might be a bug in GDI: the gray channel is sometimes missing
                // after loading, so compute it on demand.
                if (outputGray == null)
                {
                    outputImage.computeGrayscale();
                    outputGray = outputImage.getColorChannel(ColorChannelEnum.Gray);
                }
                if (groundTruthGray == null)
                {
                    groundTruthImage.computeGrayscale();
                    groundTruthGray = groundTruthImage.getColorChannel(ColorChannelEnum.Gray);
                }

                float crossEntropy = 0;
                int rows = outputGray.GetLength(0);
                int cols = outputGray.GetLength(1);
                for (int row = 0; row < rows; row++)
                {
                    for (int col = 0; col < cols; col++)
                    {
                        byte outputValue      = outputGray[row, col];
                        byte groundTruthValue = groundTruthGray[row, col];

                        // Ground truth is treated as a hard label: any non-zero
                        // pixel counts as a certain edge.
                        float groundTruthProbability = groundTruthValue != 0 ? 1.0f : 0;
                        //groundTruthProbability = groundTruth / 255.0f;

                        // Map the output byte to (0, 1), nudging the extremes
                        // away from exact 0 and 1 so the log-loss stays finite.
                        float outputProbability;
                        if (outputValue == 0)
                        {
                            outputProbability = 1 / 255.0f;
                        }
                        else if (outputValue == 255)
                        {
                            outputProbability = 254 / 255.0f;
                        }
                        else
                        {
                            outputProbability = outputValue / 255.0f;
                        }

                        float loss = LogisticHelper.computeEntropyLoss(outputProbability, groundTruthProbability);
                        crossEntropy += loss;
                    }
                }

                totalCrossEntropy += crossEntropy;
                Console.WriteLine(testFilePath);
                Console.WriteLine("Cross entropy: " + crossEntropy.ToString("0.00"));
            }
            Console.WriteLine("Total cross entropy: " + totalCrossEntropy.ToString("0.00"));
            double totalTimeElapsed = (DateTime.Now - validateStart).TotalSeconds;

            Console.WriteLine("Validation took " + totalTimeElapsed.ToString("0.00") + " sec.");
        }
Example #8
0
        public virtual ImageDescription filter(ImageDescription inputImage)
        {
            // Kirsch edge detector: Gaussian smoothing, directional template
            // matching, then optionally non-maximum suppression (NMS) and
            // hysteresis thresholding. The edge map is written to the Kirsch
            // channel; all input channels are copied through unchanged.
            inputImage.computeGrayscale();

            int imageSizeX = inputImage.sizeX;
            int imageSizeY = inputImage.sizeY;

            byte[,] inputGray  = inputImage.gray;
            byte[,] outputGray = new byte[imageSizeY, imageSizeX];

            // Smooth with a normalized 7x7 Gaussian before template matching.
            float[,] gaussConvolutionMatrix = FilterBankUtil.generateNormalizedGaussConvolutionMatrix(sigma, 7);
            float[,] gaussResult            = ImageDescriptionUtil.mirroredMarginConvolution(inputGray, gaussConvolutionMatrix);

            // Convolve the smoothed image with every directional Kirsch template.
            List <float[, ]> templates = FilterBankUtil.normalizedKirschTemplates;
            List <float[, ]> results   = new List <float[, ]>(templates.Count);

            foreach (float[,] template in templates)
            {
                results.Add(ImageDescriptionUtil.mirroredMarginConvolution(gaussResult, template));
            }

            // Per pixel, keep the strongest template response (amplitude) and
            // the index of the winning template (used as the edge direction).
            // Note: if every response is <= 0 the direction defaults to 0.
            float[,] amplitudeResult = new float[imageSizeY, imageSizeX];
            int[,] anglesResult      = new int[imageSizeY, imageSizeX];
            for (int i = 0; i < imageSizeY; i++)
            {
                for (int j = 0; j < imageSizeX; j++)
                {
                    int   direction = 0;
                    float maxValue  = 0;
                    for (int templateIndex = 0; templateIndex < templates.Count; templateIndex++)
                    {
                        float value = results[templateIndex][i, j];
                        if (value > maxValue)
                        {
                            maxValue  = value;
                            direction = templateIndex;
                        }
                    }
                    amplitudeResult[i, j] = maxValue;
                    anglesResult[i, j]    = direction;
                }
            }

            if (!applyNms)
            {
                // No NMS: just round the amplitude and clamp it to [0, 255].
                for (var i = 0; i < imageSizeY; i++)
                {
                    for (var j = 0; j < imageSizeX; j++)
                    {
                        if (amplitudeResult[i, j] < 255)
                        {
                            outputGray[i, j] = (byte)(amplitudeResult[i, j] + 0.5f);
                        }
                        else
                        {
                            outputGray[i, j] = 255;
                        }
                    }
                }
            }
            else
            {
                // NMS: keep a pixel only if it is a local maximum along the
                // axis perpendicular to its edge direction. Template indices
                // paired 4 apart (e.g. 2 and 6) are treated as the same axis —
                // presumably opposite directions of one orientation; confirm
                // against FilterBankUtil.normalizedKirschTemplates ordering.
                // The ">= previous / > next" asymmetry breaks ties so exactly
                // one of two equal neighbors survives; image borders count as
                // smaller neighbors.
                float[,] nmsResult = new float[imageSizeY, imageSizeX];
                for (int i = 0; i < imageSizeY; i++)
                {
                    for (int j = 0; j < imageSizeX; j++)
                    {
                        int angle = anglesResult[i, j];
                        if (angle == 2 || angle == 6)
                        {
                            // Compare against vertical neighbors (i-1, i+1).
                            if ((i == 0 || amplitudeResult[i, j] >= amplitudeResult[i - 1, j]) &&
                                (i == imageSizeY - 1 || amplitudeResult[i, j] > amplitudeResult[i + 1, j]))
                            {
                                nmsResult[i, j] = amplitudeResult[i, j];
                            }
                        }
                        else
                        {
                            if (angle == 1 || angle == 5)
                            {
                                // Compare along the up-right / down-left diagonal.
                                if ((i == 0 || j == imageSizeX - 1 || amplitudeResult[i, j] >= amplitudeResult[i - 1, j + 1]) &&
                                    (i == imageSizeY - 1 || j == 0 || amplitudeResult[i, j] > amplitudeResult[i + 1, j - 1]))
                                {
                                    nmsResult[i, j] = amplitudeResult[i, j];
                                }
                            }
                            else
                            {
                                if (angle == 3 || angle == 7)
                                {
                                    // Compare along the up-left / down-right diagonal.
                                    if ((i == 0 || j == 0 || amplitudeResult[i, j] >= amplitudeResult[i - 1, j - 1]) &&
                                        (i == imageSizeY - 1 || j == imageSizeX - 1 || amplitudeResult[i, j] > amplitudeResult[i + 1, j + 1]))
                                    {
                                        nmsResult[i, j] = amplitudeResult[i, j];
                                    }
                                }
                                else
                                {
                                    // Remaining directions (0, 4): compare against
                                    // horizontal neighbors (j-1, j+1).
                                    if ((j == 0 || amplitudeResult[i, j] >= amplitudeResult[i, j - 1]) &&
                                        (j == imageSizeX - 1 || amplitudeResult[i, j] > amplitudeResult[i, j + 1]))
                                    {
                                        nmsResult[i, j] = amplitudeResult[i, j];
                                    }
                                }
                            }
                        }
                    }
                }

                // Hysteresis: keep only the positions retained by the
                // double-threshold / connectivity pass.
                float[,] hysteresisResult = new float[imageSizeY, imageSizeX];
                bool[,] retainedPositions = applyHysteresisThreshold(nmsResult, imageSizeX, imageSizeY);

                for (var i = 0; i < imageSizeY; i++)
                {
                    for (var j = 0; j < imageSizeX; j++)
                    {
                        if (retainedPositions[i, j])
                        {
                            hysteresisResult[i, j] = nmsResult[i, j];
                        }
                    }
                }

                // Round the surviving amplitudes and clamp to [0, 255].
                for (var i = 0; i < imageSizeY; i++)
                {
                    for (var j = 0; j < imageSizeX; j++)
                    {
                        if (hysteresisResult[i, j] < 255)
                        {
                            outputGray[i, j] = (byte)(hysteresisResult[i, j] + 0.5f);
                        }
                        else
                        {
                            outputGray[i, j] = 255;
                        }
                    }
                }
            }

            // Copy every channel from the input, then attach the Kirsch result.
            ImageDescription outputImage = new ImageDescription();

            outputImage.sizeX = imageSizeX;
            outputImage.sizeY = imageSizeY;
            foreach (ColorChannelEnum colorChannel in Enum.GetValues(typeof(ColorChannelEnum)))
            {
                outputImage.setColorChannel(colorChannel, inputImage.getColorChannel(colorChannel));
            }
            outputImage.setColorChannel(ColorChannelEnum.Kirsch, outputGray);

            return(outputImage);
        }
Example #9
0
        public virtual ImageDescription filter(ImageDescription inputImage)
        {
            // Canny edge detector: Gaussian smoothing, Sobel gradient, gradient
            // angle, non-maximum suppression, then hysteresis thresholding.
            // The edge map is written to the Canny channel; all input channels
            // are copied through unchanged.
            inputImage.computeGrayscale();

            int imageSizeX = inputImage.sizeX;
            int imageSizeY = inputImage.sizeY;

            byte[,] inputGray  = inputImage.gray;
            byte[,] outputGray = new byte[imageSizeY, imageSizeX];

            // 1. Gauss: smooth with a normalized 5x5 Gaussian kernel.
            float[,] gaussConvolutionMatrix = FilterBankUtil.generateNormalizedGaussConvolutionMatrix(sigma, 5);
            float[,] gaussResult            = ImageDescriptionUtil.mirroredMarginConvolution(inputGray, gaussConvolutionMatrix);

            // 2. Gradient: horizontal and vertical derivatives via Sobel kernels.
            float[,] dx = ImageDescriptionUtil.mirroredMarginConvolution(gaussResult, FilterBankUtil.normalizedSobelX);
            float[,] dy = ImageDescriptionUtil.mirroredMarginConvolution(gaussResult, FilterBankUtil.normalizedSobelY);

            // 3. Gradient Amplitude: Euclidean magnitude of (dx, dy).
            float[,] amplitudeResult = new float[imageSizeY, imageSizeX];
            for (int i = 0; i < imageSizeY; i++)
            {
                for (int j = 0; j < imageSizeX; j++)
                {
                    amplitudeResult[i, j] = (float)Math.Sqrt(dx[i, j] * dx[i, j] + dy[i, j] * dy[i, j]);
                }
            }

            // 4. Angle of gradient.
            // NOTE(review): Math.Atan2 expects (y, x); passing (dx, dy) measures
            // the angle from the dy axis instead of the usual atan2(dy, dx).
            // The sector boundaries below may be tuned to this convention —
            // verify against the NMS sector tests before changing.
            float[,] anglesResult = new float[imageSizeY, imageSizeX];
            for (int i = 0; i < imageSizeY; i++)
            {
                for (int j = 0; j < imageSizeX; j++)
                {
                    anglesResult[i, j] = (float)Math.Atan2(dx[i, j], dy[i, j]);
                }
            }

            // 5. Non maximal suppresion: keep a pixel only if it is a strict
            // local maximum along the axis selected by its gradient angle.
            // The border (first/last row and column) is left at 0.
            float[,] nmsResult = new float[imageSizeY, imageSizeX];
            for (int i = 1; i < imageSizeY - 1; i++)
            {
                for (int j = 1; j < imageSizeX - 1; j++)
                {
                    float angle = anglesResult[i, j];
                    // Sector around ±pi/2: compare vertical neighbors.
                    if ((angle <= (5 * Math.PI) / 8 && angle > (3 * Math.PI) / 8) || (angle > -(5 * Math.PI) / 8 && angle <= -(3 * Math.PI) / 8))
                    {
                        if (amplitudeResult[i, j] > amplitudeResult[i - 1, j] && amplitudeResult[i, j] > amplitudeResult[i + 1, j])
                        {
                            nmsResult[i, j] = amplitudeResult[i, j];
                        }
                    }
                    else
                    {
                        // Diagonal sector: compare up-right vs down-left neighbors.
                        if (angle <= (3 * Math.PI) / 8 && angle > Math.PI / 8 || angle > -(7 * Math.PI) / 8 && angle <= -(5 * Math.PI) / 8)
                        {
                            if (amplitudeResult[i, j] > amplitudeResult[i - 1, j + 1] && amplitudeResult[i, j] > amplitudeResult[i + 1, j - 1])
                            {
                                nmsResult[i, j] = amplitudeResult[i, j];
                            }
                        }
                        else
                        {
                            // Opposite diagonal: compare up-left vs down-right neighbors.
                            if (angle <= (7 * Math.PI / 8) && angle > (5 * Math.PI / 8) || angle > -(3 * Math.PI) / 8 && angle < -(Math.PI / 8))
                            {
                                if (amplitudeResult[i, j] > amplitudeResult[i - 1, j - 1] && amplitudeResult[i, j] > amplitudeResult[i + 1, j + 1])
                                {
                                    nmsResult[i, j] = amplitudeResult[i, j];
                                }
                            }
                            else
                            {
                                // Remaining sector: compare horizontal neighbors.
                                if (amplitudeResult[i, j] > amplitudeResult[i, j - 1] && amplitudeResult[i, j] > amplitudeResult[i, j + 1])
                                {
                                    nmsResult[i, j] = amplitudeResult[i, j];
                                }
                            }
                        }
                    }
                }
            }

            // 6. Hysteresis thresolding: keep only positions retained by the
            // double-threshold / connectivity pass.
            float[,] hysteresisResult = new float[imageSizeY, imageSizeX];
            bool[,] retainedPositions = applyHysteresisThreshold(nmsResult, imageSizeX, imageSizeY);

            for (var i = 0; i < imageSizeY; i++)
            {
                for (var j = 0; j < imageSizeX; j++)
                {
                    if (retainedPositions[i, j])
                    {
                        hysteresisResult[i, j] = nmsResult[i, j];
                    }
                }
            }

            // Round the surviving amplitudes and clamp to [0, 255].
            for (var i = 0; i < imageSizeY; i++)
            {
                for (var j = 0; j < imageSizeX; j++)
                {
                    if (hysteresisResult[i, j] < 255)
                    {
                        outputGray[i, j] = (byte)(hysteresisResult[i, j] + 0.5f);
                    }
                    else
                    {
                        outputGray[i, j] = 255;
                    }
                }
            }

            // Copy every channel from the input, then attach the Canny result.
            ImageDescription outputImage = new ImageDescription();

            outputImage.sizeX = imageSizeX;
            outputImage.sizeY = imageSizeY;
            foreach (ColorChannelEnum colorChannel in Enum.GetValues(typeof(ColorChannelEnum)))
            {
                outputImage.setColorChannel(colorChannel, inputImage.getColorChannel(colorChannel));
            }
            outputImage.setColorChannel(ColorChannelEnum.Canny, outputGray);

            return(outputImage);
        }