Example No. 1
        public ImageDescription filter(ImageDescription inputImage)
        {
            ImageDescription outputImage = new ImageDescription();

            outputImage.sizeX = newSizeX;
            outputImage.sizeY = newSizeY;

            foreach (ColorChannelEnum colorChannel in colorChannelsToFilter)
            {
                // Wrap the single channel as a temporary grayscale image so it can be
                // converted to a Bitmap and resized via the Bitmap helpers.
                byte[,] channel = inputImage.getColorChannel(colorChannel);
                ImageDescription temp = new ImageDescription();
                temp.sizeX     = inputImage.sizeX;
                temp.sizeY     = inputImage.sizeY;
                temp.grayscale = true;
                temp.setColorChannel(ColorChannelEnum.Gray, channel);

                Bitmap tempBitmap = ImageDescriptionUtil.convertToBitmap(temp);
                Bitmap output     = ImageDescriptionUtil.resizeImage(tempBitmap, newSizeX, newSizeY);

                // Convert the resized bitmap back and store its gray plane as this
                // channel of the output image.
                temp = ImageDescriptionUtil.fromBitmap(output);
                temp.computeGrayscale();
                outputImage.setColorChannel(colorChannel, temp.gray);
            }

            if (colorChannelsToFilter.Count == 1 && colorChannelsToFilter.Contains(ColorChannelEnum.Gray))
            {
                outputImage.grayscale = true;
            }
            return(outputImage);
        }
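
A minimal usage sketch for the filter above. The ResizeFilter class name and its constructor are assumptions (only the filter method is shown here); ImageFileHandler.loadFromPath is taken from Example No. 4.

        // Hypothetical usage; ResizeFilter and its constructor are assumed, not shown above.
        ImageDescription input = ImageFileHandler.loadFromPath("photo.png");
        ResizeFilter resizeFilter = new ResizeFilter(320, 240,
            new List<ColorChannelEnum> { ColorChannelEnum.Red, ColorChannelEnum.Green, ColorChannelEnum.Blue });
        ImageDescription resized = resizeFilter.filter(input);
        Console.WriteLine("Resized to " + resized.sizeX + "x" + resized.sizeY);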
Example No. 2
        public virtual ImageDescription filter(ImageDescription inputImage)
        {
            inputImage.computeGrayscale();

            int imageSizeX = inputImage.sizeX;
            int imageSizeY = inputImage.sizeY;

            byte[,] inputGray  = inputImage.gray;
            byte[,] outputGray = new byte[imageSizeY, imageSizeX];

            // 1. Gauss
            float[,] gaussConvolutionMatrix = FilterBankUtil.generateNormalizedGaussConvolutionMatrix(sigma, 5);
            float[,] gaussResult            = ImageDescriptionUtil.mirroredMarginConvolution(inputGray, gaussConvolutionMatrix);

            // 2. Gradient
            float[,] dx = ImageDescriptionUtil.mirroredMarginConvolution(gaussResult, FilterBankUtil.normalizedSobelX);
            float[,] dy = ImageDescriptionUtil.mirroredMarginConvolution(gaussResult, FilterBankUtil.normalizedSobelY);

            // 3. Gradient Amplitude
            float[,] amplitudeResult = new float[imageSizeY, imageSizeX];
            for (int i = 0; i < imageSizeY; i++)
            {
                for (int j = 0; j < imageSizeX; j++)
                {
                    amplitudeResult[i, j] = (float)Math.Sqrt(dx[i, j] * dx[i, j] + dy[i, j] * dy[i, j]);
                }
            }

            for (var i = 0; i < imageSizeY; i++)
            {
                for (var j = 0; j < imageSizeX; j++)
                {
                    if (amplitudeResult[i, j] < 255)
                    {
                        outputGray[i, j] = (byte)(amplitudeResult[i, j] + 0.5f);
                    }
                    else
                    {
                        outputGray[i, j] = 255;
                    }
                }
            }

            ImageDescription outputImage = new ImageDescription();

            outputImage.sizeX = imageSizeX;
            outputImage.sizeY = imageSizeY;
            foreach (ColorChannelEnum colorChannel in Enum.GetValues(typeof(ColorChannelEnum)))
            {
                outputImage.setColorChannel(colorChannel, inputImage.getColorChannel(colorChannel));
            }
            outputImage.setColorChannel(ColorChannelEnum.Sobel, outputGray);

            return(outputImage);
        }
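
ImageDescriptionUtil.mirroredMarginConvolution itself is not part of this listing. Below is a minimal sketch of a mirrored-border convolution consistent with how it is called above, written as a straightforward correlation-style loop; this is an assumption about the helper, not its actual implementation, and it reuses outsideMirroredPosition from the median filter later in this listing.

        // Sketch only: assumes an odd-sized kernel and that out-of-range coordinates
        // are reflected back inside the image.
        static float[,] mirroredMarginConvolutionSketch(byte[,] input, float[,] kernel)
        {
            int sizeY = input.GetLength(0), sizeX = input.GetLength(1);
            int halfY = kernel.GetLength(0) / 2, halfX = kernel.GetLength(1) / 2;
            float[,] result = new float[sizeY, sizeX];
            for (int y = 0; y < sizeY; y++)
            {
                for (int x = 0; x < sizeX; x++)
                {
                    float sum = 0;
                    for (int i = -halfY; i <= halfY; i++)
                    {
                        for (int j = -halfX; j <= halfX; j++)
                        {
                            int my = ImageDescriptionUtil.outsideMirroredPosition(y + i, sizeY);
                            int mx = ImageDescriptionUtil.outsideMirroredPosition(x + j, sizeX);
                            sum += kernel[i + halfY, j + halfX] * input[my, mx];
                        }
                    }
                    result[y, x] = sum;
                }
            }
            return result;
        }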
        public ImageDescription filter(ImageDescription inputImage)
        {
            ImageDescription outputImage = new ImageDescription();

            outputImage.sizeX = inputImage.sizeX;
            outputImage.sizeY = inputImage.sizeY;
            if (colorChannelsToFilter.Count == 1 && colorChannelsToFilter.Contains(ColorChannelEnum.Gray))
            {
                outputImage.grayscale = true;
            }

            foreach (ColorChannelEnum channelEnum in colorChannelsToFilter)
            {
                byte[,] inputChannel  = inputImage.getColorChannel(channelEnum);
                byte[,] outputChannel = new byte[outputImage.sizeY, outputImage.sizeX];
                outputImage.setColorChannel(channelEnum, outputChannel);

                for (int y = 0; y < inputImage.sizeY; y++)
                {
                    for (int x = 0; x < inputImage.sizeX; x++)
                    {
                        // Gather the (2 * halfSize + 1) x (2 * halfSize + 1) neighbourhood,
                        // mirroring coordinates that fall outside the image.
                        int index = 0;
                        for (int i = -halfSize; i <= halfSize; i++)
                        {
                            for (int j = -halfSize; j <= halfSize; j++)
                            {
                                medianValueArray[index++] = inputChannel[
                                    ImageDescriptionUtil.outsideMirroredPosition(y + i, inputImage.sizeY),
                                    ImageDescriptionUtil.outsideMirroredPosition(x + j, inputImage.sizeX)];
                            }
                        }
                        // The median is the middle element of the sorted neighbourhood.
                        Array.Sort(medianValueArray);
                        outputChannel[y, x] = medianValueArray[medianPosition];
                    }
                }
            }

            return(outputImage);
        }
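
The median filter relies on two pieces that are not shown: medianValueArray / medianPosition (presumably a field of (2 * halfSize + 1)^2 elements and its middle index) and ImageDescriptionUtil.outsideMirroredPosition. A sketch of the latter, under the assumption that it reflects coordinates at the borders:

        // Assumed behaviour of outsideMirroredPosition: reflect without repeating the edge pixel.
        // Valid as long as the overflow is smaller than the image size, which holds for small kernels.
        static int outsideMirroredPositionSketch(int position, int size)
        {
            if (position < 0)
            {
                return -position;                   // -1 -> 1, -2 -> 2, ...
            }
            if (position >= size)
            {
                return 2 * size - position - 2;     // size -> size - 2, size + 1 -> size - 3, ...
            }
            return position;
        }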
Example No. 4
        public void validate()
        {
            DateTime validateStart = DateTime.Now;

            List <String> fileList = benchmark.getTestFilesPathList();

            float totalCrossEntropy = 0;

            foreach (string testFilePath in fileList)
            {
                DateTime start           = DateTime.Now;
                string   outputFilePath  = Path.ChangeExtension(benchmark.getTestFileOutputPathWithoutExtension(testFilePath), outputFileExtension);
                string   groundTruthPath = benchmark.getTestingFileGroundTruth(testFilePath);

                ImageDescription outputImage      = ImageFileHandler.loadFromPath(outputFilePath);
                ImageDescription groundTruthImage = ImageFileHandler.loadFromPath(groundTruthPath);

                byte[,] outputGray      = outputImage.getColorChannel(ColorChannelEnum.Gray);
                byte[,] groundTruthGray = groundTruthImage.getColorChannel(ColorChannelEnum.Gray);
                // the Gray channel can be missing after loading (possibly a GDI quirk), so compute it on demand
                if (outputGray == null)
                {
                    outputImage.computeGrayscale();
                    outputGray = outputImage.getColorChannel(ColorChannelEnum.Gray);
                }
                if (groundTruthGray == null)
                {
                    groundTruthImage.computeGrayscale();
                    groundTruthGray = groundTruthImage.getColorChannel(ColorChannelEnum.Gray);
                }

                float crossEntropy = 0;
                for (int i = 0; i < outputGray.GetLength(0); i++)
                {
                    for (int j = 0; j < outputGray.GetLength(1); j++)
                    {
                        byte output      = outputGray[i, j];
                        byte groundTruth = groundTruthGray[i, j];

                        // Binarize the ground truth: any non-zero pixel is treated as an edge.
                        float groundTruthProbability = groundTruth != 0 ? 1.0f : 0.0f;
                        //groundTruthProbability = groundTruth / 255.0f;

                        // Clamp the predicted probability away from 0 and 1 so the log terms
                        // in the entropy loss stay finite.
                        float outputProbability;
                        if (output == 0)
                        {
                            outputProbability = 1 / 255.0f;
                        }
                        else if (output == 255)
                        {
                            outputProbability = 254 / 255.0f;
                        }
                        else
                        {
                            outputProbability = output / 255.0f;
                        }
                        float loss = LogisticHelper.computeEntropyLoss(outputProbability, groundTruthProbability);
                        crossEntropy += loss;
                    }
                }

                totalCrossEntropy += crossEntropy;
                Console.WriteLine(testFilePath);
                Console.WriteLine("Cross entropy: " + crossEntropy.ToString("0.00"));
            }
            Console.WriteLine("Total cross entropy: " + totalCrossEntropy.ToString("0.00"));
            double totalTimeElapsed = (DateTime.Now - validateStart).TotalSeconds;

            Console.WriteLine("Validation took " + totalTimeElapsed.ToString("0.00") + " sec.");
        }
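
LogisticHelper.computeEntropyLoss is not shown in this listing. A binary cross-entropy consistent with how it is called above (both probabilities already clamped away from 0 and 1 before the call) would be the following sketch; the real helper may scale or sign things differently.

        // Sketch of the assumed loss: standard binary cross-entropy.
        static float computeEntropyLossSketch(float outputProbability, float groundTruthProbability)
        {
            return (float)(-(groundTruthProbability * Math.Log(outputProbability)
                             + (1 - groundTruthProbability) * Math.Log(1 - outputProbability)));
        }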
Example No. 5
        public virtual ImageDescription filter(ImageDescription inputImage)
        {
            inputImage.computeGrayscale();

            int imageSizeX = inputImage.sizeX;
            int imageSizeY = inputImage.sizeY;

            byte[,] inputGray  = inputImage.gray;
            byte[,] outputGray = new byte[imageSizeY, imageSizeX];

            float[,] gaussConvolutionMatrix = FilterBankUtil.generateNormalizedGaussConvolutionMatrix(sigma, 7);
            float[,] gaussResult            = ImageDescriptionUtil.mirroredMarginConvolution(inputGray, gaussConvolutionMatrix);

            List <float[, ]> templates = FilterBankUtil.normalizedKirschTemplates;
            List <float[, ]> results   = new List <float[, ]>(templates.Count);

            foreach (float[,] template in templates)
            {
                results.Add(ImageDescriptionUtil.mirroredMarginConvolution(gaussResult, template));
            }

            float[,] amplitudeResult = new float[imageSizeY, imageSizeX];
            int[,] anglesResult      = new int[imageSizeY, imageSizeX];
            for (int i = 0; i < imageSizeY; i++)
            {
                for (int j = 0; j < imageSizeX; j++)
                {
                    // Keep the strongest compass-template response and the direction it came from.
                    int   direction = 0;
                    float maxValue  = 0;
                    for (int templateIndex = 0; templateIndex < templates.Count; templateIndex++)
                    {
                        float value = results[templateIndex][i, j];
                        if (value > maxValue)
                        {
                            maxValue  = value;
                            direction = templateIndex;
                        }
                    }
                    amplitudeResult[i, j] = maxValue;
                    anglesResult[i, j]    = direction;
                }
            }

            if (!applyNms)
            {
                for (var i = 0; i < imageSizeY; i++)
                {
                    for (var j = 0; j < imageSizeX; j++)
                    {
                        if (amplitudeResult[i, j] < 255)
                        {
                            outputGray[i, j] = (byte)(amplitudeResult[i, j] + 0.5f);
                        }
                        else
                        {
                            outputGray[i, j] = 255;
                        }
                    }
                }
            }
            else
            {
                float[,] nmsResult = new float[imageSizeY, imageSizeX];
                for (int i = 0; i < imageSizeY; i++)
                {
                    for (int j = 0; j < imageSizeX; j++)
                    {
                        // Non-maximum suppression: keep a pixel only if it is a local maximum
                        // along the direction of its strongest compass response.
                        int angle = anglesResult[i, j];
                        if (angle == 2 || angle == 6)
                        {
                            // compare against the pixels above and below
                            if ((i == 0 || amplitudeResult[i, j] >= amplitudeResult[i - 1, j]) &&
                                (i == imageSizeY - 1 || amplitudeResult[i, j] > amplitudeResult[i + 1, j]))
                            {
                                nmsResult[i, j] = amplitudeResult[i, j];
                            }
                        }
                        else if (angle == 1 || angle == 5)
                        {
                            // compare along one diagonal
                            if ((i == 0 || j == imageSizeX - 1 || amplitudeResult[i, j] >= amplitudeResult[i - 1, j + 1]) &&
                                (i == imageSizeY - 1 || j == 0 || amplitudeResult[i, j] > amplitudeResult[i + 1, j - 1]))
                            {
                                nmsResult[i, j] = amplitudeResult[i, j];
                            }
                        }
                        else if (angle == 3 || angle == 7)
                        {
                            // compare along the other diagonal
                            if ((i == 0 || j == 0 || amplitudeResult[i, j] >= amplitudeResult[i - 1, j - 1]) &&
                                (i == imageSizeY - 1 || j == imageSizeX - 1 || amplitudeResult[i, j] > amplitudeResult[i + 1, j + 1]))
                            {
                                nmsResult[i, j] = amplitudeResult[i, j];
                            }
                        }
                        else
                        {
                            // compare against the pixels to the left and right
                            if ((j == 0 || amplitudeResult[i, j] >= amplitudeResult[i, j - 1]) &&
                                (j == imageSizeX - 1 || amplitudeResult[i, j] > amplitudeResult[i, j + 1]))
                            {
                                nmsResult[i, j] = amplitudeResult[i, j];
                            }
                        }
                    }
                }

                float[,] hysteresisResult = new float[imageSizeY, imageSizeX];
                bool[,] retainedPositions = applyHysteresisThreshold(nmsResult, imageSizeX, imageSizeY);

                for (var i = 0; i < imageSizeY; i++)
                {
                    for (var j = 0; j < imageSizeX; j++)
                    {
                        if (retainedPositions[i, j])
                        {
                            hysteresisResult[i, j] = nmsResult[i, j];
                        }
                    }
                }

                for (var i = 0; i < imageSizeY; i++)
                {
                    for (var j = 0; j < imageSizeX; j++)
                    {
                        if (hysteresisResult[i, j] < 255)
                        {
                            outputGray[i, j] = (byte)(hysteresisResult[i, j] + 0.5f);
                        }
                        else
                        {
                            outputGray[i, j] = 255;
                        }
                    }
                }
            }

            ImageDescription outputImage = new ImageDescription();

            outputImage.sizeX = imageSizeX;
            outputImage.sizeY = imageSizeY;
            foreach (ColorChannelEnum colorChannel in Enum.GetValues(typeof(ColorChannelEnum)))
            {
                outputImage.setColorChannel(colorChannel, inputImage.getColorChannel(colorChannel));
            }
            outputImage.setColorChannel(ColorChannelEnum.Kirsch, outputGray);

            return(outputImage);
        }
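
FilterBankUtil.normalizedKirschTemplates is not shown here. One plausible construction is the classic Kirsch compass kernel and its seven rotations; the /15 normalization below (which keeps the maximum response on byte input at 255) is an assumption about what "normalized" means in that helper.

        // Sketch: the classic Kirsch kernel (5 5 5 / -3 0 -3 / -3 -3 -3) rotated 8 times.
        static List<float[,]> buildKirschTemplatesSketch()
        {
            int[] ringValues = { 5, 5, 5, -3, -3, -3, -3, -3 };   // clockwise ring around the centre
            int[] ringY      = { -1, -1, -1, 0, 1, 1, 1, 0 };
            int[] ringX      = { -1, 0, 1, 1, 1, 0, -1, -1 };

            List<float[,]> templates = new List<float[,]>(8);
            for (int rotation = 0; rotation < 8; rotation++)
            {
                float[,] kernel = new float[3, 3];
                for (int k = 0; k < 8; k++)
                {
                    kernel[ringY[k] + 1, ringX[k] + 1] = ringValues[(k + rotation) % 8] / 15.0f;
                }
                templates.Add(kernel);
            }
            return templates;
        }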
        public override void computeIndexes(ImageDescription inputImage, int positionX, int positionY, int computedIndex, int[,] computedIndexes)
        {
            byte[] byteValues          = new byte[longestRay + 1];
            int[]  contextHashesForRay = new int[longestRay + 1];

#if useQuantizedDerivative
            byte[] byteDerivativeValues          = new byte[longestRay + 1];
            int[]  contextDerivativeHashesForRay = new int[longestRay + 1];
#endif

            int currentIndex = 0;
            foreach (ColorChannelEnum selectedColorChannel in selectedColorChannels)
            {
                byte[,] colorChannel = inputImage.getColorChannel(selectedColorChannel);
                for (int ray = 0; ray < numberOfRays; ray++)
                {
                    RelativePixelInformation[] relativePixelInformation = relativePixelInformationsForRay[ray];
                    for (int i = 0; i < relativePixelInformation.Length; i++)
                    {
                        byteValues[i] = ImageDescriptionUtil.getPixelMirrored(colorChannel, positionX + relativePixelInformation[i].deltaX, positionY + relativePixelInformation[i].deltaY);
#if useQuantizedDerivative
                        byteDerivativeValues[i] = byteValues[i];
#endif
                    }
#if useQuantizedDerivative
                    if (trueColorChannels.Contains(selectedColorChannel))
                    {
                        for (int i = relativePixelInformation.Length - 1; i > 0; i--)
                        {
                            byteDerivativeValues[i] = (byte)((byteDerivativeValues[i] - byteDerivativeValues[i - 1] + 256) & 255);
                        }
                    }
#endif
                    NumberUtils.fnvOneAtATimeHash(byteValues, relativePixelInformation.Length, contextHashesForRay);
                    //NumberUtils.fnvOneAtATimeWithMaskHash(byteValues, byteMaskValues, relativePixelInformation.Length, contextHashesForRay);
                    //NumberUtils.jenkinsOneAtATimeHash(byteValues, relativePixelInformation.Length, contextHashesForRay);

#if useQuantizedDerivative
                    if (trueColorChannels.Contains(selectedColorChannel))
                    {
                        NumberUtils.fnvOneAtATimeWithMaskHash(byteDerivativeValues, byteMaskValues, relativePixelInformation.Length, contextDerivativeHashesForRay);
                    }
#endif
                    for (int i = 0; i < contextLenghtsForRay[ray].Length; i++)
                    {
                        int contextLength = contextLenghtsForRay[ray][i];
                        int contextIndex;
#if useQuantizedDerivative
                        int contextDerivativeIndex;
#endif
                        if (contextLength == 1)
                        {
                            contextIndex = byteValues[0];
#if useQuantizedDerivative
                            contextDerivativeIndex = byteDerivativeValues[0];
#endif
                        }
                        else if (contextLength == 2)
                        {
                            contextIndex = (byteValues[0] << 8) + byteValues[1];
#if useQuantizedDerivative
                            contextDerivativeIndex = (byteDerivativeValues[0] << 8) + byteDerivativeValues[1];
#endif
                        }
                        else if (contextLength == 3 && maxTableSizeBits >= 24)
                        {
                            contextIndex = (byteValues[0] << 16) + (byteValues[1] << 8) + byteValues[2];
#if useQuantizedDerivative
                            contextDerivativeIndex = (byteDerivativeValues[0] << 16) + (byteDerivativeValues[1] << 8) + byteDerivativeValues[2];
#endif
                        }
                        else
                        {
                            contextIndex = contextHashesForRay[contextLength - 1];
#if useQuantizedDerivative
                            contextDerivativeIndex = contextDerivativeHashesForRay[contextLength - 1];
#endif
                        }
                        computedIndexes[computedIndex, currentIndex] = contextIndex;
                        currentIndex += 1;
#if useQuantizedDerivative
                        if (trueColorChannels.Contains(selectedColorChannel))
                        {
                            computedIndexes[computedIndex, currentIndex] = contextDerivativeIndex;
                            currentIndex += 1;
                        }
#endif
                    }
                }
            }
        }
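
NumberUtils.fnvOneAtATimeHash is consumed above as an array of prefix hashes (slot contextLength - 1 holds a hash of the first contextLength bytes). A sketch under that assumption, using the standard 32-bit FNV-1a constants; the real routine may mix or mask differently.

        // Sketch of the assumed prefix-hash routine; constants are the standard 32-bit FNV-1a ones.
        static void fnvOneAtATimeHashSketch(byte[] values, int length, int[] prefixHashes)
        {
            const uint fnvOffsetBasis = 2166136261;
            const uint fnvPrime       = 16777619;

            uint hash = fnvOffsetBasis;
            for (int i = 0; i < length; i++)
            {
                hash ^= values[i];                          // FNV-1a: xor the byte first...
                hash *= fnvPrime;                           // ...then multiply by the prime
                prefixHashes[i] = (int)(hash & 0x7FFFFFFF); // keep the stored index non-negative
            }
        }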
Example No. 7
        public virtual ImageDescription filter(ImageDescription inputImage)
        {
            inputImage.computeGrayscale();

            int imageSizeX = inputImage.sizeX;
            int imageSizeY = inputImage.sizeY;

            byte[,] inputGray  = inputImage.gray;
            byte[,] outputGray = new byte[imageSizeY, imageSizeX];

            // 1. Gauss
            float[,] gaussConvolutionMatrix = FilterBankUtil.generateNormalizedGaussConvolutionMatrix(sigma, 5);
            float[,] gaussResult            = ImageDescriptionUtil.mirroredMarginConvolution(inputGray, gaussConvolutionMatrix);

            // 2. Gradient
            float[,] dx = ImageDescriptionUtil.mirroredMarginConvolution(gaussResult, FilterBankUtil.normalizedSobelX);
            float[,] dy = ImageDescriptionUtil.mirroredMarginConvolution(gaussResult, FilterBankUtil.normalizedSobelY);

            // 3. Gradient Amplitude
            float[,] amplitudeResult = new float[imageSizeY, imageSizeX];
            for (int i = 0; i < imageSizeY; i++)
            {
                for (int j = 0; j < imageSizeX; j++)
                {
                    amplitudeResult[i, j] = (float)Math.Sqrt(dx[i, j] * dx[i, j] + dy[i, j] * dy[i, j]);
                }
            }

            // 4. Angle of gradient
            float[,] anglesResult = new float[imageSizeY, imageSizeX];
            for (int i = 0; i < imageSizeY; i++)
            {
                for (int j = 0; j < imageSizeX; j++)
                {
                    anglesResult[i, j] = (float)Math.Atan2(dx[i, j], dy[i, j]);
                }
            }

            // 5. Non-maximum suppression
            float[,] nmsResult = new float[imageSizeY, imageSizeX];
            for (int i = 1; i < imageSizeY - 1; i++)
            {
                for (int j = 1; j < imageSizeX - 1; j++)
                {
                    float angle = anglesResult[i, j];
                    if ((angle <= (5 * Math.PI) / 8 && angle > (3 * Math.PI) / 8) || (angle > -(5 * Math.PI) / 8 && angle <= -(3 * Math.PI) / 8))
                    {
                        if (amplitudeResult[i, j] > amplitudeResult[i - 1, j] && amplitudeResult[i, j] > amplitudeResult[i + 1, j])
                        {
                            nmsResult[i, j] = amplitudeResult[i, j];
                        }
                    }
                    else if ((angle <= (3 * Math.PI) / 8 && angle > Math.PI / 8) || (angle > -(7 * Math.PI) / 8 && angle <= -(5 * Math.PI) / 8))
                    {
                        if (amplitudeResult[i, j] > amplitudeResult[i - 1, j + 1] && amplitudeResult[i, j] > amplitudeResult[i + 1, j - 1])
                        {
                            nmsResult[i, j] = amplitudeResult[i, j];
                        }
                    }
                    else if ((angle <= (7 * Math.PI) / 8 && angle > (5 * Math.PI) / 8) || (angle > -(3 * Math.PI) / 8 && angle < -(Math.PI / 8)))
                    {
                        if (amplitudeResult[i, j] > amplitudeResult[i - 1, j - 1] && amplitudeResult[i, j] > amplitudeResult[i + 1, j + 1])
                        {
                            nmsResult[i, j] = amplitudeResult[i, j];
                        }
                    }
                    else
                    {
                        if (amplitudeResult[i, j] > amplitudeResult[i, j - 1] && amplitudeResult[i, j] > amplitudeResult[i, j + 1])
                        {
                            nmsResult[i, j] = amplitudeResult[i, j];
                        }
                    }
                }
            }

            // 6. Hysteresis thresholding
            float[,] hysteresisResult = new float[imageSizeY, imageSizeX];
            bool[,] retainedPositions = applyHysteresisThreshold(nmsResult, imageSizeX, imageSizeY);

            for (var i = 0; i < imageSizeY; i++)
            {
                for (var j = 0; j < imageSizeX; j++)
                {
                    if (retainedPositions[i, j])
                    {
                        hysteresisResult[i, j] = nmsResult[i, j];
                    }
                }
            }

            for (var i = 0; i < imageSizeY; i++)
            {
                for (var j = 0; j < imageSizeX; j++)
                {
                    if (hysteresisResult[i, j] < 255)
                    {
                        outputGray[i, j] = (byte)(hysteresisResult[i, j] + 0.5f);
                    }
                    else
                    {
                        outputGray[i, j] = 255;
                    }
                }
            }

            ImageDescription outputImage = new ImageDescription();

            outputImage.sizeX = imageSizeX;
            outputImage.sizeY = imageSizeY;
            foreach (ColorChannelEnum colorChannel in Enum.GetValues(typeof(ColorChannelEnum)))
            {
                outputImage.setColorChannel(colorChannel, inputImage.getColorChannel(colorChannel));
            }
            outputImage.setColorChannel(ColorChannelEnum.Canny, outputGray);

            return(outputImage);
        }
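
applyHysteresisThreshold is the one step of this Canny implementation that is not shown. Below is a sketch of a conventional double-threshold hysteresis pass with the same signature; the threshold values and the breadth-first growth from strong edges are assumptions, not the listing's actual code.

        // Sketch only: mark strong edges, then grow along weak edges connected to them.
        bool[,] applyHysteresisThresholdSketch(float[,] amplitude, int sizeX, int sizeY)
        {
            const float lowThreshold  = 20.0f;   // assumed values
            const float highThreshold = 60.0f;

            bool[,] retained = new bool[sizeY, sizeX];
            Queue<(int y, int x)> strongPixels = new Queue<(int y, int x)>();

            // Seed with strong edges.
            for (int i = 0; i < sizeY; i++)
            {
                for (int j = 0; j < sizeX; j++)
                {
                    if (amplitude[i, j] >= highThreshold)
                    {
                        retained[i, j] = true;
                        strongPixels.Enqueue((i, j));
                    }
                }
            }

            // Grow along weak edges (>= lowThreshold) that touch an already retained pixel.
            while (strongPixels.Count > 0)
            {
                var (y, x) = strongPixels.Dequeue();
                for (int di = -1; di <= 1; di++)
                {
                    for (int dj = -1; dj <= 1; dj++)
                    {
                        int ni = y + di, nj = x + dj;
                        if (ni < 0 || ni >= sizeY || nj < 0 || nj >= sizeX || retained[ni, nj])
                        {
                            continue;
                        }
                        if (amplitude[ni, nj] >= lowThreshold)
                        {
                            retained[ni, nj] = true;
                            strongPixels.Enqueue((ni, nj));
                        }
                    }
                }
            }
            return retained;
        }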