public ImageDescription filter(ImageDescription inputImage)
{
    ImageDescription outputImage = new ImageDescription();
    outputImage.sizeX = newSizeX;
    outputImage.sizeY = newSizeY;
    foreach (ColorChannelEnum colorChannel in colorChannelsToFilter)
    {
        // Wrap the single channel in a temporary grayscale image so it can be resized as a bitmap.
        byte[,] channel = inputImage.getColorChannel(colorChannel);
        ImageDescription temp = new ImageDescription();
        temp.sizeX = inputImage.sizeX;
        temp.sizeY = inputImage.sizeY;
        temp.grayscale = true;
        temp.setColorChannel(ColorChannelEnum.Gray, channel);
        Bitmap tempBitmap = ImageDescriptionUtil.convertToBitmap(temp);
        Bitmap output = ImageDescriptionUtil.resizeImage(tempBitmap, newSizeX, newSizeY);
        temp = ImageDescriptionUtil.fromBitmap(output);
        temp.computeGrayscale();
        outputImage.setColorChannel(colorChannel, temp.gray);
    }
    if (colorChannelsToFilter.Count == 1 && colorChannelsToFilter.Contains(ColorChannelEnum.Gray))
    {
        outputImage.grayscale = true;
    }
    return outputImage;
}
public ImageDescription blendImages(List<ImageDescription> images)
{
    int newSizeX, newSizeY;
    List<ImageDescription> imagesToBlend;
    ImageDescriptionUtil.makeAllImagesSameSize(images, out newSizeX, out newSizeY, out imagesToBlend);
    ImageDescription output = new ImageDescription();
    output.sizeX = newSizeX;
    output.sizeY = newSizeY;
    output.grayscale = true;
    float blendFactor = 1.0f / images.Count;
    byte[,] outputGray = new byte[newSizeY, newSizeX];
    for (int i = 0; i < newSizeY; i++)
    {
        for (int j = 0; j < newSizeX; j++)
        {
            // Average the gray values of all images at this pixel and round to the nearest byte.
            float sum = 0;
            for (int imageIndex = 0; imageIndex < images.Count; imageIndex++)
            {
                sum += imagesToBlend[imageIndex].gray[i, j];
            }
            sum *= blendFactor;
            outputGray[i, j] = (byte)(sum + 0.5f);
        }
    }
    output.gray = outputGray;
    return output;
}
public static void saveToPath(ImageDescription imageDescription, string filePath, string fileExtension)
{
    Image imageToSave = ImageDescriptionUtil.convertToBitmap(imageDescription);
    string fullFileName = Path.ChangeExtension(filePath, fileExtension);
    switch (fileExtension)
    {
        case ".png":
            imageToSave.Save(fullFileName, ImageFormat.Png);
            break;
        case ".jpg":
            imageToSave.Save(fullFileName, ImageFormat.Jpeg);
            break;
        case ".bmp":
            imageToSave.Save(fullFileName, ImageFormat.Bmp);
            break;
        case ".gif":
            imageToSave.Save(fullFileName, ImageFormat.Gif);
            break;
        case ".ico":
            imageToSave.Save(fullFileName, ImageFormat.Icon);
            break;
        case ".emf":
            imageToSave.Save(fullFileName, ImageFormat.Emf);
            break;
        case ".exif":
            imageToSave.Save(fullFileName, ImageFormat.Exif);
            break;
        case ".tiff":
            imageToSave.Save(fullFileName, ImageFormat.Tiff);
            break;
        case ".wmf":
            imageToSave.Save(fullFileName, ImageFormat.Wmf);
            break;
        default:
            imageToSave.Save(fullFileName, ImageFormat.Png);
            break;
    }
}
public ImageDescription test(ImageDescription inputImage)
{
    setInputImageToContexts(inputImage);
    ImageDescription outputImage = ImageDescriptionUtil.createGrayscaleImageWithSameSize(inputImage);
    computeIndexes();
    int numberOfIndexes = currentInputImage.sizeX * currentInputImage.sizeY;
    // Process all pixels in parallel; each linear index is mapped back to (x, y) coordinates.
    Parallel.For(0, numberOfIndexes,
        new ParallelOptions { MaxDegreeOfParallelism = GeneralConfiguration.maximumNumberOfThreads },
        (computedIndex) =>
        {
            int positionX = computedIndex % currentInputImage.sizeX;
            int positionY = computedIndex / currentInputImage.sizeX;
            float probability;
            float feedbackProbability;
            float outputProbability;
            computeProbabilityForPosition(positionX, positionY, computedIndex,
                out probability, out feedbackProbability, out outputProbability);
            outputImage.gray[positionY, positionX] = (byte)(outputProbability * 255 + 0.5f);
        });
    return outputImage;
}
public virtual ImageDescription filter(ImageDescription inputImage)
{
    inputImage.computeGrayscale();
    int imageSizeX = inputImage.sizeX;
    int imageSizeY = inputImage.sizeY;
    byte[,] inputGray = inputImage.gray;
    byte[,] outputGray = new byte[imageSizeY, imageSizeX];

    // 1. Gaussian smoothing
    float[,] gaussConvolutionMatrix = FilterBankUtil.generateNormalizedGaussConvolutionMatrix(sigma, 5);
    float[,] gaussResult = ImageDescriptionUtil.mirroredMarginConvolution(inputGray, gaussConvolutionMatrix);

    // 2. Gradient
    float[,] dx = ImageDescriptionUtil.mirroredMarginConvolution(gaussResult, FilterBankUtil.normalizedSobelX);
    float[,] dy = ImageDescriptionUtil.mirroredMarginConvolution(gaussResult, FilterBankUtil.normalizedSobelY);

    // 3. Gradient amplitude
    float[,] amplitudeResult = new float[imageSizeY, imageSizeX];
    for (int i = 0; i < imageSizeY; i++)
    {
        for (int j = 0; j < imageSizeX; j++)
        {
            amplitudeResult[i, j] = (float)Math.Sqrt(dx[i, j] * dx[i, j] + dy[i, j] * dy[i, j]);
        }
    }

    // Clamp the amplitude to the byte range, rounding to the nearest integer.
    for (var i = 0; i < imageSizeY; i++)
    {
        for (var j = 0; j < imageSizeX; j++)
        {
            if (amplitudeResult[i, j] < 255)
            {
                outputGray[i, j] = (byte)(amplitudeResult[i, j] + 0.5f);
            }
            else
            {
                outputGray[i, j] = 255;
            }
        }
    }

    // Copy the existing channels and attach the edge map as the Sobel channel.
    ImageDescription outputImage = new ImageDescription();
    outputImage.sizeX = imageSizeX;
    outputImage.sizeY = imageSizeY;
    foreach (ColorChannelEnum colorChannel in Enum.GetValues(typeof(ColorChannelEnum)))
    {
        outputImage.setColorChannel(colorChannel, inputImage.getColorChannel(colorChannel));
    }
    outputImage.setColorChannel(ColorChannelEnum.Sobel, outputGray);
    return outputImage;
}
public float train(List<ImageDescription> inputImages, ImageDescription inputImageGroundTruth)
{
    int newSizeX, newSizeY;
    List<ImageDescription> imagesToBlend;
    ImageDescriptionUtil.makeAllImagesSameSize(inputImages, out newSizeX, out newSizeY, out imagesToBlend);
    float entropyLoss = 0;
    for (int i = 0; i < newSizeY; i++)
    {
        for (int j = 0; j < newSizeX; j++)
        {
            float stretchedProbability = computePerPixelStretchedProbability(imagesToBlend, i, j);
            float probability = LogisticHelper.squash(stretchedProbability);
            // Clamp the squashed probability away from 0 and 1 to keep the loss finite.
            if (probability < LogisticHelper.probabilityMinValue)
            {
                probability = LogisticHelper.probabilityMinValue;
            }
            else if (probability > LogisticHelper.probabilityMaxValue)
            {
                probability = LogisticHelper.probabilityMaxValue;
            }
            float groundTruthProbability = groundTruthProbabilityCache[inputImageGroundTruth.gray[i, j]];
            entropyLoss += LogisticHelper.computeEntropyLoss(probability, groundTruthProbability);
#if useEntropyLoss
            float loss = groundTruthProbability - probability;
#else
            float loss = (groundTruthProbability - probability) * probability * (1 - probability);
#endif
            // Per-pixel online gradient update of the per-image blending weights.
            for (int imageIndex = 0; imageIndex < imagesToBlend.Count; imageIndex++)
            {
                byte pixelValue = imagesToBlend[imageIndex].gray[i, j];
                weights[imageIndex] += learningConstant * stretchedPixelValueCache[pixelValue] * loss;
            }
        }
    }
    return entropyLoss;
}
public ImageDescription blendImages(List<ImageDescription> images)
{
    int newSizeX, newSizeY;
    List<ImageDescription> imagesToBlend;
    ImageDescriptionUtil.makeAllImagesSameSize(images, out newSizeX, out newSizeY, out imagesToBlend);
    ImageDescription output = new ImageDescription();
    output.sizeX = newSizeX;
    output.sizeY = newSizeY;
    output.grayscale = true;
    byte[,] outputGray = new byte[newSizeY, newSizeX];
    for (int i = 0; i < newSizeY; i++)
    {
        for (int j = 0; j < newSizeX; j++)
        {
            float stretchedProbability = computePerPixelStretchedProbability(imagesToBlend, i, j);
            float probability = LogisticHelper.squash(stretchedProbability);
            // Clamp, then map the probability to a byte with rounding.
            if (probability < LogisticHelper.probabilityMinValue)
            {
                probability = LogisticHelper.probabilityMinValue;
            }
            else if (probability > LogisticHelper.probabilityMaxValue)
            {
                probability = LogisticHelper.probabilityMaxValue;
            }
            outputGray[i, j] = (byte)(probability * 255.0f + 0.5f);
        }
    }
    output.gray = outputGray;
    return output;
}
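A minimal usage sketch for the logistic blender: train() above accumulates the cross-entropy loss over one pass of the training pair, and blendImages() then applies the learned per-image weights. The host class name LogisticImageBlender, its parameterless constructor, and the fixed epoch count are assumptions for illustration, not taken from the source.

public static ImageDescription trainAndBlendExample(List<ImageDescription> edgeMaps, ImageDescription groundTruth)
{
    LogisticImageBlender blender = new LogisticImageBlender(); // hypothetical type name and constructor
    for (int epoch = 0; epoch < 10; epoch++)
    {
        // train() returns the accumulated cross-entropy loss; it should decrease across epochs.
        float entropyLoss = blender.train(edgeMaps, groundTruth);
        Console.WriteLine("epoch " + epoch + ": loss = " + entropyLoss);
    }
    return blender.blendImages(edgeMaps);
}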
public ImageDescription filter(ImageDescription inputImage)
{
    ImageDescription outputImage = new ImageDescription();
    outputImage.sizeX = inputImage.sizeX;
    outputImage.sizeY = inputImage.sizeY;
    if (colorChannelsToFilter.Count == 1 && colorChannelsToFilter.Contains(ColorChannelEnum.Gray))
    {
        outputImage.grayscale = true;
    }
    foreach (ColorChannelEnum channelEnum in colorChannelsToFilter)
    {
        byte[,] inputChannel = inputImage.getColorChannel(channelEnum);
        byte[,] outputChannel = new byte[outputImage.sizeY, outputImage.sizeX];
        outputImage.setColorChannel(channelEnum, outputChannel);
        for (int y = 0; y < inputImage.sizeY; y++)
        {
            for (int x = 0; x < inputImage.sizeX; x++)
            {
                // Collect the (2 * halfSize + 1)^2 neighborhood, mirroring positions outside the image.
                int index = 0;
                for (int i = -halfSize; i <= halfSize; i++)
                {
                    for (int j = -halfSize; j <= halfSize; j++)
                    {
                        medianValueArray[index++] = inputChannel[
                            ImageDescriptionUtil.outsideMirroredPosition(y + i, inputImage.sizeY),
                            ImageDescriptionUtil.outsideMirroredPosition(x + j, inputImage.sizeX)];
                    }
                }
                // The median is the middle element of the sorted neighborhood.
                Array.Sort(medianValueArray);
                outputChannel[y, x] = medianValueArray[medianPosition];
            }
        }
    }
    return outputImage;
}
public static ImageDescription loadFromPath(string filePath)
{
    Bitmap bitmap = new Bitmap(filePath);
    return ImageDescriptionUtil.fromBitmap(bitmap);
}
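A minimal round-trip sketch combining the two static I/O helpers in this listing (loadFromPath above and saveToPath earlier); the host class name ImageFileHandler is an assumption, not taken from the source.

public static void convertToPngExample(string inputPath, string outputPath)
{
    // ImageFileHandler is a hypothetical name for the class hosting loadFromPath/saveToPath.
    ImageDescription image = ImageFileHandler.loadFromPath(inputPath);
    // The extension argument selects the ImageFormat in saveToPath's switch; unknown extensions fall back to PNG.
    ImageFileHandler.saveToPath(image, outputPath, ".png");
}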
public virtual ImageDescription filter(ImageDescription inputImage)
{
    inputImage.computeGrayscale();
    int imageSizeX = inputImage.sizeX;
    int imageSizeY = inputImage.sizeY;
    byte[,] inputGray = inputImage.gray;
    byte[,] outputGray = new byte[imageSizeY, imageSizeX];

    // 1. Gaussian smoothing
    float[,] gaussConvolutionMatrix = FilterBankUtil.generateNormalizedGaussConvolutionMatrix(sigma, 7);
    float[,] gaussResult = ImageDescriptionUtil.mirroredMarginConvolution(inputGray, gaussConvolutionMatrix);

    // 2. Convolve with each Kirsch template (one per compass direction)
    List<float[,]> templates = FilterBankUtil.normalizedKirschTemplates;
    List<float[,]> results = new List<float[,]>(templates.Count);
    foreach (float[,] template in templates)
    {
        results.Add(ImageDescriptionUtil.mirroredMarginConvolution(gaussResult, template));
    }

    // 3. Per pixel, keep the strongest template response and remember its direction
    float[,] amplitudeResult = new float[imageSizeY, imageSizeX];
    int[,] anglesResult = new int[imageSizeY, imageSizeX];
    for (int i = 0; i < imageSizeY; i++)
    {
        for (int j = 0; j < imageSizeX; j++)
        {
            int direction = 0;
            float maxValue = 0;
            for (int templateIndex = 0; templateIndex < templates.Count; templateIndex++)
            {
                float value = results[templateIndex][i, j];
                if (value > maxValue)
                {
                    maxValue = value;
                    direction = templateIndex;
                }
            }
            amplitudeResult[i, j] = maxValue;
            anglesResult[i, j] = direction;
        }
    }

    if (!applyNms)
    {
        // Without non-maximum suppression: clamp the amplitudes to the byte range.
        for (var i = 0; i < imageSizeY; i++)
        {
            for (var j = 0; j < imageSizeX; j++)
            {
                if (amplitudeResult[i, j] < 255)
                {
                    outputGray[i, j] = (byte)(amplitudeResult[i, j] + 0.5f);
                }
                else
                {
                    outputGray[i, j] = 255;
                }
            }
        }
    }
    else
    {
        // 4. Non-maximum suppression along the direction of the strongest template
        float[,] nmsResult = new float[imageSizeY, imageSizeX];
        for (int i = 0; i < imageSizeY; i++)
        {
            for (int j = 0; j < imageSizeX; j++)
            {
                int angle = anglesResult[i, j];
                if (angle == 2 || angle == 6)
                {
                    // Compare against the vertical neighbors
                    if ((i == 0 || amplitudeResult[i, j] >= amplitudeResult[i - 1, j]) &&
                        (i == imageSizeY - 1 || amplitudeResult[i, j] > amplitudeResult[i + 1, j]))
                    {
                        nmsResult[i, j] = amplitudeResult[i, j];
                    }
                }
                else if (angle == 1 || angle == 5)
                {
                    // Compare against the up-right / down-left diagonal neighbors
                    if ((i == 0 || j == imageSizeX - 1 || amplitudeResult[i, j] >= amplitudeResult[i - 1, j + 1]) &&
                        (i == imageSizeY - 1 || j == 0 || amplitudeResult[i, j] > amplitudeResult[i + 1, j - 1]))
                    {
                        nmsResult[i, j] = amplitudeResult[i, j];
                    }
                }
                else if (angle == 3 || angle == 7)
                {
                    // Compare against the up-left / down-right diagonal neighbors
                    if ((i == 0 || j == 0 || amplitudeResult[i, j] >= amplitudeResult[i - 1, j - 1]) &&
                        (i == imageSizeY - 1 || j == imageSizeX - 1 || amplitudeResult[i, j] > amplitudeResult[i + 1, j + 1]))
                    {
                        nmsResult[i, j] = amplitudeResult[i, j];
                    }
                }
                else
                {
                    // Compare against the horizontal neighbors
                    if ((j == 0 || amplitudeResult[i, j] >= amplitudeResult[i, j - 1]) &&
                        (j == imageSizeX - 1 || amplitudeResult[i, j] > amplitudeResult[i, j + 1]))
                    {
                        nmsResult[i, j] = amplitudeResult[i, j];
                    }
                }
            }
        }

        // 5. Hysteresis thresholding: keep only positions retained by the threshold pass
        float[,] hysteresisResult = new float[imageSizeY, imageSizeX];
        bool[,] retainedPositions = applyHysteresisThreshold(nmsResult, imageSizeX, imageSizeY);
        for (var i = 0; i < imageSizeY; i++)
        {
            for (var j = 0; j < imageSizeX; j++)
            {
                if (retainedPositions[i, j])
                {
                    hysteresisResult[i, j] = nmsResult[i, j];
                }
            }
        }
        for (var i = 0; i < imageSizeY; i++)
        {
            for (var j = 0; j < imageSizeX; j++)
            {
                if (hysteresisResult[i, j] < 255)
                {
                    outputGray[i, j] = (byte)(hysteresisResult[i, j] + 0.5f);
                }
                else
                {
                    outputGray[i, j] = 255;
                }
            }
        }
    }

    // Copy the existing channels and attach the edge map as the Kirsch channel.
    ImageDescription outputImage = new ImageDescription();
    outputImage.sizeX = imageSizeX;
    outputImage.sizeY = imageSizeY;
    foreach (ColorChannelEnum colorChannel in Enum.GetValues(typeof(ColorChannelEnum)))
    {
        outputImage.setColorChannel(colorChannel, inputImage.getColorChannel(colorChannel));
    }
    outputImage.setColorChannel(ColorChannelEnum.Kirsch, outputGray);
    return outputImage;
}
public virtual ImageDescription filter(ImageDescription inputImage)
{
    return ImageDescriptionUtil.mirroredMarginConvolution(inputImage, colorChannelsToFilter, convolutionMatrix);
}
public override void computeIndexes(ImageDescription inputImage, int positionX, int positionY, int computedIndex, int[,] computedIndexes)
{
    byte[] byteValues = new byte[longestRay + 1];
    int[] contextHashesForRay = new int[longestRay + 1];
#if useQuantizedDerivative
    byte[] byteDerivativeValues = new byte[longestRay + 1];
    int[] contextDerivativeHashesForRay = new int[longestRay + 1];
#endif
    int currentIndex = 0;
    foreach (ColorChannelEnum selectedColorChannel in selectedColorChannels)
    {
        byte[,] colorChannel = inputImage.getColorChannel(selectedColorChannel);
        for (int ray = 0; ray < numberOfRays; ray++)
        {
            // Gather the pixel values along this ray, mirroring positions outside the image.
            RelativePixelInformation[] relativePixelInformation = relativePixelInformationsForRay[ray];
            for (int i = 0; i < relativePixelInformation.Length; i++)
            {
                byteValues[i] = ImageDescriptionUtil.getPixelMirrored(colorChannel,
                    positionX + relativePixelInformation[i].deltaX,
                    positionY + relativePixelInformation[i].deltaY);
#if useQuantizedDerivative
                byteDerivativeValues[i] = byteValues[i];
#endif
            }
#if useQuantizedDerivative
            if (trueColorChannels.Contains(selectedColorChannel))
            {
                // Replace each value (except the first) with the wrapped difference to its predecessor.
                for (int i = relativePixelInformation.Length - 1; i > 0; i--)
                {
                    byteDerivativeValues[i] = (byte)((byteDerivativeValues[i] - byteDerivativeValues[i - 1] + 256) & 255);
                }
            }
#endif
            // Incrementally hash the ray so every prefix length gets its own context hash.
            NumberUtils.fnvOneAtATimeHash(byteValues, relativePixelInformation.Length, contextHashesForRay);
            //NumberUtils.fnvOneAtATimeWithMaskHash(byteValues, byteMaskValues, relativePixelInformation.Length, contextHashesForRay);
            //NumberUtils.jenkinsOneAtATimeHash(byteValues, relativePixelInformation.Length, contextHashesForRay);
#if useQuantizedDerivative
            if (trueColorChannels.Contains(selectedColorChannel))
            {
                NumberUtils.fnvOneAtATimeWithMaskHash(byteDerivativeValues, byteMaskValues, relativePixelInformation.Length, contextDerivativeHashesForRay);
            }
#endif
            for (int i = 0; i < contextLenghtsForRay[ray].Length; i++)
            {
                int contextLength = contextLenghtsForRay[ray][i];
                int contextIndex;
#if useQuantizedDerivative
                int contextDerivativeIndex;
#endif
                // Short contexts are indexed directly; longer ones fall back to the prefix hash.
                if (contextLength == 1)
                {
                    contextIndex = byteValues[0];
#if useQuantizedDerivative
                    contextDerivativeIndex = byteDerivativeValues[0];
#endif
                }
                else if (contextLength == 2)
                {
                    contextIndex = (byteValues[0] << 8) + byteValues[1];
#if useQuantizedDerivative
                    contextDerivativeIndex = (byteDerivativeValues[0] << 8) + byteDerivativeValues[1];
#endif
                }
                else if (contextLength == 3 && maxTableSizeBits >= 24)
                {
                    contextIndex = (byteValues[0] << 16) + (byteValues[1] << 8) + byteValues[2];
#if useQuantizedDerivative
                    contextDerivativeIndex = (byteDerivativeValues[0] << 16) + (byteDerivativeValues[1] << 8) + byteDerivativeValues[2];
#endif
                }
                else
                {
                    contextIndex = contextHashesForRay[contextLength - 1];
#if useQuantizedDerivative
                    contextDerivativeIndex = contextDerivativeHashesForRay[contextLength - 1];
#endif
                }
                computedIndexes[computedIndex, currentIndex] = contextIndex;
                currentIndex += 1;
#if useQuantizedDerivative
                if (trueColorChannels.Contains(selectedColorChannel))
                {
                    computedIndexes[computedIndex, currentIndex] = contextDerivativeIndex;
                    currentIndex += 1;
                }
#endif
            }
        }
    }
}
public virtual ImageDescription filter(ImageDescription inputImage)
{
    inputImage.computeGrayscale();
    int imageSizeX = inputImage.sizeX;
    int imageSizeY = inputImage.sizeY;
    byte[,] inputGray = inputImage.gray;
    byte[,] outputGray = new byte[imageSizeY, imageSizeX];

    // 1. Gaussian smoothing
    float[,] gaussConvolutionMatrix = FilterBankUtil.generateNormalizedGaussConvolutionMatrix(sigma, 5);
    float[,] gaussResult = ImageDescriptionUtil.mirroredMarginConvolution(inputGray, gaussConvolutionMatrix);

    // 2. Gradient
    float[,] dx = ImageDescriptionUtil.mirroredMarginConvolution(gaussResult, FilterBankUtil.normalizedSobelX);
    float[,] dy = ImageDescriptionUtil.mirroredMarginConvolution(gaussResult, FilterBankUtil.normalizedSobelY);

    // 3. Gradient amplitude
    float[,] amplitudeResult = new float[imageSizeY, imageSizeX];
    for (int i = 0; i < imageSizeY; i++)
    {
        for (int j = 0; j < imageSizeX; j++)
        {
            amplitudeResult[i, j] = (float)Math.Sqrt(dx[i, j] * dx[i, j] + dy[i, j] * dy[i, j]);
        }
    }

    // 4. Gradient angle
    float[,] anglesResult = new float[imageSizeY, imageSizeX];
    for (int i = 0; i < imageSizeY; i++)
    {
        for (int j = 0; j < imageSizeX; j++)
        {
            anglesResult[i, j] = (float)Math.Atan2(dx[i, j], dy[i, j]);
        }
    }

    // 5. Non-maximum suppression: keep a pixel only if it is a local maximum along its gradient direction
    float[,] nmsResult = new float[imageSizeY, imageSizeX];
    for (int i = 1; i < imageSizeY - 1; i++)
    {
        for (int j = 1; j < imageSizeX - 1; j++)
        {
            float angle = anglesResult[i, j];
            if ((angle <= (5 * Math.PI) / 8 && angle > (3 * Math.PI) / 8) ||
                (angle > -(5 * Math.PI) / 8 && angle <= -(3 * Math.PI) / 8))
            {
                // Compare against the vertical neighbors
                if (amplitudeResult[i, j] > amplitudeResult[i - 1, j] && amplitudeResult[i, j] > amplitudeResult[i + 1, j])
                {
                    nmsResult[i, j] = amplitudeResult[i, j];
                }
            }
            else if ((angle <= (3 * Math.PI) / 8 && angle > Math.PI / 8) ||
                     (angle > -(7 * Math.PI) / 8 && angle <= -(5 * Math.PI) / 8))
            {
                // Compare against the up-right / down-left diagonal neighbors
                if (amplitudeResult[i, j] > amplitudeResult[i - 1, j + 1] && amplitudeResult[i, j] > amplitudeResult[i + 1, j - 1])
                {
                    nmsResult[i, j] = amplitudeResult[i, j];
                }
            }
            else if ((angle <= (7 * Math.PI / 8) && angle > (5 * Math.PI / 8)) ||
                     (angle > -(3 * Math.PI) / 8 && angle < -(Math.PI / 8)))
            {
                // Compare against the up-left / down-right diagonal neighbors
                if (amplitudeResult[i, j] > amplitudeResult[i - 1, j - 1] && amplitudeResult[i, j] > amplitudeResult[i + 1, j + 1])
                {
                    nmsResult[i, j] = amplitudeResult[i, j];
                }
            }
            else
            {
                // Compare against the horizontal neighbors
                if (amplitudeResult[i, j] > amplitudeResult[i, j - 1] && amplitudeResult[i, j] > amplitudeResult[i, j + 1])
                {
                    nmsResult[i, j] = amplitudeResult[i, j];
                }
            }
        }
    }

    // 6. Hysteresis thresholding
    float[,] hysteresisResult = new float[imageSizeY, imageSizeX];
    bool[,] retainedPositions = applyHysteresisThreshold(nmsResult, imageSizeX, imageSizeY);
    for (var i = 0; i < imageSizeY; i++)
    {
        for (var j = 0; j < imageSizeX; j++)
        {
            if (retainedPositions[i, j])
            {
                hysteresisResult[i, j] = nmsResult[i, j];
            }
        }
    }
    for (var i = 0; i < imageSizeY; i++)
    {
        for (var j = 0; j < imageSizeX; j++)
        {
            if (hysteresisResult[i, j] < 255)
            {
                outputGray[i, j] = (byte)(hysteresisResult[i, j] + 0.5f);
            }
            else
            {
                outputGray[i, j] = 255;
            }
        }
    }

    // Copy the existing channels and attach the edge map as the Canny channel.
    ImageDescription outputImage = new ImageDescription();
    outputImage.sizeX = imageSizeX;
    outputImage.sizeY = imageSizeY;
    foreach (ColorChannelEnum colorChannel in Enum.GetValues(typeof(ColorChannelEnum)))
    {
        outputImage.setColorChannel(colorChannel, inputImage.getColorChannel(colorChannel));
    }
    outputImage.setColorChannel(ColorChannelEnum.Canny, outputGray);
    return outputImage;
}
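A minimal usage sketch for the edge detectors in this listing: each filter copies the input channels through unchanged and attaches its edge map as an extra channel, so the result can be read back via getColorChannel. The class name CannyFilter and its parameterless constructor are assumptions; the real filter classes likely need sigma and threshold configuration.

public static byte[,] extractCannyEdgesExample(ImageDescription image)
{
    CannyFilter cannyFilter = new CannyFilter(); // hypothetical type name and constructor
    ImageDescription filtered = cannyFilter.filter(image);
    // The edge map produced above was stored under ColorChannelEnum.Canny.
    return filtered.getColorChannel(ColorChannelEnum.Canny);
}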