public LogisticMixGrayscaleImageBlender(int numberOfInputImages, float learningConstant = 0.0004f)
{
    this.numberOfInputImages = numberOfInputImages;
    this.learningConstant = learningConstant;
    weights = new float[numberOfInputImages];

    // Precompute the target probability for every possible 8-bit gray value.
    groundTruthProbabilityCache = new float[256];
#if !useBinaryFeedback
    // Proportional feedback: gray levels map linearly onto (0, 1); the two
    // endpoints are clamped so the entropy loss never sees an exact 0 or 1.
    groundTruthProbabilityCache[0] = LogisticHelper.probabilityMinValue;
    groundTruthProbabilityCache[255] = LogisticHelper.probabilityMaxValue;
    for (int value = 1; value < 255; value++)
    {
        groundTruthProbabilityCache[value] = value / 255.0f;
    }
#else
    // Binary feedback: every gray value other than pure black counts as "on".
    groundTruthProbabilityCache[0] = 0;
    for (int value = 1; value < 256; value++)
    {
        groundTruthProbabilityCache[value] = 1.0f;
    }
#endif

    // Precompute stretch(value / 255) for every gray value; the endpoints are
    // pinned to the squash domain limits instead of stretching exactly 0 / 1.
    stretchedPixelValueCache = new float[256];
    stretchedPixelValueCache[0] = -LogisticHelper.squashAbsoluteMaximumValue;
    stretchedPixelValueCache[255] = LogisticHelper.squashAbsoluteMaximumValue;
    for (int value = 1; value < 255; value++)
    {
        stretchedPixelValueCache[value] = LogisticHelper.stretch(value / 255.0f);
    }
}
private void computeProbabilityForPosition(int positionX, int positionY, int computedIndex, out float probability, out float feebackProbability, out float outputProbability)
{
    // Mix the stretched predictions of all contexts by plain summation.
    int stretchedSum = 0;
    for (int contextIndex = 0; contextIndex < numberOfContexts; contextIndex++)
    {
        stretchedSum += contextTableMap[contextIndex][computedIndexes[computedIndex, contextIndex] & tableSizeMasks[contextIndex]];
    }
    probability = LogisticHelper.squash(stretchedSum * outputProbabilitySquashFactor);

    // Refine the mixed probability with secondary symbol estimation, keyed on
    // the current pixel's gray value.
    int sseContext = currentInputImage.gray[positionY, positionX];
    feebackProbability = sse.getEstimation(sseContext, probability);
    // A second SSE stage keyed on an RGB hash was tried and disabled:
    //int sse1Context = NumberUtils.hash3(currentInputImage.r[positionY, positionX], currentInputImage.g[positionY, positionX], currentInputImage.b[positionY, positionX]) & 65535;
    //outputProbability = sse1.getEstimation(sse1Context, feebackProbability);
    outputProbability = feebackProbability;
}
public SecondarySymbolEstimationStretchInput(int numberOfContexts, int numberOfInterpolationIntervals, float learningRate)
{
    this.numberOfInterpolationIntervals = numberOfInterpolationIntervals;
    this.learningRate = learningRate;

    // One row of interpolation knots per context; knot j starts out as the
    // identity mapping: squash of position j in the stretch domain.
    estimators = new float[numberOfContexts, numberOfInterpolationIntervals + 1];
    float stretchSpan = 2 * LogisticHelper.squashAbsoluteMaximumValue;
    for (int context = 0; context < numberOfContexts; context++)
    {
        for (int knot = 0; knot <= numberOfInterpolationIntervals; knot++)
        {
            float fraction = (float)knot / numberOfInterpolationIntervals;
            estimators[context, knot] = LogisticHelper.squash(stretchSpan * fraction - LogisticHelper.squashAbsoluteMaximumValue);
        }
    }

    // Scale factor mapping a stretched probability onto a knot index.
    intervalWeightFactor = numberOfInterpolationIntervals / stretchSpan;
}
private void updateProbability(int positionX, int positionY, float groundTruthProbability, float probability, float feebackProbability, int computedIndex)
{
    // Train the SSE stage on the mixed (pre-SSE) probability.
    int sseContext = currentInputImage.gray[positionY, positionX];
    sse.update(sseContext, probability, groundTruthProbability);
    //sse1.update(groundTruthProbability);

    // Error of the final (post-SSE) probability, shared by every context.
#if useEntropyLoss
    float globalError = (feebackProbability - groundTruthProbability) * globalErrorWeight; // entropy loss
#else
    float globalError = (feebackProbability - groundTruthProbability) * feebackProbability * (1 - feebackProbability) * globalErrorWeight; // square loss
#endif

    for (int contextIndex = 0; contextIndex < numberOfContexts; contextIndex++)
    {
        short? contextValue = contextTableMap[contextIndex].getContextValue(computedIndexes[computedIndex, contextIndex]);
        int tableValue = contextValue.GetValueOrDefault(); // a missing context entry counts as 0

        // Per-context error of this context's own prediction.
#if useEntropyLoss
        float localError = (LogisticHelper.squash(tableValue * tableValueSquashFactor) - groundTruthProbability) * localErrorWeight; // entropy loss
#else
        float localProbability = LogisticHelper.squash(tableValue * tableValueSquashFactor);
        float localError = (localProbability - groundTruthProbability) * localProbability * (1 - localProbability) * localErrorWeight; // square loss
#endif

        // Step against the combined error (rounded to the nearest unit), then
        // saturate into the short range before writing back.
        tableValue -= (int)(floatUnit * (globalError + localError) + 0.5f);
        if (tableValue > short.MaxValue)
        {
            tableValue = short.MaxValue;
        }
        else if (tableValue < short.MinValue)
        {
            tableValue = short.MinValue;
        }
        contextTableMap[contextIndex].updateLastContextValue((short)tableValue);
    }
}
public float train(List<ImageDescription> inputImages, ImageDescription inputImageGroundTruth)
{
    int newSizeX, newSizeY;
    List<ImageDescription> imagesToBlend;
    ImageDescriptionUtil.makeAllImagesSameSize(inputImages, out newSizeX, out newSizeY, out imagesToBlend);

    float entropyLoss = 0;
    for (int row = 0; row < newSizeY; row++)
    {
        for (int column = 0; column < newSizeX; column++)
        {
            // Forward pass: mix stretched pixel values and squash back,
            // clamping away exact 0 / 1 so the entropy loss stays finite.
            float stretchedProbability = computePerPixelStretchedProbability(imagesToBlend, row, column);
            float probability = LogisticHelper.squash(stretchedProbability);
            if (probability < LogisticHelper.probabilityMinValue)
            {
                probability = LogisticHelper.probabilityMinValue;
            }
            else if (probability > LogisticHelper.probabilityMaxValue)
            {
                probability = LogisticHelper.probabilityMaxValue;
            }

            float groundTruthProbability = groundTruthProbabilityCache[inputImageGroundTruth.gray[row, column]];
            entropyLoss += LogisticHelper.computeEntropyLoss(probability, groundTruthProbability);

#if useEntropyLoss
            float loss = groundTruthProbability - probability;
#else
            float loss = (groundTruthProbability - probability) * probability * (1 - probability);
#endif

            // Gradient step: each image's weight moves proportionally to its
            // stretched pixel value times the loss.
            for (int imageIndex = 0; imageIndex < imagesToBlend.Count; imageIndex++)
            {
                byte pixelValue = imagesToBlend[imageIndex].gray[row, column];
                weights[imageIndex] += learningConstant * stretchedPixelValueCache[pixelValue] * loss;
            }
        }
    }
    return entropyLoss;
}
private void computeProbabilityForPosition(int positionX, int positionY, int computedIndex, out float probability, out float feebackProbability, out float outputProbability)
{
    // Sum the predictions of the contexts that actually have an entry and
    // count how many hit, so the sum can be normalized accordingly.
    int stretchedSum = 0;
    int numberOfHits = 0;
    for (int contextIndex = 0; contextIndex < numberOfContexts; contextIndex++)
    {
        short? contextValue = contextTableMap[contextIndex].getContextValue(computedIndexes[computedIndex, contextIndex]);
        if (contextValue.HasValue)
        {
            stretchedSum += contextValue.Value;
            numberOfHits++;
        }
    }
    // Normalize by the rounded average of the context count and the hit count.
    int normalization = (numberOfContexts + numberOfHits + 1) >> 1;
    probability = LogisticHelper.squash(stretchedSum * outputProbabilitySquashFactor / normalization);

    // Refine with secondary symbol estimation keyed on the pixel's gray value.
    int sseContext = currentInputImage.gray[positionY, positionX];
    feebackProbability = sse.getEstimation(sseContext, probability);
    // A second SSE stage keyed on an RGB hash was tried and disabled:
    //int sse1Context = NumberUtils.hash3(currentInputImage.r[positionY, positionX], currentInputImage.g[positionY, positionX], currentInputImage.b[positionY, positionX]) & 65535;
    //outputProbability = sse1.getEstimation(sse1Context, feebackProbability);
    outputProbability = feebackProbability;
}
public ImageDescription blendImages(List<ImageDescription> images)
{
    int newSizeX, newSizeY;
    List<ImageDescription> imagesToBlend;
    ImageDescriptionUtil.makeAllImagesSameSize(images, out newSizeX, out newSizeY, out imagesToBlend);

    ImageDescription output = new ImageDescription();
    output.sizeX = newSizeX;
    output.sizeY = newSizeY;
    output.grayscale = true;

    byte[,] outputGray = new byte[newSizeY, newSizeX];
    for (int row = 0; row < newSizeY; row++)
    {
        for (int column = 0; column < newSizeX; column++)
        {
            // Blend the per-pixel stretched probabilities, clamp to the legal
            // probability range, and round back to an 8-bit gray value.
            float stretchedProbability = computePerPixelStretchedProbability(imagesToBlend, row, column);
            float probability = LogisticHelper.squash(stretchedProbability);
            if (probability < LogisticHelper.probabilityMinValue)
            {
                probability = LogisticHelper.probabilityMinValue;
            }
            else if (probability > LogisticHelper.probabilityMaxValue)
            {
                probability = LogisticHelper.probabilityMaxValue;
            }
            outputGray[row, column] = (byte)(probability * 255.0f + 0.5f);
        }
    }
    output.gray = outputGray;
    return output;
}
private void computeIntervals(float probability, out int intervalLowIndex, out int intervalHighIndex, out float intervalWeight)
{
    // Maps a probability onto an interpolation interval: outputs the two
    // bounding knot indexes and the fractional position (0..1) between them.
    // Map the stretched probability into knot-index space
    // [0, numberOfInterpolationIntervals].
    intervalWeight = intervalWeightFactor * (LogisticHelper.stretch(probability) + LogisticHelper.squashAbsoluteMaximumValue);
    if (intervalWeight < 0)
    {
        // Below the stretch domain: clamp onto the start of the first interval.
        intervalWeight = 0;
    }
    intervalLowIndex = (int)intervalWeight;
    if (intervalLowIndex >= numberOfInterpolationIntervals)
    {
        // At or beyond the last knot: clamp onto the end of the last interval.
        intervalLowIndex = numberOfInterpolationIntervals - 1;
        intervalWeight = 1;
    }
    else
    {
        // Keep only the fractional position inside the chosen interval.
        // BUG FIX: the previous code forced intervalWeight to 0 whenever
        // intervalLowIndex was 0, which discarded the fraction for every
        // position inside the first interval and disabled interpolation
        // there; only genuinely negative positions need clamping.
        intervalWeight -= intervalLowIndex;
    }
    intervalHighIndex = intervalLowIndex + 1;
}
public float train(ImageDescription inputImage, ImageDescription inputImageGroundTruth)
{
    // Trains the context tables and the SSE stage on one image / ground-truth
    // pair; returns the accumulated cross-entropy loss over all pixels.
    float entropyLoss = 0;
    setInputImageToContexts(inputImage);
    computeIndexes();
    //for (int positionY = 0; positionY < inputImage.sizeY; positionY++)
    //{
    //    for (int positionX = 0; positionX < inputImage.sizeX; positionX++)
    //    {
    // Pixels are visited through a single linear index; the same brace block
    // below serves as both the sequential loop body and the Parallel.For lambda.
    int numberOfIndexes = currentInputImage.sizeX * currentInputImage.sizeY;
#if !useParallelTraining
    for (int computedIndex = 0; computedIndex < numberOfIndexes; computedIndex++)
#else
    // NOTE(review): under useParallelTraining the body races on
    // `entropyLoss +=` and on the shared context-table / SSE updates with no
    // synchronization, so the returned loss (and the training itself) is
    // non-deterministic — confirm this is an accepted trade-off.
    Parallel.For(0, numberOfIndexes, new ParallelOptions { MaxDegreeOfParallelism = GeneralConfiguration.maximumNumberOfThreads }, (computedIndex) =>
#endif
    {
        // Recover the 2D position from the linear pixel index.
        int positionX = computedIndex % currentInputImage.sizeX;
        int positionY = computedIndex / currentInputImage.sizeX;
        float probability;
        float feebackProbability;
        float outputProbability;
        computeProbabilityForPosition(positionX, positionY, computedIndex, out probability, out feebackProbability, out outputProbability);
        float groundTruthProbability = groundTruthProbabilityCache[inputImageGroundTruth.gray[positionY, positionX]];
        // Loss is measured on the final (post-SSE) probability.
        entropyLoss += LogisticHelper.computeEntropyLoss(outputProbability, groundTruthProbability);
        updateProbability(positionX, positionY, groundTruthProbability, probability, feebackProbability, computedIndex);
    }
#if useParallelTraining
    );
#endif
    //    }
    //}
    return entropyLoss;
}
public void validate()
{
    // Computes the total cross-entropy between every produced output image and
    // its ground truth over the benchmark's test set, logging per-file results.
    // Stopwatch is monotonic; DateTime.Now subtraction is not safe for
    // measuring elapsed time (clock adjustments, DST). Fully qualified to
    // avoid requiring a new using directive.
    var stopwatch = System.Diagnostics.Stopwatch.StartNew();
    List<String> fileList = benchmark.getTestFilesPathList();
    float totalCrossEntropy = 0;
    foreach (string testFilePath in fileList)
    {
        string outputFilePath = Path.ChangeExtension(benchmark.getTestFileOutputPathWithoutExtension(testFilePath), outputFileExtension);
        string groundTruthPath = benchmark.getTestingFileGroundTruth(testFilePath);
        ImageDescription outputImage = ImageFileHandler.loadFromPath(outputFilePath);
        ImageDescription groundTruthImage = ImageFileHandler.loadFromPath(groundTruthPath);
        byte[,] outputGray = outputImage.getColorChannel(ColorChannelEnum.Gray);
        byte[,] groundTruthGray = groundTruthImage.getColorChannel(ColorChannelEnum.Gray);
        // might be a bug in GDI: the gray channel can come back null, so force
        // a grayscale conversion and fetch it again.
        if (outputGray == null)
        {
            outputImage.computeGrayscale();
            outputGray = outputImage.getColorChannel(ColorChannelEnum.Gray);
        }
        if (groundTruthGray == null)
        {
            groundTruthImage.computeGrayscale();
            groundTruthGray = groundTruthImage.getColorChannel(ColorChannelEnum.Gray);
        }
        float crossEntropy = 0;
        for (int i = 0; i < outputGray.GetLength(0); i++)
        {
            for (int j = 0; j < outputGray.GetLength(1); j++)
            {
                byte output = outputGray[i, j];
                byte groundTruth = groundTruthGray[i, j];
                // Ground truth is treated as binary: any non-zero pixel is "on".
                float groundTruthProbability = (groundTruth != 0) ? 1.0f : 0;
                //groundTruthProbability = groundTruth / 255.0f;
                // Clamp the output into [1/255, 254/255] so the entropy loss
                // never evaluates log(0).
                float outputProbability;
                if (output == 0)
                {
                    outputProbability = 1 / 255.0f;
                }
                else if (output == 255)
                {
                    outputProbability = 254 / 255.0f;
                }
                else
                {
                    outputProbability = output / 255.0f;
                }
                crossEntropy += LogisticHelper.computeEntropyLoss(outputProbability, groundTruthProbability);
            }
        }
        totalCrossEntropy += crossEntropy;
        Console.WriteLine(testFilePath);
        Console.WriteLine("Cross entropy: " + crossEntropy.ToString("0.00"));
    }
    Console.WriteLine("Total cross entropy: " + totalCrossEntropy.ToString("0.00"));
    // Fixes: the previous version declared an unused per-file `DateTime start`
    // and timed the whole run by subtracting DateTime.Now values.
    double totalTimeElapsed = stopwatch.Elapsed.TotalSeconds;
    Console.WriteLine("Validation took " + totalTimeElapsed.ToString("0.00") + " sec.");
}