/// <summary>
/// Blends the auto-thresholded image with the original input:
/// result = strenght * thresholded + (1 - strenght) * original, per pixel.
/// </summary>
/// <param name="input">Image to process; replaced with the blended result.</param>
protected override void ExecuteEffect(ref MagickImage input)
{
    float otherStrength = 1.0f - strenght;
    // Snapshot of the untouched input; written into base.tex for the blend below.
    MagickImage otherImage = new MagickImage(input);
    Texture2D otherTex = new Texture2D(base.tex.width, base.tex.height, base.tex.format, false);
    ImageProcessingHelper.WriteToTexture(otherImage, ref base.tex);
    otherImage.Dispose(); // FIX: the copy was never disposed (MagickImage is IDisposable)
    input.AutoThreshold((AutoThresholdMethod)Mode);
    ImageProcessingHelper.WriteToTexture(input, ref otherTex);
    Texture2D newTex = new Texture2D(otherTex.width, otherTex.height, otherTex.format, false);
    for (int x = 0; x < newTex.width; x++)
    {
        for (int y = 0; y < newTex.height; y++)
        {
            // Weighted blend of thresholded (otherTex) and original (base.tex) pixels.
            Color a = otherTex.GetPixel(x, y) * strenght + otherStrength * base.tex.GetPixel(x, y);
            newTex.SetPixel(x, y, a);
        }
    }
    newTex.Apply();
    // FIX: dispose the old image before replacing the caller's reference (was leaked).
    MagickImage blended = ImageProcessingHelper.GenerateMagicImage(newTex);
    input.Dispose();
    input = blended;
}
/// <summary>
/// Polls the screen once per second until a BP (ban/pick) screen is detected or the
/// token is cancelled. While SuspendScanning is set, capture is skipped entirely.
/// </summary>
public async Task LookForBpScreen(CancellationToken cancellationToken)
{
    try
    {
        OcrAsyncChecker.CheckThread(OcrAsyncChecker.LookForBpScreenAsyncChecker);
        var finder = new Finder();
        while (!cancellationToken.IsCancellationRequested)
        {
            if (!SuspendScanning)
            {
                bool bpDetected;
                // GDI capture/analysis must be serialized via the shared lock.
                lock (ImageProcessingHelper.GDILock)
                {
                    bpDetected = ImageProcessingHelper.CheckIfInBp(finder.CaptureScreen());
                }
                if (bpDetected)
                {
                    return;
                }
            }
            await Task.Delay(1000, cancellationToken).ConfigureAwait(false);
        }
    }
    finally
    {
        // Always release the checker slot, even on cancellation or detection.
        OcrAsyncChecker.CleanThread(OcrAsyncChecker.LookForBpScreenAsyncChecker);
    }
}
// Sets up the view model: creates the image-processing helper, wires the
// "take photo" command to its async handler, and refreshes the local cache
// before first use.
public SmartConverterViewModel()
{
    imageProcessing = new ImageProcessingHelper();
    // Command invoked when the user taps the photo button.
    TakePhoto = new Command(CameraButtonClickedAsync);
    EnsureCacheIsUpToDate();
}
/// <summary>
/// Applies the effect to a texture in place: converts it to a MagickImage, runs
/// ExecuteEffect, and writes the result back into the same texture reference.
/// </summary>
/// <param name="otherTex">Texture to transform; overwritten with the processed result.</param>
public void ApplyEffect(ref Texture2D otherTex)
{
    // Working texture sized like the source; ExecuteEffect reads/writes base.tex.
    tex = new Texture2D(otherTex.width, otherTex.height, otherTex.format, false);
    MagickImage tempImage = ImageProcessingHelper.GenerateMagicImage(otherTex);
    try
    {
        ExecuteEffect(ref tempImage);
        ImageProcessingHelper.WriteToTexture(tempImage, ref otherTex);
    }
    finally
    {
        // FIX: the MagickImage was never disposed. ExecuteEffect may have replaced
        // the ref with a new instance, so dispose whichever one it left behind.
        tempImage.Dispose();
    }
}
/// <summary>
/// Downloads a media file's thumbnail from S3.
/// </summary>
/// <param name="_fileGuid">Identifier of the media file.</param>
/// <param name="_extension">File extension used to build the S3 key.</param>
/// <returns>Raw thumbnail bytes.</returns>
public byte[] GetThumbnailImage(Guid _fileGuid, string _extension)
{
    var bucket = Config.GetSettingValue("S3Bucket_Images");
    var rootFolder = Config.GetSettingValue("S3MediaRootFolder");
    // Final 'true' selects the thumbnail variant of the path.
    var key = ImageProcessingHelper.GetPath(_fileGuid, rootFolder, _extension, true);
    return S3Helper.GetFile(S3Helper.GetS3(), bucket, key);
}
/// <summary>
/// Uploads a media file's thumbnail to S3.
/// </summary>
/// <param name="_fileGuid">Identifier of the media file.</param>
/// <param name="_extension">File extension used to build the S3 key.</param>
/// <param name="_mimetype">MIME type stored with the object.</param>
/// <param name="_data">Raw thumbnail bytes.</param>
public void PersistThumbnailImage(Guid _fileGuid, string _extension, string _mimetype, byte[] _data)
{
    var bucket = Config.GetSettingValue("S3Bucket_Images");
    var rootFolder = Config.GetSettingValue("S3MediaRootFolder");
    // Final 'true' selects the thumbnail variant of the path.
    var key = ImageProcessingHelper.GetPath(_fileGuid, rootFolder, _extension, true);
    S3Helper.PersistFile(S3Helper.GetS3(), bucket, key, _mimetype, _data);
}
/// <summary>
/// OCRs the map name from a screenshot and appends the best candidate to <paramref name="sb"/>.
/// </summary>
/// <param name="file">Screenshot to process.</param>
/// <param name="sb">Receives the recognized map name when a trustable match exists.</param>
/// <returns>True when the match is fully trustable; false on failure or an untrusted match.</returns>
public bool ProcessMap(FilePath file, StringBuilder sb)
{
    var tempPath = TempDirectoryPath + "temp.tiff";
    using (var image = ImageProcessingHelper.GetCroppedMap(file))
        image.Save(tempPath);
    OcrResult pendingMatchResult;
    try
    {
        pendingMatchResult = _engine.ProcessOcr(tempPath, OcrEngine.CandidateMaps);
    }
    catch (Exception)
    {
        // OCR engine failure is treated as "no match" so the scan loop keeps running.
        return false;
    }
    if (!pendingMatchResult.Results.Any())
    {
        return false;
    }
    // Hoisted: the original re-evaluated Values.First() on every use below.
    var best = pendingMatchResult.Values.First();
    if (!best.Trustable)
    {
        return false;
    }
    if (OcrEngine.Debug)
    {
        // Archive the screenshot under the recognized name, suffixing an index
        // so earlier samples are never overwritten.
        var i = 0;
        var path = TempDirectoryPath + best.Value + ".tiff";
        while (File.Exists(path))
        {
            ++i;
            path = TempDirectoryPath + best.Value + i + ".tiff";
        }
        if (OcrEngine.Delete)
        {
            File.Move(file, path);
        }
        else
        {
            File.Copy(file, path, true);
        }
    }
    if (!OcrEngine.Debug && OcrEngine.Delete)
    {
        file.DeleteIfExists();
    }
    sb.Append(best.Value);
    return best.FullyTrustable;
}
/// <summary>
/// Captures the screen, locates the hero-name anchor points, derives the left/right
/// HeroName1 positions (offsets scale with screen height), and persists them to coord.txt.
/// </summary>
public void AdjustPlaceHolderPosition()
{
    var finder = new Finder();
    FilePath screenPath = finder.CaptureScreen();
    var points = ImageProcessingHelper.LookForPoints(screenPath);
    // FIX: the resolution was queried four times even though `height` was already hoisted.
    var height = ScreenUtil.GetScreenResolution().Height;
    // NOTE(review): the 0.0017 / 0.0035 / 0.0045 factors are empirically tuned offsets
    // (left X differs from right X) — preserved as-is.
    App.AppSetting.Position.Left.HeroName1 = new Point(
        points[0] + (int)(0.0017 * height),
        points[1] + (int)(0.0035 * height));
    App.AppSetting.Position.Right.HeroName1 = new Point(
        points[2] + (int)(0.0035 * height),
        points[3] + (int)(0.0045 * height));
    var left = App.AppSetting.Position.Left.HeroName1;
    var right = App.AppSetting.Position.Right.HeroName1;
    // Format: "leftX leftY rightX rightY" (space separated, same as before).
    File.WriteAllText(@".\coord.txt", $"{left.X} {left.Y} {right.X} {right.Y}");
}
/// <summary>
/// Trains the perceptron with plain (per-sample) SGD.
/// </summary>
/// <param name="epochs">Number of passes over the data set.</param>
/// <param name="learningRate">SGD learning rate.</param>
/// <param name="batchSize">Unused here; kept for signature compatibility with callers.</param>
/// <param name="lossEps">When |loss| drops below this, the current epoch ends early.</param>
/// <param name="filesPath">Image files to train on.</param>
/// <param name="inputResults">Expected class index per file.</param>
/// <param name="classCount">Number of output classes.</param>
/// <param name="cancellation">Stops training entirely when signaled.</param>
/// <returns>Per-step |loss| history.</returns>
public List <double> SGDTrain(
    int epochs,
    double learningRate,
    int batchSize,
    double lossEps,
    string[] filesPath,
    int[] inputResults,
    int classCount,
    CancellationToken cancellation)
{
    var toReturn = new List <double>();
    for (int i = 0; i < epochs; ++i)
    {
        for (int j = 0; j < inputResults.Length; ++j)
        {
            if (cancellation.IsCancellationRequested)
            {
                // FIX: the original only broke the inner loop, so the remaining
                // epochs kept running after cancellation was requested.
                return toReturn;
            }
            // One-hot expected output for this sample.
            var inputResult = PrepareInputResult(inputResults[j], classCount);
            var input = ImageProcessingHelper.PrepareData(filesPath[j]);
            var loss = SGDStep(learningRate, input, inputResult);
            Console.WriteLine($"Epoch - {i}, step - {j}, loss - {loss}");
            toReturn.Add(Math.Abs(loss));
            if (Math.Abs(loss) < lossEps)
            {
                // Epoch converged early; move on to the next epoch (original behavior).
                break;
            }
        }
    }
    return toReturn;
}
/// <summary>
/// OCRs a hero name from a loading-screen capture and appends the best candidate
/// to <paramref name="sb"/>. Returns silently when no trustable match is found.
/// </summary>
/// <param name="file">Capture to process.</param>
/// <param name="sb">Receives the recognized hero name on success.</param>
public void ProcessLoadingHero(FilePath file, StringBuilder sb)
{
    var tempPath = TempDirectoryPath + "temp.tiff";
    using (var image = ImageProcessingHelper.GetCroppeddHero(file))
        image.Save(tempPath);
    var pendingMatchResult = _engine.ProcessOcr(tempPath, OcrEngine.CandidateHeroes);
    if (!pendingMatchResult.Results.Any())
    {
        return;
    }
    // Hoisted: the original re-evaluated Values.First() on every use below.
    var best = pendingMatchResult.Values.First();
    if (!best.Trustable)
    {
        return;
    }
    if (OcrEngine.Debug)
    {
        // Archive the capture under the recognized name, suffixing an index
        // so earlier samples are never overwritten.
        var i = 0;
        var path = TempDirectoryPath + best.Value + ".tiff";
        while (File.Exists(path))
        {
            ++i;
            path = TempDirectoryPath + best.Value + i + ".tiff";
        }
        // NOTE(review): with '||' this condition is effectively always true
        // (an empty sb satisfies the right operand, a non-empty sb the left);
        // '&&' was likely intended. Behavior preserved pending confirmation.
        if (!string.IsNullOrEmpty(sb.ToString()) || sb.ToString() != PickingText)
        {
            if (OcrEngine.Delete)
            {
                File.Move(file, path);
            }
            else
            {
                File.Copy(file, path, true);
            }
        }
    }
    else
    {
        file.DeleteIfExists();
    }
    sb.Append(best.Value);
}
/// <summary>
/// Trains the perceptron with per-sample SGD over a directory's files, logging the
/// predicted vs. expected class on every step.
/// </summary>
/// <param name="epochs">Number of passes over the data set.</param>
/// <param name="learningRate">SGD learning rate.</param>
/// <param name="lossEps">When |loss| drops below this, the current epoch ends early.</param>
/// <param name="filesPath">Image files to train on.</param>
/// <param name="inputResults">Expected class index per file.</param>
/// <param name="classCount">Number of output classes.</param>
/// <returns>Per-step |loss| history.</returns>
public List <double> SGDTrainFromDir(
    int epochs,
    double learningRate,
    double lossEps,
    string[] filesPath,
    int[] inputResults,
    int classCount)
{
    var lossHistory = new List <double>();
    var sampleCount = filesPath.Length;
    for (int epoch = 0; epoch < epochs; ++epoch)
    {
        for (int sample = 0; sample < sampleCount; ++sample)
        {
            // One-hot expected output for this sample.
            var expected = PrepareInputResult(inputResults[sample], classCount);
            var features = ImageProcessingHelper.PrepareData(filesPath[sample]);
            var stepLoss = SGDStep(learningRate, features, expected);
            var outputs = Perceptron.OutputLayer.OutputNonMatrix.ToList();
            Console.WriteLine($"Epoch - {epoch}, step - {sample}, loss - {stepLoss}, prediction - {outputs.IndexOf(outputs.Max())}, actual result - {Array.IndexOf(expected, expected.Max())}");
            lossHistory.Add(Math.Abs(stepLoss));
            if (Math.Abs(stepLoss) < lossEps)
            {
                // Epoch converged early; continue with the next epoch.
                break;
            }
        }
    }
    return lossHistory;
}
/// <summary>
/// OCRs a hero name from <paramref name="bitMap"/> by binarizing the rotated image at a
/// zig-zag sweep of thresholds around LightModeThreshold, scoring each OCR pass, and
/// appending the highest-scoring candidate to <paramref name="sb"/>.
/// </summary>
/// <param name="bitMap">Captured screen region containing the hero name.</param>
/// <param name="rotationAngle">De-skew angle; its sign selects the left/right debug folder below.</param>
/// <param name="sb">Receives the best match (may receive an empty string on failure).</param>
/// <param name="offset">Step by which each sweep iteration widens from the start threshold.</param>
/// <param name="textInWhite">True when the text is known to be white; skips mode detection.</param>
/// <returns>Fail, Success, or Trustable (Trustable when a fully-trusted match was found).</returns>
private ProcessedResult ProcessHero(Bitmap bitMap, float rotationAngle, StringBuilder sb, int offset, bool textInWhite)
{
    var tempPath = TempDirectoryPath + "temp.tiff";
    // mode 0 = white text; otherwise detected from the bitmap. -1 means detection failed.
    var mode = textInWhite ? 0 : ImageProcessingHelper.CheckMode(bitMap, rotationAngle);
    FilePath file = null;
    if (OcrEngine.Debug)
    {
        // Keep a copy of the raw capture for offline inspection.
        file = Path.GetTempFileName();
        bitMap.Save(TempDirectoryPath.Parent + file.GetFileNameWithoutExtension() + ".bmp");
    }
    if (mode == -1)
    {
        return(ProcessedResult.Fail);
    }
    var startThresholding = LightModeThreshold;
    int sampleWidth;
    var image = ImageProcessingHelper.GetRotatedImage(rotationAngle, bitMap, textInWhite, out sampleWidth);
    if (OcrEngine.Debug)
    {
        image.Save(TempDirectoryPath + "RotatedImage.bmp");
    }
    if (_engine is OcrEngineAsian)
    {
        // Asian glyphs: raise Tesseract's minimum x-height for segmentation.
        _engine.Engine.SetVariable(@"textord_min_xheight", 25);
    }
    string pendingMatchResult = string.Empty;
    var scoreDictionary = new Dictionary <string, int>();
    // Threshold visit order alternates outward around the start value, e.g.:
    // 130 - 135 - 125 - 140 - 120 - 145 - 115
    // 75 - 80 - 70 - 85 - 65 - 90 - 60
    var switcher = 0;
    int faultCount = 0;
    int failBinaryCheckCount = 0;
    for (var index = startThresholding + 15; index <= startThresholding + 30; index += switcher)
    {
        // Flip direction each pass and widen by `offset` so the sweep zig-zags outward.
        switcher = -switcher;
        if (switcher > 0)
        {
            switcher += offset;
        }
        else
        {
            switcher -= offset;
        }
        double count;
        var segmentationCount = ImageProcessingHelper.ProcessOnce(index, image, tempPath, rotationAngle, textInWhite, out count);
        if (segmentationCount == 0)
        {
            // Binarization produced nothing usable; too many misses aborts the whole sweep.
            failBinaryCheckCount++;
            if (failBinaryCheckCount > 5)
            {
                scoreDictionary.Clear();
                break;
            }
            continue;
        }
        // One success resets the failure budget (to -5, granting extra headroom).
        failBinaryCheckCount = -5;
        // Prefer the segmentation count when it is clearly (>= 2) below the raw count.
        var newCount = count;
        if (segmentationCount < count && count - segmentationCount >= 2)
        {
            newCount = segmentationCount;
        }
        OcrResult result;
        try
        {
            result = _engine.ProcessOcr(newCount, tempPath, OcrEngine.CandidateHeroes);
        }
        catch (Exception)
        {
            return(ProcessedResult.Fail);
        }
        // 100% match case: a fully trustable hit wins outright.
        if (result.Values.Any(v => v.FullyTrustable))
        {
            scoreDictionary[result.Values.First(v => v.FullyTrustable).Value] = int.MaxValue;
            break;
        }
        // empty case: nothing recognized at this threshold; too many in a row aborts.
        if (!result.Values.Any())
        {
            faultCount++;
            if (faultCount > 3)
            {
                break;
            }
            continue;
        }
        var maxScoreInSuite = result.Values.Max(c => c.Score);
        var matchResultsWithMaxScore = result.Values.Where(c => c.Score == maxScoreInSuite).ToList();
        // unique 60%+ case: a single trustable leader gets double weight.
        if (matchResultsWithMaxScore.Count == 1 && matchResultsWithMaxScore[0].Trustable)
        {
            matchResultsWithMaxScore[0].Score *= 2;
        }
        // normal case: accumulate scores per candidate across thresholds.
        foreach (var matchResultWithMaxScore in matchResultsWithMaxScore)
        {
            if (scoreDictionary.ContainsKey(matchResultWithMaxScore.Value))
            {
                scoreDictionary[matchResultWithMaxScore.Value] += matchResultWithMaxScore.Score;
            }
            else
            {
                scoreDictionary[matchResultWithMaxScore.Value] = matchResultWithMaxScore.Score;
            }
            if (OcrEngine.Debug)
            {
                Console.WriteLine(@"Thresdhold " + index + @" : " + matchResultWithMaxScore.Key.Replace("\n", string.Empty) + @" => " + matchResultWithMaxScore.Value);
            }
        }
    }
    // Pick the candidate with the highest accumulated score.
    int maxValue = 0;
    foreach (var scorePair in scoreDictionary)
    {
        if (scorePair.Value > maxValue)
        {
            pendingMatchResult = scorePair.Key;
            maxValue = scorePair.Value;
        }
    }
    if (OcrEngine.Debug)
    {
        // Archive the capture under the recognized name (left/right by rotation sign),
        // suffixing an index so earlier samples are never overwritten.
        var i = 0;
        var path = TempDirectoryPath + (rotationAngle > 0 ? @"left\" : @"right\") + pendingMatchResult + ".tiff";
        while (File.Exists(path))
        {
            ++i;
            path = TempDirectoryPath + (rotationAngle > 0 ? @"left\" : @"right\") + pendingMatchResult + i + ".tiff";
        }
        // NOTE(review): with '||' this condition is effectively always true
        // (an empty sb satisfies the right operand, a non-empty sb the left);
        // '&&' may have been intended — confirm before changing.
        if (!string.IsNullOrEmpty(sb.ToString()) || sb.ToString() != PickingText)
        {
            if (OcrEngine.Delete)
            {
                File.Move(file, path);
            }
            else
            {
                File.Copy(file, path, true);
            }
        }
    }
    sb.Append(pendingMatchResult);
    image.Dispose();
    if (!OcrEngine.Debug && OcrEngine.Delete)
    {
        file?.DeleteIfExists();
    }
    if (maxValue == 0)
    {
        return(ProcessedResult.Fail);
    }
    return(maxValue == int.MaxValue ? ProcessedResult.Trustable : ProcessedResult.Success);
}
/// <summary>
/// Trains the perceptron with mini-batch SGD: output-layer gradients are accumulated
/// over <paramref name="batchSize"/> samples, averaged, and applied in one backward step.
/// </summary>
/// <param name="epochs">Number of passes over the data set.</param>
/// <param name="learningRate">SGD learning rate.</param>
/// <param name="batchSize">Samples per gradient update.</param>
/// <param name="lossEps">When |batch loss| drops below this, the current epoch ends early.</param>
/// <param name="filesPath">Image files to train on.</param>
/// <param name="inputResults">Expected class index per file.</param>
/// <param name="classCount">Number of output classes.</param>
/// <param name="cancellation">Stops training entirely when signaled.</param>
/// <returns>Per-batch |loss| history and |accuracy| history.</returns>
public Tuple <List <double>, List <double> > MiniBatchSGD(
    int epochs,
    double learningRate,
    int batchSize,
    double lossEps,
    string[] filesPath,
    int[] inputResults,
    int classCount,
    CancellationToken cancellation)
{
    Console.WriteLine("MINI-BATCH");
    var toReturn = new List <double>();
    var toReturnAc = new List <double>();
    var inputDataCount = filesPath.Length;
    for (int i = 0; i < epochs; ++i)
    {
        var batchCount = Convert.ToInt32(inputDataCount / batchSize);
        for (int j = 0; j < batchCount; j++)
        {
            var losses = new List <double>();
            var accuracyList = new List <double>();
            var outputLayerGradient = new double[Perceptron.OutputLayer.NeuronsCount];
            var inputResult = new double[classCount];
            for (int k = 0; k < batchSize; k++)
            {
                inputResult = PrepareInputResult(inputResults[k + j * batchSize], classCount);
                List <double[][]> input;
                try
                {
                    input = ImageProcessingHelper.PrepareData(filesPath[k + j * batchSize]);
                }
                catch (Exception)
                {
                    // Unreadable sample: skip it (best effort) rather than abort the batch.
                    continue;
                }
                var stepResults = FeedForwardStep(learningRate, input, inputResult);
                losses.Add(stepResults.Item1);
                accuracyList.Add(stepResults.Item2);
                // Accumulate this sample's gradient into the batch gradient
                // (Add is a project extension — presumably element-wise; confirm).
                outputLayerGradient.Add(Perceptron.GetOutputLayerGradient(inputResult));
            }
            // FIX: the original used ForEach(item => item = item / batchSize), which only
            // reassigned the lambda's local copy — the gradient was never averaged.
            for (int g = 0; g < outputLayerGradient.Length; g++)
            {
                outputLayerGradient[g] /= batchSize;
            }
            BackwardStep(outputLayerGradient, learningRate, inputResult);
            // Guard against an empty batch (all samples failed to load) to avoid NaN.
            var loss = losses.Count > 0 ? losses.Sum() / losses.Count : 0.0;
            var accuracy = accuracyList.Count > 0 ? accuracyList.Sum() / accuracyList.Count : 0.0;
            Console.WriteLine($"Epoch - {i}, step - {j}, loss - {loss}, accuracy - {accuracy}");
            toReturn.Add(Math.Abs(loss));
            toReturnAc.Add(Math.Abs(accuracy));
            if (Math.Abs(loss) < lossEps)
            {
                // Epoch converged early; continue with the next epoch (original behavior).
                break;
            }
            if (cancellation.IsCancellationRequested)
            {
                // FIX: the original only broke the inner loop; remaining epochs kept running.
                return new Tuple <List <double>, List <double> >(toReturn, toReturnAc);
            }
        }
    }
    return new Tuple <List <double>, List <double> >(toReturn, toReturnAc);
}
/// <summary>
/// Routine to train the network: collects generalized images from the project's image
/// lists into a shuffled training set, converts each to a monochrome input pattern,
/// and runs the requested number of training rounds via a NetworkHelper.
/// </summary>
/// <param name="cnnProject">The CNN project supplying the image lists.</param>
/// <param name="numberOfTrainingRounds">The number of training rounds.</param>
public void TrainPattern(CNNProject cnnProject, long numberOfTrainingRounds)
{
    // 1. preparation - both project's imageLists generalized images to the shuffled trainingItem-list
    #region 1. preparation
    List<TrainingItem> trainingItemList = new List<TrainingItem>();

    #region fills the trainingItemList
    ImageList imgList;
    bool matching;

    for (int i = 0; i < 2; i++)
    {
        // at first we include the matching images
        if (i == 0)
        {
            imgList = cnnProject.Matching;
            matching = true;
        }
        // and then the not matching images
        else
        {
            // FIXME(review): BUG — this branch re-reads cnnProject.Matching, so the SAME
            // images are added a second time labeled matching=false. Per the comment above,
            // this should be the project's NOT-matching image list (property name not
            // visible from here — confirm on CNNProject and fix). As written, the network
            // is trained with contradictory labels for every matching image.
            imgList = cnnProject.Matching;
            matching = false;
        }

        foreach (Image img in imgList.Images)
        {
            // generalizes the image
            Image generalizedImg = ImageHandling.GeneralizeImage(img);
            trainingItemList.Add(new TrainingItem(generalizedImg, matching));
        }
    }
    #endregion

    // filled list gets shuffled
    // (maybe this optimizes the result)
    StaticClasses.Shuffle<TrainingItem>(trainingItemList);
    #endregion

    // 2. build of training data items and add it to the helper
    #region 2. trainingItem
    // used later on to create the training thread
    NetworkHelper helper = new NetworkHelper(_network);

    foreach (TrainingItem trainingItem in trainingItemList)
    {
        Image img = trainingItem.Image;

        ArrayList arryListInput;
        #region fills arryListInput
        // Converts an image of any size to a pattern that can be feed to the network.
        ImageProcessingHelper imgHelper = new ImageProcessingHelper();
        // note: this is a (monochrome) collection of 0's and 1's !!
        arryListInput = imgHelper.ArrayListFromImage(img);

        // Input size must match the network's input layer exactly.
        if (img.Width * img.Height != _network.InputLayer.Count)
        {
            throw new InvalidInputException("The number of pixels in the input image doesn't match the number of input layer neurons!", null);
        }

        #region Debugging
        /*
        // Convert an arrayList by rounding each value to a pattern of 0s and 1s
        PatternProcessingHelper patHelper = new PatternProcessingHelper();
        String tmpPatern = patHelper.PatternFromArraylist(tmpImgList);
        Debug.WriteLine("Added : " + tmpPatern);
        */
        #endregion
        #endregion

        ArrayList arryListOutput;
        #region fills arryListOutput
        arryListOutput = new ArrayList();
        // true is going to be a single 1, false a single 0
        arryListOutput.Add(trainingItem.Matching ? 1 : 0);
        #endregion

        // a training data item is used for a single training round
        TrainingData trainingDataItem = new TrainingData(arryListInput, arryListOutput);

        // this could be also used; one training round directly
        //_network.TrainNetwork(trainingDataItem);
        helper.AddTrainingData(trainingDataItem);
    }
    #endregion

    // Let's go!
    _trainStart = DateTime.Now;

    // 3. training
    #region 3. training
    // ShowProgress delegate
    helper.TrainingProgress += new NetworkHelper.TrainingProgressEventHandler(ShowProgress);

    // Start training
    // --- here we are going to wait --
    helper.Train(numberOfTrainingRounds, true); // <--

    // releasing
    helper.TrainingProgress -= new NetworkHelper.TrainingProgressEventHandler(ShowProgress);

    // show message box
    if (StopTrainingSilently == false)
    {
        MessageBox.Show("Training of the neuronal network completed at " + DateTime.Now, "Training Completed", MessageBoxButtons.OK, MessageBoxIcon.Information);
    }
    #endregion
}
/// <summary>
/// Routine to detect an image (right sized!): converts it to the network's input
/// pattern, runs the network, and thresholds the single output value.
/// </summary>
/// <param name="img">The resized and generalized image.</param>
/// <returns>True when the network's output exceeds 0.5; otherwise false.</returns>
public bool DetectPattern(Image img)
{
    // Step 1: convert the image into the (monochrome) 0/1 pattern the network expects.
    ImageProcessingHelper imgHelper = new ImageProcessingHelper();
    ArrayList arryListInput = imgHelper.ArrayListFromImage(img);

    // The pixel count must match the input layer exactly.
    if (img.Width * img.Height != _network.InputLayer.Count)
    {
        throw new InvalidInputException("The number of pixels in the input image doesn't match the number of input layer neurons!", null);
    }

    // Step 2: run the network; the single result sits at index 0.
    ArrayList output = _network.RunNetwork(arryListInput);
    float result = (float)output[0];

    // Log a rounded copy for readability alongside the exact value.
    Debug.WriteLine("Detection Result: " + Math.Round(result, 3) + " (" + result + ")");

    // TODO: what is the best value for this threshold?
    if (result <= 0.5)
    {
        return false;
    }
    return true;
}