private static void DrawImage(Image image, double[] values)
{
    double widthHeight = Math.Sqrt(values.Length);      // they should be square
    int widthHeightInt = widthHeight.ToInt_Round();
    if (!Math1D.IsNearValue(widthHeight, widthHeightInt))
    {
        throw new ApplicationException("Expected square images");
    }

    BitmapSource source;
    //if (isColor)
    //{
    //    source = UtilityWPF.GetBitmap_RGB(example, width, height);
    //}
    //else
    //{
    source = UtilityWPF.GetBitmap(values, widthHeightInt, widthHeightInt);
    //}

    image.Source = source;
    image.Width = source.PixelWidth;        // if this isn't set, the image will take up all of the width, and be huge
    image.Height = source.PixelHeight;
}
private static BitmapSource GetBitmap(double[] input)
{
    int size = Convert.ToInt32(Math.Sqrt(input.Length));
    if (size * size != input.Length)
    {
        throw new ArgumentException("Must pass in a square image");
    }

    byte[][] colors = input.
        Select(o =>
        {
            byte alpha = Convert.ToByte(Math.Round(o * 255));
            return new byte[] { alpha, 0, 0, 0 };
        }).
        ToArray();

    return UtilityWPF.GetBitmap(colors, size, size);
}
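For context, a minimal sketch of how the helper above might be called. The control name imagePreview and the 16x16 gradient are hypothetical; only GetBitmap and the size-constraining trick from DrawImage come from the code above.

// Minimal usage sketch (assumptions: an Image control named imagePreview exists in the XAML,
// and the GetBitmap helper above is in scope in the same class).
private void ShowTestPattern()
{
    const int SIZE = 16;

    // Values in the 0..1 range: 0 at the first pixel, 1 at the last
    double[] values = Enumerable.Range(0, SIZE * SIZE).
        Select(o => (double)o / (SIZE * SIZE - 1)).
        ToArray();

    imagePreview.Source = GetBitmap(values);
    imagePreview.Width = SIZE;      // same trick as DrawImage: constrain size so the Image doesn't stretch
    imagePreview.Height = SIZE;
}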
private void Timer_Tick(object sender, EventArgs e)
{
    try
    {
        #region prep

        lblError.Text = "";
        lblError.Visibility = Visibility.Collapsed;

        if (_recognizer == null)
        {
            _timer.Stop();
            ClearVisuals("no recognizer set");
            return;     // without this return, the lines below would dereference null
        }

        if (!_recognizer.IsOn)
        {
            ClearVisuals("Powered Off");
            return;
        }

        Tuple<int, int> cameraWidthHeight = _recognizer.CameraWidthHeight;
        if (cameraWidthHeight == null)
        {
            ClearVisuals("recognizer's camera not set");
            return;
        }

        bool isColor = _recognizer.IsColor;

        #endregion

        #region latest image

        double[] image = _recognizer.LatestImage;

        if (image == null)
        {
            canvasPixels.Source = null;
        }
        else if (isColor)
        {
            canvasPixels.Source = UtilityWPF.GetBitmap_RGB(image, cameraWidthHeight.Item1, cameraWidthHeight.Item2);
        }
        else
        {
            canvasPixels.Source = UtilityWPF.GetBitmap(image, cameraWidthHeight.Item1, cameraWidthHeight.Item2);
        }

        #endregion

        #region nn outputs

        Tuple<LifeEventType, double>[] nnOutputs = _recognizer.CurrentOutput;

        panelOutputs.Children.Clear();

        if (nnOutputs != null)
        {
            DrawNNOutputs(panelOutputs, nnOutputs);
        }

        #endregion

        #region som

        SOMResult som = _recognizer.SOM;

        bool shouldRenderSOM = false;

        if (som == null)
        {
            panelSOM.Visibility = Visibility.Collapsed;
            panelSOM.Child = null;
            _currentSom = null;
        }
        else
        {
            // Only re-render when the SOM instance has actually changed
            shouldRenderSOM = true;

            if (_currentSom != null && som.Nodes.Length == _currentSom.Nodes.Length && som.Nodes.Length > 0 && som.Nodes[0].Token == _currentSom.Nodes[0].Token)
            {
                shouldRenderSOM = false;
            }
        }

        if (shouldRenderSOM)
        {
            SelfOrganizingMapsWPF.ShowResults2D_Tiled(panelSOM, som, cameraWidthHeight.Item1, cameraWidthHeight.Item2, DrawSOMTile);
            _currentSom = som;
            panelSOM.Visibility = Visibility.Visible;
        }

        #endregion

        #region training data

        var trainingData = _recognizer.TrainingData;

        if (trainingData == null || trainingData.Item1 == null || trainingData.Item1.ImportantEvents == null || trainingData.Item1.ImportantEvents.Length == 0)
        {
            _currentTrainingData = null;
            panelTrainingData.Child = null;
            panelTrainingData.Visibility = Visibility.Collapsed;
        }
        else if (!IsSame(_currentTrainingData, trainingData))
        {
            _currentTrainingData = trainingData;
            DrawTrainingData(panelTrainingData, trainingData);
            panelTrainingData.Visibility = Visibility.Visible;
        }

        #endregion
    }
    catch (Exception ex)
    {
        lblError.Text = ex.Message;
        lblError.Visibility = Visibility.Visible;
    }
}
private static void DrawTrainingData_Classification(Grid grid, string heading, IEnumerable<double[]> examples, int width, int height, bool isColor)
{
    if (grid.RowDefinitions.Count > 0)
    {
        grid.RowDefinitions.Add(new RowDefinition() { Height = new GridLength(6) });
    }

    grid.RowDefinitions.Add(new RowDefinition() { Height = new GridLength(1, GridUnitType.Auto) });

    #region heading

    // Heading (just reusing the progress bar header to get a consistent look)
    FrameworkElement header = GetNNOutputBar(heading, null);
    //header.LayoutTransform = new RotateTransform(-90);      // this just takes up too much vertical space

    Grid.SetColumn(header, 0);
    Grid.SetRow(header, grid.RowDefinitions.Count - 1);

    grid.Children.Add(header);

    #endregion

    #region examples

    //TODO: May want to use a lighter weight way of drawing these
    WrapPanel examplePanel = new WrapPanel()
    {
        HorizontalAlignment = HorizontalAlignment.Left,
        VerticalAlignment = VerticalAlignment.Top,
    };

    foreach (var example in examples)
    {
        BitmapSource source;
        if (isColor)
        {
            source = UtilityWPF.GetBitmap_RGB(example, width, height);
        }
        else
        {
            source = UtilityWPF.GetBitmap(example, width, height);
        }

        Image image = new Image()
        {
            Source = source,
            Width = source.PixelWidth * 2,      // if this isn't set, the image will take up all of the width, and be huge
            Height = source.PixelHeight * 2,
        };

        examplePanel.Children.Add(image);
    }

    Grid.SetColumn(examplePanel, 2);
    Grid.SetRow(examplePanel, grid.RowDefinitions.Count - 1);

    grid.Children.Add(examplePanel);

    #endregion
}
private void ShowConvolution(Convolution2D conv)
{
    byte[][] colors = Convolutions.GetColors(conv, ConvolutionResultNegPosColoring.BlackWhite);
    _image.Source = UtilityWPF.GetBitmap(colors, conv.Width, conv.Height);
}
private void ResetTraining()
{
    _patternStorage = null;
    panelTrainingImages.Children.Clear();

    if (_images.Count == 0)
    {
        return;
    }

    #region choose images

    int numImages;
    if (!int.TryParse(txtTrainCount.Text, out numImages))
    {
        MessageBox.Show("Couldn't parse count as an integer", this.Title, MessageBoxButton.OK, MessageBoxImage.Warning);
        return;
    }

    numImages = Math.Min(_images.Count, numImages);

    var trainImages = UtilityCore.RandomRange(0, _images.Count, numImages).
        Select(o => new
        {
            File = _images[o],
            Conv = GetTrainingImage(_images[o], _trainKernel),
        }).
        ToArray();

    #endregion

    //_patternStorage = new RandomPatternStorage();
    _patternStorage = new Hopfield(IMAGESIZE * IMAGESIZE, 0, 1, .9);

    #region show thumbnails

    // Display thumbnails
    //TODO: Run them through a KMeans, then sort in 1D
    //TODO: Show full resolution over the canvas on mouseover
    //TODO: Show full resolution under the canvas on click
    foreach (var trainImage in trainImages)
    {
        double widthHeight = Math.Sqrt(trainImage.Conv.Length);     // they should be square
        int widthHeightInt = widthHeight.ToInt_Round();
        if (!Math1D.IsNearValue(widthHeight, widthHeightInt))
        {
            throw new ApplicationException("Expected square images");
        }

        double[] imageConv = trainImage.Conv;
        imageConv = _patternStorage.Convert_Local_External(imageConv);

        BitmapSource source;
        //if (isColor)
        //{
        //    source = UtilityWPF.GetBitmap_RGB(example, width, height);
        //}
        //else
        //{
        source = UtilityWPF.GetBitmap(imageConv, widthHeightInt, widthHeightInt);
        //}

        Image image = new Image()
        {
            Source = source,
            Width = source.PixelWidth,      // if this isn't set, the image will take up all of the width, and be huge
            Height = source.PixelHeight,
            Margin = new Thickness(8),
        };

        panelTrainingImages.Children.Add(image);
    }

    #endregion

    _patternStorage.AddItems(trainImages.Select(o => o.Conv).ToArray());
}
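The square-size check above is duplicated verbatim in DrawImage. A small helper could consolidate it; this is only a sketch, and the name GetSquareSide is hypothetical.

// Hypothetical helper consolidating the repeated square-size check
// (assumption: Math1D.IsNearValue and ToInt_Round behave exactly as used above).
private static int GetSquareSide(double[] values)
{
    double widthHeight = Math.Sqrt(values.Length);
    int widthHeightInt = widthHeight.ToInt_Round();

    if (!Math1D.IsNearValue(widthHeight, widthHeightInt))
    {
        throw new ApplicationException("Expected square images");
    }

    return widthHeightInt;
}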
private void Timer_Tick(object sender, EventArgs e)
{
    // This uses the eval's duration, and uses a neural net for the trial tick count.  This is to recreate what the training actually sees
    try
    {
        imageInput.Source = null;
        canvasMain.Children.Clear();

        DateTime now = DateTime.UtcNow;

        if (_harness == null || _evalArgs == null)
        {
            _prevTick = now;
            return;
        }

        // Reset
        if (_tickCounter >= _evalArgs.TotalNumberEvaluations)
        {
            #region reset

            if (_experiment != null)
            {
                NeatGenome winner = _ea.CurrentChampGenome;
                _genomeViewer.RefreshView(winner);

                if (winner == null)
                {
                    _prevTick = now;
                    return;
                }

                bool showedHyper = false;
                if (_experiment.IsHyperNEAT)
                {
                    var decoder = _experiment.CreateGenomeDecoder(_hyperneatArgs);
                    if (decoder is HyperNeatDecoder hyperDecoder)
                    {
                        //NOTE: There is a flaw.  The INetworkDefinition that this method returns is acyclic, but the method that generates the IBlackBox is CreateSubstrateNetwork_FastCyclicNetwork.
                        //So that INetworkDefinition seems to be limited or incorrect
                        //
                        //Actually, that might not be a flaw.  The FastCyclic may be processing the CPPN which is cyclic
                        var finalPhenome = hyperDecoder.Decode2(winner);
                        _genomeViewer2.RefreshView(finalPhenome.Item2);
                        nnViewerHost2.Visibility = Visibility.Visible;
                        showedHyper = true;
                    }
                }

                if (!showedHyper)
                {
                    nnViewerHost2.Visibility = Visibility.Collapsed;
                }

                if (_experiment.IsHyperNEAT)
                {
                    _winningBrain = _experiment.GetBlackBox(winner, _hyperneatArgs);
                }
                else
                {
                    _winningBrain = _experiment.GetBlackBox(winner);
                }

                _winningBrainTime = now;

                _harness.ClearItem();
                _harness.SetItem(AntPos_Evaluator.GetNewItem(_harness, _evalArgs));
                _tickCounter = -1;
            }

            #endregion
        }

        // Tell the harness to go
        _harness.Tick(_evalArgs.ElapsedTime_Seconds);
        _tickCounter++;

        var prevPosition = _harness.GetPreviousPosition(_harness.Time - _evalArgs.Delay_Seconds);
        var currentPosition = _harness.Item;

        #region draw input

        double[] inputArr = new double[_harness.InputSizeXY * _harness.InputSizeXY];
        AntPos_Evaluator.ClearArray(inputArr);

        if (prevPosition != null)
        {
            double dotRadius = (_harness.VisionSize / _harness.InputSizeXY) * Math.Sqrt(2);
            AntPos_Evaluator.ApplyPoint(inputArr, _harness.InputCellCenters, dotRadius, prevPosition.Item2, true);
        }

        imageInput.Source = UtilityWPF.GetBitmap(inputArr, _harness.InputSizeXY, _harness.InputSizeXY, invert: true);

        #endregion

        #region draw expected output

        double[] expectedOutput = null;

        if (currentPosition == null)
        {
            expectedOutput = AntPos_Evaluator.GetExpectedOutput(null, _harness, _evalArgs);
        }
        else
        {
            var currentPos = Tuple.Create(currentPosition, currentPosition.Position, currentPosition.Velocity);
            expectedOutput = AntPos_Evaluator.GetExpectedOutput(currentPos, _harness, _evalArgs);
        }

        imageExpectedOutput.Source = UtilityWPF.GetBitmap(expectedOutput, _harness.OutputSizeXY, _harness.OutputSizeXY, invert: true);

        #endregion

        #region draw nn output

        double[] nnOutput = null;

        if (_winningBrain != null)
        {
            nnOutput = new double[_harness.OutputSizeXY * _harness.OutputSizeXY];

            // Brain.Tick
            _winningBrain.InputSignalArray.CopyFrom(inputArr, 0);
            _winningBrain.Activate();
            _winningBrain.OutputSignalArray.CopyTo(nnOutput, 0);

            imageNNOutput.Source = UtilityWPF.GetBitmap(nnOutput, _harness.OutputSizeXY, _harness.OutputSizeXY, invert: true);
        }
        else
        {
            imageNNOutput.Source = null;
        }

        #endregion

        #region draw error (nn - expected)

        double[] error = null;

        if (nnOutput != null)
        {
            error = Enumerable.Range(0, nnOutput.Length).
                Select(o => Math.Abs(nnOutput[o] - expectedOutput[o])).
                ToArray();

            imageError.Source = UtilityWPF.GetBitmap(error, _harness.OutputSizeXY, _harness.OutputSizeXY, invert: true);
        }
        else
        {
            imageError.Source = null;
        }

        #endregion

        #region draw actual

        // Vision Rectangle
        Rectangle visionRect = new Rectangle()
        {
            Stroke = Brushes.Silver,
            StrokeThickness = .3,
            Width = _harness.VisionSize,
            Height = _harness.VisionSize,
        };

        Canvas.SetLeft(visionRect, _harness.VisionSize / -2);
        Canvas.SetTop(visionRect, _harness.VisionSize / -2);

        canvasMain.Children.Add(visionRect);

        // Dot Previous
        if (prevPosition != null)
        {
            Ellipse dot = new Ellipse()
            {
                Fill = new SolidColorBrush(prevPosition.Item1.Color),
                Stroke = Brushes.Black,
                StrokeThickness = .3,
                Width = 2,
                Height = 2,
            };

            Canvas.SetLeft(dot, prevPosition.Item2.X - 1);
            Canvas.SetTop(dot, prevPosition.Item2.Y - 1);

            canvasMain.Children.Add(dot);
        }

        // Dot Current
        if (currentPosition != null)
        {
            Ellipse dot = new Ellipse()
            {
                Fill = new SolidColorBrush(currentPosition.Color),
                Stroke = Brushes.White,
                StrokeThickness = .3,
                Width = 2,
                Height = 2,
            };

            Canvas.SetLeft(dot, currentPosition.Position.X - 1);
            Canvas.SetTop(dot, currentPosition.Position.Y - 1);

            canvasMain.Children.Add(dot);
        }

        // Transform
        TransformGroup transform = new TransformGroup();
        transform.Children.Add(new ScaleTransform(canvasMain.ActualWidth / _harness.MapSize, canvasMain.ActualHeight / _harness.MapSize));
        transform.Children.Add(new TranslateTransform(canvasMain.ActualWidth / 2, canvasMain.ActualHeight / 2));

        canvasMain.RenderTransform = transform;

        #endregion

        _prevTick = now;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.ToString(), this.Title, MessageBoxButton.OK, MessageBoxImage.Error);
    }
}
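The error visualization in both tick handlers reduces to a per-pixel absolute difference rendered as a grayscale bitmap. A standalone sketch of just that step; the helper name is hypothetical, and the meaning of invert is an assumption based on how the handlers above use it.

// Standalone sketch of the error-image step (assumptions: both arrays are the same
// length, sizeXY * sizeXY, with values in the 0..1 range).
private static BitmapSource GetErrorBitmap(double[] nnOutput, double[] expectedOutput, int sizeXY)
{
    double[] error = Enumerable.Range(0, nnOutput.Length).
        Select(o => Math.Abs(nnOutput[o] - expectedOutput[o])).
        ToArray();

    // invert: true is passed the same way the tick handlers above do, presumably flipping the grayscale
    return UtilityWPF.GetBitmap(error, sizeXY, sizeXY, invert: true);
}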
private void Timer_Tick_REAL(object sender, EventArgs e)
{
    // This uses actual time elapsed, and runs continuously, grabbing a new neural net every 10 seconds
    try
    {
        imageInput.Source = null;
        canvasMain.Children.Clear();

        DateTime now = DateTime.UtcNow;

        if (_harness == null)
        {
            _prevTick = now;
            return;
        }

        // Tell the harness to go
        _harness.Tick((now - _prevTick).TotalSeconds);

        var prevPosition = _harness.GetPreviousPosition(_harness.Time - _evalArgs.Delay_Seconds);
        var currentPosition = _harness.Item;

        if (_experiment != null && (_winningBrain == null || (now - _winningBrainTime).TotalSeconds > 10))
        {
            NeatGenome winner = _ea.CurrentChampGenome;
            _genomeViewer.RefreshView(winner);
            _winningBrain = _experiment.GetBlackBox(winner);
            _winningBrainTime = now;
        }

        #region draw input

        double[] inputArr = new double[_harness.InputSizeXY * _harness.InputSizeXY];
        AntPos_Evaluator.ClearArray(inputArr);

        if (prevPosition != null)
        {
            double dotRadius = (_harness.VisionSize / _harness.InputSizeXY) * Math.Sqrt(2);
            AntPos_Evaluator.ApplyPoint(inputArr, _harness.InputCellCenters, dotRadius, prevPosition.Item2, true);
        }

        imageInput.Source = UtilityWPF.GetBitmap(inputArr, _harness.InputSizeXY, _harness.InputSizeXY, invert: true);

        #endregion

        #region draw expected output

        double[] expectedOutput = null;

        if (currentPosition == null)
        {
            expectedOutput = AntPos_Evaluator.GetExpectedOutput(null, _harness, _evalArgs);
        }
        else
        {
            var currentPos = Tuple.Create(currentPosition, currentPosition.Position, currentPosition.Velocity);
            expectedOutput = AntPos_Evaluator.GetExpectedOutput(currentPos, _harness, _evalArgs);
        }

        imageExpectedOutput.Source = UtilityWPF.GetBitmap(expectedOutput, _harness.OutputSizeXY, _harness.OutputSizeXY, invert: true);

        #endregion

        #region draw nn output

        double[] nnOutput = null;

        if (_winningBrain != null)
        {
            nnOutput = new double[_harness.OutputSizeXY * _harness.OutputSizeXY];

            // Brain.Tick
            _winningBrain.InputSignalArray.CopyFrom(inputArr, 0);
            _winningBrain.Activate();
            _winningBrain.OutputSignalArray.CopyTo(nnOutput, 0);

            imageNNOutput.Source = UtilityWPF.GetBitmap(nnOutput, _harness.OutputSizeXY, _harness.OutputSizeXY, invert: true);
        }
        else
        {
            imageNNOutput.Source = null;
        }

        #endregion

        #region draw error (nn - expected)

        double[] error = null;

        if (nnOutput != null)
        {
            error = Enumerable.Range(0, nnOutput.Length).
                Select(o => Math.Abs(nnOutput[o] - expectedOutput[o])).
                ToArray();

            imageError.Source = UtilityWPF.GetBitmap(error, _harness.OutputSizeXY, _harness.OutputSizeXY, invert: true);
        }
        else
        {
            imageError.Source = null;
        }

        #endregion

        #region draw actual

        // Vision Rectangle
        Rectangle visionRect = new Rectangle()
        {
            Stroke = Brushes.Silver,
            StrokeThickness = .3,
            Width = _harness.VisionSize,
            Height = _harness.VisionSize,
        };

        Canvas.SetLeft(visionRect, _harness.VisionSize / -2);
        Canvas.SetTop(visionRect, _harness.VisionSize / -2);

        canvasMain.Children.Add(visionRect);

        // Dot Previous
        if (prevPosition != null)
        {
            Ellipse dot = new Ellipse()
            {
                Fill = new SolidColorBrush(prevPosition.Item1.Color),
                Stroke = Brushes.Black,
                StrokeThickness = .3,
                Width = 2,
                Height = 2,
            };

            Canvas.SetLeft(dot, prevPosition.Item2.X - 1);
            Canvas.SetTop(dot, prevPosition.Item2.Y - 1);

            canvasMain.Children.Add(dot);
        }

        // Dot Current
        if (currentPosition != null)
        {
            Ellipse dot = new Ellipse()
            {
                Fill = new SolidColorBrush(currentPosition.Color),
                Stroke = Brushes.White,
                StrokeThickness = .3,
                Width = 2,
                Height = 2,
            };

            Canvas.SetLeft(dot, currentPosition.Position.X - 1);
            Canvas.SetTop(dot, currentPosition.Position.Y - 1);

            canvasMain.Children.Add(dot);
        }

        // Transform
        TransformGroup transform = new TransformGroup();
        transform.Children.Add(new ScaleTransform(canvasMain.ActualWidth / _harness.MapSize, canvasMain.ActualHeight / _harness.MapSize));
        transform.Children.Add(new TranslateTransform(canvasMain.ActualWidth / 2, canvasMain.ActualHeight / 2));

        canvasMain.RenderTransform = transform;

        #endregion

        _prevTick = now;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.ToString(), this.Title, MessageBoxButton.OK, MessageBoxImage.Error);
    }
}
private void TakePicture_Click(object sender, RoutedEventArgs e)
{
    try
    {
        int size;
        if (!int.TryParse(txtImageSize.Text, out size))
        {
            MessageBox.Show("Couldn't parse image size as an integer", this.Title, MessageBoxButton.OK, MessageBoxImage.Warning);
            return;
        }
        else if (size < 1)
        {
            MessageBox.Show("Size must be at least 1", this.Title, MessageBoxButton.OK, MessageBoxImage.Warning);
            return;
        }

        if (radColor.IsChecked.Value)
        {
            // No need to convert in/out of bitmap source
            image.Source = UtilityWPF.RenderControl(grdViewPort, size, size, true);
            return;
        }

        // Render the control
        BitmapCustomCachedBytes bitmap = null;

        if (radGrayTransparent.IsChecked.Value)
        {
            bitmap = (BitmapCustomCachedBytes)UtilityWPF.RenderControl(grdViewPort, size, size, false, Colors.Transparent, true);
        }
        else if (radGrayBlack.IsChecked.Value)
        {
            Brush background = grdViewPort.Background;
            grdViewPort.Background = Brushes.Black;
            grdViewPort.UpdateLayout();

            bitmap = (BitmapCustomCachedBytes)UtilityWPF.RenderControl(grdViewPort, size, size, false, Colors.Black, true);

            grdViewPort.Background = background;
        }
        else
        {
            throw new ApplicationException("Unknown radio button");
        }

        // Convert to gray
        var colors = bitmap.GetColors_Byte().
            Select(o =>
            {
                byte gray = Convert.ToByte(UtilityWPF.ConvertToGray(o[1], o[2], o[3]));
                return new byte[] { o[0], gray, gray, gray };
            }).
            ToArray();

        // Show it
        image.Source = UtilityWPF.GetBitmap(colors, size, size);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.ToString(), this.Title, MessageBoxButton.OK, MessageBoxImage.Error);
    }
}
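The gray-conversion step above can stand on its own. Below is a sketch of it as a helper; the name ToGrayBitmap is hypothetical, and the ARGB layout (o[0] = alpha, o[1..3] = R, G, B) is assumed from how the handler above indexes the arrays.

// Hypothetical helper isolating the gray conversion
// (assumption: argbColors is one byte[4] per pixel in ARGB order, width * height entries).
private static BitmapSource ToGrayBitmap(byte[][] argbColors, int width, int height)
{
    var gray = argbColors.
        Select(o =>
        {
            byte g = Convert.ToByte(UtilityWPF.ConvertToGray(o[1], o[2], o[3]));
            return new byte[] { o[0], g, g, g };        // keep alpha, replace RGB with the gray value
        }).
        ToArray();

    return UtilityWPF.GetBitmap(gray, width, height);
}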