/// <summary>
/// Construct the recognizer: build the common image-filter pipeline and
/// load the three pre-trained networks (letters, suits, numbers) from disk.
/// </summary>
public CardRecognizer()
{
    // Initialize the common filter sequence; it is applied to most inputs:
    // grayscale (BT.709) -> Otsu binarization -> difference edge detection.
    commonSeq = new FiltersSequence();
    commonSeq.Add(Grayscale.CommonAlgorithms.BT709);
    commonSeq.Add(new OtsuThreshold());
    commonSeq.Add(new DifferenceEdgeDetector());

    // Custom resolver is needed while deserializing types from the neural library.
    AppDomain.CurrentDomain.AssemblyResolve += CurrentDomain_AssemblyResolve;
    try
    {
        // SECURITY NOTE(review): BinaryFormatter deserialization is unsafe on
        // untrusted data and is removed in .NET 9; these .net files should be
        // migrated to a safer serialization format.
        BinaryFormatter bformat = new BinaryFormatter();

        // FIX: the original never closed the streams; 'using' guarantees disposal.
        using (Stream strm = File.Open("NetworkLetra.net", FileMode.Open))
        {
            NetworkLetra = (FeedforwardNetwork)bformat.Deserialize(strm);
        }
        using (Stream strm = File.Open("NetworkSuits.net", FileMode.Open))
        {
            NetworkSuits = (FeedforwardNetwork)bformat.Deserialize(strm);
        }
        using (Stream strm = File.Open("NetworkNumero.net", FileMode.Open))
        {
            NetworkNumero = (FeedforwardNetwork)bformat.Deserialize(strm);
        }
    }
    finally
    {
        // FIX: detach the resolver even if loading throws, so a failed
        // construction does not leave a dangling event subscription.
        AppDomain.CurrentDomain.AssemblyResolve -= CurrentDomain_AssemblyResolve;
    }
}
/// <summary>
/// Create, train (via a genetic algorithm) and evaluate a neural network for XOR.
/// </summary>
/// <param name="args">Not used.</param>
static void Main(string[] args)
{
    // 2-3-1 feedforward network with randomized starting weights.
    FeedforwardNetwork network = new FeedforwardNetwork();
    network.AddLayer(new FeedforwardLayer(2));
    network.AddLayer(new FeedforwardLayer(3));
    network.AddLayer(new FeedforwardLayer(1));
    network.Reset();

    // Genetic trainer: population 5000, 10% mutation, mate top 25%.
    TrainingSetNeuralGeneticAlgorithm train = new TrainingSetNeuralGeneticAlgorithm(
            network, true, XOR_INPUT, XOR_IDEAL, 5000, 0.1, 0.25);

    int epoch = 1;
    while (true)
    {
        train.Iteration();
        Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error);
        epoch++;
        if (epoch >= 5000 || train.Error <= 0.001)
        {
            break;
        }
    }

    // Use the best individual produced by the genetic algorithm.
    network = train.Network;

    Console.WriteLine("Neural Network Results:");
    for (int sample = 0; sample < XOR_IDEAL.Length; sample++)
    {
        double[] actual = network.ComputeOutputs(XOR_INPUT[sample]);
        Console.WriteLine(XOR_INPUT[sample][0] + "," + XOR_INPUT[sample][1]
            + ", actual=" + actual[0] + ",ideal=" + XOR_IDEAL[sample][0]);
    }
}
/// <summary>
/// Create, train (via backpropagation) and evaluate a neural network for XOR.
/// </summary>
/// <param name="args">Not used.</param>
static void Main(string[] args)
{
    // 2-3-1 feedforward network with randomized starting weights.
    FeedforwardNetwork network = new FeedforwardNetwork();
    network.AddLayer(new FeedforwardLayer(2));
    network.AddLayer(new FeedforwardLayer(3));
    network.AddLayer(new FeedforwardLayer(1));
    network.Reset();

    // Backpropagation trainer: learning rate 0.7, momentum 0.9.
    Train train = new HeatonResearchNeural.Feedforward.Train.Backpropagation
            .Backpropagation(network, XOR_INPUT, XOR_IDEAL, 0.7, 0.9);

    int epoch = 1;
    while (true)
    {
        train.Iteration();
        Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error);
        epoch++;
        if (epoch >= 5000 || train.Error <= 0.001)
        {
            break;
        }
    }

    Console.WriteLine("Neural Network Results:");
    for (int sample = 0; sample < XOR_IDEAL.Length; sample++)
    {
        double[] actual = network.ComputeOutputs(XOR_INPUT[sample]);
        Console.WriteLine(XOR_INPUT[sample][0] + "," + XOR_INPUT[sample][1]
            + ", actual=" + actual[0] + ",ideal=" + XOR_IDEAL[sample][0]);
    }
}
/// <summary>
/// Construct the bot: restore the trained network and the word histogram
/// that were persisted by the training program.
/// </summary>
public YearBornBot()
{
    object restoredNetwork = SerializeObject.Load(Config.FILENAME_WHENBORN_NET);
    this.network = (FeedforwardNetwork)restoredNetwork;

    object restoredHistogram = SerializeObject.Load(Config.FILENAME_HISTOGRAM);
    this.histogram = (WordHistogram)restoredHistogram;
}
/// <summary>
/// Construct the GA trainer: configure rates, then build the initial
/// population, each chromosome wrapping an independent clone of the seed network.
/// </summary>
/// <param name="network">Seed network defining the topology of every individual.</param>
/// <param name="reset">True to randomize each clone's weights.</param>
/// <param name="populationSize">Number of chromosomes in the population.</param>
/// <param name="mutationPercent">Fraction of genes mutated per generation.</param>
/// <param name="percentToMate">Fraction of the population allowed to mate.</param>
/// <param name="opponent">Type of the opponent used to score individuals.</param>
public TicTacToeGenetic(FeedforwardNetwork network, bool reset,
        int populationSize, double mutationPercent, double percentToMate,
        Type opponent)
{
    this.setOpponent(opponent);
    this.PercentToMate = percentToMate;
    this.MatingPopulation = percentToMate * 2;
    this.MutationPercent = mutationPercent;
    this.PopulationSize = populationSize;

    this.Chromosomes = new TicTacToeChromosome[this.PopulationSize];
    for (int index = 0; index < this.Chromosomes.Length; index++)
    {
        FeedforwardNetwork candidate = (FeedforwardNetwork)network.Clone();
        if (reset)
        {
            candidate.Reset();  // randomize weights for a diverse population
        }

        TicTacToeChromosome chromosome = new TicTacToeChromosome(this, candidate);
        chromosome.UpdateGenes();
        SetChromosome(index, chromosome);
    }
    SortChromosomes();
}
/// <summary>
/// Return a deep clone of this neural network: the structure is copied and
/// the weight/threshold memory is transferred via the packed-array codec.
/// </summary>
/// <returns>A cloned copy of the neural network.</returns>
public Object Clone()
{
    FeedforwardNetwork cloned = CloneStructure();
    Double[] memory = MatrixCODEC.NetworkToArray(this);
    MatrixCODEC.ArrayToNetwork(memory, cloned);
    return cloned;
}
/// <summary>
/// Construct a chromosome whose genes encode the weight/threshold memory of
/// the supplied neural network. (The previous doc comment about "cities" was
/// copied from a TSP example and did not describe this type.)
/// </summary>
/// <param name="genetic">The genetic algorithm that owns this chromosome.</param>
/// <param name="network">The network whose matrix values become the genes.</param>
public TicTacToeChromosome(TicTacToeGenetic genetic, FeedforwardNetwork network)
{
    this.Network = network;
    this.GA = genetic;

    // One gene per matrix element of the network.
    InitGenes(network.MatrixSize);
    UpdateGenes();
}
/// <summary>
/// Build the form and an initial (over-sized) network; pruning will later
/// remove unneeded hidden neurons.
/// </summary>
public PruneSelectiveForm()
{
    InitializeComponent();

    FeedforwardNetwork net = new FeedforwardNetwork();
    net.AddLayer(new FeedforwardLayer(NUM_INPUT));
    net.AddLayer(new FeedforwardLayer(NUM_HIDDEN));
    net.AddLayer(new FeedforwardLayer(NUM_OUTPUT));
    net.Reset();
    this.network = net;
}
/// <summary>
/// Return a clone of this network's structure only: layer sizes are copied,
/// weights and thresholds are not.
/// </summary>
/// <returns>A structural copy of the neural network.</returns>
public FeedforwardNetwork CloneStructure()
{
    FeedforwardNetwork copy = new FeedforwardNetwork();
    foreach (FeedforwardLayer source in this.layers)
    {
        copy.AddLayer(new FeedforwardLayer(source.NeuronCount));
    }
    return copy;
}
/// <summary>
/// End-to-end training pipeline: build word histograms from the good/bad
/// training text, derive training sets from them, train the network, and
/// persist the network and the good-word histogram to disk.
/// </summary>
public void Process()
{
    this.network = NetworkUtil.CreateNetwork();
    Console.WriteLine("Preparing training sets...");
    this.common = new CommonWords(Config.FILENAME_COMMON_WORDS);
    this.histogramGood = new WordHistogram(this.common);
    this.histogramBad = new WordHistogram(this.common);

    // load the good words
    this.histogramGood.BuildFromFile(Config.FILENAME_GOOD_TRAINING_TEXT);
    this.histogramGood.BuildComplete();

    // load the bad words
    this.histogramBad.BuildFromFile(Config.FILENAME_BAD_TRAINING_TEXT);
    this.histogramBad.BuildComplete();

    // remove low scoring words: good keeps only above-mean words, bad keeps
    // only its top 1% (RemovePercent(0.99) drops the rest).
    this.histogramGood.RemoveBelow((int)this.histogramGood.CalculateMean());
    this.histogramBad.RemovePercent(0.99);

    // remove words the two classes have in common, then cap the feature count.
    this.histogramGood.RemoveCommon(this.histogramBad);
    this.histogramGood.Trim(Config.INPUT_SIZE);

    // NOTE(review): BOTH analyzers are built from histogramGood — the good
    // histogram appears to define the input feature vocabulary for both
    // sample classes. Looks intentional, but confirm the second argument
    // should not be histogramBad.
    this.goodAnalysis = new AnalyzeSentences(this.histogramGood, Config.INPUT_SIZE);
    this.badAnalysis = new AnalyzeSentences(this.histogramGood, Config.INPUT_SIZE);

    // Good samples are labeled 0.9, bad samples 0.1.
    this.goodAnalysis.Process(this.trainingSet, 0.9, Config.FILENAME_GOOD_TRAINING_TEXT);
    this.badAnalysis.Process(this.trainingSet, 0.1, Config.FILENAME_BAD_TRAINING_TEXT);

    this.sampleCount = this.trainingSet.Ideal.Count;
    Console.WriteLine("Processing " + this.sampleCount + " training sets.");
    AllocateTrainingSets();
    CopyTrainingSets();
    TrainNetworkBackpropBackprop();

    // Persist the results for the bot that consumes them at query time.
    SerializeObject.Save(Config.FILENAME_WHENBORN_NET, this.network);
    SerializeObject.Save(Config.FILENAME_HISTOGRAM, this.histogramGood);
    Console.WriteLine("Training complete.");
}
/// <summary>
/// Build a board-evaluation network: 9 inputs (one per square), one or two
/// hidden layers (second only when NEURONS_HIDDEN_2 > 0) and a single output.
/// </summary>
/// <returns>A new feedforward network with randomized weights.</returns>
public static FeedforwardNetwork createNetwork()
{
    FeedforwardNetwork net = new FeedforwardNetwork();
    net.AddLayer(new FeedforwardLayer(9));
    net.AddLayer(new FeedforwardLayer(NeuralTicTacToe.NEURONS_HIDDEN_1));

    bool useSecondHiddenLayer = NeuralTicTacToe.NEURONS_HIDDEN_2 > 0;
    if (useSecondHiddenLayer)
    {
        net.AddLayer(new FeedforwardLayer(NeuralTicTacToe.NEURONS_HIDDEN_2));
    }

    net.AddLayer(new FeedforwardLayer(1));
    net.Reset();
    return net;
}
/// <summary>
/// Restore the memory of a neural network from a packed linear array — the
/// inverse of NetworkToArray.
/// </summary>
/// <param name="array">The packed weight/threshold values.</param>
/// <param name="network">The network whose matrices will be populated.</param>
public static void ArrayToNetwork(Double[] array, FeedforwardNetwork network)
{
    int position = 0;
    foreach (FeedforwardLayer layer in network.Layers)
    {
        // Only layers with an outbound weight matrix consume array elements.
        if (layer.Next != null)
        {
            position = layer.LayerMatrix.FromPackedArray(array, position);
        }
    }
}
/// <summary>
/// Build the sine-wave prediction network. All layers share a single
/// hyperbolic-tangent activation; the second hidden layer is added only
/// when SineWave.NEURONS_HIDDEN_2 > 0.
/// </summary>
public void createNetwork()
{
    ActivationFunction activation = new ActivationTANH();

    FeedforwardNetwork net = new FeedforwardNetwork();
    net.AddLayer(new FeedforwardLayer(activation, INPUT_SIZE));
    net.AddLayer(new FeedforwardLayer(activation, SineWave.NEURONS_HIDDEN_1));
    if (SineWave.NEURONS_HIDDEN_2 > 0)
    {
        net.AddLayer(new FeedforwardLayer(activation, SineWave.NEURONS_HIDDEN_2));
    }
    net.AddLayer(new FeedforwardLayer(activation, OUTPUT_SIZE));
    net.Reset();

    this.network = net;
}
/// <summary>
/// Build the text-classification network. All layers share a single sigmoid
/// activation; the second hidden layer is added only when
/// Config.NEURONS_HIDDEN_2 > 0.
/// </summary>
/// <returns>A new feedforward network with randomized weights.</returns>
public static FeedforwardNetwork CreateNetwork()
{
    ActivationFunction activation = new ActivationSigmoid();

    FeedforwardNetwork net = new FeedforwardNetwork();
    net.AddLayer(new FeedforwardLayer(activation, Config.INPUT_SIZE));
    net.AddLayer(new FeedforwardLayer(activation, Config.NEURONS_HIDDEN_1));
    if (Config.NEURONS_HIDDEN_2 > 0)
    {
        net.AddLayer(new FeedforwardLayer(activation, Config.NEURONS_HIDDEN_2));
    }
    net.AddLayer(new FeedforwardLayer(activation, Config.OUTPUT_SIZE));
    net.Reset();
    return net;
}
/// <summary>
/// Build the S&amp;P 500 prediction network. The input layer is twice
/// INPUT_SIZE (two values per sample window); all layers share a single
/// hyperbolic-tangent activation; the second hidden layer is added only
/// when PredictSP500.NEURONS_HIDDEN_2 > 0.
/// </summary>
public void createNetwork()
{
    ActivationFunction activation = new ActivationTANH();

    FeedforwardNetwork net = new FeedforwardNetwork();
    net.AddLayer(new FeedforwardLayer(activation, PredictSP500.INPUT_SIZE * 2));
    net.AddLayer(new FeedforwardLayer(activation, PredictSP500.NEURONS_HIDDEN_1));
    if (PredictSP500.NEURONS_HIDDEN_2 > 0)
    {
        net.AddLayer(new FeedforwardLayer(activation, PredictSP500.NEURONS_HIDDEN_2));
    }
    net.AddLayer(new FeedforwardLayer(activation, PredictSP500.OUTPUT_SIZE));
    net.Reset();

    this.network = net;
}
/// <summary>
/// Construct the GA trainer: configure rates, build the initial population
/// (one clone of the seed network per chromosome), report the pre-epoch
/// score and sort the population.
/// </summary>
/// <param name="network">Seed network defining the topology of every individual.</param>
/// <param name="reset">True to randomize each clone's weights.</param>
/// <param name="populationSize">Number of chromosomes in the population.</param>
/// <param name="mutationPercent">Fraction of genes mutated per generation.</param>
/// <param name="percentToMate">Fraction of the population allowed to mate.</param>
/// <param name="opponent">Type of the opponent used to score individuals.</param>
public TicTacToeGenetic(FeedforwardNetwork network, bool reset,
        int populationSize, double mutationPercent, double percentToMate,
        Type opponent)
{
    this.setOpponent(opponent);
    this.MutationPercent = mutationPercent;
    this.MatingPopulation = percentToMate * 2;
    this.PopulationSize = populationSize;
    this.PercentToMate = percentToMate;

    this.Chromosomes = new TicTacToeChromosome[this.PopulationSize];

    // FIX: removed the dead commented-out Parallel.ForEach scaffolding, the
    // unused range partitioner it needed, and a stray empty statement. The
    // loop runs sequentially; the lock is kept so SetChromosome stays safe
    // if the loop is parallelized again.
    for (int i = 0; i < Chromosomes.Length; i++)
    {
        Console.WriteLine("step: " + i);

        FeedforwardNetwork chromosomeNetwork = (FeedforwardNetwork)network.Clone();
        if (reset)
        {
            chromosomeNetwork.Reset();
        }

        TicTacToeChromosome c = new TicTacToeChromosome(this, chromosomeNetwork);
        c.UpdateGenes();
        lock (_locker)
        {
            SetChromosome(i, c);
        }
    }

    Console.WriteLine("PreEpoch # Error:" + getScore());
    SortChromosomes();
}
/// <summary>
/// Flatten the network's weight matrices into one linear array. Some training
/// algorithms require the network "memory" (weights and threshold values) in
/// this packed form.
/// </summary>
/// <param name="network">A neural network.</param>
/// <returns>The memory of the neural network as an array.</returns>
public static double[] NetworkToArray(FeedforwardNetwork network)
{
    // First pass: total number of matrix elements across all layers.
    int totalSize = 0;
    foreach (FeedforwardLayer layer in network.Layers)
    {
        if (layer.HasMatrix())
        {
            totalSize += layer.MatrixSize;
        }
    }

    // Second pass: append each layer's packed matrix to the result.
    Double[] packed = new Double[totalSize];
    int offset = 0;
    foreach (FeedforwardLayer layer in network.Layers)
    {
        // Only layers with an outbound weight matrix contribute elements.
        if (layer.Next != null)
        {
            foreach (double element in layer.LayerMatrix.ToPackedArray())
            {
                packed[offset++] = element;
            }
        }
    }
    return packed;
}
/// <summary>
/// Compare the two neural networks. For them to be equal they must be of the
/// same structure, and have the same matrix values.
/// </summary>
/// <param name="other">The other neural network.</param>
/// <returns>True if the two networks are equal.</returns>
public bool Equals(FeedforwardNetwork other)
{
    // FIX: the original dereferenced 'other' unconditionally and threw a
    // NullReferenceException for a null argument; Equals must return false.
    if (other == null)
    {
        return(false);
    }

    // NOTE(review): if 'other' has fewer layers than this network, the
    // indexed access below goes out of range (as in the original). A
    // layer-count comparison up front would fix this — confirm the element
    // type of Layers exposes a count and add it.
    int i = 0;
    foreach (FeedforwardLayer layer in this.Layers)
    {
        FeedforwardLayer otherLayer = other.Layers[i++];

        if (layer.NeuronCount != otherLayer.NeuronCount)
        {
            return(false);
        }

        // They must either both have, or both lack, a weight matrix.
        if ((layer.LayerMatrix == null) && (otherLayer.LayerMatrix != null))
        {
            return(false);
        }
        if ((layer.LayerMatrix != null) && (otherLayer.LayerMatrix == null))
        {
            return(false);
        }

        // When both have a matrix, the matrices must match element-for-element.
        if ((layer.LayerMatrix != null) && (otherLayer.LayerMatrix != null))
        {
            if (!layer.LayerMatrix.Equals(otherLayer.LayerMatrix))
            {
                return(false);
            }
        }
    }
    return(true);
}
/// <summary>
/// Create, train and use a 4-input neural network (XOR-style test set).
/// Trains with backpropagation, optionally prints per-sample results (DEBUG
/// builds), then dumps the input- and hidden-layer weight matrices to
/// "weights.txt" for the companion loader program.
/// </summary>
/// <param name="args">Not used</param>
static void Main(string[] args)
{
    // Disabled loader that once read 256 training samples from input.txt/output.txt:
    /*XOR_INPUT = new double[256][];
    XOR_IDEAL = new double[256][];
    String[] lines = File.ReadAllLines("input.txt");
    for (int i = 0; i < lines.Length; i++)
    {
        String str = lines[i];
        double[] input = new double[8];
        for (int j = 0; j < 8; j++)
        {
            input[j] = Double.Parse(str[j].ToString());
        }
        XOR_INPUT[i] = input;
    }
    lines = File.ReadAllLines("output.txt");
    for (int i = 0; i < lines.Length; i++)
    {
        String str = lines[i];
        double[] output = new double[1];
        output[0] = Double.Parse(str);
        XOR_IDEAL[i] = output;
    } */

    // 4-12-4 feedforward network with randomized starting weights.
    FeedforwardNetwork network = new FeedforwardNetwork();
    network.AddLayer(new FeedforwardLayer(4));
    network.AddLayer(new FeedforwardLayer(12));
    network.AddLayer(new FeedforwardLayer(4));
    network.Reset();

    // train the neural network (learning rate 0.2, momentum 0.95)
    Train train = new Backpropagation(network, XOR_INPUT, XOR_IDEAL, 0.2, 0.95);

    int epoch = 1;
    Stopwatch watch = new Stopwatch();
    watch.Start();
    do
    {
        train.Iteration();
        Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error);
        epoch++;
    } while ((epoch < 40000) && (train.Error > 0.001));
    watch.Stop();

    // Training time in seconds.
    Console.WriteLine(watch.ElapsedMilliseconds / 1000.0);

    // test the neural network
#if DEBUG
    Console.WriteLine("Neural Network Results:");
    for (int i = 0; i < XOR_IDEAL.Length; i++)
    {
        double[] actual = network.ComputeOutputs(XOR_INPUT[i]);
        Console.WriteLine(XOR_INPUT[i][0] + "," + XOR_INPUT[i][1] + ","
            + XOR_INPUT[i][2] + "," + XOR_INPUT[i][3]
            + ", actual=" + actual[0] + "," + actual[1] + ","
            + actual[2] + "," + actual[3]
            + ",ideal=" + XOR_IDEAL[i][0] + "," + XOR_IDEAL[i][1] + ","
            + XOR_IDEAL[i][2] + "," + XOR_IDEAL[i][3]);
    }
#endif

    // Dump the input-layer matrix: rounded to console, full precision to file.
    // NOTE(review): file.Write(double) formats with the CURRENT culture; the
    // companion loader also parses with the current culture, so the
    // round-trip only works when both programs run under the same locale —
    // confirm, or switch both to InvariantCulture together.
    FeedforwardLayer inputLayer = network.InputLayer;
    Console.WriteLine("Input layer matrix:");
    Matrix layerMatrix = inputLayer.LayerMatrix;
    using (StreamWriter file = new StreamWriter("weights.txt"))
    {
        for (int i = 0; i < layerMatrix.Rows; i++)
        {
            for (int j = 0; j < layerMatrix.Cols; j++)
            {
                Console.Write(Math.Round(layerMatrix[i, j], 3) + " ");
                file.Write(layerMatrix[i, j]);
                if (j != layerMatrix.Cols - 1)
                {
                    file.Write(" ");
                }
            }
            Console.WriteLine();
            file.WriteLine();
        }

        // A blank line separates the two matrices in weights.txt.
        file.WriteLine();

        // Then the first hidden layer's matrix, same format.
        FeedforwardLayer hiddenLayer = network.HiddenLayers.ToList()[0];
        Console.WriteLine("Hidden layer matrix:");
        layerMatrix = hiddenLayer.LayerMatrix;
        for (int i = 0; i < layerMatrix.Rows; i++)
        {
            for (int j = 0; j < layerMatrix.Cols; j++)
            {
                Console.Write(Math.Round(layerMatrix[i, j], 3) + " ");
                file.Write(layerMatrix[i, j]);
                if (j != layerMatrix.Cols - 1)
                {
                    file.Write(" ");
                }
            }
            Console.WriteLine();
            file.WriteLine();
        }
    }

#if SHOW_MATRIX
    // NOTE(review): this section does not compile if SHOW_MATRIX is defined —
    // the foreach below redeclares 'layerMatrix' while the outer local of the
    // same name is in scope (CS0136).
    FeedforwardLayer outputLayer = network.OutputLayer;
    Console.WriteLine("Output layer matrix:");
    layerMatrix = outputLayer.LayerMatrix;
    for (int i = 0; i < layerMatrix.Rows; i++)
    {
        for (int j = 0; j < layerMatrix.Cols; j++)
        {
            Console.Write(layerMatrix[i, j] + " ");
        }
        Console.WriteLine();
    }
    foreach (FeedforwardLayer feedforwardLayer in network.Layers)
    {
        Matrix layerMatrix = feedforwardLayer.LayerMatrix;
        Console.WriteLine(feedforwardLayer.Next);
        for (int i = 0; i < layerMatrix.Rows; i++)
        {
            for (int j = 0; j < layerMatrix.Cols; j++)
            {
                Console.Write(layerMatrix[i, j] + " ");
            }
            Console.WriteLine();
        }
    }
#endif
    Console.ReadKey();
}
/// <summary>
/// Run the trained edge-detection network over a binarized image: every 2x2
/// pixel block (overlapping, step 1) is fed to the network and its 4 outputs
/// are written back as black/white pixels. Progress (inputs + outputs) is
/// posted through the BackgroundWorker, and the result is also saved to
/// "edges.jpg". Returns null for images smaller than 2x2 or with an odd
/// width/height.
/// </summary>
public static BitmapImage GetEdges(BitmapImage binaryBitmapImage,
        FeedforwardNetwork network, BackgroundWorker backgroundWorker)
{
    Bitmap sourceBitmap = GetBitmap(binaryBitmapImage);
    int width = sourceBitmap.Width;
    int height = sourceBitmap.Height;
    if (width < 2 || height < 2)
    {
        return null;
    }
    // todo: rework — odd-sized images are rejected instead of being handled
    if (width % 2 != 0 || height % 2 != 0)
    {
        return null;
    }

    // Start from an all-white result; edge pixels are darkened below.
    Bitmap edgeBitmap = new Bitmap(width, height);
    for (int y = 0; y < height; y += 1)
    {
        for (int x = 0; x < width; x += 1)
        {
            edgeBitmap.SetPixel(x, y, Color.White);
        }
    }

    /*using (StreamWriter file = new StreamWriter("debug.txt")) {*/
    for (int y = 0; y < height - 1; y += 1)
    {
        for (int x = 0; x < width - 1; x += 1)
        {
            // NOTE(review): integer division — R / 255 yields exactly 0 or 1
            // only because the input is binarized (R is 0 or 255, per the
            // parameter name). For grayscale input this would need to be
            // R / 255.0 — confirm inputs are always binary.
            double[] values =
            {
                (sourceBitmap.GetPixel(x, y).R) / 255,
                (sourceBitmap.GetPixel(x + 1, y).R) / 255,
                (sourceBitmap.GetPixel(x, y + 1).R) / 255,
                (sourceBitmap.GetPixel(x + 1, y + 1).R) / 255
            };
            double[] output = network.ComputeOutputs(values);
            backgroundWorker.ReportProgress(0, new[] {values, output});
            //Thread.Sleep(300);

            // Map the 4 network outputs back onto the 2x2 block layout.
            double[,] outputValues =
            {
                { output[0], output[1] },
                { output[2], output[3] }
            };

            // (A commented-out debug dump of window coordinates and pixel
            // values to debug.txt used to live here.)

            for (int y1 = 0; y1 < 2; y1++)
            {
                for (int x1 = 0; x1 < 2; x1++)
                {
                    double value = outputValues[y1, x1];
                    // Nudge exact .5 fractions so Math.Round (banker's
                    // rounding) always rounds them up instead of to-even.
                    int intValue = (int)value;
                    if (value - intValue == 0.5)
                    {
                        value += 0.01;
                    }
                    int pixelValue = (int)(Math.Round(value) * 255);
                    Color color = Color.FromArgb(pixelValue, pixelValue, pixelValue);
                    // Only overwrite pixels that are still white: once a pixel
                    // has been marked as an edge (black) it stays black.
                    if (edgeBitmap.GetPixel(x + x1, y + y1).R == 255)
                    {
                        edgeBitmap.SetPixel(x + x1, y + y1, color);
                    }
                    /* file.Write(color.R); if (x1 == 1 && y1 == 1) { file.Write("\r\n"); } else { file.Write(" "); }*/
                }
            }
        }
    }
    // }

    edgeBitmap.Save("edges.jpg");
    BitmapImage resultBitmapImage = GetBitmapImage(edgeBitmap);
    resultBitmapImage.Freeze();  // make the image usable across threads
    return resultBitmapImage;
}
/// <summary>
/// Compare the two neural networks. For them to be equal they must be of the
/// same structure, and have the same matrix values.
/// </summary>
/// <param name="other">The other neural network.</param>
/// <returns>True if the two networks are equal.</returns>
public bool Equals(FeedforwardNetwork other)
{
    // FIX: the original dereferenced 'other' unconditionally and threw a
    // NullReferenceException for a null argument; Equals must return false.
    if (other == null)
    {
        return false;
    }

    // NOTE(review): if 'other' has fewer layers than this network, the
    // indexed access below goes out of range (as in the original). A
    // layer-count comparison up front would fix this — confirm the element
    // type of Layers exposes a count and add it.
    int i = 0;
    foreach (FeedforwardLayer layer in this.Layers)
    {
        FeedforwardLayer otherLayer = other.Layers[i++];

        if (layer.NeuronCount != otherLayer.NeuronCount)
        {
            return false;
        }

        // They must either both have, or both lack, a weight matrix.
        if ((layer.LayerMatrix == null) && (otherLayer.LayerMatrix != null))
        {
            return false;
        }
        if ((layer.LayerMatrix != null) && (otherLayer.LayerMatrix == null))
        {
            return false;
        }

        // When both have a matrix, the matrices must match element-for-element.
        if ((layer.LayerMatrix != null) && (otherLayer.LayerMatrix != null))
        {
            if (!layer.LayerMatrix.Equals(otherLayer.LayerMatrix))
            {
                return false;
            }
        }
    }
    return true;
}
/// <summary>
/// Return a clone of the structure of this neural network: only layer sizes
/// are copied, not weights or thresholds.
/// </summary>
/// <returns>A structural copy of the network.</returns>
public FeedforwardNetwork CloneStructure()
{
    FeedforwardNetwork structural = new FeedforwardNetwork();
    foreach (FeedforwardLayer original in this.layers)
    {
        FeedforwardLayer sameSize = new FeedforwardLayer(original.NeuronCount);
        structural.AddLayer(sameSize);
    }
    return structural;
}
/// <summary>
/// Constructor that is designed to setup for a selective prune of an
/// existing, already-trained network.
/// </summary>
/// <param name="network">The neural network that we wish to prune.</param>
/// <param name="train">The training set input data.</param>
/// <param name="ideal">The ideal outputs for the training set input data.</param>
/// <param name="maxError">The maximum allowed error rate.</param>
public Prune(FeedforwardNetwork network, double[][] train,
        double[][] ideal, double maxError)
{
    this.maxError = maxError;
    this.ideal = ideal;
    this.train = train;
    this.currentNetwork = network;
}
/// <summary>
/// Worker-thread entry point: run the incremental prune search for the
/// smallest viable hidden layer, posting a status update every 10 cycles,
/// then report the best network found and keep it.
/// </summary>
public void ThreadProc()
{
    Prune prune = new Prune(0.7, 0.5, PruneIncrementalForm.XOR_INPUT,
            this.obtainIdeal(), 0.05);
    prune.StartIncremental();

    int cyclesSinceReport = 0;
    while (!prune.Done)
    {
        prune.PruneIncramental();  // (sic) method name as declared by Prune

        cyclesSinceReport++;
        if (cyclesSinceReport == 10)
        {
            this.SetText("Cycles:" + prune.Cycles + ",Hidden Neurons:"
                + prune.HiddenNeuronCount + ", Current Error=" + prune.Error);
            cyclesSinceReport = 0;
        }
    }

    this.SetText("Best network found:" + prune.HiddenNeuronCount
        + ",error = " + prune.Error);
    this.network = prune.CurrentNetwork;
}
/// <summary>
/// Restore the previously trained market network from the file "sp500.net".
/// </summary>
public void loadNeuralNetwork()
{
    object restored = SerializeObject.Load("sp500.net");
    this.network = (FeedforwardNetwork)restored;
}
/// <summary>
/// Begin the incremental prune: start from a single hidden neuron and set up
/// a backpropagation trainer for that minimal network.
/// </summary>
public void StartIncremental()
{
    this.hiddenNeuronCount = 1;
    this.cycles = 0;
    this.done = false;

    // input -> 1 hidden neuron -> output, sized from the training data.
    FeedforwardNetwork candidate = new FeedforwardNetwork();
    candidate.AddLayer(new FeedforwardLayer(this.train[0].Length));
    candidate.AddLayer(new FeedforwardLayer(this.hiddenNeuronCount));
    candidate.AddLayer(new FeedforwardLayer(this.ideal[0].Length));
    candidate.Reset();
    this.currentNetwork = candidate;

    this.backprop = new Backpropagation(this.currentNetwork, this.train,
            this.ideal, this.rate, this.momentum);
}
/// <summary>
/// Handle the prune button: selectively remove hidden neurons whose removal
/// keeps the error under 0.05, then report how many were removed.
/// </summary>
private void btnPrune_Click(object sender, EventArgs e)
{
    Prune pruner = new Prune(this.network, PruneSelectiveForm.XOR_INPUT,
            this.obtainIdeal(), 0.05);
    int removed = pruner.PruneSelective();
    this.network = pruner.CurrentNetwork;
    this.SetText("Prune removed " + removed + " neurons.");
}
/// <summary>
/// Construct a player that selects its moves using the given neural network.
/// </summary>
/// <param name="network">The trained board-evaluation network.</param>
public PlayerNeural( FeedforwardNetwork network) { this.network = network; }
/// <summary>
/// Try each hidden neuron in turn and prune the first one whose removal does
/// not push the network's error above the allowed maximum.
/// </summary>
/// <returns>True if a neuron was pruned; false if none qualified.</returns>
protected bool FindNeuron()
{
    for (int neuron = 0; neuron < this.HiddenCount; neuron++)
    {
        // Evaluate a trial network with this hidden neuron clipped out.
        FeedforwardNetwork trial = this.ClipHiddenNeuron(neuron);
        double trialError = DetermineError(trial);
        if (trialError < this.maxError)
        {
            this.currentNetwork = trial;  // keep the smaller network
            return true;
        }
    }
    return false;
}
/// <summary>
/// Rebuild the 4-12-4 network from weight matrices saved in "weights.txt"
/// (written by the companion training program), then print the network's
/// outputs for the XOR test set.
/// </summary>
/// <param name="args">Not used</param>
static void Main(string[] args)
{
    List<String> lines = File.ReadAllLines("weights.txt").ToList();

    // Matrix shapes for a 4-12-4 network: (neurons + 1 threshold row) x next layer.
    double[,] inputLayerWeights = new double[5, 12];
    double[,] hiddenLayerWeights = new double[13, 4];

    // The blank line in weights.txt separates the input-layer matrix from
    // the hidden-layer matrix.
    bool forInput = true;
    int row = 0;
    foreach (string line in lines)
    {
        if (String.IsNullOrWhiteSpace(line))
        {
            forInput = false;
            row = 0;
            continue;
        }

        // NOTE(review): parsing uses the current culture, which matches the
        // writer (StreamWriter.Write(double)); both programs must run under
        // the same locale for the round-trip to work — confirm, or move both
        // sides to InvariantCulture together.
        string[] cells = line.Split(' ');
        for (int i = 0; i < cells.Length; i++)  // FIX: Length, not LINQ Count()
        {
            double value = double.Parse(cells[i]);
            if (forInput)
            {
                inputLayerWeights[row, i] = value;
            }
            else
            {
                hiddenLayerWeights[row, i] = value;
            }
        }
        row++;
    }

    Matrix inputLayerMatrix = new Matrix(inputLayerWeights);
    Matrix hiddenLayermatrix = new Matrix(hiddenLayerWeights);

    // Build the same topology the weights were trained on.
    FeedforwardNetwork network = new FeedforwardNetwork();
    network.AddLayer(new FeedforwardLayer(4));
    network.AddLayer(new FeedforwardLayer(12));
    network.AddLayer(new FeedforwardLayer(4));
    network.Reset();

    // Replace the randomized matrices with the loaded ones.
    FeedforwardLayer inputLayer = network.InputLayer;
    inputLayer.LayerMatrix = inputLayerMatrix;
    FeedforwardLayer hiddenLayer = network.HiddenLayers.ToList()[0];
    hiddenLayer.LayerMatrix = hiddenLayermatrix;

    Console.WriteLine("Neural Network Results:");
    for (int i = 0; i < XOR_IDEAL.Length; i++)
    {
        double[] actual = network.ComputeOutputs(XOR_INPUT[i]);
        Console.WriteLine(XOR_INPUT[i][0] + "," + XOR_INPUT[i][1] + ","
            + XOR_INPUT[i][2] + "," + XOR_INPUT[i][3]
            + ", actual=" + actual[0] + "," + actual[1] + ","
            + actual[2] + "," + actual[3]
            + ",ideal=" + XOR_IDEAL[i][0] + "," + XOR_IDEAL[i][1] + ","
            + XOR_IDEAL[i][2] + "," + XOR_IDEAL[i][3]);
    }
    Console.ReadKey();

    // FIX: removed a large block of commented-out matrix-printing code.
}
/// <summary>
/// Compute the error of the given network against the stored training data.
/// </summary>
/// <param name="network">The neural network that we are seeking a error rate for.</param>
/// <returns>The error for the specified neural network.</returns>
protected double DetermineError(FeedforwardNetwork network)
{
    double errorRate = network.CalculateError(this.train, this.ideal);
    return errorRate;
}
/// <summary>
/// Called at the end of each incremental cycle. Tracks training progress;
/// when the error has stalled (improved by less than 0.01 over 10000 cycles)
/// the hidden layer is grown by one neuron and training restarts. Marks the
/// search done once the error drops below the allowed maximum.
/// </summary>
protected void Increment()
{
    bool growHiddenLayer = false;

    if (this.markErrorRate == 0)
    {
        // First call: record a baseline error to measure progress against.
        this.markErrorRate = this.error;
        this.sinceMark = 0;
    }
    else
    {
        this.sinceMark++;
        if (this.sinceMark > 10000)
        {
            // Less than 0.01 improvement over 10000 cycles counts as a stall.
            if ((this.markErrorRate - this.error) < 0.01)
            {
                growHiddenLayer = true;
            }
            this.markErrorRate = this.error;
            this.sinceMark = 0;
        }
    }

    if (this.error < this.maxError)
    {
        this.done = true;
    }

    if (growHiddenLayer)
    {
        this.cycles = 0;
        this.hiddenNeuronCount++;

        // Rebuild with the larger hidden layer and retrain from scratch.
        FeedforwardNetwork grown = new FeedforwardNetwork();
        grown.AddLayer(new FeedforwardLayer(this.train[0].Length));
        grown.AddLayer(new FeedforwardLayer(this.hiddenNeuronCount));
        grown.AddLayer(new FeedforwardLayer(this.ideal[0].Length));
        grown.Reset();
        this.currentNetwork = grown;

        this.backprop = new Backpropagation(this.currentNetwork, this.train,
                this.ideal, this.rate, this.momentum);
    }
}