// Create the BackPropagation network
private void CreateBackProp_Click(object sender, EventArgs e)
{
    if (PARAMETERS == 0)
    {
        MessageBox.Show("Спочатку завантажте дані");
        return;
    }

    int hidden;
    bool resultHidden = Int32.TryParse(this.Hidden_Neurons.Text, out hidden);
    bool resultLearning_Rate1 = Double.TryParse(this.Learning_rate1.Text, out LEARNING_RATE1);
    bool resultMomentum = Double.TryParse(this.Momentum.Text, out MOMENTUM);
    bool resultMin_error = Double.TryParse(this.MinError1.Text, out MIN_ERROR);

    if (resultHidden == false || resultLearning_Rate1 == false || resultMomentum == false || resultMin_error == false)
    {
        MessageBox.Show("Не всі обов'язкові поля заповнені для створення мережі BackPropagation");
        return;
    }

    layerSizes = new int[3] { PARAMETERS, hidden, 1 };   // number of layers and neurons per layer
    bpn = new BackPropagationNetwork(layerSizes, TFuncs);
    MessageBox.Show("Мережу BackPropagation створено");
}
public void ConvolutionNetwork()
{
    int[] layerSizes = new int[5] { 841, 1014, 1250, 100, 10 };
    TransferFunction[] transferFunctions = new TransferFunction[5]
    {
        TransferFunction.None,
        TransferFunction.Convolution,
        TransferFunction.Convolution,
        TransferFunction.Linear,
        TransferFunction.Linear
    };
    BackPropagationNetwork backPropagationNetwork = new BackPropagationNetwork(layerSizes, transferFunctions);

    // First pattern: every even-indexed input set to 1.
    double[] input1 = new double[841];
    for (int i = 0; i < input1.Length; i++)
    {
        if (i % 2 == 0)
            input1[i] = 1;
    }

    // Second pattern: all inputs set to 1.
    double[] input2 = new double[841];
    for (int i = 0; i < input2.Length; i++)
    {
        input2[i] = 1;
    }

    DataPoint _dp1 = new DataPoint(input1, new[] { 1.0 });
    DataPoint _dp2 = new DataPoint(input2, new[] { 0.0 });

    DataPointCollection _dataPointCollection = new DataPointCollection();
    _dataPointCollection.Add(_dp1);
    _dataPointCollection.Add(_dp2);

    SimpleNetworkTrainer _networkTrainer = new SimpleNetworkTrainer(backPropagationNetwork, _dataPointCollection);
    _networkTrainer.TargetError = 0.0001;
    _networkTrainer.MaxIterations = 1000000;
    _networkTrainer.NudgeScale = 0.8;
    _networkTrainer.NudgeWindow = 100;
    _networkTrainer.TrainNetwork();

    Assert.IsTrue(true, "Never Reached Minimum Error");

    // Print the last 100 entries of the error history.
    for (int i = _networkTrainer.ErrorHistory.Count - 100; i < _networkTrainer.ErrorHistory.Count; i++)
    {
        Console.WriteLine("{0}: {1:0.00000000}", i, _networkTrainer.ErrorHistory[i]);
    }
}
public NetworkTrainer(BackPropagationNetwork BPN, DataSet DS)
{
    Network = BPN;
    DataSet = DS;

    _idx = new Permutator(DataSet.Size);
    _iterations = 0;
    _errorHistory = new List<double>();
}
public void AddNoiseToInputPattern(int levelPercent)
{
    // Overwrite roughly levelPercent percent of the input-layer nodes with random values.
    int i = ((NodesInLayer(0) - 1) * levelPercent) / 100;
    while (i > 0)
    {
        nodes[(int)(BackPropagationNetwork.Random(0, NodesInLayer(0) - 1))].Value = BackPropagationNetwork.Random(0, 100);
        i--;
    }
}
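// Hypothetical usage sketch (an assumption, not taken from the original source):
// AddNoiseToInputPattern indexes the network-wide nodes array through NodesInLayer(0),
// so it presumably lives on the XP Idea-style network class used in the console demo
// at the end of this listing. Under that assumption, a robustness check on a trained
// network could look like this:
//
//     BackPropNet.AddNoiseToInputPattern(10);   // corrupt roughly 10% of the input nodes
//     BackPropNet.Run();                        // run the network on the noisy pattern
//     Console.Out.WriteLine(Math.Round(BackPropNet.OutputNode(0).Value));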
static void Main(string[] args)
{
    int[] layerSizes = new int[3] { 3, 4, 1 };
    TransferFunction[] TFuncs = new TransferFunction[3]
    {
        TransferFunction.None,
        TransferFunction.Sigmoid,
        TransferFunction.Linear
    };
    BackPropagationNetwork bpn = new BackPropagationNetwork(layerSizes, TFuncs);

    double[] input = new double[] { 4, 6, 8 };     //, new double[] {4, 7, 5}, new double[] {7, 4, 8}, new double[] {6, 7, 5}, new double[] {7, 7, 8}}
    double[] desired = new double[] { -0.86 };     //, new double[] {0.15}, new double[] {0.72 }, new double[] {0.53 }, new double[] { 0.44 } }

    /*
    double[] output = new double[1];
    double error = 0.0;

    for (int i = 0; i < 10; i++)
    {
        error = bpn.Train(ref input, ref desired, 0.15, 0.1);
        bpn.Run(ref input, out output);
        if (i % 1 == 0)
            Console.WriteLine("Iteration {0}: \n\t Input {1:0.000} {2:0.000} {3:0.000} Output {4:0.000} error{5:0.000}",
                i, input[0], input[1], input[2], output[0], error);
    }
    */

    double[][] inputs =
    {
        new double[] { 0.4, 11.8, 0.1 },
        new double[] { 1.9, 1.9, 19.5 },
        new double[] { 1.2, 23.2, 0.3 },
        new double[] { 20.9, 0.0, 7.9 },
        new double[] { 13.0, 19.0, 11.0 },
        new double[] { 15.5, 2.9, 68.3 }
    };

    string[] names = new string[] { "Apples", "Avocado", "Leave", "Beef Steak", "Jam", "Brazil Nuts" };

    SOM somnetwork = new SOM(3, names, inputs);

    Console.WriteLine("----");
    string row = "Water";
    double[] water = new double[] { 0.0, 0.0, 0.0 };
    int[] result = somnetwork.Result(water);
    Console.WriteLine(row + " " + result[0] + " " + result[1]);
    Console.ReadKey();
}
static void Main(string[] args)
{
    string[] lines = { "First line", "Second line", "Third line" };
    System.IO.File.WriteAllLines(@"C:\Users\anxel\Downloads\NeuralNetwork\NeuralNetwork\WriteLines.txt", lines);

    // Training data
    XmlDocument doc = new XmlDocument();
    doc.Load(@"C:\Users\anxel\Downloads\NeuralNetwork\NeuralNetwork\simpleData.xml");

    DataSet ds = new DataSet();
    ds.Load((XmlElement)doc.DocumentElement.ChildNodes[0]);

    // Network to train
    int[] layerSizes = new int[3] { 25, 35, 4 };
    TransferFunction[] tFuncs = new TransferFunction[3]
    {
        TransferFunction.None,
        TransferFunction.Sigmoid,
        TransferFunction.Linear
    };
    BackPropagationNetwork bpn = new BackPropagationNetwork(layerSizes, tFuncs);

    // Network trainer!
    NetworkTrainer nt = new NetworkTrainer(bpn, ds);
    nt.maxError = 0.001;
    nt.maxIterations = 100000;
    nt.nudge_window = 500;

    // Train
    Console.WriteLine("Training...");
    nt.TrainDataSet();
    Console.WriteLine("Done!");

    // Save the network
    nt.network.Save(@"C:\Users\anxel\Downloads\NeuralNetwork\NeuralNetwork\simpleData.xml");

    // Save the error history
    double[] error = nt.GetErrorHistory();
    string[] filedata = new string[error.Length];
    for (int i = 0; i < error.Length; i++)
    {
        filedata[i] = i.ToString() + " " + error[i].ToString();
    }
    File.WriteAllLines(@"C:\Users\anxel\Downloads\NeuralNetwork\NeuralNetwork\simple_errors.txt", filedata);

    // End of program
    Console.WriteLine("\n\nPress Enter...");
    Console.ReadLine();
}
static void Main()
{
    int[] layerSizes = new int[3] { 1, 2, 1 };
    TransferFunctionEnum[] tFuncs = new TransferFunctionEnum[3]
    {
        TransferFunctionEnum.None,
        TransferFunctionEnum.Sigmoid,
        TransferFunctionEnum.Sigmoid
    };
    BackPropagationNetwork bpn = new BackPropagationNetwork(layerSizes, tFuncs);
}
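// The Main above only constructs the network. A minimal follow-up sketch (these lines
// would go at the end of that Main), assuming this BackPropagationNetwork variant
// exposes the same Train(ref input, ref desired, rate, momentum) and
// Run(ref input, out output) methods used by the other samples in this listing:
//
//     double[] input = new double[] { 0.5 };
//     double[] desired = new double[] { 0.25 };
//     double[] output;
//
//     for (int i = 0; i < 1000; i++)
//     {
//         // One backpropagation pass per iteration; learning rate 0.15, momentum 0.1.
//         bpn.Train(ref input, ref desired, 0.15, 0.1);
//     }
//
//     bpn.Run(ref input, out output);
//     Console.WriteLine("Output after training: {0:0.000}", output[0]);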
public async Task TrainNetworkAsync()
{
    int[] layerSize = CreateLayersSize();
    TransferFunction[] tfunc = AddTransferFunctionToLayers();

    BackPropagationNetwork bpn = new BackPropagationNetwork(layerSize, tfunc);
    NetworkTrainer nt;

    await FixInputs();

    nt = new NetworkTrainer(bpn, _dataSet);

    Task.Run(() =>
    {
        nt.MaxError = MaxError;
        nt.MaxIterations = MaxIterations;
        nt.NudgeWindow = NudgeWindow;
        nt.TrainDataSet();
        nt.Network.Save(@"tezine.xml");

        // Save the error history alongside the trained weights.
        double[] error = nt.GetErrorHistory();
        string[] filedata = new string[error.Length];
        for (int i = 0; i < error.Length; i++)
        {
            filedata[i] = i.ToString() + " " + error[i].ToString();
        }
        File.WriteAllLines(@"greske.txt", filedata);

        NetworkTrained?.Invoke(this, "Završeno treniranje.");
    });

    dynamic settings = SetSettingsForProgressReportPopup();

    TrainingNetworkReport.Status = "Treniranje mreže...";
    TrainingNetworkReport.Progress = 0;
    TrainingNetworkReport.MaxIterations = MaxIterations;
    TrainingNetworkReport.Error = 0;

    WindowManager window = new WindowManager();
    window.ShowDialog(new TrainingNetworkProgresBarViewModel(ref _trainingNetworkReport, settings, this, nt));
}
static void Main(string[] args)
{
    int[] layerSizes = new int[3] { 3, 4, 1 };
    TransferFunction[] TFuncs = new TransferFunction[3]
    {
        TransferFunction.None,
        TransferFunction.Sigmoid,
        TransferFunction.Linear
    };
    BackPropagationNetwork bpn = new BackPropagationNetwork(layerSizes, TFuncs);

    double[] input = new double[] { 4, 6, 8 };     //, new double[] {4, 7, 5}, new double[] {7, 4, 8}, new double[] {6, 7, 5}, new double[] {7, 7, 8}}

    /*
    for (int i = 0; i < input.GetUpperBound(0); i++)
    {
        input[i] = new double[3];
        for (int j = 0; j < input[i].Length; j++)
        {
        }
    }
    */

    double[] desired = new double[] { -0.86 };     //, new double[] {0.15}, new double[] {0.72 }, new double[] {0.53 }, new double[] { 0.44 } }
    double[] output = new double[1];
    double error = 0.0;

    for (int i = 0; i < 10; i++)
    {
        error = bpn.Train(ref input, ref desired, 0.15, 0.1);
        bpn.Run(ref input, out output);

        if (i % 1 == 0)
        {
            Console.WriteLine("Iteration {0}: \n\t Input {1:0.000} {2:0.000} {3:0.000} Output {4:0.000} error{5:0.000}",
                i, input[0], input[1], input[2], output[0], error);
        }

        /*
        for (int k = 0; k < 4; k++)
            Console.WriteLine("{0:0.000}", bpn.layerOtput[0][k]);
        */
    }

    Console.ReadKey();
}
public string Calculate(Kerogen ker)
{
    BackPropagationNetwork bpn = new BackPropagationNetwork("test backpropagation network.xml");

    double[] inputs = new double[5];
    double[] output = new double[1];

    inputs[0] = ker.Carbon;
    inputs[1] = ker.Hydrogen;
    inputs[2] = ker.Oxygen;
    inputs[3] = ker.Nitrogen;
    inputs[4] = ker.Sulfur;

    int result = bpn.getCluster(inputs, output);

    return "<br/><h2>Даний кероген відноситься до " + result.ToString() + " типу</h2>";
}
public LeafRecognizerViewModel()
{
    Task.Run(() =>
    {
        if (File.Exists(@"tezine.xml"))
        {
            bpn = new BackPropagationNetwork(@"tezine.xml");
            NumberOfInputNodes = bpn.InputSize;
        }

        if (File.Exists(@"vrste.txt"))
        {
            List<string> plants = new List<string>(File.ReadLines(@"vrste.txt").ToList());
            int id = 0;
            foreach (var plant in plants)
            {
                Plants.Add(new PlantModel(plant, id++));
            }
            NotifyOfPropertyChange(() => Plants);
        }

        NotifyOfPropertyChange(() => CanRecognizeLeaf);
    });
}
public void CanTrainNetwork()
{
    // XOR data
    DataPoint _dp1 = new DataPoint(new[] { 1.0, 1.0 }, new[] { 0.0 });
    DataPoint _dp2 = new DataPoint(new[] { 1.0, 0.0 }, new[] { 1.0 });
    DataPoint _dp3 = new DataPoint(new[] { 0.0, 1.0 }, new[] { 1.0 });
    DataPoint _dp4 = new DataPoint(new[] { 0.0, 0.0 }, new[] { 0.0 });

    DataPointCollection _dataPointCollection = new DataPointCollection();
    _dataPointCollection.Add(_dp1);
    _dataPointCollection.Add(_dp2);
    _dataPointCollection.Add(_dp3);
    _dataPointCollection.Add(_dp4);

    int[] _layerSizes = new int[3] { 2, 2, 1 };
    TransferFunction[] _transferFunctions = new TransferFunction[3]
    {
        TransferFunction.None,
        TransferFunction.Sigmoid,
        TransferFunction.Linear
    };
    BackPropagationNetwork _bpn = new BackPropagationNetwork(_layerSizes, _transferFunctions);

    SimpleNetworkTrainer _networkTrainer = new SimpleNetworkTrainer(_bpn, _dataPointCollection);
    _networkTrainer.TargetError = 0.0001;
    _networkTrainer.MaxIterations = 1000000;
    _networkTrainer.NudgeScale = 0.8;
    _networkTrainer.NudgeWindow = 100;
    _networkTrainer.TrainNetwork();

    Assert.IsTrue(true, "Never Reached Minimum Error");

    // Print the last 100 entries of the error history.
    for (int i = _networkTrainer.ErrorHistory.Count - 100; i < _networkTrainer.ErrorHistory.Count; i++)
    {
        Console.WriteLine("{0}: {1:0.00000000}", i, _networkTrainer.ErrorHistory[i]);
    }
}
// Load a BackPropagation network from a file
private void DownloadBackProp_Click(object sender, EventArgs e)
{
    string filepath = "";   // path to the file to open

    openFileDialog.Multiselect = false;
    if (openFileDialog.ShowDialog() != DialogResult.OK)
    {
        return;   // the user cancelled the dialog, nothing to load
    }
    filepath = openFileDialog.FileName;

    bpn = new BackPropagationNetwork(filepath);
    if (bpn.loaded)
    {
        MessageBox.Show("Мережа BackPropagation завантажена");
    }
    else
    {
        MessageBox.Show("Виберіть інший файл для завантаження мережі");
    }
}
/// <summary>
/// This method accepts the training examples as input and performs the training of the MLP neural network
/// </summary>
/// <param name="data">training data pairs</param>
/// <param name="ctx">training data descriptions</param>
/// <returns>IScore</returns>
public override IScore Run(double[][] data, IContext ctx)
{
    // Sum for every layer. hidLyrNeuronSum1 = x11*w11+x12*w21+..+x1N*wN1
    double[][] hidLyrNeuronSum = new double[m_HiddenLayerNeurons.Length + 1][];

    // outputs = ActFnx(hidLyrNeuronSum+Bias)
    double[][] hidLyrOut = new double[m_HiddenLayerNeurons.Length + 1][];

    // Split the data 80/20 into training and validation sets.
    double[][] trainingData = data.Take((int)(data.Length * 0.8)).ToArray();
    double[][] validationData = data.Skip((int)(data.Length * 0.8)).ToArray();

    int numOfInputVectors = trainingData.Length;

    m_InpDims = ctx.DataDescriptor.Features.Count();
    m_OutputLayerNeurons = data[0].Length - m_InpDims;

    m_Weights = new double[m_HiddenLayerNeurons.Length + 1][,];
    m_Biases = new double[m_HiddenLayerNeurons.Length + 1][];

    InitializeWeightsandBiasesinputlayer(m_InpDims);
    InitializeWeightsandBiaseshiddenlayers(m_HiddenLayerNeurons);
    InitializeWeightsandBiasesoutputlayer(m_HiddenLayerNeurons);

    var score = new MLPerceptronAlgorithmScore();
    double lastLoss = 0;
    double lastValidationLoss = 0;

#if TESTING
    string path = Directory.GetCurrentDirectory() + "\\MLPerceptron\\TestFiles\\mnist_performance_params_" + this.TestCaseNumber.ToString() + ".csv";

    if (!File.Exists(path))
    {
        File.Create(path).Dispose();
    }

    using (var performanceData = new StreamWriter(path))
#endif
    {
        Stopwatch watch = new Stopwatch();
        double timeElapsed = 0;

#if TESTING
        performanceData.WriteLine("{0},{1},{2},{3},{4},{5}", "Epoch", "Epoch Loss", "Epoch Accuracy", "Validation Loss", "Validation Accuracy", "Time Elapsed");
#endif

        for (int i = 0; i < m_Iterations; i++)
        {
            watch.Restart();

            score.Loss = 0;
            double batchAccuracy = 0;
            int miniBatchStartIndex = 0;

            while (miniBatchStartIndex < numOfInputVectors)
            {
                BackPropagationNetwork backPropagation = new BackPropagationNetwork(m_Biases, m_HiddenLayerNeurons, m_OutputLayerNeurons, m_InpDims);

                for (int inputVectIndx = miniBatchStartIndex; inputVectIndx < m_batchSize + miniBatchStartIndex && inputVectIndx < trainingData.Length; inputVectIndx++)
                {
                    // Z2 = actFnc(X * W1)
                    CalcFirstHiddenLayer(trainingData[inputVectIndx], m_InpDims, out hidLyrOut[0], out hidLyrNeuronSum[0]);

                    // We use output of first layer as input of second layer.
                    CalcRemainingHiddenLayers(hidLyrOut[0], hidLyrNeuronSum[0], m_InpDims, out hidLyrOut, out hidLyrNeuronSum);

                    // Zk = ak-1 * Wk-1
                    CalculateResultatOutputlayer(hidLyrOut[m_HiddenLayerNeurons.Length - 1], m_InpDims, m_SoftMax, out hidLyrOut[m_HiddenLayerNeurons.Length], out hidLyrNeuronSum[m_HiddenLayerNeurons.Length]);

                    if (m_SoftMax == true)
                    {
                        backPropagation.CalcOutputErrorSoftMax(hidLyrOut[m_HiddenLayerNeurons.Length], m_HiddenLayerNeurons, trainingData[inputVectIndx], ctx);
                    }
                    else
                    {
                        // BackPropagationNetwork backPropagation = new BackPropagationNetwork(m_HiddenLayerNeurons.Length);
                        backPropagation.CalcOutputError(hidLyrOut[m_HiddenLayerNeurons.Length], m_HiddenLayerNeurons, hidLyrNeuronSum[m_HiddenLayerNeurons.Length], trainingData[inputVectIndx], ctx);
                    }

                    backPropagation.CalcHiddenLayersError(hidLyrOut, m_Weights, m_HiddenLayerNeurons, hidLyrNeuronSum, trainingData[inputVectIndx]);
                    backPropagation.CostFunctionChangeWithBiases(m_Biases, m_HiddenLayerNeurons, m_LearningRate);
                    backPropagation.CostFunctionChangeWithWeights(m_Weights, hidLyrOut, m_HiddenLayerNeurons, m_LearningRate, trainingData[inputVectIndx]);
                }

                backPropagation.UpdateBiases(m_Biases, m_HiddenLayerNeurons, m_LearningRate, out m_Biases);
                backPropagation.UpdateWeights(m_Weights, hidLyrOut, m_HiddenLayerNeurons, m_LearningRate, m_InpDims, out m_Weights);

                score.Errors = backPropagation.MiniBatchError[m_HiddenLayerNeurons.Length];
                batchAccuracy += ((double)backPropagation.TrainingSetAccuracy / m_batchSize);

                double sum = 0;
                foreach (var outLyrErr in score.Errors)
                {
                    sum += outLyrErr;
                }

                /*
                 * 1 - mean of errors
                 * score.Loss = 1 - (Math.Abs(sum) / score.Errors.Length);
                 */
                score.Loss += Math.Abs(sum);

                miniBatchStartIndex = miniBatchStartIndex + m_batchSize;
            }

            double deltaLoss = lastLoss - score.Loss;
            double accuracy = ((double)batchAccuracy * m_batchSize) / numOfInputVectors;

            var result = ((MLPerceptronResult)Predict(validationData, ctx)).results;

            int accurateResults = 0;
            double validationSetLoss = 0.0;

            // Check if the validation data has been correctly classified by the neural network
            for (int j = 0; j < validationData.Length; j++)
            {
                accurateResults++;
                for (int k = 0; k < m_OutputLayerNeurons; k++)
                {
                    validationSetLoss += Math.Abs(validationData[j][(validationData[j].Length - m_OutputLayerNeurons) + k] - result[j * m_OutputLayerNeurons + k]);

                    //Assert.True(testData[i][(testData[i].Length - numberOfOutputs) + j] == (result[i * numberOfOutputs + j] >= 0.5 ? 1 : 0));
                    if (validationData[j][(validationData[j].Length - m_OutputLayerNeurons) + k] != (result[j * m_OutputLayerNeurons + k] >= 0.5 ? 1 : 0))
                    {
                        accurateResults--;
                        break;
                    }
                }
            }

            double deltaValidationLoss = lastValidationLoss - validationSetLoss;
            double validationAccuracy = (double)accurateResults / validationData.Length;

            watch.Stop();
            timeElapsed += ((double)watch.ElapsedMilliseconds / 1000);

            // Debug.WriteLine($"Loss: {score.Loss}, Last loss: {lastLoss}, Delta: {deltaLoss}, Accuracy: {accuracy}, ValidationLoss: {validationSetLoss}, Last Validationloss: {lastValidationLoss}, Delta: {deltaValidationLoss}, ValidationAccuracy: {validationAccuracy}, TimeElapsed: {timeElapsed}");

#if TESTING
            performanceData.WriteLine("{0},{1},{2},{3},{4},{5}", i.ToString(), score.Loss.ToString("F3"), accuracy.ToString("F3"), validationSetLoss.ToString("F3"), validationAccuracy.ToString("F3"), timeElapsed.ToString("F3"));
#endif

            lastLoss = score.Loss;
            lastValidationLoss = validationSetLoss;
        }

        ctx.Score = score;
        return ctx.Score;
    }
}
static void Main(string[] args)
{
    var layerSizes = new[] { 2, 2, 1 };
    var transferFunctions = new[]
    {
        TransferFunction.None,
        TransferFunction.Sigmoid,
        TransferFunction.Linear
    };
    var backPropagationNetwork = new BackPropagationNetwork(layerSizes, transferFunctions)
    {
        Name = "XOR-Gate Example"
    };

    var input = new double[4][];
    var expected = new double[4][];
    for (int i = 0; i < 4; i++)
    {
        input[i] = new double[2];
        expected[i] = new double[1];
    }

    input[0][0] = 0.0; input[0][1] = 0.0; expected[0][0] = 0;   // false xor false = false
    input[1][0] = 1.0; input[1][1] = 0.0; expected[1][0] = 1;   // true xor false = true
    input[2][0] = 0.0; input[2][1] = 1.0; expected[2][0] = 1;   // false xor true = true
    input[3][0] = 1.0; input[3][1] = 1.0; expected[3][0] = 0;   // true xor true = false

    double error = 0.0;
    const int maxCount = 10;
    int count = 0;

    Stopwatch watch = Stopwatch.StartNew();

    do
    {
        // prepare for training epoch
        count++;
        error = 0;

        // train
        for (int i = 0; i < 4; i++)
        {
            error += backPropagationNetwork.Train(ref input[i], ref expected[i], .15, .1);
        }

        if (count % 1 == 0)
        {
            Console.WriteLine("Epoch {0} completed with error {1:0.0000}", count, error);
        }
    } while (error > 0.0001 && count <= maxCount);

    watch.Stop();

    var output = new double[4][];
    for (int i = 0; i < 4; i++)
    {
        backPropagationNetwork.Run(ref input[i], out output[i]);
    }

    for (int i = 0; i < 4; i++)
    {
        Console.WriteLine("For inputs {0} and {1}, output is {2}", input[i][0], input[i][1], output[i][0]);
    }

    Console.WriteLine("Time Elapsed :" + watch.Elapsed);
    Console.WriteLine("Hit Enter...");
    Console.ReadLine();
}
public void CreateNetwork()
{
    bpn = new BackPropagationNetwork(layerSizes, TFuncs);
}
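// A minimal sketch of the fields this helper relies on (hypothetical values; the
// layer/transfer-function pattern mirrors the three-layer setups used in the other
// samples in this listing):
//
//     layerSizes = new int[3] { inputCount, hiddenCount, 1 };
//     TFuncs = new TransferFunction[3]
//     {
//         TransferFunction.None, TransferFunction.Sigmoid, TransferFunction.Linear
//     };
//     CreateNetwork();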
public BackPropagationServices(string xml)
{
    this.bpn = new BackPropagationNetwork(xml);
}
public void CreateNetwork()
{
    bpn = new BackPropagationNetwork(layerSizes, TFuncs);
    bpn.SetBiasToZero();
}
static void Main(string[] args)
{
    string filePath = @"E:\Work\NeuralXOR.xml";

    int[] layerSizes = new int[3] { 2, 2, 1 };
    TransferFunction[] tFuncs = new TransferFunction[3]
    {
        TransferFunction.None,
        TransferFunction.Sigmoid,
        TransferFunction.Linear
    };
    BackPropagationNetwork bpn = new BackPropagationNetwork(layerSizes, tFuncs);

    // Example XOR-Gate
    bpn.Name = "XOR-Gate";

    // Define the cases
    double[][] input, output;
    input = new double[4][];
    output = new double[4][];
    for (int i = 0; i < 4; i++)
    {
        input[i] = new double[2];
        output[i] = new double[1];
    }

    input[0][0] = 0.0; input[0][1] = 0.0; output[0][0] = 0.0;   // false XOR false = false
    input[1][0] = 1.0; input[1][1] = 0.0; output[1][0] = 1.0;   // true XOR false = true
    input[2][0] = 0.0; input[2][1] = 1.0; output[2][0] = 1.0;   // false XOR true = true
    input[3][0] = 1.0; input[3][1] = 1.0; output[3][0] = 0.0;   // true XOR true = false

    // Train the network
    double error = 0.0;
    int max_count = 1000, count = 0;
    do
    {
        // Prepare for training epoch
        count++;
        error = 0.0;

        // Train
        for (int i = 0; i < 4; i++)
        {
            error += bpn.Train(ref input[i], ref output[i], 0.15, 0.10);
        }

        // Show progress
        if (count % 100 == 0)
        {
            Console.WriteLine("Epoch {0} completed with error {1:0.0000}", count, error);
        }
    } while (error > 0.00001 && count <= max_count);

    // Display results!
    double[] networkOutput = new double[1];
    for (int i = 0; i < 4; i++)
    {
        bpn.Run(ref input[i], out networkOutput);
        Console.WriteLine("Case {3}: {0:0.0} xor {1:0.0} = {2:0.0000}", input[i][0], input[i][1], networkOutput[0], i + 1);
    }

    bpn.Save(filePath);
    Console.ReadLine();
}
private static void Main(string[] args)
{
    Console.Out.WriteLine(" BACKPROPAGATION neural network demo.");
    Console.Out.WriteLine("");
    Console.Out.WriteLine(" Copyright(C) XP Idea.com 2001-2004 ");
    Console.Out.WriteLine("");
    Console.Out.WriteLine("The purpose of this demo is to show learning abilities of BACKPROP network.");
    Console.Out.WriteLine("The BACKPROP network is able to learn much more complex data patterns, than");
    Console.Out.WriteLine("Adaline network (please see OCR demo application). ");
    Console.Out.WriteLine("This example simply shows that the Backprop network is able to learn ");
    Console.Out.WriteLine("an 'exclusive OR' (XOR) operation, but the Adaline network is not able to do so.");
    Console.Out.WriteLine("");
    Console.Out.WriteLine(" false XOR false = false");
    Console.Out.WriteLine(" true XOR false = true");
    Console.Out.WriteLine(" false XOR true = true");
    Console.Out.WriteLine(" true XOR true = false");
    Console.Out.WriteLine("");
    Console.Out.WriteLine(" As result of the training, the network will produce output ‘0’");
    Console.Out.WriteLine("corresponding to logical ‘false’ or ‘1’ corresponding to logical ‘true’ value.");
    Console.Out.WriteLine("");
    Console.Out.WriteLine("PLEASE HIT ENTER TO CONTINUE");
    Console.In.ReadLine();
    Console.Out.WriteLine("");
    Console.Out.WriteLine("During this demo you will be prompted to enter input values");
    Console.Out.WriteLine("for the network. Then network will perform “XOR” operation on ");
    Console.Out.WriteLine("the entered values and result will be displayed to you. ");
    Console.Out.WriteLine("Please enter any values in range from 0 to 1 and hit [ENTER] when prompted. ");
    Console.Out.WriteLine("");
    Console.Out.WriteLine("NOW THE NETWORK IS READY TO LEARN FOLLOWING PATTERNS");
    Console.Out.WriteLine("");
    Console.Out.WriteLine(" false XOR false = false;");
    Console.Out.WriteLine(" true XOR false = true;");
    Console.Out.WriteLine(" false XOR true = true;");
    Console.Out.WriteLine(" true XOR true = false;");
    Console.Out.WriteLine("PLEASE HIT ENTER TO BEGIN TRAINING");
    Console.In.ReadLine();
    Console.Out.Write("TRAINING....");

    double d;
    BackPropagationNetwork BackPropNet;

    // Four XOR training patterns: two inputs, one output each.
    var patterns = new PatternsCollection(TrainingSets, 2, 1);
    SetPattern(patterns[0], 0, 0, 0);
    SetPattern(patterns[1], 0, 1, 1);
    SetPattern(patterns[2], 1, 0, 1);
    SetPattern(patterns[3], 1, 1, 0);

    //Network(0.55,0.6,
    BackPropNet = new BackPropagationNetwork(0.55, 0.6, new int[3] { 2, 3, 1 });
    BackPropNet.Train(patterns);
    Console.Out.WriteLine("DONE!");
    Console.Out.WriteLine("");
    //BackPropNet.SaveToFile("test.net");

    while (true)
    {
        try
        {
            Console.Out.Write("Enter 1st value: ");
            d = double.Parse(Console.In.ReadLine());
            BackPropNet.InputNode(0).Value = d;

            Console.Out.Write("Enter 2nd value: ");
            d = double.Parse(Console.In.ReadLine());
            BackPropNet.InputNode(1).Value = d;

            BackPropNet.Run();
            Console.Out.WriteLine("Result: " + Math.Round(BackPropNet.OutputNode(0).Value));
            Console.Out.WriteLine("");
        }
        catch
        {
            return;
        }
    }
}