Delta rule learning algorithm.

This learning algorithm is used to train a one-layer neural network of activation neurons with a continuous activation function (see SigmoidFunction, for example).

The delta rule adjusts each weight in proportion to the output error and the derivative of the activation function: delta w_i = learningRate * (target - output) * f'(net) * x_i, where x_i is the corresponding input value.

Inheritance: ISupervisedLearning
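
A minimal usage sketch (not taken from the projects below) may make the call sequence clearer. It assumes the AForge.Neuro and AForge.Neuro.Learning namespaces and the same ActivationNetwork, SigmoidFunction and DeltaRuleLearning API used in the examples that follow; the DeltaRuleDemo class and the chosen parameter values are illustrative only.

using System;
using AForge.Neuro;
using AForge.Neuro.Learning;

class DeltaRuleDemo
{
    static void Main( )
    {
        // training data: logical OR of two inputs
        double[][] input = new double[][]
        {
            new double[] { 0, 0 },
            new double[] { 0, 1 },
            new double[] { 1, 0 },
            new double[] { 1, 1 }
        };
        double[][] output = new double[][]
        {
            new double[] { 0 },
            new double[] { 1 },
            new double[] { 1 },
            new double[] { 1 }
        };

        // one-layer network: 2 inputs, 1 sigmoid neuron
        ActivationNetwork network = new ActivationNetwork(
            new SigmoidFunction( 2.0 ), 2, 1 );
        // create the delta rule teacher and set its learning rate
        DeltaRuleLearning teacher = new DeltaRuleLearning( network );
        teacher.LearningRate = 1.0;

        // run learning epochs until the summary error is small enough
        // or an epoch limit is reached
        double error;
        int epoch = 0;
        do
        {
            error = teacher.RunEpoch( input, output );
            epoch++;
        }
        while ( ( error > 0.1 ) && ( epoch < 10000 ) );

        Console.WriteLine( "epochs: {0}, error: {1}", epoch, error );
        Console.WriteLine( "0 OR 1 -> {0:F3}",
            network.Compute( new double[] { 0, 1 } )[0] );
    }
}

RunEpoch returns the summary squared error over the whole epoch, which is why both examples below divide its result by the number of samples to report a per-sample error.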
Code Example #1
File: MainForm.cs Project: nagyistoce/Neuroflow
		// Worker thread
		void SearchSolution( )
		{
			bool reducedNetwork = ( ( classesCount == 2 ) && ( useOneNeuronForTwoClasses ) );

			// prepare learning data
			double[][] input = new double[samples][];
			double[][] output = new double[samples][];

			for ( int i = 0; i < samples; i++ )
			{
				input[i] = new double[variables];
				output[i] = new double[neuronsCount];

				// set input
				for ( int j = 0; j < variables; j++ )
					input[i][j] = data[i, j];
				// set output
				if ( reducedNetwork )
				{
					output[i][0] = classes[i];
				}
				else
				{
					output[i][classes[i]] = 1;
				}
			}

			// create perceptron
			ActivationNetwork	network = new ActivationNetwork(
				new SigmoidFunction( sigmoidAlphaValue ), variables, neuronsCount );
			ActivationLayer		layer = network[0];
			// create teacher
			DeltaRuleLearning	teacher = new DeltaRuleLearning( network );
			// set learning rate
			teacher.LearningRate = learningRate;

			// iterations
			int iteration = 1;

			// statistic files
			StreamWriter errorsFile = null;
			StreamWriter weightsFile = null;

			try
			{
				// check if we need to save statistics to files
				if ( saveStatisticsToFiles )
				{
					// open files
					errorsFile	= File.CreateText( "errors.csv" );
					weightsFile	= File.CreateText( "weights.csv" );
				}
				
				// errors list
				ArrayList errorsList = new ArrayList( );

				// loop
				while ( !needToStop )
				{
					// save current weights
					if ( weightsFile != null )
					{
						for ( int i = 0; i < neuronsCount; i++ )
						{
							weightsFile.Write( "neuron" + i + "," );
							for ( int j = 0; j < variables; j++ )
								weightsFile.Write( layer[i][j] + "," );
							weightsFile.WriteLine( layer[i].Threshold );
						}
					}

					// run epoch of learning procedure
					double error = teacher.RunEpoch( input, output ) / samples;
					errorsList.Add( error );
	
					// save current error
					if ( errorsFile != null )
					{
						errorsFile.WriteLine( error );
					}				

					// show current iteration & error
                    SetText( currentIterationBox, iteration.ToString( ) );
                    SetText( currentErrorBox, error.ToString( ) );
					iteration++;

					// check if we need to stop
					if ( ( useErrorLimit ) && ( error <= learningErrorLimit ) )
						break;
					if ( ( !useErrorLimit ) && ( iterationLimit != 0 ) && ( iteration > iterationLimit ) )
						break;
				}

				// show perceptron's weights
                ClearList( weightsList );
				for ( int i = 0; i < neuronsCount; i++ )
				{
					string neuronName = string.Format( "Neuron {0}", i + 1 );
					ListViewItem item = null;

					// add all weights
					for ( int j = 0; j < variables; j++ )
					{
                        item = AddListItem( weightsList, neuronName );
                        AddListSubitem( item, string.Format( "Weight {0}", j + 1 ) );
                        AddListSubitem( item, layer[i][j].ToString( "F6" ) );
					}
					// threshold
                    item = AddListItem( weightsList, neuronName );
                    AddListSubitem( item, "Threshold");
                    AddListSubitem( item, layer[i].Threshold.ToString( "F6" ) );
				}

				// show error's dynamics
				double[,] errors = new double[errorsList.Count, 2];

				for ( int i = 0, n = errorsList.Count; i < n; i++ )
				{
					errors[i, 0] = i;
					errors[i, 1] = (double) errorsList[i];
				}

				errorChart.RangeX = new DoubleRange( 0, errorsList.Count - 1 );
				errorChart.UpdateDataSeries( "error", errors );
			}
			catch ( IOException )
			{
				MessageBox.Show( "Failed writing file", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error );
			}
			finally
			{
				// close files
				if ( errorsFile != null )
					errorsFile.Close( );
				if ( weightsFile != null )
					weightsFile.Close( );
			}

			// enable settings controls
			EnableControls( true );
		}
Code Example #2
File: NNTrain.cs Project: adesproject/ADES
        public void startTrain()
        {
            int class_count = 0;
            if ("NN".Equals(Constants.NN_SVM_SURF))
            {
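                // "NN" mode: train a separate one-vs-rest network per sign class using Levenberg-Marquardt learning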
                double sigmoidAlphaValue = 1.0;
                double learningRate = 100;
                int max_epoch = 50;
                double min_err = 0.000000001;

                List<FileInfo> trainingFiles = FileTools.getTrainingFiles(ref class_count);
                int samples = trainingFiles.Count;
                // prepare learning data
                double[][] input = new double[samples][];
                Dictionary<int, double[][]> outputs = new Dictionary<int, double[][]>();

                for (int i = 0; i < samples; i++)
                {
                    int currentImageClass = Int32.Parse(trainingFiles[i].Directory.Name);
                    Bitmap bmp = (Bitmap)Bitmap.FromFile(trainingFiles[i].FullName, false);
                    int com_x = 0, com_y = 0;
                    ByteTools.imageCoM(bmp, ref com_x, ref com_y);

                    input[i] = new double[numOfinputs];

                    List<Ipoint> featureList = fillFeatures(bmp, com_x, com_y, input[i]);
                    if (!outputs.ContainsKey(currentImageClass))
                    {
                        outputs.Add(currentImageClass, new double[samples][]);
                        for (int j = 0; j < samples; j++)
                        {
                            outputs[currentImageClass][j] = new double[] { 0d };
                        }
                    }
                    outputs[currentImageClass][i][0] = 1d;
                }

                Dictionary<int, ActivationNetwork> networks = new Dictionary<int, ActivationNetwork>();

                int[] availSigns = outputs.Keys.ToArray();
                foreach (int sign in availSigns)
                {
                    ActivationNetwork network = new ActivationNetwork(new SigmoidFunction(sigmoidAlphaValue),
                        numOfinputs, new int[] { Constants.NUM_OF_NN_HIDDEN_LAYER_NODES, 1 });
                    Accord.Neuro.Learning.LevenbergMarquardtLearning teacher = new Accord.Neuro.Learning.LevenbergMarquardtLearning(network);
                    teacher.LearningRate = learningRate;
                    int epoch = 0;
                    double error;
                    while (true)
                    {
                        // run epoch of learning procedure
                        error = teacher.RunEpoch(input, outputs[sign]) / samples;
                        Console.WriteLine("Epoch:" + epoch + " Error:" + error);
                        if (epoch++ > max_epoch || error < min_err)
                            break;
                    }
                    networks.Add(sign, network);
                    network.Save(Constants.base_folder + "nn_12x12_" + sign + ".dat");
                    Logger.log("Error: " + error + " Epoch:" + epoch);
                }
            }
            else if ("NN_SURF".Equals(Constants.NN_SVM_SURF) || "NN_12SIMPLE".Equals(Constants.NN_SVM_SURF))
            {
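                // "NN_SURF" / "NN_12SIMPLE" mode: train a single one-layer network over all sign types with the delta rule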
                double sigmoidAlphaValue = 1.0;
                if ("NN_SURF".Equals(Constants.NN_SVM_SURF)) {
                    if ("triangle".Equals(Constants.CIRCLE_TRIANGLE))
                        sigmoidAlphaValue = 6.0;
                    if ("circle".Equals(Constants.CIRCLE_TRIANGLE))
                        sigmoidAlphaValue = 6.0;
                }
                else if ("NN_12SIMPLE".Equals(Constants.NN_SVM_SURF))
                {
                    if ("triangle".Equals(Constants.CIRCLE_TRIANGLE))
                        sigmoidAlphaValue = 1.0;
                    if ("circle".Equals(Constants.CIRCLE_TRIANGLE))
                        sigmoidAlphaValue = 1.0;
                }
                double learningRate = 1.00;
                int max_epoch = 3000;
                double min_err = 0.000001;

                ActivationNetwork network = new ActivationNetwork(new SigmoidFunction(sigmoidAlphaValue),
                    numOfinputs, Constants.NUM_OF_SIGN_TYPES);
                DeltaRuleLearning teacher = new DeltaRuleLearning(network);
                teacher.LearningRate = learningRate;

                /*
                ActivationNetwork network = new ActivationNetwork(new SigmoidFunction(sigmoidAlphaValue),
                    numOfinputs, new int[] { (numOfinputs + Constants.NUM_OF_SIGN_TYPES)/2, Constants.NUM_OF_SIGN_TYPES });
                BackPropagationLearning teacher = new BackPropagationLearning(network);
                teacher.LearningRate = learningRate;
                //teacher.Momentum = momentum;
                */

                List<FileInfo> trainingFiles = new List<FileInfo>(1000);
                DirectoryInfo di = new DirectoryInfo(Constants.base_folder + "train_" + Constants.CIRCLE_TRIANGLE);
                DirectoryInfo[] dirs = di.GetDirectories("*");
                foreach (DirectoryInfo dir in dirs)
                {
                    int i = 0;
                    FileInfo[] files = dir.GetFiles("*.bmp");
                    foreach (FileInfo fi in files)
                    {
                        trainingFiles.Add(fi);
                        if (i++ > Constants.MAX_TRAIN_SAMPLE)
                            break;
                    }
                }
                // List<FileInfo> trainingFiles = FileTools.getTrainingFiles(ref class_count);

                int samples = trainingFiles.Count;
                // prepare learning data
                double[][] input = new double[samples][];
                double[][] output = new double[samples][];

                for (int i = 0; i < samples; i++)
                {
                    Bitmap bmp = (Bitmap)Bitmap.FromFile(trainingFiles[i].FullName, false);
                    int com_x = 0, com_y = 0;
                    ByteTools.imageCoM(bmp, ref com_x, ref com_y);

                    input[i] = new double[numOfinputs];
                    output[i] = new double[Constants.NUM_OF_SIGN_TYPES];

                    bmp.Tag = trainingFiles[i].Directory.Name + "_" + trainingFiles[i].Name;
                    fillFeatures_SURF(bmp, com_x, com_y, input[i]);
                    output[i][Int32.Parse(trainingFiles[i].Directory.Name) - 1] = 1d;
                }

                int epoch = 0;
                double error = 0;
                while (true)
                {
                    // run epoch of learning procedure
                    error = teacher.RunEpoch(input, output) / samples;
                    Console.WriteLine("Epoch:" + epoch + " Error:" + error);
                    if (epoch++ > max_epoch || error < min_err)
                        break;
                }
                network.Save(Constants.base_folder + Constants.NN_SVM_SURF + "_" + Constants.CIRCLE_TRIANGLE + ".dat");

                Logger.log("NNTrain [" + error + "]: " + Constants.NN_SVM_SURF + ", " + Constants.CIRCLE_TRIANGLE + ", " + learningRate + ", " + sigmoidAlphaValue);
            }
        }