Example #1
File: RProp.cs Project: jstark/NeuralNet
        // Allocates the RProp working buffers: weight/bias deltas, error gradients and per-connection learning rates.
        public RProp(BackPropNet net, string patterns, string targets)
            : base(net, patterns, targets)
        {
            try
            {
                // allocate one entry per weight layer for the delta, learning-rate and error-gradient arrays
                oldTotalDelta	= new double[net.Depth-1][];
                totalDelta		= new double[net.Depth-1][];
                biasLearnRates	= new double[net.Depth-1][];
                oldTotalErrGrad = new double[net.Depth-1][,];
                totalErrGrad    = new double[net.Depth-1][,];
                weightLearnRates  = new double[net.Depth-1][,];

                //
                for(uint i=0; i< net.Depth-1; i++)
                {
                    oldTotalDelta[i] = new double[net.GetLayer(i+1).GetSize()];
                    totalDelta[i]    = new double[net.GetLayer(i+1).GetSize()];
                    biasLearnRates[i] = new double[net.GetLayer(i+1).GetSize()];
                    oldTotalErrGrad[i] = new double[errGrad[i].GetLength(0), errGrad[i].GetLength(1)];
                    totalErrGrad[i] = new double[errGrad[i].GetLength(0), errGrad[i].GetLength(1)];
                    weightLearnRates[i]= new double[errGrad[i].GetLength(0), errGrad[i].GetLength(1)];
                }
                //
                // initialize all learning rates to 0.1
                //
                InitializeLearnRates();
            }
            catch(Exception ex)
            {
                Console.WriteLine(ex.Message);
                throw;
            }
        }
Example #2
 // Allocates storage for the previous bias and weight updates.
 public SimpleBPM(BackPropNet net, string patterns, string targets)
     : base(net, patterns, targets)
 {
     //
     try
     {
         // allocate memory for previous biases
         pBiasUpdates = new double[net.Depth-1][];
         //
         for(uint i=0; i<net.Depth-1; i++)
             pBiasUpdates[i] = new double[net.GetLayer(i+1).GetSize()];
         //
         // allocate memory for previous weight updates; errGrad.Clone() only copies
         // the outer array (shallow), so clone each weight matrix individually
         pWeightUpdates = new double[net.Depth-1][,];
         for(uint i=0; i<net.Depth-1; i++)
             pWeightUpdates[i] = (double[,])this.errGrad[i].Clone();
         //
     }
     catch(System.Exception ex)
     {
         Console.WriteLine(ex.Message);
         throw;
     }
 }
Example #3
        protected uint vPeriod = 5; // validation period.

        #endregion Fields

        #region Constructors

        // Allocates the error-gradient matrices, loads the training patterns and targets,
        // and checks that they are consistent with the network's input and output layers.
        public TrainingAlgorithm(BackPropNet net, string tPatterns, string tTargets)
        {
            uint depth = net.Depth;
            //
            try
            {
                // allocate mem for matrices of error gradients
                errGrad = new double[depth-1][,]; // allocate mem for depth-1 weight-gradient matrices
                //
                for(uint i=0; i<depth-1; i++)
                {
                    uint temp1 = net.GetLayer(i).GetSize();
                    uint temp2 = net.GetLayer(i+1).GetSize();
                    //
                    errGrad[i] = new double[temp1, temp2];
                }
                //
                //
                this.trPatterns = new DataSet(tPatterns);
                this.trTargets  = new DataSet(tTargets);
                this.network    = net;
                //
                if(trPatterns.GetNumberOfVectors() != trTargets.GetNumberOfVectors())
                    throw new Exception("Datasets have different number of vectors.");
                if(trPatterns.GetDataValidation() != true || trTargets.GetDataValidation() != true)
                    throw new Exception("Invalid Data");
                if(trPatterns.GetLengthOfVectors() != net.GetLayer(0).GetSize() ||
                    trTargets.GetLengthOfVectors() != net.GetLayer(net.Depth-1).GetSize())
                    throw new Exception("Incosistent Data/Inputs or Data/Outputs");

            }
            catch(Exception ex)
            {
                Console.WriteLine(ex.Message);
                throw;
            }
        }
Example #4
 // Delegates to the training-data constructor, then registers a validation data set.
 public TrainingAlgorithm(BackPropNet net, string tPatterns, string tTargets, string vPatterns, string vTargets)
     : this(net, tPatterns, tTargets)
 {
     this.SetValidationData(vPatterns, vTargets);
 }
Example #5
 // Serializes a network to a binary file with BinaryFormatter.
 public static void BinarySave(string filename, BackPropNet network)
 {
     try
     {
         // wrap the stream in 'using' so it is closed even if serialization throws
         using(FileStream fs = new FileStream(filename, FileMode.Create))
         {
             BinaryFormatter bf = new BinaryFormatter();
             //
             bf.Serialize(fs, network);
             fs.Flush();
         }
     }
     catch(System.Exception e)
     {
         Console.WriteLine(e.Message);
         throw;
     }
 }
Example #6
 // Delegates to the training-data constructor, then registers a validation data set.
 public SimpleBPM(BackPropNet net, string tPatterns, string tTargets, string vPatterns, string vTargets)
     : this(net, tPatterns, tTargets)
 {
     //
     this.SetValidationData(vPatterns, vTargets);
 }
Example #7
File: Form1.cs Project: jstark/NeuralNet
 private void button2_Click(object sender, System.EventArgs e)
 {
     try
     {
         if(of.ShowDialog() == DialogResult.OK && of.FileName != "")
         {
             network = BackPropNet.BinaryLoad(of.FileName);
             rtb1.Text = "Network: "+network.GetStructure();
         }
         patterns = null;
         targets = null;
     }
     catch(System.Exception ex)
     {
         MessageBox.Show(ex.Message, "Exception", MessageBoxButtons.OK, MessageBoxIcon.Error);
         network = null;
     }
     //
 }
Example #8
File: Form1.cs Project: jstark/NeuralNet
        // Simulates the network on the loaded patterns, writes the results to a user-chosen file and plots the error.
        private void button5_Click(object sender, System.EventArgs e)
        {
            string filename = null;
            //
            if(sf.ShowDialog() == DialogResult.OK && sf.FileName != "")
            {
                filename = sf.FileName;
                //
                try
                {

                    if(patterns != null)
                    {
                        if(targets != null)
                            network.Simulate(patterns, targets, filename);
                        else
                            network.Simulate(patterns, filename);
                    }
                    //
                    MessageBox.Show("Simulation Finished", "Attention", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
                }
                catch(System.Exception ex)
                {
                    MessageBox.Show(ex.Message, "Exception", MessageBoxButtons.OK, MessageBoxIcon.Error);
                    patterns = null;
                    targets  = null;
                    network  = null;
                    this.button3.Enabled = false;
                    this.button4.Enabled = false;
                    this.button5.Enabled = false;
                }
            }
            //
            if(targets != null)
            {
                NeuralLib.DataSet dset = new NeuralLib.DataSet(filename);
                //
                double[] dummy;
                //
                chart1.Series.Clear();
                chart1.OpenData(COD.Values, 0, dset.GetNumberOfVectors());
                if(network.GetLayer(network.Depth-1).GetSize() == 1)
                {
                    // network has one output neuron.
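                    // each vector read back from the results file is treated below as [simulated output, per-pattern error]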
                    chart1.AxisY.Title.Text = "Real Output-Simulated Output";
                    double sse = 0.0;
                    for(int i=0; i<dset.GetNumberOfVectors(); i++)
                    {
                        dummy = dset.GetDataVector(i);
                        chart1.Value[0, i] = targets.GetDataVector(i)[0];
                        chart1.Value[1, i] = dummy[0];
                        sse += dummy[dummy.Length-1];
                    }
                    //
                    label1.Text = "SSE: "+sse.ToString("F6")+" MSE: "+(sse/dset.GetNumberOfVectors()).ToString("F6")+" RMSE: "+
                        Math.Sqrt(sse/dset.GetNumberOfVectors()).ToString("F6");
                }
                else
                {
                    chart1.AxisY.Title.Text = "RMSE";
                    double sse = 0.0;
                    for(int i=0; i<dset.GetNumberOfVectors(); i++)
                    {
                        dummy = dset.GetDataVector(i);
                        chart1.Value[0, i] = Math.Sqrt(dummy[dummy.Length-1]);
                        sse += dummy[dummy.Length-1];
                    }
                    label1.Text = "SSE: "+sse.ToString("F6")+" MSE: "+(sse/dset.GetNumberOfVectors()).ToString("F6")+" RMSE: "+
                        Math.Sqrt(sse/dset.GetNumberOfVectors()).ToString("F6");
                }
                //
                label1.Left = chart1.Left+chart1.Width/2-label1.Width/2;
                chart1.CloseData(COD.Values);
                chart1.RecalcScale();
                chart1.Refresh();
            }
        }
Example #9
File: RProp.cs Project: jstark/NeuralNet
 // Delegates to the training-data constructor, then registers a validation data set.
 public RProp(BackPropNet net, string tPatterns, string tTargets, string vPatterns, string vTargets)
     : this(net, tPatterns, tTargets)
 {
     this.SetValidationData(vPatterns, vTargets);
 }
Example #10
File: Form1.cs Project: jstark/NeuralNet
 private void bStart_Click(object sender, System.EventArgs e)
 {
     //
     System.IO.StreamWriter sw = new System.IO.StreamWriter(results);
     sw.AutoFlush = true;
     keepTraining = true;
     //
     int numberOfTimesToTrain = Convert.ToInt32(cbTrainTimes.SelectedItem);
     //
     uint inputSize  = Convert.ToUInt32(cbInputNeurons.SelectedItem);
     uint outputSize = Convert.ToUInt32(cbOutputNeurons.SelectedItem);
     //
     uint minNeurons = Convert.ToUInt32(cbMinHiddenNeurons.SelectedItem);
     uint maxNeurons = Convert.ToUInt32(cbMaxHiddenNeurons.SelectedItem);
     //
     tbResults.Text = "Hidden Neurons ---- Mean Train Error --- Mean Validation Error ------ Mean Test Error";
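      // first sweep: single-hidden-layer networks, from minNeurons up to maxNeurons hidden neurons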
     uint n = minNeurons-1;
     while(keepTraining)
     {
         n++;
         if(n > maxNeurons)
             break;
         //
         double meanTrError = 0;
         double meanVaError = 0;
         double meanTeError = 0;
         //
         Application.DoEvents();
         for(int i=0; i<numberOfTimesToTrain; i++)
         {
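              // build, initialize and train a fresh network for this hidden-layer size, then accumulate its errors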
             nnet = new BackPropNet(inputSize, outputSize, n);
             //
             if(cbHiddenActFunction.SelectedItem.ToString() != "LogSig")
             {
                 nnet.SetHiddenActivationFunction(ActivationFunction.TanSig);
             }
             //
             if(cbOutputActFunction.SelectedItem.ToString() == "TanSig")
                 nnet.SetOutputActivationFunction(ActivationFunction.TanSig);
             else if(cbOutputActFunction.SelectedItem.ToString() == "Linear")
                 nnet.SetOutputActivationFunction(ActivationFunction.Linear);
             //
             nnet.Randomize(-0.2, 0.2);
             //
             if(cbAlgorithm.Text == "RProp")
             {
                 algorithm = new RProp(nnet, trainingInputs, trainingTargets);
             }
             else
             {
                 algorithm = new SimpleBPM(nnet, trainingInputs, trainingTargets);
                 ((SimpleBPM)algorithm).Momentum = Convert.ToDouble(this.tbMomentum.Text);
                 ((SimpleBPM)algorithm).LearnRate = Convert.ToDouble(this.tbLearnRate.Text);
             }
             //
             algorithm.Epochs = Convert.ToUInt32(this.tbEpochs.Text);
             algorithm.MaxFailures = Convert.ToUInt32(this.tbMaxFails.Text);
             //
             if(valIsOk)
                 algorithm.SetValidationData(validationInputs, validationTargets);
             //
             if(cbAlgorithm.Text == "RProp")
             {
                 ((RProp)algorithm).Train();
             }
             else
             {
                 ((SimpleBPM)algorithm).Train();
             }
             //
             double trError = algorithm.TrainingPerformance;
             double vaError = Double.NaN;
             double teError = Double.NaN;
             //
             if(valIsOk)
             {
                 vaError = algorithm.ValidationPerformance;
                 meanVaError += vaError;
             }
             //
             if(testIsOk)
             {
                 teError = nnet.Simulate(new NeuralLib.DataSet(testInputs), new NeuralLib.DataSet(testTargets));
                 meanTeError += teError;
             }
             //
             meanTrError += trError;
             Application.DoEvents();
         }
         //
         meanTrError = meanTrError/numberOfTimesToTrain;
         //
         if(valIsOk)
             meanVaError = meanVaError/numberOfTimesToTrain;
         //
         if(testIsOk)
             meanTeError = meanTeError/numberOfTimesToTrain;
         //
         tbResults.AppendText("\n"+"\t"+n.ToString()+"\t\t"+meanTrError.ToString("E5")+"\t\t"+
             meanVaError.ToString("E6")+"\t\t"+meanTeError.ToString("E6"));
         //
         sw.WriteLine(nnet.GetStructure()+"\t\t"+meanTrError.ToString("E5")+"\t\t"+
             meanVaError.ToString("E6")+"\t\t"+meanTeError.ToString("E6"));
         Application.DoEvents();
     }
     //
     if(cbUseSecondLayer.Checked == true)
     {
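          // second sweep: two hidden layers; the first stays at n-1 neurons while the second grows from 2 to n-1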
         uint k = 2;
         while(keepTraining)
         {
             k++;
             if(k > n)
                 break;
             //
             double meanTrError = 0;
             double meanVaError = 0;
             double meanTeError = 0;
             //
             Application.DoEvents();
             for(int i=0; i<numberOfTimesToTrain; i++)
             {
                 nnet = new BackPropNet(inputSize, outputSize, n-1, k-1);
                 //
                 if(cbHiddenActFunction.SelectedItem.ToString() != "LogSig")
                 {
                     nnet.SetHiddenActivationFunction(ActivationFunction.TanSig);
                 }
                 //
                 if(cbOutputActFunction.SelectedItem.ToString() == "TanSig")
                     nnet.SetOutputActivationFunction(ActivationFunction.TanSig);
                 else if(cbOutputActFunction.SelectedItem.ToString() == "Linear")
                     nnet.SetOutputActivationFunction(ActivationFunction.Linear);
                 //
                 nnet.Randomize(-0.2, 0.2);
                 //
                 if(cbAlgorithm.Text == "RProp")
                 {
                     algorithm = new RProp(nnet, trainingInputs, trainingTargets);
                 }
                 else
                 {
                     algorithm = new SimpleBPM(nnet, trainingInputs, trainingTargets);
                     ((SimpleBPM)algorithm).Momentum = Convert.ToDouble(this.tbMomentum.Text);
                     ((SimpleBPM)algorithm).LearnRate = Convert.ToDouble(this.tbLearnRate.Text);
                 }
                 //
                 algorithm.Epochs = Convert.ToUInt32(this.tbEpochs.Text);
                 algorithm.MaxFailures = Convert.ToUInt32(this.tbMaxFails.Text);
                 //
                 if(valIsOk)
                     algorithm.SetValidationData(validationInputs, validationTargets);
                 //
                 if(cbAlgorithm.Text == "RProp")
                 {
                     ((RProp)algorithm).Train();
                 }
                 else
                 {
                     ((SimpleBPM)algorithm).Train();
                 }
                 //
                 double trError = algorithm.TrainingPerformance;
                 double vaError = Double.NaN;
                 double teError = Double.NaN;
                 //
                 if(valIsOk)
                 {
                     vaError = algorithm.ValidationPerformance;
                     meanVaError += vaError;
                 }
                 //
                 if(testIsOk)
                 {
                     teError = nnet.Simulate(new NeuralLib.DataSet(testInputs), new NeuralLib.DataSet(testTargets));
                     meanTeError += teError;
                 }
                 //
                 meanTrError += trError;
                 Application.DoEvents();
             }
             //
             meanTrError = meanTrError/numberOfTimesToTrain;
             //
              if(valIsOk)
                  meanVaError = meanVaError/numberOfTimesToTrain;
              //
              if(testIsOk)
                  meanTeError = meanTeError/numberOfTimesToTrain;
             //
             tbResults.AppendText("\n"+"\t"+nnet.GetStructure()+"\t\t"+meanTrError.ToString("E5")+"\t\t"+
                 meanVaError.ToString("E6")+"\t\t"+meanTeError.ToString("E6"));
             //
             sw.WriteLine(nnet.GetStructure()+"\t\t"+meanTrError.ToString("E5")+"\t\t"+
                 meanVaError.ToString("E6")+"\t\t"+meanTeError.ToString("E6"));
             Application.DoEvents();
         }
     }
     sw.Flush();
     sw.Close();
 }
Example #11
File: Form1.cs Project: jstark/NeuralNet
 private void bLoad_Click(object sender, System.EventArgs e)
 {
     //
     if(of.ShowDialog() == DialogResult.OK)
     {
         try
         {
             Network = BackPropNet.BinaryLoad(of.FileName);
             info.ShowDialog(this);
         }
         catch(System.Exception ex)
         {
             MessageBox.Show(ex.Message, "Exception", MessageBoxButtons.OK, MessageBoxIcon.Error);
             Network = null;
         }
     }
 }