Example #1
0
        //
        //
        // Builds the RProp trainer: allocates the per-layer state used by the
        // algorithm (previous/current bias deltas, previous/current weight
        // error gradients, and per-parameter learning rates), then seeds every
        // learning rate via InitializeLearnRates(). Any exception is logged to
        // the console and rethrown.
        public RProp(BackPropNet net, string patterns, string targets)
            : base(net, patterns, targets)
        {
            try
            {
                // One state slot per pair of adjacent layers.
                uint layers = net.Depth - 1;

                // Per-layer bias state (vectors).
                oldTotalDelta   = new double[layers][];
                totalDelta      = new double[layers][];
                biasLearnRates  = new double[layers][];

                // Per-layer weight state (matrices, shaped like errGrad).
                oldTotalErrGrad  = new double[layers][,];
                totalErrGrad     = new double[layers][,];
                weightLearnRates = new double[layers][,];

                for (uint i = 0; i < layers; i++)
                {
                    uint size = net.GetLayer(i + 1).GetSize();
                    int rows = errGrad[i].GetLength(0);
                    int cols = errGrad[i].GetLength(1);

                    oldTotalDelta[i]   = new double[size];
                    totalDelta[i]      = new double[size];
                    biasLearnRates[i]  = new double[size];

                    oldTotalErrGrad[i]  = new double[rows, cols];
                    totalErrGrad[i]     = new double[rows, cols];
                    weightLearnRates[i] = new double[rows, cols];
                }

                // All learning rates start out at 0.1.
                InitializeLearnRates();
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                throw;
            }
        }
Example #2
0
        protected uint vPeriod = 5; // validation period.

        #endregion Fields

        #region Constructors

        //
        //
        //
        // Builds the training algorithm: loads the training patterns and
        // targets from disk, validates them against each other and against the
        // network's input/output layer sizes, and allocates one error-gradient
        // matrix per pair of adjacent layers.
        //
        // net       - the network to be trained.
        // tPatterns - path of the training-pattern data set.
        // tTargets  - path of the training-target data set.
        //
        // Throws ArgumentException when the data sets are invalid, mismatched,
        // or inconsistent with the network topology. Any exception is logged
        // to the console and rethrown.
        public TrainingAlgorithm(BackPropNet net, string tPatterns, string tTargets)
        {
            uint depth = net.Depth;
            //
            try
            {
                this.trPatterns = new DataSet(tPatterns);
                this.trTargets  = new DataSet(tTargets);
                this.network    = net;
                //
                // Validate the data up front, before allocating anything
                // (fail fast; the original allocated first, then validated).
                if(trPatterns.GetNumberOfVectors() != trTargets.GetNumberOfVectors())
                    throw new ArgumentException("Datasets have different number of vectors.");
                if(!trPatterns.GetDataValidation() || !trTargets.GetDataValidation())
                    throw new ArgumentException("Invalid Data");
                if(trPatterns.GetLengthOfVectors() != net.GetLayer(0).GetSize() ||
                    trTargets.GetLengthOfVectors() != net.GetLayer(depth-1).GetSize())
                    throw new ArgumentException("Inconsistent Data/Inputs or Data/Outputs");
                //
                // Allocate the error-gradient matrices: errGrad[i] is
                // (size of layer i) x (size of layer i+1).
                errGrad = new double[depth-1][,];
                //
                for(uint i = 0; i < depth-1; i++)
                {
                    uint rows = net.GetLayer(i).GetSize();
                    uint cols = net.GetLayer(i+1).GetSize();
                    //
                    errGrad[i] = new double[rows, cols];
                }
            }
            catch(Exception ex)
            {
                Console.WriteLine(ex.Message);
                throw;
            }
        }
Example #3
0
 //
 //
 // Builds the simple back-propagation-with-momentum trainer and allocates
 // zero-initialized storage for the previous bias updates and previous
 // weight updates. Any exception is logged to the console and rethrown.
 public SimpleBPM(BackPropNet net, string patterns, string targets)
     : base(net, patterns, targets)
 {
     //
     try
     {
         uint layers = net.Depth - 1;
         //
         // Previous bias updates: one vector per non-input layer.
         pBiasUpdates = new double[layers][];
         //
         for(uint i = 0; i < layers; i++)
             pBiasUpdates[i] = new double[net.GetLayer(i+1).GetSize()];
         //
         // Previous weight updates: one matrix per layer pair, shaped like the
         // corresponding error-gradient matrix.
         //
         // BUG FIX: the original used (double[][,])errGrad.Clone(), but
         // Array.Clone() is a SHALLOW copy — the cloned outer array shares the
         // inner double[,] matrices with errGrad, so momentum updates would
         // have overwritten the gradients. Allocate independent, zero-filled
         // matrices instead (matching the pBiasUpdates initialization).
         pWeightUpdates = new double[layers][,];
         //
         for(uint i = 0; i < layers; i++)
             pWeightUpdates[i] = new double[errGrad[i].GetLength(0), errGrad[i].GetLength(1)];
         //
     }
     catch(System.Exception ex)
     {
         Console.WriteLine(ex.Message);
         throw;
     }
 }