Example #1
        // Builds and trains the multilayer perceptron from the values entered on the form.
        private void Train()
        {
            // Network configuration read from the form controls
            int        numOfLayers          = (int)MLNumOfLayersUpDown.Value;
            double     learningRate         = (double)MPLearningRateUpDown.Value;
            List <int> numOfNeuronsPerLayer = new List <int> ();

            int numOfEpocks = (int)MLNumOfEpocksUpDown.Value;
            LearningAlgorithm backpropagation = new Backpropagation();

            // Input layer: one neuron per feature; output layer: fixed at 3 neurons;
            // hidden layers: sizes taken from the NeuronsInHLayers grid.
            for (int i = 0; i < numOfLayers; ++i)
            {
                numOfNeuronsPerLayer.Add((i == 0) ? trainingSamples[0].Count
                                       : (i == numOfLayers - 1) ? 3
                                       : int.Parse(NeuronsInHLayers[1, i - 1].Value.ToString()));
            }


            this.neuralNetwork = new FeedforwardNeuralNetwrok(numOfLayers);
            this.neuralNetwork.setNetwork(numOfNeuronsPerLayer);

            // Assign a sigmoid activation to every non-input layer
            for (int i = 1; i < numOfLayers; ++i)
            {
                this.neuralNetwork.setLayer(i, new SigmoidFunction());
            }



            // Run backpropagation over the training set for the requested number of epochs
            for (int i = 0; i < numOfEpocks; ++i)
            {
                this.neuralNetwork.train(trainingSamples, trainingLabels, learningRate, backpropagation);
            }
        }
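
        // Hedged sketch (not part of the original form): the same topology-building
        // logic as Train() above, without the WinForms controls. The concrete sizes
        // below (10 input features, hidden layers of 8 and 5) are assumptions for
        // illustration; only the 3-neuron output layer is taken from the code above.
        private static List <int> BuildTopologySketch()
        {
            int   inputSize   = 10;        // assumed stand-in for trainingSamples[0].Count
            int[] hiddenSizes = { 8, 5 };  // assumed stand-in for the NeuronsInHLayers grid values
            const int outputSize = 3;      // fixed in Train() above

            List <int> layerSizes = new List <int> { inputSize };
            layerSizes.AddRange(hiddenSizes);
            layerSizes.Add(outputSize);
            return layerSizes;             // => { 10, 8, 5, 3 }
        }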
        // Sets up an RBF network: K centroids are drawn at random from the training samples,
        // clustered with K_Means, and a single identity-activation output layer is prepared
        // for LMS training.
        public RBF(int K, int numOfEpocks, double learningRate, double maxError,
                   List <List <double> > trainingSamples, List <List <double> > trainingLabels,
                   List <List <double> > testSamples, List <List <double> > testLabels)
        {
            this.numOfEpocks     = numOfEpocks;
            this.trainingSamples = trainingSamples;
            this.trainingLabels  = trainingLabels;
            this.testSamples     = testSamples;
            this.testLabels      = testLabels;
            this.maxError        = maxError;
            this.learningRate    = learningRate;
            Centroids            = new List <List <double> >(); //[NumOfCluster][NumOfFeatures (means)]
            Random rnd     = new Random(Guid.NewGuid().GetHashCode());
            int    counter = K;                                 // Num of Clusters

            // Pick K training samples at random as the initial centroids
            while (counter-- > 0)
            {
                int index = rnd.Next(trainingSamples.Count);
                Centroids.Add(trainingSamples[index]);
            }

            // Cluster the training samples around the chosen centroids and keep the per-cluster variances
            KMean    = new K_Means(ref Centroids, trainingSamples);
            Variance = KMean.GetVariance;

            // Output stage: a single identity-activation layer set up for LMS training
            List <int> numOfNeuronsPerLayer = new List <int>();

            backpropagation = new Backpropagation();

            numOfNeuronsPerLayer.Add(K); // hidden layer: one unit per cluster
            numOfNeuronsPerLayer.Add(3); // output layer: 3 neurons

            this.neuralNetwork = new FeedforwardNeuralNetwrok(2);
            this.neuralNetwork.setNetwork(numOfNeuronsPerLayer);

            this.neuralNetwork.LMSsetLayer(1, new IdentityFunction());
        }
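
        // Hedged usage sketch (not from the original project): how the constructor
        // above might be called. All data and hyper-parameter values here are toy
        // assumptions chosen only to show the argument order.
        private static RBF BuildToyRbf()
        {
            // Two 2-feature samples with one-hot labels for the 3 output classes
            List <List <double> > samples = new List <List <double> >
            {
                new List <double> { 0.1, 0.9 },
                new List <double> { 0.8, 0.2 }
            };
            List <List <double> > labels = new List <List <double> >
            {
                new List <double> { 1, 0, 0 },
                new List <double> { 0, 1, 0 }
            };

            return new RBF(
                2,       // K: number of clusters / RBF units (assumed)
                100,     // numOfEpocks (assumed)
                0.01,    // learningRate (assumed)
                0.001,   // maxError (assumed)
                samples, labels,    // training set
                samples, labels);   // test set (reused here purely for illustration)
        }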