Example #1
        protected internal override void onStart()
        {
            base.onStart();

            // set weights between input and rbf layer using kmeans
            KMeansClustering kmeans = new KMeansClustering(TrainingSet);

            kmeans.NumberOfClusters = neuralNetwork.getLayerAt(1).NeuronsCount;             // set the number of clusters to the number of RBF neurons
            kmeans.doClustering();

            // get clusters (centroids)
            Cluster[] clusters = kmeans.Clusters;

            // assign each rbf neuron to one cluster
            // and use centroid vectors to initialize neuron's input weights
            Layer rbfLayer = neuralNetwork.getLayerAt(1);
            int   i        = 0;

            foreach (Neuron neuron in rbfLayer.Neurons)
            {
                KVector  centroid     = clusters[i].Centroid;
                double[] weightValues = centroid.Values;
                int      c            = 0;
                foreach (Connection conn in neuron.InputConnections)
                {
                    conn.Weight.Value = weightValues[c];
                    c++;
                }
                i++;
            }

            // get cluster centroids as list
            List<KVector> centroids = new List<KVector>();

            foreach (Cluster cluster in clusters)
            {
                centroids.Add(cluster.Centroid);
            }

            // use KNN to calculate the sigma parameter (Gaussian function width) for each neuron
            KNearestNeighbour knn = new KNearestNeighbour();

            knn.DataSet = centroids;

            int n = 0;

            foreach (KVector centroid in centroids)
            {
                // calculate and set sigma for each neuron in rbf layer
                KVector[] nearestNeighbours = knn.getKNearestNeighbours(centroid, k);
                double    sigma             = calculateSigma(centroid, nearestNeighbours);  // width from distances to the nearest centroids (see calculateSigma below)
                Neuron    neuron            = rbfLayer.getNeuronAt(n);
                ((Gaussian)neuron.TransferFunction).Sigma = sigma;
                n++;
            }
        }
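
The example above relies on KMeansClustering to produce the centroids; the sketch below shows, for intuition only, what that clustering step does conceptually. It is a plain Lloyd-style k-means on coordinate arrays, not the library's implementation, and every name in it (KMeansSketch, the array layout) is an illustrative assumption. It assumes using System and using System.Linq.

        // Illustrative stand-in for kmeans.doClustering(): plain Lloyd-style k-means.
        // points[i][d] is coordinate d of sample i; returns k centroid vectors.
        public static double[][] KMeansSketch(double[][] points, int k, int maxIterations = 100)
        {
            Random rnd = new Random(0);
            int dim = points[0].Length;

            // start from k randomly chosen samples as initial centroids
            double[][] centroids = points.OrderBy(_ => rnd.Next())
                                         .Take(k)
                                         .Select(p => (double[])p.Clone())
                                         .ToArray();

            int[] assignment = new int[points.Length];

            for (int iter = 0; iter < maxIterations; iter++)
            {
                bool changed = false;

                // assignment step: each sample goes to its nearest centroid
                for (int i = 0; i < points.Length; i++)
                {
                    int best = 0;
                    double bestDist = double.MaxValue;
                    for (int c = 0; c < k; c++)
                    {
                        double dist = 0;
                        for (int d = 0; d < dim; d++)
                        {
                            double diff = points[i][d] - centroids[c][d];
                            dist += diff * diff;
                        }
                        if (dist < bestDist) { bestDist = dist; best = c; }
                    }
                    if (assignment[i] != best) { assignment[i] = best; changed = true; }
                }

                // converged: no sample changed its cluster since the last update
                if (!changed && iter > 0)
                {
                    break;
                }

                // update step: move each centroid to the mean of its assigned samples
                for (int c = 0; c < k; c++)
                {
                    double[] sum = new double[dim];
                    int count = 0;
                    for (int i = 0; i < points.Length; i++)
                    {
                        if (assignment[i] != c) continue;
                        count++;
                        for (int d = 0; d < dim; d++) sum[d] += points[i][d];
                    }
                    if (count > 0)
                    {
                        for (int d = 0; d < dim; d++) centroids[c][d] = sum[d] / count;
                    }
                }
            }

            return centroids;
        }

The returned centroid vectors play the same role as clusters[i].Centroid.Values in the weight-initialization loop above.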
Example #2
        /// <summary>
        /// Calculates and returns the width (sigma) of a Gaussian function. </summary>
        /// <param name="centroid"> the cluster centroid assigned to the neuron </param>
        /// <param name="nearestNeighbours"> the k centroids nearest to <paramref name="centroid"/> </param>
        /// <returns> root mean square distance from the centroid to its nearest neighbours </returns>
        private double calculateSigma(KVector centroid, KVector[] nearestNeighbours)
        {
            double sigma = 0;

            foreach (KVector nn in nearestNeighbours)
            {
                sigma += Math.Pow(centroid.distanceFrom(nn), 2);
            }

            sigma = Math.Sqrt(1 / ((double)nearestNeighbours.Length) * sigma);

            return(sigma);
        }
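
Numerically, the formula above is the root mean square distance from the centroid to its nearest neighbours: with two neighbours at distances 1.0 and 2.0, sigma = sqrt((1 + 4) / 2) ≈ 1.58. The standalone sketch below repeats the computation on plain coordinate arrays; EuclideanDistance and the sample points are assumptions made for the illustration, not library code.

        // Standalone illustration of the same width formula on plain double[] coordinates.
        private static double EuclideanDistance(double[] a, double[] b)
        {
            double sum = 0;
            for (int i = 0; i < a.Length; i++)
            {
                double diff = a[i] - b[i];
                sum += diff * diff;
            }
            return Math.Sqrt(sum);
        }

        private static double SigmaSketch(double[] centroid, double[][] nearestNeighbours)
        {
            double sum = 0;
            foreach (double[] nn in nearestNeighbours)
            {
                double d = EuclideanDistance(centroid, nn);
                sum += d * d;
            }
            return Math.Sqrt(sum / nearestNeighbours.Length);
        }

        // Example: neighbours at distances 1.0 and 2.0 from a centroid at the origin
        //   SigmaSketch(new[] { 0.0, 0.0 },
        //               new[] { new[] { 1.0, 0.0 }, new[] { 0.0, 2.0 } })
        //   returns Math.Sqrt(2.5) ≈ 1.58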
Example #3
        /// <summary>
        /// Finds the k vectors in the dataset nearest to the given vector using a
        /// partial selection sort (see http://en.wikipedia.org/wiki/Selection_algorithm). </summary>
        /// <param name="vector"> the query vector </param>
        /// <param name="k"> number of nearest neighbours to return </param>
        /// <returns> the k dataset vectors closest to <paramref name="vector"/> </returns>
        public virtual KVector[] getKNearestNeighbours(KVector vector, int k)
        {
            KVector[] nearestNeighbours = new KVector[k];

            // calculate distances for entire dataset
            foreach (KVector otherVector in dataSet)
            {
                double distance = vector.distanceFrom(otherVector);
                otherVector.Distance = distance;
            }

            for (int i = 0; i < k; i++)
            {
                int     minIndex    = i;
                KVector minVector   = dataSet[i];
                double  minDistance = minVector.Distance;

                for (int j = i + 1; j < dataSet.Count; j++)
                {
                    if (dataSet[j].Distance <= minDistance)
                    {
                        minVector   = dataSet[j];
                        minDistance = minVector.Distance;
                        minIndex    = j;
                    }
                }

                // swap list[i] and list[minIndex]
                KVector temp = dataSet[i];
                dataSet[i]        = dataSet[minIndex];
                dataSet[minIndex] = temp;

                nearestNeighbours[i] = dataSet[i];
            }


            //            function select(list[1..n], k)
            //                for i from 1 to k
            //                    minIndex = i
            //                    minValue = list[i]
            //                    for j from i+1 to n
            //                        if list[j] < minValue
            //                            minIndex = j
            //                            minValue = list[j]
            //                    swap list[i] and list[minIndex]
            //                return list[k]

            return(nearestNeighbours);
        }
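
The loop above is a partial selection sort: it makes k passes over the dataset, so it runs in O(k·n) and reorders the first k entries of dataSet as a side effect. A more compact, though O(n log n), equivalent is sketched below; it assumes using System.Linq and reuses the KVector.distanceFrom method already shown above.

            // LINQ-based sketch of the same selection: sorts the whole dataset by distance
            // to the query vector and keeps the first k, without reordering dataSet in place.
            KVector[] nearestNeighbours = dataSet
                .OrderBy(other => vector.distanceFrom(other))
                .Take(k)
                .ToArray();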
Example #4
        /// <summary>
        /// load all parameters
        /// </summary>
        public static void LoadParameters()
        {
            if (!File.Exists(paramFile))
            {
                throw new Exception("Parameters file does not exist");
            }
            XmlSerializer xmlSerializer = new XmlSerializer(typeof(ParameterSettings));
            using (FileStream xmlFileStream = new FileStream(paramFile, FileMode.Open))
            {
                parameters = (ParameterSettings)xmlSerializer.Deserialize(xmlFileStream);
            }

            kVector    = new KVector(parameters.kDopsParam.kParam);
            kVectors3D = new KVector();
        }
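
For completeness, a matching save routine could look like the sketch below. SaveParameters is an assumed helper that does not appear in the source; it only reuses the paramFile and parameters fields from LoadParameters together with the standard XmlSerializer.Serialize call.

        /// <summary>
        /// Sketch of a counterpart to LoadParameters (assumed helper, not in the original source):
        /// writes the current ParameterSettings back to paramFile.
        /// </summary>
        public static void SaveParameters()
        {
            XmlSerializer xmlSerializer = new XmlSerializer(typeof(ParameterSettings));
            using (FileStream xmlFileStream = new FileStream(paramFile, FileMode.Create))
            {
                xmlSerializer.Serialize(xmlFileStream, parameters);
            }
        }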