Example #1
        public override void InitialiseClusters()
        {
            epochCounter = 1;
            iterator     = 1;
            Random Randy = new Random();

            map = new KohonenNeuron[x, y];
            for (int i = 0; i < x; i++)
            {
                for (int j = 0; j < y; j++)
                {
                    //randomly initialise the weights (stored in the "Centroid" field) of each neuron.
                    //System.Random is not thread-safe, so the vector is filled sequentially rather than with Parallel.For.
                    DenseVector InitialCentroid = new DenseVector(data[0].Item2.Count);
                    for (int iter = 0; iter < InitialCentroid.Count; iter++)
                    {
                        InitialCentroid[iter] = Randy.NextDouble();
                    }
                    List <Tuple <string, DenseVector> > EmptyList = new List <Tuple <string, DenseVector> >();
                    map[i, j] = new KohonenNeuron(EmptyList, $"{i}, {j}", new Tuple <int, int>(i, j));
                    map[i, j].SetCentroid(InitialCentroid);
                }
            }
            Tuple <string, DenseVector>[] ShuffleList = new Tuple <string, DenseVector> [data.Count];
            data.CopyTo(ShuffleList);
            shuffleList = ShuffleList.ToList();
        }
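For context, these two overrides would normally be driven by a small training loop: initialise the map once, then call IterateOnce until the stopping condition flags it as finished. The sketch below is only illustrative; the constructor arguments, the public Stopped property and the Clusters collection are assumptions and are not shown in the excerpts on this page.

        //hypothetical driver, assuming a concrete subclass "KohonenMap" exposing the overrides above
        //plus a public Stopped flag and the resulting Clusters list (names are illustrative only)
        var som = new KohonenMap(trainingData, x: 10, y: 10, learningRate: 0.1, radius: 5.0);
        som.InitialiseClusters();      // build the x-by-y grid of neurons with random weight vectors
        while (!som.Stopped)           // each call presents one datapoint to the map
        {
            som.IterateOnce();
        }
        Console.WriteLine($"SOM produced {som.Clusters.Count} clusters.");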
Example #2
        public override void IterateOnce()
        {
            if (!stopped)
            {
                if (shuffleList.Count == 0)
                {
                    //maintaining a separate list allows sampling at random without replacement throughout each epoch
                    //of training.  It must be refilled with all of the data at the start of each epoch.
                    //1 epoch = 1 pass over all datapoints

                    epochCounter++;
                    Tuple <string, DenseVector>[] ShuffleList = new Tuple <string, DenseVector> [data.Count];
                    data.CopyTo(ShuffleList);
                    shuffleList = ShuffleList.ToList();
                }
                //randomly select a datapoint without replacement (it is removed from shuffleList below)
                Random CurrentRand = new Random();
                int    PickIndex   = CurrentRand.Next(shuffleList.Count);
                Tuple <string, DenseVector> Picked = shuffleList[PickIndex];
                shuffleList.RemoveAt(PickIndex);
                //select the best-matching neuron by distance (squared distance avoids the square root overhead)
                double           BestDistance   = HelperFunctions.GetEuclideanDistanceSquared(Picked.Item2, map[0, 0].GetCentroid());
                Tuple <int, int> BestCoordinate = new Tuple <int, int>(0, 0);
                KohonenNeuron    Best           = map[0, 0];
                //this search is a reduction over shared best-so-far state, so it runs sequentially
                //rather than in a Parallel.For, and it covers every row of the map including row 0
                for (int i = 0; i < x; i++)
                {
                    for (int j = 0; j < y; j++)
                    {
                        double TestDistance = HelperFunctions.GetEuclideanDistanceSquared(Picked.Item2, map[i, j].GetCentroid());
                        if (BestDistance > TestDistance)
                        {
                            BestDistance   = TestDistance;
                            BestCoordinate = new Tuple <int, int>(i, j);
                            Best           = map[i, j];
                        }
                    }
                }
                //update data coordinates list
                currentDataCoordinates[data.IndexOf(Picked)] = BestCoordinate;
                //select neighbourhood and adjust neighbours' weights accordingly
                //exponential decay formula ensures neighbourhood keeps shrinking
                double CurrentRadius       = radius * Math.Exp(-(double)Decimal.Divide(iterator, (decimal)timeConstant));
                double CurrentLearningRate = learningRate * Math.Exp(-(double)Decimal.Divide(iterator, (decimal)timeConstant));
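                //symbolically: sigma(t) = radius * exp(-t / lambda) and L(t) = learningRate * exp(-t / lambda),
                //with t = iterator and lambda = timeConstant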
                for (int i = 0; i < x; i++)
                {
                    Parallel.For(0, y, j =>
                    {
                        double DistanceFromBest = HelperFunctions.GetEuclideanDistanceSquared(Best.Coordinates, map[i, j].Coordinates);
                        if (DistanceFromBest < Math.Pow(CurrentRadius, 2))
                        {
                            double NeighbourhoodWeighting   = Math.Exp(-(double)Decimal.Divide((decimal)DistanceFromBest, (decimal)(2 * Math.Pow(CurrentRadius, 2))));
                            DenseVector CentroidReplacement = map[i, j].GetCentroid() + (NeighbourhoodWeighting * CurrentLearningRate) * (Picked.Item2 - map[i, j].GetCentroid());
                            map[i, j].SetCentroid(CentroidReplacement);
                        }
                    });
                }
                //housekeeping
                iterator++;
                stopped = StoppingConditionMet();
                if (stopped)
                {
                    //add all datapoints to clusters by coordinates
                    //add all neurons to cluster list
                    for (int i = 0; i < x; i++)
                    {
                        for (int j = 0; j < y; j++)
                        {
                            ConcurrentBag <Tuple <string, DenseVector> > FinalMemberList = new ConcurrentBag <Tuple <string, DenseVector> >();
                            Parallel.For(0, data.Count, datum =>
                            {
                                if (currentDataCoordinates[datum].Item1 == map[i, j].Coordinates[0] && currentDataCoordinates[datum].Item2 == map[i, j].Coordinates[1])
                                {
                                    FinalMemberList.Add(data[datum]);
                                }
                            });
                            map[i, j].AddMembers(FinalMemberList.OrderBy(o => o.Item1).ToList());
                            clusters.Add(map[i, j]);
                        }
                    }
                }
            }
        }
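The weight update in IterateOnce follows the standard self-organising map formulas: the radius and learning rate decay exponentially with the iteration count, and each neuron inside the current radius is pulled towards the sample with a Gaussian neighbourhood weighting. The helper below is a minimal sketch that restates only that arithmetic; the class and method names and the use of plain double[] vectors are illustrative and not part of the code above.

using System;

static class SomUpdateSketch
{
    //exponential decay shared by the neighbourhood radius and the learning rate:
    //value(t) = value(0) * exp(-t / timeConstant)
    public static double Decay(double initialValue, int iteration, double timeConstant)
        => initialValue * Math.Exp(-iteration / timeConstant);

    //Gaussian neighbourhood weighting from the squared grid distance to the best-matching neuron:
    //theta = exp(-d^2 / (2 * sigma(t)^2))
    public static double NeighbourhoodWeighting(double distanceSquared, double currentRadius)
        => Math.Exp(-distanceSquared / (2 * currentRadius * currentRadius));

    //w <- w + theta * L(t) * (x - w), applied element-wise to a neuron's weight vector
    public static void UpdateWeights(double[] weights, double[] sample, double theta, double learningRate)
    {
        for (int k = 0; k < weights.Length; k++)
        {
            weights[k] += theta * learningRate * (sample[k] - weights[k]);
        }
    }
}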