/// <summary>
/// Computes the Gaussian neighborhood coefficient between the winning neuron and a
/// neighbor: exp(-d² / (2σ²)), where d is the grid distance and σ is the current
/// neighborhood parameter (radius).
/// </summary>
/// <param name="neighborhoodParameter">Current neighborhood radius σ; also used as the cutoff threshold.</param>
/// <param name="winner">The best-matching (winning) neuron.</param>
/// <param name="neighbor">The neuron whose influence coefficient is being computed.</param>
/// <returns>The Gaussian coefficient, or 0 when it exceeds the neighborhood parameter.</returns>
private double NeighborsWeightCoefficient(double neighborhoodParameter, KohonenLayerNeuron winner, KohonenLayerNeuron neighbor)
{
    double distance = winner.DistanceToNeuron(neighbor);
    double coefficient = Math.Exp(-Math.Pow(distance, 2) / (2 * Math.Pow(neighborhoodParameter, 2)));

    // NOTE(review): comparing the (0..1] Gaussian value against the radius is unusual
    // as a cutoff — a conventional SOM truncates on distance > radius instead. Kept
    // as-is; confirm against the intended neighborhood shape.
    if (coefficient > neighborhoodParameter)
    {
        return 0;
    }

    return coefficient;
}
/// <summary>
/// Initializes two layers in neural network and neurons inside.
/// </summary>
/// <param name="dataDimension">Number of attributes each vector has. Affects number of neurons in first (distributive) layer.</param>
/// <returns>The neurons of the Kohonen (second) layer.</returns>
public List <KohonenLayerNeuron> InitializeLayers(int dataDimension)
{
    // Distributive (first) layer: one neuron per input attribute.
    var distributiveLayer = new Layer();
    for (int attribute = 0; attribute < dataDimension; attribute++)
    {
        distributiveLayer.Neurons.Add(new DistributiveLayerNeuron());
    }

    // Kohonen (second) layer: a grid of _kohonenCardWidth x _kohonenCardHeight neurons.
    var kohonenLayer = new Layer();
    int x = 1;
    int y = 1;
    for (int i = 0; i < _kohonenCardWidth * _kohonenCardHeight; i++)
    {
        var neuron = new KohonenLayerNeuron(x, y);

        // Fully connect the new Kohonen neuron to every distributive-layer neuron.
        foreach (var inputNeuron in distributiveLayer.Neurons)
        {
            var signal = new Signal();
            neuron.InputSignals.Add(signal);
            inputNeuron.OutputSignals.Add(signal);
        }

        ////neuron.InitializeRandomWeights(distributiveLayer.Neurons.Count);
        kohonenLayer.Neurons.Add(neuron);

        // Advance grid coordinates: y runs 1.._kohonenCardWidth, then wraps and x moves on.
        if (y == _kohonenCardWidth)
        {
            y = 0;
            x++;
        }
        y++;
    }

    _layers.Add(distributiveLayer);
    _layers.Add(kohonenLayer);

    _logger.Information(
        "Initialized 2 layers: distributive ({NeuronsInFirstLayer} neurons) and Kohonen ({NeuronsInKohonenLayer} neurons).",
        dataDimension,
        _kohonenCardHeight * _kohonenCardWidth);

    return _layers[1].Neurons.OfType <KohonenLayerNeuron>().ToList();
}
/// <summary>
/// Trains the Kohonen layer on the supplied data set, one pass over the records.
/// For each vector: feeds it through the distributive layer, finds the
/// best-matching unit (BMU), then pulls every neuron's weights toward the input
/// scaled by the learning rate and the neighborhood coefficient.
/// </summary>
/// <param name="learnData">Training vectors; each must have as many inputs as the distributive layer has neurons.</param>
/// <returns>For every training vector, the winning Kohonen neuron it was mapped to.</returns>
public List <InputDataResult> Learn(List <InputData> learnData)
{
    _logger.Information("Started learning. Test data has {NumberOfRecords} records.", learnData.Count);
    var result = new List <InputDataResult>();
    var sw = Stopwatch.StartNew();

    // Seed every Kohonen neuron's weight vector from the first training vector.
    // NOTE(review): this makes all initial weights identical; random initialization
    // (the commented-out InitializeRandomWeights call in InitializeLayers) may give
    // better map coverage — confirm intent.
    _layers[1].Neurons.ForEach(n => learnData[0].Inputs.ForEach(n.Weights.Add));

    int dataSize = learnData.Count;
    for (int iteration = 0; iteration < dataSize; iteration++)
    {
        InputData inputData = learnData[iteration];

        // Feed the input vector through the distributive layer.
        for (int i = 0; i < inputData.Inputs.Count; i++)
        {
            _layers[0].Neurons[i].InputSignals[0].Value = inputData.Inputs[i];
            _layers[0].Neurons[i].FeedForward();
        }

        // Find the best-matching unit: smallest distance from weight vector to input.
        double minDistance = double.MaxValue;
        int indexOfMinDistanceNeuron = 0;
        for (int i = 0; i < _layers[1].Neurons.Count; i++)
        {
            double distanceToNeuron = _layers[1].Neurons[i].DistanceToWeightVector(inputData.Inputs);
            if (distanceToNeuron < minDistance)
            {
                minDistance = distanceToNeuron;
                indexOfMinDistanceNeuron = i;
            }
        }

        // Direct cast: a non-Kohonen neuron in layer 1 is a programming error and
        // should fail loudly here rather than as a NullReferenceException later.
        var winner = (KohonenLayerNeuron)_layers[1].Neurons[indexOfMinDistanceNeuron];
        result.Add(new InputDataResult(inputData, winner));

        double neighborhoodRadius = NeighborhoodRadius(iteration, dataSize);
        double learningRate = LearningRate(iteration);

        foreach (KohonenLayerNeuron neuron in _layers[1].Neurons.Cast <KohonenLayerNeuron>())
        {
            var neighborsWeightCoefficient = neuron == winner
                ? 1
                : NeighborsWeightCoefficient(neighborhoodRadius, winner, neuron);

            for (int i = 0; i < neuron.Weights.Count; i++)
            {
                // SOM update rule: w_i += α(t) * h(t) * (x_i - w_i).
                // BUGFIX: previously read _layers[1].Neurons[i].Weights[0] — the i-th
                // Kohonen NEURON's first weight, with i indexing weight components —
                // instead of the input component. That is both semantically wrong and
                // an IndexOutOfRange risk when the input dimension exceeds the neuron count.
                neuron.Weights[i] += learningRate * neighborsWeightCoefficient * (inputData.Inputs[i] - neuron.Weights[i]);
            }
        }
    }

    _logger.Information("Learning finished in {TimeElapsed}.", sw.Elapsed);
    return result;
}
/// <summary>
/// Maps every input vector to its best-matching Kohonen neuron, then groups the
/// active neurons into the requested number of clusters by running k-means
/// (Euclidean distance, 30 iterations) over the neuron positions.
/// </summary>
/// <param name="dataToCluster">Input vectors to assign to clusters.</param>
/// <param name="numberOfClusters">Target number of clusters for k-means.</param>
/// <returns>Clusters, each holding the input/winner pairs of its member neurons.</returns>
public List <Cluster> Cluster(List <InputData> dataToCluster, int numberOfClusters)
{
    var neuronsInputData = new Dictionary <KohonenLayerNeuron, List <InputData> >();

    foreach (var inputData in dataToCluster)
    {
        // Push the vector through the distributive layer.
        for (int i = 0; i < inputData.Inputs.Count; i++)
        {
            _layers[0].Neurons[i].InputSignals[0].Value = inputData.Inputs[i];
            _layers[0].Neurons[i].FeedForward();
        }

        // Locate the best-matching unit for this vector.
        double minDistance = double.MaxValue;
        int winnerIndex = 0;
        for (int i = 0; i < _layers[1].Neurons.Count; i++)
        {
            double distance = _layers[1].Neurons[i].DistanceToWeightVector(inputData.Inputs);
            if (distance < minDistance)
            {
                minDistance = distance;
                winnerIndex = i;
            }
        }

        var winner = _layers[1].Neurons[winnerIndex] as KohonenLayerNeuron;
        if (!neuronsInputData.ContainsKey(winner))
        {
            neuronsInputData[winner] = new List <InputData>();
        }
        neuronsInputData[winner].Add(inputData);
    }

    // Cluster the positions of the active neurons with k-means.
    var kMeansResult = KMeans.Cluster(
        neuronsInputData.Keys.Select(k => k.Position).ToArray(),
        numberOfClusters,
        30,
        (double[] point, double[] centroid) =>
        {
            // Euclidean distance between a point and a centroid.
            double squaredSum = 0;
            for (int i = 0; i < point.Length; i++)
            {
                squaredSum += Math.Pow(point[i] - centroid[i], 2);
            }
            return Math.Sqrt(squaredSum);
        });

    var result = new List <Cluster>();
    foreach (var positions in kMeansResult.Clusters)
    {
        var inputDataResults = new List <InputDataResult>();
        foreach (var position in positions)
        {
            // NOTE(review): `k.Position == position` is reference equality on double[] —
            // this relies on KMeans handing back the exact array instances it was given;
            // if it copies them, FirstOrDefault returns null and the indexer throws. Verify.
            KohonenLayerNeuron kohonenLayerNeuron = neuronsInputData.Keys.FirstOrDefault(k => k.Position == position);
            inputDataResults.AddRange(neuronsInputData[kohonenLayerNeuron].Select(e => new InputDataResult(e, kohonenLayerNeuron)));
        }
        result.Add(new Cluster { InputDataResults = inputDataResults });
    }

    return result;
}