private void Process()
{
    bool isSuccess = false;
    while (!isSuccess)
    {
        int max = FuzzyLogicUtil.FindMax(ChoiceValues.ToArray());
        double normOfSimilarity = 0;
        if (max != -1)
        {
            normOfSimilarity = F2.Neurons(max).Output();
        }
        else
        {
            // No eligible neuron remains as a winner, so commit a new
            // (dummy) neuron in the F2 layer and give it a choice value.
            max = F2.AddNeuron(FuzzyLogicUtil.MakeDummyNeuron(InputSize));
            ChoiceValues.Add(
                FuzzyLogicUtil.ChoicingValue(normOfInputs, 1.0 * InputSize, Choice));
            normOfSimilarity = normOfInputs;
        }

        // Calculate the match value and compare it with the vigilance parameter.
        winningVigilance = normOfSimilarity / normOfInputs;
        if (winningVigilance >= Vigilance)
        {
            winningNeuronPos = max;
            isSuccess = true;
        }
        else
        {
            // Reset the failed neuron so it cannot win again during this search.
            ChoiceValues[max] = 0;
        }
    }

    // Adjust the winning neuron's weights automatically, if enabled.
    if (AutoAdjustWeight)
    {
        AdjustWeight();
    }

    // Calculate the output: the fuzzy intersection of the winner's weights and the input.
    output = FuzzyLogicUtil.Intersect(F2.Neurons(WinningNeuronPos).Weights.ToArray(),
        InputData, InputSize);
}
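Process relies on several FuzzyLogicUtil helpers that are not shown in this listing. The sketch below is a minimal, hypothetical version of them, assuming the standard Fuzzy ART operations (L1 norm, component-wise minimum, and the choice function T = |I ∧ w| / (alpha + |w|)); the class name FuzzyLogicUtilSketch is introduced here only for illustration, and the real FuzzyLogicUtil may differ.

using System;

// Hypothetical sketch of the FuzzyLogicUtil helpers used above
// (assumption: standard Fuzzy ART operations).
public static class FuzzyLogicUtilSketch
{
    // L1 norm: sum of the components of a fuzzy vector.
    public static double Norm(double[] v)
    {
        double sum = 0;
        foreach (double x in v) sum += x;
        return sum;
    }

    // Fuzzy intersection: component-wise minimum of two vectors.
    public static double[] Intersect(double[] a, double[] b, int size)
    {
        var result = new double[size];
        for (int i = 0; i < size; ++i) result[i] = Math.Min(a[i], b[i]);
        return result;
    }

    // Choice (category activation) value: |I ∧ w| / (alpha + |w|).
    public static double ChoicingValue(double normOfSimilarity, double normOfWeight, double choice)
    {
        return normOfSimilarity / (choice + normOfWeight);
    }

    // Index of the largest positive choice value, or -1 if none remains
    // (reset neurons have their choice value set to 0 by Process).
    public static int FindMax(double[] values)
    {
        int best = -1;
        double bestValue = 0;
        for (int i = 0; i < values.Length; ++i)
        {
            if (values[i] > bestValue)
            {
                best = i;
                bestValue = values[i];
            }
        }
        return best;
    }

    // MakeDummyNeuron is omitted here because the Neuron type is not shown;
    // in Fuzzy ART an uncommitted neuron typically has all weights set to 1.
}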
public void Run(double[] data)
{
    input = data;

    // Compute the norm of the input values.
    normOfInputs = FuzzyLogicUtil.Norm(data);

    // Transfer the input values to the F1 layer.
    f1.Input(data);

    // Calculate the initial choice value for every F2 neuron.
    ChoiceValues.Clear();
    for (int i = 0; i < f2.Count; ++i)
    {
        double normOfWeight = FuzzyLogicUtil.Norm(F2.Neurons(i).Weights.ToArray());
        ChoiceValues.Add(FuzzyLogicUtil.ChoicingValue(F2.Neurons(i).Output(),
            normOfWeight, Choice));
    }

    Process();
}
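To make the quantities concrete, here is a small worked example of one search cycle, assuming the standard Fuzzy ART formulas sketched above; the numbers are illustrative only and do not come from the original listing.

// Worked example (assumed formulas): input I = (0.2, 0.8), one committed
// neuron with weights w = (0.3, 0.5), Choice (alpha) = 0.1, Vigilance (rho) = 0.7.
//
//   |I|     = 0.2 + 0.8         = 1.0     (normOfInputs)
//   I ^ w   = (0.2, 0.5)                  (component-wise minimum)
//   |I ^ w| = 0.7                         (normOfSimilarity, the neuron's Output)
//   T       = 0.7 / (0.1 + 0.8) = 0.778   (choice value)
//   match   = |I ^ w| / |I|     = 0.7     (winningVigilance)
//
// Since 0.7 >= rho = 0.7, the neuron wins and resonance occurs. Otherwise its
// choice value would be reset to 0 and the search would continue, eventually
// committing a new dummy neuron whose match is |I| / |I| = 1.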