Code Example #1
File: FuzzyLogicUtil.cs  Project: kachontep/nnsharp
 public static Neuron MakeDummyNeuron(int inputSize)
 {
     Neuron dummy = new Neuron(LinearFunction.Instance,
         FuzzySetIntersectOperation.Instance);
     for (int i = 0; i < inputSize; i++)
         dummy.Weights.Add(1.0);
     return dummy;
 }
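A minimal usage sketch (not from the project) of what the helper produces: the returned neuron's weight vector is all ones, which is exactly the state that IsUnCommited in example #6 treats as an uncommitted map-field node.

 // Sketch only: shows the expected state of the dummy neuron.
 Neuron dummy = FuzzyLogicUtil.MakeDummyNeuron(4);
 // dummy.Weights now holds { 1.0, 1.0, 1.0, 1.0 }, so the map field
 // treats this neuron as uncommitted until training moves its weights.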
Code Example #2
File: NeuronTest.cs  Project: kachontep/nnsharp
        public void TestInputOutput()
        {
            double[] input = { 1, 2, 3, 4, 5 };
            double[] weights = { 1, 2, 3, 4, 5 };
            double expected = 55;

            Neuron neuron = new Neuron(LinearFunction.Instance);
            for (int i = 0; i < weights.Length; i++)
                neuron.Weights.Add(weights[i]);

            neuron.Input(input);

            Assert.AreEqual(expected, neuron.Output());
        }
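The expected value is just the dot product of input and weights: with LinearFunction the neuron outputs the weighted sum 1·1 + 2·2 + 3·3 + 4·4 + 5·5 = 55. A standalone sketch of that computation (the helper name is hypothetical, not part of nnsharp):

 // Hypothetical helper, shown only to spell out what the test expects.
 static double DotProduct(double[] input, double[] weights)
 {
     double sum = 0.0;
     for (int i = 0; i < input.Length; i++)
         sum += input[i] * weights[i];
     return sum;   // 1*1 + 2*2 + 3*3 + 4*4 + 5*5 = 55
 }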
Code Example #3
File: NeuronLayerTest.cs  Project: kachontep/nnsharp
        public void SetUp()
        {
            layer2 = new NeuronLayer();
            layer1 = new NeuronLayer(layer2);

            double[,] weights1 = new double[,] { { 1, 1, 1, 1 }, { 1, 1, 1, 1 },
                                                 { 1, 1, 1, 1 }, { 1, 1, 1, 1 } };
            double[,] weights2 = new double[,] { { 1, 1, 1, 1 }, { 1, 1, 1, 1 },
                                                 { 1, 1, 1, 1 }, { 1, 1, 1, 1 } };

            for (int i = 0; i < NEURON_SIZE; i++)
            {
                Neuron neuron1 = new Neuron(LinearFunction.Instance);
                // j walks the columns of row i, so bound it by dimension 1
                // (the original dimension 0 only worked because the matrix is square).
                for (int j = 0; j <= weights1.GetUpperBound(1); j++)
                    neuron1.Weights.Add(weights1[i, j]);
                layer1.AddNeuron(neuron1);

                Neuron neuron2 = new Neuron(LinearFunction.Instance);
                for (int j = 0; j <= weights2.GetUpperBound(1); j++)
                    neuron2.Weights.Add(weights2[i, j]);
                layer2.AddNeuron(neuron2);
            }
        }
Code Example #4
 private void Initialize()
 {
     for (int i = 0; i < InputSize; ++i)
     {
         Neuron neuron = new Neuron(LinearFunction.Instance);
         for (int j = 0; j < InputSize; ++j)
         {
             if (i == j)
                 neuron.Weights.Add(1.0);
             else
                 neuron.Weights.Add(0.0);
         }
         f1.AddNeuron(neuron);
     }
 }
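This builds an identity weight matrix: with a linear activation, neuron i reproduces component i of the input unchanged, so the layer acts as a pass-through input stage. f1 is presumably the F1 input layer of the fuzzy ART module, though its declaration is not shown in this listing.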
Code Example #5
        private void Process()
        {
            bool isSuccess = false;

            while (!isSuccess)
            {
                // Select target weight
                int winnerNeuronIndex = ArtA.WinningNeuronPos;

                Neuron weightNeuron = null;
                if (winnerNeuronIndex >= Map.Count)
                {
                    // Create new weight neuron in mapfield.
                    weightNeuron = Map.Neurons(
                        Map.AddNeuron(FuzzyLogicUtil.MakeDummyNeuron(ArtB.InputSize)));
                }
                else
                {
                    weightNeuron = Map.Neurons(winnerNeuronIndex);
                }

                if (training)
                {
                    // Check vigilance condition
                    double[] artBValues = artB.Output;
                    weightNeuron.Input(artBValues);
                    double normOfSimilarity = weightNeuron.Output();
                    double testVigilance    = normOfSimilarity / FuzzyLogicUtil.Norm(artBValues);
                    if (testVigilance >= Vigilance)
                    {
                        output = FuzzyLogicUtil.Intersect(weightNeuron.Weights.ToArray(), artBValues, ArtB.InputSize);

                        // Modify new mapfield weight.
                        for (int i = 0; i < weightNeuron.Weights.Count; ++i)
                        {
                            weightNeuron.Weights[i] = (Beta * output[i]) + ((1 - Beta) * weightNeuron.Weights[i]);
                        }

                        // Call ArtA to adjust its winner neuron weight.
                        bool isWeightAdjusted = false;
                        if (FastCommitedSlowLearningOption)
                        {
                            if (IsUnCommited(ArtA.F2.Neurons(ArtA.WinningNeuronPos)))
                            {
                                double beta = ArtA.Beta;
                                ArtA.Beta = 1.0;
                                artA.AdjustWeight();
                                ArtA.Beta        = beta;
                                isWeightAdjusted = true;
                            }
                        }
                        if (!isWeightAdjusted)
                        {
                            ArtA.AdjustWeight();
                        }

                        // Call ArtB to adjust its winner neuron weight.
                        ArtB.AdjustWeight();

                        isSuccess = true;
                    }
                    else
                    {
                        double newVigilance = ArtA.WinningVigilance + ArtA.Choice;
                        ArtA.Vigilance = newVigilance > 1 ? 1.0 : newVigilance;
                        ArtA.Reset(winnerNeuronIndex);
                    }
                }
                else
                {
                    output    = weightNeuron.Weights.ToArray();
                    isSuccess = true;
                }
            }
            training = false;
        }
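The vigilance test above is the standard fuzzy ARTMAP match criterion |w ∧ b| / |b| ≥ ρ: the dummy map-field neuron was built with FuzzySetIntersectOperation, so its Output() presumably returns the norm of the fuzzy intersection of its weights with the ART-B output. FuzzyLogicUtil.Norm and FuzzyLogicUtil.Intersect are not shown in this listing; the sketch below gives their conventional fuzzy ART definitions (L1 norm and component-wise minimum) and is an assumption based on that convention, not the nnsharp source.

 using System;

 // Sketch of the conventional fuzzy ART operators; signatures are assumed
 // from the calls in Process() above.
 static double Norm(double[] v)
 {
     double sum = 0.0;
     foreach (double x in v)
         sum += x;                        // L1 norm: fuzzy ART vectors are non-negative
     return sum;
 }

 static double[] Intersect(double[] a, double[] b, int size)
 {
     double[] result = new double[size];
     for (int i = 0; i < size; i++)
         result[i] = Math.Min(a[i], b[i]);   // fuzzy AND = component-wise minimum
     return result;
 }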
Code Example #6
 bool IsUnCommited(Neuron neuron)
 {
     // A map-field neuron counts as uncommitted while every weight still has
     // the initial dummy value of 1.0 (see MakeDummyNeuron in example #1).
     bool isUnCommited = true;
     if (neuron != null)
     {
         for (int i = 0; i < neuron.Weights.Count; i++)
         {
             if (neuron.Weights[i] != 1)
             {
                 isUnCommited = false;
                 break;   // one non-unit weight is enough to decide
             }
         }
     }
     return isUnCommited;
 }
Code Example #7
 public void RemoveNeuron(Neuron neuron)
 {
     InnerList.Remove(neuron);
 }
Code Example #8
 public int AddNeuron(Neuron neuron)
 {
     return InnerList.Add(neuron);
 }
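InnerList and the int returned from Add suggest that NeuronLayer derives from System.Collections.CollectionBase, whose protected InnerList is an ArrayList and whose Add returns the index of the new element (which is how Process() in example #5 obtains the position of a freshly added map-field neuron). A minimal sketch under that assumption, not the actual nnsharp class:

 using System.Collections;

 // Sketch only: a CollectionBase-backed layer exposing the members used above.
 public class NeuronLayerSketch : CollectionBase
 {
     public int AddNeuron(Neuron neuron)
     {
         return InnerList.Add(neuron);   // ArrayList.Add returns the new index
     }

     public void RemoveNeuron(Neuron neuron)
     {
         InnerList.Remove(neuron);
     }

     public Neuron Neurons(int index)
     {
         return (Neuron)InnerList[index];
     }
 }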