Example #1
0
        // Reconstructs a neuron from its serialized form ("w0,w1,...|bias") and wires
        // its inputs to the nodes of PreviousLayer. A null PreviousLayer marks an
        // input-layer neuron, which carries no weights and a zero bias.
        public SigmoidNeuron(string NeuronData, NeuronLayer PreviousLayer)
        {
            if (PreviousLayer != null)
            {
                string[] DataSplit   = NeuronData.Split('|');
                string[] WeightsData = DataSplit[0].Split(',');

                // Parse with InvariantCulture: the serialized format uses '.' as the
                // decimal separator, so a current-culture parse would fail (or silently
                // misread values) on locales whose decimal separator is ','.
                Bias = float.Parse(DataSplit[1], System.Globalization.CultureInfo.InvariantCulture);

                Weights      = new float[WeightsData.Length];
                Inputs       = new SigmoidNeuron[WeightsData.Length];
                // NOTE(review): sized by the previous layer while the loop below runs
                // over WeightsData — this assumes the saved weight count equals
                // PreviousLayer.Nodes.Length; confirm against the save format.
                NablaWeights = new float[PreviousLayer.Nodes.Length];

                for (int i = 0; i < WeightsData.Length; ++i)
                {
                    Weights[i]      = float.Parse(WeightsData[i], System.Globalization.CultureInfo.InvariantCulture);
                    Inputs[i]       = PreviousLayer.Nodes[i];
                    NablaWeights[i] = 0;
                }
            }
            else
            {
                Bias = 0;
            }
        }
Example #2
0
 // Builds a layer of NumNodes freshly (randomly) initialised neurons, each
 // fully connected to PreviousLayer (pass null for the input layer).
 public NeuronLayer(int NumNodes, NeuronLayer PreviousLayer)
 {
     Nodes = new SigmoidNeuron[NumNodes];

     for (int n = 0; n < NumNodes; ++n)
     {
         Nodes[n] = new SigmoidNeuron(PreviousLayer);
     }
 }
Example #3
0
        // Builds a layer of NumNodes neurons. When NeuronData contains serialized
        // entries, neuron i is restored from NeuronData[i]; otherwise every neuron
        // takes the null-data construction path (fresh state).
        public NeuronLayer(int NumNodes, string[] NeuronData, NeuronLayer PreviousLayer)
        {
            Nodes = new SigmoidNeuron[NumNodes];

            // Guard against a null array as well as an empty one, so callers that
            // have no saved data get fresh neurons instead of a NullReferenceException
            // on NeuronData.Length.
            bool hasData = NeuronData != null && NeuronData.Length > 0;

            for (int i = 0; i < NumNodes; ++i)
            {
                Nodes[i] = new SigmoidNeuron(hasData ? NeuronData[i] : null, PreviousLayer);
            }
        }
        // Builds the network: restores it from the serialized layout asset when one
        // is assigned (NetLayout), otherwise constructs a fresh net of NumLayers
        // layers with randomly initialised neurons.
        public void SetupNet()
        {
            // Pre-existing layout takes precedence over building a new net.
            if (NetLayout != null)
            {
                LoadNetFromFile();
                return;
            }

            NetLayers = new NeuronLayer[NumLayers];

            for (int layer = 0; layer < NumLayers; ++layer)
            {
                // The input layer (layer 0) has no previous layer to connect to.
                NeuronLayer previous = (layer == 0) ? null : NetLayers[layer - 1];
                NetLayers[layer] = new NeuronLayer(LayerNeuronCounts[layer], previous);
            }

            // Output buffers are sized to match the final layer.
            int outputCount = LayerNeuronCounts[NumLayers - 1];
            Outputs         = new float[outputCount];
            ExpectedOutputs = new float[outputCount];
        }
        // Rebuilds the network from the serialized layout asset (NetLayout). The asset
        // stores one neuron per line; GetNeuronRange slices out the lines belonging to
        // each layer in turn.
        void LoadNetFromFile()
        {
            // One serialized neuron per line of the layout text.
            string[] Neurons = NetLayout.text.Split('\n');

            NetLayers = new NeuronLayer[NumLayers];

            // Index of the first serialized neuron belonging to the current layer.
            // The input layer (i == 0) has no stored neurons, so it never advances
            // this offset.
            int StartCount = 0;

            for (int i = 0; i < NumLayers; ++i)
            {
                // For the input layer the end index is 0, i.e. an empty range, so its
                // neurons are built without saved data. NOTE(review): assumes
                // GetNeuronRange treats (StartCount, 0) as "no neurons" — confirm
                // against its implementation.
                string[] LayerNeuronData = GetNeuronRange(StartCount, (i > 0) ? LayerNeuronCounts[i] + StartCount : 0, Neurons);

                NetLayers[i] = new NeuronLayer(LayerNeuronCounts[i], LayerNeuronData, (i > 0) ? NetLayers[i - 1] : null);

                // Advance past this layer's neurons; must happen AFTER the slice above.
                StartCount += (i > 0) ? LayerNeuronCounts[i] : 0;
            }

            // Output buffers are sized to match the final (output) layer.
            Outputs         = new float[LayerNeuronCounts[NumLayers - 1]];
            ExpectedOutputs = new float[LayerNeuronCounts[NumLayers - 1]];
        }
Example #6
0
        // Creates a randomly initialised neuron fully connected to PreviousLayer.
        // A null PreviousLayer marks an input-layer neuron: no weights, zero bias.
        public SigmoidNeuron(NeuronLayer PreviousLayer)
        {
            if (PreviousLayer == null)
            {
                Bias = 0;
                return;
            }

            int inputCount = PreviousLayer.Nodes.Length;

            Weights      = new float[inputCount];
            Inputs       = new SigmoidNeuron[inputCount];
            NablaWeights = new float[inputCount];

            for (int n = 0; n < inputCount; ++n)
            {
                Inputs[n]       = PreviousLayer.Nodes[n];
                Weights[n]      = Random.Range(-1f, 1f); // uniform initial weight
                NablaWeights[n] = 0;                     // gradient accumulator starts empty
            }

            Bias = Random.Range(-1f, 1f);
        }
Example #7
0
        // Creates a randomly initialised neuron fully connected to PreviousLayer.
        // A null PreviousLayer marks an input-layer neuron: no weights, zero bias.
        public SigmoidNeuron(NeuronLayer PreviousLayer)
        {
            if (PreviousLayer != null)
            {
                // Seed explicitly: on .NET Framework the parameterless System.Random
                // constructor seeds from the system tick count, so many neurons built
                // within the same tick would all draw IDENTICAL weight sequences. A
                // GUID-derived seed gives each neuron an independent stream. (A shared
                // static Random would be the cleaner fix, but requires a class-level
                // field outside this constructor.)
                System.Random myRand = new System.Random(System.Guid.NewGuid().GetHashCode());
                Weights      = new float[PreviousLayer.Nodes.Length];
                Inputs       = new SigmoidNeuron[PreviousLayer.Nodes.Length];
                NablaWeights = new float[PreviousLayer.Nodes.Length];

                for (int i = 0; i < PreviousLayer.Nodes.Length; ++i)
                {
                    Inputs[i]       = PreviousLayer.Nodes[i];
                    // Map NextDouble() from [0, 1) onto [-1, 1).
                    Weights[i]      = (float)((myRand.NextDouble() - 0.5) * 2);
                    NablaWeights[i] = 0; // gradient accumulator starts empty
                }

                Bias = (float)((myRand.NextDouble() - 0.5) * 2);
            }
            else
            {
                Bias = 0;
            }
        }