/// <summary>
/// Randomize the connections between two layers.
/// </summary>
/// <param name="network">The network to randomize.</param>
/// <param name="fromLayer">The starting layer.</param>
        private void RandomizeSynapse(BasicNetwork network, int fromLayer)
        {
            int toLayer      = fromLayer + 1;
            int toNeurons    = network.GetLayerNeuronCount(toLayer);
            int fromNeurons  = network.GetLayerNeuronCount(fromLayer);
            int fromWithBias = network.GetLayerTotalNeuronCount(fromLayer);
            bool hasBias     = fromNeurons != fromWithBias;

            // The target layer's activation output range bounds the scale factor.
            IActivationFunction activation = network.GetActivation(toLayer);
            double low  = CalculateRange(activation, Double.NegativeInfinity);
            double high = CalculateRange(activation, Double.PositiveInfinity);

            // Nguyen-Widrow style beta factor.
            double beta = 0.7d * Math.Pow(toNeurons, (1d / fromNeurons)) / (high - low);

            for (int target = 0; target < toNeurons; target++)
            {
                // The bias weight (when present) draws from the full symmetric range.
                if (hasBias)
                {
                    double biasWeight = RangeRandomizer.Randomize(-beta, beta);
                    network.SetWeight(fromLayer, fromNeurons, target, biasWeight);
                }
                // Regular weights draw from the positive half of the range.
                for (int source = 0; source < fromNeurons; source++)
                {
                    double weight = RangeRandomizer.Randomize(0, beta);
                    network.SetWeight(fromLayer, source, target, weight);
                }
            }
        }
Example #2
0
        /// <summary>
        /// Format the network as a human readable string that lists the hidden
        /// layers.
        /// </summary>
        ///
        /// <param name="network">The network to format.</param>
        /// <returns>A human readable string.</returns>
        public static String NetworkToString(BasicNetwork network)
        {
            // A missing network has no structure to describe.
            if (network == null)
            {
                return("N/A");
            }

            var text = new StringBuilder();
            int hiddenIndex = 1;

            // display only hidden layers (everything between input and output)
            for (int layer = 1; layer < network.LayerCount - 1; layer++)
            {
                if (text.Length > 0)
                {
                    text.Append(",");
                }
                text.Append("H");
                text.Append(hiddenIndex++);
                text.Append("=");
                text.Append(network.GetLayerNeuronCount(layer));
            }

            return(text.ToString());
        }
        /// <summary>
        /// Randomize one level of a neural network.
        /// </summary>
        ///
        /// <param name="network">The network to randomize</param>
        /// <param name="fromLayer">The from level to randomize.</param>
        public override void Randomize(BasicNetwork network, int fromLayer)
        {
            int sourceCount = network.GetLayerTotalNeuronCount(fromLayer);
            int targetCount = network.GetLayerNeuronCount(fromLayer + 1);

            // Assign a freshly calculated value to every weight between the layers.
            for (int source = 0; source < sourceCount; source++)
            {
                for (int target = 0; target < targetCount; target++)
                {
                    double value = CalculateValue(targetCount);
                    network.SetWeight(fromLayer, source, target, value);
                }
            }
        }
        /// <summary>
        /// Verify that pruning neuron #1 from the hidden layer (layer 1) shrinks
        /// the network and keeps the surviving weights in their original order.
        /// </summary>
        public void TestPruneNeuronHidden()
        {
            BasicNetwork   network = ObtainNetwork();
            PruneSelective prune   = new PruneSelective(network);

            // Remove neuron #1 from layer #1 (the hidden layer).
            prune.Prune(1, 1);
            Assert.AreEqual(18, network.EncodedArrayLength());
            Assert.AreEqual(2, network.GetLayerNeuronCount(1));
            // The remaining weights keep their original values and ordering.
            Assert.AreEqual("1,3,4,5,7,8,9,11,12,13,15,16,17,18,19,23,24,25", network.DumpWeights());

            // The pruned network must be structurally identical to a freshly
            // built 2-2-4 feed-forward network.
            BasicNetwork model = EncogUtility.SimpleFeedForward(2, 2, 0, 4, false);

            CheckWithModel(model.Structure.Flat, network.Structure.Flat);
        }
Example #5
0
        /// <summary>
        /// Randomize one level of a neural network.
        /// </summary>
        ///
        /// <param name="network">The network to randomize</param>
        /// <param name="fromLayer">The from level to randomize.</param>
        public virtual void Randomize(BasicNetwork network, int fromLayer)
        {
            int sourceCount = network.GetLayerTotalNeuronCount(fromLayer);
            int targetCount = network.GetLayerNeuronCount(fromLayer + 1);

            // Perturb each existing weight in place rather than overwriting it
            // with an unrelated value.
            for (int source = 0; source < sourceCount; source++)
            {
                for (int target = 0; target < targetCount; target++)
                {
                    double current = network.GetWeight(fromLayer, source, target);
                    network.SetWeight(fromLayer, source, target, Randomize(current));
                }
            }
        }
        /// <summary>
        /// Verify that pruning neuron #1 from the output layer (layer 2) reduces
        /// the output count and keeps the surviving weights in their original order.
        /// </summary>
        public void TestPruneNeuronOutput()
        {
            BasicNetwork network = ObtainNetwork();

            // The unpruned network starts with four output neurons.
            Assert.AreEqual(4, network.OutputCount);
            PruneSelective prune = new PruneSelective(network);

            // Remove neuron #1 from layer #2 (the output layer).
            prune.Prune(2, 1);
            Assert.AreEqual(21, network.EncodedArrayLength());
            Assert.AreEqual(3, network.GetLayerNeuronCount(2));
            // The remaining weights keep their original values and ordering.
            Assert.AreEqual("1,2,3,4,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25", network.DumpWeights());

            // The pruned network must be structurally identical to a freshly
            // built 2-3-3 feed-forward network.
            BasicNetwork model = EncogUtility.SimpleFeedForward(2, 3, 0, 3, false);

            CheckWithModel(model.Structure.Flat, network.Structure.Flat);
            Assert.AreEqual(3, network.OutputCount);
        }
        /// <summary>
        /// Randomize one level of a neural network by rescaling each neuron's
        /// incoming weight vector to length <c>_beta</c>.
        /// </summary>
        ///
        /// <param name="network">The network to randomize</param>
        /// <param name="fromLayer">The from level to randomize.</param>
        public override void Randomize(BasicNetwork network, int fromLayer)
        {
            int fromCount = network.GetLayerTotalNeuronCount(fromLayer);
            int toCount   = network.GetLayerNeuronCount(fromLayer + 1);

            for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
            {
                // Euclidean norm of the incoming weight vector of this neuron.
                double n = 0.0;
                for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
                {
                    double w = network.GetWeight(fromLayer, fromNeuron, toNeuron);
                    n += w * w;
                }
                n = Math.Sqrt(n);

                // BUG FIX: when every incoming weight is zero the norm is zero,
                // and the original code wrote 0/0 = NaN into the network. Leave
                // an all-zero weight vector untouched instead.
                if (n == 0.0)
                {
                    continue;
                }

                // Rescale so the incoming weight vector has length _beta.
                for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
                {
                    double w = network.GetWeight(fromLayer, fromNeuron, toNeuron);
                    w = _beta * w / n;
                    network.SetWeight(fromLayer, fromNeuron, toNeuron, w);
                }
            }
        }
Example #8
0
        /// <summary>
        /// Change the neuron count for the network. If the count is increased then a
        /// zero-weighted neuron is added, which will not affect the output of the
        /// neural network. If the neuron count is decreased, then the weakest neuron
        /// will be removed.
        /// This method cannot be used to remove a bias neuron.
        /// </summary>
        ///
        /// <param name="layer">The layer to adjust.</param>
        /// <param name="neuronCount">The new neuron count for this layer.</param>
        /// <exception cref="NeuralNetworkError">If the requested count is not positive.</exception>
        public void ChangeNeuronCount(int layer, int neuronCount)
        {
            // BUG FIX: also reject negative counts; previously a negative value
            // slipped past the == 0 check and reached DecreaseNeuronCount.
            if (neuronCount <= 0)
            {
                throw new NeuralNetworkError("Can't decrease to zero neurons.");
            }

            int currentCount = _network.GetLayerNeuronCount(layer);

            // is there anything to do?
            if (neuronCount == currentCount)
            {
                return;
            }

            if (neuronCount > currentCount)
            {
                IncreaseNeuronCount(layer, neuronCount);
            }
            else
            {
                DecreaseNeuronCount(layer, neuronCount);
            }
        }
        /// <summary>
        /// Construct a network analyze class. Analyze the specified network.
        /// </summary>
        ///
        /// <param name="network">The network to analyze.</param>
        public AnalyzeNetwork(BasicNetwork network)
        {
            int disabled = 0;
            int total = 0;
            IList<Double> biasValues = new List<Double>();
            IList<Double> weightValues = new List<Double>();
            IList<Double> everyValue = new List<Double>();

            for (int layer = 0; layer < network.LayerCount - 1; layer++)
            {
                int regular = network.GetLayerNeuronCount(layer);
                int withBias = network.GetLayerTotalNeuronCount(layer);
                int targets = network.GetLayerNeuronCount(layer + 1);

                // Walk the regular neurons first and then the bias neuron (when
                // the layer has one); this keeps the weights-then-bias ordering.
                for (int source = 0; source < withBias; source++)
                {
                    for (int target = 0; target < targets; target++)
                    {
                        double v = network.GetWeight(layer, source, target);

                        // Connections below the limit count as disabled.
                        if (network.Structure.ConnectionLimited
                            && Math.Abs(v) < network.Structure.ConnectionLimit)
                        {
                            disabled++;
                        }

                        if (source < regular)
                        {
                            weightValues.Add(v);
                        }
                        else
                        {
                            biasValues.Add(v);
                        }
                        everyValue.Add(v);
                        total++;
                    }
                }
            }

            _disabledConnections = disabled;
            _totalConnections = total;
            _weights = new NumericRange(weightValues);
            _bias = new NumericRange(biasValues);
            _weightsAndBias = new NumericRange(everyValue);
            _weightValues = EngineArray.ListToDouble(weightValues);
            _allValues = EngineArray.ListToDouble(everyValue);
            _biasValues = EngineArray.ListToDouble(biasValues);
        }
        /// <summary>
        /// Construct a network analyze class. Analyze the specified network.
        /// </summary>
        ///
        /// <param name="network">The network to analyze.</param>
        public AnalyzeNetwork(BasicNetwork network)
        {
            int disabledCount = 0;
            IList<Double> biases = new List<Double>();
            IList<Double> weights = new List<Double>();
            IList<Double> combined = new List<Double>();

            for (int layer = 0; layer < network.LayerCount - 1; layer++)
            {
                int fromNeurons = network.GetLayerNeuronCount(layer);
                int fromTotal = network.GetLayerTotalNeuronCount(layer);
                int toNeurons = network.GetLayerNeuronCount(layer + 1);

                // Connections from the regular (non-bias) neurons.
                for (int from = 0; from < fromNeurons; from++)
                {
                    for (int to = 0; to < toNeurons; to++)
                    {
                        double value = network.GetWeight(layer, from, to);

                        // Connections below the limit count as disabled.
                        if (network.Structure.ConnectionLimited
                            && Math.Abs(value) < network.Structure.ConnectionLimit)
                        {
                            disabledCount++;
                        }

                        weights.Add(value);
                        combined.Add(value);
                    }
                }

                // Connections from the bias neuron, when the layer has one.
                if (fromNeurons != fromTotal)
                {
                    for (int to = 0; to < toNeurons; to++)
                    {
                        double value = network.GetWeight(layer, fromNeurons, to);

                        if (network.Structure.ConnectionLimited
                            && Math.Abs(value) < network.Structure.ConnectionLimit)
                        {
                            disabledCount++;
                        }

                        biases.Add(value);
                        combined.Add(value);
                    }
                }
            }

            _disabledConnections = disabledCount;
            // Exactly one entry was appended to the combined list per connection.
            _totalConnections = combined.Count;
            _weights = new NumericRange(weights);
            _bias = new NumericRange(biases);
            _weightsAndBias = new NumericRange(combined);
            _weightValues = EngineArray.ListToDouble(weights);
            _allValues = EngineArray.ListToDouble(combined);
            _biasValues = EngineArray.ListToDouble(biases);
        }
        /// <summary>
        /// Create a freeform network from a basic network.
        /// </summary>
        /// <param name="network">The basic network to use.</param>
        /// <exception cref="FreeformNetworkError">If the network has fewer than two layers.</exception>
        public FreeformNetwork(BasicNetwork network)
        {
            if (network.LayerCount < 2)
            {
                throw new FreeformNetworkError(
                          "The BasicNetwork must have at least two layers to be converted.");
            }

            // handle each layer
            IFreeformLayer previousLayer = null;

            for (int currentLayerIndex = 0;
                 currentLayerIndex < network
                 .LayerCount;
                 currentLayerIndex++)
            {
                // create the layer
                IFreeformLayer currentLayer = _layerFactory.Factor();

                // Is this the input layer? (true only on the first iteration)
                if (_inputLayer == null)
                {
                    _inputLayer = currentLayer;
                }

                // Add the neurons for this layer
                for (int i = 0; i < network.GetLayerNeuronCount(currentLayerIndex); i++)
                {
                    // obtain the summation object.
                    // Input-layer neurons have no incoming connections, so they
                    // get no summation (null).
                    IInputSummation summation = null;

                    if (previousLayer != null)
                    {
                        summation = _summationFactory.Factor(network
                                                             .GetActivation(currentLayerIndex));
                    }

                    // add the new neuron
                    currentLayer.Add(_neuronFactory.FactorRegular(summation));
                }

                // Fully connect this layer to previous
                if (previousLayer != null)
                {
                    ConnectLayersFromBasic(network, currentLayerIndex - 1,
                                           previousLayer, currentLayer);
                }

                // Add the bias neuron
                // The bias is added after connections so it has no inputs
                if (network.IsLayerBiased(currentLayerIndex))
                {
                    IFreeformNeuron biasNeuron = _neuronFactory
                                                 .FactorRegular(null);
                    biasNeuron.IsBias     = true;
                    biasNeuron.Activation = network
                                            .GetLayerBiasActivation(currentLayerIndex);
                    currentLayer.Add(biasNeuron);
                }

                // update previous layer
                previousLayer = currentLayer;
            }

            // finally, set the output layer (the last layer processed).
            _outputLayer = previousLayer;
        }
        /// <summary>
        /// Construct a network analyze class. Analyze the specified network.
        /// </summary>
        ///
        /// <param name="network">The network to analyze.</param>
        public AnalyzeNetwork(BasicNetwork network)
        {
            int assignDisabled = 0;
            IList<Double> biasList = new List<Double>();
            IList<Double> weightList = new List<Double>();
            IList<Double> allList = new List<Double>();

            for (int layerNumber = 0; layerNumber < network.LayerCount - 1; layerNumber++)
            {
                int fromCount = network.GetLayerNeuronCount(layerNumber);
                int fromBiasCount = network
                    .GetLayerTotalNeuronCount(layerNumber);
                int toCount = network.GetLayerNeuronCount(layerNumber + 1);

                // weights
                for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
                {
                    for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
                    {
                        double v = network.GetWeight(layerNumber, fromNeuron,
                                                     toNeuron);
                        // BUG FIX: connections below the connection limit were
                        // previously never counted as disabled.
                        if (network.Structure.ConnectionLimited
                            && Math.Abs(v) < network.Structure.ConnectionLimit)
                        {
                            assignDisabled++;
                        }
                        weightList.Add(v);
                        allList.Add(v);
                    }
                }

                // bias
                if (fromCount != fromBiasCount)
                {
                    int biasNeuron = fromCount;
                    for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
                    {
                        double v = network.GetWeight(layerNumber, biasNeuron,
                                                       toNeuron);
                        if (network.Structure.ConnectionLimited
                            && Math.Abs(v) < network.Structure.ConnectionLimit)
                        {
                            assignDisabled++;
                        }
                        biasList.Add(v);
                        allList.Add(v);
                    }
                }
            }

            // BUG FIX: _disabledConnections and _totalConnections were hard-coded
            // to zero, leaving the analysis inconsistent with the connection data
            // gathered above (one allList entry exists per connection).
            _disabledConnections = assignDisabled;
            _totalConnections = allList.Count;
            _weights = new NumericRange(weightList);
            _bias = new NumericRange(biasList);
            _weightsAndBias = new NumericRange(allList);
            _weightValues = EngineArray.ListToDouble(weightList);
            _allValues = EngineArray.ListToDouble(allList);
            _biasValues = EngineArray.ListToDouble(biasList);
        }
Example #13
0
        /// <summary>
        /// Calculate the derivatives for this training set element, filling in
        /// one row of the Jacobian matrix.
        /// </summary>
        ///
        /// <param name="pair">The training set element.</param>
        /// <returns>The sum squared of errors.</returns>
        private double CalculateDerivatives(IMLDataPair pair)
        {
            // error values
            double e   = 0.0d;
            double sum = 0.0d;

            // Forward pass so the layer outputs reflect this training element.
            _network.Compute(pair.Input);

            int fromLayer       = _network.LayerCount - 2;
            int toLayer         = _network.LayerCount - 1;
            int fromNeuronCount = _network.GetLayerTotalNeuronCount(fromLayer);
            int toNeuronCount   = _network.GetLayerNeuronCount(toLayer);

            // NOTE(review): only output neuron 0 is read here and in the error
            // below -- this appears to assume a single-output network; confirm
            // against callers.
            double output = _network.Structure.Flat.LayerOutput[0];

            e = pair.Ideal[0] - output;

            // Jacobian entries for the weights feeding the output layer.
            for (int i = 0; i < fromNeuronCount; i++)
            {
                double lastOutput = _network.GetLayerOutput(fromLayer, i);

                _jacobian[_jacobianRow][_jacobianCol++] = CalcDerivative(
                    _network.GetActivation(toLayer), output) * lastOutput;
            }

            // Walk backwards through the remaining layer pairs.
            while (fromLayer > 0)
            {
                fromLayer--;
                toLayer--;
                fromNeuronCount = _network.GetLayerTotalNeuronCount(fromLayer);
                toNeuronCount   = _network.GetLayerNeuronCount(toLayer);

                // this.network.getLayerOutput(fromLayer, neuronNumber) holder.getResult().get(lastSynapse);

                // for each neuron in the input layer
                for (int neuron = 0; neuron < toNeuronCount; neuron++)
                {
                    output = _network.GetLayerOutput(toLayer, neuron);

                    IActivationFunction function = _network.GetActivation(toLayer);

                    // NOTE(review): 'sum' here still holds the value left over
                    // from the previous iteration (0.0 on the first pass), since
                    // it is only assigned inside the loop below -- this looks
                    // suspicious; verify against the reference LMA implementation.
                    double w   = _network.GetWeight(toLayer, neuron, 0);
                    double val = CalcDerivative(function, output)
                                 * CalcDerivative2(function, sum) * w;

                    // for each weight of the input neuron
                    for (int i = 0; i < fromNeuronCount; i++)
                    {
                        sum = 0.0d;
                        // for each neuron in the next layer
                        for (int j = 0; j < toNeuronCount; j++)
                        {
                            // for each weight of the next neuron
                            for (int k = 0; k < fromNeuronCount; k++)
                            {
                                sum += _network.GetWeight(fromLayer, k, j) * output;
                            }
                        }

                        _jacobian[_jacobianRow][_jacobianCol++] = val
                                                                  * _network.GetLayerOutput(fromLayer, i);
                    }
                }
            }

            // return error
            // NOTE(review): despite the <returns> doc, this is the raw error
            // (ideal - output), not a squared sum.
            return(e);
        }