Code Example #1
        public static void evaluateNetwork(BasicNetwork network, IMLDataSet training)
        {
            double total     = 0;
            int    seed      = 0;
            int    completed = 0;

            Stopwatch sw = new Stopwatch();

            sw.Start();
            while (completed < SAMPLE_SIZE)
            {
                new ConsistentRandomizer(-1, 1, seed).Randomize(network);
                int iter = Evaluate(network, training);
                if (iter == -1)
                {
                    seed++;
                }
                else
                {
                    total += iter;
                    seed++;
                    completed++;
                }
            }

            sw.Stop();


            Console.WriteLine(network.GetActivation(1).GetType().Name + ": time="
                              + Format.FormatInteger((int)sw.ElapsedMilliseconds)
                              + "ms, Avg Iterations: "
                              + Format.FormatInteger((int)(total / SAMPLE_SIZE)));
        }
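A minimal usage sketch for this benchmark helper, assuming it sits in a class that also defines the SAMPLE_SIZE constant and the Evaluate method referenced above; the XOR network and data below are illustrative assumptions, not part of the original snippet.

        // Hypothetical caller (names and data are assumptions).
        double[][] xorInput = { new[] { 0d, 0d }, new[] { 0d, 1d },
                                new[] { 1d, 0d }, new[] { 1d, 1d } };
        double[][] xorIdeal = { new[] { 0d }, new[] { 1d }, new[] { 1d }, new[] { 0d } };
        IMLDataSet training = new BasicMLDataSet(xorInput, xorIdeal);

        var network = new BasicNetwork();
        network.AddLayer(new BasicLayer(null, true, 2));
        network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 4));
        network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
        network.Structure.FinalizeStructure();
        network.Reset();

        evaluateNetwork(network, training);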
Code Example #2
        /// <summary>
        /// Randomize the connections between two layers.
        /// </summary>
        /// <param name="network">The network to randomize.</param>
        /// <param name="fromLayer">The starting layer.</param>
        private void RandomizeSynapse(BasicNetwork network, int fromLayer)
        {
            int toLayer             = fromLayer + 1;
            int toCount             = network.GetLayerNeuronCount(toLayer);
            int fromCount           = network.GetLayerNeuronCount(fromLayer);
            int fromCountTotalCount = network.GetLayerTotalNeuronCount(fromLayer);
            IActivationFunction af  = network.GetActivation(toLayer);
            double low  = CalculateRange(af, Double.NegativeInfinity);
            double high = CalculateRange(af, Double.PositiveInfinity);

            double b = 0.7d * Math.Pow(toCount, (1d / fromCount)) / (high - low);

            for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
            {
                if (fromCount != fromCountTotalCount)
                {
                    double w = RangeRandomizer.Randomize(-b, b);
                    network.SetWeight(fromLayer, fromCount, toNeuron, w);
                }
                for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
                {
                    double w = RangeRandomizer.Randomize(0, b);
                    network.SetWeight(fromLayer, fromNeuron, toNeuron, w);
                }
            }
        }
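RandomizeSynapse initializes a single layer boundary; a plausible outer driver (an assumption, not shown in the snippet) would simply walk every consecutive layer pair:

        // Assumed driver loop: randomize every fromLayer -> fromLayer+1 boundary.
        public void Randomize(BasicNetwork network)
        {
            for (int fromLayer = 0; fromLayer < network.LayerCount - 1; fromLayer++)
            {
                RandomizeSynapse(network, fromLayer);
            }
        }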
Code Example #3
        /// <summary>
        ///   Measure the performance of the network
        /// </summary>
        /// <param name = "network">Network to analyze</param>
        /// <param name = "dataset">Dataset with input and ideal data</param>
        /// <returns>Fraction of output bits the network classified correctly.</returns>
        public static double MeasurePerformance(BasicNetwork network, BasicNeuralDataSet dataset)
        {
            int   correctBits = 0;
            float threshold;
            IActivationFunction activationFunction = network.GetActivation(network.LayerCount - 1); // get the activation function of the output layer

            if (activationFunction is ActivationSigmoid)
            {
                threshold = 0.5f; /* > 0.5, range of sigmoid [0..1]*/
            }
            else if (activationFunction is ActivationTANH)
            {
                threshold = 0.0f; /*> 0, range of bipolar sigmoid is [-1..1]*/
            }
            else
            {
                throw new ArgumentException("Bad activation function");
            }

            int n = (int)dataset.Count;

            Parallel.For(
                0,
                n,
                i =>
            {
                IMLData actualOutputs = network.Compute(dataset.Data[i].Input);
                lock (LockObject)
                {
                    for (int j = 0, k = actualOutputs.Count; j < k; j++)
                    {
                        if ((actualOutputs[j] > threshold && dataset.Data[i].Ideal[j] > threshold) ||
                            (actualOutputs[j] < threshold && dataset.Data[i].Ideal[j] < threshold))
                        {
                            correctBits++;
                        }
                    }
                }
            });

            long totalOutputBitsCount = dataset.Count * dataset.Data[0].Ideal.Count;

            return((double)correctBits / totalOutputBitsCount);
        }
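An illustrative call, assuming a trained network and the XOR arrays from the first example; reporting via Console is just one option.

        // Hypothetical usage: report the fraction of correctly classified bits.
        var xorSet = new BasicNeuralDataSet(xorInput, xorIdeal);
        double correct = MeasurePerformance(network, xorSet);
        Console.WriteLine("Correct bits: {0:P1}", correct);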
Code Example #4
        /// <summary>
        /// Create a freeform network from a basic network.
        /// </summary>
        /// <param name="network">The basic network to use.</param>
        public FreeformNetwork(BasicNetwork network)
        {
            if (network.LayerCount < 2)
            {
                throw new FreeformNetworkError(
                          "The BasicNetwork must have at least two layers to be converted.");
            }

            // handle each layer
            IFreeformLayer previousLayer = null;

            for (int currentLayerIndex = 0;
                 currentLayerIndex < network.LayerCount;
                 currentLayerIndex++)
            {
                // create the layer
                IFreeformLayer currentLayer = _layerFactory.Factor();

                // Is this the input layer?
                if (_inputLayer == null)
                {
                    _inputLayer = currentLayer;
                }

                // Add the neurons for this layer
                for (int i = 0; i < network.GetLayerNeuronCount(currentLayerIndex); i++)
                {
                    // obtain the summation object.
                    IInputSummation summation = null;

                    if (previousLayer != null)
                    {
                        summation = _summationFactory.Factor(
                            network.GetActivation(currentLayerIndex));
                    }

                    // add the new neuron
                    currentLayer.Add(_neuronFactory.FactorRegular(summation));
                }

                // Fully connect this layer to previous
                if (previousLayer != null)
                {
                    ConnectLayersFromBasic(network, currentLayerIndex - 1,
                                           previousLayer, currentLayer);
                }

                // Add the bias neuron
                // The bias is added after connections so it has no inputs
                if (network.IsLayerBiased(currentLayerIndex))
                {
                    IFreeformNeuron biasNeuron = _neuronFactory.FactorRegular(null);
                    biasNeuron.IsBias     = true;
                    biasNeuron.Activation = network.GetLayerBiasActivation(currentLayerIndex);
                    currentLayer.Add(biasNeuron);
                }

                // update previous layer
                previousLayer = currentLayer;
            }

            // finally, set the output layer.
            _outputLayer = previousLayer;
        }
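A short, hedged sketch of converting an already-trained BasicNetwork and computing with the resulting freeform network; the input vector is illustrative only.

        // Illustrative conversion (assumes 'network' is a trained two-input BasicNetwork).
        var freeform = new FreeformNetwork(network);
        IMLData output = freeform.Compute(new BasicMLData(new[] { 1d, 0d }));
        Console.WriteLine("Freeform output: " + output[0]);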
Code Example #5
        /// <summary>
        /// Calculate the derivatives for this training set element.
        /// </summary>
        ///
        /// <param name="pair">The training set element.</param>
        /// <returns>The error for this training set element.</returns>
        private double CalculateDerivatives(IMLDataPair pair)
        {
            // error values
            double e   = 0.0d;
            double sum = 0.0d;

            _network.Compute(pair.Input);

            int fromLayer       = _network.LayerCount - 2;
            int toLayer         = _network.LayerCount - 1;
            int fromNeuronCount = _network.GetLayerTotalNeuronCount(fromLayer);
            int toNeuronCount   = _network.GetLayerNeuronCount(toLayer);

            double output = _network.Structure.Flat.LayerOutput[0];

            e = pair.Ideal[0] - output;

            for (int i = 0; i < fromNeuronCount; i++)
            {
                double lastOutput = _network.GetLayerOutput(fromLayer, i);

                _jacobian[_jacobianRow][_jacobianCol++] = CalcDerivative(
                    _network.GetActivation(toLayer), output) * lastOutput;
            }

            while (fromLayer > 0)
            {
                fromLayer--;
                toLayer--;
                fromNeuronCount = _network.GetLayerTotalNeuronCount(fromLayer);
                toNeuronCount   = _network.GetLayerNeuronCount(toLayer);

                // this.network.getLayerOutput(fromLayer, neuronNumber) holder.getResult().get(lastSynapse);

                // for each neuron in the current (to) layer
                for (int neuron = 0; neuron < toNeuronCount; neuron++)
                {
                    output = _network.GetLayerOutput(toLayer, neuron);

                    IActivationFunction function = _network.GetActivation(toLayer);

                    double w   = _network.GetWeight(toLayer, neuron, 0);
                    double val = CalcDerivative(function, output)
                                 * CalcDerivative2(function, sum) * w;

                    // for each incoming weight of this neuron (one per from-layer neuron)
                    for (int i = 0; i < fromNeuronCount; i++)
                    {
                        sum = 0.0d;
                        // for each neuron in the next layer
                        for (int j = 0; j < toNeuronCount; j++)
                        {
                            // for each weight of the next neuron
                            for (int k = 0; k < fromNeuronCount; k++)
                            {
                                sum += _network.GetWeight(fromLayer, k, j) * output;
                            }
                        }

                        _jacobian[_jacobianRow][_jacobianCol++] = val
                                                                  * _network.GetLayerOutput(fromLayer, i);
                    }
                }
            }

            // return error
            return(e);
        }
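The CalcDerivative helper is referenced but not shown; a minimal sketch of what it presumably delegates to, based on the two-argument derivative that Encog activation functions expose (an assumption about the surrounding class, not part of the snippet):

        // Assumed helper: delegate to the activation function's own derivative.
        // IActivationFunction.DerivativeFunction takes (before, after); for the
        // sigmoid-style functions used here only the 'after' value is needed.
        private static double CalcDerivative(IActivationFunction a, double d)
        {
            return a.DerivativeFunction(d, d);
        }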