Example #1
        /// <summary>
        /// Construct the SVD training object.
        /// </summary>
        /// <param name="network">The network to train. Must have a single output neuron.</param>
        /// <param name="training">The training data to use. Must be indexable.</param>
        public SVDTraining(BasicNetwork network, INeuralDataSet training)
        {
            ILayer outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);

            if (outputLayer == null)
            {
                throw new TrainingError("SVD requires an output layer.");
            }

            if (outputLayer.NeuronCount != 1)
            {
                throw new TrainingError("SVD requires an output layer with a single neuron.");
            }

            if (network.GetLayer(RadialBasisPattern.RBF_LAYER) == null)
            {
                throw new TrainingError("SVD is only tested to work on radial basis function networks.");
            }

            rbfLayer = (RadialBasisFunctionLayer)network.GetLayer(RadialBasisPattern.RBF_LAYER);

            this.Training       = training;
            this.network        = network;
            this.trainingLength = (int)this.Training.InputSize;

            BasicNeuralData input = new BasicNeuralData(this.Training.InputSize);
            BasicNeuralData ideal = new BasicNeuralData(this.Training.IdealSize);

            this.pair = new BasicNeuralDataPair(input, ideal);
        }
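
A minimal construction sketch for this trainer. This is illustrative only: INPUT and IDEAL stand in for the caller's training arrays, and the pattern setup mirrors Example #6 below.

        // Sketch: build the kind of network SVDTraining expects, then construct the trainer.
        RadialBasisPattern pattern = new RadialBasisPattern();
        pattern.InputNeurons  = 2;
        pattern.OutputNeurons = 1;                 // SVD requires exactly one output neuron
        pattern.AddHiddenLayer(10);
        BasicNetwork network = pattern.Generate();

        INeuralDataSet training = new BasicNeuralDataSet(INPUT, IDEAL);  // INPUT/IDEAL: placeholder double[][]
        ITrain train = new SVDTraining(network, training);               // throws TrainingError for non-RBF networks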
Example #2
        /// <summary>
        /// Generate the RBF network.
        /// </summary>
        /// <returns>The neural network.</returns>
        public BasicNetwork Generate()
        {
            ILayer input = new BasicLayer(new ActivationLinear(), false,
                                          this.inputNeurons);
            ILayer                   output   = new BasicLayer(new ActivationLinear(), false, this.outputNeurons);
            BasicNetwork             network  = new BasicNetwork();
            RadialBasisFunctionLayer rbfLayer = new RadialBasisFunctionLayer(
                this.hiddenNeurons);

            network.AddLayer(input);
            network.AddLayer(rbfLayer, SynapseType.Direct);
            network.AddLayer(output);
            network.Structure.FinalizeStructure();
            network.Reset();
            network.TagLayer(RBF_LAYER, rbfLayer);
            rbfLayer.RandomizeRBFCentersAndWidths(this.inputNeurons, -1, 1, RBFEnum.Gaussian);
            int y = PatternConst.START_Y;

            input.X    = PatternConst.START_X;
            input.Y    = y;
            y         += PatternConst.INC_Y;
            rbfLayer.X = PatternConst.START_X;
            rbfLayer.Y = y;
            y         += PatternConst.INC_Y;
            output.X   = PatternConst.START_X;
            output.Y   = y;
            return(network);
        }
Example #3
 private void SaveRBF(WriteXML xmlout, RadialBasisFunctionLayer layer)
 {
     xmlout.BeginTag(RadialBasisFunctionLayerPersistor.PROPERTY_RBF);
     foreach (IRadialBasisFunction rbf in layer.RadialBasisFunction)
     {
         xmlout.BeginTag(rbf.GetType().Name);
         xmlout.AddProperty(PROPERTY_CENTERS, rbf.Centers, rbf.Centers.Length);
         xmlout.AddProperty(PROPERTY_PEAK, rbf.Peak);
         xmlout.AddProperty(PROPERTY_WIDTH, rbf.Width);
         xmlout.EndTag();
     }
     xmlout.EndTag();
 }
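
For orientation, the element nesting this produces looks roughly like the sketch below. The tag names shown are the string constants used above; their actual values are not visible in this code, so treat them as placeholders.

 // <PROPERTY_RBF>
 //     <GaussianFunction>            (rbf.GetType().Name)
 //         PROPERTY_CENTERS = centers array
 //         PROPERTY_PEAK    = peak
 //         PROPERTY_WIDTH   = width
 //     </GaussianFunction>
 // </PROPERTY_RBF>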
Example #4
        /// <summary>
        /// Save an RBF layer.
        /// </summary>
        /// <param name="obj">The object to save.</param>
        /// <param name="xmlout">XML stream to write to.</param>
        public void Save(IEncogPersistedObject obj, WriteXML xmlout)
        {
            PersistorUtil.BeginEncogObject(
                EncogPersistedCollection.TYPE_RADIAL_BASIS_LAYER, xmlout, obj,
                false);
            RadialBasisFunctionLayer layer = (RadialBasisFunctionLayer)obj;

            xmlout.AddProperty(BasicLayerPersistor.PROPERTY_NEURONS, layer.NeuronCount);
            xmlout.AddProperty(BasicLayerPersistor.PROPERTY_X, layer.X);
            xmlout.AddProperty(BasicLayerPersistor.PROPERTY_Y, layer.Y);

            SaveRBF(xmlout, layer);

            xmlout.EndTag();
        }
Example #5
        /// <summary>
        /// Load an RBF layer.
        /// </summary>
        /// <param name="xmlin">The XML to read from.</param>
        /// <returns>The object that was loaded.</returns>
        public IEncogPersistedObject Load(ReadXML xmlin)
        {
            int neuronCount = 0;
            int x           = 0;
            int y           = 0;

            IRadialBasisFunction[] rbfs = new IRadialBasisFunction[0];

            String end = xmlin.LastTag.Name;

            while (xmlin.ReadToTag())
            {
                if (xmlin.IsIt(BasicLayerPersistor.PROPERTY_NEURONS, true))
                {
                    neuronCount = xmlin.ReadIntToTag();
                }
                else if (xmlin.IsIt(BasicLayerPersistor.PROPERTY_X, true))
                {
                    x = xmlin.ReadIntToTag();
                }
                else if (xmlin.IsIt(BasicLayerPersistor.PROPERTY_Y, true))
                {
                    y = xmlin.ReadIntToTag();
                }
                else if (xmlin.IsIt(RadialBasisFunctionLayerPersistor.PROPERTY_RBF,
                                    true))
                {
                    rbfs = LoadAllRBF(xmlin);
                }
                else if (xmlin.IsIt(end, false))
                {
                    break;
                }
            }

            RadialBasisFunctionLayer layer = new RadialBasisFunctionLayer(neuronCount);

            layer.RadialBasisFunction = rbfs;
            layer.X = x;
            layer.Y = y;

            return(layer);
        }
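
A hedged round-trip sketch pairing Save and Load. Only Save and Load come from this persistor; the idea that WriteXML and ReadXML can wrap a stream is an assumption, so adjust the construction to the actual API.

        var persistor = new RadialBasisFunctionLayerPersistor();

        // Assumption: WriteXML/ReadXML accept a stream; 'layer' is an existing RadialBasisFunctionLayer.
        using (var outStream = new System.IO.FileStream("layer.xml", System.IO.FileMode.Create))
        {
            persistor.Save(layer, new WriteXML(outStream));
        }

        using (var inStream = new System.IO.FileStream("layer.xml", System.IO.FileMode.Open))
        {
            var restored = (RadialBasisFunctionLayer)persistor.Load(new ReadXML(inStream));
        }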
Example #6
        public void Execute(IExampleInterface app)
        {
            //Specify the number of dimensions and the number of neurons per dimension
            int dimensions             = 2;
            int numNeuronsPerDimension = 7;

            //Set the standard RBF neuron width.
            //Literature seems to suggest this is a good default value.
            double volumeNeuronWidth = 2.0 / numNeuronsPerDimension;
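            //With the values above this works out to 2.0 / 7, about 0.29, in the unit sample space.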

            //RBF networks can struggle with flat regions at the edge of the sample space.
            //We have added the ability to include wider neurons on the sample space boundary,
            //which greatly improves fitting to flats.
            bool includeEdgeRBFs = true;

            #region Setup
            //General setup is the same as before
            RadialBasisPattern pattern = new RadialBasisPattern();
            pattern.InputNeurons  = dimensions;
            pattern.OutputNeurons = 1;

            //Total number of neurons required.
            //The total number of edges is computed for possible future use but is not used further here.
            int numNeurons = (int)Math.Pow(numNeuronsPerDimension, dimensions);
            int numEdges   = (int)(dimensions * Math.Pow(2, dimensions - 1));

            pattern.AddHiddenLayer(numNeurons);

            BasicNetwork             network  = pattern.Generate();
            RadialBasisFunctionLayer rbfLayer = (RadialBasisFunctionLayer)network.GetLayer(RadialBasisPattern.RBF_LAYER);
            network.Reset();

            //Position the multidimensional RBF neurons, with equal spacing, within the provided sample space from 0 to 1.
            rbfLayer.SetRBFCentersAndWidthsEqualSpacing(0, 1, RBFEnum.Gaussian, dimensions, volumeNeuronWidth, includeEdgeRBFs);

            #endregion

            //Create some training data that cannot easily be represented by Gaussians.
            //There are other training examples for both 1D and 2D.
            //Degenerate training data only provides outputs of 1 or 0 (averaging over all outputs
            //for a given set of inputs would produce something approaching the smooth training data).
            //Smooth training data provides true values for the provided input dimensions.
            Create2DSmoothTainingDataGit();

            //Create the training set and train.
            INeuralDataSet trainingSet = new BasicNeuralDataSet(INPUT, IDEAL);
            ITrain         train       = new SVDTraining(network, trainingSet);

            //SVD is a single-step solve, so the loop below runs exactly once.
            int epoch = 1;
            do
            {
                train.Iteration();
                Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error);
                epoch++;
            } while ((epoch < 1) && (train.Error > 0.001));

            // test the neural network
            Console.WriteLine("Neural Network Results:");

            //Create a testing array which may be of a higher resolution than the original training data
            Set2DTestingArrays(100);
            trainingSet = new BasicNeuralDataSet(INPUT, IDEAL);

            //Write out the results data
            using (var sw = new System.IO.StreamWriter("results.csv", false))
            {
                foreach (INeuralDataPair pair in trainingSet)
                {
                    INeuralData output = network.Compute(pair.Input);
                    //1D//sw.WriteLine(InverseScale(pair.Input[0]) + ", " + Chop(InverseScale(output[0])));// + ", " + pair.Ideal[0]);
                    sw.WriteLine(InverseScale(pair.Input[0]) + ", " + InverseScale(pair.Input[1]) + ", " + Chop(InverseScale(output[0])));// + ", " + pair.Ideal[0]);// + ",ideal=" + pair.Ideal[0]);
                    //3D//sw.WriteLine(InverseScale(pair.Input[0]) + ", " + InverseScale(pair.Input[1]) + ", " + InverseScale(pair.Input[2]) + ", " + Chop(InverseScale(output[0])));// + ", " + pair.Ideal[0]);// + ",ideal=" + pair.Ideal[0]);
                    //Console.WriteLine(pair.Input[0] + ", actual=" + output[0] + ",ideal=" + pair.Ideal[0]);
                }
            }

            Console.WriteLine("\nFit output saved to results.csv");
            Console.WriteLine("\nComplete - Please press the 'any' key to close.");
            Console.ReadKey();
        }
Example #7
        /// <summary>
        /// Prune one of the neurons from this layer. Removes the corresponding
        /// entries from this layer's weight matrix and from connected layers.
        /// </summary>
        /// <param name="targetLayer">The layer to prune the neuron from.</param>
        /// <param name="neuron">The neuron to prune. Zero specifies the first neuron.</param>
        public void Prune(ILayer targetLayer, int neuron)
        {
            // delete a row on this matrix
            foreach (ISynapse synapse in targetLayer.Next)
            {
                if (synapse.WeightMatrix != null)
                {
                    synapse.WeightMatrix =
                        MatrixMath.DeleteRow(synapse.WeightMatrix, neuron);
                }
            }

            // delete a column on the previous
            ICollection <ILayer> previous = this.network.Structure
                                            .GetPreviousLayers(targetLayer);

            foreach (ILayer prevLayer in previous)
            {
                foreach (ISynapse synapse in prevLayer.Next)
                {
                    if (synapse.WeightMatrix != null)
                    {
                        synapse.WeightMatrix =
                            MatrixMath.DeleteCol(synapse.WeightMatrix,
                                                 neuron);
                    }
                }
            }

            // remove the bias
            if (targetLayer.HasBias)
            {
                double[] newBias = new double[targetLayer
                                              .NeuronCount - 1];

                int targetIndex = 0;
                for (int i = 0; i < targetLayer.NeuronCount; i++)
                {
                    if (i != neuron)
                    {
                        newBias[targetIndex++] = targetLayer.BiasWeights[i];
                    }
                }

                targetLayer.BiasWeights = newBias;
            }

            // adjust RBF
            if (targetLayer is RadialBasisFunctionLayer)
            {
                RadialBasisFunctionLayer rbf    = (RadialBasisFunctionLayer)targetLayer;
                IRadialBasisFunction[]   newRBF = new IRadialBasisFunction[targetLayer
                                                                           .NeuronCount - 1];

                int targetIndex = 0;
                for (int i = 0; i < targetLayer.NeuronCount; i++)
                {
                    if (i != neuron)
                    {
                        newRBF[targetIndex++] = rbf.RadialBasisFunction[i];
                    }
                }
                rbf.RadialBasisFunction = newRBF;
            }

            // update the neuron count
            targetLayer.NeuronCount -= 1;
        }
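
A hedged usage sketch. The containing class is not shown here; the assumption below is that it is Encog's selective-pruning helper constructed from the network (PruneSelective-style).

        // Assumption: 'prune' wraps the network that the target layer belongs to.
        var prune = new PruneSelective(network);
        ILayer hidden = network.GetLayer(RadialBasisPattern.RBF_LAYER);
        prune.Prune(hidden, 0);   // removes neuron 0 plus its weights, bias entry, and RBF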
Example #8
        /// <summary>
        /// Internal function to increase the neuron count. This will add
        /// zero-weight neurons to the layer.
        /// </summary>
        /// <param name="layer">The layer to increase.</param>
        /// <param name="neuronCount">The new neuron count.</param>
        private void IncreaseNeuronCount(ILayer layer, int neuronCount)
        {
            // adjust the bias
            double[] newBias = new double[neuronCount];
            if (layer.HasBias)
            {
                for (int i = 0; i < layer.NeuronCount; i++)
                {
                    newBias[i] = layer.BiasWeights[i];
                }

                layer.BiasWeights = newBias;
            }

            // adjust the outbound weight matrices
            foreach (ISynapse synapse in layer.Next)
            {
                if (synapse.WeightMatrix != null)
                {
                    Matrix newMatrix = new Matrix(neuronCount, synapse
                                                  .ToNeuronCount);
                    // copy existing matrix to new matrix
                    for (int row = 0; row < layer.NeuronCount; row++)
                    {
                        for (int col = 0; col < synapse.ToNeuronCount; col++)
                        {
                            newMatrix[row, col] = synapse.WeightMatrix[row, col];
                        }
                    }
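                    // Rows for the added neurons are never written, so they keep the
                    // matrix default of zero; this is what makes the new neurons zero-weight.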
                    synapse.WeightMatrix = newMatrix;
                }
            }

            // adjust the inbound weight matrices
            ICollection <ISynapse> inboundSynapses = this.network.Structure
                                                     .GetPreviousSynapses(layer);

            foreach (ISynapse synapse in inboundSynapses)
            {
                if (synapse.WeightMatrix != null)
                {
                    Matrix newMatrix = new Matrix(synapse.FromNeuronCount,
                                                  neuronCount);
                    // copy existing matrix to new matrix
                    for (int row = 0; row < synapse.FromNeuronCount; row++)
                    {
                        for (int col = 0; col < synapse.ToNeuronCount; col++)
                        {
                            newMatrix[row, col] = synapse.WeightMatrix[row, col];
                        }
                    }
                    synapse.WeightMatrix = newMatrix;
                }
            }

            // adjust RBF
            if (layer is RadialBasisFunctionLayer)
            {
                RadialBasisFunctionLayer rbf    = (RadialBasisFunctionLayer)layer;
                IRadialBasisFunction[]   newRBF = new IRadialBasisFunction[neuronCount];
                for (int i = 0; i < rbf.RadialBasisFunction.Length; i++)
                {
                    newRBF[i] = rbf.RadialBasisFunction[i];
                }

                for (int i = rbf.RadialBasisFunction.Length; i < neuronCount; i++)
                {
                    newRBF[i] = new GaussianFunction(ThreadSafeRandom.NextDouble() - 0.5,
                                                     ThreadSafeRandom.NextDouble(), ThreadSafeRandom.NextDouble() - 0.5);
                }

                rbf.RadialBasisFunction = newRBF;
            }

            // finally, up the neuron count
            layer.NeuronCount = neuronCount;
        }
Example #9
        /// <summary>
        /// Create the flat neural network.
        /// </summary>
        public void Flatten()
        {
            IDictionary <ILayer, FlatLayer>      regular2flat = new Dictionary <ILayer, FlatLayer>();
            IDictionary <FlatLayer, ILayer>      flat2regular = new Dictionary <FlatLayer, ILayer>();
            IList <ObjectPair <ILayer, ILayer> > contexts     = new List <ObjectPair <ILayer, ILayer> >();

            this.flat = null;

            ValidateForFlat val = new ValidateForFlat();

            if (val.IsValid(this.network) == null)
            {
                if (this.layers.Count == 3 &&
                    this.layers[1] is RadialBasisFunctionLayer)
                {
                    RadialBasisFunctionLayer rbf = (RadialBasisFunctionLayer)this.layers[1];
                    this.flat = new FlatNetworkRBF(this.network.InputCount,
                                                   rbf.NeuronCount, this.network.OutputCount,
                                                   rbf.RadialBasisFunction);
                    FlattenWeights();
                    this.flatUpdate = FlatUpdateNeeded.None;
                    return;
                }

                int         flatLayerCount = CountNonContext();
                FlatLayer[] flatLayers     = new FlatLayer[flatLayerCount];

                int index = flatLayers.Length - 1;
                foreach (ILayer layer in this.layers)
                {
                    if (layer is ContextLayer)
                    {
                        ISynapse inboundSynapse = network.Structure
                                                  .FindPreviousSynapseByLayerType(layer,
                                                                                  typeof(BasicLayer));
                        ISynapse outboundSynapse = network
                                                   .Structure
                                                   .FindNextSynapseByLayerType(layer, typeof(BasicLayer));

                        if (inboundSynapse == null)
                        {
                            throw new NeuralNetworkError(
                                      "Context layer must be connected to by one BasicLayer.");
                        }

                        if (outboundSynapse == null)
                        {
                            throw new NeuralNetworkError(
                                      "Context layer must connect to by one BasicLayer.");
                        }

                        ILayer inbound  = inboundSynapse.FromLayer;
                        ILayer outbound = outboundSynapse.ToLayer;

                        contexts
                        .Add(new ObjectPair <ILayer, ILayer>(inbound, outbound));
                    }
                    else
                    {
                        double bias = this.FindNextBias(layer);

                        IActivationFunction activationType;
                        double[]            param;

                        if (layer.ActivationFunction == null)
                        {
                            activationType = new ActivationLinear();
                            param          = new double[1];
                            param[0]       = 1;
                        }
                        else
                        {
                            activationType = layer.ActivationFunction;
                            param          = layer.ActivationFunction.Params;
                        }

                        FlatLayer flatLayer = new FlatLayer(activationType, layer
                                                            .NeuronCount, bias, param);

                        regular2flat[layer]     = flatLayer;
                        flat2regular[flatLayer] = layer;
                        flatLayers[index--]     = flatLayer;
                    }
                }

                // now link up the context layers
                foreach (ObjectPair <ILayer, ILayer> context in contexts)
                {
                    // link the context layer on the FlatLayer
                    ILayer   layer   = context.B;
                    ISynapse synapse = this.network
                                       .Structure
                                       .FindPreviousSynapseByLayerType(layer, typeof(BasicLayer));
                    FlatLayer from = regular2flat[context.A];
                    FlatLayer to   = regular2flat[synapse.FromLayer];
                    to.ContextFedBy = from;
                }

                this.flat = new FlatNetwork(flatLayers);

                // update the context indexes on the non-flat network
                for (int i = 0; i < flatLayerCount; i++)
                {
                    FlatLayer fedBy = flatLayers[i].ContextFedBy;
                    if (fedBy != null)
                    {
                        ILayer   fedBy2  = flat2regular[flatLayers[i + 1]];
                        ISynapse synapse = FindPreviousSynapseByLayerType(fedBy2, typeof(ContextLayer));
                        if (synapse == null)
                        {
                            throw new NeuralNetworkError("Can't find parent synapse to context layer.");
                        }
                        ContextLayer context = (ContextLayer)synapse.FromLayer;

                        // find fedby index
                        int fedByIndex = -1;
                        for (int j = 0; j < flatLayerCount; j++)
                        {
                            if (flatLayers[j] == fedBy)
                            {
                                fedByIndex = j;
                                break;
                            }
                        }

                        if (fedByIndex == -1)
                        {
                            throw new NeuralNetworkError("Can't find layer feeding context.");
                        }

                        context.FlatContextIndex = this.flat.ContextTargetOffset[fedByIndex];
                    }
                }

                FlattenWeights();

                this.flatUpdate = FlatUpdateNeeded.None;
            }
            else
            {
                this.flatUpdate = FlatUpdateNeeded.Never;
            }
        }
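
A minimal invocation sketch, assuming this method lives on the network's Structure object (consistent with the network.Structure calls used throughout this section).

        // Sketch: 'network' is a finalized BasicNetwork, as produced by Generate() in Example #2.
        network.Structure.Flatten();   // builds the flat representation used for fast propagation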