/// <summary>
/// Load a RBF layer.
/// </summary>
/// <param name="xmlin">The XML to read from.</param>
/// <returns>The object that was loaded.</returns>
public IEncogPersistedObject Load(ReadXML xmlin)
{
    int neuronCount = 0;
    int x = 0;
    int y = 0;
    IRadialBasisFunction[] rbfs = new IRadialBasisFunction[0];

    // The tag we are positioned on when called marks the end of this
    // layer's section; stop when its closing tag is reached.
    String end = xmlin.LastTag.Name;

    while (xmlin.ReadToTag())
    {
        if (xmlin.IsIt(BasicLayerPersistor.PROPERTY_NEURONS, true))
        {
            neuronCount = xmlin.ReadIntToTag();
        }
        else if (xmlin.IsIt(BasicLayerPersistor.PROPERTY_X, true))
        {
            x = xmlin.ReadIntToTag();
        }
        else if (xmlin.IsIt(BasicLayerPersistor.PROPERTY_Y, true))
        {
            y = xmlin.ReadIntToTag();
        }
        else if (xmlin.IsIt(RadialBasisFunctionLayerPersistor.PROPERTY_RBF, true))
        {
            rbfs = LoadAllRBF(xmlin);
        }
        else if (xmlin.IsIt(end, false))
        {
            break;
        }
    }

    // Removed unused local "dimensions" (was declared, assigned 1, and never read).
    RadialBasisFunctionLayer layer = new RadialBasisFunctionLayer(neuronCount);
    layer.RadialBasisFunction = rbfs;
    layer.X = x;
    layer.Y = y;
    return layer;
}
/// <summary>
/// Generate the RBF network.
/// </summary>
/// <returns>The neural network.</returns>
public BasicNetwork Generate()
{
    // Build the three layers: linear input, RBF hidden, linear output.
    ILayer inputLayer = new BasicLayer(new ActivationLinear(), false, this.inputNeurons);
    ILayer outputLayer = new BasicLayer(new ActivationLinear(), false, this.outputNeurons);
    RadialBasisFunctionLayer rbfLayer = new RadialBasisFunctionLayer(this.hiddenNeurons);

    // Assemble the network; the RBF layer is attached with a direct synapse
    // and tagged so trainers can locate it later.
    BasicNetwork network = new BasicNetwork();
    network.AddLayer(inputLayer);
    network.AddLayer(rbfLayer, SynapseType.Direct);
    network.AddLayer(outputLayer);
    network.Structure.FinalizeStructure();
    network.Reset();
    network.TagLayer(RBF_LAYER, rbfLayer);

    rbfLayer.RandomizeRBFCentersAndWidths(this.inputNeurons, -1, 1, RBFEnum.Gaussian);

    // Assign display coordinates, stacking the layers vertically.
    int currentY = PatternConst.START_Y;
    inputLayer.X = PatternConst.START_X;
    inputLayer.Y = currentY;
    currentY += PatternConst.INC_Y;
    rbfLayer.X = PatternConst.START_X;
    rbfLayer.Y = currentY;
    currentY += PatternConst.INC_Y;
    outputLayer.X = PatternConst.START_X;
    outputLayer.Y = currentY;

    return network;
}
/// <summary>
/// Write the layer's radial basis functions out as XML, wrapped in a
/// single RBF property tag.
/// </summary>
/// <param name="xmlout">The XML writer to write to.</param>
/// <param name="layer">The layer whose radial basis functions are saved.</param>
private void SaveRBF(WriteXML xmlout, RadialBasisFunctionLayer layer)
{
    xmlout.BeginTag(RadialBasisFunctionLayerPersistor.PROPERTY_RBF);

    IRadialBasisFunction[] functions = layer.RadialBasisFunction;
    for (int i = 0; i < functions.Length; i++)
    {
        IRadialBasisFunction current = functions[i];
        // Each RBF is stored under a tag named after its concrete type.
        xmlout.BeginTag(current.GetType().Name);
        xmlout.AddProperty(PROPERTY_CENTERS, current.Centers, current.Centers.Length);
        xmlout.AddProperty(PROPERTY_PEAK, current.Peak);
        xmlout.AddProperty(PROPERTY_WIDTH, current.Width);
        xmlout.EndTag();
    }

    xmlout.EndTag();
}
/// <summary>
/// Construct the SVD training object.
/// </summary>
/// <param name="network">The network to train. Must have a single output neuron.</param>
/// <param name="training">The training data to use. Must be indexable.</param>
public SVDTraining(BasicNetwork network, INeuralDataSet training)
{
    ILayer outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);

    if (outputLayer == null)
    {
        throw new TrainingError("SVD requires an output layer.");
    }

    if (outputLayer.NeuronCount != 1)
    {
        throw new TrainingError("SVD requires an output layer with a single neuron.");
    }

    // Look up the RBF layer once, instead of once for the check and
    // again for the cast.
    ILayer foundLayer = network.GetLayer(RadialBasisPattern.RBF_LAYER);
    if (foundLayer == null)
    {
        throw new TrainingError("SVD is only tested to work on radial basis function networks.");
    }

    rbfLayer = (RadialBasisFunctionLayer)foundLayer;

    this.Training = training;
    this.network = network;
    // NOTE(review): trainingLength is taken from InputSize (number of input
    // values), not from a record count — confirm this is intended.
    this.trainingLength = (int)this.Training.InputSize;

    // Reusable pair sized to the training set's input/ideal widths.
    BasicNeuralData input = new BasicNeuralData(this.Training.InputSize);
    BasicNeuralData ideal = new BasicNeuralData(this.Training.IdealSize);
    this.pair = new BasicNeuralDataPair(input, ideal);
}