/// <summary>
/// Generate a Jordan neural network.
/// </summary>
/// <returns>A Jordan neural network.</returns>
public BasicNetwork Generate()
{
    // Build the three feed-forward layers plus the context layer that
    // carries the previous output back into the hidden layer (Jordan style).
    ILayer input = new BasicLayer(this.activation, false, this.inputNeurons);
    ILayer hidden = new BasicLayer(this.activation, true, this.hiddenNeurons);
    ILayer output = new BasicLayer(this.activation, true, this.outputNeurons);
    ILayer context = new ContextLayer(this.outputNeurons);

    BasicNetwork network = new BasicNetwork();
    network.AddLayer(input);
    network.AddLayer(hidden);
    network.AddLayer(output);

    // Recurrent path: output -> context (one-to-one), context -> hidden.
    output.AddNext(context, SynapseType.OneToOne);
    context.AddNext(hidden);

    // Assign display coordinates, advancing one row per layer tier; the
    // context layer sits indented beside the hidden layer.
    int row = PatternConst.START_Y;
    input.X = PatternConst.START_X;
    input.Y = row;
    row += PatternConst.INC_Y;
    hidden.X = PatternConst.START_X;
    hidden.Y = row;
    context.X = PatternConst.INDENT_X;
    context.Y = row;
    row += PatternConst.INC_Y;
    output.X = PatternConst.START_X;
    output.Y = row;

    network.Structure.FinalizeStructure();
    network.Reset();
    return network;
}
/// <summary>
/// Load the specified Encog object from an XML reader.
/// </summary>
/// <param name="xmlIn">The XML reader to use.</param>
/// <returns>The loaded object, or null if no neuron count was found.</returns>
public IEncogPersistedObject Load(ReadXML xmlIn)
{
    int neuronCount = 0;
    int x = 0;
    int y = 0;
    double biasActivation = 1;
    String threshold = null;
    IActivationFunction activation = null;
    // Remember the enclosing tag name so we know when this element ends.
    String end = xmlIn.LastTag.Name;
    String context = null;

    while (xmlIn.ReadToTag())
    {
        if (xmlIn.IsIt(BasicLayerPersistor.TAG_ACTIVATION, true))
        {
            // The activation function is encoded as a nested tag whose
            // name identifies the activation type.
            xmlIn.ReadToTag();
            String type = xmlIn.LastTag.Name;
            activation = BasicLayerPersistor.LoadActivation(type, xmlIn);
        }
        else if (xmlIn.IsIt(BasicLayerPersistor.PROPERTY_NEURONS, true))
        {
            neuronCount = xmlIn.ReadIntToTag();
        }
        else if (xmlIn.IsIt(BasicLayerPersistor.PROPERTY_X, true))
        {
            x = xmlIn.ReadIntToTag();
        }
        else if (xmlIn.IsIt(BasicLayerPersistor.PROPERTY_Y, true))
        {
            y = xmlIn.ReadIntToTag();
        }
        else if (xmlIn.IsIt(BasicLayerPersistor.PROPERTY_THRESHOLD, true))
        {
            threshold = xmlIn.ReadTextToTag();
        }
        else if (xmlIn.IsIt(PROPERTY_CONTEXT, true))
        {
            context = xmlIn.ReadTextToTag();
        }
        else if (xmlIn.IsIt(BasicLayerPersistor.PROPERTY_BIAS_ACTIVATION, true))
        {
            // BUG FIX: parse with the invariant culture. Persisted numeric
            // data in this file is culture-neutral (the threshold and
            // context lists use CSVFormat.EG_FORMAT); a bare double.Parse
            // uses the current culture and misreads values on locales
            // where ',' is the decimal separator.
            biasActivation = double.Parse(
                xmlIn.ReadTextToTag(),
                System.Globalization.CultureInfo.InvariantCulture);
        }
        else if (xmlIn.IsIt(end, false))
        {
            break;
        }
    }

    if (neuronCount > 0)
    {
        ContextLayer layer;

        if (threshold == null)
        {
            // No bias weights persisted: create a layer without bias.
            layer = new ContextLayer(activation, false, neuronCount);
        }
        else
        {
            // Restore the persisted bias weights.
            double[] t = NumberList.FromList(CSVFormat.EG_FORMAT, threshold);
            layer = new ContextLayer(activation, true, neuronCount);
            for (int i = 0; i < t.Length; i++)
            {
                layer.BiasWeights[i] = t[i];
            }
        }

        if (context != null)
        {
            // Restore the persisted context values.
            double[] c = NumberList.FromList(CSVFormat.EG_FORMAT, context);
            for (int i = 0; i < c.Length; i++)
            {
                layer.Context[i] = c[i];
            }
        }

        layer.X = x;
        layer.Y = y;
        layer.BiasActivation = biasActivation;
        return layer;
    }

    // No neuron count was read; nothing to construct.
    return null;
}
/// <summary>
/// Generate the RSOM network.
/// </summary>
/// <returns>The neural network.</returns>
public BasicNetwork Generate()
{
    // Two linear layers plus a context layer that feeds the previous
    // output back into the input layer (recurrent SOM).
    ILayer output = new BasicLayer(new ActivationLinear(), false, this.outputNeurons);
    ILayer input = new BasicLayer(new ActivationLinear(), false, this.inputNeurons);
    ILayer context = new ContextLayer(this.outputNeurons);

    BasicNetwork network = new BasicNetwork();
    network.AddLayer(input);
    network.AddLayer(output);

    // Recurrent path: output -> context (one-to-one), context -> input.
    output.AddNext(context, SynapseType.OneToOne);
    context.AddNext(input);

    // Assign display coordinates; the context layer sits indented beside
    // the input layer.
    int row = PatternConst.START_Y;
    input.X = PatternConst.START_X;
    input.Y = row;
    context.X = PatternConst.INDENT_X;
    context.Y = row;
    row += PatternConst.INC_Y;
    output.X = PatternConst.START_X;
    output.Y = row;

    network.Structure.FinalizeStructure();
    network.Reset();
    return network;
}