Inheritance: Encog.Persist.BasicPersistedObject, ILayer
Example #1
        /// <summary>
        /// Clone this object.
        /// </summary>
        /// <returns>A cloned version of this object.</returns>
        public override object Clone()
        {
            BasicLayer result = new BasicLayer(
                (IActivationFunction)this.activationFunction.Clone(),
                this.HasBias, this.NeuronCount);

            return result;
        }
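For reference, a minimal sketch of how this Clone override might be exercised, assuming the Encog 2.x style BasicLayer API shown above (the using directives and the ActivationTANH choice are assumptions, not part of the original example):

// Hedged sketch: namespaces assume Encog 2.x layout; adjust to your Encog version.
using Encog.Neural.Activation;        // ActivationTANH (assumed location)
using Encog.Neural.Networks.Layers;   // BasicLayer

class CloneDemo
{
    static void Main()
    {
        // Build a small biased layer, then clone it.
        var original = new BasicLayer(new ActivationTANH(), true, 10);
        var copy = (BasicLayer) original.Clone();

        // The clone carries over the activation function, bias flag and neuron count.
        System.Console.WriteLine(copy.NeuronCount);  // 10
    }
}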
Example #2
 public IMLMethod Generate()
 {
     BasicNetwork method = new BasicNetwork();
     ILayer layer = new BasicLayer(new ActivationLinear(), true, this._xcfe830a7176c14e5);
     ILayer layer2 = new BasicLayer(new ActivationLinear(), false, this._x8f581d694fca0474);
     method.AddLayer(layer);
     method.AddLayer(layer2);
     method.Structure.FinalizeStructure();
     new RangeRandomizer(-0.5, 0.5).Randomize(method);
     return method;
 }
Example #3
 public IMLMethod Generate()
 {
     BasicLayer layer;
     BasicLayer layer2;
     BasicNetwork network = new BasicNetwork();
     network.AddLayer(layer2 = new BasicLayer(this._x2a5a4034520336f3, true, this._xcfe830a7176c14e5));
     network.AddLayer(layer = new BasicLayer(this._x2a5a4034520336f3, true, this._xdf89f9cf9fc3d06f));
     network.AddLayer(new BasicLayer(null, false, this._x8f581d694fca0474));
     layer2.ContextFedBy = layer;
     network.Structure.FinalizeStructure();
     network.Reset();
     return network;
 }
Example #4
        /// <summary>
        /// Generate the network.
        /// </summary>
        public IMLMethod Generate()
        {
            var network = new BasicNetwork();

            ILayer inputLayer = new BasicLayer(new ActivationLinear(), true,
                                              _inputNeurons);
            ILayer outputLayer = new BasicLayer(new ActivationLinear(), false,
                                               _outputNeurons);

            network.AddLayer(inputLayer);
            network.AddLayer(outputLayer);
            network.Structure.FinalizeStructure();

            (new RangeRandomizer(-0.5d, 0.5d)).Randomize(network);

            return network;
        }
Example #5
        /// <summary>
        /// Generate the Elman neural network.
        /// </summary>
        /// <returns>The Elman neural network.</returns>
        public BasicNetwork Generate()
        {
            int y = PatternConst.START_Y;
            ILayer input = new BasicLayer(this.activation, false,
                   this.inputNeurons);

            BasicNetwork result = new BasicNetwork();
            result.AddLayer(input);

            input.X = PatternConst.START_X;
            input.Y = y;
            y += PatternConst.INC_Y;

            foreach (int count in this.hidden)
            {

                ILayer hidden = new BasicLayer(
                       this.activation, true, count);

                result.AddLayer(hidden);
                hidden.X = PatternConst.START_X;
                hidden.Y = y;
                y += PatternConst.INC_Y;
            }

            ILayer output = new BasicLayer(this.activation, true,
                   this.outputNeurons);
            result.AddLayer(output);
            output.X = PatternConst.START_X;
            output.Y = y;
            y += PatternConst.INC_Y;

            result.Structure.FinalizeStructure();
            result.Reset();

            return result;
        }
Example #6
        /// <summary>
        /// Generate the feedforward neural network.
        /// </summary>
        public IMLMethod Generate()
        {
            if (_activationOutput == null)
                _activationOutput = _activationHidden;

            ILayer input = new BasicLayer(null, true, _inputNeurons);

            var result = new BasicNetwork();
            result.AddLayer(input);


            foreach (Int32 count in _hidden)
            {
                ILayer hidden = new BasicLayer(_activationHidden, true, count);

                result.AddLayer(hidden);
            }

            ILayer output = new BasicLayer(_activationOutput, false,
                                          _outputNeurons);
            result.AddLayer(output);

            result.Structure.FinalizeStructure();
            result.Reset();

            return result;
        }
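Examples 4 through 6 follow the same recipe: add an input layer, any hidden layers, and an output layer, call Structure.FinalizeStructure(), then randomize or reset the weights. A minimal usage sketch, assuming the last method belongs to Encog 3's FeedForwardPattern (the namespaces, the ActivationSigmoid choice and the layer sizes below are illustrative assumptions):

// Hedged sketch: build a 2-3-1 network via the pattern and push one input through it.
using Encog.Engine.Network.Activation;  // ActivationSigmoid (Encog 3.x location, assumed)
using Encog.ML.Data.Basic;              // BasicMLData
using Encog.Neural.Networks;            // BasicNetwork
using Encog.Neural.Pattern;             // FeedForwardPattern (assumed to be the class above)

class FeedForwardDemo
{
    static void Main()
    {
        var pattern = new FeedForwardPattern
        {
            InputNeurons = 2,
            OutputNeurons = 1,
            ActivationFunction = new ActivationSigmoid()
        };
        pattern.AddHiddenLayer(3);

        // Generate() wires the layers and randomizes the weights (network is untrained).
        var network = (BasicNetwork) pattern.Generate();
        var output = network.Compute(new BasicMLData(new double[] { 0.0, 1.0 }));
        System.Console.WriteLine(output[0]);
    }
}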
Example #7
 /// <summary>
 /// Generate the SOM network.
 /// </summary>
 /// <returns>The neural network.</returns>
 public BasicNetwork Generate()
 {
     ILayer input = new BasicLayer(new ActivationLinear(), false,
             this.inputNeurons);
     ILayer output = new BasicLayer(new ActivationLinear(), false,
             this.outputNeurons);
     int y = PatternConst.START_Y;
     BasicNetwork network = new BasicNetwork();
     network.AddLayer(input);
     network.AddLayer(output);
     input.X = PatternConst.START_X;
     output.X = PatternConst.START_X;
     input.Y = y;
     y += PatternConst.INC_Y;
     output.Y = y;
     network.Logic = new SOMLogic();
     network.Structure.FinalizeStructure();
     network.Reset();
     return network;
 }
Example #8
        /// <summary>
        /// Load the specified Encog object from an XML reader.
        /// </summary>
        /// <param name="xmlIn">The XML reader to use.</param>
        /// <returns>The loaded object.</returns>
        public IEncogPersistedObject Load(ReadXML xmlIn)
        {
            int neuronCount = 0;
            int x = 0;
            int y = 0;
            String threshold = null;
            IActivationFunction activation = null;
            String end = xmlIn.LastTag.Name;
            double biasActivation = 1;

            while (xmlIn.ReadToTag())
            {
                if (xmlIn.IsIt(BasicLayerPersistor.TAG_ACTIVATION, true))
                {
                    xmlIn.ReadToTag();
                    String type = xmlIn.LastTag.Name;
                    activation = LoadActivation(type, xmlIn);
                }
                else if (xmlIn.IsIt(BasicLayerPersistor.PROPERTY_NEURONS, true))
                {
                    neuronCount = xmlIn.ReadIntToTag();
                }
                else if (xmlIn.IsIt(BasicLayerPersistor.PROPERTY_THRESHOLD, true))
                {
                    threshold = xmlIn.ReadTextToTag();
                }
                else if (xmlIn.IsIt(BasicLayerPersistor.PROPERTY_X, true))
                {
                    x = xmlIn.ReadIntToTag();
                }
                else if (xmlIn.IsIt(BasicLayerPersistor.PROPERTY_Y, true))
                {
                    y = xmlIn.ReadIntToTag();
                }
                else if (xmlIn.IsIt(BasicLayerPersistor.PROPERTY_BIAS_ACTIVATION, true))
                {
                    biasActivation = double.Parse(xmlIn.ReadTextToTag());
                }
                else if (xmlIn.IsIt(end, false))
                {
                    break;
                }
            }

            if (neuronCount > 0)
            {
                BasicLayer layer;

                if (threshold == null)
                {
                    layer = new BasicLayer(activation, false, neuronCount);
                }
                else
                {
                    double[] t = NumberList.FromList(CSVFormat.EG_FORMAT, threshold);
                    layer = new BasicLayer(activation, true, neuronCount);
                    for (int i = 0; i < t.Length; i++)
                    {
                        layer.BiasWeights[i] = t[i];
                    }
                }
                layer.X = x;
                layer.Y = y;
                layer.BiasActivation = biasActivation;
                return layer;
            }
            return null;
        }
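Note the branch on threshold above: the layer is reconstructed with a bias only when the persisted XML contained a threshold (bias weight) list, in which case those values are copied back into BiasWeights; otherwise the layer is created without a bias.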
Example #9
        /// <summary>
        /// Generate the RSOM network.
        /// </summary>
        /// <returns>The neural network.</returns>
        public BasicNetwork Generate()
        {
            ILayer output = new BasicLayer(new ActivationLinear(), false,
                    this.outputNeurons);
            ILayer input = new BasicLayer(new ActivationLinear(), false,
                    this.inputNeurons);

            BasicNetwork network = new BasicNetwork();
            ILayer context = new ContextLayer(this.outputNeurons);
            network.AddLayer(input);
            network.AddLayer(output);

            output.AddNext(context, SynapseType.OneToOne);
            context.AddNext(input);

            int y = PatternConst.START_Y;
            input.X = PatternConst.START_X;
            input.Y = y;

            context.X = PatternConst.INDENT_X;
            context.Y = y;

            y += PatternConst.INC_Y;

            output.X = PatternConst.START_X;
            output.Y = y;

            network.Structure.FinalizeStructure();
            network.Reset();
            return network;
        }
Example #10
        /// <summary>
        /// Generate the network.
        /// </summary>
        /// <returns>The generated network.</returns>
        public BasicNetwork Generate()
        {

            ILayer input, instar, outstar;
            int y = PatternConst.START_Y;

            BasicNetwork network = new BasicNetwork();
            network.AddLayer(input = new BasicLayer(new ActivationLinear(), false, this.inputCount));
            network.AddLayer(instar = new BasicLayer(new ActivationCompetitive(), false, this.instarCount));
            network.AddLayer(outstar = new BasicLayer(new ActivationLinear(), false, this.outstarCount));
            network.Structure.FinalizeStructure();
            network.Reset();

            input.X = PatternConst.START_X;
            input.Y = y;
            y += PatternConst.INC_Y;

            instar.X = PatternConst.START_X;
            instar.Y = y;
            y += PatternConst.INC_Y;

            outstar.X = PatternConst.START_X;
            outstar.Y = y;

            // tag as needed
            network.TagLayer(BasicNetwork.TAG_INPUT, input);
            network.TagLayer(BasicNetwork.TAG_OUTPUT, outstar);
            network.TagLayer(CPNPattern.TAG_INSTAR, instar);
            network.TagLayer(CPNPattern.TAG_OUTSTAR, outstar);

            return network;
        }
Example #11
        /// <summary>
        /// Generate the network.
        /// </summary>
        /// <returns>The generated network.</returns>
        public BasicNetwork Generate()
        {
            ILayer layer = new BasicLayer(new ActivationBiPolar(), true,
                    this.neuronCount);

            BasicNetwork result = new BasicNetwork(new BoltzmannLogic());
            result.SetProperty(BoltzmannLogic.PROPERTY_ANNEAL_CYCLES, this.annealCycles);
            result.SetProperty(BoltzmannLogic.PROPERTY_RUN_CYCLES, this.runCycles);
            result.SetProperty(BoltzmannLogic.PROPERTY_TEMPERATURE, this.temperature);
            result.AddLayer(layer);
            layer.AddNext(layer);
            layer.X = PatternConst.START_X;
            layer.Y = PatternConst.START_Y;
            result.Structure.FinalizeStructure();
            result.Reset();
            return result;
        }
Example #12
 /// <summary>
 /// Generate the RBF network.
 /// </summary>
 /// <returns>The neural network.</returns>
 public BasicNetwork Generate()
 {
     ILayer input = new BasicLayer(new ActivationLinear(), false,
             this.inputNeurons);
     ILayer output = new BasicLayer(new ActivationLinear(), false, this.outputNeurons);
     BasicNetwork network = new BasicNetwork();
     RadialBasisFunctionLayer rbfLayer = new RadialBasisFunctionLayer(
            this.hiddenNeurons);
     network.AddLayer(input);
     network.AddLayer(rbfLayer, SynapseType.Direct);
     network.AddLayer(output);
     network.Structure.FinalizeStructure();
     network.Reset();
     network.TagLayer(RBF_LAYER, rbfLayer);
     rbfLayer.RandomizeRBFCentersAndWidths(this.inputNeurons, -1, 1, RBFEnum.Gaussian);
     int y = PatternConst.START_Y;
     input.X = PatternConst.START_X;
     input.Y = y;
     y += PatternConst.INC_Y;
     rbfLayer.X = PatternConst.START_X;
     rbfLayer.Y = y;
     y += PatternConst.INC_Y;
     output.X = PatternConst.START_X;
     output.Y = y;
     return network;
 }
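Two details worth noting in this RBF pattern: the hidden RadialBasisFunctionLayer is attached with SynapseType.Direct and tagged under RBF_LAYER so it can be retrieved later, and its centers and widths are randomized with a Gaussian RBF before the network is returned.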
Example #13
        /// <summary>
        /// Generate the network.
        /// </summary>
        /// <returns>The generated network.</returns>
        public BasicNetwork Generate()
        {
            BasicNetwork network = new BasicNetwork();

            int y = PatternConst.START_Y;

            ILayer inputLayer = new BasicLayer(new ActivationLinear(), false, InputNeurons);
            ILayer outputLayer = new BasicLayer(new ActivationLinear(), true, OutputNeurons);

            network.AddLayer(inputLayer);
            network.AddLayer(outputLayer);
            network.Structure.FinalizeStructure();

            (new RangeRandomizer(-0.5, 0.5)).Randomize(network);

            inputLayer.X = PatternConst.START_X;
            inputLayer.Y = y;
            y += PatternConst.INC_Y;

            outputLayer.X = PatternConst.START_X;
            outputLayer.Y = y;

            return network;
        }
Example #14
        /// <summary>
        /// Generate the neural network.
        /// </summary>
        /// <returns>The generated neural network.</returns>
        public BasicNetwork Generate()
        {
            BasicNetwork network = new BasicNetwork(new ART1Logic());

            int y = PatternConst.START_Y;

            ILayer layerF1 = new BasicLayer(new ActivationLinear(), false, this.InputNeurons);
            ILayer layerF2 = new BasicLayer(new ActivationLinear(), false, this.OutputNeurons);
            ISynapse synapseF1toF2 = new WeightedSynapse(layerF1, layerF2);
            ISynapse synapseF2toF1 = new WeightedSynapse(layerF2, layerF1);
            layerF1.Next.Add(synapseF1toF2);
            layerF2.Next.Add(synapseF2toF1);

            // apply tags
            network.TagLayer(BasicNetwork.TAG_INPUT, layerF1);
            network.TagLayer(BasicNetwork.TAG_OUTPUT, layerF2);
            network.TagLayer(ART1Pattern.TAG_F1, layerF1);
            network.TagLayer(ART1Pattern.TAG_F2, layerF2);

            layerF1.X = PatternConst.START_X;
            layerF1.Y = y;
            y += PatternConst.INC_Y;

            layerF2.X = PatternConst.START_X;
            layerF2.Y = y;

            network.SetProperty(ARTLogic.PROPERTY_A1, this.A1);
            network.SetProperty(ARTLogic.PROPERTY_B1, this.B1);
            network.SetProperty(ARTLogic.PROPERTY_C1, this.C1);
            network.SetProperty(ARTLogic.PROPERTY_D1, this.D1);
            network.SetProperty(ARTLogic.PROPERTY_L, this.L);
            network.SetProperty(ARTLogic.PROPERTY_VIGILANCE, this.Vigilance);

            network.Structure.FinalizeStructure();

            return network;
        }
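The ART1 pattern above never calls AddLayer; the F1 and F2 layers are linked with a pair of WeightedSynapse objects in both directions and exposed purely through layer tags, while the ART parameters (A1, B1, C1, D1, L, vigilance) are handed to the logic class via SetProperty.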
Example #15
 public void AddHiddenLayer(BasicLayer layer)
 {
     Model.AddLayer(layer);
 }
Example #16
        /// <summary>
        /// Generate a Jordan neural network.
        /// </summary>
        /// <returns>A Jordan neural network.</returns>
        public BasicNetwork Generate()
        {
            // construct a Jordan-type network
            ILayer input = new BasicLayer(this.activation, false,
                   this.inputNeurons);
            ILayer hidden = new BasicLayer(this.activation, true,
                   this.hiddenNeurons);
            ILayer output = new BasicLayer(this.activation, true,
                   this.outputNeurons);
            ILayer context = new ContextLayer(this.outputNeurons);
            BasicNetwork network = new BasicNetwork();
            network.AddLayer(input);
            network.AddLayer(hidden);
            network.AddLayer(output);

            output.AddNext(context, SynapseType.OneToOne);
            context.AddNext(hidden);

            int y = PatternConst.START_Y;
            input.X = PatternConst.START_X;
            input.Y = y;
            y += PatternConst.INC_Y;
            hidden.X = PatternConst.START_X;
            hidden.Y = y;
            context.X = PatternConst.INDENT_X;
            context.Y = y;
            y += PatternConst.INC_Y;
            output.X = PatternConst.START_X;
            output.Y = y;

            network.Structure.FinalizeStructure();
            network.Reset();
            return network;
        }
Example #17
        /// <summary>
        /// Generate the RBF network. 
        /// </summary>
        /// <returns>The neural network.</returns>
        public BasicNetwork Generate()
        {

            int y = PatternConst.START_Y;
            BasicLayer inputLayer = new BasicLayer(new ActivationLinear(),
                    false, this.InputNeurons);
            inputLayer.X = PatternConst.START_X;
            inputLayer.Y = y;
            y += PatternConst.INC_Y;
            BasicLayer outputLayer = new BasicLayer(ActivationFunction, false, this.OutputNeurons);
            outputLayer.X = PatternConst.START_X;
            outputLayer.Y = y;
            NEATSynapse synapse = new NEATSynapse(inputLayer, outputLayer,
                    this.neurons, this.NEATActivation, 0);
            synapse.Snapshot = this.Snapshot;
            inputLayer.AddSynapse(synapse);
            BasicNetwork network = new BasicNetwork();
            network.TagLayer(BasicNetwork.TAG_INPUT, inputLayer);
            network.TagLayer(BasicNetwork.TAG_OUTPUT, outputLayer);
            network.Structure.FinalizeStructure();

            return network;
        }
Example #18
 public IMLMethod Generate()
 {
     // If no output activation was set, fall back to the hidden activation.
     if (this._x34a5e0736d060c9c == null)
     {
         this._x34a5e0736d060c9c = this._xff166cbf56128ec5;
     }

     BasicNetwork network = new BasicNetwork();
     network.AddLayer(new BasicLayer(null, true, this._xcfe830a7176c14e5));

     foreach (int count in this._xab3ddaff42dd298a)
     {
         network.AddLayer(new BasicLayer(this._xff166cbf56128ec5, true, count));
     }

     network.AddLayer(new BasicLayer(this._x34a5e0736d060c9c, false, this._x8f581d694fca0474));
     network.Structure.FinalizeStructure();
     network.Reset();
     return network;
 }
Example #19
        /// <summary>
        /// Generate the Hopfield neural network.
        /// </summary>
        /// <returns>The generated network.</returns>
        public BasicNetwork Generate()
        {
            ILayer layer = new BasicLayer(new ActivationBiPolar(), false,
                    this.neuronCount);

            BasicNetwork result = new BasicNetwork(new HopfieldLogic());
            result.AddLayer(layer);
            layer.AddNext(layer);
            layer.X = PatternConst.START_X;
            layer.Y = PatternConst.START_Y;
            result.Structure.FinalizeStructure();
            result.Reset();
            return result;
        }
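As with the Boltzmann example (#11), the Hopfield pattern uses a single ActivationBiPolar layer connected back onto itself (layer.AddNext(layer)); the two differ in the logic class installed, the bias flag on the layer, and the annealing properties the Boltzmann version sets.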
Example #20
        /// <summary>
        /// Generate the network.
        /// </summary>
        /// <returns>The generated network.</returns>
        public BasicNetwork Generate()
        {
            BasicNetwork network = new BasicNetwork(new BAMLogic());

            ILayer f1Layer = new BasicLayer(new ActivationBiPolar(), false,
                    F1Neurons);
            ILayer f2Layer = new BasicLayer(new ActivationBiPolar(), false,
                    F2Neurons);
            ISynapse synapseInputToOutput = new WeightedSynapse(f1Layer,
                    f2Layer);
            ISynapse synapseOutputToInput = new WeightedSynapse(f2Layer,
                    f1Layer);
            f1Layer.AddSynapse(synapseInputToOutput);
            f2Layer.AddSynapse(synapseOutputToInput);

            network.TagLayer(BAMPattern.TAG_F1, f1Layer);
            network.TagLayer(BAMPattern.TAG_F2, f2Layer);

            network.Structure.FinalizeStructure();

            f1Layer.Y = PatternConst.START_Y;
            f2Layer.Y = PatternConst.START_Y;

            f1Layer.X = PatternConst.START_X;
            f2Layer.X = PatternConst.INDENT_X;

            return network;
        }
Example #21
        /// <summary>
        /// Generate the Elman neural network.
        /// </summary>
        ///
        /// <returns>The Elman neural network.</returns>
        public IMLMethod Generate()
        {
            BasicLayer hidden, input;

            var network = new BasicNetwork();
            network.AddLayer(input = new BasicLayer(_activation, true,
                                                    _inputNeurons));
            network.AddLayer(hidden = new BasicLayer(_activation, true,
                                                     _hiddenNeurons));
            network.AddLayer(new BasicLayer(null, false, _outputNeurons));
            input.ContextFedBy = hidden;
            network.Structure.FinalizeStructure();
            network.Reset();
            return network;
        }
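Unlike the older Jordan and RSOM examples above, which wire an explicit ContextLayer with AddNext, this Encog 3 version expresses the recurrence with a single assignment, input.ContextFedBy = hidden. A minimal sketch of the resulting stateful behaviour, assuming this is Encog 3's ElmanPattern (namespaces, activation choice and sizes are assumptions):

// Hedged sketch: the same input can yield different outputs on consecutive calls,
// because the context fed by the hidden layer changes between calls.
using Encog.Engine.Network.Activation;  // ActivationSigmoid (assumed)
using Encog.ML.Data.Basic;
using Encog.Neural.Networks;
using Encog.Neural.Pattern;             // ElmanPattern (assumed to be the class above)

class ElmanDemo
{
    static void Main()
    {
        var pattern = new ElmanPattern
        {
            InputNeurons = 1,
            OutputNeurons = 1,
            ActivationFunction = new ActivationSigmoid()
        };
        pattern.AddHiddenLayer(4);

        var network = (BasicNetwork) pattern.Generate();
        var input = new BasicMLData(new double[] { 1.0 });

        System.Console.WriteLine(network.Compute(input)[0]);  // first call
        System.Console.WriteLine(network.Compute(input)[0]);  // may differ: context was updated
        network.ClearContext();                               // reset the recurrent state
    }
}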
Example #22
        /// <summary>
        /// Clone this object. 
        /// </summary>
        /// <returns>A cloned version of this object.</returns>
        public override object Clone()
        {
            BasicLayer result = new BasicLayer(
                   (IActivationFunction)this.activationFunction.Clone(),
                   this.HasBias, this.NeuronCount);
            return result;
        }