Implements a NEAT network as a synapse between two layers. In Encog, a NEAT network is created by placing a NEATSynapse between an input layer and an output layer. NEAT networks have only these two layers; there are no actual hidden layers. Instead, the synapse evolves many hidden neurons whose connections do not fit neatly into layers. Connections can be feedforward, recurrent, or self-connected. NEAT networks therefore relieve the programmer of the need to define the hidden-layer structure of the neural network.

The output from the neural network can be calculated normally or using a snapshot. Snapshot mode is slower but can be more accurate: it handles recurrent connections better because it takes the time to loop through the network multiple times to "flush out" the recurrent links.

NeuroEvolution of Augmenting Topologies (NEAT) is a genetic algorithm for evolving artificial neural networks. It was developed by Ken Stanley while at The University of Texas at Austin. http://www.cs.ucf.edu/~kstanley/
Inheritance: Encog.Persist.BasicPersistedSubObject, ISynapse, IContextClearable
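For orientation, here is a minimal sketch of assembling such a network by hand, using only members shown on this page (layer sizes are arbitrary, and exact Encog 2.x overloads may differ):

        // Sketch: a NEAT network is just an input layer and an output layer
        // joined by a NEATSynapse; hidden neurons are evolved, not declared.
        BasicLayer input = new BasicLayer(new ActivationLinear(), false, 2);
        BasicLayer output = new BasicLayer(new ActivationSigmoid(), false, 1);

        NEATSynapse synapse = new NEATSynapse(input, output);
        synapse.Snapshot = true; // slower, but flushes recurrent links

        input.AddSynapse(synapse);

        BasicNetwork network = new BasicNetwork();
        network.TagLayer(BasicNetwork.TAG_INPUT, input);
        network.TagLayer(BasicNetwork.TAG_OUTPUT, output);
        network.Structure.FinalizeStructure();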
        /// <summary>
        /// Generate the NEAT network.
        /// </summary>
        /// <returns>The neural network.</returns>
        public BasicNetwork Generate()
        {
            int y = PatternConst.START_Y;

            // Input layer: linear activation, no bias.
            BasicLayer inputLayer = new BasicLayer(new ActivationLinear(),
                    false, this.InputNeurons);
            inputLayer.X = PatternConst.START_X;
            inputLayer.Y = y;
            y += PatternConst.INC_Y;

            // Output layer: the configured activation function, no bias.
            BasicLayer outputLayer = new BasicLayer(ActivationFunction, false,
                    this.OutputNeurons);
            outputLayer.X = PatternConst.START_X;
            outputLayer.Y = y;

            // The NEAT synapse holds the evolved hidden neurons and links.
            NEATSynapse synapse = new NEATSynapse(inputLayer, outputLayer,
                    this.neurons, this.NEATActivation, 0);
            synapse.Snapshot = this.Snapshot;
            inputLayer.AddSynapse(synapse);

            BasicNetwork network = new BasicNetwork();
            network.TagLayer(BasicNetwork.TAG_INPUT, inputLayer);
            network.TagLayer(BasicNetwork.TAG_OUTPUT, outputLayer);
            network.Structure.FinalizeStructure();

            return network;
        }
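A hedged usage sketch for the method above; "NEATPattern" is a hypothetical name for the pattern class that hosts Generate() and the InputNeurons/OutputNeurons/Snapshot members it references:

        // Hypothetical usage; "NEATPattern" is an assumed class name.
        NEATPattern pattern = new NEATPattern();
        pattern.InputNeurons = 2;   // size of the input layer
        pattern.OutputNeurons = 1;  // size of the output layer
        pattern.Snapshot = true;    // forwarded to the NEATSynapse
        BasicNetwork network = pattern.Generate();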
        /// <summary>
        /// Add a "next" layer.
        /// </summary>
        /// <param name="next">The next layer to add.</param>
        /// <param name="type">The synapse type to use for this layer.</param>
        public void AddNext(ILayer next, SynapseType type)
        {
            ISynapse synapse = null;

            switch (type)
            {
                case SynapseType.OneToOne:
                    synapse = new OneToOneSynapse(this, next);
                    break;
                case SynapseType.Weighted:
                    synapse = new WeightedSynapse(this, next);
                    break;
                case SynapseType.Weightless:
                    synapse = new WeightlessSynapse(this, next);
                    break;
                case SynapseType.Direct:
                    synapse = new DirectSynapse(this, next);
                    break;
                case SynapseType.NEAT:
                    synapse = new NEATSynapse(this, next);
                    break;
            }

            // An unrecognized synapse type leaves synapse null; log the
            // failure if logging is enabled, then report the error.
            if (synapse == null)
            {
                String str = "Unknown synapse type.";
#if logging
                if (BasicLayer.logger.IsErrorEnabled)
                {
                    BasicLayer.logger.Error(str);
                }
#endif
                throw new NeuralNetworkError(str);
            }

            this.next.Add(synapse);
        }
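For example, a short sketch connecting two layers with a NEAT synapse through this method (layer sizes are arbitrary):

        // Sketch: AddNext builds the synapse and records it on this layer.
        BasicLayer input = new BasicLayer(new ActivationLinear(), false, 2);
        BasicLayer output = new BasicLayer(new ActivationSigmoid(), false, 1);
        input.AddNext(output, SynapseType.NEAT);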