        /// <summary>
        /// Compute the output for a given input to the neural network. This method
        /// provides a parameter to specify an output holder to use. This holder
        /// allows propagation training to track the output from each layer.
        /// If you do not need this holder, pass null or use the other
        /// Compute method.
        /// </summary>
        /// <param name="input">The input provided to the neural network.</param>
        /// <param name="useHolder">Allows a holder to be specified; this allows
        /// propagation training to check the output of each layer.</param>
        /// <returns>The results from the output neurons.</returns>
        public virtual INeuralData Compute(INeuralData input,
                                           NeuralOutputHolder useHolder)
        {
            NeuralOutputHolder holder;

            ILayer inputLayer = this.network.GetLayer(BasicNetwork.TAG_INPUT);

#if logging
            if (FeedforwardLogic.logger.IsDebugEnabled)
            {
                FeedforwardLogic.logger.Debug("Pattern " + input.ToString()
                                              + " presented to neural network");
            }
#endif

            if (useHolder == null && this.network.Structure.Flat != null)
            {
                this.network.Structure.UpdateFlatNetwork();
                INeuralData result = new BasicNeuralData(this.network.Structure.Flat.OutputCount);
                this.network.Structure.Flat.Compute(input.Data, result.Data);
                return result;
            }

            if (useHolder == null)
            {
                holder = new NeuralOutputHolder();
            }
            else
            {
                holder = useHolder;
            }

            Compute(holder, inputLayer, input, null);
            return holder.Output;
        }
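
        // --- Hedged usage sketch (illustration only, not part of the original
        // source): shows how Compute behaves with and without an output holder.
        // Assumes the surrounding Encog 2.x types already referenced above
        // (INeuralData, BasicNeuralData, NeuralOutputHolder); the two-element
        // input pattern is arbitrary.
        private void ComputeUsageSketch()
        {
            INeuralData input = new BasicNeuralData(new double[] { 0.0, 1.0 });

            // Fast path: no holder, so the flat network (when available) is used.
            INeuralData output = Compute(input, null);

            // Training path: supply a holder so layer-by-layer results are kept.
            NeuralOutputHolder holder = new NeuralOutputHolder();
            INeuralData tracked = Compute(input, holder);
            // holder.Result maps each processed synapse to the input it received,
            // and holder.Output holds the final network output.
        }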
        /// <summary>
        /// Internal computation method for a single layer. This is called
        /// as the neural network processes each layer.
        /// </summary>
        /// <param name="holder">The output holder.</param>
        /// <param name="layer">The layer to process.</param>
        /// <param name="input">The input to this layer.</param>
        /// <param name="source">The source synapse.</param>
        private void Compute(NeuralOutputHolder holder, ILayer layer,
                             INeuralData input, ISynapse source)
        {
            try
            {
#if logging
                if (FeedforwardLogic.logger.IsDebugEnabled)
                {
                    FeedforwardLogic.logger.Debug("Processing layer: "
                                                  + layer.ToString()
                                                  + ", input= "
                                                  + input.ToString());
                }
#endif

                // typically used to process any recurrent layers that feed into this
                // layer.
                PreprocessLayer(layer, input, source);

                foreach (ISynapse synapse in layer.Next)
                {
                    if (!holder.Result.ContainsKey(synapse))
                    {
#if logging
                        if (FeedforwardLogic.logger.IsDebugEnabled)
                        {
                            FeedforwardLogic.logger.Debug("Processing synapse: " + synapse.ToString());
                        }
#endif
                        INeuralData pattern = synapse.Compute(input);
                        pattern = synapse.ToLayer.Compute(pattern);
                        synapse.ToLayer.Process(pattern);
                        holder.Result[synapse] = input;
                        Compute(holder, synapse.ToLayer, pattern, synapse);

                        ILayer outputLayer = this.network.GetLayer(BasicNetwork.TAG_OUTPUT);

                        // Is this the output from the entire network?
                        if (synapse.ToLayer == outputLayer)
                        {
                            holder.Output = pattern;
                        }
                    }
                }
            }
            catch (IndexOutOfRangeException ex)
            {
                throw new NeuralNetworkError("Size mismatch on input of size "
                    + input.Count + " and layer: " + layer.ToString(), ex);
            }
        }
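
        // --- Hedged illustration (hypothetical SketchEdge/SketchNode types, not
        // the Encog API; assumes "using System" and
        // "using System.Collections.Generic" are present in this file): the same
        // traversal idea as the private Compute above. Each outgoing edge is
        // handled once, and edges already recorded in "visited" are skipped,
        // which is what keeps recurrent (cyclic) connections from recursing
        // forever.
        private sealed class SketchEdge
        {
            public readonly SketchNode To;
            public readonly Func<double[], double[]> Apply;   // stands in for synapse.Compute

            public SketchEdge(SketchNode to, Func<double[], double[]> apply)
            {
                this.To = to;
                this.Apply = apply;
            }
        }

        private sealed class SketchNode
        {
            public readonly List<SketchEdge> Next = new List<SketchEdge>();
        }

        private static void ForwardSketch(IDictionary<SketchEdge, double[]> visited,
                                          SketchNode layer, double[] input)
        {
            foreach (SketchEdge edge in layer.Next)
            {
                if (visited.ContainsKey(edge))
                {
                    continue;                              // synapse already processed
                }

                double[] pattern = edge.Apply(input);      // synapse.Compute(input)
                visited[edge] = input;                     // holder.Result[synapse] = input
                ForwardSketch(visited, edge.To, pattern);  // descend into the next layer
            }
        }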
        /// <summary>
        /// Handle recurrent layers.  See if there are any recurrent layers before
        /// the specified layer that must affect the input.
        /// </summary>
        /// <param name="layer">The layer being processed, see if there are any recurrent
        /// connections to this.</param>
        /// <param name="input">The input to the layer, will be modified with the result
        /// from any recurrent layers.</param>
        /// <param name="source">The source synapse.</param>
        public override void PreprocessLayer(ILayer layer,
                                             INeuralData input, ISynapse source)
        {
            foreach (ISynapse synapse in
                     this.Network.Structure.GetPreviousSynapses(layer))
            {
                if (synapse != source)
                {
#if logging
                    if (SimpleRecurrentLogic.logger.IsDebugEnabled)
                    {
                        SimpleRecurrentLogic.logger.Debug("Recurrent layer from: " + input.ToString());
                    }
#endif
                    INeuralData recurrentInput = synapse.FromLayer.Recur();

                    if (recurrentInput != null)
                    {
                        INeuralData recurrentOutput = synapse
                                                      .Compute(recurrentInput);

                        for (int i = 0; i < input.Count; i++)
                        {
                            input[i] = input[i]
                                       + recurrentOutput[i];
                        }
#if logging
                        if (SimpleRecurrentLogic.logger.IsDebugEnabled)
                        {
                            SimpleRecurrentLogic.logger.Debug("Recurrent layer to: " + input.ToString());
                        }
#endif
                    }
                }
            }
        }
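
        // --- Hedged sketch (plain arrays, illustration only, not part of the
        // original source): what the loop in PreprocessLayer does for a single
        // recurrent connection. The recurrent layer's output is simply added
        // element-wise into the pattern that is entering the layer.
        private static void RecurrentFoldInSketch()
        {
            double[] input = { 0.25, 0.75 };            // pattern arriving at the layer
            double[] recurrentOutput = { 0.10, -0.05 }; // synapse.Compute(FromLayer.Recur())

            for (int i = 0; i < input.Length; i++)
            {
                input[i] += recurrentOutput[i];         // fold in the context contribution
            }

            // input is now { 0.35, 0.70 }, mirroring the loop in PreprocessLayer above.
        }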