Example #1
        /// <summary>
        /// Not supported. Will throw an error.
        /// </summary>
        /// <param name="inputData">Not used.</param>
        /// <param name="idealData">Not used.</param>
        public void Add(INeuralData inputData, INeuralData idealData)
        {
#if logging
            UnionNeuralDataSet.logger.Error(ADD_ERROR);
#endif
            throw new NeuralDataError(ADD_ERROR);
        }
Example #2
        /// <inheritdoc/>
        public void Compute(double[] input, double[] output)
        {
            BasicNeuralData input2  = new BasicNeuralData(input);
            INeuralData     output2 = this.Compute(input2);

            EngineArray.ArrayCopy(output2.Data, output);
        }
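
A minimal usage sketch (not part of the original listing) for the array-based overload above; the `network` field, its name, and the 2-input/1-output sizing are illustrative assumptions only.

        // Hypothetical caller: the array lengths must match the network's input/output counts.
        public void PrintFirstOutput(double[] input)
        {
            double[] output = new double[1];     // assumed single output neuron
            network.Compute(input, output);      // fills `output` in place, as shown above
            Console.WriteLine("First output: " + output[0]);
        }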
Example #3
        /// <summary>
        /// Not supported.  Will throw an error.
        /// </summary>
        /// <param name="data1">Not used.</param>
        public void Add(INeuralData data1)
        {
#if logging
            UnionNeuralDataSet.logger.Error(ADD_ERROR);
#endif
            throw new NeuralDataError(ADD_ERROR);
        }
Example #4
        /// <summary>
        /// Propagate the layer.
        /// </summary>
        /// <param name="synapse">The synapse for this layer.</param>
        /// <param name="input">The input pattern.</param>
        /// <param name="output">The output pattern.</param>
        /// <returns>True if the network has become stable.</returns>
        private bool PropagateLayer(ISynapse synapse, INeuralData input,
                                    INeuralData output)
        {
            int  i, j;
            int  sum, outt = 0;
            bool stable;

            stable = true;

            for (i = 0; i < output.Count; i++)
            {
                sum = 0;
                for (j = 0; j < input.Count; j++)
                {
                    sum += (int)(GetWeight(synapse, input, i, j) * input[j]);
                }
                if (sum != 0)
                {
                    if (sum < 0)
                    {
                        outt = -1;
                    }
                    else
                    {
                        outt = 1;
                    }
                    if (outt != (int)output[i])
                    {
                        stable    = false;
                        output[i] = outt;
                    }
                }
            }
            return(stable);
        }
Example #5
            /// <summary>
            /// Move to the next object.
            /// </summary>
            /// <returns>True if there is a next object.</returns>
            public bool MoveNext()
            {
                if (!this.results.NextResult())
                {
                    return(false);
                }
                INeuralData input = new BasicNeuralData(owner.inputSize);
                INeuralData ideal = null;

                for (int i = 1; i <= owner.inputSize; i++)
                {
                    input[i - 1] = this.results.GetDouble(i);
                }

                if (owner.idealSize > 0)
                {
                    ideal =
                        new BasicNeuralData(owner.idealSize);
                    for (int i = 1; i <= owner.idealSize; i++)
                    {
                        ideal[i - 1] =
                            this.results.GetDouble(i + owner.inputSize);
                    }
                }

                this.current = new BasicNeuralDataPair(input, ideal);
                return(true);
            }
Example #6
        /// <summary>
        /// Compute the output for a given input to the neural network. This method
        /// provides a parameter to specify an output holder to use.  This holder
        /// allows propagation training to track the output from each layer.
        /// If you do not need this holder, pass null or use the other
        /// Compute method.
        /// </summary>
        /// <param name="input">The input provided to the neural network.</param>
        /// <param name="useHolder">Allows a holder to be specified, this allows
        /// propagation training to check the output of each layer.</param>
        /// <returns>The results from the output neurons.</returns>
        public virtual INeuralData Compute(INeuralData input,
                                           NeuralOutputHolder useHolder)
        {
            NeuralOutputHolder holder;

            ILayer inputLayer = this.network.GetLayer(BasicNetwork.TAG_INPUT);

#if logging
            if (FeedforwardLogic.logger.IsDebugEnabled)
            {
                FeedforwardLogic.logger.Debug("Pattern " + input.ToString()
                                              + " presented to neural network");
            }
#endif

            if (useHolder == null && this.network.Structure.Flat != null)
            {
                this.network.Structure.UpdateFlatNetwork();
                INeuralData result = new BasicNeuralData(this.network.Structure.Flat.OutputCount);
                this.network.Structure.Flat.Compute(input.Data, result.Data);
                return(result);
            }

            if (useHolder == null)
            {
                holder = new NeuralOutputHolder();
            }
            else
            {
                holder = useHolder;
            }

            Compute(holder, inputLayer, input, null);
            return(holder.Output);
        }
Example #7
        /// <summary>
        /// Perform one training iteration.
        /// </summary>
        public override void Iteration()
        {
            if (this.mustInit)
            {
                InitWeight();
            }

            ErrorCalculation error = new ErrorCalculation();

            foreach (INeuralDataPair pair in this.training)
            {
                INeuralData output = this.parts.InstarSynapse.Compute(
                    pair.Input);
                int j = this.parts.Winner(output);
                for (int i = 0; i < this.parts.OutstarLayer.NeuronCount; i++)
                {
                    double delta = this.learningRate
                                   * (pair.Ideal[i] - this.parts
                                      .OutstarSynapse.WeightMatrix[j, i]);
                    this.parts.OutstarSynapse.WeightMatrix.Add(j, i, delta);
                }

                error.UpdateError(output.Data, pair.Ideal.Data);
            }

            this.Error = error.Calculate();
        }
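
A hedged sketch (not from the original listing) of the usual way an Iteration method like the one above is driven; `train` stands for whatever object exposes Iteration() and the Error property.

        // Hypothetical training loop: run epochs until the reported error is low enough.
        int epoch = 1;
        do
        {
            train.Iteration();
            Console.WriteLine("Epoch #" + epoch + " Error: " + train.Error);
            epoch++;
        } while (train.Error > 0.01 && epoch <= 1000);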
Example #8
        /// <summary>
        /// Compute the values before sending output to the next layer.
        /// This function allows the activation functions to be called.
        /// </summary>
        /// <param name="pattern">The incoming Project.</param>
        /// <returns>The output from this layer.</returns>
        public override INeuralData Compute(INeuralData pattern)
        {
            INeuralData result = new BasicNeuralData(NeuronCount);

            for (int i = 0; i < NeuronCount; i++)
            {
                if (this.radialBasisFunction[i] == null)
                {
                    String str =
                        "Error, must define radial functions for each neuron";
#if logging
                    if (RadialBasisFunctionLayer.logger.IsErrorEnabled)
                    {
                        RadialBasisFunctionLayer.logger.Error(str);
                    }
#endif
                    throw new NeuralNetworkError(str);
                }

                IRadialBasisFunction f = this.radialBasisFunction[i];

                if (pattern.Data.Length != f.Dimensions)
                {
                    throw new Exception("Inputs must equal the number of dimensions.");
                }

                result[i] = f.Calculate(pattern.Data);
            }

            return(result);
        }
Example #9
        /// <summary>
        /// Encode the Encog dataset.
        /// </summary>
        /// <param name="training">The training data.</param>
        /// <param name="outputIndex">The ideal element to use, this is necessary becase SVM's have
        /// only a single output.</param>
        /// <returns>The SVM problem.</returns>
        public static svm_problem Encode(INeuralDataSet training, int outputIndex)
        {
            svm_problem result = new svm_problem();

            result.l = (int)ObtainTrainingLength(training);

            result.y = new double[result.l];
            result.x = new svm_node[result.l][];

            int elementIndex = 0;

            foreach (INeuralDataPair pair in training)
            {
                INeuralData input  = pair.Input;
                INeuralData output = pair.Ideal;
                result.x[elementIndex] = new svm_node[input.Count];

                for (int i = 0; i < input.Count; i++)
                {
                    result.x[elementIndex][i]               = new svm_node();
                    result.x[elementIndex][i].index         = i + 1;
                    result.x[elementIndex][i].value_Renamed = input[i];
                }

                result.y[elementIndex] = output[outputIndex];

                elementIndex++;
            }

            return(result);
        }
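
A hedged usage sketch (not part of the original listing) showing how Encode could be fed an in-memory dataset; the XOR arrays and the BasicNeuralDataSet constructor are assumptions used only for illustration.

        // Hypothetical caller: encode an in-memory training set, using ideal element 0.
        double[][] input = { new[] { 0.0, 0.0 }, new[] { 0.0, 1.0 },
                             new[] { 1.0, 0.0 }, new[] { 1.0, 1.0 } };
        double[][] ideal = { new[] { 0.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { 0.0 } };
        INeuralDataSet training = new BasicNeuralDataSet(input, ideal);
        svm_problem problem = Encode(training, 0);   // the static method shown above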
Example #10
        /// <summary>
        /// Compute the output for a given input to the neural network. This method
        /// provides a parameter to specify an output holder to use.  This holder
        /// allows propagation training to track the output from each layer.
        /// If you do not need this holder, pass null or use the other
        /// Compute method.
        /// </summary>
        /// <param name="input">The input provided to the neural network.</param>
        /// <param name="useHolder">Allows a holder to be specified, this allows
        /// propagation training to check the output of each layer.</param>
        /// <returns>The results from the output neurons.</returns>
        public virtual INeuralData Compute(INeuralData input,
                 NeuralOutputHolder useHolder)
        {
            NeuralOutputHolder holder;

            ILayer inputLayer = this.network.GetLayer(BasicNetwork.TAG_INPUT);

#if logging
            if (FeedforwardLogic.logger.IsDebugEnabled)
            {
                FeedforwardLogic.logger.Debug("Pattern " + input.ToString()
                    + " presented to neural network");
            }
#endif

            if (useHolder == null && this.network.Structure.Flat != null)
            {
                this.network.Structure.UpdateFlatNetwork();
                INeuralData result = new BasicNeuralData(this.network.Structure.Flat.OutputCount);
                this.network.Structure.Flat.Compute(input.Data, result.Data);
                return result;
            }

            if (useHolder == null)
            {
                holder = new NeuralOutputHolder();
            }
            else
            {
                holder = useHolder;
            }

            Compute(holder, inputLayer, input, null);
            return holder.Output;
        }
Example #11
 /// <summary>
 /// Construct a context layer with the parameters specified.
 /// </summary>
 /// <param name="thresholdFunction">The threshold function to use.</param>
 /// <param name="hasThreshold">Does this layer have thresholds?</param>
 /// <param name="neuronCount">The neuron count to use.</param>
 public ContextLayer(IActivationFunction thresholdFunction,
                     bool hasThreshold, int neuronCount)
     : base(thresholdFunction, hasThreshold, neuronCount)
 {
     this.FlatContextIndex = -1;
     this.context          = new BasicNeuralData(neuronCount);
 }
Example #12
        /// <summary>
        /// Calculate the best matching unit (BMU). This is the output neuron that
        /// has the lowest Euclidean distance to the input vector.
        /// </summary>
        /// <param name="synapse">The synapse to calculate for.</param>
        /// <param name="input">The input vector.</param>
        /// <returns>The output neuron number that is the BMU.</returns>
        public int CalculateBMU(ISynapse synapse, INeuralData input)
        {
            int result = 0;

            // Track the lowest distance so far.
            double lowestDistance = double.MaxValue;

            for (int i = 0; i < this.training.OutputNeuronCount; i++)
            {
                double distance = CalculateEuclideanDistance(synapse, input,
                                                             i);

                // Track the lowest distance, this is the BMU.
                if (distance < lowestDistance)
                {
                    lowestDistance = distance;
                    result         = i;
                }
            }

            // Track the worst distance, this is the error for the entire network.
            if (lowestDistance > this.worstDistance)
            {
                this.worstDistance = lowestDistance;
            }

            return(result);
        }
Example #13
        /// <summary>
        /// Calculate the best matching unit (BMU). This is the output neuron that
        /// has the lowest Euclidean distance to the input vector.
        /// </summary>
        /// <param name="synapse">The synapse to calculate for.</param>
        /// <param name="input">The input vector.</param>
        /// <returns>The output neuron number that is the BMU.</returns>
        public int CalculateBMU(ISynapse synapse, INeuralData input)
        {
            int result = 0;

            // Track the lowest distance so far.
            double lowestDistance = double.MaxValue;

            for (int i = 0; i < this.training.OutputNeuronCount; i++)
            {
                double distance = CalculateEuclideanDistance(synapse, input,
                       i);

                // Track the lowest distance, this is the BMU.
                if (distance < lowestDistance)
                {
                    lowestDistance = distance;
                    result = i;
                }
            }

            // Track the worst distance, this is the error for the entire network.
            if (lowestDistance > this.worstDistance)
            {
                this.worstDistance = lowestDistance;
            }

            return result;
        }
Example #14
        /// <summary>
        /// Add only input data, for an unsupervised dataset.
        /// </summary>
        /// <param name="data1">The data to be added.</param>
        public void Add(INeuralData data1)
        {
            if (!this.loading)
            {
                throw new NeuralDataError(BufferedNeuralDataSet.ERROR_ADD);
            }

            egb.Write(data1.Data);
        }
Example #15
        /// <summary>
        /// Perform one Hopfield iteration.
        /// </summary>
        public void Run()
        {
            INeuralData temp = this.Compute(this.CurrentState, null);

            for (int i = 0; i < temp.Count; i++)
            {
                this.CurrentState.SetBoolean(i, temp[i] > 0);
            }
        }
Example #16
        /// <summary>
        /// Add the specified input and ideal object to the collection.
        /// </summary>
        /// <param name="inputData">The image to train with.</param>
        /// <param name="idealData">The expected otuput form this image.</param>
        public override void Add(INeuralData inputData, INeuralData idealData)
        {
            if (!(inputData is ImageNeuralData))
            {
                throw new NeuralNetworkError(ImageNeuralDataSet.MUST_USE_IMAGE);
            }

            base.Add(inputData, idealData);
        }
Example #17
        /// <summary>
        /// Add both the input and ideal data.
        /// </summary>
        /// <param name="inputData">The input data.</param>
        /// <param name="idealData">The ideal data.</param>
        public void Add(INeuralData inputData, INeuralData idealData)
        {
            if (!this.loading)
            {
                throw new NeuralDataError(BufferedNeuralDataSet.ERROR_ADD);
            }

            this.egb.Write(inputData.Data);
            this.egb.Write(idealData.Data);
        }
Example #18
 /// <summary>
 /// Train for the specified synapse and BMU.
 /// </summary>
 /// <param name="bmu">The best matching unit for this input.</param>
 /// <param name="synapse">The synapse to train.</param>
 /// <param name="input">The input to train for.</param>
 private void Train(int bmu, ISynapse synapse,
                    INeuralData input)
 {
     // adjust the weight for the BMU and its neighborhood
     for (int outputNeuron = 0; outputNeuron < this.outputNeuronCount;
          outputNeuron++)
     {
         TrainPattern(synapse, input, outputNeuron, bmu);
     }
 }
Example #19
 /// <summary>
 /// Copy the specified input pattern to the weight matrix. This causes an
 /// output neuron to learn this pattern "exactly". This is useful when a
 /// winner is to be forced.
 /// </summary>
 /// <param name="synapse">The synapse that is the target of the copy.</param>
 /// <param name="outputNeuron">The output neuron to set.</param>
 /// <param name="input">The input pattern to copy.</param>
 private void CopyInputPattern(ISynapse synapse,
                               int outputNeuron, INeuralData input)
 {
     for (int inputNeuron = 0; inputNeuron < this.inputNeuronCount;
          inputNeuron++)
     {
         synapse.WeightMatrix[inputNeuron, outputNeuron] =
             input[inputNeuron];
     }
 }
Example #20
 /// <summary>
 /// Train the specified pattern.  Find a winning neuron and adjust all
 /// neurons according to the neighborhood function.
 /// </summary>
 /// <param name="pattern">The pattern to train.</param>
 public void TrainPattern(INeuralData pattern)
 {
     foreach (ISynapse synapse in this.synapses)
     {
         INeuralData input = pattern;
         int         bmu   = this.bmuUtil.CalculateBMU(synapse, input);
         Train(bmu, synapse, input);
     }
     ApplyCorrection();
 }
Example #21
 /// <summary>
 /// Get the specified weight.
 /// </summary>
 /// <param name="synapse">The synapse to get the weight from.</param>
 /// <param name="input">The input, to obtain the size from.</param>
 /// <param name="x">The x matrix value. (could be row or column, depending on input)</param>
 /// <param name="y">The y matrix value. (could be row or column, depending on input)</param>
 /// <returns>The value from the matrix.</returns>
 private double GetWeight(ISynapse synapse, INeuralData input, int x, int y)
 {
     if (synapse.FromNeuronCount != input.Count)
     {
         return(synapse.WeightMatrix[x, y]);
     }
     else
     {
         return(synapse.WeightMatrix[y, x]);
     }
 }
Example #22
        /// <summary>
        /// Convert regular Encog NeuralData into the "sparse" data needed by an SVM.
        /// </summary>
        /// <param name="data">The data to convert.</param>
        /// <returns>The SVM sparse data.</returns>
        public svm_node[] MakeSparse(INeuralData data)
        {
            svm_node[] result = new svm_node[data.Count];
            for (int i = 0; i < data.Count; i++)
            {
                result[i]               = new svm_node();
                result[i].index         = i + 1;
                result[i].value_Renamed = data[i];
            }

            return(result);
        }
Example #23
    public void testNetwork(double[][] networkOutputs)
    {
      for (int i = 0; i < networkOutputs.Length; i++)
      {
        //INeuralData data = new BasicNeuralData(networkInput[i]);
        //networkOutputs[i] = (double[])network.Compute(data).Data.Clone();

        BasicNeuralData inputs = new BasicNeuralData(networkInput[i]);
        INeuralData output = network.Compute(inputs);
        EngineArray.ArrayCopy(output.Data, networkOutputs[i]);
      }
    }
Example #24
        /// <summary>
        /// Calculate the error for this neural network. The error is calculated
        /// using root-mean-square (RMS).
        /// </summary>
        /// <param name="data">The training set.</param>
        /// <returns>The error percentage.</returns>
        public double CalculateError(INeuralDataSet data)
        {
            ClearContext();
            ErrorCalculation errorCalculation = new ErrorCalculation();

            foreach (INeuralDataPair pair in data)
            {
                INeuralData actual = Compute(pair.Input);
                errorCalculation.UpdateError(actual.Data, pair.Ideal.Data);
            }
            return(errorCalculation.Calculate());
        }
Example #25
 /// <summary>
 /// Evaluate the network and display (to the console) the output for every
 /// value in the training set. Displays ideal and actual.
 /// </summary>
 /// <param name="network">The network to evaluate.</param>
 /// <param name="training">The training set to evaluate.</param>
 public static void Evaluate(BasicNetwork network,
                             INeuralDataSet training)
 {
     foreach (INeuralDataPair pair in training)
     {
         INeuralData output = network.Compute(pair.Input);
         Console.WriteLine("Input="
                           + EncogUtility.FormatNeuralData(pair.Input)
                           + ", Actual=" + EncogUtility.FormatNeuralData(output)
                           + ", Ideal="
                           + EncogUtility.FormatNeuralData(pair.Ideal));
     }
 }
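
A hedged one-line usage sketch (not in the original listing); `network` and `training` are assumed to already exist as a trained BasicNetwork and an INeuralDataSet.

 // Hypothetical caller: dump Input/Actual/Ideal lines to the console for inspection.
 EncogUtility.Evaluate(network, training);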
Example #26
        /// <summary>
        /// Compute the output for the given input.
        /// </summary>
        /// <param name="input">The input to the SVM.</param>
        /// <returns>The results from the SVM.</returns>
        public override INeuralData Compute(INeuralData input)
        {
            INeuralData result = new BasicNeuralData(this.outputCount);

            svm_node[] formattedInput = MakeSparse(input);

            for (int i = 0; i < this.outputCount; i++)
            {
                double d = svm.svm_predict(this.models[i], formattedInput);
                result[i] = d;
            }
            return(result);
        }
Example #27
        /// <summary>
        /// Setup the network logic, read parameters from the network.
        /// NOT USED, call the run method.
        /// </summary>
        /// <param name="input">Not used</param>
        /// <param name="useHolder">Not used</param>
        /// <returns>Not used</returns>
        public override INeuralData Compute(INeuralData input, NeuralOutputHolder useHolder)
        {
            String str = "Compute on BasicNetwork cannot be used, rather call" +
                         " the run method on the logic class.";

#if logging
            if (logger.IsErrorEnabled)
            {
                logger.Error(str);
            }
#endif
            throw new NeuralNetworkError(str);
        }
Example #28
        /// <summary>
        /// Called to process input from the previous layer. Simply store the output
        /// in the context.
        /// </summary>
        /// <param name="pattern">The pattern to store in the context.</param>
        public override void Process(INeuralData pattern)
        {
            double[] target = this.context.Data;
            double[] source = pattern.Data;

            Array.Copy(source, target, source.Length);

#if logging
            if (ContextLayer.logger.IsDebugEnabled)
            {
                ContextLayer.logger.Debug("Updated ContextLayer to " + pattern);
            }
#endif
        }
Example #29
 /// <summary>
 /// Compute the output for a given input to the neural network. This method
 /// provides a parameter to specify an output holder to use.  This holder
 /// allows propagation training to track the output from each layer.
 /// If you do not need this holder, pass null or use the other
 /// Compute method.
 /// </summary>
 /// <param name="input">The input provided to the neural network.</param>
 /// <param name="useHolder">Allows a holder to be specified, this allows
 /// propagation training to check the output of each layer.</param>
 /// <returns>The results from the output neurons.</returns>
 public virtual INeuralData Compute(INeuralData input,
                                    NeuralOutputHolder useHolder)
 {
     try
     {
         return(logic.Compute(input, useHolder));
     }
     catch (IndexOutOfRangeException ex)
     {
         throw new NeuralNetworkError(
                   "Index exception: there was likely a mismatch between layer sizes, or the size of the input presented to the network.",
                   ex);
     }
 }
Example #30
        /// <summary>
        /// Format neural data as a list of numbers.
        /// </summary>
        /// <param name="data">The neural data to format.</param>
        /// <returns>The formatted neural data.</returns>
        private static String FormatNeuralData(INeuralData data)
        {
            StringBuilder result = new StringBuilder();

            for (int i = 0; i < data.Count; i++)
            {
                if (i != 0)
                {
                    result.Append(',');
                }
                result.Append(Format.FormatDouble(data[i], 4));
            }
            return(result.ToString());
        }
Example #31
        /// <summary>
        /// Calculate the winning neuron from the data, this is the neuron
        /// that has the highest output.
        /// </summary>
        /// <param name="data">The data to use to determine the winning neuron.</param>
        /// <returns>The winning neuron index, or -1 if no winner.</returns>
        public int Winner(INeuralData data)
        {
            int winner = -1;

            for (int i = 0; i < data.Count; i++)
            {
                if (winner == -1 || data[i] > data[winner])
                {
                    winner = i;
                }
            }

            return(winner);
        }
Example #32
        /// <summary>
        /// Internal computation method for a single layer.  This is called,
        /// as the neural network processes.
        /// </summary>
        /// <param name="holder">The output holder.</param>
        /// <param name="layer">The layer to process.</param>
        /// <param name="input">The input to this layer.</param>
        /// <param name="source">The source synapse.</param>
        private void Compute(NeuralOutputHolder holder, ILayer layer,
                             INeuralData input, ISynapse source)
        {
            try
            {
#if logging
                if (FeedforwardLogic.logger.IsDebugEnabled)
                {
                    FeedforwardLogic.logger.Debug("Processing layer: "
                                                  + layer.ToString()
                                                  + ", input= "
                                                  + input.ToString());
                }
#endif

                // typically used to process any recurrent layers that feed into this
                // layer.
                PreprocessLayer(layer, input, source);

                foreach (ISynapse synapse in layer.Next)
                {
                    if (!holder.Result.ContainsKey(synapse))
                    {
#if logging
                        if (FeedforwardLogic.logger.IsDebugEnabled)
                        {
                            FeedforwardLogic.logger.Debug("Processing synapse: " + synapse.ToString());
                        }
#endif
                        INeuralData pattern = synapse.Compute(input);
                        pattern = synapse.ToLayer.Compute(pattern);
                        synapse.ToLayer.Process(pattern);
                        holder.Result[synapse] = input;
                        Compute(holder, synapse.ToLayer, pattern, synapse);

                        ILayer outputLayer = this.network.GetLayer(BasicNetwork.TAG_OUTPUT);

                        // Is this the output from the entire network?
                        if (synapse.ToLayer == outputLayer)
                        {
                            holder.Output = pattern;
                        }
                    }
                }
            }
            catch (IndexOutOfRangeException ex)
            {
                throw new NeuralNetworkError("Size mismatch on input of size " + input.Count + " and layer: ", ex);
            }
        }
Example #33
        /// <summary>
        /// Train the neural network for the specified pattern. The neural network
        /// can be trained for more than one pattern. To do this simply call the
        /// train method more than once.
        /// </summary>
        /// <param name="pattern">The pattern to train for.</param>
        public void AddPattern(INeuralData pattern)
        {

            // Create a row matrix from the input, convert boolean to bipolar
            Matrix m2 = Matrix.CreateRowMatrix(pattern.Data);
            // Transpose the matrix and multiply by the original input matrix
            Matrix m1 = MatrixMath.Transpose(m2);
            Matrix m3 = MatrixMath.Multiply(m1, m2);

            // matrix 3 should be square by now, so create an identity
            // matrix of the same size.
            Matrix identity = MatrixMath.Identity(m3.Rows);

            // subtract the identity matrix
            Matrix m4 = MatrixMath.Subtract(m3, identity);

            // now add the calculated matrix, for this pattern, to the
            // existing weight matrix.
            ConvertHopfieldMatrix(m4);
        }
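
A hedged sketch (not part of the original listing) of storing patterns with AddPattern; `hopfield` stands for the object that defines the method, and plain BasicNeuralData with explicit +1/-1 values is used purely for illustration.

        // Hypothetical caller: store two bipolar patterns in the Hopfield weight matrix.
        hopfield.AddPattern(new BasicNeuralData(new double[] {  1, -1,  1, -1 }));
        hopfield.AddPattern(new BasicNeuralData(new double[] { -1,  1, -1,  1 }));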
Example #34
        /// <summary>
        /// Handle recurrent layers.  See if there are any recurrent layers before
        /// the specified layer that must affect the input.
        /// </summary>
        /// <param name="layer">The layer being processed, see if there are any recurrent
        /// connections to this.</param>
        /// <param name="input">The input to the layer, will be modified with the result
        /// from any recurrent layers.</param>
        /// <param name="source">The source synapse.</param>
        public override void PreprocessLayer(ILayer layer,
                 INeuralData input, ISynapse source)
        {
            foreach (ISynapse synapse in
                     this.Network.Structure.GetPreviousSynapses(layer))
            {
                if (synapse != source)
                {
#if logging
                    if (SimpleRecurrentLogic.logger.IsDebugEnabled)
                    {
                        SimpleRecurrentLogic.logger.Debug("Recurrent layer from: " + input.ToString());
                    }
#endif
                    INeuralData recurrentInput = synapse.FromLayer.Recur();

                    if (recurrentInput != null)
                    {
                        INeuralData recurrentOutput = synapse
                               .Compute(recurrentInput);

                        for (int i = 0; i < input.Count; i++)
                        {
                            input[i] = input[i]
                                    + recurrentOutput[i];
                        }
#if logging
                        if (SimpleRecurrentLogic.logger.IsDebugEnabled)
                        {
                            SimpleRecurrentLogic.logger.Debug("Recurrent layer to: " + input.ToString());
                        }
#endif
                    }
                }
            }
        }
Example #35
        /// <summary>
        /// Compute the weighted output from this synapse. Each neuron
        /// in the from layer has a weighted connection to each of the
        /// neurons in the next layer. 
        /// </summary>
        /// <param name="input">The input from the synapse.</param>
        /// <returns>The output from this synapse.</returns>
        public override INeuralData Compute(INeuralData input)
        {
            INeuralData result = new BasicNeuralData(this.ToNeuronCount);

            double[] inputArray = input.Data;
            double[][] matrixArray = this.WeightMatrix.Data;
            double[] resultArray = result.Data;

            for (int i = 0; i < this.ToNeuronCount; i++)
            {
                double sum = 0;
                for (int j = 0; j < inputArray.Length; j++)
                {
                    sum += inputArray[j] * matrixArray[j][i];
                }
                resultArray[i] = sum;
            }
            return result;
        }
Example #36
        /// <summary>
        /// Compute the output from this synapse. 
        /// </summary>
        /// <param name="input">The input to this synapse.</param>
        /// <returns>The output from this synapse.</returns>
        public INeuralData Compute(INeuralData input)
        {
            INeuralData result = new BasicNeuralData(ToNeuronCount);

            if (this.neurons.Count == 0)
            {
                throw new NeuralNetworkError("This network has not been evolved yet, it has no neurons in the NEAT synapse.");
            }

            int flushCount = 1;

            if (snapshot)
            {
                flushCount = networkDepth;
            }

            // iterate through the network FlushCount times
            for (int i = 0; i < flushCount; ++i)
            {
                int outputIndex = 0;
                int index = 0;

                result.Clear();

                // populate the input neurons
                while (neurons[index].NeuronType == NEATNeuronType.Input)
                {
                    neurons[index].Output = input[index];

                    index++;
                }

                // set the bias neuron
                neurons[index++].Output = 1;

                while (index < neurons.Count)
                {

                    NEATNeuron currentNeuron = neurons[index];

                    double sum = 0;

                    foreach (NEATLink link in currentNeuron.InboundLinks)
                    {
                        double weight = link.Weight;
                        double neuronOutput = link.FromNeuron.Output;
                        sum += weight * neuronOutput;
                    }

                    double[] d = new double[1];
                    d[0] = sum / currentNeuron.ActivationResponse;
                    activationFunction.ActivationFunction(d,0,1);

                    neurons[index].Output = d[0];

                    if (currentNeuron.NeuronType == NEATNeuronType.Output)
                    {
                        result.Data[outputIndex++] = currentNeuron.Output;
                    }
                    index++;
                }
            }

            return result;
        }
Example #37
 /// <summary>
 /// Can be overridden by subclasses.  Usually used to implement recurrent 
 /// layers. 
 /// </summary>
 /// <param name="layer">The layer to process.</param>
 /// <param name="input">The input to this layer.</param>
 /// <param name="source">The source from this layer.</param>
 virtual public void PreprocessLayer(ILayer layer, INeuralData input, ISynapse source)
 {
     // nothing to do		
 }
Example #38
        /// <summary>
        /// Compute the output for the given input.
        /// </summary>
        /// <param name="input">The input to the SVM.</param>
        /// <param name="useHolder">The output holder to use.</param>
        /// <returns>The results from the SVM.</returns>
        public override INeuralData Compute(INeuralData input,
                NeuralOutputHolder useHolder)
        {

            useHolder.Output = Compute(input);
            return useHolder.Output;
        }
Example #39
        /// <summary>
        /// Not supported.
        /// </summary>
        /// <param name="data1">Not used.</param>
        public void Add(INeuralData data1)
        {
            throw new TrainingError(FoldedDataSet.ADD_NOT_SUPPORTED);

        }
Example #40
 /// <summary>
 /// Train for the specified synapse and BMU.
 /// </summary>
 /// <param name="bmu">The best matching unit for this input.</param>
 /// <param name="synapse">The synapse to train.</param>
 /// <param name="input">The input to train for.</param>
 private void Train(int bmu, ISynapse synapse,
          INeuralData input)
 {
     // adjust the weight for the BMU and its neighborhood
     for (int outputNeuron = 0; outputNeuron < this.outputNeuronCount;
         outputNeuron++)
     {
         TrainPattern(synapse, input, outputNeuron, bmu);
     }
 }
Example #41
 /// <summary>
 /// Compute the output for a given input to the neural network. This method
 /// provides a parameter to specify an output holder to use.  This holder
 /// allows propagation training to track the output from each layer.
 /// If you do not need this holder, pass null or use the other
 /// Compute method.
 /// </summary>
 /// <param name="input">The input provided to the neural network.</param>
 /// <param name="useHolder">Allows a holder to be specified, this allows
 /// propagation training to check the output of each layer.</param>
 /// <returns>The results from the output neurons.</returns>
 public virtual INeuralData Compute(INeuralData input,
          NeuralOutputHolder useHolder)
 {
     try
     {
         return logic.Compute(input, useHolder);
     }
     catch (IndexOutOfRangeException ex)
     {
         throw new NeuralNetworkError(
                 "Index exception: there was likely a mismatch between layer sizes, or the size of the input presented to the network.",
                 ex);
     }
 }
Example #42
 /// <summary>
 /// Compute the output for a given input to the neural network.
 /// </summary>
 /// <param name="input">The input to the neural network.</param>
 /// <returns>The output from the neural network.</returns>
 public virtual INeuralData Compute(INeuralData input)
 {
     return Compute(input, null);
 }
Example #43
 /// <summary>
 /// Compute the output from this synapse.
 /// </summary>
 /// <param name="input">The input to this synapse.</param>
 /// <returns>The output from this synapse.</returns>
 public abstract INeuralData Compute(INeuralData input);
Example #44
 /// <summary>
 /// Get the specified weight.
 /// </summary>
 /// <param name="synapse">The synapse to get the weight from.</param>
 /// <param name="input">The input, to obtain the size from.</param>
 /// <param name="x">The x matrix value. (could be row or column, depending on input)</param>
 /// <param name="y">The y matrix value. (could be row or column, depending on input)</param>
 /// <returns>The value from the matrix.</returns>
 private double GetWeight(ISynapse synapse, INeuralData input, int x, int y)
 {
     if (synapse.FromNeuronCount != input.Count)
         return synapse.WeightMatrix[x, y];
     else
         return synapse.WeightMatrix[y, x];
 }
Example #45
        /// <summary>
        /// Propagate the layer.
        /// </summary>
        /// <param name="synapse">The synapse for this layer.</param>
        /// <param name="input">The input pattern.</param>
        /// <param name="output">The output pattern.</param>
        /// <returns>True if the network has become stable.</returns>
        private bool PropagateLayer(ISynapse synapse, INeuralData input,
                INeuralData output)
        {
            int i, j;
            int sum, outt = 0;
            bool stable;

            stable = true;

            for (i = 0; i < output.Count; i++)
            {
                sum = 0;
                for (j = 0; j < input.Count; j++)
                {
                    sum += (int)(GetWeight(synapse, input, i, j) * input[j]);
                }
                if (sum != 0)
                {
                    if (sum < 0)
                        outt = -1;
                    else
                        outt = 1;
                    if (outt != (int)output[i])
                    {
                        stable = false;
                        output[i] = outt;
                    }
                }
            }
            return stable;
        }
Example #46
 /// <summary>
 /// Copy the specified input pattern to the weight matrix. This causes an
 /// output neuron to learn this pattern "exactly". This is useful when a
 /// winner is to be forced.
 /// </summary>
 /// <param name="synapse">The synapse that is the target of the copy.</param>
 /// <param name="outputNeuron">The output neuron to set.</param>
 /// <param name="input">The input pattern to copy.</param>
 private void CopyInputPattern(ISynapse synapse,
          int outputNeuron, INeuralData input)
 {
     for (int inputNeuron = 0; inputNeuron < this.inputNeuronCount;
         inputNeuron++)
     {
         synapse.WeightMatrix[inputNeuron, outputNeuron] =
                 input[inputNeuron];
     }
 }
Example #47
        /// <summary>
        /// Force any neurons that did not win to off-load patterns from overworked
        /// neurons.
        /// </summary>
        /// <param name="synapse">The synapse to modify.</param>
        /// <param name="won">An array that specifies how many times each output neuron has
        /// "won".</param>
        /// <param name="leastRepresented">The training pattern that is the least represented by this
        /// neural network.</param>
        /// <returns>True if a winner was forced.</returns>
        private bool ForceWinners(ISynapse synapse, int[] won,
                 INeuralData leastRepresented)
        {

            double maxActivation = double.MinValue;
            int maxActivationNeuron = -1;

            INeuralData output = this.network.Compute(leastRepresented);

            // Loop over all of the output neurons. Consider any neurons that were
            // not the BMU (winner) for any pattern. Track which of these
            // non-winning neurons had the highest activation.
            for (int outputNeuron = 0; outputNeuron < won.Length; outputNeuron++)
            {
                // Only consider neurons that did not "win".
                if (won[outputNeuron] == 0)
                {
                    if ((maxActivationNeuron == -1)
                            || (output.Data[outputNeuron] > maxActivation))
                    {
                        maxActivation = output.Data[outputNeuron];
                        maxActivationNeuron = outputNeuron;
                    }
                }
            }

            // If a neuron was found that did not activate for any pattern, then
            // force it to "win" the least represented pattern.
            if (maxActivationNeuron != -1)
            {
                CopyInputPattern(synapse, maxActivationNeuron, leastRepresented);
                return true;
            }
            else
            {
                return false;
            }
        }
Example #48
        /// <summary>
        /// Train for the specified pattern.
        /// </summary>
        /// <param name="synapse">The synapse to train.</param>
        /// <param name="input">The input pattern to train for.</param>
        /// <param name="current">The current output neuron being trained.</param>
        /// <param name="bmu">The best matching unit, or winning output neuron.</param>
        private void TrainPattern(ISynapse synapse, INeuralData input,
                 int current, int bmu)
        {
            Matrix correction = this.correctionMatrix[synapse];

            for (int inputNeuron = 0; inputNeuron < this.inputNeuronCount;
                inputNeuron++)
            {

                double currentWeight = synapse.WeightMatrix[inputNeuron,
                       current];
                double inputValue = input.Data[inputNeuron];

                double newWeight = DetermineNewWeight(currentWeight,
                       inputValue, current, bmu);

                correction[inputNeuron, current] = newWeight;
            }
        }
Example #49
        /// <summary>
        /// Train the specified pattern.  Find a winning neuron and adjust all
        /// neurons according to the neighborhood function.
        /// </summary>
        /// <param name="pattern">The pattern to train.</param>
        public void TrainPattern(INeuralData pattern)
        {
            foreach (ISynapse synapse in this.synapses)
            {
                INeuralData input = pattern;
                int bmu = this.bmuUtil.CalculateBMU(synapse, input);
                Train(bmu, synapse, input);
            }
            ApplyCorrection();

        }
Example #50
        /// <summary>
        /// Setup the network logic, read parameters from the network.
        /// NOT USED, call the run method.
        /// </summary>
        /// <param name="input">Not used</param>
        /// <param name="useHolder">Not used</param>
        /// <returns>Not used</returns>
        public override INeuralData Compute(INeuralData input, NeuralOutputHolder useHolder)
        {
            String str = "Compute on BasicNetwork cannot be used, rather call" +
                    " the run method on the logic class.";
#if logging
            if (logger.IsErrorEnabled)
            {
                logger.Error(str);
            }
#endif
            throw new NeuralNetworkError(str);
        }
Example #51
        /// <summary>
        /// Determine which member of the output is the winning neuron.
        /// </summary>
        /// <param name="output">The output from the neural network.</param>
        /// <returns>The winning neuron.</returns>
        public static int DetermineWinner(INeuralData output)
        {

            int win = 0;

            double biggest = double.MinValue;
            for (int i = 0; i < output.Count; i++)
            {

                if (output[i] > biggest)
                {
                    biggest = output[i];
                    win = i;
                }
            }

            return win;
        }
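
A hedged usage sketch (not from the original listing) that maps a network's output vector to a class index with DetermineWinner; `network` and `inputPattern` are assumed to exist.

        // Hypothetical caller: classify one input by picking the strongest output neuron.
        INeuralData output = network.Compute(inputPattern);
        int predictedClass = DetermineWinner(output);   // the static method shown above
        Console.WriteLine("Predicted class: " + predictedClass);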
Example #52
        /// <summary>
        /// Compute the output for the given input.
        /// </summary>
        /// <param name="input">The input to the SVM.</param>
        /// <returns>The results from the SVM.</returns>
        public override INeuralData Compute(INeuralData input)
        {
            INeuralData result = new BasicNeuralData(this.outputCount);

            svm_node[] formattedInput = MakeSparse(input);

            for (int i = 0; i < this.outputCount; i++)
            {
                double d = svm.svm_predict(this.models[i], formattedInput);
                result[i] = d;
            }
            return result;
        }
Example #53
 /// <summary>
 /// Compute the output from this synapse.
 /// </summary>
 /// <param name="input">The input to this synapse.</param>
 /// <returns>The output is the same as the input.</returns>
 public override INeuralData Compute(INeuralData input)
 {
     return input;
 }
Example #54
        /// <summary>
        /// Calculate the winning neuron from the data, this is the neuron
        /// that has the highest output.
        /// </summary>
        /// <param name="data">The data to use to determine the winning neuron.</param>
        /// <returns>The winning neuron index, or -1 if no winner.</returns>
        public int Winner(INeuralData data)
        {
            int winner = -1;

            for (int i = 0; i < data.Count; i++)
            {
                if (winner == -1 || data[i] > data[winner])
                {
                    winner = i;
                }
            }

            return winner;
        }
Example #55
        /// <summary>
        /// Internal computation method for a single layer.  This is called, 
        /// as the neural network processes.
        /// </summary>
        /// <param name="holder">The output holder.</param>
        /// <param name="layer">The layer to process.</param>
        /// <param name="input">The input to this layer.</param>
        /// <param name="source">The source synapse.</param>
        private void Compute(NeuralOutputHolder holder, ILayer layer,
                 INeuralData input, ISynapse source)
        {
            try
            {
#if logging
                if (FeedforwardLogic.logger.IsDebugEnabled)
                {
                    FeedforwardLogic.logger.Debug("Processing layer: "
                        + layer.ToString()
                        + ", input= "
                        + input.ToString());
                }
#endif
                
                // typically used to process any recurrent layers that feed into this
                // layer.
                PreprocessLayer(layer, input, source);

                foreach (ISynapse synapse in layer.Next)
                {
                    if (!holder.Result.ContainsKey(synapse))
                    {
#if logging
                        if (FeedforwardLogic.logger.IsDebugEnabled)
                        {
                            FeedforwardLogic.logger.Debug("Processing synapse: " + synapse.ToString());
                        }
#endif
                        INeuralData pattern = synapse.Compute(input);
                        pattern = synapse.ToLayer.Compute(pattern);
                        synapse.ToLayer.Process(pattern);
                        holder.Result[synapse] = input;
                        Compute(holder, synapse.ToLayer, pattern, synapse);

                        ILayer outputLayer = this.network.GetLayer(BasicNetwork.TAG_OUTPUT);

                        // Is this the output from the entire network?
                        if (synapse.ToLayer == outputLayer)
                        {
                            holder.Output = pattern;
                        }
                    }
                }
            }
            catch (IndexOutOfRangeException ex)
            {
                throw new NeuralNetworkError("Size mismatch on input of size " + input.Count + " and layer: ", ex);
            }
        }
Example #56
        /// <summary>
        /// Compute the values before sending output to the next layer.
        /// This function allows the activation functions to be called.
        /// </summary>
        /// <param name="pattern">The incoming pattern.</param>
        /// <returns>The output from this layer.</returns>
        public override INeuralData Compute(INeuralData pattern)
        {
            INeuralData result = new BasicNeuralData(NeuronCount);

            for (int i = 0; i < NeuronCount; i++)
            {

                if (this.radialBasisFunction[i] == null)
                {
                    String str =
               "Error, must define radial functions for each neuron";
#if logging
                    if (RadialBasisFunctionLayer.logger.IsErrorEnabled)
                    {
                        RadialBasisFunctionLayer.logger.Error(str);
                    }
#endif
                    throw new NeuralNetworkError(str);
                }

                IRadialBasisFunction f = this.radialBasisFunction[i];

                if (pattern.Data.Length != f.Dimensions)
                    throw new Exception("Inputs must equal the number of dimensions.");

                result[i] = f.Calculate(pattern.Data);
            }

            return result;
        }
Example #57
        /// <summary>
        /// Handle recurrent layers.  See if there are any recurrent layers before
        /// the specified layer that must affect the input.
        /// </summary>
        /// <param name="layer">The layer being processed, see if there are any recurrent
        /// connections to this.</param>
        /// <param name="input">The input to the layer, will be modified with the result
        /// from any recurrent layers.</param>
        /// <param name="source">The source synapse.</param>
        private void HandleRecurrentInput(ILayer layer,
                 INeuralData input, ISynapse source)
        {
            foreach (ISynapse synapse
                    in this.structure.GetPreviousSynapses(layer))
            {
                if (synapse != source)
                {
#if logging
                    if (BasicNetwork.logger.IsDebugEnabled)
                    {
                        BasicNetwork.logger.Debug("Recurrent layer from: " + input);
                    }
#endif
                    INeuralData recurrentInput = synapse.FromLayer
                           .Recur();

                    if (recurrentInput != null)
                    {
                        INeuralData recurrentOutput = synapse
                               .Compute(recurrentInput);

                        for (int i = 0; i < input.Count; i++)
                        {
                            input[i] = input[i]
                                    + recurrentOutput[i];
                        }
#if logging
                        if (BasicNetwork.logger.IsDebugEnabled)
                        {
                            BasicNetwork.logger.Debug("Recurrent layer to: " + input);
                        }
#endif
                    }
                }
            }
        }
Example #58
        /// <summary>
        /// Not supported.
        /// </summary>
        /// <param name="inputData">Not used.</param>
        /// <param name="idealData">Not used.</param>
        public void Add(INeuralData inputData, INeuralData idealData)
        {
            throw new TrainingError(FoldedDataSet.ADD_NOT_SUPPORTED);

        }
Example #59
        /// <summary>
        /// Convert regular Encog NeuralData into the "sparse" data needed by an SVM. 
        /// </summary>
        /// <param name="data">The data to convert.</param>
        /// <returns>The SVM sparse data.</returns>
        public svm_node[] MakeSparse(INeuralData data)
        {
            svm_node[] result = new svm_node[data.Count];
            for (int i = 0; i < data.Count; i++)
            {
                result[i] = new svm_node();
                result[i].index = i + 1;
                result[i].value_Renamed = data[i];
            }

            return result;
        }
Example #60
        /// <summary>
        /// Determine the winner for the specified input. This is the number of the
        /// winning neuron.
        /// </summary>
        /// <param name="input">The input patter to present to the neural network.</param>
        /// <returns>The winning neuron.</returns>
        public int Winner(INeuralData input)
        {

            INeuralData output = Compute(input);
            return DetermineWinner(output);
        }