Example #1
        /// <summary>
        /// Propagate the layer.
        /// </summary>
        /// <param name="synapse">The synapse for this layer.</param>
        /// <param name="input">The input pattern.</param>
        /// <param name="output">The output pattern.</param>
        /// <returns>True if the network has become stable.</returns>
        private bool PropagateLayer(ISynapse synapse, INeuralData input,
                                    INeuralData output)
        {
            int  i, j;
            int  sum, outt = 0;
            bool stable;

            stable = true;

            for (i = 0; i < output.Count; i++)
            {
                sum = 0;
                for (j = 0; j < input.Count; j++)
                {
                    sum += (int)(GetWeight(synapse, input, i, j) * input[j]);
                }
                if (sum != 0)
                {
                    if (sum < 0)
                    {
                        outt = -1;
                    }
                    else
                    {
                        outt = 1;
                    }
                    if (outt != (int)output[i])
                    {
                        stable    = false;
                        output[i] = outt;
                    }
                }
            }
            return(stable);
        }
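For reference, the sign-threshold rule that PropagateLayer applies can be written against plain arrays, independent of the ISynapse/INeuralData types above: each output is driven to the sign of its weighted input sum, and the layer is stable once no output has to change. This is only an illustrative sketch; the class and method names are not part of any library.

static class BipolarPropagationSketch
{
    // Returns true when no output value had to change (the layer is stable).
    public static bool Propagate(int[,] weights, int[] input, int[] output)
    {
        bool stable = true;
        for (int i = 0; i < output.Length; i++)
        {
            int sum = 0;
            for (int j = 0; j < input.Length; j++)
            {
                sum += weights[i, j] * input[j];
            }
            if (sum != 0)
            {
                int next = sum < 0 ? -1 : 1;   // bipolar threshold
                if (next != output[i])
                {
                    stable = false;
                    output[i] = next;
                }
            }
        }
        return stable;
    }
}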
Example #2
        /// <summary>
        /// Enable(or disable) a connection.
        /// </summary>
        /// <param name="synapse">The synapse.</param>
        /// <param name="fromNeuron">The from neuron.</param>
        /// <param name="toNeuron">The to neuron.</param>
        /// <param name="enable">True, if enabled.</param>
        public void EnableConnection(ISynapse synapse, int fromNeuron, int toNeuron, bool enable)
        {
            if (synapse.WeightMatrix == null)
            {
                throw new NeuralNetworkError("Can't enable/disable connection on a synapse that does not have a weight matrix.");
            }

            double value = synapse.WeightMatrix[fromNeuron, toNeuron];

            if (enable)
            {
                if (!this.structure.IsConnectionLimited)
                {
                    return;
                }

                if (Math.Abs(value) < this.structure.ConnectionLimit)
                {
                    synapse.WeightMatrix[fromNeuron, toNeuron] = RangeRandomizer.Randomize(-1, 1);
                }
            }
            else
            {
                if (!this.structure.IsConnectionLimited)
                {
                    this.Properties[BasicNetwork.TAG_LIMIT] = BasicNetwork.DEFAULT_CONNECTION_LIMIT;
                    this.structure.FinalizeStructure();
                }
                synapse.WeightMatrix[fromNeuron, toNeuron] = 0;
            }
        }
Example #3
        private ISynapse[] CreateSynapses(int[] layers, int synapsesCount, double learningRate)
        {
            var synapseIndex       = 0;
            var firstNeuronInLayer = 0;
            var synapses           = new ISynapse[synapsesCount];

            for (var i = 0; i < layers.Length - 1; i++)
            {
                var prevLayerBegin = firstNeuronInLayer;
                var nextLayerBegin = firstNeuronInLayer + layers[i];
                for (var j = prevLayerBegin; j < prevLayerBegin + layers[i]; j++)
                {
                    for (var k = nextLayerBegin; k < nextLayerBegin + layers[i + 1]; k++)
                    {
                        synapses[synapseIndex] = new Synapse
                        {
                            P = _neurons[j],
                            Q = _neurons[k],
                            N = learningRate,
                            W = _generator.Get(),
                        };
                        synapseIndex++;
                    }
                }
                firstNeuronInLayer += layers[i];
            }
            return(synapses);
        }
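The synapsesCount argument passed to CreateSynapses has to match the number of connections a fully connected feed-forward layout needs: layers[i] * layers[i + 1] for each adjacent pair of layers. A small illustrative helper (not part of the code above) makes that explicit:

static class SynapseCountSketch
{
    // Number of synapses a fully connected feed-forward layout needs.
    public static int CountSynapses(int[] layers)
    {
        int count = 0;
        for (int i = 0; i < layers.Length - 1; i++)
        {
            count += layers[i] * layers[i + 1];
        }
        return count;
    }
}

// Example: SynapseCountSketch.CountSynapses(new[] { 2, 3, 1 }) == 2 * 3 + 3 * 1 == 9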
        /// <summary>
        /// Process any synapses that should be loaded.
        /// </summary>
        /// <param name="xmlIn">The XML reader.</param>
        private void HandleSynapses(ReadXML xmlIn)
        {
            String end = xmlIn.LastTag.Name;

            while (xmlIn.ReadToTag())
            {
                if (xmlIn.IsIt(BasicNetworkPersistor.TAG_SYNAPSE, true))
                {
                    int from = xmlIn.LastTag.GetAttributeInt(
                        BasicNetworkPersistor.ATTRIBUTE_FROM);
                    int to = xmlIn.LastTag.GetAttributeInt(
                        BasicNetworkPersistor.ATTRIBUTE_TO);
                    xmlIn.ReadToTag();
                    IPersistor persistor = PersistorUtil.CreatePersistor(xmlIn
                                                                         .LastTag.Name);
                    ISynapse synapse = (ISynapse)persistor.Load(xmlIn);
                    synapse.FromLayer = this.index2layer[from];
                    synapse.ToLayer   = this.index2layer[to];
                    synapse.FromLayer.AddSynapse(synapse);
                }
                if (xmlIn.IsIt(end, false))
                {
                    break;
                }
            }
        }
        /// <summary>
        /// Calculate the best matching unit (BMU). This is the output neuron that
        /// has the lowest Euclidean distance to the input vector.
        /// </summary>
        /// <param name="synapse">The synapse to calculate for.</param>
        /// <param name="input">The input vector.</param>
        /// <returns>The output neuron number that is the BMU.</returns>
        public int CalculateBMU(ISynapse synapse, INeuralData input)
        {
            int result = 0;

            // Track the lowest distance so far.
            double lowestDistance = double.MaxValue;

            for (int i = 0; i < this.training.OutputNeuronCount; i++)
            {
                double distance = CalculateEuclideanDistance(synapse, input,
                       i);

                // Track the lowest distance, this is the BMU.
                if (distance < lowestDistance)
                {
                    lowestDistance = distance;
                    result = i;
                }
            }

            // Track the worst distance, this is the error for the entire network.
            if (lowestDistance > this.worstDistance)
            {
                this.worstDistance = lowestDistance;
            }

            return result;
        }
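The BMU search above is an argmin over Euclidean distances. A standalone sketch on plain arrays (illustrative names only, no library types, with the weights indexed [input, output] as in the synapse weight matrix) shows the same logic:

using System;

static class BmuSketch
{
    // Return the index of the output column whose weight vector is closest to the input.
    public static int FindBmu(double[,] weights, double[] input)
    {
        int best = 0;
        double lowest = double.MaxValue;
        for (int o = 0; o < weights.GetLength(1); o++)
        {
            double sum = 0;
            for (int i = 0; i < input.Length; i++)
            {
                double diff = input[i] - weights[i, o];
                sum += diff * diff;
            }
            double distance = Math.Sqrt(sum);
            if (distance < lowest)
            {
                lowest = distance;
                best = o;
            }
        }
        return best;
    }
}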
        /// <summary>
        /// Process a layer, copying its weights from the array.
        /// </summary>
        /// <param name="network">The network to process.</param>
        /// <param name="layer">The layer to process.</param>
        /// <param name="array">The array to process.</param>
        /// <param name="index">The current index.</param>
        /// <returns>The index after this layer has been read.</returns>
        private static int ProcessLayer(BasicNetwork network,
                                        ILayer layer, double[] array, int index)
        {
            int result = index;

            // see if the previous layer, which is the next layer that the loop will hit,
            // is either a connection to a BasicLayer or a ContextLayer.
            ISynapse synapse = network.Structure
                               .FindPreviousSynapseByLayerType(layer, typeof(BasicLayer));
            ISynapse contextSynapse = network.Structure
                                      .FindPreviousSynapseByLayerType(layer, typeof(ContextLayer));

            // get a list of the previous synapses to this layer
            IList <ISynapse> list = network.Structure.GetPreviousSynapses(layer);

            // If there is not a BasicLayer or contextLayer as the next layer, then
            // just take the first synapse of any type.
            if (synapse == null && contextSynapse == null && list.Count > 0)
            {
                synapse = list[0];
            }

            // is there any data to record for this synapse?
            if (synapse != null && synapse.WeightMatrix != null)
            {
                // process each weight matrix
                for (int x = 0; x < synapse.ToNeuronCount; x++)
                {
                    for (int y = 0; y < synapse.FromNeuronCount; y++)
                    {
                        synapse.WeightMatrix[y, x] = array[result++];
                    }
                    if (synapse.ToLayer.HasBias)
                    {
                        synapse.ToLayer.BiasWeights[x] = array[result++];
                    }

                    if (contextSynapse != null)
                    {
                        for (int z = 0; z < contextSynapse.FromNeuronCount; z++)
                        {
                            double value    = array[result++];
                            double oldValue = synapse.WeightMatrix[z, x];

                            // if this connection is limited, do not update it to anything but zero
                            if (Math.Abs(oldValue) < network.Structure
                                .ConnectionLimit)
                            {
                                value = 0;
                            }

                            // update the actual matrix
                            contextSynapse.WeightMatrix[z, x] = value;
                        }
                    }
                }
            }

            return(result);
        }
 /// <summary>
 /// Once the Hopfield synapse has been found, this method is called
 /// to train it.
 /// </summary>
 /// <param name="recurrent">The recurrent Hopfield synapse to train.</param>
 private void TrainHopfieldSynapse(ISynapse recurrent)
 {
     foreach (INeuralDataPair data in this.Training)
     {
         TrainHopfieldSynapse(recurrent, data.Input);
     }
 }
Example #8
        /// <summary>
        /// Find the specified synapse; throw an error if it is required but does not exist.
        /// </summary>
        /// <param name="fromLayer">The from layer.</param>
        /// <param name="toLayer">The to layer.</param>
        /// <param name="required">Is this required?</param>
        /// <returns>The synapse, if it exists, otherwise null.</returns>
        public ISynapse FindSynapse(ILayer fromLayer, ILayer toLayer,
                                    bool required)
        {
            ISynapse result = null;

            foreach (ISynapse synapse in Synapses)
            {
                if ((synapse.FromLayer == fromLayer) &&
                    (synapse.ToLayer == toLayer))
                {
                    result = synapse;
                    break;
                }
            }

            if (required && (result == null))
            {
                String str =
                    "This operation requires a network with a synapse between the "
                    + NameLayer(fromLayer)
                    + " layer to the "
                    + NameLayer(toLayer) + " layer.";
#if logging
                if (NeuralStructure.logger.IsErrorEnabled)
                {
                    NeuralStructure.logger.Error(str);
                }
#endif
                throw new NeuralNetworkError(str);
            }

            return(result);
        }
Example #9
        /// <summary>
        /// Calculate the best matching unit (BMU). This is the output neuron that
        /// has the lowest Euclidean distance to the input vector.
        /// </summary>
        /// <param name="synapse">The synapse to calculate for.</param>
        /// <param name="input">The input vector.</param>
        /// <returns>The output neuron number that is the BMU.</returns>
        public int CalculateBMU(ISynapse synapse, INeuralData input)
        {
            int result = 0;

            // Track the lowest distance so far.
            double lowestDistance = double.MaxValue;

            for (int i = 0; i < this.training.OutputNeuronCount; i++)
            {
                double distance = CalculateEuclideanDistance(synapse, input,
                                                             i);

                // Track the lowest distance, this is the BMU.
                if (distance < lowestDistance)
                {
                    lowestDistance = distance;
                    result         = i;
                }
            }

            // Track the worst distance, this is the error for the entire network.
            if (lowestDistance > this.worstDistance)
            {
                this.worstDistance = lowestDistance;
            }

            return(result);
        }
Example #10
 /// <summary>
 /// Setup the network logic, read parameters from the network.
 /// </summary>
 /// <param name="network">The network that this logic class belongs to.</param>
 public void Init(BasicNetwork network)
 {
     this.network       = network;
     this.f1Layer       = network.GetLayer(BAMPattern.TAG_F1);
     this.f2Layer       = network.GetLayer(BAMPattern.TAG_F2);
     this.synapseF1ToF2 = network.Structure.FindSynapse(this.f1Layer, this.f2Layer, true);
     this.synapseF2ToF1 = network.Structure.FindSynapse(this.f2Layer, this.f1Layer, true);
 }
Example #11
 /// <summary>
 /// Setup the network logic, read parameters from the network.
 /// </summary>
 /// <param name="network">The network that this logic class belongs to.</param>
 public override void Init(BasicNetwork network)
 {
     base.Init(network);
     // hold references to parts of the network we will need later
     this.thermalLayer   = this.Network.GetLayer(BasicNetwork.TAG_INPUT);
     this.thermalSynapse = this.Network.Structure.FindSynapse(this.thermalLayer, this.thermalLayer, true);
     this.currentState   = new BiPolarNeuralData(this.thermalLayer.NeuronCount);
 }
        /// <summary>
        /// Determine the network size.
        /// </summary>
        /// <param name="network">The network to check.</param>
        /// <returns>The size of the network.</returns>
        public static int NetworkSize(BasicNetwork network)
        {
            // see if there is already an up to date flat network
            if (network.Structure.Flat != null &&
                (network.Structure.FlatUpdate == FlatUpdateNeeded.None ||
                 network.Structure.FlatUpdate == FlatUpdateNeeded.Unflatten))
            {
                return(network.Structure.Flat.Weights.Length);
            }

            int index = 0;

            // loop over all of the layers, take the output layer first
            foreach (ILayer layer in network.Structure.Layers)
            {
                // see if the previous layer, which is the next layer that the loop will hit,
                // is either a connection to a BasicLayer or a ContextLayer.
                ISynapse synapse = network.Structure
                                   .FindPreviousSynapseByLayerType(layer, typeof(BasicLayer));
                ISynapse contextSynapse = network.Structure.FindPreviousSynapseByLayerType(
                    layer, typeof(ContextLayer));

                // get a list of the previous synapses to this layer
                IList <ISynapse> list = network.Structure.GetPreviousSynapses(layer);

                // If there is not a BasicLayer or contextLayer as the next layer, then
                // just take the first synapse of any type.
                if (synapse == null && contextSynapse == null && list.Count > 0)
                {
                    synapse = list[0];
                }

                // is there any data to record for this synapse?
                if (synapse != null && synapse.WeightMatrix != null)
                {
                    // process each weight matrix
                    for (int x = 0; x < synapse.ToNeuronCount; x++)
                    {
                        index += synapse.FromNeuronCount;


                        if (synapse.ToLayer.HasBias)
                        {
                            index++;
                        }

                        if (contextSynapse != null)
                        {
                            index += contextSynapse.FromNeuronCount;
                        }
                    }
                }
            }

            return(index);
        }
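A quick worked example of what NetworkSize counts, assuming a plain fully connected 2-3-1 network with bias on the hidden and output layers and no context synapses:

// hidden synapse : 3 to-neurons * 2 from-neurons + 3 bias weights = 9
// output synapse : 1 to-neuron  * 3 from-neurons + 1 bias weight  = 4
// NetworkSize(network) == 13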
Example #13
        /// <summary>
        /// This general function checks the existence of an interconnection between the entity and a party entity.
        /// </summary>
        /// <param name="entityConnectionsCollection">Bank of synapses of the entities</param>
        /// <param name="entityIdx">An index of the entity in the connections bank (target neuron)</param>
        /// <param name="partyIdx">An index of the party entity (source neuron)</param>
        private bool ExistsInterconnection(List <ISynapse>[] entityConnectionsCollection, int entityIdx, int partyIdx)
        {
            //Try to select the same synapse
            ISynapse equalConn = (from connection in entityConnectionsCollection[entityIdx]
                                  where connection.SourceNeuron.Placement.GlobalFlatIdx == partyIdx
                                  select connection
                                  ).FirstOrDefault();

            return(equalConn != null);
        }
Example #14
        /// <summary>
        /// Determine if two neurons are connected.  They are not connected
        /// if they have a zero weight, or a weight below the connection level.
        /// Non-connected weights have no effect on the output of the neural
        /// network, and are not trained.
        /// </summary>
        /// <param name="synapse">The synapse.</param>
        /// <param name="fromNeuron">The from neuron.</param>
        /// <param name="toNeuron">The to neuron.</param>
        /// <returns>True if the two neurons are connected.</returns>
        public bool IsConnected(ISynapse synapse, int fromNeuron, int toNeuron)
        {
            if (!this.structure.IsConnectionLimited)
            {
                return(true);
            }
            double value = synapse.WeightMatrix[fromNeuron, toNeuron];

            return(Math.Abs(value) > this.structure.ConnectionLimit);
        }
Example #15
        public virtual void ProcessInput(ISynapse synapse)
        {
            _values.Add(synapse.Value);
            _synapses[synapse] = true;

            if (_synapses.All(kv => kv.Value))
            {
                OnAllInputs();
            }
        }
Example #16
 /// <summary>
 /// Copy the specified input pattern to the weight matrix. This causes an
 /// output neuron to learn this pattern "exactly". This is useful when a
 /// winner is to be forced.
 /// </summary>
 /// <param name="synapse">The synapse that is the target of the copy.</param>
 /// <param name="outputNeuron">The output neuron to set.</param>
 /// <param name="input">The input pattern to copy.</param>
 private void CopyInputPattern(ISynapse synapse,
                               int outputNeuron, INeuralData input)
 {
     for (int inputNeuron = 0; inputNeuron < this.inputNeuronCount;
          inputNeuron++)
     {
         synapse.WeightMatrix[inputNeuron, outputNeuron] =
             input[inputNeuron];
     }
 }
Example #17
 /// <summary>
 /// Train for the specified synapse and BMU.
 /// </summary>
 /// <param name="bmu">The best matching unit for this input.</param>
 /// <param name="synapse">The synapse to train.</param>
 /// <param name="input">The input to train for.</param>
 private void Train(int bmu, ISynapse synapse,
                    INeuralData input)
 {
     // adjust the weight for the BMU and its neighborhood
     for (int outputNeuron = 0; outputNeuron < this.outputNeuronCount;
          outputNeuron++)
     {
         TrainPattern(synapse, input, outputNeuron, bmu);
     }
 }
Example #18
 /// <summary>
 /// Get the specified weight.
 /// </summary>
 /// <param name="synapse">The synapse to get the weight from.</param>
 /// <param name="input">The input, to obtain the size from.</param>
 /// <param name="x">The x matrix value. (could be row or column, depending on input)</param>
 /// <param name="y">The y matrix value. (could be row or column, depending on input)</param>
 /// <returns>The value from the matrix.</returns>
 private double GetWeight(ISynapse synapse, INeuralData input, int x, int y)
 {
     if (synapse.FromNeuronCount != input.Count)
     {
         return(synapse.WeightMatrix[x, y]);
     }
     else
     {
         return(synapse.WeightMatrix[y, x]);
     }
 }
Example #19
        /// <summary>
        /// Disconnect the synapse from its source and target neurons.
        /// </summary>
        /// <param name="synapse">The synapse to be disconnected.</param>
        public void Disconnect(ISynapse synapse)
        {
            // 1. Make its source neuron not aware of it ...
            sourceNeuron.TargetSynapses.Remove(synapse);
            // ... and vice versa.
            sourceNeuron = null;

            // 2. Make its target neuron not aware of it ...
            targetNeuron.SourceSynapses.Remove(synapse);
            // ... and vice versa.
            targetNeuron = null;
        }
 /**
  * Update the Hopfield weights after training.
  * @param target The target synapse.
  * @param delta The amount to change the weights by.
  */
 private void ConvertHopfieldMatrix(ISynapse target,
                                    Matrix delta)
 {
     // add the new weight matrix to what is there already
     for (int row = 0; row < delta.Rows; row++)
     {
         for (int col = 0; col < delta.Rows; col++)
         {
             target.WeightMatrix.Add(row, col, delta[row, col]);
         }
     }
 }
        /// <summary>
        /// Internal computation method for a single layer.  This is called,
        /// as the neural network processes.
        /// </summary>
        /// <param name="holder">The output holder.</param>
        /// <param name="layer">The layer to process.</param>
        /// <param name="input">The input to this layer.</param>
        /// <param name="source">The source synapse.</param>
        private void Compute(NeuralOutputHolder holder, ILayer layer,
                             INeuralData input, ISynapse source)
        {
            try
            {
#if logging
                if (FeedforwardLogic.logger.IsDebugEnabled)
                {
                    FeedforwardLogic.logger.Debug("Processing layer: "
                                                  + layer.ToString()
                                                  + ", input= "
                                                  + input.ToString());
                }
#endif

                // typically used to process any recurrent layers that feed into this
                // layer.
                PreprocessLayer(layer, input, source);

                foreach (ISynapse synapse in layer.Next)
                {
                    if (!holder.Result.ContainsKey(synapse))
                    {
#if logging
                        if (FeedforwardLogic.logger.IsDebugEnabled)
                        {
                            FeedforwardLogic.logger.Debug("Processing synapse: " + synapse.ToString());
                        }
#endif
                        INeuralData pattern = synapse.Compute(input);
                        pattern = synapse.ToLayer.Compute(pattern);
                        synapse.ToLayer.Process(pattern);
                        holder.Result[synapse] = input;
                        Compute(holder, synapse.ToLayer, pattern, synapse);

                        ILayer outputLayer = this.network.GetLayer(BasicNetwork.TAG_OUTPUT);

                        // Is this the output from the entire network?
                        if (synapse.ToLayer == outputLayer)
                        {
                            holder.Output = pattern;
                        }
                    }
                }
            }
            catch (IndexOutOfRangeException ex)
            {
                throw new NeuralNetworkError("Size mismatch on input of size " + input.Count + " and layer: ", ex);
            }
        }
        /// <summary>
        /// Construct the ADALINE trainer.
        /// </summary>
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training set.</param>
        /// <param name="learningRate">The learning rate.</param>
        public TrainAdaline(BasicNetwork network, INeuralDataSet training,
                double learningRate)
        {
            if (network.Structure.Layers.Count > 2)
                throw new NeuralNetworkError(
                        "An ADALINE network only has two layers.");
            this.network = network;

            ILayer input = network.GetLayer(BasicNetwork.TAG_INPUT);

            this.synapse = input.Next[0];
            this.training = training;
            this.learningRate = learningRate;
        }
Example #23
        /// <summary>
        /// Calculate the Euclidean distance for the specified output neuron and the
        /// input vector.  This is the square root of the squares of the differences
        /// between the weight and input vectors.
        /// </summary>
        /// <param name="synapse">The synapse to get the weights from.</param>
        /// <param name="input">The input vector.</param>
        /// <param name="outputNeuron">The neuron we are calculating the distance for.</param>
        /// <returns>The Euclidean distance.</returns>
        public double CalculateEuclideanDistance(ISynapse synapse,
                                                 INeuralData input, int outputNeuron)
        {
            double result = 0;

            // Loop over all input data.
            for (int i = 0; i < input.Count; i++)
            {
                double diff = input[i]
                              - synapse.WeightMatrix[i, outputNeuron];
                result += diff * diff;
            }
            return(BoundMath.Sqrt(result));
        }
Example #24
        /// <summary>
        /// Construct the ADALINE trainer.
        /// </summary>
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training set.</param>
        /// <param name="learningRate">The learning rate.</param>
        public TrainAdaline(BasicNetwork network, INeuralDataSet training,
                            double learningRate)
        {
            if (network.Structure.Layers.Count > 2)
            {
                throw new NeuralNetworkError(
                          "An ADALINE network only has two layers.");
            }
            this.network = network;

            ILayer input = network.GetLayer(BasicNetwork.TAG_INPUT);

            this.synapse      = input.Next[0];
            this.training     = training;
            this.learningRate = learningRate;
        }
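The training step behind this trainer is the classic ADALINE delta rule: each weight moves in proportion to the output error and the corresponding input. A standalone sketch of one such update on plain arrays (illustrative only, not the library's actual iteration code):

static class AdalineSketch
{
    // One delta-rule update for a single training sample.
    public static void Update(double[] weights, ref double bias,
                              double[] input, double ideal, double learningRate)
    {
        double output = bias;
        for (int i = 0; i < input.Length; i++)
        {
            output += weights[i] * input[i];
        }
        double error = ideal - output;
        for (int i = 0; i < input.Length; i++)
        {
            weights[i] += learningRate * error * input[i];
        }
        bias += learningRate * error;
    }
}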
Example #25
        /// <summary>
        /// Connect the synapse to its source and target neurons.
        /// </summary>
        /// <param name="synapse">The synapse to be connected.</param>
        public void Connect(ISynapse synapse)
        {
            // 1. Make the synapse aware of its source neuron ...
            ILayer sourceLayer = parentConnector.SourceLayer;

            sourceNeuron = sourceLayer.GetNeuronByIndex(blueprint.SourceNeuronIndex);
            // ... and vice versa.
            sourceNeuron.TargetSynapses.Add(synapse);

            // 2. Make the synapse aware of its target neuron ...
            ILayer targetLayer = parentConnector.TargetLayer;

            targetNeuron = (IActivationNeuron)targetLayer.GetNeuronByIndex(blueprint.TargetNeuronIndex);
            // ... and vice versa.
            targetNeuron.SourceSynapses.Add(synapse);
        }
        /// <summary>
        /// Add a "next" layer.
        /// </summary>
        /// <param name="next">The next layer to add.</param>
        /// <param name="type">The synapse type to use for this layer.</param>
        public void AddNext(ILayer next, SynapseType type)
        {
            ISynapse synapse = null;

            switch (type)
            {
            case SynapseType.OneToOne:
                synapse = new OneToOneSynapse(this, next);
                break;

            case SynapseType.Weighted:
                synapse = new WeightedSynapse(this, next);
                break;

            case SynapseType.Weightless:
                synapse = new WeightlessSynapse(this, next);
                break;

            case SynapseType.Direct:
                synapse = new DirectSynapse(this, next);
                break;

            case SynapseType.NEAT:
                synapse = new NEATSynapse(this, next);
                break;

            default:
                throw new NeuralNetworkError("Unknown synapse type");
            }

            if (synapse == null)
            {
                String str = "Unknown synapse type.";
#if logging
                if (BasicLayer.logger.IsErrorEnabled)
                {
                    BasicLayer.logger.Error(str);
                }
#endif
                throw new NeuralNetworkError(str);
            }
            else
            {
                this.next.Add(synapse);
            }
        }
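Typical usage might look like the snippet below; treat the BasicLayer(int neuronCount) constructor as an assumption about the surrounding Encog API, since only AddNext itself is shown above.

var input  = new BasicLayer(2);
var hidden = new BasicLayer(3);
// Creates a WeightedSynapse from input to hidden and stores it in the layer's outbound list (input.Next).
input.AddNext(hidden, SynapseType.Weighted);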
Example #27
        /// <summary>
        /// Construct the object and find the parts of the network.
        /// </summary>
        /// <param name="network">The network to train.</param>
        public FindCPN(BasicNetwork network)
        {
            if (network.Structure.Layers.Count != 3)
            {
                String str = "A CPN network must have exactly 3 layers";
#if logging
                if (logger.IsErrorEnabled)
                {
                    logger.Error(str);
                }
#endif
                throw new TrainingError(str);
            }

            this.inputLayer   = network.GetLayer(BasicNetwork.TAG_INPUT);
            this.outstarLayer = network.GetLayer(CPNPattern.TAG_OUTSTAR);
            this.instarLayer  = network.GetLayer(CPNPattern.TAG_INSTAR);

            if (this.outstarLayer == null)
            {
                String str = "Can't find an OUTSTAR layer, this is required.";
#if logging
                if (logger.IsErrorEnabled)
                {
                    logger.Error(str);
                }
#endif
                throw new TrainingError(str);
            }

            if (this.instarLayer == null)
            {
                String str = "Can't find an OUTSTAR layer, this is required.";
#if logging
                if (logger.IsErrorEnabled)
                {
                    logger.Error(str);
                }
#endif
                throw new TrainingError(str);
            }

            this.instarSynapse  = this.inputLayer.Next[0];
            this.outstarSynapse = this.instarLayer.Next[0];
        }
Example #28
        /// <summary>
        /// Train for the specified pattern.
        /// </summary>
        /// <param name="synapse">The synapse to train.</param>
        /// <param name="input">The input pattern to train for.</param>
        /// <param name="current">The current output neuron being trained.</param>
        /// <param name="bmu">The best matching unit, or winning output neuron.</param>
        private void TrainPattern(ISynapse synapse, INeuralData input,
                                  int current, int bmu)
        {
            Matrix correction = this.correctionMatrix[synapse];

            for (int inputNeuron = 0; inputNeuron < this.inputNeuronCount;
                 inputNeuron++)
            {
                double currentWeight = synapse.WeightMatrix[inputNeuron,
                                                            current];
                double inputValue = input.Data[inputNeuron];

                double newWeight = DetermineNewWeight(currentWeight,
                                                      inputValue, current, bmu);

                correction[inputNeuron, current] = newWeight;
            }
        }
Example #29
 /// <summary>
 /// Randomize a synapse, only randomize those connections that are actually connected.
 /// </summary>
 /// <param name="network">The network the synapse belongs to.</param>
 /// <param name="synapse">The synapse to randomize.</param>
 public virtual void Randomize(BasicNetwork network, ISynapse synapse)
 {
     if (synapse.WeightMatrix != null)
     {
         bool       limited = network.Structure.IsConnectionLimited;
         double[][] d       = synapse.WeightMatrix.Data;
         for (int fromNeuron = 0; fromNeuron < synapse.WeightMatrix.Rows; fromNeuron++)
         {
             for (int toNeuron = 0; toNeuron < synapse.WeightMatrix.Cols; toNeuron++)
             {
                 if (!limited || network.IsConnected(synapse, fromNeuron, toNeuron))
                 {
                     d[fromNeuron][toNeuron] = Randomize(d[fromNeuron][toNeuron]);
                 }
             }
         }
     }
 }
        /// <summary>
        /// Train the neural network for the specified pattern. The neural network
        /// can be trained for more than one pattern. To do this simply call the
        /// train method more than once.
        /// </summary>
        /// <param name="synapse">The synapse to train.</param>
        /// <param name="pattern">The pattern to train for.</param>
        public void TrainHopfieldSynapse(ISynapse synapse,
                                         INeuralData pattern)
        {
            // Create a row matrix from the input, convert boolean to bipolar
            Matrix m2 = Matrix.CreateRowMatrix(pattern.Data);
            // Transpose the matrix and multiply by the original input matrix
            Matrix m1 = MatrixMath.Transpose(m2);
            Matrix m3 = MatrixMath.Multiply(m1, m2);
            // matrix 3 should be square by now, so create an identity
            // matrix of the same size.
            Matrix identity = MatrixMath.Identity(m3.Rows);

            // subtract the identity matrix
            Matrix m4 = MatrixMath.Subtract(m3, identity);

            // now add the calculated matrix, for this pattern, to the
            // existing weight matrix.
            ConvertHopfieldMatrix(synapse, m4);
        }
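The matrix steps above amount to the Hebbian outer-product rule with the diagonal removed: for a bipolar pattern p, add p*pᵀ minus the identity to the weight matrix. A standalone sketch on a plain double[,] (illustrative names only):

static class HopfieldSketch
{
    // Add the outer product of a bipolar pattern, minus the identity, to the weights.
    public static void Train(double[,] weights, double[] pattern)
    {
        int n = pattern.Length;
        for (int row = 0; row < n; row++)
        {
            for (int col = 0; col < n; col++)
            {
                double contribution = pattern[row] * pattern[col];
                if (row == col)
                {
                    contribution -= 1.0;   // subtract the identity; for bipolar values this zeroes the diagonal increment
                }
                weights[row, col] += contribution;
            }
        }
    }
}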
Example #31
        /// <summary>
        /// Find the next bias layer for a given layer.
        /// </summary>
        /// <param name="layer">The layer to search from.</param>
        /// <returns>The layer bias.</returns>
        private double FindNextBias(ILayer layer)
        {
            double bias = FlatNetwork.NO_BIAS_ACTIVATION;

            if (layer.Next.Count > 0)
            {
                ISynapse synapse = network.Structure
                                   .FindNextSynapseByLayerType(layer, typeof(BasicLayer));
                if (synapse != null)
                {
                    ILayer nextLayer = synapse.ToLayer;
                    if (nextLayer.HasBias)
                    {
                        bias = nextLayer.BiasActivation;
                    }
                }
            }
            return(bias);
        }
        /// <summary>
        /// Randomize the specified synapse.
        /// </summary>
        /// <param name="beta">The beta value.</param>
        /// <param name="synapse">The synapse to modify.</param>
        private void Randomize(double beta, ISynapse synapse)
        {
            if (synapse.WeightMatrix == null)
            {
                return;
            }

            for (int j = 0; j < synapse.ToNeuronCount; j++)
            {
                double norm = 0.0;

                // Calculate the Euclidean Norm for the weights
                for (int k = 0; k < synapse.FromNeuronCount; k++)
                {
                    double v = synapse.WeightMatrix[k, j];
                    norm += v * v;
                }

                if (synapse.ToLayer.HasBias)
                {
                    double value = synapse.ToLayer.BiasWeights[j];
                    norm += value * value;
                }


                norm = Math.Sqrt(norm);

                // Rescale the weights using beta and the norm
                for (int k = 0; k < synapse.FromNeuronCount; k++)
                {
                    double value = synapse.WeightMatrix[k, j];
                    synapse.WeightMatrix[k, j] = beta * value / norm;
                }

                if (synapse.ToLayer.HasBias)
                {
                    double value = synapse.ToLayer.BiasWeights[j];
                    synapse.ToLayer.BiasWeights[j] = beta * value / norm;
                }
            }
        }
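Each pass of the loop above rescales one output neuron's incoming weights (plus its bias weight) so that the column's Euclidean norm becomes beta. The same step on plain arrays, as an illustrative sketch:

using System;

static class ColumnRescaleSketch
{
    // Rescale column 'j' of the weights (and its bias entry) so the combined norm equals beta.
    public static void Rescale(double[,] weights, double[] bias, int j, double beta)
    {
        int rows = weights.GetLength(0);
        double norm = 0.0;
        for (int k = 0; k < rows; k++)
        {
            norm += weights[k, j] * weights[k, j];
        }
        norm += bias[j] * bias[j];
        norm = Math.Sqrt(norm);

        for (int k = 0; k < rows; k++)
        {
            weights[k, j] = beta * weights[k, j] / norm;
        }
        bias[j] = beta * bias[j] / norm;
    }
}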
        /// <summary>
        /// Handle recurrent layers.  See if there are any recurrent layers before
        /// the specified layer that must affect the input.
        /// </summary>
        /// <param name="layer">The layer being processed, see if there are any recurrent
        /// connections to this.</param>
        /// <param name="input">The input to the layer, will be modified with the result
        /// from any recurrent layers.</param>
        /// <param name="source">The source synapse.</param>
        public override void PreprocessLayer(ILayer layer,
                 INeuralData input, ISynapse source)
        {
            foreach (ISynapse synapse in
                     this.Network.Structure.GetPreviousSynapses(layer))
            {
                if (synapse != source)
                {
#if logging
                    if (SimpleRecurrentLogic.logger.IsDebugEnabled)
                    {
                        SimpleRecurrentLogic.logger.Debug("Recurrent layer from: " + input.ToString());
                    }
#endif
                    INeuralData recurrentInput = synapse.FromLayer.Recur();

                    if (recurrentInput != null)
                    {
                        INeuralData recurrentOutput = synapse
                               .Compute(recurrentInput);

                        for (int i = 0; i < input.Count; i++)
                        {
                            input[i] = input[i]
                                    + recurrentOutput[i];
                        }
#if logging
                        if (SimpleRecurrentLogic.logger.IsDebugEnabled)
                        {
                            SimpleRecurrentLogic.logger.Debug("Recurrent layer to: " + input.ToString());
                        }
#endif
                    }
                }
            }
        }
        /// <summary>
        /// Calculate the Euclidean distance for the specified output neuron and the
        /// input vector.  This is the square root of the squares of the differences
        /// between the weight and input vectors.
        /// </summary>
        /// <param name="synapse">The synapse to get the weights from.</param>
        /// <param name="input">The input vector.</param>
        /// <param name="outputNeuron">The neuron we are calculating the distance for.</param>
        /// <returns>The Euclidean distance.</returns>
        public double CalculateEuclideanDistance(ISynapse synapse,
                 INeuralData input, int outputNeuron)
        {
            double result = 0;

            // Loop over all input data.
            for (int i = 0; i < input.Count; i++)
            {
                double diff = input[i]
                       - synapse.WeightMatrix[i, outputNeuron];
                result += diff * diff;
            }
            return BoundMath.Sqrt(result);
        }
Example #35
        /// <summary>
        /// Construct the object and find the parts of the network.
        /// </summary>
        /// <param name="network">The network to train.</param>
        public FindCPN(BasicNetwork network)
        {
            if (network.Structure.Layers.Count != 3)
            {
                String str = "A CPN network must have exactly 3 layers";
#if logging
                if (logger.IsErrorEnabled)
                {
                    logger.Error(str);
                }
#endif
                throw new TrainingError(str);
            }

            this.inputLayer = network.GetLayer(BasicNetwork.TAG_INPUT);
            this.outstarLayer = network.GetLayer(CPNPattern.TAG_OUTSTAR);
            this.instarLayer = network.GetLayer(CPNPattern.TAG_INSTAR);

            if (this.outstarLayer == null)
            {
                String str = "Can't find an OUTSTAR layer, this is required.";
#if logging
                if (logger.IsErrorEnabled)
                {
                    logger.Error(str);
                }
#endif
                throw new TrainingError(str);
            }

            if (this.instarLayer == null)
            {
                String str = "Can't find an OUTSTAR layer, this is required.";
#if logging
                if (logger.IsErrorEnabled)
                {
                    logger.Error(str);
                }
#endif
                throw new TrainingError(str);
            }

            this.instarSynapse = this.inputLayer.Next[0];
            this.outstarSynapse = this.instarLayer.Next[0];
        }
        /// <summary>
        /// Enable(or disable) a connection.
        /// </summary>
        /// <param name="synapse">The synapse.</param>
        /// <param name="fromNeuron">The from neuron.</param>
        /// <param name="toNeuron">The to neuron.</param>
        /// <param name="enable">True, if enabled.</param>
        public void EnableConnection(ISynapse synapse, int fromNeuron, int toNeuron, bool enable)
        {
            if (synapse.WeightMatrix == null)
            {
                throw new NeuralNetworkError("Can't enable/disable connection on a synapse that does not have a weight matrix.");
            }

            double value = synapse.WeightMatrix[fromNeuron, toNeuron];

            if (enable)
            {
                if (!this.structure.IsConnectionLimited)
                    return;

                if (Math.Abs(value) < this.structure.ConnectionLimit)
                    synapse.WeightMatrix[fromNeuron, toNeuron] = RangeRandomizer.Randomize(-1, 1);
            }
            else
            {
                if (!this.structure.IsConnectionLimited)
                {
                    this.Properties[BasicNetwork.TAG_LIMIT] = BasicNetwork.DEFAULT_CONNECTION_LIMIT;
                    this.structure.FinalizeStructure();
                }
                synapse.WeightMatrix[fromNeuron, toNeuron] = 0;
            }
        }
 /**
  * Update the Hopfield weights after training.
  * @param target The target synapse.
  * @param delta The amount to change the weights by.
  */
 private void ConvertHopfieldMatrix(ISynapse target,
          Matrix delta)
 {
     // add the new weight matrix to what is there already
     for (int row = 0; row < delta.Rows; row++)
     {
         for (int col = 0; col < delta.Rows; col++)
         {
             target.WeightMatrix.Add( row, col, delta[row, col]);
         }
     }
 }
        /// <summary>
        /// Train for the specified pattern.
        /// </summary>
        /// <param name="synapse">The synapse to train.</param>
        /// <param name="input">The input pattern to train for.</param>
        /// <param name="current">The current output neuron being trained.</param>
        /// <param name="bmu">The best matching unit, or winning output neuron.</param>
        private void TrainPattern(ISynapse synapse, INeuralData input,
                 int current, int bmu)
        {
            Matrix correction = this.correctionMatrix[synapse];

            for (int inputNeuron = 0; inputNeuron < this.inputNeuronCount;
                inputNeuron++)
            {

                double currentWeight = synapse.WeightMatrix[inputNeuron,
                       current];
                double inputValue = input.Data[inputNeuron];

                double newWeight = DetermineNewWeight(currentWeight,
                       inputValue, current, bmu);

                correction[inputNeuron, current] = newWeight;
            }
        }
        /// <summary>
        /// Force any neurons that did not win to off-load patterns from overworked
        /// neurons.
        /// </summary>
        /// <param name="synapse">An array that specifies how many times each output neuron has
        /// "won".</param>
        /// <param name="won">The training pattern that is the least represented by this
        /// neural network.</param>
        /// <param name="leastRepresented">The synapse to modify.</param>
        /// <returns>True if a winner was forced.</returns>
        private bool ForceWinners(ISynapse synapse, int[] won,
                 INeuralData leastRepresented)
        {

            double maxActivation = double.MinValue;
            int maxActivationNeuron = -1;

            INeuralData output = this.network.Compute(leastRepresented);

            // Loop over all of the output neurons. Consider any neurons that were
            // not the BMU (winner) for any pattern. Track which of these
            // non-winning neurons had the highest activation.
            for (int outputNeuron = 0; outputNeuron < won.Length; outputNeuron++)
            {
                // Only consider neurons that did not "win".
                if (won[outputNeuron] == 0)
                {
                    if ((maxActivationNeuron == -1)
                            || (output.Data[outputNeuron] > maxActivation))
                    {
                        maxActivation = output.Data[outputNeuron];
                        maxActivationNeuron = outputNeuron;
                    }
                }
            }

            // If a neuron was found that did not activate for any patterns, then
            // force it to "win" the least represented pattern.
            if (maxActivationNeuron != -1)
            {
                CopyInputPattern(synapse, maxActivationNeuron, leastRepresented);
                return true;
            }
            else
            {
                return false;
            }
        }
Example #40
File: MP.cs Project: babaq/Soul
 public void ProjectedFrom(INeuron sourceneuron, ISynapse selfsynapse)
 {
     if (!this.Synapses.ContainsValue(selfsynapse))
     {
         this.Synapses.Add(selfsynapse.ID, selfsynapse);
         if (sourceneuron.ParentNetwork == null && this.parentnetwork != null)
         {
             sourceneuron.ParentNetwork = this.parentnetwork;
             return;
         }
         if (sourceneuron.ParentNetwork != null && this.parentnetwork == null)
         {
             this.parentnetwork = sourceneuron.ParentNetwork;
             return;
         }
     }
 }
        /// <summary>
        /// Internal computation method for a single layer.  This is called, 
        /// as the neural network processes.
        /// </summary>
        /// <param name="holder">The output holder.</param>
        /// <param name="layer">The layer to process.</param>
        /// <param name="input">The input to this layer.</param>
        /// <param name="source">The source synapse.</param>
        private void Compute(NeuralOutputHolder holder, ILayer layer,
                 INeuralData input, ISynapse source)
        {
            try
            {
#if logging
                if (FeedforwardLogic.logger.IsDebugEnabled)
                {
                    FeedforwardLogic.logger.Debug("Processing layer: "
                        + layer.ToString()
                        + ", input= "
                        + input.ToString());
                }
#endif
                
                // typically used to process any recurrent layers that feed into this
                // layer.
                PreprocessLayer(layer, input, source);

                foreach (ISynapse synapse in layer.Next)
                {
                    if (!holder.Result.ContainsKey(synapse))
                    {
#if logging
                        if (FeedforwardLogic.logger.IsDebugEnabled)
                        {
                            FeedforwardLogic.logger.Debug("Processing synapse: " + synapse.ToString());
                        }
#endif
                        INeuralData pattern = synapse.Compute(input);
                        pattern = synapse.ToLayer.Compute(pattern);
                        synapse.ToLayer.Process(pattern);
                        holder.Result[synapse] = input;
                        Compute(holder, synapse.ToLayer, pattern, synapse);

                        ILayer outputLayer = this.network.GetLayer(BasicNetwork.TAG_OUTPUT);

                        // Is this the output from the entire network?
                        if (synapse.ToLayer == outputLayer)
                        {
                            holder.Output = pattern;
                        }
                    }
                }
            }
            catch (IndexOutOfRangeException ex)
            {
                throw new NeuralNetworkError("Size mismatch on input of size " + input.Count + " and layer: ", ex);
            }
        }
Example #42
 /// <summary>
 /// Setup the network logic, read parameters from the network.
 /// </summary>
 /// <param name="network">The network that this logic class belongs to.</param>
 public void Init(BasicNetwork network)
 {
     this.network = network;
     this.f1Layer = network.GetLayer(BAMPattern.TAG_F1);
     this.f2Layer = network.GetLayer(BAMPattern.TAG_F2);
     this.synapseF1ToF2 = network.Structure.FindSynapse(this.f1Layer, this.f2Layer, true);
     this.synapseF2ToF1 = network.Structure.FindSynapse(this.f2Layer, this.f1Layer, true);
 }
Example #43
        /// <summary>
        /// Propagate the layer.
        /// </summary>
        /// <param name="synapse">The synapse for this layer.</param>
        /// <param name="input">The input pattern.</param>
        /// <param name="output">The output pattern.</param>
        /// <returns>True if the network has become stable.</returns>
        private bool PropagateLayer(ISynapse synapse, INeuralData input,
                INeuralData output)
        {
            int i, j;
            int sum, outt = 0;
            bool stable;

            stable = true;

            for (i = 0; i < output.Count; i++)
            {
                sum = 0;
                for (j = 0; j < input.Count; j++)
                {
                    sum += (int)(GetWeight(synapse, input, i, j) * input[j]);
                }
                if (sum != 0)
                {
                    if (sum < 0)
                        outt = -1;
                    else
                        outt = 1;
                    if (outt != (int)output[i])
                    {
                        stable = false;
                        output[i] = outt;
                    }
                }
            }
            return stable;
        }
Example #44
 /// <summary>
 /// Get the specified weight.
 /// </summary>
 /// <param name="synapse">The synapse to get the weight from.</param>
 /// <param name="input">The input, to obtain the size from.</param>
 /// <param name="x">The x matrix value. (could be row or column, depending on input)</param>
 /// <param name="y">The y matrix value. (could be row or column, depending on input)</param>
 /// <returns>The value from the matrix.</returns>
 private double GetWeight(ISynapse synapse, INeuralData input, int x, int y)
 {
     if (synapse.FromNeuronCount != input.Count)
         return synapse.WeightMatrix[x, y];
     else
         return synapse.WeightMatrix[y, x];
 }
 /// <summary>
 /// Add a synapse to the list of outbound synapses.  Usually you should 
 /// simply call the addLayer method to add to the outbound list.
 /// </summary>
 /// <param name="synapse">The synapse to add.</param>
 public void AddSynapse(ISynapse synapse)
 {
     this.next.Add(synapse);
 }
        /// <summary>
        /// Train the neural network for the specified pattern. The neural network
        /// can be trained for more than one pattern. To do this simply call the
        /// train method more than once.
        /// </summary>
        /// <param name="synapse">The synapse to train.</param>
        /// <param name="pattern">The pattern to train for.</param>
        public void TrainHopfieldSynapse(ISynapse synapse,
                 INeuralData pattern)
        {

            // Create a row matrix from the input, convert boolean to bipolar
            Matrix m2 = Matrix.CreateRowMatrix(pattern.Data);
            // Transpose the matrix and multiply by the original input matrix
            Matrix m1 = MatrixMath.Transpose(m2);
            Matrix m3 = MatrixMath.Multiply(m1, m2);
            // matrix 3 should be square by now, so create an identity
            // matrix of the same size.
            Matrix identity = MatrixMath.Identity(m3.Rows);

            // subtract the identity matrix
            Matrix m4 = MatrixMath.Subtract(m3, identity);

            // now add the calculated matrix, for this pattern, to the
            // existing weight matrix.
            ConvertHopfieldMatrix(synapse, m4);
        }
        /// <summary>
        /// Randomize a synapse, only randomize those connections that are actually connected.
        /// </summary>
        /// <param name="network">The network the synapse belongs to.</param>
        /// <param name="synapse">The synapse to randomize.</param>
        public override void Randomize(BasicNetwork network, ISynapse synapse)
        {
            if (synapse.WeightMatrix != null)
            {
                bool limited = network.Structure.IsConnectionLimited;
                double[][] d = synapse.WeightMatrix.Data;
                for (int fromNeuron = 0; fromNeuron < synapse.WeightMatrix.Rows; fromNeuron++)
                {
                    for (int toNeuron = 0; toNeuron < synapse.WeightMatrix.Cols; toNeuron++)
                    {
                        if (!limited || network.IsConnected(synapse, fromNeuron, toNeuron))
                            d[fromNeuron][toNeuron] = CalculateValue(synapse.WeightMatrix.Rows);
                    }
                }

            }
        }
Example #48
File: MP.cs Project: babaq/Soul
 public void ProjectTo(INeuron targetneuron, ISynapse targetsynapse)
 {
     if (!targetneuron.Synapses.ContainsValue(targetsynapse))
     {
         targetneuron.Synapses.Add(targetsynapse.ID, targetsynapse);
         if (targetneuron.ParentNetwork == null && this.parentnetwork != null)
         {
             targetneuron.ParentNetwork = this.parentnetwork;
             return;
         }
         if (targetneuron.ParentNetwork != null && this.parentnetwork == null)
         {
             this.parentnetwork = targetneuron.ParentNetwork;
             return;
         }
     }
 }
        /// <summary>
        /// Randomize the specified synapse.
        /// </summary>
        /// <param name="beta">The beta value.</param>
        /// <param name="synapse">The synapse to modify.</param>
        private void Randomize(double beta, ISynapse synapse)
        {
            if (synapse.WeightMatrix == null)
                return;

            for (int j = 0; j < synapse.ToNeuronCount; j++)
            {
                double norm = 0.0;

                // Calculate the Euclidean Norm for the weights
                for (int k = 0; k < synapse.FromNeuronCount; k++)
                {
                    double v = synapse.WeightMatrix[k, j];
                    norm += v * v;
                }

                if (synapse.ToLayer.HasBias)
                {
                    double value = synapse.ToLayer.BiasWeights[j];
                    norm += value * value;
                }


                norm = Math.Sqrt(norm);

                // Rescale the weights using beta and the norm
                for (int k = 0; k < synapse.FromNeuronCount; k++)
                {
                    double value = synapse.WeightMatrix[k, j];
                    synapse.WeightMatrix[k, j] = beta * value / norm;
                }

                if (synapse.ToLayer.HasBias)
                {
                    double value = synapse.ToLayer.BiasWeights[j];
                    synapse.ToLayer.BiasWeights[j] = beta * value / norm;
                }
            }
        }
 /// <summary>
 /// Copy the specified input pattern to the weight matrix. This causes an
 /// output neuron to learn this pattern "exactly". This is useful when a
 /// winner is to be forced.
 /// </summary>
 /// <param name="synapse">The synapse that is the target of the copy.</param>
 /// <param name="outputNeuron">The output neuron to set.</param>
 /// <param name="input">The input pattern to copy.</param>
 private void CopyInputPattern(ISynapse synapse,
          int outputNeuron, INeuralData input)
 {
     for (int inputNeuron = 0; inputNeuron < this.inputNeuronCount;
         inputNeuron++)
     {
         synapse.WeightMatrix[inputNeuron, outputNeuron] =
                 input[inputNeuron];
     }
 }
        /// <summary>
        /// Handle recurrent layers.  See if there are any recurrent layers before
        /// the specified layer that must affect the input.
        /// </summary>
        /// <param name="layer">The layer being processed, see if there are any recurrent
        /// connections to this.</param>
        /// <param name="input">The input to the layer, will be modified with the result
        /// from any recurrent layers.</param>
        /// <param name="source">The source synapse.</param>
        private void HandleRecurrentInput(ILayer layer,
                 INeuralData input, ISynapse source)
        {
            foreach (ISynapse synapse
                    in this.structure.GetPreviousSynapses(layer))
            {
                if (synapse != source)
                {
#if logging
                    if (BasicNetwork.logger.IsDebugEnabled)
                    {
                        BasicNetwork.logger.Debug("Recurrent layer from: " + input);
                    }
#endif
                    INeuralData recurrentInput = synapse.FromLayer
                           .Recur();

                    if (recurrentInput != null)
                    {
                        INeuralData recurrentOutput = synapse
                               .Compute(recurrentInput);

                        for (int i = 0; i < input.Count; i++)
                        {
                            input[i] = input[i]
                                    + recurrentOutput[i];
                        }
#if logging
                        if (BasicNetwork.logger.IsDebugEnabled)
                        {
                            BasicNetwork.logger.Debug("Recurrent layer to: " + input);
                        }
#endif
                    }
                }
            }
        }
 /// <summary>
 /// Train for the specified synapse and BMU.
 /// </summary>
 /// <param name="bmu">The best matching unit for this input.</param>
 /// <param name="synapse">The synapse to train.</param>
 /// <param name="input">The input to train for.</param>
 private void Train(int bmu, ISynapse synapse,
          INeuralData input)
 {
     // adjust the weight for the BMU and its neighborhood
     for (int outputNeuron = 0; outputNeuron < this.outputNeuronCount;
         outputNeuron++)
     {
         TrainPattern(synapse, input, outputNeuron, bmu);
     }
 }
        /// <summary>
        /// Determine if two neurons are connected.  They are not connected
        /// if they have a zero weight, or a weight below the connection level.  
        /// Non-connected weights have no effect on the output of the neural 
        /// network, and are not trained.
        /// </summary>
        /// <param name="synapse">The synapse.</param>
        /// <param name="fromNeuron">The from neuron.</param>
        /// <param name="toNeuron">The to neuron.</param>
        /// <returns>True if the two neurons are connected.</returns>
        public bool IsConnected(ISynapse synapse, int fromNeuron, int toNeuron)
        {
            if (!this.structure.IsConnectionLimited)
                return true;
            double value = synapse.WeightMatrix[fromNeuron, toNeuron];

            return (Math.Abs(value) > this.structure.ConnectionLimit);
        }
 /// <summary>
 /// Setup the network logic, read parameters from the network.
 /// </summary>
 /// <param name="network">The network that this logic class belongs to.</param>
 public override void Init(BasicNetwork network)
 {
     base.Init(network);
     // hold references to parts of the network we will need later
     this.thermalLayer = this.Network.GetLayer(BasicNetwork.TAG_INPUT);
     this.thermalSynapse = this.Network.Structure.FindSynapse(this.thermalLayer, this.thermalLayer, true);
     this.currentState = new BiPolarNeuralData(this.thermalLayer.NeuronCount);
 }
        /// <summary>
        /// Setup the network logic, read parameters from the network.
        /// </summary>
        /// <param name="network">The network that this logic class belongs to.</param>
        public override void Init(BasicNetwork network)
        {
            base.Init(network);

            this.layerF1 = this.Network.GetLayer(ART1Pattern.TAG_F1);
            this.layerF2 = this.Network.GetLayer(ART1Pattern.TAG_F2);
            this.inhibitF2 = new bool[this.layerF2.NeuronCount];
            this.synapseF1toF2 = this.Network.Structure.FindSynapse(this.layerF1, this.layerF2, true);
            this.synapseF2toF1 = this.Network.Structure.FindSynapse(this.layerF2, this.layerF1, true);
            this.outputF1 = new BiPolarNeuralData(this.layerF1.NeuronCount);
            this.outputF2 = new BiPolarNeuralData(this.layerF2.NeuronCount);

            this.a1 = this.Network.GetPropertyDouble(ARTLogic.PROPERTY_A1);
            this.b1 = this.Network.GetPropertyDouble(ARTLogic.PROPERTY_B1);
            this.c1 = this.Network.GetPropertyDouble(ARTLogic.PROPERTY_C1);
            this.d1 = this.Network.GetPropertyDouble(ARTLogic.PROPERTY_D1);
            this.l = this.Network.GetPropertyDouble(ARTLogic.PROPERTY_L);
            this.vigilance = this.Network.GetPropertyDouble(ARTLogic.PROPERTY_VIGILANCE);

            this.noWinner = this.layerF2.NeuronCount;
            Reset();

        }
 /// <summary>
 /// Can be overridden by subclasses.  Usually used to implement recurrent 
 /// layers. 
 /// </summary>
 /// <param name="layer">The layer to process.</param>
 /// <param name="input">The input to this layer.</param>
 /// <param name="source">The source from this layer.</param>
 virtual public void PreprocessLayer(ILayer layer, INeuralData input, ISynapse source)
 {
     // nothing to do		
 }
 /// <summary>
 /// Once the Hopfield synapse has been found, this method is called
 /// to train it.
 /// </summary>
 /// <param name="recurrent">The recurrent Hopfield synapse to train.</param>
 private void TrainHopfieldSynapse(ISynapse recurrent)
 {
     foreach (INeuralDataPair data in this.Training)
     {
         TrainHopfieldSynapse(recurrent, data.Input);
     }
 }