Example #1
 /// <summary>
 /// Creates connections between the specified neuron and all neurons in the specified layer
 /// </summary>
 /// <param name="fromNeuron">
 ///            neuron to connect </param>
 /// <param name="toLayer">
 ///            layer to connect to </param>
 public static void createConnection(Neuron fromNeuron, Layer toLayer)
 {
     foreach (Neuron toNeuron in toLayer.Neurons)
     {
         ConnectionFactory.createConnection(fromNeuron, toNeuron);
     }
 }
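For context, a minimal usage sketch of the helper above. It is only an illustration: it assumes parameterless Neuron and Layer constructors and that this overload is a member of ConnectionFactory, neither of which is shown in the snippet itself.

 // hypothetical usage sketch - assumes parameterless Neuron/Layer constructors
 // and that the overload above lives on ConnectionFactory
 Layer hiddenLayer = new Layer();
 hiddenLayer.addNeuron(new Neuron());
 hiddenLayer.addNeuron(new Neuron());

 Neuron source = new Neuron();
 // wires source into every neuron of hiddenLayer
 ConnectionFactory.createConnection(source, hiddenLayer);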
Example #2
 /// <summary>
 /// Creates 2D layer with specified dimensions, filled with neurons with
 /// specified properties
 /// </summary>
 /// <param name="dimensions">       layer dimensions </param>
 /// <param name="neuronProperties"> neuron properties </param>
 public FeatureMapLayer(Dimension2D dimensions, NeuronProperties neuronProperties, Dimension2D kernelDimension) : this(dimensions, kernelDimension)
 {
     for (int i = 0; i < dimensions.Height * dimensions.Width; i++)
     {
         Neuron neuron = NeuronFactory.createNeuron(neuronProperties);
         addNeuron(neuron);
     }
 }
Example #3
        //    /**
        //     * Iterate all layers, neurons and connection weight and apply distort randomization
        //     * @param neuralNetwork
        //     */
        //    @Override
        //    public void randomize(NeuralNetwork neuralNetwork) {
        //        for (Layer layer : neuralNetwork.getLayers()) {
        //            for (Neuron neuron : layer.getNeurons()) {
        //                for (Connection connection : neuron.getInputConnections()) {
        //                    double weight = connection.getWeight().getValue();
        //                    connection.getWeight().setValue(distort(weight));
        //                }
        //            }
        //        }
        //
        //    }

        /// <summary>
        /// Iterates the given neuron's input connections and applies distort randomization to each connection weight </summary>
        /// <param name="neuron"> neuron whose input connection weights are randomized </param>
        public override void randomize(Neuron neuron)
        {
            foreach (Connection connection in neuron.InputConnections)
            {
                double weight = connection.Weight.Value;
                connection.Weight.Value = distort(weight);
            }
        }
Example #4
 /// <summary>
 /// This method implements weights update procedure for the single neuron
 /// </summary>
 /// <param name="neuron">
 ///            neuron to update weights </param>
 /// <param name="desiredOutput">
 ///            desired output of the neuron </param>
 protected internal virtual void updateNeuronWeights(Neuron neuron, double desiredOutput)
 {
     foreach (Connection connection in neuron.InputConnections)
     {
         double input       = connection.Input;
         double deltaWeight = input * desiredOutput * this.learningRate;
         connection.Weight.inc(deltaWeight);
     }
 }
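The loop applies the plain Hebbian rule, deltaWeight = input * desiredOutput * learningRate, to every input connection. A small arithmetic sketch with made-up numbers, only to show how the sign of the desired output pushes the weight up or down:

 // hypothetical values, only to illustrate the update rule
 double learningRate  = 0.1;
 double input         = 0.8;
 double desiredOutput = 1.0;

 double deltaWeight = input * desiredOutput * learningRate;   // 0.08 -> weight increases
 // with desiredOutput = -1.0 the same expression gives -0.08 -> weight decreases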
Example #5
 /// <summary>
 /// Creates forward connection pattern between specified layers, connecting the
 /// neuron at each index of fromLayer to the neuron at the same index of toLayer
 /// with weight 1 (toLayer must have at least as many neurons as fromLayer)
 /// </summary>
 /// <param name="fromLayer">
 ///            layer to connect </param>
 /// <param name="toLayer">
 ///            layer to connect to </param>
 public static void forwardConnect(Layer fromLayer, Layer toLayer)
 {
     for (int i = 0; i < fromLayer.NeuronsCount; i++)
     {
         Neuron fromNeuron = fromLayer.getNeuronAt(i);
         Neuron toNeuron   = toLayer.getNeuronAt(i);
         createConnection(fromNeuron, toNeuron, 1);
     }
 }
Example #6
        /// <summary>
        /// Creates a 2D layer with specified dimensions, filled with neurons created
        /// from the specified neuron properties
        /// </summary>
        /// <param name="dimensions"> layer dimensions (width and height) </param>
        /// <param name="neuronProperties"> neuron properties </param>
        public FeatureMapLayer(Dimension2D dimensions, NeuronProperties neuronProperties)
        {
            this.dimensions = dimensions;

            for (int i = 0; i < dimensions.Height * dimensions.Width; i++)
            {
                Neuron neuron = NeuronFactory.createNeuron(neuronProperties);
                addNeuron(neuron);
            }
        }
Example #7
        /// <summary>
        /// Creates a new instance of InputLayer with specified number of input neurons </summary>
        /// <param name="neuronsCount"> input neurons count for this layer </param>
        public InputLayer(int neuronsCount)
        {
            NeuronProperties inputNeuronProperties = new NeuronProperties(typeof(InputNeuron), typeof(Linear));

            for (int i = 0; i < neuronsCount; i++)
            {
                Neuron neuron = NeuronFactory.createNeuron(inputNeuronProperties);
                this.addNeuron(neuron);
            }
        }
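A minimal usage sketch of this constructor; the resulting layer simply holds the requested number of input neurons built from the InputNeuron/Linear properties:

        // hypothetical usage: an input layer with three input neurons
        InputLayer inputLayer = new InputLayer(3);
        // inputLayer.NeuronsCount == 3, each neuron created from the InputNeuron/Linear properties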
Example #8
        /// <summary>
        /// This method implements weights update procedure for the single neuron
        /// </summary>
        /// <param name="neuron">
        ///            neuron to update weights </param>
        protected internal override void updateNeuronWeights(Neuron neuron)
        {
            double output = neuron.Output;

            foreach (Connection connection in neuron.InputConnections)
            {
                double input       = connection.Input;
                double deltaWeight = input * output * this.learningRate;
                connection.Weight.dec(deltaWeight);                 // the only difference from UnsupervisedHebbianLearning is this subtraction instead of addition
            }
        }
Example #9
        /// <summary>
        /// This method implements weights update procedure for the single neuron
        /// </summary>
        /// <param name="neuron">
        ///            neuron to update weights for </param>
        protected internal override void updateNeuronWeights(Neuron neuron)
        {
            double output = neuron.Output;

            foreach (Connection connection in neuron.InputConnections)
            {
                double input       = connection.Input;
                double weight      = connection.Weight.Value;
                double deltaWeight = this.learningRate * input * (output - weight);
                connection.Weight.inc(deltaWeight);
            }
        }
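In this variant the weight is pulled toward the neuron's output, scaled by the input and the learning rate. A small arithmetic sketch with hypothetical values:

        // hypothetical values, only to show the weight moving toward the output
        double learningRate = 0.1;
        double input = 1.0, output = 0.9, weight = 0.5;

        double deltaWeight = learningRate * input * (output - weight);   // 0.1 * 1.0 * 0.4 = 0.04
        weight += deltaWeight;                                            // 0.54, a step toward 0.9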
Example #10
        /// <summary>
        /// This method implements weights update procedure for the single neuron
        /// </summary>
        /// <param name="neuron">
        ///            neuron to update weights </param>
        protected internal override void updateNeuronWeights(Neuron neuron)
        {
            double output = neuron.Output;

            foreach (Connection connection in neuron.InputConnections)
            {
                double input       = connection.Input;
                double netInput    = neuron.NetInput;
                double deltaWeight = (input - netInput) * output * this.learningRate;                 // is it right to use netInput here?
                connection.Weight.inc(deltaWeight);
            }
        }
Example #11
        /// <summary>
        /// This method implements weights update procedure for the single neuron
        /// In addition to the weight change in LMS it also applies a change to the neuron's threshold
        /// </summary>
        /// <param name="neuron">
        ///            neuron to update weights </param>
        public override void updateNeuronWeights(Neuron neuron)
        {
            // adjust the input connection weights with method from superclass
            base.updateNeuronWeights(neuron);

            // and adjust the neuron's threshold
            ThresholdNeuron thresholdNeuron = (ThresholdNeuron)neuron;
            // get the neuron's error
            double neuronError = thresholdNeuron.Error;
            // get the neuron's threshold
            double thresh = thresholdNeuron.Thresh;

            // calculate new threshold value
            thresh = thresh - this.learningRate * neuronError;
            // apply the new threshold
            thresholdNeuron.Thresh = thresh;
        }
Example #12
        /// <summary>
        /// Creates full connectivity within the layer - connects each neuron to all
        /// other neurons within the same layer, with the specified weight and delay
        /// values for all connections.
        /// </summary>
        public static void fullConnect(Layer layer, double weightVal, int delay)
        {
            int neuronNum = layer.NeuronsCount;

            for (int i = 0; i < neuronNum; i++)
            {
                for (int j = 0; j < neuronNum; j++)
                {
                    if (j == i)
                    {
                        continue;
                    }
                    Neuron from = layer.getNeuronAt(i);
                    Neuron to   = layer.getNeuronAt(j);
                    createConnection(from, to, weightVal, delay);
                }         // j
            }             // i
        }
Example #13
        /// <summary>
        /// This method implements weights update procedure for the single neuron
        /// </summary>
        /// <param name="neuron">
        ///            neuron to update weights </param>
        protected internal override void updateNeuronWeights(Neuron neuron)
        {
            double output = neuron.Output;

            foreach (Connection connection in neuron.InputConnections)
            {
                double input = connection.Input;

                if (((input > 0) && (output > 0)) || ((input <= 0) && (output <= 0)))
                {
                    connection.Weight.inc(this.learningRate);
                }
                else
                {
                    connection.Weight.dec(this.learningRate);
                }
            }
        }
Example #14
        /// <summary>
        /// This method implements weights update procedure for the single neuron for
        /// the back propagation with momentum factor
        /// </summary>
        /// <param name="neuron"> neuron to update weights </param>
        public override void updateNeuronWeights(Neuron neuron)
        {
            foreach (Connection connection in neuron.InputConnections)
            {
                double input = connection.Input;
                if (input == 0)
                {
                    continue;
                }

                // get the error for specified neuron,
                double neuronError = neuron.Error;

                // tanh can be used to minimise the impact of big error values, which can cause network instability
                // suggested at https://sourceforge.net/tracker/?func=detail&atid=1107579&aid=3130561&group_id=238532
                // double neuronError = Math.tanh(neuron.getError());

                Weight weight = connection.Weight;
                MomentumWeightTrainingData weightTrainingData = (MomentumWeightTrainingData)weight.TrainingData;

                //double currentWeightValue = weight.getValue();
                double previousWeightValue = weightTrainingData.previousValue;
                double weightChange        = this.learningRate * neuronError * input + momentum * (weight.value - previousWeightValue);
                // save previous weight value
                //weight.getTrainingData().set(TrainingData.PREVIOUS_WEIGHT, currentWeightValue);
                weightTrainingData.previousValue = weight.value;


                // if the learning is not in batch mode (online mode), apply the weight change immediately
                if (this.InBatchMode == false)
                {
                    weight.weightChange = weightChange;
                    weight.value       += weightChange;
                }                 // otherwise, sum the weight changes and apply them at the end of the epoch
                else
                {
                    weight.weightChange += weightChange;
                }
            }
        }
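The weight change combines a gradient-style term with a momentum term built from the previous weight movement. A short arithmetic sketch with hypothetical values, only to show how the two terms are formed:

        // hypothetical values for a single connection
        double learningRate = 0.2, momentum = 0.7;
        double neuronError = 0.5, input = 1.0;
        double currentWeight = 0.30, previousWeight = 0.25;

        double gradientTerm = learningRate * neuronError * input;          // 0.10
        double momentumTerm = momentum * (currentWeight - previousWeight); // 0.035
        double weightChange = gradientTerm + momentumTerm;                 // 0.135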
Example #15
        /// <summary>
        /// Creates connections with shared weights between two feature maps.
        /// Assumes that toMap belongs to a pooling layer.
        /// <para/>
        /// In this implementation, there is no overlapping between kernel positions.
        /// </summary>
        /// <param name="fromMap"> source feature map </param>
        /// <param name="toMap">   destination feature map </param>
        public override void connectMaps(FeatureMapLayer fromMap, FeatureMapLayer toMap)
        {
            int    kernelWidth  = kernel.Width;
            int    kernelHeight = kernel.Height;
            Weight weight       = new Weight(1);

            for (int x = 0; x < fromMap.Width - kernelWidth + 1; x += kernelWidth)             // should the step be the kernel size?
            {
                for (int y = 0; y < fromMap.Height - kernelHeight + 1; y += kernelHeight)
                {
                    Neuron toNeuron = toMap.getNeuronAt(x / kernelWidth, y / kernelHeight);
                    for (int dy = 0; dy < kernelHeight; dy++)
                    {
                        for (int dx = 0; dx < kernelWidth; dx++)
                        {
                            int    fromX      = x + dx;
                            int    fromY      = y + dy;
                            Neuron fromNeuron = fromMap.getNeuronAt(fromX, fromY);
                            ConnectionFactory.createConnection(fromNeuron, toNeuron, weight);
                        }
                    }
                }
            }
        }
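Because the loops step by the full kernel size, every kernelWidth x kernelHeight block of the source map feeds exactly one pooling neuron at (x / kernelWidth, y / kernelHeight). A tiny standalone sketch of that index mapping for an assumed 4x4 map and 2x2 kernel:

        // hypothetical 4x4 source map with a 2x2 kernel, just to print the block-to-neuron mapping
        int mapWidth = 4, mapHeight = 4, kernelWidth = 2, kernelHeight = 2;
        for (int x = 0; x < mapWidth - kernelWidth + 1; x += kernelWidth)
        {
            for (int y = 0; y < mapHeight - kernelHeight + 1; y += kernelHeight)
            {
                // prints: (0,0)->(0,0)  (0,2)->(0,1)  (2,0)->(1,0)  (2,2)->(1,1)
                Console.WriteLine($"block ({x},{y}) -> pooling neuron ({x / kernelWidth},{y / kernelHeight})");
            }
        }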
Example #16
        /// <summary>
        /// Creates a delayed connection between the two specified neurons with the given weight and delay </summary>
        /// <param name="fromNeuron"> neuron to connect (connection source) </param>
        /// <param name="toNeuron"> neuron to connect to (connection target) </param>
        /// <param name="weightVal"> connection weight value </param>
        /// <param name="delay"> delay for the connection </param>
        public static void createConnection(Neuron fromNeuron, Neuron toNeuron, double weightVal, int delay)
        {
            DelayedConnection connection = new DelayedConnection(fromNeuron, toNeuron, weightVal, delay);

            toNeuron.addInputConnection(connection);
        }
Example #17
        /// <summary>
        /// Creates connection between two specified neurons
        /// </summary>
        /// <param name="fromNeuron">
        ///            neuron to connect (connection source) </param>
        /// <param name="toNeuron">
        ///            neuron to connect to (connection target) </param>
        /// <param name="weight">
        ///            connection weight </param>
        public static void createConnection(Neuron fromNeuron, Neuron toNeuron, Weight weight)
        {
            Connection connection = new Connection(fromNeuron, toNeuron, weight);

            toNeuron.addInputConnection(connection);
        }
Example #18
 public override void addInputConnection(Neuron fromNeuron, double weightVal)
 {
     // intentionally left empty - input connections are ignored for this neuron type
 }
Example #19
 /// <summary>
 /// Creates an instance of delayed connection between the specified neurons
 /// with the specified weight and delay </summary>
 /// <param name="fromNeuron"> neuron to connect (source neuron) </param>
 /// <param name="toNeuron"> neuron to connect to (destination neuron) </param>
 /// <param name="weightVal"> weight value for the connection </param>
 /// <param name="delay"> delay for the connection </param>
 public DelayedConnection(Neuron fromNeuron, Neuron toNeuron, double weightVal, int delay) : base(fromNeuron, toNeuron, weightVal)
 {
     this.delay = delay;
 }
Example #20
 public override void addInputConnection(Neuron fromNeuron)
 {
     // intentionally left empty - input connections are ignored for this neuron type
 }