Example #1
 /// <summary>
 ///     Initializes a new instance of the <see cref="RecurrentContext" /> class.
 /// </summary>
 /// <param name='sourceNode'>
 ///     Source node.
 /// </param>
 /// <param name='rateOfUpdate'>
 ///     Rate of update.
 /// </param>
 /// <param name='parentLayer'>
 ///     Parent layer.
 /// </param>
 /// <param name='activationFunction'>
 ///     Activation function.
 /// </param>
 public RecurrentContext(Base sourceNode, Double rateOfUpdate, Layer.Base parentLayer, ActivationFunction.Base activationFunction)
     : base(parentLayer, activationFunction)
 {
     _Value = 0.0f;
     _SourceNode = sourceNode;
     _RateOfUpdate = rateOfUpdate;
 }
Example #2
        public HiddenLayer(uint numberOfNeurons, Layer previous)
            : base(numberOfNeurons)
        {
            this.previousLayer = previous;
            // default activation function is logsig
            this.activationFunction = ActivationFunction.LogSig;
            this.pointer = new ToActivationFunction(NMath.LogSig);
            this.pDeriv = new ToActivationDerivative(NMath.DLogSig);
            this.input = previous.GetOutputVector();
            //
            try
            {

                layerNeurons = new Neuron[numberOfNeurons];
                //
                for(int i=0; i<numberOfNeurons; i++)
                {
                    layerNeurons[i] = new Neuron(previous.GetSize());
                    layerNeurons[i].SetInputVector(input);
                }
                //
            }
            catch(System.Exception exception)
            {
                Console.WriteLine("****EXCEPTION****\n {0} {1}", exception.Message);
                previousLayer = null;
                pointer = null;
                layerNeurons = null;
            }
        }
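The constructor above wires the neuron's delegates to NMath.LogSig and NMath.DLogSig, which are not included in the snippet. A minimal sketch of what those helpers presumably compute, assuming the standard log-sigmoid and its derivative (the actual NMath implementation may differ):

        // Hypothetical stand-ins for NMath.LogSig / NMath.DLogSig (assumed logistic form).
        public static class NMathSketch
        {
            // logsig(x) = 1 / (1 + e^(-x))
            public static double LogSig(double x)
            {
                return 1.0 / (1.0 + System.Math.Exp(-x));
            }

            // d/dx logsig(x) = y * (1 - y), where y = logsig(x)
            public static double DLogSig(double x)
            {
                double y = LogSig(x);
                return y * (1.0 - y);
            }
        }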
Example #3
 public CustomizableNode(CombinationFunction combinationFunction,
                         ActivationFunction activationFunction,
                         float[] constants)
     : base(constants, activationFunction)
 {
     this.combinationFunction = combinationFunction;
 }
Example #4
        public void Build(int inputsCount, 
            int[] hiddenLayersSpec, 
            int outputsCount, 
            ActivationFunction activationFunction,
            double maximumAbsoluteWeight)
        {
            layers = new List<ILayer>();
            inputLayer = new InputLayer();
            for (int i = 0; i < inputsCount; i++)
                inputLayer.AddNeuron(new Neuron());

            isBipolar = activationFunction == Neuron.BipolarActivationFunction;
            layers.Add((Layer)inputLayer);

            if (hiddenLayersSpec != null)
                for (int i = 0; i < hiddenLayersSpec.Length; i++)
                {
                    ILayer layer = new Layer();
                    for (int j = 0; j < hiddenLayersSpec[i]; j++)
                    {
                        layer.AddNeuron(new Neuron(activationFunction));
                    }
                    layers.Add(layer);
                }

            outputLayer = new Layer();
            for (int i = 0; i < outputsCount; i++)
            {
                outputLayer.AddNeuron(new Neuron(activationFunction));
            }
            layers.Add(outputLayer);

            ConnectLayers(maximumAbsoluteWeight);
        }
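For orientation, a hypothetical call to Build; the enclosing network class name is an assumption, but Neuron.BipolarActivationFunction is the same static member the method itself compares against:

            // Hypothetical usage: a 2-3-1 bipolar network with weights limited to [-0.5, 0.5].
            var network = new NeuralNetwork();
            network.Build(
                inputsCount: 2,
                hiddenLayersSpec: new[] { 3 },   // one hidden layer of 3 neurons
                outputsCount: 1,
                activationFunction: Neuron.BipolarActivationFunction,
                maximumAbsoluteWeight: 0.5);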
Example #5
 public GrossbergLayer(ActivationFunction function, int neuronsCount, int inputsCount, ActivationFunction activationFunction)
     : base(neuronsCount, inputsCount, activationFunction)
 {
     // create each neuron
     for (int i = 0; i < neuronsCount; i++)
         neurons[i] = new ActivationNeuron(inputsCount, function);
 }
 public CounterPropagationNetwork(int inputsCount, ActivationFunction function,  params int[] neuronsCount)
     : base(inputsCount, 2)
 {
     // create layer
     layers[0] = new KohonenLayer(neuronsCount[0], inputsCount);
     layers[1] = new GrossbergLayer(neuronsCount[1], neuronsCount[0], function);
 }
Example #7
 public Neuron(double threshold)
 {
     this.inputValue = 0.0;
     this.threshold = threshold;
     this.inputSynapses = new HashSet<Synapse>();
     this.outputSynapses = new HashSet<Synapse>();
     this.activation = ActivationType.Step;
 }
Example #8
 /// <summary>
 /// Build a new Layer with "neurons" neurons. Every neuron
 /// has "inputs" inputs and the activation function f.
 /// </summary>
 /// <param name="inputs">Number of inputs</param>
 /// <param name="neurons">Number of neurons</param>
 /// <param name="f">Activation function of each neuron</param>
 public Layer(int neurons, int inputs, ActivationFunction f)
 {
     nn = neurons;
     ni = inputs;
     this.neurons = new Neuron[nn];
     output = new float[nn];
     for (int i = 0; i < neurons; i++)
         this.neurons[i] = new Neuron(inputs, f);
 }
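A hypothetical instantiation of this Layer, borrowing the SigmoidActivationFunction class that appears as the default in Example #20:

     // Hypothetical usage: a layer of 4 neurons, each taking 3 inputs and sharing one sigmoid.
     ActivationFunction f = new SigmoidActivationFunction();
     Layer hidden = new Layer(4, 3, f);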
Example #9
        public NeuralNetwork(int[] layers, ActivationFunction afunc, double lambda, double learningRate = 1, bool useBias = true)
        {
            Layers = new List<Layer>();

            for(int i = 1; i < layers.Length; i++)
            {
                Layers.Add(new Layer(layers[i], layers[i - 1], afunc, lambda, learningRate, useBias));
            }
        }
Example #10
        public Layer(int neuronCount, int inputCount, ActivationFunction function)
        {
            _inputCount = inputCount;
            _neuronCount = neuronCount;

            _neurons = new Neuron[neuronCount];
            for(int i = 0; i < neuronCount; i++)
            {
                _neurons[i] = new Neuron(inputCount, function);
            }
        }
Example #11
        public Node()
        {
            outConnections  = new List<Connection>();
            inConnections = new List<Connection>();
            inValues = new List<double>();
            inWeights = new List<double>();

            activate = Activation.SigmoidActivation.evaluate;
            combine = Combination.Summation.evaluate;
            fired = false;
            bias = false;
        }
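Activation.SigmoidActivation.evaluate and Combination.Summation.evaluate are referenced but not shown. A plausible sketch, assuming the combiner is a weighted sum over inValues/inWeights and the activation is the logistic function; the names, signatures, and enclosing classes are guesses:

        // Hypothetical sketches of the referenced delegates (signatures assumed).
        public static class Summation
        {
            // weighted sum of the node's inputs
            public static double evaluate(System.Collections.Generic.List<double> values,
                                          System.Collections.Generic.List<double> weights)
            {
                double sum = 0.0;
                for (int i = 0; i < values.Count; i++)
                    sum += values[i] * weights[i];
                return sum;
            }
        }

        public static class SigmoidActivation
        {
            // logistic squashing of the combined input
            public static double evaluate(double x)
            {
                return 1.0 / (1.0 + System.Math.Exp(-x));
            }
        }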
Example #12
	public Layer(Matrix weightMatrix, Vector biasVector, ActivationFunction af) {

		activationFunction = af;
		this.weightMatrix = weightMatrix;
		lastWeightUpdateMatrix = new Matrix(weightMatrix.getRowDimension(),
				weightMatrix.getColumnDimension());
		penultimateWeightUpdateMatrix = new Matrix(weightMatrix
				.getRowDimension(), weightMatrix.getColumnDimension());

		this.biasVector = biasVector;
		lastBiasUpdateVector = new Vector(biasVector.getRowDimension());
		penultimateBiasUpdateVector = new Vector(biasVector.getRowDimension());
	}
Example #13
        // Constructor for the neuron. It sets up the weight array and records which activation function to use.
        public Neuron(int NumWeights, ActivationFunction activation)
        {
            mWeights = new float[NumWeights];
            // Initialize each weight to a random value in [0, 1).
            for (int idx = 0; idx < mWeights.Length; idx++) {
                mWeights[idx] = (float)Ants.Rand.NextDouble();
            }

            this.Activation = activation;
        }
Example #14
 /// <summary>
 /// Create a new neural network
 /// with "inputs" inputs and layers_desc.Length layers of neurons.
 /// Layer i is made of layers_desc[i] neurons.
 /// The activation function of each neuron is set to n_act.
 /// The learning algorithm is set to learn.
 /// </summary>
 /// <param name="inputs">Number of inputs of the network</param>
 /// <param name="layers_desc">Number of neurons for each layer of the network</param>
 /// <param name="n_act">Activation function for each neuron in the network</param>
 /// <param name="learn">Learning algorithm to be used by the neural network</param>
 public NeuralNetwork(int inputs, int[] layers_desc, ActivationFunction n_act, LearningAlgorithm learn)
 {
     if (layers_desc.Length < 1)
         throw new Exception("PERCEPTRON : cannot build perceptron, it must have at least 1 layer of neurons");
     if (inputs < 1)
         throw new Exception("PERCEPTRON : cannot build perceptron, it must have at least 1 input");
     la = learn;
     ni = inputs;
     layers = new Layer[layers_desc.Length];
     layers[0] = new Layer(layers_desc[0], ni);
     for (int i = 1; i < layers_desc.Length; i++)
         layers[i] = new Layer(layers_desc[i], layers_desc[i - 1], n_act);
 }
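A hypothetical construction of a small 2-3-1 perceptron with this API; SigmoidActivationFunction is borrowed from Example #20, and learn stands for whatever LearningAlgorithm implementation the library provides:

     // Hypothetical usage: 2 inputs, one hidden layer of 3 neurons, 1 output neuron.
     NeuralNetwork net = new NeuralNetwork(2, new int[] { 3, 1 },
                                           new SigmoidActivationFunction(), learn);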
Example #15
        public Neuron(int inputCount, ActivationFunction function)
        {
            _weights = new double[inputCount];
            _inputCount = inputCount;
            _function = function;

            Random rnd = new Random();
            for(int i = 0; i < inputCount; i++)
            {
                _weights[i] = (double) rnd.Next(-10, 10) / 10;
            }

            _bias = (double)rnd.Next(-10, 10) / 10;
        }
Example #16
 public ActivationNetwork( ActivationFunction function, int inputsCount, params int[] neuronsCount )
     : base(inputsCount, neuronsCount.Length)
 {
     // create each layer
     for ( int i = 0; i < layersCount; i++ )
     {
         layers[i] = new ActivationLayer(
             // neurons count in the layer
             neuronsCount[i],
             // inputs count of the layer
             ( i == 0 ) ? inputsCount : neuronsCount[i - 1],
             // activation function of the layer
             function );
     }
 }
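A hypothetical instantiation, assuming the SigmoidFunction class seen in a later snippet implements ActivationFunction: a 2-input network with one hidden layer of 2 neurons and a single output neuron.

     // Hypothetical usage; every layer shares the same sigmoid activation function.
     var network = new ActivationNetwork(new SigmoidFunction(), 2, 2, 1);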
Example #17
        public BasicNode(float[] constants, ActivationFunction activationFunction)
        {
            if(constants != null) this.constants = constants;

            if(activationFunction != null){
                this.activationFunction = activationFunction;
            }
            else{
                // assign to the field (not the parameter); default to a simple step function
                this.activationFunction = delegate(float value)
                {
                    if(value <= 0) return 0;
                    return 1;
                };
            }

            meta = new Dictionary<string, string>();
        }
Example #18
	public Layer(int numberOfNeurons, int numberOfInputs,
			double lowerLimitForWeights, double upperLimitForWeights,
			ActivationFunction af) {

		activationFunction = af;
		this.weightMatrix = new Matrix(numberOfNeurons, numberOfInputs);
		lastWeightUpdateMatrix = new Matrix(weightMatrix.getRowDimension(),
				weightMatrix.getColumnDimension());
		penultimateWeightUpdateMatrix = new Matrix(weightMatrix
				.getRowDimension(), weightMatrix.getColumnDimension());

		this.biasVector = new Vector(numberOfNeurons);
		lastBiasUpdateVector = new Vector(biasVector.getRowDimension());
		penultimateBiasUpdateVector = new Vector(biasVector.getRowDimension());

		initializeMatrix(weightMatrix, lowerLimitForWeights,
				upperLimitForWeights);
		initializeVector(biasVector, lowerLimitForWeights, upperLimitForWeights);
	}
Example #19
        public Layer(int numNeurons, int numInputs, ActivationFunction afunc, double lambda, double learningRate, bool useBias)
        {
            AFunc = afunc;
            Lambda = lambda;

            var tempTheta = DenseMatrix.CreateRandom(numNeurons, numInputs, new MathNet.Numerics.Distributions.Normal());

            if(useBias)
            {
                isBiased = useBias;
                Theta = DenseMatrix.Create(numNeurons, 1, 1.0).Append(tempTheta);
            }
            else
            {
                Theta = tempTheta;
            }

            LearningRate = learningRate;
        }
Example #20
		/// <summary>
		/// Build a neuron with Ni inputs with a default
		/// activation function (SIGMOID)
		/// </summary>
		/// <param name="Ni">number of inputs</param>
		public Neuron(int Ni)
		{
			w = new float[Ni];
			last_w = new float[Ni];
			f = new SigmoidActivationFunction();
		}
Example #21
		/// <summary>
		/// Build a neuron with Ni inputs
		/// </summary>
		/// <param name="Ni">number of inputs</param>
		/// <param name="af">The activation function of the neuron</param>
		public Neuron(int Ni, ActivationFunction af)
		{
			w = new float[Ni];
			last_w = new float[Ni];
			f = af;
		}
Example #22
 public static string ActivationFunctionToStr(ActivationFunction t) {
   string ret = VisionLabPINVOKE.ActivationFunctionToStr((int)t);
   if (VisionLabPINVOKE.SWIGPendingException.Pending) throw VisionLabPINVOKE.SWIGPendingException.Retrieve();
   return ret;
 }
        public unsafe void ComputeErrors(IDisposable state, IDeviceArray outputs, IDeviceArray errors, Marshaled<IDeviceArray2[]> lowerWeightsM, Marshaled<IDeviceArray[]> lowerErrorsM, ActivationFunction function, float alpha)
        {
            var lowerWeights = lowerWeightsM.Instance();
            var lowerErrors = lowerErrorsM.Instance();

            var mOutputs = outputs.ToManaged();
            var mErrors = (ManagedArray)errors;

            Debug.Assert(lowerWeights.Length != 0 && lowerWeights.Length == lowerErrors.Length);

            fixed (float* pOutputs = mOutputs.InternalArray, pErrors = mErrors.InternalArray)
            {
                var outputsPtr = mOutputs.ToPtr(pOutputs);
                var errorsPtr = mErrors.ToPtr(pErrors);

                if (function == ActivationFunction.Sigmoid)
                {
                    for (int oIdx = 0; oIdx < outputs.Size; oIdx++)
                    {
                        float sum = 0.0f;
                        for (int lIdx = 0; lIdx < lowerErrors.Length; lIdx++)
                        {
                            var lowerWeightsMA = (ManagedArray2)lowerWeights[lIdx];
                            var lowerErrorsMA = (ManagedArray)lowerErrors[lIdx];

                            Debug.Assert(lowerWeightsMA.Size2 == lowerErrorsMA.Size);
                            Debug.Assert(lowerWeightsMA.Size1 == outputs.Size);

                            fixed (float* pLowerWeights = lowerWeightsMA.InternalArray, pLowerErrors = lowerErrorsMA.InternalArray)
                            {
                                sum += ComputeErrors_LowerErrorSum(lowerErrorsMA.ToPtr(pLowerErrors), lowerWeightsMA.ToPtr2(pLowerWeights), oIdx);
                            }
                        }
                        errorsPtr[oIdx] = sum * SigmoidD(outputsPtr[oIdx], alpha);
                    }
                }
                else
                {
                    for (int oIdx = 0; oIdx < outputs.Size; oIdx++)
                    {
                        float sum = 0.0f;
                        for (int lIdx = 0; lIdx < lowerErrors.Length; lIdx++)
                        {
                            var lowerWeightsMA = (ManagedArray2)lowerWeights[lIdx];
                            var lowerErrorsMA = (ManagedArray)lowerErrors[lIdx];

                            Debug.Assert(lowerWeightsMA.Size2 == lowerErrorsMA.Size);
                            Debug.Assert(lowerWeightsMA.Size1 == outputs.Size);

                            fixed (float* plw = lowerWeightsMA.InternalArray, ple = lowerErrorsMA.InternalArray) // pin the errors array, not the weights array twice
                            {
                                sum += ComputeErrors_LowerErrorSum(lowerErrorsMA.ToPtr(ple), lowerWeightsMA.ToPtr2(plw), oIdx);
                            }
                        }
                        errorsPtr[oIdx] = sum * alpha;
                    }
                }
            }
        }
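ComputeErrors_LowerErrorSum is not part of the snippet. Going by the dimension asserts (Size1 matches the outputs, Size2 matches the lower errors), it presumably accumulates each lower-layer error weighted by its connection to output oIdx; a rough sketch over plain arrays rather than the pointer wrappers:

        // Hypothetical sketch; the real helper works on the pointer wrappers shown above.
        private static float ComputeErrors_LowerErrorSum_Sketch(float[] lowerErrors, float[,] lowerWeights, int oIdx)
        {
            float sum = 0.0f;
            for (int j = 0; j < lowerErrors.Length; j++)
                sum += lowerErrors[j] * lowerWeights[oIdx, j];   // weights assumed indexed [output, lower]
            return sum;
        }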
Example #24
 /// <summary>
 /// Set an activation function to all neurons of the network
 /// </summary>
 /// <param name="f">An activation function</param>
 public void setActivationFunction(ActivationFunction f)
 {
     foreach (Layer l in layers)
         l.setActivationFunction(f);
 }
Example #25
		/// <summary>
		/// Set the activation function f to all neurons of the layer
		/// </summary>
		/// <param name="f">An activation function</param>
		public void setActivationFunction(ActivationFunction f) 
		{
			foreach(Neuron n in neurons)
				n.F = f;
		}
Example #26
 public ActivationLayer( int neuronsCount, int inputsCount, ActivationFunction function )
     : base(neuronsCount, inputsCount)
 {
     for ( int i = 0; i < neuronsCount; i++ )
         neurons[i] = new ActivationNeuron( inputsCount, function );
 }
        public unsafe void ComputeErrors(IDisposable state, IDeviceArray outputs, IDeviceArray errors, IDeviceArray desiredOutputs, ActivationFunction function, float alpha)
        {
            var mOutputs = outputs.ToManaged();
            var mErrors = (ManagedArray)errors;
            var mDesiredOutputs = desiredOutputs.ToManaged();

            fixed (float* pOutputs = mOutputs.InternalArray, pErrors = mErrors.InternalArray, pDesiredOutputs = mDesiredOutputs.InternalArray)
            {
                var outputsPtr = mOutputs.ToPtr(pOutputs);
                var errorsPtr = mErrors.ToPtr(pErrors);
                var desiredOutputsPtr = mDesiredOutputs.ToPtr(pDesiredOutputs);

                if (function == ActivationFunction.Sigmoid)
                {
                    for (int oIdx = 0; oIdx < outputs.Size; oIdx++)
                    {
                        errorsPtr[oIdx] = (desiredOutputsPtr[oIdx] - outputsPtr[oIdx]) * SigmoidD(outputsPtr[oIdx], alpha);
                    }
                }
                else
                {
                    for (int oIdx = 0; oIdx < outputs.Size; oIdx++)
                    {
                        errorsPtr[oIdx] = (desiredOutputsPtr[oIdx] - outputsPtr[oIdx]) * alpha;
                    }
                }
            }
        }
        private void NextButtonClick(object sender, RoutedEventArgs e)
        {
            switch (selectedNetworkType)
            {
                case 0:
                    int[] neuronsCount = Layers.Select(l => l.NeuronsCount).ToArray();
                    ActivationFunction[] activationFunctions = new ActivationFunction[Layers.Count];
                    int i = 0;
                    foreach (var layer in Layers)
                    {
                        if(layer.ActivationFunction == GuiActivationFunction.ThresholdFunction)
                        {
                            activationFunctions[i] = new ThresholdFunction();
                        }
                        else if (layer.ActivationFunction == GuiActivationFunction.LinearFunction)
                        {
                            activationFunctions[i] = new LinearFunction();
                        }
                        else
                        {
                            activationFunctions[i] = new SigmoidFunction();
                        }
                        i++;
                    }

                    this.Network = new ActivationNetwork(activationFunctions, selectedInputCount, neuronsCount);

                    break;
                case 1:
                    this.Network = new KohonenNetwork(this.selectedInputCount, Layers[0].NeuronsCount);
                    break;
                case 2:

                    if (Layers[0].ActivationFunction == GuiActivationFunction.ThresholdFunction)
                    {
                        this.Network = new CounterPropagationNetwork(this.selectedInputCount, new ThresholdFunction(), Layers[0].NeuronsCount, Layers[1].NeuronsCount);

                    }
                    else
                    {
                        this.Network = new CounterPropagationNetwork(this.selectedInputCount, new SigmoidFunction(), Layers[0].NeuronsCount, Layers[1].NeuronsCount);

                    }

                    break;
            }

            this.Close();
        }
        public void ComputeForwardRTLR(IDisposable state, Marshaled<DeviceArrayFactory[]> inputsM, Marshaled<IDeviceArray2[]> weightsM, IDeviceArray biases, IDeviceArray outputs, IDeviceArray netValueDerivates, ActivationFunction function, float alpha)
        {
            var inputs = inputsM.Instance();
            var weights = weightsM.Instance();

            Debug.Assert(inputs.Length != 0 && inputs.Length == weights.Length);

            var mOutputs = outputs.ToManaged();
            var mNVDerivs = netValueDerivates.ToManaged();
            var mBiases = (ManagedArray)biases;

            fixed (float* pOutputs = mOutputs.InternalArray, pBiases = mBiases.InternalArray, pNVDerivs = mNVDerivs.InternalArray)
            {
                var outputsPtr = mOutputs.ToPtr(pOutputs);
                var biasesPtr = mBiases.ToPtr(pBiases);
                var nvDerivsPtr = mNVDerivs.ToPtr(pNVDerivs);

                if (function == ActivationFunction.Sigmoid)
                {
                    for (int oIdx = 0; oIdx < outputs.Size; oIdx++)
                    {
                        float sum = biasesPtr[oIdx];
                        for (int lIdx = 0; lIdx < inputs.Length; lIdx++)
                        {
                            var inputsMA = (inputs[lIdx]()).ToManaged();
                            var weightsMA = (ManagedArray2)weights[lIdx];

                            Debug.Assert(inputsMA.Size != 0 && inputsMA.Size == weightsMA.Size1);
                            Debug.Assert(outputs.Size == weightsMA.Size2);

                            fixed (float* pInputs = inputsMA.InternalArray, pWeights = weightsMA.InternalArray)
                            {
                                sum += ComputeForward_Sum(inputsMA.ToPtr(pInputs), weightsMA.ToPtr2(pWeights), oIdx);
                            }
                        }

                        outputsPtr[oIdx] = Sigmoid(sum, alpha);
                        nvDerivsPtr[oIdx] = SigmoidD(sum, alpha);
                    }
                }
                else
                {
                    for (int oIdx = 0; oIdx < outputs.Size; oIdx++)
                    {
                        float sum = biasesPtr[oIdx];
                        for (int lIdx = 0; lIdx < inputs.Length; lIdx++)
                        {
                            var inputsMA = (inputs[lIdx]()).ToManaged();
                            var weightsMA = (ManagedArray2)weights[lIdx];

                            Debug.Assert(inputsMA.Size != 0 && inputsMA.Size == weightsMA.Size1);
                            Debug.Assert(outputs.Size == weightsMA.Size2);

                            fixed (float* pInputs = inputsMA.InternalArray, pWeights = weightsMA.InternalArray)
                            {
                                sum += ComputeForward_Sum(inputsMA.ToPtr(pInputs), weightsMA.ToPtr2(pWeights), oIdx);
                            }
                        }

                        outputsPtr[oIdx] = Math.Min(Math.Max(sum * alpha, -alpha), alpha);
                        nvDerivsPtr[oIdx] = alpha;
                    }
                }
            }
        }
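Similarly, ComputeForward_Sum is referenced but not shown. The asserts (inputs.Size == weights.Size1, outputs.Size == weights.Size2) suggest it is the weighted sum of one input group into output oIdx; a rough sketch over plain arrays, with the indexing orientation assumed:

        // Hypothetical sketch; the real helper works on the pointer wrappers shown above.
        private static float ComputeForward_Sum_Sketch(float[] inputs, float[,] weights, int oIdx)
        {
            float sum = 0.0f;
            for (int i = 0; i < inputs.Length; i++)
                sum += inputs[i] * weights[i, oIdx];   // weights assumed indexed [input, output]
            return sum;
        }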
Example #30
 /// <summary>
 /// Construct this layer with a non-default threshold function.
 /// </summary>
 /// <param name="thresholdFunction">The threshold function to use.</param>
 /// <param name="neuronCount">How many neurons in this layer.</param>
 public FeedforwardLayer(ActivationFunction thresholdFunction,
          int neuronCount)
 {
     this.fire = new double[neuronCount];
     this.activationFunction = thresholdFunction;
 }
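A hypothetical instantiation of this layer; the ActivationSigmoid name is an assumption, so substitute any available ActivationFunction implementation:

     // Hypothetical usage: an output layer of 3 neurons with a non-default threshold function.
     var layer = new FeedforwardLayer(new ActivationSigmoid(), 3);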