Example #1
0
 //Constructor
 /// <summary>
 /// Creates an initialized instance.
 /// </summary>
 /// <param name="location">The neuron's location.</param>
 /// <param name="analogActivation">The instance of an analog activation function.</param>
 /// <param name="bias">The constant bias.</param>
 /// <param name="firingThreshold">The firing threshold value. It must be GE0 and LT1. Every time the current normalized activation is higher than the normalized past reference activation by at least this threshold, it is evaluated as a firing event.</param>
 /// <param name="firingThresholdMaxRefDeepness">Maximum age of the past activation for the evaluation of the firing event.</param>
 /// <param name="retainmentStrength">The strength of the analog neuron's retainment property. It enables the leaky integrator feature of the neuron.</param>
 /// <param name="predictorsProviderCfg">The configuration of the predictors provider. Can be null (no predictors).</param>
 /// <exception cref="ArgumentNullException">When analogActivation is null.</exception>
 /// <exception cref="ArgumentException">When analogActivation is a spiking activation function.</exception>
 public HiddenNeuron(NeuronLocation location,
                     AFAnalogBase analogActivation,
                     double bias,
                     double firingThreshold,
                     int firingThresholdMaxRefDeepness,
                     double retainmentStrength,
                     PredictorsProviderSettings predictorsProviderCfg
                     )
 {
     //Activation checks first - fail fast before any state is assigned
     if (analogActivation == null)
     {
         throw new ArgumentNullException(nameof(analogActivation));
     }
     if (analogActivation.TypeOfActivation == ActivationType.Spiking)
     {
         throw new ArgumentException("Wrong type of the activation function.", nameof(analogActivation));
     }
     Location   = location;
     Statistics = new NeuronStatistics();
     Bias       = bias;
     _activationFn             = analogActivation;
     _analogFiringThreshold    = firingThreshold;
     //The history queue is needed only when the reference deepness reaches at least 2
     _histActivationsQueue     = firingThresholdMaxRefDeepness < 2 ? null : new SimpleQueue<double>(firingThresholdMaxRefDeepness);
     _analogRetainmentStrength = retainmentStrength;
     //Predictors provider is optional
     _predictorsProvider       = predictorsProviderCfg != null ? new PredictorsProvider(predictorsProviderCfg) : null;
     OutputData = new NeuronOutputData();
     Reset(false);
     return;
 }
Example #2
0
        /// <summary>
        /// Tests whether the activation function can be used as the FF network's hidden layer activation.
        /// </summary>
        /// <param name="activationCfg">The configuration of the activation function.</param>
        /// <returns>True when the activation is analog, supports the derivative and does not depend on surrounding values.</returns>
        public static bool IsAllowedHiddenAF(IActivationSettings activationCfg)
        {
            //Only analog activations are eligible at all
            if (activationCfg.TypeOfActivation != ActivationType.Analog)
            {
                return false;
            }
            //Instantiate the function (fixed seed - only its capabilities are inspected)
            AFAnalogBase analogAF = (AFAnalogBase)ActivationFactory.CreateAF(activationCfg, new Random(0));
            //The derivative is required and a multi-input dependency is not allowed
            return analogAF.SupportsDerivative && !analogAF.DependsOnSorround;
        }
Example #3
0
 //Constructor
 /// <summary>
 /// Creates an initialized instance.
 /// </summary>
 /// <param name="numOfNeurons">The number of layer neurons.</param>
 /// <param name="activation">The activation function of the layer.</param>
 /// <exception cref="ArgumentOutOfRangeException">When numOfNeurons is less than 1.</exception>
 /// <exception cref="ArgumentNullException">When activation is null.</exception>
 public Layer(int numOfNeurons, AFAnalogBase activation)
 {
     //Check correctness
     if (numOfNeurons < 1)
     {
         throw new ArgumentOutOfRangeException(nameof(numOfNeurons), $"Invalid parameter value: {numOfNeurons}");
     }
     //Setup
     //Bug fix: original passed (paramName, message) to ArgumentException in swapped order;
     //ArgumentNullException derives from ArgumentException, so existing catch clauses keep working.
     Activation          = activation ?? throw new ArgumentNullException(nameof(activation), "Activation can't be null");
     NumOfLayerNeurons   = numOfNeurons;
     NumOfInputNodes     = -1;       //Unknown until the structure is finalized
     WeightsStartFlatIdx = 0;
     BiasesStartFlatIdx  = 0;
     NeuronsStartFlatIdx = 0;
     return;
 }
Example #4
0
 //Methods
 /// <summary>
 /// Adds the new hidden layer into the network structure.
 /// </summary>
 /// <param name="numOfNeurons">The number of layer's neurons.</param>
 /// <param name="activation">The activation function of the layer neurons.</param>
 /// <exception cref="InvalidOperationException">When the network structure is already finalized.</exception>
 /// <exception cref="ArgumentException">When the activation depends on multiple inputs.</exception>
 public void AddLayer(int numOfNeurons, AFAnalogBase activation)
 {
     //Guard clause: structure must still be open for modifications
     if (Finalized)
     {
         throw new InvalidOperationException("Can't add new layer. Network structure is finalized.");
     }
     if (activation.DependsOnSorround)
     {
         throw new ArgumentException("Activation requires multiple input for the Compute method. It is not allowed for the hidden layer.", nameof(activation));
     }
     //Add new layer
     LayerCollection.Add(new Layer(numOfNeurons, activation));
     return;
 }
Example #5
0
        /// <summary>
        /// Finalizes the network internal structure and locks it against the further changes.
        /// </summary>
        /// <param name="outputActivation">The activation function of the output layer.</param>
        /// <exception cref="InvalidOperationException">When the structure has already been finalized.</exception>
        /// <exception cref="ArgumentException">When outputActivation depends on multiple inputs but there are less than 2 output values.</exception>
        public void FinalizeStructure(AFAnalogBase outputActivation)
        {
            //Finalization is a one-shot operation
            if (Finalized)
            {
                throw new InvalidOperationException($"Network structure has been already finalized.");
            }
            if (outputActivation.DependsOnSorround && NumOfOutputValues < 2)
            {
                throw new ArgumentException("Activation requires multiple input for the Compute method but number of output values is less than 2.", "outputActivation");
            }
            //Add output layer
            LayerCollection.Add(new Layer(NumOfOutputValues, outputActivation));
            //Finalize layers
            //Walk the layers in order, assigning each one its input width and its start
            //offsets within the flat neuron and flat weight arrays.
            int numOfInputNodes     = NumOfInputValues;
            int neuronsFlatStartIdx = 0;
            int weightsFlatStartIdx = 0;

            _isAllowedNguyenWidrowRandomization = true;
            foreach (Layer layer in LayerCollection)
            {
                layer.FinalizeStructure(numOfInputNodes, neuronsFlatStartIdx, weightsFlatStartIdx);
                neuronsFlatStartIdx += layer.NumOfLayerNeurons;
                //Per layer: (neurons x inputs) weights plus one bias per neuron
                weightsFlatStartIdx += layer.NumOfLayerNeurons * layer.NumOfInputNodes + layer.NumOfLayerNeurons;
                //Outputs of this layer feed the next layer
                numOfInputNodes      = layer.NumOfLayerNeurons;
                //NOTE(review): Nguyen-Widrow randomization appears to be restricted to networks
                //using only Elliot or TanH activations - confirm against the randomizer's requirements
                if (layer.Activation.GetType() != typeof(AFAnalogElliot) &&
                    layer.Activation.GetType() != typeof(AFAnalogTanH)
                    )
                {
                    _isAllowedNguyenWidrowRandomization = false;
                }
            }
            //A network consisting of the output layer only does not qualify either
            if (LayerCollection.Count < 2)
            {
                _isAllowedNguyenWidrowRandomization = false;
            }
            //Totals accumulated above become the flat-storage dimensions
            NumOfNeurons = neuronsFlatStartIdx;
            _flatWeights = new double[weightsFlatStartIdx];
            return;
        }