Example #1
0
 /// <summary>
 /// Construct a flat layer: store the activation function, neuron count and
 /// bias activation. No context layer feeds this one.
 /// </summary>
 public FlatLayer(IActivationFunction activation, int count, double biasActivation)
 {
     Activation = activation;
     _x10f4d88af727adbc = count;          // neuron count
     _x25922738b86264c8 = biasActivation; // bias activation
     _x4d51c0aa16352a14 = null;           // context-fed-by layer
 }
 /// <summary>
 /// Construct a flat layer from an activation function, a neuron count and
 /// a bias activation value.
 /// </summary>
 ///
 /// <param name="activation">The activation function.</param>
 /// <param name="count">The neuron count.</param>
 /// <param name="biasActivation">The bias activation.</param>
 public FlatLayer(IActivationFunction activation, int count,
     double biasActivation)
 {
     this.Activation = activation;
     this._count = count;
     this._biasActivation = biasActivation;
     // Until a context link is established, nothing feeds this layer's context.
     this._contextFedBy = null;
 }
Example #3
0
 /// <summary>
 /// Construct a flat layer.
 /// </summary>
 ///
 /// <param name="activation">The activation function.</param>
 /// <param name="count">The neuron count.</param>
 /// <param name="biasActivation">The bias activation.</param>
 public FlatLayer(IActivationFunction activation, int count, double biasActivation)
 {
     // No context layer feeds a freshly constructed layer.
     _contextFedBy = null;
     _biasActivation = biasActivation;
     _count = count;
     Activation = activation;
 }
Example #4
0
        /// <summary>
        /// Construct an RBF flat network.
        /// </summary>
        ///
        /// <param name="inputCount">The number of input neurons. (also the number of dimensions)</param>
        /// <param name="hiddenCount">The number of hidden neurons.</param>
        /// <param name="outputCount">The number of output neurons.</param>
        /// <param name="rbf">The radial basis functions for the hidden layer.</param>
        public FlatNetworkRBF(int inputCount, int hiddenCount,
                              int outputCount, IRadialBasisFunction[] rbf)
        {
            _rbf = rbf;

            // All three layers are linear; the radial basis functions supply
            // the non-linearity.
            var layers = new[]
                {
                    new FlatLayer(new ActivationLinear(), inputCount, 0.0d),
                    new FlatLayer(new ActivationLinear(), hiddenCount, 0.0d),
                    new FlatLayer(new ActivationLinear(), outputCount, 0.0d)
                };

            Init(layers);
        }
        /// <summary>
        /// Construct an RBF flat network.
        /// </summary>
        ///
        /// <param name="inputCount">The number of input neurons. (also the number of dimensions)</param>
        /// <param name="hiddenCount">The number of hidden neurons.</param>
        /// <param name="outputCount">The number of output neurons.</param>
        /// <param name="rbf">The radial basis functions for the hidden layer.</param>
        public FlatNetworkRBF(int inputCount, int hiddenCount,
                              int outputCount, IRadialBasisFunction[] rbf)
        {
            _rbf = rbf;

            // Build the three linear layers (input, hidden, output) in order.
            int[] counts = { inputCount, hiddenCount, outputCount };
            var layers = new FlatLayer[3];

            for (int i = 0; i < layers.Length; i++)
            {
                layers[i] = new FlatLayer(new ActivationLinear(), counts[i], 0.0d);
            }

            Init(layers);
        }
Example #6
0
 /// <summary>
 /// Construct an RBF flat network from three linear layers.
 /// </summary>
 ///
 /// <param name="inputCount">The number of input neurons.</param>
 /// <param name="hiddenCount">The number of hidden neurons.</param>
 /// <param name="outputCount">The number of output neurons.</param>
 /// <param name="rbf">The radial basis functions for the hidden layer.</param>
 public FlatNetworkRBF(int inputCount, int hiddenCount, int outputCount, IRadialBasisFunction[] rbf)
 {
     // Decompiler control-flow artifacts removed: the do/while guard
     // `(((uint) hiddenCount) & 0) != 0` is always false (runs once) and
     // `if (2 != 0)` is always true, so the body is straight-line code.
     this._rbf = rbf;

     FlatLayer[] layers = new FlatLayer[3];
     layers[0] = new FlatLayer(new ActivationLinear(), inputCount, 0.0);
     layers[1] = new FlatLayer(new ActivationLinear(), hiddenCount, 0.0);
     layers[2] = new FlatLayer(new ActivationLinear(), outputCount, 0.0);

     base.Init(layers);
 }
        /// <summary>
        /// Construct a flat neural network.
        /// </summary>
        ///
        /// <param name="input">Neurons in the input layer.</param>
        /// <param name="hidden1">Neurons in the first hidden layer, 0 for none.</param>
        /// <param name="hidden2">Neurons in the second hidden layer, 0 for none.</param>
        /// <param name="output">Neurons in the output layer.</param>
        /// <param name="tanh">True if this is a tanh activation, false for sigmoid.</param>
        public FlatNetwork(int input, int hidden1, int hidden2,
                           int output, bool tanh)
        {
            IActivationFunction linearAct = new ActivationLinear();

            IActivationFunction act;
            if (tanh)
            {
                act = new ActivationTANH();
            }
            else
            {
                act = new ActivationSigmoid();
            }

            FlatLayer[] layers;
            if ((hidden1 == 0) && (hidden2 == 0))
            {
                // No hidden layers: input feeds output directly.
                layers = new[]
                    {
                        new FlatLayer(linearAct, input, DefaultBiasActivation),
                        new FlatLayer(act, output, NoBiasActivation)
                    };
            }
            else if ((hidden1 == 0) || (hidden2 == 0))
            {
                // Exactly one hidden layer; its size is whichever count is non-zero.
                int count = Math.Max(hidden1, hidden2);
                layers = new[]
                    {
                        new FlatLayer(linearAct, input, DefaultBiasActivation),
                        new FlatLayer(act, count, DefaultBiasActivation),
                        new FlatLayer(act, output, NoBiasActivation)
                    };
            }
            else
            {
                // Two hidden layers.
                layers = new[]
                    {
                        new FlatLayer(linearAct, input, DefaultBiasActivation),
                        new FlatLayer(act, hidden1, DefaultBiasActivation),
                        new FlatLayer(act, hidden2, DefaultBiasActivation),
                        new FlatLayer(act, output, NoBiasActivation)
                    };
            }

            _isLimited = false;
            _connectionLimit = 0.0d;

            Init(layers);
        }
Example #8
0
 /// <summary>
 /// Construct a flat network from the given layers. Layers are flattened into
 /// parallel arrays indexed from the OUTPUT layer (index 0) back to the input
 /// layer, which is how the rest of the flat network addresses them.
 /// </summary>
 ///
 /// <param name="layers">The layers of the network to create.</param>
 public void Init(FlatLayer[] layers)
 {
     // Rewritten from decompiled goto-spaghetti. The decompiled form read
     // locals (num4, num5, layer, layer2, ...) before any assignment — which
     // fails C# definite-assignment and cannot compile — and hid a simple
     // reverse walk over the layers behind opaque, always-true/false guards.
     int length = layers.Length;

     this._inputCount = layers[0].Count;
     this._outputCount = layers[length - 1].Count;

     this._layerCounts = new int[length];
     this._layerContextCount = new int[length];
     this._weightIndex = new int[length];
     this._layerIndex = new int[length];
     this._activationFunctions = new IActivationFunction[length];
     this._layerFeedCounts = new int[length];
     this._contextTargetOffset = new int[length];
     this._contextTargetSize = new int[length];
     this._biasActivation = new double[length];

     int index = 0;       // position in the flat arrays (output layer first)
     int neuronCount = 0; // total neurons including bias/context neurons
     int weightCount = 0; // total weights across all layer transitions

     // Walk layers from output (last) to input (first).
     for (int i = length - 1; i >= 0; i--)
     {
         FlatLayer layer = layers[i];
         FlatLayer previousLayer = (i > 0) ? layers[i - 1] : null;

         this._biasActivation[index] = layer.BiasActivation;
         this._layerCounts[index] = layer.TotalCount;
         this._layerFeedCounts[index] = layer.Count;
         this._layerContextCount[index] = layer.ContextCount;
         this._activationFunctions[index] = layer.Activation;

         neuronCount += layer.TotalCount;

         if (previousLayer != null)
         {
             // Fully connected: every feed neuron here links to every neuron
             // (including bias/context) in the layer that feeds it.
             weightCount += layer.Count * previousLayer.TotalCount;
         }

         if (index == 0)
         {
             this._weightIndex[index] = 0;
             this._layerIndex[index] = 0;
         }
         else
         {
             // Running offsets into the flat weight and neuron arrays.
             this._weightIndex[index] = this._weightIndex[index - 1]
                 + (this._layerCounts[index] * this._layerFeedCounts[index - 1]);
             this._layerIndex[index] = this._layerIndex[index - 1]
                 + this._layerCounts[index - 1];
         }

         // Locate any layer whose context is fed by this layer, and record
         // where that layer's context neurons live in the flat output array.
         int neuronIndex = 0;
         for (int j = length - 1; j >= 0; j--)
         {
             if (layers[j].ContextFedBy == layer)
             {
                 this._hasContext = true;
                 this._contextTargetSize[index] = layers[j].ContextCount;
                 this._contextTargetOffset[index] = neuronIndex
                     + (layers[j].TotalCount - layers[j].ContextCount);
             }
             neuronIndex += layers[j].TotalCount;
         }

         index++;
     }

     // By default the whole network trains.
     this._beginTraining = 0;
     this._endTraining = this._layerCounts.Length - 1;

     this._weights = new double[weightCount];
     this._layerOutput = new double[neuronCount];
     this._layerSums = new double[neuronCount];

     this.ClearContext();
 }
Example #9
0
 /// <summary>
 /// Construct a flat neural network with up to two hidden layers.
 /// </summary>
 ///
 /// <param name="input">Neurons in the input layer.</param>
 /// <param name="hidden1">Neurons in the first hidden layer, 0 for none.</param>
 /// <param name="hidden2">Neurons in the second hidden layer, 0 for none.</param>
 /// <param name="output">Neurons in the output layer.</param>
 /// <param name="tanh">True for tanh activation, false for sigmoid.</param>
 public FlatNetwork(int input, int hidden1, int hidden2, int output, bool tanh)
 {
     // Rewritten from decompiled goto-spaghetti. The decompiled form was also
     // broken: on the sigmoid path it stored the activation in an unread local
     // (function3) and then built the layers from an unassigned one
     // (function2). Both paths now feed the same local.
     IActivationFunction linear = new ActivationLinear();
     IActivationFunction act = tanh
         ? (IActivationFunction) new ActivationTANH()
         : new ActivationSigmoid();

     FlatLayer[] layerArray;
     if ((hidden1 != 0) && (hidden2 != 0))
     {
         // Two hidden layers.
         layerArray = new FlatLayer[4];
         layerArray[0] = new FlatLayer(linear, input, 1.0);
         layerArray[1] = new FlatLayer(act, hidden1, 1.0);
         layerArray[2] = new FlatLayer(act, hidden2, 1.0);
         layerArray[3] = new FlatLayer(act, output, 0.0);
     }
     else if ((hidden1 != 0) || (hidden2 != 0))
     {
         // Exactly one hidden layer; its size is whichever count is non-zero.
         int num = Math.Max(hidden1, hidden2);
         layerArray = new FlatLayer[] { new FlatLayer(linear, input, 1.0), new FlatLayer(act, num, 1.0), new FlatLayer(act, output, 0.0) };
     }
     else
     {
         // No hidden layers: input feeds output directly.
         layerArray = new FlatLayer[] { new FlatLayer(linear, input, 1.0), new FlatLayer(act, output, 0.0) };
     }

     this._isLimited = false;
     this._connectionLimit = 0.0;
     this.Init(layerArray);
 }
Example #10
0
 /// <summary>
 /// Construct a flat network from an explicit array of layers.
 /// </summary>
 ///
 /// <param name="layers">The layers of the network to create.</param>
 public FlatNetwork(FlatLayer[] layers)
 {
     this.Init(layers);
 }
        /// <summary>
        /// Construct a flat network. Flattens the given layers into parallel
        /// arrays indexed from the OUTPUT layer (index 0) back to the input
        /// layer, and sizes the weight/output/sum arrays accordingly.
        /// </summary>
        ///
        /// <param name="layers">The layers of the network to create.</param>
        public void Init(FlatLayer[] layers)
        {
            int layerCount = layers.Length;

            _inputCount  = layers[0].Count;
            _outputCount = layers[layerCount - 1].Count;

            _layerCounts         = new int[layerCount];
            _layerContextCount   = new int[layerCount];
            _weightIndex         = new int[layerCount];
            _layerIndex          = new int[layerCount];
            _activationFunctions = new IActivationFunction[layerCount];
            _layerFeedCounts     = new int[layerCount];
            _contextTargetOffset = new int[layerCount];
            _contextTargetSize   = new int[layerCount];
            _biasActivation      = new double[layerCount];

            // index walks the flat arrays (output layer first); the counters
            // accumulate the totals used to size the flat arrays below.
            int index       = 0;
            int neuronCount = 0;
            int weightCount = 0;

            // Walk layers from output (last) to input (first).
            for (int i = layers.Length - 1; i >= 0; i--)
            {
                FlatLayer layer     = layers[i];
                FlatLayer nextLayer = null;

                // "next" in flat-array order is the layer that feeds this one.
                if (i > 0)
                {
                    nextLayer = layers[i - 1];
                }

                _biasActivation[index]      = layer.BiasActivation;
                _layerCounts[index]         = layer.TotalCount;
                _layerFeedCounts[index]     = layer.Count;
                _layerContextCount[index]   = layer.ContextCount;
                _activationFunctions[index] = layer.Activation;

                neuronCount += layer.TotalCount;

                // Fully connected: each feed neuron links to every neuron
                // (including bias/context) of the layer that feeds it.
                if (nextLayer != null)
                {
                    weightCount += layer.Count * nextLayer.TotalCount;
                }

                if (index == 0)
                {
                    _weightIndex[index] = 0;
                    _layerIndex[index]  = 0;
                }
                else
                {
                    // Running offsets into the flat weight and neuron arrays.
                    _weightIndex[index] = _weightIndex[index - 1]
                                          + (_layerCounts[index] * _layerFeedCounts[index - 1]);
                    _layerIndex[index] = _layerIndex[index - 1]
                                         + _layerCounts[index - 1];
                }

                // Find any layer whose context is fed by this layer and record
                // where its context neurons live in the flat output array.
                int neuronIndex = 0;
                for (int j = layers.Length - 1; j >= 0; j--)
                {
                    if (layers[j].ContextFedBy == layer)
                    {
                        _hasContext = true;
                        _contextTargetSize[index]   = layers[j].ContextCount;
                        _contextTargetOffset[index] = neuronIndex
                                                      + (layers[j].TotalCount - layers[j].ContextCount);
                    }
                    neuronIndex += layers[j].TotalCount;
                }

                index++;
            }

            // By default, training covers every layer.
            _beginTraining = 0;
            _endTraining   = _layerCounts.Length - 1;

            _weights     = new double[weightCount];
            _layerOutput = new double[neuronCount];
            _layerSums   = new double[neuronCount];

            ClearContext();
        }
Example #12
0
 /// <summary>
 /// Build the synapse and layer structure. Call after all layers have been
 /// added; converts the accumulated layers into a FlatNetwork.
 /// </summary>
 /// <exception cref="NeuralNetworkError">If fewer than two layers exist.</exception>
 public void FinalizeStructure()
 {
     // Rewritten from decompiled goto flow (which also contained unreachable
     // code after `goto Label_0088`) into the loop it actually implements.
     if (this._layers.Count < 2)
     {
         throw new NeuralNetworkError("There must be at least two layers before the structure is finalized.");
     }

     FlatLayer[] layerArray = new FlatLayer[this._layers.Count];

     for (int num = 0; num < this._layers.Count; num++)
     {
         BasicLayer layer = (BasicLayer) this._layers[num];
         if (layer.Activation == null)
         {
             // Layers with no explicit activation default to linear.
             layer.Activation = new ActivationLinear();
         }
         layerArray[num] = layer;
     }

     this._flat = new FlatNetwork(layerArray);
     this.FinalizeLimit();
     this._layers.Clear();
     this.EnforceLimit();
 }
        /// <summary>
        /// Build the synapse and layer structure. This method should be called after
        /// you are done adding layers to a network, or change the network's logic
        /// property.
        /// </summary>
        ///
        public void FinalizeStructure()
        {
            if (_layers.Count < 2)
            {
                throw new NeuralNetworkError(
                    "There must be at least two layers before the structure is finalized.");
            }

            var flatLayers = new FlatLayer[_layers.Count];

            int i = 0;
            while (i < _layers.Count)
            {
                var current = (BasicLayer) _layers[i];

                // Layers with no explicit activation default to linear.
                if (null == current.Activation)
                {
                    current.Activation = new ActivationLinear();
                }

                flatLayers[i] = current;
                i++;
            }

            _flat = new FlatNetwork(flatLayers);

            FinalizeLimit();
            _layers.Clear();
            EnforceLimit();
        }
Example #14
0
        /// <summary>
        /// Construct a flat network. The layers are flattened into parallel
        /// arrays ordered from the OUTPUT layer (index 0) back to the input
        /// layer; the weight/output/sum arrays are sized from the totals.
        /// </summary>
        ///
        /// <param name="layers">The layers of the network to create.</param>
        public void Init(FlatLayer[] layers)
        {
            int layerCount = layers.Length;

            _inputCount = layers[0].Count;
            _outputCount = layers[layerCount - 1].Count;

            _layerCounts = new int[layerCount];
            _layerContextCount = new int[layerCount];
            _weightIndex = new int[layerCount];
            _layerIndex = new int[layerCount];
            _activationFunctions = new IActivationFunction[layerCount];
            _layerFeedCounts = new int[layerCount];
            _contextTargetOffset = new int[layerCount];
            _contextTargetSize = new int[layerCount];
            _biasActivation = new double[layerCount];

            // index walks the flat arrays (output layer first); the two
            // counters accumulate totals used to size the arrays at the end.
            int index = 0;
            int neuronCount = 0;
            int weightCount = 0;

            // Walk layers from output (last) to input (first).
            for (int i = layers.Length - 1; i >= 0; i--)
            {
                FlatLayer layer = layers[i];
                FlatLayer nextLayer = null;

                // "next" in flat-array order is the layer that feeds this one.
                if (i > 0)
                {
                    nextLayer = layers[i - 1];
                }

                _biasActivation[index] = layer.BiasActivation;
                _layerCounts[index] = layer.TotalCount;
                _layerFeedCounts[index] = layer.Count;
                _layerContextCount[index] = layer.ContextCount;
                _activationFunctions[index] = layer.Activation;

                neuronCount += layer.TotalCount;

                // Fully connected: each feed neuron links to every neuron
                // (including bias/context) of the feeding layer.
                if (nextLayer != null)
                {
                    weightCount += layer.Count*nextLayer.TotalCount;
                }

                if (index == 0)
                {
                    _weightIndex[index] = 0;
                    _layerIndex[index] = 0;
                }
                else
                {
                    // Running offsets into the flat weight and neuron arrays.
                    _weightIndex[index] = _weightIndex[index - 1]
                                         + (_layerCounts[index]*_layerFeedCounts[index - 1]);
                    _layerIndex[index] = _layerIndex[index - 1]
                                        + _layerCounts[index - 1];
                }

                // Find any layer whose context is fed by this layer and record
                // where its context neurons sit in the flat output array.
                int neuronIndex = 0;
                for (int j = layers.Length - 1; j >= 0; j--)
                {
                    if (layers[j].ContextFedBy == layer)
                    {
                        _hasContext = true;
                        _contextTargetSize[index] = layers[j].ContextCount;
                        _contextTargetOffset[index] = neuronIndex
                                                     + (layers[j].TotalCount - layers[j].ContextCount);
                    }
                    neuronIndex += layers[j].TotalCount;
                }

                index++;
            }

            // By default, training covers every layer.
            _beginTraining = 0;
            _endTraining = _layerCounts.Length - 1;

            _weights = new double[weightCount];
            _layerOutput = new double[neuronCount];
            _layerSums = new double[neuronCount];

            ClearContext();
        }
Example #15
0
        /// <summary>
        /// Construct a flat neural network.
        /// </summary>
        ///
        /// <param name="input">Neurons in the input layer.</param>
        /// <param name="hidden1">Neurons in the first hidden layer, 0 for none.</param>
        /// <param name="hidden2">Neurons in the second hidden layer, 0 for none.</param>
        /// <param name="output">Neurons in the output layer.</param>
        /// <param name="tanh">True if this is a tanh activation, false for sigmoid.</param>
        public FlatNetwork(int input, int hidden1, int hidden2,
            int output, bool tanh)
        {
            IActivationFunction linearAct = new ActivationLinear();

            IActivationFunction act;
            if (tanh)
            {
                act = new ActivationTANH();
            }
            else
            {
                act = new ActivationSigmoid();
            }

            // Count the hidden layers actually requested to pick a topology.
            bool hasFirst = hidden1 != 0;
            bool hasSecond = hidden2 != 0;

            FlatLayer[] layers;
            if (hasFirst && hasSecond)
            {
                // Two hidden layers.
                layers = new[]
                    {
                        new FlatLayer(linearAct, input, DefaultBiasActivation),
                        new FlatLayer(act, hidden1, DefaultBiasActivation),
                        new FlatLayer(act, hidden2, DefaultBiasActivation),
                        new FlatLayer(act, output, NoBiasActivation)
                    };
            }
            else if (hasFirst || hasSecond)
            {
                // One hidden layer; its size is whichever count is non-zero.
                int count = Math.Max(hidden1, hidden2);
                layers = new[]
                    {
                        new FlatLayer(linearAct, input, DefaultBiasActivation),
                        new FlatLayer(act, count, DefaultBiasActivation),
                        new FlatLayer(act, output, NoBiasActivation)
                    };
            }
            else
            {
                // No hidden layers: input feeds output directly.
                layers = new[]
                    {
                        new FlatLayer(linearAct, input, DefaultBiasActivation),
                        new FlatLayer(act, output, NoBiasActivation)
                    };
            }

            _isLimited = false;
            _connectionLimit = 0.0d;

            Init(layers);
        }