        /// <summary>
        /// Builds a feed-forward neural network from a list of layer sizes.
        /// </summary>
        /// <param name="sizes">First is the input size; the others are the output sizes of the successive layers.</param>
        /// <param name="types">The activation of each layer. Length of types must be one less than the length of sizes.</param>
        public NeuralNetwork(int[] sizes, LayerActivation[] types = null)
        {
            if (sizes == null || sizes.Length < 2)
            {
                throw new ArgumentException("There must be at least one layer (an input size plus at least one output size).");
            }

            if (types == null)
            {
                // No activations given: default every layer to Relu.
                types = new LayerActivation[sizes.Length - 1];
                for (int i = 0; i < types.Length; i++)
                {
                    types[i] = LayerActivation.Relu;
                }
            }
            else if (types.Length != sizes.Length - 1)
            {
                throw new ArgumentException("Length of types must be one less than the length of sizes!");
            }
            NeuralNetworkLayer previous = null;
            int inputSize = sizes[0];

            // Chain the layers: each layer's output size becomes the next layer's input size.
            Layers = new NeuralNetworkLayer[sizes.Length - 1];
            for (int i = 1; i < sizes.Length; i++)
            {
                int outputSize           = sizes[i];
                NeuralNetworkLayer layer = new NeuralNetworkLayer(inputSize, outputSize, types[i - 1], previous);
                Layers[i - 1] = layer;
                previous      = layer;
                inputSize     = outputSize;
            }

            InputSize  = Layers[0].InputSize;
            OutputSize = Layers.Last().OutputSize;
        }
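
        // A minimal usage sketch (the sizes below are hypothetical, not from
        // the original source):
        //
        //   var net = new NeuralNetwork(
        //       new[] { 784, 64, 10 },                       // input 784, one hidden layer of 64, output 10
        //       new[] { NeuralNetwork.LayerActivation.Relu,  // one activation per layer
        //               NeuralNetwork.LayerActivation.Sigmoid });
        //
        //   // Omitting the second argument defaults every layer to Relu.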
        /// <summary>
        /// Records one layer's forward pass so its gradients can be accumulated during training.
        /// </summary>
        public NeuralTrainLayer(NeuralNetworkLayer layer, float[] input)
        {
            float[] result = layer.Feed(input, false);
            Input  = input;
            Output = result;

            Layer = layer;
            DCDB  = new float[Output.Length];                  // dC/db: one entry per output neuron
            DCDW  = new float[input.Length * Output.Length];   // dC/dw: one entry per (input, output) weight
        }
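
        // A hedged usage sketch: the forward pass below is hypothetical (only
        // the constructor above is from the source), showing how each layer of
        // a network could be wrapped for training:
        //
        //   var trainLayers = new NeuralTrainLayer[net.Layers.Length];
        //   float[] signal  = inputSample;
        //   for (int i = 0; i < net.Layers.Length; i++)
        //   {
        //       trainLayers[i] = new NeuralTrainLayer(net.Layers[i], signal);
        //       signal         = trainLayers[i].Output;   // feed forward, layer by layer
        //   }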
        /// <summary>
        /// Creates one dense layer and, when given, links it after the previous layer.
        /// </summary>
        public NeuralNetworkLayer(int inputSize, int outputSize, NeuralNetwork.LayerActivation layerType = NeuralNetwork.LayerActivation.Tanh, NeuralNetworkLayer previousLayer = null)
        {
            InputSize  = inputSize;
            OutputSize = outputSize;
            Size       = inputSize * outputSize;   // number of weights in the flattened weight matrix

            Bias   = new float[outputSize];        // one bias per output neuron
            Weight = new float[Size];              // flattened weight matrix, one entry per (input, output) pair

            if (previousLayer != null)
            {
                previousLayer.nextLayer = this;
            }

            switch (layerType)
            {
            case NeuralNetwork.LayerActivation.Tanh:
                FlatFunction = Tanh;
                FlatDeriv    = TanhD;
                break;

            case NeuralNetwork.LayerActivation.Sigmoid:
                FlatFunction = Sigmoid;
                FlatDeriv    = SigmoidD;
                break;

            case NeuralNetwork.LayerActivation.Relu:
                FlatFunction = Relu;
                FlatDeriv    = ReluD;
                break;

            case NeuralNetwork.LayerActivation.LeakyRelu:
                FlatFunction = LeakyRelu;
                FlatDeriv    = LeakyReluD;
                break;

            default:
                // Unrecognized activation values fall back to Relu.
                FlatFunction = Relu;
                FlatDeriv    = ReluD;
                break;
            }
            LayerActivation = layerType;
            InitializeWeights();
            InitializeBiases();
        }
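
        // The activation implementations referenced above (Tanh, Relu, ...) are
        // not shown in this excerpt. A per-element sketch, assuming the standard
        // textbook definitions (the 0.01f leak factor is an assumption), would be:
        //
        //   private static float Relu(float x)       => Math.Max(0f, x);
        //   private static float ReluD(float x)      => x > 0f ? 1f : 0f;
        //   private static float LeakyRelu(float x)  => x > 0f ? x : 0.01f * x;
        //   private static float LeakyReluD(float x) => x > 0f ? 1f : 0.01f;
        //   private static float Sigmoid(float x)    => 1f / (1f + (float)Math.Exp(-x));
        //   private static float SigmoidD(float x)   { var s = Sigmoid(x); return s * (1f - s); }
        //   private static float Tanh(float x)       => (float)Math.Tanh(x);
        //   private static float TanhD(float x)      { var t = (float)Math.Tanh(x); return 1f - t * t; }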