// Esempio n. 1 (example 1)
        /// <summary>
        /// Builds four differently-shaped candidate networks and wraps them in a
        /// <c>Democracy</c> ensemble that arbitrates their outputs. Each network
        /// receives its own freshly-allocated layer/activation arrays.
        /// </summary>
        private void InitializeNeuralNetwork()
        {
            NeuralNetwork[] networks = new NeuralNetwork[4];

            // Candidate 0: 784-16-16-10 with mixed activations.
            networks[0] = new NeuralNetwork(
                new int[] { 784, 16, 16, 10 },
                new NeuralNetwork.LayerActivation[]
                {
                    NeuralNetwork.LayerActivation.Relu,
                    NeuralNetwork.LayerActivation.LeakyRelu,
                    NeuralNetwork.LayerActivation.Sigmoid,
                });

            // Candidate 1: same shape, all-sigmoid.
            networks[1] = new NeuralNetwork(
                new int[] { 784, 16, 16, 10 },
                new NeuralNetwork.LayerActivation[]
                {
                    NeuralNetwork.LayerActivation.Sigmoid,
                    NeuralNetwork.LayerActivation.Sigmoid,
                    NeuralNetwork.LayerActivation.Sigmoid,
                });

            // Candidate 2: shallower 784-20-10, all-sigmoid.
            networks[2] = new NeuralNetwork(
                new int[] { 784, 20, 10 },
                new NeuralNetwork.LayerActivation[]
                {
                    NeuralNetwork.LayerActivation.Sigmoid,
                    NeuralNetwork.LayerActivation.Sigmoid,
                });

            // Candidate 3: deeper 784-16-16-16-10, leaky-relu hidden layers.
            networks[3] = new NeuralNetwork(
                new int[] { 784, 16, 16, 16, 10 },
                new NeuralNetwork.LayerActivation[]
                {
                    NeuralNetwork.LayerActivation.LeakyRelu,
                    NeuralNetwork.LayerActivation.LeakyRelu,
                    NeuralNetwork.LayerActivation.LeakyRelu,
                    NeuralNetwork.LayerActivation.Sigmoid,
                });

            network = democracy = new Democracy(networks);
        }
// Esempio n. 2 (example 2)
        /// <summary>
        /// Creates a fully-connected layer mapping <paramref name="inputSize"/> inputs
        /// to <paramref name="outputSize"/> outputs, selects the activation function
        /// pair for <paramref name="layerType"/>, and optionally links this layer as
        /// the successor of <paramref name="previousLayer"/>.
        /// </summary>
        /// <param name="inputSize">Number of inputs feeding this layer.</param>
        /// <param name="outputSize">Number of neurons (outputs) in this layer.</param>
        /// <param name="layerType">Activation to use; defaults to Tanh. Unknown values fall back to Relu.</param>
        /// <param name="previousLayer">Preceding layer to chain from, or null for the first layer.</param>
        public NeuralNetworkLayer(int inputSize, int outputSize, NeuralNetwork.LayerActivation layerType = NeuralNetwork.LayerActivation.Tanh, NeuralNetworkLayer previousLayer = null)
        {
            InputSize  = inputSize;
            OutputSize = outputSize;
            Size       = inputSize * outputSize;

            // One bias per output neuron; weights stored flat (inputSize * outputSize).
            Bias   = new float[outputSize];
            Weight = new float[Size];

            // Wire this layer in as the successor of the previous one, if any.
            if (previousLayer != null)
            {
                previousLayer.nextLayer = this;
            }

            // Pick the activation function and its derivative.
            if (layerType == NeuralNetwork.LayerActivation.Tanh)
            {
                FlatFunction = Tanh;
                FlatDeriv    = TanhD;
            }
            else if (layerType == NeuralNetwork.LayerActivation.Sigmoid)
            {
                FlatFunction = Sigmoid;
                FlatDeriv    = SigmoidD;
            }
            else if (layerType == NeuralNetwork.LayerActivation.LeakyRelu)
            {
                FlatFunction = LeakyRelu;
                FlatDeriv    = LeakyReluD;
            }
            else
            {
                // Relu proper, and the fallback for any unrecognized enum value
                // (matches the original switch's default arm).
                FlatFunction = Relu;
                FlatDeriv    = ReluD;
            }

            LayerActivation = layerType;
            InitializeWeights();
            InitializeBiases();
        }
// Esempio n. 3 (example 3)
        /// <summary>
        /// Builds an ensemble over <paramref name="participants"/>. All networks must
        /// share the same input size, output size, and final-layer activation. A
        /// "Reviewer" network is created that takes the concatenated outputs of every
        /// participant and reduces them to a single OutputSize-wide result, ending
        /// with the same activation the participants use.
        /// </summary>
        /// <param name="participants">The candidate networks to arbitrate between; must be non-empty.</param>
        /// <exception cref="ArgumentException">Thrown when <paramref name="participants"/> is null or empty.</exception>
        /// <exception cref="Exception">Thrown when participants disagree on I/O size or output activation.</exception>
        public Democracy(NeuralNetwork[] participants)
        {
            // Guard: everything below indexes participants[0]; fail with a clear
            // message instead of a NullReference/IndexOutOfRange exception.
            if (participants == null || participants.Length == 0)
            {
                throw new ArgumentException("At least one participant network is required.", nameof(participants));
            }

            // The first participant defines the shape every other must match.
            NeuralNetwork.LayerActivation type = participants[0].Layers.Last().LayerActivation;
            InputSize  = participants[0].InputSize;
            OutputSize = participants[0].OutputSize;
            foreach (NeuralNetwork neuralNetwork in participants)
            {
                if (neuralNetwork.InputSize != InputSize || neuralNetwork.OutputSize != OutputSize)
                {
                    throw new Exception("Neural networks are not identical in input/output size!");
                }
                if (neuralNetwork.Layers.Last().LayerActivation != type)
                {
                    throw new Exception("All participants must end with the same output activation!");
                }
            }
            Participants = participants;

            // Reviewer input = all participant outputs concatenated; hidden layer is
            // twice OutputSize; final activation mirrors the participants' own.
            Reviewer = new NeuralNetwork(
                new int[] { OutputSize * participants.Length, OutputSize * 2, OutputSize },
                new NeuralNetwork.LayerActivation[] { NeuralNetwork.LayerActivation.LeakyRelu, type });
        }