public float[] Think(float[] input) // Compute inputs with the network and return outputs
        {
            // Feeds one input vector forward through the network and returns the
            // ReLU-squashed activations of the output layer.
            //
            // input:   one value per input neuron; must supply at least
            //          Inputs.Length values.
            // returns: Outputs.Length values, one per output neuron.
            // throws:  ArgumentNullException / ArgumentException on bad input
            //          (previously these surfaced as NullReference /
            //          IndexOutOfRange exceptions mid-loop).
            if (input == null)
            {
                throw new ArgumentNullException(nameof(input));
            }
            if (input.Length < Inputs.Length)
            {
                throw new ArgumentException(
                    $"Expected at least {Inputs.Length} input values, got {input.Length}.",
                    nameof(input));
            }

            // Clear activation state left over from the previous call.
            EraseMemory();

            // Load the given values into the input neurons.
            for (int i = 0; i < Inputs.Length; i++)
            {
                // NOTE(review): SetValue receives the neuron itself as its first
                // argument — presumably a static-style setter; confirm signature.
                Inputs[i].SetValue(Inputs[i], input[i]);
            }

            // Read the computed activations from the output neurons.
            // ReLU is the active squashing function (Sigmoid and Tanh were
            // previously tried here).
            float[] output = new float[Outputs.Length];
            for (int i = 0; i < Outputs.Length; i++)
            {
                output[i] = Neuron.ReLU(Outputs[i].Activation);
            }
            return output;
        }
        /// <summary>
        /// Runs one backpropagation pass over every trainable layer of
        /// <paramref name="brain"/>, adjusting the weights in place and
        /// returning the per-layer / per-neuron / per-weight deltas applied.
        /// </summary>
        /// <param name="brain">Network whose weights are updated in place.</param>
        /// <param name="TweakAmount">Learning-rate-style scale factor forwarded to DeltaW.</param>
        /// <param name="outputNum">
        /// NOTE(review): currently unused — the one-hot target is sized from the
        /// actual output layer instead of the former hard-coded 10; confirm this
        /// parameter can be removed from callers.
        /// </param>
        /// <param name="expectedNum">Index of the expected class; that slot of the one-hot target is set to 1.</param>
        /// <returns>The weight deltas that were applied, indexed [layer][neuron][weight].</returns>
        public static float[][][] Backpropagate(Brain brain, float TweakAmount, int outputNum, int expectedNum)
        {
            // Trainable layers are everything except the input layer:
            // layers[k] aliases brain.AllLayers[k + 1].
            Neuron[][] layers = new Neuron[brain.HiddenLayers.Length + 1][];
            Array.Copy(brain.AllLayers, 1, layers, 0, brain.AllLayers.Length - 1);

            // One-hot target vector. targetOutput is indexed by output-layer
            // neuron number, so size it from the output layer itself (this was
            // hard-coded to 10); the Math.Max guard keeps expectedNum in range.
            int outputLayerSize = layers[layers.Length - 1].Length;
            float[] targetOutput = new float[Math.Max(outputLayerSize, expectedNum + 1)];
            targetOutput[expectedNum] = 1.0F;

            float[][][] allChanges = new float[layers.Length][][];

            // Sweep from the output layer backwards toward the first hidden layer.
            for (int layerNum = layers.Length - 1; layerNum >= 0; layerNum--)
            {
                Neuron[]  layer         = layers[layerNum];
                float[][] neuronChanges = new float[layer.Length][];

                for (int neuronNum = 0; neuronNum < layer.Length; neuronNum++)
                {
                    Neuron  neuron        = layer[neuronNum];
                    float[] weightChanges = new float[neuron.Weight.Length];

                    for (int i = 0; i < neuron.Weight.Length; i++)
                    {
                        // Weight i connects this neuron to neuron i of the
                        // previous layer; because layers[layerNum] is
                        // AllLayers[layerNum + 1], that previous layer is
                        // brain.AllLayers[layerNum].
                        float activation_j = brain.AllLayers[layerNum][i].Activation;

                        // Hidden and output neurons use different Delta_i
                        // overloads; the DeltaW computation itself is shared
                        // (the two branches previously duplicated it verbatim).
                        float delta_i = neuron.Type == Neuron.NeuronType.HiddenNeuron
                            ? Delta_i(layers, layerNum, neuronNum, targetOutput)
                            : Delta_i(neuron, targetOutput[neuronNum], Neuron.ReLU(neuron.Activation));

                        float deltaW = DeltaW(TweakAmount, delta_i, activation_j);

                        // Skip NaN updates so one bad gradient cannot poison the
                        // weights; the recorded change stays 0 in that case.
                        if (!float.IsNaN(deltaW))
                        {
                            weightChanges[i]  = deltaW;
                            neuron.Weight[i] += deltaW;
                        }
                    }
                    neuronChanges[neuronNum] = weightChanges;
                }
                allChanges[layerNum] = neuronChanges;
            }

            // NOTE(review): weights are mutated during the backward sweep, so
            // hidden-layer deltas computed via Delta_i(layers, ...) see the
            // already-updated downstream weights — confirm this is intended.
            return allChanges;
        }