Code Example #1
        private void Init()
        {
            //Init classes
            scalingFunction     = new ScalingFunction();
            activationFunctions = new ActivationFunctions();
            geneticAlgorithm    = new GeneticAlgorithm();
            neuralNetwork       = new NeuralNetwork();
            weightsGenerator    = new WeightsGenerator();

            //Init Lists
            keyStore    = new List<BTCKeyStore>();
            dataSet     = new List<DataSet>();
            valkeyStore = new List<BTCKeyStore>();
            valdataSet  = new List<DataSet>();
            nnld        = new List<NeuralNetworkLayerDesign>();

            currentMaxBytes = 0;
            deathRate       = 10; //If too high, then chance plays an increasing role and skews the result.
        }
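
Init() assigns to fields of the containing class rather than declaring locals. Below is a minimal sketch of the field declarations it presumes, using only the names and type names visible in the method itself; the access modifiers and the numeric field types (int) are assumptions, not taken from the project.

        // Assumed field declarations on the containing class (names from Init(); modifiers and numeric types are guesses).
        private ScalingFunction scalingFunction;
        private ActivationFunctions activationFunctions;
        private GeneticAlgorithm geneticAlgorithm;
        private NeuralNetwork neuralNetwork;
        private WeightsGenerator weightsGenerator;

        private List<BTCKeyStore> keyStore;
        private List<DataSet> dataSet;
        private List<BTCKeyStore> valkeyStore;
        private List<DataSet> valdataSet;
        private List<NeuralNetworkLayerDesign> nnld;

        private int currentMaxBytes;
        private int deathRate;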
Code Example #2
File: Form1.cs  Project: mmcc1/FreqTest
        private byte ExecuteNetwork(double[] exampleAddressDbl)
        {
            NeuralNetwork       nn = new NeuralNetwork();
            ScalingFunction     sf = new ScalingFunction();
            ActivationFunctions af = new ActivationFunctions();

            // Per-layer weight and bias arrays for the five layers built below.
            double[][] networkWeights = new double[5][];
            double[][] networkBias    = new double[5][];

            WeightsGenerator wg = new WeightsGenerator();

            // Neurons per layer: 8 -> 6 -> 4 -> 2 -> 1.
            int[] networks1 = new int[] { 8, 6, 4, 2, 1 };

            // Weights per layer; each count is the layer's neuron count times the width of that layer's input.
            networkWeights[0] = wg.CreateWeights(3, networks1[0] * 32);
            networkWeights[1] = wg.CreateWeights(3, networks1[1] * 8);
            networkWeights[2] = wg.CreateWeights(3, networks1[2] * 6);
            networkWeights[3] = wg.CreateWeights(3, networks1[3] * 4);
            networkWeights[4] = wg.CreateWeights(3, networks1[4] * 2);

            // All biases start at zero.
            networkBias[0] = Enumerable.Repeat(0.00, networks1[0] * 32).ToArray();
            networkBias[1] = Enumerable.Repeat(0.00, networks1[1] * 8).ToArray();
            networkBias[2] = Enumerable.Repeat(0.00, networks1[2] * 6).ToArray();
            networkBias[3] = Enumerable.Repeat(0.00, networks1[3] * 4).ToArray();
            networkBias[4] = Enumerable.Repeat(0.00, networks1[4] * 2).ToArray();

            // Feed the input forward through the five perceptron layers, applying a tan-sigmoid activation after each one.
            double[] layerOneOutput = nn.PerceptronLayer(networks1[0], exampleAddressDbl, networkWeights[0], exampleAddressDbl.Length, networkBias[0]);
            layerOneOutput = af.TanSigmoid(layerOneOutput);

            double[] layerTwoOutput = nn.PerceptronLayer(networks1[1], layerOneOutput, networkWeights[1], layerOneOutput.Length, networkBias[1]);
            layerTwoOutput = af.TanSigmoid(layerTwoOutput);

            double[] layerThreeOutput = nn.PerceptronLayer(networks1[2], layerTwoOutput, networkWeights[2], layerTwoOutput.Length, networkBias[2]);
            layerThreeOutput = af.TanSigmoid(layerThreeOutput);

            double[] layerFourOutput = nn.PerceptronLayer(networks1[3], layerThreeOutput, networkWeights[3], layerThreeOutput.Length, networkBias[3]);
            layerFourOutput = af.TanSigmoid(layerFourOutput);

            double[] layerOutput = nn.PerceptronLayer(networks1[4], layerFourOutput, networkWeights[4], layerFourOutput.Length, networkBias[4]);
            layerOutput = af.TanSigmoid(layerOutput);

            // Scale the single network output to a byte.
            return (byte)(255.0 * layerOutput[0]);
        }
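
The layer-0 weight count (networks1[0] * 32) suggests ExecuteNetwork expects a 32-element input vector. The following is a minimal, hypothetical calling sketch from the same form: the helper name and the byte-to-[0, 1] scaling are assumptions for illustration, not part of the project (which has its own ScalingFunction class). It relies on System.Linq, which the example above already uses.

        // Hypothetical caller: map 32 raw bytes to doubles and run the network (scaling is an assumption).
        private byte ClassifyAddress(byte[] addressBytes)
        {
            // Assumed: addressBytes has 32 elements; each byte is scaled into [0, 1].
            double[] exampleAddressDbl = addressBytes.Select(b => b / 255.0).ToArray();
            return ExecuteNetwork(exampleAddressDbl);
        }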