Example #1
        public static NeuralNetwork SimpleLinearNetwork()
        {
            var net = new NeuralNetwork(1, activation: new Mocks.LinearActivation());

            net.IsTraining = true;
            var layer1 = new NeuralLayer(1);

            net.AddLayer(layer1);
            var layer2 = new NeuralLayer(1);

            net.AddLayer(layer2);
            var layer3 = new NeuralLayer(1);

            net.AddLayer(layer3);
            net.Build();

            layer1[0].Bias = 1;
            layer1[0][0]   = 3;
            layer2[0].Bias = -1;
            layer2[0][0]   = 1;
            layer3[0].Bias = 2;
            layer3[0][0]   = -1;

            return(net);
        }
Example #2
        public static NeuralNetwork CreateElmanNN(int inputCount, int outputCount)
        {
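            // Elman-style setup: every output unit gets an identity "memory" (context) unit
            // added to the input layer, plus its own dedicated bias unit.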
            NeuralNetwork network = new NeuralNetwork();
            Layer         input   = new Layer();
            Layer         output  = new Layer();

            for (int i = 0; i < inputCount; i++)
            {
                input.AddUnit(new NetworkUnit(ActFuncs.Identity));
            }
            for (int i = 0; i < outputCount; i++)
            {
                NetworkUnit memoryUnit = new NetworkUnit(ActFuncs.Identity);
                NetworkUnit biasUnit   = NetworkUnit.CreateBias();
                NetworkUnit outUnit    = new NetworkUnit(ActFuncs.Sigmoid, biasUnit)
                {
                    MemoryUnit = memoryUnit
                };
                input.AddUnit(memoryUnit);
                output.AddUnit(outUnit);
            }
            network.AddLayer(input);
            network.AddLayer(output);

            return(network);
        }
Example #3
    void Start()
    {
        snakeInstances = new List <SnakeInstance>();

        //First iteration, 1 network will be unchanged, the rest will have adjusted weights
        int x;
        int y;

        for (int i = 0; i < 88; i++)
        {
            NeuralNetwork network = new NeuralNetwork();
            network.AddLayer(new NeuralLayer(6, 0.5, "INPUT"));
            network.AddLayer(new NeuralLayer(100, 0.5, ""));
            network.AddLayer(new NeuralLayer(3, 0.5, "OUTPUT"));
            network.BuildNetwork();

            // lay the 88 snakes out on an 11-wide grid, 20 units apart
            x = -100 + ((i % 11) * 20);
            y = -70 + (Mathf.FloorToInt(i / 11) * 20);

            snakeInstances.Add(Instantiate(snakePrefab, new Vector2(x, y), Quaternion.identity).GetComponent <SnakeInstance>());
            snakeInstances[i].network = network;
            if (i > 0)
            {
                snakeInstances[i].network.RandomizeWeights(0.4);
            }
        }

        //Now that we created all of our instances, start iteration 1
        for (int i = 0; i < 88; i++)
        {
            snakeInstances[i].Begin();
        }
    }
Example #4
        public static NeuralNetwork SimpleLinearNetworkWithDropout(double drate, double dseed)
        {
            var net = new NeuralNetwork(1, activation: new Mocks.LinearActivation());

            net.IsTraining = true;
            var layer1 = new NeuralLayer(1);

            net.AddLayer(layer1);
            var layer2 = new NeuralLayer(1);

            layer2.DropoutRate     = drate;
            layer2.DropoutSeed     = 1;    // note: the dseed parameter is not used here; the dropout seed is fixed at 1
            layer2[0].LastRetained = true;
            net.AddLayer(layer2);
            var layer3 = new NeuralLayer(1);

            net.AddLayer(layer3);
            net.Build();

            layer1[0].Bias = 1;
            layer1[0][0]   = 3;
            layer2[0].Bias = -1;
            layer2[0][0]   = 1;
            layer3[0].Bias = 2;
            layer3[0][0]   = -1;

            return(net);
        }
Example #5
    override public void Init(NeuralNetwork p_network = null)
    {
        NeuralNetwork network   = null;
        Optimizer     optimizer = null;

        if (p_network == null)
        {
            network = new NeuralNetwork();
            network.AddLayer("input", new InputLayer(GetParam(STATE_DIM)), BaseLayer.TYPE.INPUT);
            network.AddLayer("hidden0", new CoreLayer(SolverConfig.GetInstance().hidden_layer, ACTIVATION.RELU, BaseLayer.TYPE.HIDDEN), BaseLayer.TYPE.HIDDEN);
            network.AddLayer("output", new CoreLayer(GetParam(ACTION_DIM), ACTIVATION.TANH, BaseLayer.TYPE.OUTPUT), BaseLayer.TYPE.OUTPUT);

            // feed-forward connections
            network.AddConnection("input", "hidden0", Connection.INIT.GLOROT_UNIFORM);
            network.AddConnection("hidden0", "output", Connection.INIT.GLOROT_UNIFORM);
        }
        else
        {
            network = p_network;
        }

        optimizer = new ADAM(network);
        //optimizer = new RMSProp(network);
        //optimizer = new BackProp(network, 1e-5f, 0.99f, true);
        _critic = new DeepQLearning(optimizer, network, 0.99f, SolverConfig.GetInstance().memory_size, SolverConfig.GetInstance().batch_size, SolverConfig.GetInstance().qtupdate_size);
        _critic.SetAlpha(SolverConfig.GetInstance().learning_rate);
    }
Example #6
 private void button1_Click(object sender, EventArgs e)
 {
     network = new NeuralNetwork();
     network.AddLayer(9);
     network.AddLayer(7);
     button2.Enabled = true;
     button3.Enabled = true;
 }
Example #7
        public void TestUninitializedNetworkThrowsInvalidOperationExceptionOnCompute()
        {
            NeuralNetwork nn = new NeuralNetwork();

            nn.AddLayer(1, new LinearActivation());
            nn.AddLayer(1, new UnipolarSigmoidActivation());
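            // ComputeOutput is called without Initialize; an InvalidOperationException is expected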
            Vector <double> result = nn.ComputeOutput(new double[] { 0 });
        }
Example #8
        public void TestNeuralNetworkIsCreatedWith2Layers()
        {
            NeuralNetwork nn = new NeuralNetwork();

            nn.AddLayer(2, new LinearActivation());
            nn.AddLayer(1, new UnipolarSigmoidActivation());
            nn.Initialize(CreationModes.RandomizeWeights);
            Assert.AreEqual(2, nn.LayerCount);
        }
Example #9
        public void TestIncorrectInputLengthThrowsInvalidOperationExceptionOnCompute()
        {
            NeuralNetwork nn = new NeuralNetwork();

            nn.AddLayer(1, new LinearActivation());
            nn.AddLayer(1, new UnipolarSigmoidActivation());
            nn.Initialize(CreationModes.RandomizeWeights);
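            // two input values are supplied to a 1-neuron input layer; an InvalidOperationException is expected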
            Vector <double> result = nn.ComputeOutput(new double[] { 0, 1 });
        }
Example #10
        public void CreateNetwork()
        { 
            Network = new NeuralNetwork();
            Network.AddLayer(InputLayerSize);
            Network.AddLayer(HiddenLayerSize);
            Network.AddLayer(OutputLayerSize);

            Network.LogMessage = Log;

            Log?.Invoke("Network Created");
        }
Example #11
 public void CreatNeuralNetwork(float lr)
 {
     nn = new NeuralNetwork(lr, new LossPowSum());
     nn.AddLayer(new LinerLayer(2, 3));
     nn.AddLayer(new LinerLayer(3, 2));
     nn.AddLayer(new LinerLayer(2, 2));
     nn.RandomWeigths();
     Lr_txt.text        = "Learning step size: " + lr.ToString() + "f";
     Train_Num_txt.text = "Total training runs: " + TrainNum.ToString();
     Speed_txt.text     = "Time per run: " + trainspeed.ToString() + "s";
 }
Example #12
        public void TestSeveralNeuronsPerceptronProcessingIndividualWeightModifications()
        {
            NeuralNetwork nn = new NeuralNetwork();

            nn.AddLayer(3, new LinearActivation());
            nn.AddLayer(2, new LinearActivation());
            nn.Initialize(CreationModes.RandomizeWeights);
            nn.SetIncomingWeightsForNeuron(1, 0, new double[] { 1, 0, 2 });
            nn.SetIncomingWeightsForNeuron(1, 1, new double[] { 0, 4, 1 });
            Vector <double> result = nn.ComputeOutput(new double[] { 1, 2, 3 });
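            // neuron 0: 1*1 + 0*2 + 2*3 = 7; neuron 1: 0*1 + 4*2 + 1*3 = 11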

            Assert.AreEqual(7.0, result[0], EPSILON);
            Assert.AreEqual(11.0, result[1], EPSILON);
        }
Example #13
    private void Awake()
    {
        Func <double, double>         sigmoid    = (x) => (1f / (1f + Math.Exp(-x)));
        Func <double, double>         linear     = (x) => x;
        Func <double, double, double> derSigmoid = (x, y) => (y * (1f - y));
        Func <double, double, double> derLinear  = (x, y) => 1f;

        perceptronModel = new NeuralNetwork(63, 1, 0.01f, linear, derLinear);

        //layers are added to the model from the back :v
        backpropModel = new NeuralNetwork(63, 1, 0.01f, sigmoid, derSigmoid);
        backpropModel.AddLayer(2, "relu");
        backpropModel.AddLayer(4, "relu");
        backpropModel.AddLayer(8, "relu");
    }
Example #14
        /// <summary>
        /// Creates a fully connected NN
        /// </summary>
        /// <param name="topology">Network topology from input to output layer
        /// (e.g. [2,10,3] means a NN with 2D input, one hidden layer of 10 neurons and a 3D output)</param>
        public static NeuralNetwork CreateFullyConnectedNetwork(int[] topology,
                                                                IActivationFunction activation = null,
                                                                bool randomizeInitialWeights   = true,
                                                                int randomSeed = 0)
        {
            if (topology == null || topology.Length < 2)
            {
                throw new MLException("Network topology must have at least input and output dimensions");
            }

            var net = new NeuralNetwork(topology[0], activation);

            var lcount = topology.Length - 1;

            for (int i = 1; i <= lcount; i++)
            {
                var neuronCount = topology[i];
                var layer       = new NeuralLayer(neuronCount);
                net.AddLayer(layer);
            }

            net.Build();

            if (randomizeInitialWeights)
            {
                net.RandomizeParameters(randomSeed);
            }

            return(net);
        }
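
A minimal usage sketch for the factory above; the topology, activation and seed below are illustrative values, not taken from the original source:

        // build a 2-10-3 fully connected network (2 inputs, one hidden layer of 10 neurons, 3 outputs)
        var net = CreateFullyConnectedNetwork(new[] { 2, 10, 3 },
                                              activation: null,            // fall back to the library default
                                              randomizeInitialWeights: true,
                                              randomSeed: 42);             // illustrative seed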
Example #15
    private NeuralNetwork CreateNewNetwork()
    {
        //First, create a neural network
        var network = new NeuralNetwork();
        //Create a fully connected (dense) layer with output shape (1, 10, 1) and Sigmoid activation
        //var d1 = new DenseLayer(new Shape(1, 10, 1), ActivationType.Sigmoid);
        //network.AddLayer(d1);
        //Create a fully connected layer with output shape (1, 2, 1); note this is the last layer, so its shape must match the output you need. Sigmoid activation
        var d2 = new DenseLayer(new Shape(1, 2, 1), ActivationType.Sigmoid);

        network.AddLayer(d2);

        //Initialization parameters
        var initArgs = new NetworkInitializeArgs();

        //Input shape: the shape of your input data; in this car demo the input is the number of rays
        initArgs.inputShape = new Shape(1, rayNum, 1);
        //Weight initialization range: weight = Random.Range(-0.1f, 0.1f)
        initArgs.initWeightRange = (-0.1f, 0.1f);
        //Bias initialization range: bias = Random.Range(-0.1f, 0.1f)
        initArgs.initBiasRange = (-0.1f, 0.1f);
        //Initialize the network
        network.Initialize(initArgs);
        return(network);
    }
Example #16
        public void TestSimpleProcessing3()
        {
            NeuralNetwork nn = new NeuralNetwork();

            nn.AddLayer(1, new LinearActivation());
            nn.AddLayer(1, new UnipolarSigmoidActivation());
            nn.Initialize(CreationModes.NoAction);
            nn.SetIncomingWeightsForLayer(1, new List <double[]>()
            {
                new double[] { 2 }
            });
            Vector <double> result = nn.ComputeOutput(new double[] { 0 });

            TestContext.WriteLine(result[0].ToString());
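            // the weighted sum is 0 * 2 = 0 and UnipolarSigmoid(0) = 0.5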
            Assert.AreEqual(0.5, result[0]);
        }
Example #17
    // Start is called before the first frame update
    void Start()
    {
        var network = new NeuralNetwork();
        var d1      = new DenseLayer(new Shape(1, 10, 1), ActivationType.Sigmoid);

        network.AddLayer(d1);

        var initArgs = new NetworkInitializeArgs();

        initArgs.inputShape      = new Shape(1, 28 * 28, 1);
        initArgs.initWeightRange = (-0.1f, 0.1f);
        initArgs.initBiasRange   = (-0.1f, 0.1f);
        network.Initialize(initArgs);

        var trainArgs = new NeuralNetworkTrainArgs();

        trainArgs.trainingData   = ReadTrainingData();   //set the training data
        trainArgs.trainingLabels = ReadTrainingLabels(); //set the labels
        trainArgs.learningRate   = 0.01f;                //set the learning rate: larger values learn faster but are more likely to diverge
        trainArgs.onOnceEpoch    = (i) =>
        {
            var accuracy = GetAccuracy(network, trainArgs.trainingData, trainArgs.trainingLabels);
            Debug.Log($"Training epoch {i}, accuracy: {accuracy}");
        };
        trainArgs.trainEpoches = 100; //set the number of training epochs
        network.Train(trainArgs);     //start training

        TestNetwork(network, trainArgs.trainingData, trainArgs.trainingLabels);
    }
Example #18
        public void TestSeveralNeuronsPerceptronProcessing()
        {
            NeuralNetwork nn = new NeuralNetwork();

            nn.AddLayer(3, new LinearActivation());
            nn.AddLayer(2, new LinearActivation());
            nn.Initialize(CreationModes.NoAction);
            nn.SetIncomingWeightsForLayer(1, new List <double[]>()
            {
                new double[] { 1, 0, 2 }, new double[] { 0, 4, 1 }
            });
            Vector <double> result = nn.ComputeOutput(new double[] { 1, 2, 3 });
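            // neuron 0: 1*1 + 0*2 + 2*3 = 7; neuron 1: 0*1 + 4*2 + 1*3 = 11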

            Assert.AreEqual(7.0, result[0], EPSILON);
            Assert.AreEqual(11.0, result[1], EPSILON);
        }
Example #19
        public void TestNeuralNetworkAttributes5Layers()
        {
            NeuralNetwork nn = new NeuralNetwork();

            nn.AddLayer(2, new LinearActivation());
            nn.AddLayer(4, new UnipolarSigmoidActivation());
            nn.AddLayer(7, new UnipolarSigmoidActivation());
            nn.AddLayer(4, new UnipolarSigmoidActivation());
            nn.AddLayer(1, new UnipolarSigmoidActivation());
            nn.Initialize(CreationModes.RandomizeWeights);
            Assert.AreEqual(5, nn.LayerCount);
            Assert.AreEqual(3, nn.HiddenLayerCount);
            Assert.AreEqual(2, nn.InputCount);
            Assert.AreEqual(1, nn.OutputCount);
            Assert.AreEqual(4, nn.LayerAt(3).NeuronCount);
        }
Example #20
        public void TestSettingNeuronWeights()
        {
            NeuralNetwork nn = new NeuralNetwork();

            nn.AddLayer(2, new LinearActivation(), false);
            nn.AddLayer(3, new LinearActivation(), false);
            nn.AddLayer(1, new LinearActivation(), false);
            nn.Initialize(CreationModes.NoAction);
            nn.SetIncomingWeightsForNeuron(1, 0, new double[] { 1, -2 });
            nn.SetIncomingWeightsForNeuron(1, 1, new double[] { 4, 0 });
            nn.SetIncomingWeightsForNeuron(1, 2, new double[] { -1, 3 });
            nn.SetIncomingWeightsForNeuron(2, 0, new double[] { -1, -2, 3 });
            Vector <double> result = nn.ComputeOutput(new double[] { 1.0, 2.0 });
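            // hidden layer: [1*1 - 2*2, 4*1 + 0*2, -1*1 + 3*2] = [-3, 4, 5]; output: -1*(-3) - 2*4 + 3*5 = 10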

            Assert.AreEqual(10.0, result[0], 0.0001);
        }
Example #21
        public void TestSimpleMLPProcessing1()
        {
            NeuralNetwork nn = new NeuralNetwork();

            nn.AddLayer(1, new LinearActivation());
            nn.AddLayer(1, new LinearActivation());
            nn.AddLayer(1, new LinearActivation());
            nn.Initialize(CreationModes.NoAction);
            nn.SetIncomingWeightsForLayer(1, new List <double[]>()
            {
                new double[] { 2 }
            });
            nn.SetIncomingWeightsForLayer(2, new List <double[]>()
            {
                new double[] { 2 }
            });
            Vector <double> result = nn.ComputeOutput(new double[] { 3 });
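            // each weighted connection doubles the signal: 3 * 2 * 2 = 12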

            Assert.AreEqual(12, result[0]);
        }
Example #22
        public void TestLastOutputInNetworkContext()
        {
            NeuralNetwork nn = new NeuralNetwork();

            nn.AddLayer(2, new LinearActivation(), false);
            nn.AddLayer(3, new LinearActivation(), false);
            nn.AddLayer(1, new LinearActivation(), false);
            nn.Initialize(CreationModes.NoAction);
            nn.SetIncomingWeightsForLayer(1, new List <double[]>()
            {
                new double[] { 1, -2 }, new double[] { 4, 0 }, new double[] { -1, 3 }
            });
            nn.SetIncomingWeightsForLayer(2, new List <double[]>()
            {
                new double[] { -1, -2, 3 }
            });
            Vector <double> result = nn.ComputeOutput(new double[] { 1.0, 2.0 });

            double[] lastOutput = nn.LayerAt(1).LastOutput.ToArray();
            // hidden-layer outputs for input (1, 2): [1*1 - 2*2, 4*1 + 0*2, -1*1 + 3*2] = [-3, 4, 5]
            Assert.AreEqual(-3.0, lastOutput[0], 0.0001);
            Assert.AreEqual(4.0, lastOutput[1], 0.0001);
            Assert.AreEqual(5.0, lastOutput[2], 0.0001);
        }
Example #23
        public void TestComplexMLPProcessingWithBiasBipolarSigmoid1()
        {
            NeuralNetwork nn = new NeuralNetwork();

            nn.AddLayer(2, new LinearActivation(), false);
            nn.AddLayer(3, new BipolarTanhActivation(), true);
            nn.AddLayer(1, new BipolarTanhActivation(), true);
            nn.Initialize(CreationModes.NoAction);
            nn.SetIncomingWeightsForLayer(1, new List <double[]>()
            {
                new double[] { 2, 1 }, new double[] { 3, -2 }, new double[] { -2, 7 }
            });
            nn.SetIncomingWeightsForLayer(2, new List <double[]>()
            {
                new double[] { 17, 13, 20 }
            });
            nn.SetBiasForLayer(1, new double[] { -100.0, -100.0, -100.0 });
            nn.SetBiasForLayer(2, new double[] { 50.0 });
            Vector <double> result = nn.ComputeOutput(new double[] { 0.8, 0.5 });
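            // the -100 biases saturate all three hidden tanh units at -1, so the output
            // pre-activation is 17*(-1) + 13*(-1) + 20*(-1) + 50 = 0 and tanh(0) = 0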

            Assert.AreEqual(0.0, result[0], 0.0001);
        }
Example #24
        public void TestSimpleDeepNetProcessing1()
        {
            NeuralNetwork nn = new NeuralNetwork();

            nn.AddLayer(1, new LinearActivation());
            nn.AddLayer(1, new LinearActivation());
            nn.AddLayer(1, new LinearActivation());
            nn.AddLayer(1, new LinearActivation());
            nn.AddLayer(1, new LinearActivation());
            nn.Initialize(CreationModes.NoAction);
            for (int i = 1; i < 5; i++)
            {
                nn.SetIncomingWeightsForLayer(i, new List <double[]>()
                {
                    new double[] { 2 }
                });
            }

            Vector <double> result = nn.ComputeOutput(new double[] { 3 });

            TestContext.WriteLine(result[0].ToString());
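            // four weighted connections of 2 each: 3 * 2^4 = 48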
            Assert.AreEqual(48, result[0]);
        }
Example #25
        static void Main(string[] args)
        {
            var network      = new NeuralNetwork(3);
            var layerFactory = new NeuralLayerFactory();

            network.AddLayer(layerFactory.CreateNeuralLayer(3, new RectifiedActivationFuncion(), new WeightedSumFunction()));

            //network.AddLayer(layerFactory.CreateNeuralLayer(3, new RectifiedActivationFuncion(), new WeightedSumFunction()));

            network.AddLayer(layerFactory.CreateNeuralLayer(1, new SigmoidActivationFunction(0.7), new WeightedSumFunction()));

/*            network.PushExpectedValues(
 *              new double[][] {
 *                  new double[] { 0 },
 *                  new double[] { 1 },
 *                  new double[] { 1 },
 *                  new double[] { 0 },
 *                  new double[] { 1 },
 *                  new double[] { 0 },
 *                  new double[] { 0 },
 *              });
 *
 *          network.Train(
 *              new double[][] {
 *                  new double[] { 150, 2, 0 },
 *                  new double[] { 1002, 56, 1 },
 *                  new double[] { 1060, 59, 1 },
 *                  new double[] { 200, 3, 0 },
 *                  new double[] { 300, 3, 1 },
 *                  new double[] { 120, 1, 0 },
 *                  new double[] { 80, 1, 0 },
 *              }, 10000);
 *
 *          network.PushInputValues(new double[] { 1054, 54, 1 });
 *          var outputs = network.GetOutput(); */
        }
Example #26
        private static NeuralNetwork InitializeDefaultNeuralNetwork(DataProvider dp)
        {
            using (CNNDataSet.TrainingRatesDataTable table = new CNNDataSet.TrainingRatesDataTable())
            {
                table.ReadXml(@"D:\prj\cnnwb\CNNWB.Data\TrainingSchemes\LeCun2.scheme-xml");
                CNNDataSet.TrainingRatesRow row = table.Rows[0] as CNNDataSet.TrainingRatesRow;
                _data = new TrainingRate(row.Rate, row.Epochs, row.MinimumRate, row.WeightDecayFactor, row.Momentum, row.BatchSize, row.InitialAvgLoss, row.DecayFactor, row.DecayAfterEpochs, row.WeightSaveTreshold, row.Distorted, row.DistortionPercentage, row.SeverityFactor, row.MaxScaling, row.MaxRotation, row.ElasticSigma, row.ElasticScaling);
            }

            //NeuralNetwork network = new NeuralNetwork(_dp, "LeNet-5", 10, 0.8D, LossFunctions.MeanSquareError,
            //                            DataProviderSets.MNIST, TrainingStrategy.SGDLevenbergMarquardt, 0.02D);
            //network.AddLayer(LayerTypes.Input, 1, 32, 32);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.Tanh, 6, 28, 28, 5, 5);
            //network.AddLayer(LayerTypes.AvgPooling, ActivationFunctions.Tanh, 6, 14, 14, 2, 2);

            //bool[] maps = new bool[6 * 16]
            //{
            // true, false,false,false,true, true, true, false,false,true, true, true, true, false,true, true,
            // true, true, false,false,false,true, true, true, false,false,true, true, true, true, false,true,
            // true, true, true, false,false,false,true, true, true, false,false,true, false,true, true, true,
            // false,true, true, true, false,false,true, true, true, true, false,false,true, false,true, true,
            // false,false,true, true, true, false,false,true, true, true, true, false,true, true, false,true,
            // false,false,false,true, true, true, false,false,true, true, true, true, false,true, true, true
            //};

            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.Tanh, 16, 10, 10, 5, 5, mappings: new Mappings(maps));
            //network.AddLayer(LayerTypes.AvgPooling, ActivationFunctions.Tanh, 16, 5, 5, 2, 2);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.Tanh, 120, 1, 1, 5, 5);
            //network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.Tanh, 10);

            //network.InitializeWeights();

            NeuralNetwork network = new NeuralNetwork(_dp, "Simard-6", 10, 0.8D, LossFunctions.MeanSquareError, DataProviderSets.MNIST, TrainingStrategy.SGDLevenbergMarquardt, 0.02D);

            network.AddLayer(LayerTypes.Input, 1, 32, 32);
            network.AddLayer(LayerTypes.ConvolutionalSubsampling, ActivationFunctions.Tanh, 6, 14, 14, 5, 5);
            network.AddLayer(LayerTypes.ConvolutionalSubsampling, ActivationFunctions.Tanh, 50, 5, 5, 5, 5);
            network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.Tanh, 100);
            network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.Tanh, 10);
            network.InitializeWeights();

            //NeuralNetwork network = new NeuralNetwork(DataProvider, "Simard-16", 10, 0.8D, LossFunctions.MeanSquareError, DataProviderSets.MNIST, TrainingStrategy.SGDLevenbergMarquardt, 0.1D);
            //network.AddLayer(LayerTypes.Input, 1, 32, 32);
            //network.AddLayer(LayerTypes.ConvolutionalSubsampling, ActivationFunctions.Tanh, 16, 14, 14, 5, 5);
            //network.AddLayer(LayerTypes.ConvolutionalSubsampling, ActivationFunctions.Tanh, 64, 5, 5, 5, 5);
            //network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.Tanh, 196);
            //network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.Tanh, 10);
            //network.InitializeWeights();

            //NeuralNetwork network = new NeuralNetwork(DataProvider, "LeNet-5", 10, 0.8D, LossFunctions.MeanSquareError, DataProviderSets.MNIST, TrainingStrategy.SGDLevenbergMarquardt, 0.02D);
            //network.AddLayer(LayerTypes.Input, 1, 32, 32);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.Tanh, 6, 28, 28, 5, 5, 1, 1);
            //network.AddLayer(LayerTypes.AvgPooling, ActivationFunctions.Tanh, 6, 14, 14, 2, 2, 2, 2);
            //bool[] maps = new bool[6 * 16]
            //{
            //    true,  false, false, false, true,  true,  true,  false, false, true,  true,  true,  true,  false, true,  true,
            //    true,  true,  false, false, false, true,  true,  true,  false, false, true,  true,  true,  true,  false, true,
            //    true,  true,  true,  false, false, false, true,  true,  true,  false, false, true,  false, true,  true,  true,
            //    false, true,  true,  true,  false, false, true,  true,  true,  true,  false, false, true,  false, true,  true,
            //    false, false, true,  true,  true,  false, false, true,  true,  true,  true,  false, true,  true,  false, true,
            //    false, false, false, true,  true,  true,  false, false, true,  true,  true,  true,  false, true,  true,  true
            //};
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.Tanh, 16, 10, 10, 5, 5, 1, 1, 0, 0, new Mappings(maps));
            //network.AddLayer(LayerTypes.AvgPooling, ActivationFunctions.Tanh, 16, 5, 5, 2, 2, 2, 2);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.Tanh, 120, 1, 1, 5, 5, 1, 1);
            //network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.Tanh, 10);
            //network.InitializeWeights();

            //NeuralNetwork network = new NeuralNetwork(DataProvider, "LeNet-5", 10, 1D, LossFunctions.CrossEntropy, DataProviderSets.MNIST, TrainingStrategy.SGDLevenbergMarquardt, 0.02D);
            //network.AddLayer(LayerTypes.Input, 1, 32, 32);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 6, 28, 28, 5, 5);
            //network.AddLayer(LayerTypes.AvgPooling, ActivationFunctions.Ident, 6, 14, 14, 2, 2, 2, 2);
            //bool[] maps = new bool[6 * 16]
            //{
            //    true,  false, false, false, true,  true,  true,  false, false, true,  true,  true,  true,  false, true,  true,
            //    true,  true,  false, false, false, true,  true,  true,  false, false, true,  true,  true,  true,  false, true,
            //    true,  true,  true,  false, false, false, true,  true,  true,  false, false, true,  false, true,  true,  true,
            //    false, true,  true,  true,  false, false, true,  true,  true,  true,  false, false, true,  false, true,  true,
            //    false, false, true,  true,  true,  false, false, true,  true,  true,  true,  false, true,  true,  false, true,
            //    false, false, false, true,  true,  true,  false, false, true,  true,  true,  true,  false, true,  true,  true
            //};
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 16, 10, 10, 5, 5, 1, 1, 0, 0, new Mappings(maps));
            //network.AddLayer(LayerTypes.AvgPooling, ActivationFunctions.Ident, 16, 5, 5, 2, 2, 2, 2);
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.ReLU, 120, 1, 1, 5, 5);
            //network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.SoftMax, 10);
            //network.InitializeWeights();

            //NeuralNetwork network = new NeuralNetwork(DataProvider, "MyNet-16", 10, 0.8D, LossFunctions.MeanSquareError, DataProviderSets.MNIST, TrainingStrategy.SGDLevenbergMarquardt, 0.02D);
            //network.AddLayer(LayerTypes.Input, 1, 32, 32);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.Tanh, 16, 28, 28, 5, 5);
            //network.AddLayer(LayerTypes.AvgPooling, ActivationFunctions.Tanh, 16, 14, 14, 2, 2);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.Tanh, 64, 10, 10, 5, 5, 1, 1, 0, 0, new Mappings(16, 64, 66, 1));
            //network.AddLayer(LayerTypes.AvgPooling, ActivationFunctions.Tanh, 64, 5, 5, 2, 2);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.Tanh, 256, 1, 1, 5, 5, 1, 1, 0, 0, new Mappings(64, 256, 66, 2));
            //network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.Tanh, 10);
            //network.InitializeWeights();

            //NeuralNetwork network = new NeuralNetwork(DataProvider, "CNN-CIFAR10-A", 10, 0.8D, LossFunctions.MeanSquareError, DataProviderSets.CIFAR10, TrainingStrategy.SGDLevenbergMarquardt, 0.02D);
            //network.AddLayer(LayerTypes.Input, 3, 32, 32);
            //bool[] maps = new bool[3 * 64]
            //{
            //    true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,
            //    false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,
            //    false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true
            //};
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 64, 28, 28, 5, 5, 1, 1, 0, 0, new Mappings(maps));
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 64, 14, 14, 3, 3, 2, 2);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 64, 10, 10, 5, 5, 1, 1, 0, 0, new Mappings(64, 64, 66, 1));
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 64, 5, 5, 3, 3, 2, 2);
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.ReLU, 64, 3, 3, 3, 3, 1, 1, 0, 0, new Mappings(64, 64, 66, 2));
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.ReLU, 386, 1, 1, 3, 3, 1, 1, 0, 0, 50);
            //network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.SoftSign, 10);
            //network.InitializeWeights();

            //NeuralNetwork network = new NeuralNetwork(DataProvider, "CNN-CIFAR10-Z2", 10, 1D, LossFunctions.CrossEntropy, DataProviderSets.CIFAR10, TrainingStrategy.SGDLevenbergMarquardt);
            //network.AddLayer(LayerTypes.Input, 3, 32, 32);
            //bool[] maps = new bool[3 * 64]
            //{
            //    true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,
            //    false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,
            //    false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true
            //};

            //bool[] maps = new bool[3 * 96]
            //{
            //    true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false,
            //    false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false,
            //    false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true,  false, false, true
            //};
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 64, 28, 28, 5, 5, 1, 1, 0, 0, new Mappings(maps));
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 64, 14, 14, 3, 3, 2, 2);
            ////network.AddLayer(LayerTypes.LocalResponseNormalizationCM, ActivationFunctions.None, 64, 14, 14, 3, 3);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 64, 10, 10, 5, 5, 1, 1, 0, 0, new Mappings(64, 64, 66, 1));
            ////network.AddLayer(LayerTypes.LocalResponseNormalizationCM, ActivationFunctions.None, 64, 10, 10, 3, 3);
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 64, 5, 5, 3, 3, 2, 2);
            ////network.AddLayer(LayerTypes.Local, ActivationFunctions.ReLU, 64, 1, 1, 5, 5, 1, 1, 0, 0, new Mappings(64, 64, 66, 2));
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.Logistic, 384, 1, 1, 5, 5, 1, 1, 0, 0, 50);
            //network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.SoftMax, 10);
            //network.InitializeWeights();
            //network.LoadWeights(StorageDirectory + @"\CNN-CIFAR10-Z2 (2259 errors).weights-bin");

            //NeuralNetwork network = new NeuralNetwork(DataProvider, "CNN-CIFAR10-B2", 10, 0.8D, LossFunctions.MeanSquareError, DataProviderSets.CIFAR10, TrainingStrategy.SGDLevenbergMarquardt, 0.02D);
            //network.AddLayer(LayerTypes.Input, 3, 32, 32);
            //bool[] maps = new bool[3 * 64]
            //{
            //    true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,
            //    false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,
            //    false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true
            //};
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 64, 28, 28, 5, 5, 1, 1, 0, 0, new Mappings(maps));
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 64, 14, 14, 3, 3, 2, 2);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 64, 10, 10, 5, 5, 1, 1, 0, 0, new Mappings(64, 64, 66, 1));
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 64, 5, 5, 3, 3, 2, 2);
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.Tanh, 512, 1, 1, 5, 5, 1, 1, 0, 0, 50);
            //network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.Tanh, 10);
            //network.InitializeWeights();

            //NeuralNetwork network = new NeuralNetwork(DataProvider, "CNN-CIFAR10-Z3", 10, 1D, LossFunctions.CrossEntropy, DataProviderSets.CIFAR10, TrainingStrategy.SGDLevenbergMarquardt, 0.02D);
            //network.AddLayer(LayerTypes.Input, 3, 32, 32);
            //bool[] maps = new bool[3 * 64]
            //{
            //    true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,
            //    false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,
            //    false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true
            //};
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 64, 32, 32, 5, 5, 1, 1, 2, 2, new Mappings(maps));
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 64, 16, 16, 3, 3, 2, 2);
            ////network.AddLayer(LayerTypes.LocalResponseNormalizationCM, ActivationFunctions.None, 64, 16, 16, 5, 5);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 64, 16, 16, 5, 5, 1, 1, 2, 2, new Mappings(64, 64, 66, 1));
            ////network.AddLayer(LayerTypes.LocalResponseNormalizationCM, ActivationFunctions.None, 64, 16, 16, 5, 5);
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 64, 8, 8, 3, 3, 2, 2);
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.ReLU, 32, 8, 8, 3, 3, 1, 1, 1, 1, new Mappings(64, 32, 50, 2));
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.SoftSign, 32, 8, 8, 3, 3, 1, 1, 1, 1, 50);
            //network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.SoftMax, 10);
            //network.InitializeWeights();

            //NeuralNetwork network = new NeuralNetwork(DataProvider, "CNN-CIFAR10-C", 10, 0.8D, LossFunctions.MeanSquareError, DataProviderSets.CIFAR10, TrainingStrategy.SGDLevenbergMarquardt, 0.02D);
            //network.AddLayer(LayerTypes.Input, 3, 32, 32);
            //bool[] maps = new bool[3 * 64]
            //{
            //    true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,
            //    false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,
            //    false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true
            //};
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 64, 28, 28, 5, 5, 1, 1, 0, 0, new Mappings(maps));
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 64, 14, 14, 3, 3, 2, 2);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 64, 10, 10, 5, 5, 1, 1, 0, 0, new Mappings(64, 64, 66, 1));
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 64, 5, 5, 3, 3, 2, 2);
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.ReLU, 32, 5, 5, 3, 3, 1, 1, 1, 1, new Mappings(64, 32, 50, 2));
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.ReLU, 32, 5, 5, 3, 3, 1, 1, 1, 1, 50);
            //network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.Tanh, 10);
            //network.InitializeWeights();

            //NeuralNetwork network = new NeuralNetwork(DataProvider, "CNN-CIFAR10-Z4", 10, 0.8D, LossFunctions.MeanSquareError, DataProviderSets.CIFAR10, TrainingStrategy.SGDLevenbergMarquardt, 0.02D, 2000, false);
            //network.AddLayer(LayerTypes.Input, 3, 32, 32);
            ////bool[] maps = new bool[3 * 64]
            ////{
            ////    true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true, true, true, true, true, true, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true, true, true, true, true, true, false, true,  false, true,  false, true,  false, true,
            ////    false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true, true, true, true, true, true, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true, true, true, true, true, true, true,  false, true,  false, true,  false, true,  false,
            ////    false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true, true, true, true, true, true, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true, true, true, true, true, true, false, true,  false, true,  false, true,  false, true
            ////};
            //bool[] maps = new bool[3 * 64]
            //{
            //    true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,
            //    false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,
            //    false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true
            //};
            ////bool[] maps = new bool[3 * 48]    //Z3
            ////{
            ////    true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false,
            ////    false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false,
            ////    false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true
            ////};

            ////bool[] maps = new bool[3 * 48]    //Z3
            ////{
            ////    true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true, true, true, true, true, true, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false,
            ////    false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true, true, true, true, true, true, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,
            ////    false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true, true, true, true, true, true, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true
            ////};
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 64, 32, 32, 5, 5, 1, 1, 2, 2, new Mappings(maps));
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 64, 16, 16, 3, 3, 2, 2);
            ////network.AddLayer(LayerTypes.LocalContrastNormalization, ActivationFunctions.None, 64, 16, 16, 5, 5);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 64, 16, 16, 5, 5, 1, 1, 2, 2, new Mappings(64, 64, 66, 1));
            ////network.AddLayer(LayerTypes.LocalContrastNormalization, ActivationFunctions.None, 64, 16, 16, 5, 5);
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 64, 8, 8, 3, 3, 2, 2);
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.ReLU, 32, 8, 8, 3, 3, 1, 1, 1, 1, new Mappings(64, 32, 50, 2));
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.ReLU, 32, 8, 8, 3, 3, 1, 1, 1, 1, 50);
            //network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.SoftSign, 10);
            //network.InitializeWeights();

            //NeuralNetwork network = new NeuralNetwork(DataProvider, "CNN-CIFAR10-D", 10, 0.8D, LossFunctions.MeanSquareError, DataProviderSets.CIFAR10, TrainingStrategy.SGDLevenbergMarquardt, 0.02D);
            //network.AddLayer(LayerTypes.Input, 3, 32, 32);
            //bool[] maps = new bool[3 * 64]
            //{
            //    true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,
            //    false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,
            //    false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true
            //};
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 64, 32, 32, 5, 5, 1, 1, 2, 2, new Mappings(maps));
            //network.AddLayer(LayerTypes.MaxPoolingWeightless, ActivationFunctions.Ident, 64, 16, 16, 3, 3, 2, 2);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 64, 16, 16, 3, 3, 1, 1, 1, 1, new Mappings(64, 64, 50, 1));
            //network.AddLayer(LayerTypes.AvgPoolingWeightless, ActivationFunctions.Ident, 64, 8, 8, 3, 3, 2, 2);
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.BReLU, 32, 8, 8, 3, 3, 1, 1, 1, 1, new Mappings(64, 32, 50, 2));
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.Tanh, 32, 8, 8, 3, 3, 1, 1, 1, 1, 50);
            //network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.Tanh, 10);
            ////network.InitializeWeights();

            //NeuralNetwork network = new NeuralNetwork(DataProvider, "CNN-CIFAR10-G", 10, 0.8D, LossFunctions.MeanSquareError, DataProviderSets.CIFAR10, TrainingStrategy.SGD, 0.02D);
            //network.AddLayer(LayerTypes.Input, 3, 32, 32);
            //bool[] maps = new bool[3 * 48]
            //{
            //    true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true, true, true, true, true, true, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false,
            //    false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  false, false, false, false, false, false, false, false, true,  true,  true, true, true, true, true, true, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,
            //    false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true, true, true, true, true, true, false, false, false, false, false, false, false, false, true,  true,  true,  true,  true,  true,  true,  true
            //};
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 48, 32, 32, 5, 5, 1, 1, 2, 2, new Mappings(maps));
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 48, 16, 16, 3, 3, 2, 2);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 64, 16, 16, 5, 5, 1, 1, 2, 2, new Mappings(48, 64, 66, 1));
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 64, 8, 8, 3, 3, 2, 2);
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.ReLU, 24, 8, 8, 3, 3, 1, 1, 1, 1, new Mappings(64, 24, 50, 2));
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.Tanh, 24, 8, 8, 3, 3, 1, 1, 1, 1, 50);
            //network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.Tanh, 10);
            //network.InitializeWeights();

            //NeuralNetwork network = new NeuralNetwork(DataProvider, "CNN-MNIST-A", 10, 1D, LossFunctions.CrossEntropy, DataProviderSets.MNIST, TrainingStrategy.SGD, 0.02D);
            //network.AddLayer(LayerTypes.Input, 1, 32, 32);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 32, 24, 24, 9, 9, 1, 1, 0, 0);
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 32, 12, 12, 3, 3, 2, 2);
            //network.AddLayer(LayerTypes.Convolutional, ActivationFunctions.ReLU, 32, 8, 8, 5, 5, 1, 1, 0, 0, new Mappings(32, 32, 66));
            //network.AddLayer(LayerTypes.StochasticPooling, ActivationFunctions.Ident, 32, 4, 4, 3, 3, 2, 2);
            //network.AddLayer(LayerTypes.Local, ActivationFunctions.ReLU, 256, 1, 1, 4, 4, 1, 1, 0, 0, 50);
            //network.AddLayer(LayerTypes.FullyConnected, ActivationFunctions.SoftMax, 10);
            //network.InitializeWeights();

            //network.RaiseNetworkProgressEvent += new EventHandler<EventArgs>(NetworkProgressEvent);
            //network.RaiseAddUnrecognizedTestSampleEvent += new EventHandler<AddUnrecognizedTestSampleEventArgs>(AddUnrecognizedTestSampleEvent);

            network.MaxDegreeOfParallelism = 8;
            network.AddGlobalTrainingRate(_data, true);

            return(network);
        }
Example #27
    public static void Main()
    {
        int height = 100;
        int width  = 100;


        float[] input = new float[height * width];

        for (int i = 0; i < input.Length; i++)
        {
            input[i] = .5f;
        }


        Random rnd = new Random();

        InOutPair[] IO = new InOutPair[1];

        float[] toAdd = new float[height * width];

        for (int i = 0; i < height * width; i++)
        {
            // Offset by one so the first element is 1 rather than 1/0 (float.PositiveInfinity).
            toAdd[i] = 1f / (i + 1);
        }
        IO[0] = new InOutPair(toAdd, new float[] { .1f, .5f, -.1f });


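        // Wrap the single training pair in an epoch (presumably 1000 passes; the boolean flag's meaning depends on the Epoch implementation).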
        Epoch Ep = new Epoch(IO, 1000, true);

        NeuralNetwork NN = new NeuralNetwork(9, rnd, .01f, 1, 10, 200);

        NN.AddLayer(5, "HypTan");
        NN.AddLayer(5, "HypTan");
        NN.AddLayer(5, "HypTan");
        NN.AddLayer(3, "HypTan");


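        // Convolutional front end; the AddLayer arguments (kernel sizes, strides, pooling flags) follow this Convolution class's own conventions.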
        Convolution CC = new Convolution(height, width);

        CC.AddLayer(5, 5, 5);
        CC.AddLayer(2, 2, 1, new HyperbolicTangent());
        CC.AddLayer(6, 6, 1);
        CC.AddLayer(2, 2, 2, true);
        CC.AddLayer(2, 2, 1, new HyperbolicTangent());
        CC.AddLayer(2, 2, 2, true);


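        // Chain the convolutional front end into the fully connected network.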
        Combination C = new Combination(CC, NN);

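        // Run once before training, train on the epoch, then run again to show how the outputs change.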
        float[] layerOut = C.Run(input);
        for (int i = 0; i < layerOut.Length; i++)
        {
            Console.WriteLine(layerOut[i]);
        }
        C.Train(Ep);
        Console.WriteLine();
        layerOut = C.Run(input);
        for (int i = 0; i < layerOut.Length; i++)
        {
            Console.WriteLine(layerOut[i]);
        }
    }
Example No. 28
0
        static void Main(string[] args)
        {
            Console.WriteLine("Hello World!");

            NeuralNetwork net = new NeuralNetwork();

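            // Build a 2-2-2-1 network of step neurons: input layer LI, hidden layers H0 and H1, output layer LO.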
            NeuralLayer LI = new NeuralLayer("LI");

            //LI.AddNeuron(new StepNeuron(1, 1));
            //LI.AddNeuron(new StepNeuron(1, 1));
            LI.AddNeuron(new InputNeuron());
            LI.AddNeuron(new InputNeuron());
            net.AddLayer(LI);

            NeuralLayer H0 = new NeuralLayer("H0");

            H0.AddNeuron(new StepNeuron(2, 0.1));
            H0.AddNeuron(new StepNeuron(2, 0.1));
            net.AddLayer(H0);

            NeuralLayer H1 = new NeuralLayer("H1");

            H1.AddNeuron(new StepNeuron(2, 0.1));
            H1.AddNeuron(new StepNeuron(2, 0.1));
            net.AddLayer(H1);

            //NeuralLayer H2 = new NeuralLayer("H1");
            //H2.AddNeuron(new StepNeuron(2, 0.2));
            //H2.AddNeuron(new StepNeuron(2, 0.2));
            //net.AddLayer(H2);

            NeuralLayer LO = new NeuralLayer("LO");

            LO.AddNeuron(new StepNeuron(2, 0.1));
            net.AddLayer(LO);


            //NeuralLayer LI = new NeuralLayer("LI");
            //LI.AddNeuron(new StepNeuron(2, 0.1));
            //net.AddLayer(LI);

            //for (int i = 0; i < 10; i++)
            //{
            //    net.SetInputs(0, 0);
            //    net.Compute();
            //    Console.WriteLine($"output: {net.GetOutputs().First().Value} error: {net.CalculateTotalError(0)}");

            //    net.SetInputs(0, 1);
            //    net.Compute();
            //    Console.WriteLine($"output: {net.GetOutputs().First().Value} error: {net.CalculateTotalError(0)}");

            //    net.SetInputs(1, 0);
            //    net.Compute();
            //    Console.WriteLine($"output: {net.GetOutputs().First().Value} error: {net.CalculateTotalError(0)}");

            //    net.SetInputs(1, 1);
            //    net.Compute();
            //    Console.WriteLine($"output: {net.GetOutputs().First().Value} error: {net.CalculateTotalError(1)}");

            //    Console.WriteLine("-------------");

            //    net.AdjustWeight(.1, 1);
            //}

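            // Training data is the AND truth table: the target is 1 only when both inputs are 1.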
            var inputs = new List<List<double>>()
            {
                new List<double> { 0, 0 },
                new List<double> { 0, 1 },
                new List<double> { 1, 0 },
                new List<double> { 1, 1 },
            };
            var expectedValues = new List<List<double>>()
            {
                new List<double> { 0 },
                new List<double> { 0 },
                new List<double> { 0 },
                new List<double> { 1 },
            };

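            // Train (arguments presumably: learning rate 0.1, 10 epochs), then dump each layer's dendrite weights.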
            net.Train(inputs, expectedValues, 0.1, 10);

            foreach (var layer in net.Layers)
            {
                Console.Write($"{layer.LayerName} {layer.Neurons.Count}");
                foreach (var neuron in layer.Neurons)
                {
                    foreach (var dendrite in neuron.Dendrites)
                    {
                        Console.Write($", {dendrite.Weight}");
                    }
                }
                Console.WriteLine();
            }
        }
Example No. 29
0
    public override void OnInspectorGUI()
    {
        #region Gizmo Options
        if (GUILayout.Button(showingGizmoOptions ? "Hide Gizmo Options" : "Show Gizmo Options"))
        {
            showingGizmoOptions = !showingGizmoOptions;
            if (showingGizmoOptions)
            {
                nodeSize = NeuralNetwork.Editor.nodeSize;
                nSpace   = nodeSpacing;
                lSpace   = layerSpacing;
                color    = nodeColor;
                gradient = axonGradient;
            }
        }
        if (showingGizmoOptions)
        {
            EditorGUI.indentLevel = 1;
            color    = EditorGUILayout.ColorField("Node Color", color);
            gradient = EditorGUILayout.GradientField("Relation Gradient", gradient);
            nodeSize = EditorGUILayout.FloatField("Node Size", nodeSize);
            nSpace   = EditorGUILayout.FloatField("Node Spacing", nSpace);
            lSpace   = EditorGUILayout.FloatField("Layer Spacing", lSpace);
            if (GUILayout.Button("Reset"))
            {
                ApplyGizmoSettings(Color.cyan, GetDefaultGradient(), 1.0f, 5.0f, 5.0f);
            }
            if (GUILayout.Button("Apply"))
            {
                ApplyGizmoSettings(color, gradient, nodeSize, nSpace, lSpace);
            }

            EditorGUI.indentLevel = 0;
        }
        #endregion

        GUILayout.Space(15);

        #region I/O Nodes
        int  prevVal   = 0;
        bool nodeAdded = false;

        var editorProp = serializedObject.FindProperty("editorUtil");
        var layersProp = editorProp.FindPropertyRelative("nodeHeights");
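        // nodeHeights stores the node count per layer: index 0 is the input layer, the last index is the output layer.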

        modifiedLayers.Clear();
        nodeAdded |= NodeLayerMod(layersProp.GetArrayElementAtIndex(0), 0, "Num Input Nodes");
        nodeAdded |= NodeLayerMod(layersProp.GetArrayElementAtIndex(layersProp.arraySize - 1), layersProp.arraySize - 1, "Num Output Nodes");
        #endregion

        GUILayout.Space(5);

        #region Hidden Layers
        int  layerRem = -1;
        bool layerAdd = false;

        EditorGUI.indentLevel = 1;
        if (GUILayout.Button("Add Hidden Layer"))
        {
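            // Insert a new hidden layer just before the output layer, copying the node count of the last hidden layer.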
            layerAdd = true;
            int v = layersProp.GetArrayElementAtIndex(layersProp.arraySize - 2).intValue;
            layersProp.InsertArrayElementAtIndex(layersProp.arraySize - 1);
            layersProp.GetArrayElementAtIndex(layersProp.arraySize - 2).intValue = v;
        }

        for (int i = 1; i < layersProp.arraySize - 1; ++i)
        {
            var indexProp = layersProp.GetArrayElementAtIndex(i);
            nodeAdded |= NodeLayerMod(indexProp, i, $"Hidden Layer {i} Nodes", out bool remove);
            if (remove)
            {
                layerRem = i;
                layersProp.DeleteArrayElementAtIndex(i--);
            }
        }
        EditorGUI.indentLevel = 0;

        #endregion

        GUILayout.Space(15);

        serializedObject.ApplyModifiedProperties();

        if (nodeAdded)
        {
            Debug.Log("Refreshing gizmo data");

            foreach (var v2i in modifiedLayers)
            {
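                // v2i.x is the layer index; a negative v2i.y marks a removed node, otherwise a node was added.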
                if (v2i.y < 0)
                {
                    nn.RemoveNodeFromLayer(v2i.x);
                }
                else
                {
                    nn.AddNodeToLayer(v2i.x, false);
                }
            }

            CacheGizmoDrawData(nn);
            EditorUtility.SetDirty(nn);
        }

        if (layerAdd)
        {
            var p = layersProp.GetArrayElementAtIndex(layersProp.arraySize - 2);
            nn.AddLayer(layersProp.arraySize - 2, p.intValue, false, false);
            CacheGizmoDrawData(nn);
            nn.RefreshConnections();
            EditorUtility.SetDirty(nn);
        }
        if (layerRem > -1)
        {
            nn.RemoveLayer(layerRem, false);
            CacheGizmoDrawData(nn);
            nn.RefreshConnections();
            EditorUtility.SetDirty(nn);
        }

        if (GUILayout.Button("Rebuild Network"))
        {
            ApplyNetworkChanges();
            EditorUtility.SetDirty(nn);
        }
        if (GUILayout.Button("Reroll Connections"))
        {
            nn.ConnectNetworkFresh();
            EditorUtility.SetDirty(nn);
        }
        GUILayout.Space(5);
        if (GUILayout.Button("Refresh Connections"))
        {
            nn.RefreshConnections();
            EditorUtility.SetDirty(nn);
        }
    }
Example No. 30
0
    public void Run()
    {
        Stopwatch watch = Stopwatch.StartNew();

        NeuralNetwork network = new NeuralNetwork();

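        // Assemble a 2-8-1 sigmoid network with Glorot-uniform initialized connections for the XOR task below.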
        network.AddLayer("input", new InputLayer(2), BaseLayer.TYPE.INPUT);
        network.AddLayer("hidden", new CoreLayer(8, ACTIVATION.SIGMOID, BaseLayer.TYPE.HIDDEN), BaseLayer.TYPE.HIDDEN);
        network.AddLayer("output", new CoreLayer(1, ACTIVATION.SIGMOID, BaseLayer.TYPE.OUTPUT), BaseLayer.TYPE.OUTPUT);
        network.AddConnection("input", "hidden", Connection.INIT.GLOROT_UNIFORM);
        network.AddConnection("hidden", "output", Connection.INIT.GLOROT_UNIFORM);

        /*
         * Optimizer optimizer = new BackProp(network, 1e-5f, 0.99f, true)
         * {
         *  Alpha = 0.1f
         * };
         */

        Optimizer optimizer = new RMSProp(network)
        {
            Alpha = 0.1f
        };

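        // Batch mode with size 4 presumably accumulates gradients over all four XOR patterns before each weight update.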
        optimizer.InitBatchMode(4);

        Vector[] input  = new Vector[4];
        Vector[] target = new Vector[4];
        //Vector output = null;

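        // The four XOR input patterns and their targets.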
        input[0] = Vector.Build(2, new float[] { 0f, 0f });
        input[1] = Vector.Build(2, new float[] { 0f, 1f });
        input[2] = Vector.Build(2, new float[] { 1f, 0f });
        input[3] = Vector.Build(2, new float[] { 1f, 1f });

        target[0] = Vector.Build(1, new float[] { 0f });
        target[1] = Vector.Build(1, new float[] { 1f });
        target[2] = Vector.Build(1, new float[] { 1f });
        target[3] = Vector.Build(1, new float[] { 0f });


        for (int e = 0; e < 200; e++)
        {
            //Console.Write("Start ");
            //BasePool.Instance.Check();

            float err = 0;

            for (int i = 0; i < 4; i++)
            {
                err += optimizer.Train(input[i], target[i]);
            }

            Console.WriteLine(err);

            //Console.Write("End ");
            //BasePool.Instance.Check();
        }
        Console.WriteLine();

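        // Print the trained output for each pattern and return the pooled vectors.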
        for (int i = 0; i < 4; i++)
        {
            Console.WriteLine(network.Activate(input[i])[0]);
            Vector.Release(input[i]);
            Vector.Release(target[i]);
        }

        optimizer.Dispose();

        Console.Write("Finish ");
        BasePool.Instance.Check();

        watch.Stop();
        Console.WriteLine(watch.ElapsedMilliseconds);
    }