Example #1
        private void Form1_Load(object sender, EventArgs e)
        {
            Input.Add(new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 });
            Input.Add(new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 1, 0 });
            Input.Add(new double[] { 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 });
            Input.Add(new double[] { 0, 0, 0, 0, 0, 0, 1, 0, 0, 0 });
            Input.Add(new double[] { 0, 0, 0, 0, 0, 1, 0, 0, 0, 0 });
            Input.Add(new double[] { 0, 0, 0, 0, 1, 0, 0, 0, 0, 0 });
            Input.Add(new double[] { 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 });
            Input.Add(new double[] { 0, 0, 1, 0, 0, 0, 0, 0, 0, 0 });
            Input.Add(new double[] { 0, 1, 0, 0, 0, 0, 0, 0, 0, 0 });
            Input.Add(new double[] { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0 });

            //Layer[] L1 = new Layer[2];
            //L1[0] = new Layer(784, Layer.ActivationFunction.ReLU);
            //L1[1] = new Layer(200, Layer.ActivationFunction.ReLU);

            //Layer[] L2 = new Layer[2];
            //L2[0] = new Layer(80, Layer.ActivationFunction.ReLU);
            //L2[1] = new Layer(10, Layer.ActivationFunction.Softmax);

            //LConnection[] C1 = new LConnection[1];
            //C1[0] = new LConnection(L1[0], L1[1]);

            //LConnection[] C2 = new LConnection[1];
            //C2[0] = new LConnection(L2[0], L2[1]);



            //NeuralNetwork[] NN = new NeuralNetwork[2];
            //NN[0] = new NeuralNetwork(L1, C1,0.01);
            //NN[1] = new NeuralNetwork(L2, C2,0.01);

            //NNConnection[] NNC = new NNConnection[1];
            //NNC[0] = new NNConnection(NN[0], NN[1]);

            //M = new Model(NN, NNC, Model.CostFunctions.CrossEntropy,0.01);

            D0 = new Dataset(784, 10);
            D0.LoadTesting("mnist_test.csv");
            D0.LoadTraining("mnist_test.csv");
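            // Note: the same file (mnist_test.csv) is loaded as both the testing and the training split here.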


            D1 = new Dataset(10, 10);
            D1.TrainingInput = Input;
            D1.TrainingLable = Input;

            Layer[] L0 = new Layer[2];
            L0[0] = new Layer(10, Layer.ActivationFunction.Sigmoid);
            L0[1] = new Layer(20, Layer.ActivationFunction.Sigmoid);

            Random rand = new Random();

            LConnection[] C0 = new LConnection[1];
            C0[0] = new LConnection(L0[0], L0[1], rand);


            NN[0] = new NeuralNetwork(L0, C0, 0.1);


            Layer[] L1 = new Layer[3];
            L1[0] = new Layer(784, Layer.ActivationFunction.ReLU);
            L1[1] = new Layer(200, Layer.ActivationFunction.ReLU);
            L1[2] = new Layer(10, Layer.ActivationFunction.Softmax);
            LConnection[] C1 = new LConnection[2];
            C1[0] = new LConnection(L1[0], L1[1], rand);
            C1[1] = new LConnection(L1[1], L1[2], rand);

            NN[1] = new NeuralNetwork(L1, C1, NeuralNetwork.CostFunctions.CrossEntropy, 0.01);



            NNConnection[] NNC = new NNConnection[1];
            NNC[0] = new NNConnection(NN[0], NN[1]);
            M      = new Model(NN, NNC, Model.CostFunctions.MeanSquareSrror, 0.1);
        }
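The ten hard-coded arrays at the top of Example #1 are just the rows of the 10x10 identity matrix (one-hot codes for the digits 0 through 9), and D1 uses them as both inputs and labels, so the small sigmoid sub-network is effectively trained on an identity mapping as a sanity check. Below is a minimal sketch of building the same vectors in a loop rather than by hand; the helper name AddOneHotDigits is illustrative, and only the List<double[]> Input collection is taken from the example above:

        // Requires System.Collections.Generic for List<T>.
        private static void AddOneHotDigits(List<double[]> input)
        {
            // Produces the same ten vectors as the literals above, in the same
            // order (index 9 set in the first vector, index 0 in the last).
            for (int digit = 0; digit < 10; digit++)
            {
                double[] oneHot = new double[10];
                oneHot[9 - digit] = 1.0;
                input.Add(oneHot);
            }
        }

Calling AddOneHotDigits(Input) at the start of Form1_Load would replace the ten Input.Add lines.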
Example #2
File: Form1.cs Project: CaddyDz/DNN
        private void Form1_Load(object sender, EventArgs e)
        {
            Layer[] L1 = new Layer[2];
            L1[0] = new Layer(784, Layer.ActivationFunction.ReLU);
            L1[1] = new Layer(200, Layer.ActivationFunction.ReLU);

            Layer[] L2 = new Layer[2];
            L2[0] = new Layer(80, Layer.ActivationFunction.ReLU);
            L2[1] = new Layer(10, Layer.ActivationFunction.Softmax);

            LConnection[] C1 = new LConnection[1];
            C1[0] = new LConnection(L1[0], L1[1]);

            LConnection[] C2 = new LConnection[1];
            C2[0] = new LConnection(L2[0], L2[1]);



            NeuralNetwork[] NN = new NeuralNetwork[2];
            NN[0] = new NeuralNetwork(L1, C1, 0.01);
            NN[1] = new NeuralNetwork(L2, C2, 0.01);

            NNConnection[] NNC = new NNConnection[1];
            NNC[0] = new NNConnection(NN[0], NN[1]);

            M = new Model(NN, NNC, Model.CostFunctions.CrossEntropy, 0.01);

            D = new Dataset(784, 10);
            D.LoadDataset("mnist_test.csv");



            //string w = null;

            //    for (int i = 0; i < C[0].WeightBackMap.Length; i++)
            //    {
            //        w += C[0].WeightBackMap[i] + ",";
            //    }
            //for (int i = 0; i < C[1].WeightBackMap.Length; i++)
            //{
            //    w += (C[1].WeightBackMap[i]+25) + ",";
            //}

            //w += "\r\n";
            //w += "\r\n";
            //for (int i = 0; i < NN._WeightsBackMap.Length; i++)
            //{
            //    w+=NN._WeightsBackMap[NN._WeightsBackMap.Length-i-1] +",";
            //}
            //MessageBox.Show(w);
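            // LTEST below assembles a single 4-layer network (784-200-80-10) with the same
            // overall topology as the chained two-network Model above, presumably so that
            // the monolithic and chained setups can be compared.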
            Layer[] LTEST = new Layer[4];
            LTEST[0] = new Layer(784, Layer.ActivationFunction.ReLU);
            LTEST[1] = new Layer(200, Layer.ActivationFunction.ReLU);
            LTEST[2] = new Layer(80, Layer.ActivationFunction.ReLU);
            LTEST[3] = new Layer(10, Layer.ActivationFunction.Softmax);
            LConnection[] CTEST = new LConnection[3];
            CTEST[0] = new LConnection(LTEST[0], LTEST[1]);
            CTEST[1] = new LConnection(LTEST[1], LTEST[2]);
            CTEST[2] = new LConnection(LTEST[2], LTEST[3]);

            NNTEST = new NeuralNetwork(LTEST, CTEST, NeuralNetwork.CostFunctions.CrossEntropy, 0.01);
        }
Example #3
 public NNConnection(NeuralNetwork input_neural_network, NeuralNetwork output_neural_network)
 {
     Input_Neural_Nerwork  = input_neural_network;
     Output_Neural_Nerwork = output_neural_network;
     Model_Connection      = new LConnection(Input_Neural_Nerwork.Layers[Input_Neural_Nerwork.Layers.Length - 1], Output_Neural_Nerwork.Layers[0]); // connect the last layer of the input network to the first layer of the output network
 }
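Examples #1 and #2 call this constructor when chaining two sub-networks into a Model; the extra LConnection it creates is what bridges the boundary where the two networks have different widths. A minimal usage sketch, reusing the variable names from Example #2:

 NNConnection[] NNC = new NNConnection[1];
 NNC[0] = new NNConnection(NN[0], NN[1]); // links NN[0]'s 200-neuron output layer to NN[1]'s 80-neuron input layer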