Example #1
        private unsafe neural_network NN_init(int layer_num, int[] layer_neuron_num)
        {
            //Init NN: pin the managed layer-size array so it can be passed to the constructor as a raw int*
            fixed(int *p = layer_neuron_num)
            {
                neural_network nn = new neural_network(layer_num, p);

                return nn;
            }
        }
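The example relies on C#'s fixed statement to pin the managed int[] before handing a raw int* to the unmanaged-style neural_network constructor. Below is a minimal, self-contained sketch of that same pinning pattern; SumLayerSizes is a hypothetical stand-in for the constructor, and the project must be compiled with /unsafe.

        // Requires: using System; project compiled with /unsafe.
        // SumLayerSizes is hypothetical and only stands in for a native-style
        // API such as the neural_network constructor, which takes an int*.
        private static unsafe int SumLayerSizes(int* sizes, int count)
        {
            int total = 0;
            for (int i = 0; i < count; i++)
            {
                total += sizes[i]; // read through the raw pointer
            }
            return total;
        }

        private static unsafe void PinningSketch()
        {
            int[] layer_neuron_num = { 784, 30, 10 };

            // fixed pins the managed array so the GC cannot move it while a
            // raw pointer into it is in use; the pin ends when the block exits.
            fixed (int* p = layer_neuron_num)
            {
                Console.WriteLine(SumLayerSizes(p, layer_neuron_num.Length)); // prints 824
            }
        }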
Example #2
        unsafe static private void init_NN()
        {
            //Read NN parameter file
            if (!File.Exists("NN_Parameter"))
            {
                MessageBox.Show("Open NN_Parameter fail!!");
                Console.WriteLine("Open NN_Parameter fail!!");
                return;
            }
            FileStream fsSource = new FileStream("NN_Parameter", FileMode.Open, FileAccess.Read);

            byte[] temp_byte = new byte[8];

            fsSource.Read(temp_byte, 0, 4); //layer count: an int is 4 bytes
            int layer_num = BitConverter.ToInt32(temp_byte, 0);

            //init nn layer parameter
            int[] layer_neuron_num = new int[layer_num];
            for (int i = 0; i < layer_num; i++)
            {
                fsSource.Read(temp_byte, 0, 4); //an int is 4 bytes
                layer_neuron_num[i] = BitConverter.ToInt32(temp_byte, 0);
            }


            //init nn weight parameter
            fsSource.Read(temp_byte, 0, 4); //weight count: an int is 4 bytes
            int weight_num = BitConverter.ToInt32(temp_byte, 0);

            double[] weight = new double[weight_num];
            for (int i = 0; i < weight_num; i++)
            {
                fsSource.Read(temp_byte, 0, 8); //a double is 8 bytes
                weight[i] = BitConverter.ToDouble(temp_byte, 0);
            }

            fsSource.Read(temp_byte, 0, 4); //bias count: an int is 4 bytes
            int bias_num = BitConverter.ToInt32(temp_byte, 0);

            double[] bias = new double[bias_num];
            for (int i = 0; i < bias_num; i++)
            {
                fsSource.Read(temp_byte, 0, 8); //a double is 8 bytes
                bias[i] = BitConverter.ToDouble(temp_byte, 0);
            }

            fsSource.Close();
            //nn = NN_init(layer_num, layer_neuron_num, weight, bias);
            //Init NN: pin the layer sizes, weights and biases before passing raw pointers
            fixed(int *p = layer_neuron_num)
            {
                nn = new neural_network(layer_num, p);
                fixed(double *p1 = weight)
                {
                    fixed(double *p2 = bias)
                    {
                        nn.set_parameter(p1, p2);
                    }
                }
            }
        }
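The reads above imply a simple binary layout for NN_Parameter: an Int32 layer count, that many Int32 layer sizes, an Int32 weight count followed by that many doubles, then an Int32 bias count followed by that many doubles. The sketch below re-reads the same layout with a BinaryReader instead of manual byte buffers; ReadParameterFile is a hypothetical helper and assumes little-endian data, which is what BitConverter produces on typical x86/x64 machines.

        // Requires: using System.IO;
        // Sketch only: parses the NN_Parameter layout used above.
        private static void ReadParameterFile(string path,
            out int[] layer_neuron_num, out double[] weight, out double[] bias)
        {
            using (var reader = new BinaryReader(File.OpenRead(path)))
            {
                int layer_num = reader.ReadInt32();           // layer count
                layer_neuron_num = new int[layer_num];
                for (int i = 0; i < layer_num; i++)
                {
                    layer_neuron_num[i] = reader.ReadInt32(); // neurons per layer
                }

                int weight_num = reader.ReadInt32();          // weight count
                weight = new double[weight_num];
                for (int i = 0; i < weight_num; i++)
                {
                    weight[i] = reader.ReadDouble();
                }

                int bias_num = reader.ReadInt32();            // bias count
                bias = new double[bias_num];
                for (int i = 0; i < bias_num; i++)
                {
                    bias[i] = reader.ReadDouble();
                }
            }
        }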
Example #3
        private unsafe void NN_Training()
        {
            int    epoch           = int.Parse(textBox1.Text);
            int    iteration       = int.Parse(textBox2.Text);
            double learning_factor = double.Parse(textBox3.Text);
            double up_bound        = double.Parse(textBox5.Text);
            double lower_bound     = double.Parse(textBox4.Text);

            int[] layer_neuron_num = new int[4];
            layer_neuron_num[0] = 784;                        //Input layer size (28 x 28 MNIST pixels)
            layer_neuron_num[1] = Int32.Parse(textBox6.Text); //hidden_layer1 size
            layer_neuron_num[2] = Int32.Parse(textBox7.Text); //hidden_layer2 size
            layer_neuron_num[3] = 10;                         //Output layer: neurons 0 ~ 9 represent digits 0 ~ 9

            byte[] image_data = new byte[784];                //image returned by the MNIST reader
            Int16  label_data;                                //label returned by the MNIST reader

            double[] input_data  = new double[784];           //NN training input data
            double[] target_data = new double[10];            //NN training target data
            bool     check_data  = false;                     //result of the training call (used to count errors)

            float [] error_log = new float [iteration];
            float    nn_error  = 1; //tracks the best (lowest) error across epochs

            for (int epoch_count = 0; epoch_count < epoch; epoch_count++)
            {
                //init NN
                neural_network nn = NN_init(4, layer_neuron_num);

                //Change Form3 epoch label
                f3.Change_Label(epoch_count, false, 100);

                //Training NN
                for (int iteration_count = 0; iteration_count < iteration; iteration_count++)
                {
                    //init error log
                    error_log[iteration_count] = 0;

                    //Change Form3 iteration label
                    f3.Change_iteration_label(iteration_count);

                    //Get a training label & image to train the NN
                    for (int Label_count = 0; Label_count < 60000; Label_count++) //the MNIST training set has 60000 items
                    {
                        //Read data
                        mnist.ReadImg("train-labels.idx1-ubyte", "train-images.idx3-ubyte", Label_count, image_data);
                        label_data = mnist.label;

                        //transform image_data to input data
                        for (int input_count = 0; input_count < 784; input_count++)
                        {
                            input_data[input_count] = image_data[input_count];
                        }

                        //Set target output data (one-hot encoding of the label)
                        for (int output_count = 0; output_count < 10; output_count++)
                        {
                            target_data[output_count] = 0;
                        }
                        target_data[label_data] = 1; //expected NN output for this digit

                        //train_nn
                        fixed(double *p1 = target_data)
                        {
                            fixed(double *p2 = input_data)
                            {
                                //train the NN; check_data is true when the output was wrong (counted as an error below)
                                check_data = nn.Classification_train(p1, p2, learning_factor, up_bound, lower_bound);
                            }
                        }

                        if (check_data)
                        {
                            error_log[iteration_count]++;
                        }
                    }
                    error_log[iteration_count] = error_log[iteration_count] / 60000; //error rate over the 60000 training samples

                    //Save the best performance data
                    if (nn_error > error_log[iteration_count])
                    {
                        nn_error = error_log[iteration_count];
                        this.nn  = nn;
                    }
                }
                //show error log
                f3.show_to_graph(error_log, epoch_count);
            }
            //Training finished
            f3.Change_Label(epoch - 1, true, nn_error);
        }
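Since target_data and input_data are both double[], the two nested fixed blocks around Classification_train could also be folded into a single fixed statement that pins both arrays at once. A small sketch of that pattern follows; Consume is a hypothetical stand-in for nn.Classification_train, and the project must be compiled with /unsafe.

        // Requires: using System; project compiled with /unsafe.
        // Consume is hypothetical and only stands in for a call such as
        // nn.Classification_train that takes two double* arguments.
        private static unsafe double Consume(double* target, double* input, int n)
        {
            double acc = 0;
            for (int i = 0; i < n; i++)
            {
                acc += target[i] * input[i]; // touch both pinned buffers
            }
            return acc;
        }

        private static unsafe void TrainStepSketch(double[] target_data, double[] input_data)
        {
            // Two pointers of the same element type can share one fixed statement,
            // so both arrays stay pinned for the duration of the call.
            fixed (double* p1 = target_data, p2 = input_data)
            {
                Console.WriteLine(Consume(p1, p2, Math.Min(target_data.Length, input_data.Length)));
            }
        }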