Example #1
 public NeuralNetwork(int inputSize, int[] neurons, double[] weightsLimits, TransitionFunction tf, bool useBias)
 {
     // Build one NeuralLayer per entry in 'neurons'; each entry is that layer's neuron count.
     AbstractNeuralLayer[] layers = new AbstractNeuralLayer[neurons.Length];
     for (int i = 0; i < neurons.Length; i++)
     {
         layers[i] = new NeuralLayer(inputSize, neurons[i], true, weightsLimits, tf, useBias);
         // The next layer receives this layer's outputs, so its input size is neurons[i].
         inputSize = neurons[i];
     }
     this.layers = layers;
 }
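A minimal usage sketch for this constructor, assuming the TransitionFunction.Sigmoid member that appears in Example #2; the input size, layer sizes and weight range below are hypothetical values chosen for illustration:

 // Hypothetical setup: 4 inputs, layers of 3, 2 and 1 neurons, sigmoid activation, bias enabled.
 int[] neurons = new int[] { 3, 2, 1 };
 double[] weightsLimits = new double[] { -1.0, 1.0 };  // assumed {min, max} range for random initial weights
 NeuralNetwork net = new NeuralNetwork(4, neurons, weightsLimits, TransitionFunction.Sigmoid, true);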
Example #2
        static void Main(string[] args)
        {
            bool learn = false;
            bool cp_learn = false;
            String learnFilename = "";
            bool test = false;
            String testFilename = "";
            bool useNeighbourhood = true;
            bool randomWeights = false;
            double[] randomWeightsLimits = null;
            bool saveOutputNet = false;
            String outputFile = "";
            bool input = false;
            String inputFilename = "";

            bool bplearn = false;

            bool usebias = true;

            bool wh = false;

            int dimension = 2;

            // Learn params - default values
            // 1. Kohonen params
            int phases = 4;
            // 2. Counter Propagation params
            double ni = 0.1;
            int lengthOfPhase = 1000;
            double divisor = 10;

            int[] neuronCounts = new int[3] { 3, 2, 1 };

            // Parse command-line arguments; a flag that takes a value reads args[i + 1] and skips it.
            for (int i = 0; i < args.Length; i++)
            {

                if (args[i] == "-i")
                {
                    input = true;
                    inputFilename = args[i + 1];
                    i++;
                }

                if (args[i] == "-nb")
                {
                    usebias = false;
                }

                if (args[i] == "-bpl")
                {
                    bplearn = true;
                    learnFilename = args[i + 1];
                    i++;
                }

                if (args[i] == "-l" || args[i] == "--learn")
                {
                    learn = true;
                    learnFilename = args[i + 1];
                    i++; // consume the filename; the loop's i++ then moves to the next flag
                }

                if (args[i] == "-phases")
                {
                    phases = int.Parse(args[i + 1]);
                    i++;
                }

                if (args[i] == "-cpl" || args[i] == "--counter-propagation-learn")
                {
                    cp_learn = true;
                    learnFilename = args[i + 1];
                    i++;
                }

                if (args[i] == "-ni")
                {
                    ni = double.Parse(args[i + 1]);
                    i++;
                }
                if (args[i] == "-length")
                {
                    lengthOfPhase = int.Parse(args[i + 1]);
                    i++;
                }
                if (args[i] == "-divisor")
                {
                    divisor = double.Parse(args[i + 1]);
                    i++;
                }

                if (args[i] == "-wh")
                {
                    wh = true;
                }

                if (args[i] == "-nn" || args[i] == "--no-neighbourhood")
                {
                    useNeighbourhood = false;
                }
                if (args[i] == "-rw" || args[i] == "--random-weights")
                {
                    randomWeights = true;
                    string limits = args[i + 1];
                    i++;
                    string[] lim = limits.Split(new char[1] { ';' }, 2);
                    randomWeightsLimits = new double[2];
                    randomWeightsLimits[0] = double.Parse(lim[0]);
                    randomWeightsLimits[1] = double.Parse(lim[1]);
                }
                if (args[i] == "-o")
                {
                    saveOutputNet = true;
                    outputFile = args[i + 1];
                    i++;
                }
                if (args[i] == "-t")
                {
                    test = true;
                    testFilename = args[i + 1];
                    i++;
                }
                if (args[i] == "-d")
                {
                    dimension = int.Parse(args[i + 1]);
                    i++;
                }

                if (args[i] == "-h")
                {
                    Console.WriteLine(File.ReadAllText("Help.txt"));
                    return;
                }

                if (args[i] == "-neurons")
                {
                    neuronCounts = ParseNeuronCounts(args[i + 1]);
                    i++;
                }
            }

            /*Console.WriteLine("initial params: phases = " + phases
                + " / ni = " + ni
                + " / lengthOfPhase = " + lengthOfPhase
                + " / divisor = " + divisor);*/

            NeuralNetwork net = null;

            // Optionally load an existing network from an XML file.
            if (input)
            {
                net = XML.XMLNetworkCreator.Create(inputFilename);
            }

            // Kohonen (self-organizing map) training on the input vectors.
            if (learn)
            {
                double[][] data = ReadData(learnFilename);

                int neuralCount = data.Length;
                int inputSize = data[0].Length;
                KohonenLayer kohonenLayer = new KohonenLayer(inputSize, neuralCount, randomWeights, randomWeightsLimits, dimension);
                net = new NeuralNetwork(new AbstractNeuralLayer[1] { kohonenLayer });
                kohonenLayer.Learn(data, 10000, useNeighbourhood, phases);
            }

            // Counter-propagation: a Kohonen layer followed by a supervised second layer.
            if (cp_learn)
            {
                // Input data for the Kohonen layer + expected output for the second layer (the program's output).
                List<double[][]> data = ReadInputAndAnswersData(learnFilename);

                double[][] kohonenInput = data[0];
                double[][] expectedOutputForSecondLayer = data[1];

                int inputSize = kohonenInput[0].Length;
                int neuralCount = kohonenInput.Length; // == expectedOutputForSecondLayer.Length
                KohonenLayer kohonenLayer = new KohonenLayer(inputSize, neuralCount, randomWeights, randomWeightsLimits, dimension);

                inputSize = neuralCount;
                neuralCount = expectedOutputForSecondLayer[0].Length;

                TransitionFunction tf = TransitionFunction.Sigmoid;
                if (wh)
                {
                    tf = TransitionFunction.Linear;
                }

                NeuralLayer secondLayer = new NeuralLayer(inputSize, neuralCount, randomWeights, randomWeightsLimits, tf, true);

                net = new NeuralNetwork(new AbstractNeuralLayer[2] { kohonenLayer, secondLayer });

                net.CounterPropagationLearn(kohonenInput, useNeighbourhood,
                    expectedOutputForSecondLayer, ni, divisor,
                    phases, lengthOfPhase);
            }

            // Back-propagation training of a feed-forward sigmoid network.
            if (bplearn)
            {
                List<double[][]> data = ReadInputAndAnswersData(learnFilename);
                double[][] inputData = data[0];
                double[][] expectedOutput = data[1];

                int inputSize = inputData[0].Length;

                net = new NeuralNetwork(inputSize, neuronCounts, randomWeightsLimits, TransitionFunction.Sigmoid, usebias);

                net.BackPropagationLearn(inputData, expectedOutput, lengthOfPhase, ni, usebias);
            }

            if (saveOutputNet)
            {
                SaveOutputNet(net, outputFile);
            }

            if (test)
            {
                double[][] data = ReadData(testFilename);
                double[][] result = Test(net, data);
                PrintResult(result);
            }
            Console.ReadKey();
        }
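A hypothetical invocation of the program above (the executable and data file names are placeholders; the flags are the ones parsed in Main, and each value-taking flag must be followed immediately by its value):

 NeuralNetwork.exe -cpl train.txt -rw "-1;1" -ni 0.1 -length 1000 -divisor 10 -phases 4 -o net.xml -t test.txt
 NeuralNetwork.exe -i net.xml -t test.txt

Based on the parsing above, the first line should train a counter-propagation network on train.txt with random initial weights in [-1, 1], save it to net.xml and run the test file; the second should load the saved network and only run the test.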