Example #1
        /// <summary>
        /// Overrides Epoch(): pre-trains each layer with contrastive divergence,
        /// then runs a locked base-class epoch so the error is measured the same
        /// way as for the other algorithms.
        /// </summary>
        /// <param name="trainingSet">Training samples, one dictionary of per-signal data each.</param>
        /// <param name="modalitiesMeanSquarredError">Mean squared error per modality.</param>
        /// <param name="globalMeanSquarred">Global mean squared error.</param>
        public override void Epoch(List<Dictionary<Signal, double[,]>> trainingSet, out Dictionary<Signal, double> modalitiesMeanSquarredError, out double globalMeanSquarred)
        {
            //Convert to Accord format
            double[][] samples = new double[trainingSet.Count][];
            for (int i = 0; i < trainingSet.Count; i++)
            {
                samples[i] = concatenateTrainingSample(trainingSet[i]);
            }

            for (int _layerIndex = 0; _layerIndex < network.Layers.Length; _layerIndex++)
            {
                //Create the teacher for the layer
                teacher = new DeepBeliefNetworkLearning(network)
                {
                    Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
                    {
                        LearningRate = learningRate,
                        Momentum     = momentum,
                        Decay        = weightDecay,
                    },

                    LayerIndex = _layerIndex,
                };

                //Run the epoch for the layer
                double[][] sampleForLayer = teacher.GetLayerInput(samples);
                teacher.RunEpoch(sampleForLayer);
            }
            network.UpdateVisibleWeights();

            //Manually run a base-class epoch so the error is measured exactly as for the other algorithms
            learningLocked = true;
            base.Epoch(trainingSet, out modalitiesMeanSquarredError, out globalMeanSquarred);
            learningLocked = false;
        }
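
The override above calls a `concatenateTrainingSample` helper that is not shown in the snippet. A minimal sketch of what it presumably does, flattening each modality's signal matrix into a single input vector for Accord (hypothetical; the real helper may order or scale the modalities differently):

        // Hypothetical helper assumed by Epoch(): flattens one training sample's
        // per-signal matrices into one flat input vector.
        private double[] concatenateTrainingSample(Dictionary<Signal, double[,]> sample)
        {
            var flat = new List<double>();
            foreach (KeyValuePair<Signal, double[,]> modality in sample)
            {
                // foreach over a double[,] yields its elements in row-major order.
                foreach (double value in modality.Value)
                {
                    flat.Add(value);
                }
            }
            return flat.ToArray();
        }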
 public AccordNetwork()
 {
     network = new DeepBeliefNetwork(new BernoulliFunction(), inputLength, 1200, 600, 2);
     new NguyenWidrow(network).Randomize();
     network.UpdateVisibleWeights();
     unsuperVisedTeacher = GetUnsupervisedTeacherForNetwork(network);
     supervisedTeacher   = GetSupervisedTeacherForNetwork(network);
 }
Example #3
        private void learnLayerUnsupervised()
        {
            if (!Main.CanGenerate)
            {
                return;
            }
            Dispatcher dispatcher = Dispatcher.CurrentDispatcher;

            new Task(() =>
            {
                DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(Main.Network)
                {
                    Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
                    {
                        LearningRate = LearningRate,
                        Momentum     = 0.5,
                        Decay        = WeightDecay,
                    },

                    LayerIndex = SelectedLayerIndex - 1,
                };

                double[][] inputs;
                Main.Database.Training.GetInstances(out inputs);
                int batchCount = Math.Max(1, inputs.Length / BatchSize);

                // Create mini-batches to speed learning
                int[] groups = Accord.Statistics.Tools
                               .RandomGroups(inputs.Length, batchCount);
                double[][][] batches = inputs.Subgroups(groups);

                // Gather learning data for the layer
                double[][][] layerData = teacher.GetLayerInput(batches);
                var cd = teacher.GetLayerAlgorithm(teacher.LayerIndex) as ContrastiveDivergenceLearning;

                // Start running the learning procedure
                for (int i = 0; i < Epochs && !shouldStop; i++)
                {
                    double error = teacher.RunEpoch(layerData) / inputs.Length;

                    dispatcher.BeginInvoke((Action <int, double>)updateError,
                                           DispatcherPriority.ContextIdle, i + 1, error);

                    // After the first 10 epochs, raise the momentum from its
                    // conservative initial value of 0.5 (a common CD heuristic).
                    if (i == 10)
                    {
                        cd.Momentum = Momentum;
                    }
                }

                IsLearning = false;
            }).Start();
        }
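
The `updateError` callback dispatched above is not shown; whatever it does, it must match the `Action<int, double>` signature used in the `BeginInvoke` call. A minimal sketch (hypothetical; `CurrentEpoch` and `CurrentError` are assumed view-model properties, not names from the source):

        private void updateError(int epochNumber, double error)
        {
            // Hypothetical sketch of the UI callback; runs on the dispatcher thread.
            CurrentEpoch = epochNumber; // assumed property
            CurrentError = error;       // assumed property
        }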
        private DeepBeliefNetworkLearning GetUnsupervisedTeacherForNetwork(DeepBeliefNetwork deepNetwork)
        {
            var teacher = new DeepBeliefNetworkLearning(deepNetwork)
            {
                Algorithm = (hiddenLayer, visibleLayer, i) => new ContrastiveDivergenceLearning(hiddenLayer, visibleLayer)
                {
                    LearningRate = 0.1,
                    Momentum     = 0.5
                }
            };

            return(teacher);
        }
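
The constructor in Example #1 also calls a `GetSupervisedTeacherForNetwork` helper that is not shown. A plausible counterpart, modeled on the `CreateTeachers` method in Example #6; the 0.1/0.5 hyperparameters are assumptions:

        private BackPropagationLearning GetSupervisedTeacherForNetwork(DeepBeliefNetwork deepNetwork)
        {
            // Sketch only: backpropagation for the supervised fine-tuning phase,
            // mirroring the unsupervised contrastive-divergence teacher above.
            var teacher = new BackPropagationLearning(deepNetwork)
            {
                LearningRate = 0.1, // assumed value
                Momentum     = 0.5  // assumed value
            };

            return(teacher);
        }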
Example #5
        private static DeepBeliefNetwork createNetwork(double[][] inputs)
        {
            DeepBeliefNetwork network = new DeepBeliefNetwork(6, 2, 1);

            network.Machines[0].Hidden.Neurons[0].Weights[0] = 0.00461421;
            network.Machines[0].Hidden.Neurons[0].Weights[1] = 0.04337112;
            network.Machines[0].Hidden.Neurons[0].Weights[2] = -0.10839599;
            network.Machines[0].Hidden.Neurons[0].Weights[3] = -0.06234004;
            network.Machines[0].Hidden.Neurons[0].Weights[4] = -0.03017057;
            network.Machines[0].Hidden.Neurons[0].Weights[5] = 0.09520391;
            network.Machines[0].Hidden.Neurons[0].Threshold  = 0;

            network.Machines[0].Hidden.Neurons[1].Weights[0] = 0.08263872;
            network.Machines[0].Hidden.Neurons[1].Weights[1] = -0.118437;
            network.Machines[0].Hidden.Neurons[1].Weights[2] = -0.21710971;
            network.Machines[0].Hidden.Neurons[1].Weights[3] = 0.02332903;
            network.Machines[0].Hidden.Neurons[1].Weights[4] = 0.00953116;
            network.Machines[0].Hidden.Neurons[1].Weights[5] = 0.09870652;
            network.Machines[0].Hidden.Neurons[1].Threshold  = 0;

            network.Machines[0].Visible.Neurons[0].Threshold = 0;
            network.Machines[0].Visible.Neurons[1].Threshold = 0;
            network.Machines[0].Visible.Neurons[2].Threshold = 0;
            network.Machines[0].Visible.Neurons[3].Threshold = 0;
            network.Machines[0].Visible.Neurons[4].Threshold = 0;
            network.Machines[0].Visible.Neurons[5].Threshold = 0;

            network.UpdateVisibleWeights();


            DeepBeliefNetworkLearning target = new DeepBeliefNetworkLearning(network)
            {
                Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
            };

            for (int layer = 0; layer < 2; layer++)
            {
                target.LayerIndex = layer;

                double[][] layerInputs = target.GetLayerInput(inputs);

                int      iterations = 5000;
                double[] errors     = new double[iterations];
                for (int i = 0; i < iterations; i++)
                {
                    errors[i] = target.RunEpoch(layerInputs);
                }
            }

            return(network);
        }
Example #6
        public void Train(double[][] i, double[][] o = null, int outputLength = 10, int hiddenLayer = -1)
        {
            if (n == null)
            {
                if (File.Exists(p))
                {
                    n = DeepBeliefNetwork.Load(p);
                }
                else
                {
                    outputLength = (o == null) ? outputLength : o[0].Length;
                    hiddenLayer  = (hiddenLayer == -1) ? (int)Math.Log(i[0].Length, outputLength) : hiddenLayer;
                    List <int> layers = new List <int>();
                    for (int j = 0; j < hiddenLayer; j++)
                    {
                        layers.Add(i[0].Length);
                    }
                    layers.Add(outputLength);
                    n = new DeepBeliefNetwork(new BernoulliFunction(), i[0].Length, layers.ToArray());
                    new GaussianWeights(n).Randomize();
                }
            }

            dynamic t;

            if (o == null)
            {
                t = new DeepBeliefNetworkLearning(n)
                {
                    Algorithm = (h, v, j) => new ContrastiveDivergenceLearning(h, v), LayerIndex = n.Machines.Count - 1,
                };
                while (true)
                {
                    // Trains indefinitely; the original has no exit condition,
                    // so the caller is expected to stop the process externally.
                    e = t.RunEpoch(t.GetLayerInput(i));
                }
            }
            else
            {
                t = new DeepNeuralNetworkLearning(n)
                {
                    Algorithm = (ann, j) => new ParallelResilientBackpropagationLearning(ann), LayerIndex = n.Machines.Count - 1,
                };
                while (true)
                {
                    // Same indefinite loop for the supervised fine-tuning pass.
                    e = t.RunEpoch(t.GetLayerInput(i), o);
                }
            }
        }
        public void CreateTeachers(DeepBeliefNetwork dbn)
        {
            unsupervisedTeacher = new DeepBeliefNetworkLearning(dbn)
            {
                Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
                {
                    LearningRate = unsupervisedLR,
                    Momentum     = unsupervisedM,
                    Decay        = unsupervisedD,
                }
            };

            supervisedTeacher = new BackPropagationLearning(dbn)
            {
                LearningRate = supervisedLR,
                Momentum     = supervisedM
            };
        }
Example #8
        public void RunTraining1(Training1Parameters parameters)
        {
            LogInfoUsingBothLoggers("Started unsupervised training.");

            var teacher = new DeepBeliefNetworkLearning(NeuralNetwork)
            {
                Algorithm = (hiddenLayer, visibleLayer, i) => new ContrastiveDivergenceLearning(hiddenLayer, visibleLayer)
                {
                    LearningRate = parameters.LearningRate,
                    Momentum     = parameters.Momentum,
                    Decay        = parameters.Decay,
                }
            };

            var inputs = _configuration.InputsOutputsData.Inputs;

            // Setup batches of input for learning.
            var batchCount = Math.Max(1, inputs.Length / 100);

            // Create mini-batches to speed learning.
            var groups  = Accord.Statistics.Tools.RandomGroups(inputs.Length, batchCount);
            var batches = inputs.Subgroups(groups);

            // Unsupervised learning on each hidden layer, except for the output layer.
            var guiLogIntensity = GetGuiLogIntensity(parameters.UnsupervisedEpochs);

            for (int layerIndex = 0; layerIndex < NeuralNetwork.Machines.Count - 1; layerIndex++)
            {
                teacher.LayerIndex = layerIndex;
                var layerData = teacher.GetLayerInput(batches);
                foreach (int i in Enumerable.Range(1, parameters.UnsupervisedEpochs))
                {
                    var error   = teacher.RunEpoch(layerData) / inputs.Length;
                    var message = $"Layer: {layerIndex} Epoch: {i}, Error: {error}";
                    LogCurrentEpochResult(message, guiLogIntensity, i, parameters.UnsupervisedEpochs);

                    if (_skipPhaseRequest.RequestedAndUnhandled)
                    {
                        LogPhaseSkippnigAndNotifyHandled(i, parameters.UnsupervisedEpochs);
                        break;
                    }
                }
            }
        }
Example #9
        static void Main(string[] args)
        {
            double[][] inputs;
            double[][] outputs;
            double[][] testInputs;
            double[][] testOutputs;

            const int SampleTrainingCount = 120;
            const int SampleTestCount     = 30;

            // Load the iris dataset.
            inputs = DataManager.LoadCSV(@"../../../data/iris.data", out outputs);
            //inputs = DataManager.Load(@"../../../data/data.txt", out outputs);

            // The first SampleTrainingCount data rows will be for training. The rest will be for testing.
            testInputs  = inputs.Skip(SampleTrainingCount).ToArray();
            testOutputs = outputs.Skip(SampleTrainingCount).ToArray();
            inputs      = inputs.Take(SampleTrainingCount).ToArray();
            outputs     = outputs.Take(SampleTrainingCount).ToArray();

            // Setup the deep belief network and initialize with random weights.
            DeepBeliefNetwork network = new DeepBeliefNetwork(inputs.First().Length, 10, 1);

            new GaussianWeights(network, 0.1).Randomize();
            network.UpdateVisibleWeights();

            // Setup the learning algorithm.
            DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network)
            {
                Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
                {
                    LearningRate = 0.1,
                    Momentum     = 0.5,
                    Decay        = 0.001,
                }
            };

            // Setup batches of input for learning.
            int batchCount = Math.Max(1, inputs.Length / 100);

            // Create mini-batches to speed learning.
            int[]        groups  = Accord.Statistics.Tools.RandomGroups(inputs.Length, batchCount);
            double[][][] batches = inputs.Subgroups(groups);
            // Learning data for the specified layer.
            double[][][] layerData;

            // Unsupervised learning on each hidden layer, except for the output layer.
            for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
            {
                teacher.LayerIndex = layerIndex;
                layerData          = teacher.GetLayerInput(batches);
                for (int i = 0; i < 200; i++)
                {
                    double error = teacher.RunEpoch(layerData) / inputs.Length;
                    if (i % 10 == 0)
                    {
                        Console.WriteLine(i + ", Error = " + error);
                    }
                }
            }

            // Supervised learning on entire network, to provide output classification.
            var teacher2 = new BackPropagationLearning(network)
            {
                LearningRate = 0.1,
                Momentum     = 0.5
            };

            // Run supervised learning.
            for (int i = 0; i < SampleTrainingCount; i++)
            {
                double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
                if (i % 10 == 0)
                {
                    Console.WriteLine(i + ", Error = " + error);
                }
            }

            // Test the resulting accuracy on the SampleTestCount held-out items.
            int correct = 0;

            for (int i = 0; i < SampleTestCount; i++)
            {
                double[] outputValues = network.Compute(testInputs[i]);
                if (DataManager.FormatOutputResult(outputValues) == DataManager.FormatOutputResult(testOutputs[i]))
                {
                    correct++;
                }
            }

            Console.WriteLine("Correct " + correct + "/" + SampleTestCount + ", " + Math.Round(((double)correct / (double)SampleTestCount * 100), 2) + "%");
            Console.Write("Press any key to quit ..");
            Console.ReadKey();
        }
 public void LoadNetworkFromFile(string filePath)
 {
     network             = DeepBeliefNetwork.Load(filePath);
     supervisedTeacher   = GetSupervisedTeacherForNetwork(network);
     unsuperVisedTeacher = GetUnsupervisedTeacherForNetwork(network);
 }
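
A matching save method would be a one-liner, since `DeepBeliefNetwork` exposes `Save` directly (it is used that way in Examples #11 and #14); a sketch:

 public void SaveNetworkToFile(string filePath)
 {
     // Persist the trained network so LoadNetworkFromFile can restore it later.
     network.Save(filePath);
 }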
Example #11
        public static void Learn(double[][] inputs, double[][] outputs)
        {
            var n           = (int)(count * 0.8); // count: total dataset size (a class-level field)
            var testInputs  = inputs.Skip(n).ToArray();
            var testOutputs = outputs.Skip(n).ToArray();

            inputs  = inputs.Take(n).ToArray();
            outputs = outputs.Take(n).ToArray();

            var network = new DeepBeliefNetwork(inputs.First().Length, 10, 10);

            new GaussianWeights(network, 0.1).Randomize();
            network.UpdateVisibleWeights();

            // Setup the learning algorithm.
            var teacher = new DeepBeliefNetworkLearning(network)
            {
                Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
                {
                    LearningRate = 0.1,
                    Momentum     = 0.5,
                    Decay        = 0.001,
                }
            };
            // Setup batches of input for learning.
            int batchCount = Math.Max(1, inputs.Length / 100);

            // Create mini-batches to speed learning.
            int[]        groups  = Classes.Random(inputs.Length, batchCount);
            double[][][] batches = inputs.Subgroups(groups);
            // Learning data for the specified layer.
            double[][][] layerData;

            // Unsupervised learning on each hidden layer, except for the output layer.
            for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
            {
                teacher.LayerIndex = layerIndex;
                layerData          = teacher.GetLayerInput(batches);
                for (int i = 0; i < 200; i++)
                {
                    double error = teacher.RunEpoch(layerData) / inputs.Length;
                    if (i % 10 == 0)
                    {
                        Console.WriteLine(i + ", Error = " + error);
                    }
                }
            }


            // Supervised learning on entire network, to provide output classification.
            var teacher2 = new BackPropagationLearning(network)
            {
                LearningRate = 0.1,
                Momentum     = 0.5
            };

            // Run supervised learning.
            for (int i = 0; i < n; i++)
            {
                double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
                if (i % 10 == 0)
                {
                    Console.WriteLine(i + ", Error = " + error);
                }
            }

            // Test the resulting accuracy.
            int correct = 0;

            for (int i = 0; i < testInputs.Length; i++)
            {
                double[] outputValues = network.Compute(testInputs[i]);
                if (Compare(outputValues, testOutputs[i]))
                {
                    correct++;
                }
            }
            network.Save("deeplearning-countbits.net");
            Console.WriteLine("Correct " + correct + "/" + testInputs.Length + ", " + Math.Round(((double)correct / (double)testInputs.Length * 100), 2) + "%");
        }
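
The `Compare` helper used in the accuracy loop is not shown. A plausible sketch that treats the strongest output neuron as the predicted class (an assumption; an elementwise threshold test would be another reasonable reading):

        private static bool Compare(double[] actual, double[] expected)
        {
            // Hypothetical: correct when the index of the largest network output
            // matches the position of the 1 in the one-hot label vector.
            return(Array.IndexOf(actual, actual.Max()) == Array.IndexOf(expected, expected.Max()));
        }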
Example #12
        static double Neural_Network(bool show)
        {
            double       error      = 0;
            DataTable    entireData = DataController.MakeDataTable("../../drug_consumption.txt");
            Codification codebook   = new Codification(entireData);
            // Target columns available in the dataset: "Alcohol", "Amfet", "Amyl", "Benzos",
            // "Cofeine", "Cannabis", "Chocolate", "Coke", "Crac", "Ecstasy", "Heroine",
            // "Ketamine", "LegalH", "LSD", "Meth", "Mushrooms", "Nicotine", "Semeron", "VSA"
            string LookingFor = "Heroine";
            int    good       = 0;

            string[][] outputs;
            string[][] inputs = DataController.MakeString("../../drug_consumption_500.txt", out outputs);
            string[][] testOutputs;
            string[][] testInputs = DataController.MakeString("../../drug_consumption_500.txt", out testOutputs);

            DataTable outputs1     = DataController.MakeDataFromString(outputs, "output");
            DataTable inputs1      = DataController.MakeDataFromString(inputs, "input");
            DataTable testOutputs1 = DataController.MakeDataFromString(testOutputs, "output");
            DataTable testInputs1  = DataController.MakeDataFromString(testInputs, "input");

            DataTable Isymbols  = codebook.Apply(inputs1);
            DataTable Osymbols  = codebook.Apply(outputs1);
            DataTable TIsymbols = codebook.Apply(testInputs1);
            DataTable TOsymbols = codebook.Apply(testOutputs1);

            double[][] inputsD  = Isymbols.ToJagged <double>("Age", "Gender", "Education", "Country", "Eticnity", "Nscore", "Escore", "Oscore", "Ascore", "Cscore", "Impulsive", "SS");
            double[][] outputsD = Osymbols.ToJagged <double>(LookingFor);
            outputsD = DataController.convertDT(outputsD);
            double[][] inputsT  = TIsymbols.ToJagged <double>("Age", "Gender", "Education", "Country", "Eticnity", "Nscore", "Escore", "Oscore", "Ascore", "Cscore", "Impulsive", "SS");
            double[][] outputsT = TOsymbols.ToJagged <double>(LookingFor);
            outputsT = DataController.convertDT(outputsT);

            DeepBeliefNetwork network = new DeepBeliefNetwork(inputs.First().Length, 10, 7);

            new GaussianWeights(network, 0.1).Randomize();
            network.UpdateVisibleWeights();
            DeepBeliefNetworkLearning FirstLearner = new DeepBeliefNetworkLearning(network)
            {
                Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
                {
                    LearningRate = 0.1,
                    Momentum     = 0.5,
                    Decay        = 0.001,
                }
            };

            int batchCount = Math.Max(1, inputs.Length / 100);

            int[]        groupsNew  = Accord.Statistics.Classes.Random(inputsD.Length, batchCount);
            double[][][] batchesNew = Accord.Statistics.Classes.Separate(inputsD, groupsNew);
            double[][][] layerData;

            for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
            {
                FirstLearner.LayerIndex = layerIndex;
                layerData = FirstLearner.GetLayerInput(batchesNew);
                for (int i = 0; i < 500; i++)
                {
                    error = FirstLearner.RunEpoch(layerData) / inputsD.Length;
                    if (i % 10 == 0 && show == true)
                    {
                        Console.WriteLine("Error value(" + LookingFor + ", test: " + i + ") = " + error);
                    }
                }
            }

            var SecondLearner = new BackPropagationLearning(network)
            {
                LearningRate = 0.15,
                Momentum     = 0.7
            };
            EvolutionaryLearning teacher = new EvolutionaryLearning(network, 100);

            for (int i = 0; i < 800; i++)
            {
                error = teacher.RunEpoch(inputsD, outputsD) / inputsD.Length;
                if (i % 50 == 0 && show == true)
                {
                    Console.WriteLine("Error value(" + LookingFor + ", test: " + i + ") = " + error);
                }
            }

            for (int i = 0; i < 800; i++)
            {
                error = SecondLearner.RunEpoch(inputsD, outputsD) / inputsD.Length;
                if (i % 10 == 0 && show == true)
                {
                    Console.WriteLine("Error value(" + LookingFor + ", test: " + i + ") = " + error);
                }
            }

            for (int i = 0; i < inputsD.Length; i++)
            {
                double[] outputValues = network.Compute(inputsT[i]);
                if (outputValues.ToList().IndexOf(outputValues.Max()) == outputsT[i].ToList().IndexOf(outputsT[i].Max()))
                {
                    good++;
                }
            }
            if (show == true)
            {
                Console.WriteLine("Poprawność - " + Math.Round(((double)good / (double)inputsD.Length * 100), 4) + "%");
                Console.ReadKey();
            }

            return(error);
        }
Example #13
        static void Main(string[] args)
        {
#if Cluster
            // output file
            List <string> outputLines = new List <string>();

            DateTime timeStart = DateTime.Now;
            // Some example documents.
            string[] documents = new GetTweets().GetTweetsFromExcelFile("Train_NN.xlsx");

            // Apply TF*IDF to the documents and get the resulting vectors.
            double[][] inputs = TFIDF.Transform(documents, 0);
            Console.WriteLine("time to transformation " + (DateTime.Now - timeStart));
            outputLines.Add("time to transformation " + (DateTime.Now - timeStart));
            Console.WriteLine("TFIDF transformation done...");

            inputs = TFIDF.Normalize(inputs);
            Console.WriteLine("time to Normalization " + (DateTime.Now - timeStart));
            outputLines.Add("time to Normalization " + (DateTime.Now - timeStart));
            Console.WriteLine("TFIDF Normalization done...");
            //inputs = Accord.Math.Norm.Norm2(inputs);

            string[] topics = TFIDF.Topics(documents, 5);
            Console.WriteLine("time to topics " + (DateTime.Now - timeStart));
            outputLines.Add("time to topics " + (DateTime.Now - timeStart));
            Console.WriteLine("Topics gathered...");

            //Random random = new Random();
            //double[][] rand = new double[inputs.Length][];

            //for (int i = 0; i < inputs.Length; i++)
            //{

            //    rand[i] = new double[inputs[i].Length];
            //    for (int j = 0; j < inputs[i].Length; j++)
            //    {

            //        rand[i][j] = random.NextDouble();
            //    }
            //}
            //Console.WriteLine("time to generate random numbers " + (DateTime.Now - timeStart));
            //outputLines.Add("time to topics " + (DateTime.Now - timeStart));
            //Console.WriteLine("Randoms generated...");

            KMeans cluster = new KMeans(topics.Length, Distance.Cosine);

            //cluster.MaxIterations = 1;
            //cluster.Randomize(rand);
            int[] index = cluster.Compute(inputs);
            Console.WriteLine("time to cluster " + (DateTime.Now - timeStart));
            outputLines.Add("time to cluster " + (DateTime.Now - timeStart));
            Console.WriteLine("Clustering done...");
            //Accord.Statistics.Analysis.PrincipalComponentAnalysis pca = new Accord.Statistics.Analysis.PrincipalComponentAnalysis(inputs, Accord.Statistics.Analysis.AnalysisMethod.Center);
            //pca.Compute();
            //double[][] newinput = pca.Transform(inputs, 2);

            //ScatterplotBox.Show("KMeans Clustering of Tweets", newinput, index).Hold();



            for (int i = 0; i < topics.Length; i++)
            {
                outputLines.Add(Convert.ToString(i + 1));
                List <string> topicDecider = new List <string>();
                string[]      topicString;

                int j = 0;
                foreach (int x in index)
                {
                    if (x == i)
                    {
                        topicDecider.Add(documents[j]);
                    }
                    j++;
                }

                topicString = TFIDF.Topics(topicDecider.ToArray(), topicDecider.Count / 2);

                if (topicString.Length == 0)
                {
                    outputLines.Add("--------------------------------------------------------");
                    outputLines.Add("TOPIC: other");
                    outputLines.Add("--------------------------------------------------------");
                }
                else
                {
                    outputLines.Add("--------------------------------------------------------");
                    outputLines.Add("TOPIC: " + topicString[0]);
                    outputLines.Add("--------------------------------------------------------");
                }

                j = 0;
                foreach (int x in index)
                {
                    if (x == i)
                    {
                        outputLines.Add("Tweet ID " + j + ":\t" + documents[j]);
                    }
                    j++;
                }
                outputLines.Add("");
                outputLines.Add("");
                outputLines.Add("");
                outputLines.Add("");
            }

            System.IO.File.WriteAllLines(@"Train_NN_2.txt", outputLines.ToArray());
            Console.WriteLine("Output is written...");
#else
            // output file
            List <string> outputLines = new List <string>();

            DateTime timeStart = DateTime.Now;
            // Some example documents.
            string[]   documents_Train = new GetTweets().GetTweetsFromExcelFile("Train_NN.xlsx");
            double[][] Train_Labels    = new GetTweets().GetLabelsFromExcelFile("Train_Labels.xlsx");

            // Apply TF*IDF to the documents and get the resulting vectors.
            double[][] inputs = TFIDF.Transform(documents_Train, 0);
            Console.WriteLine("time to transformation " + (DateTime.Now - timeStart));
            outputLines.Add("time to transformation " + (DateTime.Now - timeStart));
            Console.WriteLine("TFIDF transformation done...");

            inputs = TFIDF.Normalize(inputs);
            Console.WriteLine("time to Normalization " + (DateTime.Now - timeStart));
            outputLines.Add("time to Normalization " + (DateTime.Now - timeStart));
            Console.WriteLine("TFIDF Normalization done...");


            //double[][] inputs;
            double[][] train_input = new double[140][];
            double[][] outputs;
            double[][] testInputs  = new double[1000 - 140][];
            double[][] testOutputs = new double[1000 - 140][];

            for (int i = 0; i < 140; i++)
            {
                train_input[i] = new double[inputs[i].Length];
                for (int j = 0; j < inputs[i].Length; j++)
                {
                    train_input[i][j] = inputs[i][j];
                }
            }

            for (int i = 0; i < 1000 - 140; i++)
            {
                // Test rows are the ones after the first 140 training rows.
                testInputs[i] = new double[inputs[i + 140].Length];
                for (int j = 0; j < inputs[i + 140].Length; j++)
                {
                    testInputs[i][j] = inputs[i + 140][j];
                }
            }


            // The first 500 data rows will be for training. The rest will be for testing.
            //testInputs = inputs.Skip(500).ToArray();
            //testOutputs = outputs.Skip(500).ToArray();
            //inputs = inputs.Take(500).ToArray();
            //outputs = outputs.Take(500).ToArray();

            // Setup the deep belief network and initialize with random weights.
            DeepBeliefNetwork network = new DeepBeliefNetwork(train_input.First().Length, 7);
            new GaussianWeights(network, 0.1).Randomize();
            network.UpdateVisibleWeights();

            // Setup the learning algorithm.
            DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network)
            {
                Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
                {
                    LearningRate = 0.1,
                    Momentum     = 0.5,
                    Decay        = 0.001,
                }
            };

            // Setup batches of input for learning.
            int batchCount = Math.Max(1, train_input.Length / 100);
            // Create mini-batches to speed learning.
            int[]        groups  = Accord.Statistics.Tools.RandomGroups(train_input.Length, batchCount);
            double[][][] batches = train_input.Subgroups(groups);
            // Learning data for the specified layer.
            double[][][] layerData;

            // Unsupervised learning on each hidden layer, except for the output layer.
            for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
            {
                teacher.LayerIndex = layerIndex;
                layerData          = teacher.GetLayerInput(batches);
                for (int i = 0; i < 200; i++)
                {
                    double error = teacher.RunEpoch(layerData) / train_input.Length;
                    if (i % 10 == 0)
                    {
                        Console.WriteLine(i + ", Error = " + error);
                    }
                }
            }

            // Supervised learning on entire network, to provide output classification.
            var teacher2 = new BackPropagationLearning(network)
            {
                LearningRate = 0.1,
                Momentum     = 0.5
            };

            //Transpose
            double[][] Train_Labels_T = new double[140][];
            for (int i = 0; i < 140; i++)
            {
                Train_Labels_T[i] = new double[7];
                for (int j = 0; j < 7; j++)
                {
                    Train_Labels_T[i][j] = Train_Labels[j][i];
                }
            }

            // Run supervised learning.
            for (int i = 0; i < 500; i++)
            {
                double error = teacher2.RunEpoch(train_input, Train_Labels_T) / train_input.Length;
                if (i % 10 == 0)
                {
                    Console.WriteLine(i + ", Error = " + error);
                }
            }
            outputLines.Add("time to Training " + (DateTime.Now - timeStart));
            // Test the resulting accuracy.
            double[][] outputValues = new double[testInputs.Length][];
            for (int i = 0; i < testInputs.Length; i++)
            {
                outputValues[i] = network.Compute(testInputs[i]);
            }
            outputLines.Add("time to Testing/clustering " + (DateTime.Now - timeStart));
            outputLines.Add("");
            outputLines.Add("");
            outputLines.Add("");

            List <string> class1 = new List <string>();
            List <string> class2 = new List <string>();
            List <string> class3 = new List <string>();
            List <string> class4 = new List <string>();
            List <string> class5 = new List <string>();
            List <string> class6 = new List <string>();
            List <string> class7 = new List <string>();

            //creating output file
            for (int i = 0; i < documents_Train.Length; i++)
            {
                if (i < 10 && i > -1)
                {
                    if (i == 0)
                    {
                        class1.Add("-------------------------------");
                        class1.Add("TOPIC: WEATHER");
                        class1.Add("-------------------------------");
                    }
                    class1.Add("Training_Tweet:\t" + documents_Train[i]);
                }
                if (i < 20 && i > 9)
                {
                    if (i == 10)
                    {
                        class2.Add("-------------------------------");
                        class2.Add("TOPIC: MUSIC");
                        class2.Add("-------------------------------");
                    }
                    class2.Add("Training_Tweet:\t" + documents_Train[i]);
                }
                if (i < 30 && i > 19)
                {
                    if (i == 20)
                    {
                        class3.Add("-------------------------------");
                        class3.Add("TOPIC: ITALY");
                        class3.Add("-------------------------------");
                    }
                    class3.Add("Training_Tweet:\t" + documents_Train[i]);
                }
                if (i < 40 && i > 29)
                {
                    if (i == 30)
                    {
                        class4.Add("-------------------------------");
                        class4.Add("TOPIC: FOOD");
                        class4.Add("-------------------------------");
                    }
                    class4.Add("Training_Tweet:\t" + documents_Train[i]);
                }
                if (i < 50 && i > 39)
                {
                    if (i == 40)
                    {
                        class5.Add("-------------------------------");
                        class5.Add("TOPIC: FASHION");
                        class5.Add("-------------------------------");
                    }
                    class5.Add("Training_Tweet:\t" + documents_Train[i]);
                }
                if (i < 60 && i > 49)
                {
                    if (i == 50)
                    {
                        class6.Add("-------------------------------");
                        class6.Add("TOPIC: FOOTBALL");
                        class6.Add("-------------------------------");
                    }
                    class6.Add("Training_Tweet:\t" + documents_Train[i]);
                }
                if (i < 140 && i > 59)
                {
                    if (i == 60)
                    {
                        class7.Add("-------------------------------");
                        class7.Add("TOPIC: OTHER");
                        class7.Add("-------------------------------");
                    }
                    class7.Add("Training_Tweet:\t" + documents_Train[i]);
                }
                if (i >= 140)
                {
                    int what;
                    what = outputValues[i - 140].IndexOf(outputValues[i - 140].Max());
                    switch (what)
                    {
                    case 0:
                        class1.Add("Test_Tweet:\t" + documents_Train[i]);
                        break;

                    case 1:
                        class2.Add("Test_Tweet:\t" + documents_Train[i]);
                        break;

                    case 2:
                        class3.Add("Test_Tweet:\t" + documents_Train[i]);
                        break;

                    case 3:
                        class4.Add("Test_Tweet:\t" + documents_Train[i]);
                        break;

                    case 4:
                        class5.Add("Test_Tweet:\t" + documents_Train[i]);
                        break;

                    case 5:
                        class6.Add("Test_Tweet:\t" + documents_Train[i]);
                        break;

                    case 6:
                        class7.Add("Test_Tweet:\t" + documents_Train[i]);
                        break;
                    }
                }
            }

            outputLines.Add("");
            outputLines.Add("");
            outputLines.Add("");
            outputLines.AddRange(class1);
            outputLines.Add("");
            outputLines.Add("");
            outputLines.Add("");
            outputLines.AddRange(class2);
            outputLines.Add("");
            outputLines.Add("");
            outputLines.Add("");
            outputLines.AddRange(class3);
            outputLines.Add("");
            outputLines.Add("");
            outputLines.Add("");
            outputLines.AddRange(class4);
            outputLines.Add("");
            outputLines.Add("");
            outputLines.Add("");
            outputLines.AddRange(class5);
            outputLines.Add("");
            outputLines.Add("");
            outputLines.Add("");
            outputLines.AddRange(class6);
            outputLines.Add("");
            outputLines.Add("");
            outputLines.Add("");
            outputLines.AddRange(class7);
            outputLines.Add("");
            outputLines.Add("");
            outputLines.Add("");


            System.IO.File.WriteAllLines(@"Train_NN_With_Test_2.txt", outputLines.ToArray());

            Console.Write("Press any key to quit ..");
#endif

            Console.ReadKey();
        }
Example #14
        //Start learning
        public bool Run()
        {
            bool IsDone = false;

            try
            {
                FlowDatas db = new FlowDatas();
                (double[][] Inputs, double[][] Outputs)
                    = DeepLearningTools.FlowSampleToLearningData(db.FlowSampleStatistics.Where(c => c.BehaviorNumber != 0).ToArray());
                db.Dispose();
                //Create the DBN network
                DBNetwork = new DeepBeliefNetwork(Inputs.First().Length,
                                                  (int)((Inputs.First().Length + Outputs.First().Length) / 1.5),
                                                  (int)((Inputs.First().Length + Outputs.First().Length) / 2),
                                                  Outputs.First().Length);
                //Randomly shuffle all the network parameters
                new GaussianWeights(DBNetwork, 0.1).Randomize();
                DBNetwork.UpdateVisibleWeights();
                //Configure unsupervised learning
                DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(DBNetwork)
                {
                    Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
                    {
                        LearningRate = 0.01,
                        Momentum     = 0.5,
                        Decay        = 0.001,
                    }
                };

                //Setup batches of input for learning.
                int batchCount1 = Math.Max(1, Inputs.Length / 10);
                //Create mini-batches to speed learning.
                int[] groups1
                    = Accord.Statistics.Classes.Random(Inputs.Length, batchCount1);
                double[][][] batches = Inputs.Subgroups(groups1);
                //Learning data for the specified layer.
                double[][][] layerData;
                //Run unsupervised learning.
                for (int layerIndex = 0; layerIndex < DBNetwork.Machines.Count - 1; layerIndex++)
                {
                    teacher.LayerIndex = layerIndex;
                    layerData          = teacher.GetLayerInput(batches);
                    for (int i = 0; i < 200; i++)
                    {
                        double error = teacher.RunEpoch(layerData) / Inputs.Length;
                        if (i % 10 == 0)
                        {
                            Console.WriteLine(i + ", Error = " + error);
                        }
                    }
                }

                //Supervised learning on the entire network, to provide output classification.
                var teacher2 = new ParallelResilientBackpropagationLearning(DBNetwork);

                double error1 = double.MaxValue;

                //Run supervised learning, saving a checkpoint after every epoch.
                for (int i = 0; i < 500; i++)
                {
                    error1 = teacher2.RunEpoch(Inputs, Outputs) / Inputs.Length;
                    Console.WriteLine(i + ", Error = " + error1);

                    DBNetwork.Save(Path);
                    Console.WriteLine("Save Done");
                }

                DBNetwork.Save(Path);
                Console.WriteLine("Save Done");

                IsDone = true;
            }
            catch (Exception ex)
            {
                Debug.Write(ex.ToString());
            }

            return(IsDone);
        }
Example #15
        private void DoTraining(string binaryFilePath, IOutputWriter writer)
        {
            double[][] inputs;
            double[][] outputs;
            // Load the dataset.
            inputs = DataManager.Load(binaryFilePath, out outputs);

            // Setup the deep belief network and initialize with random weights.
            if (network == null)
            {
                network = DeepLearningNetworkBuilder.CreateAndGetNetwork(inputs.First().Length);
            }


            // Setup the learning algorithm.
            DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network)
            {
                Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
                {
                    LearningRate = 0.1,
                    Momentum     = 0.5,
                    Decay        = 0.001,
                }
            };


            // Setup batches of input for learning.
            // int batchnumber = (int)totalCount / 10;
            int batchCount = Math.Max(1, inputs.Length / 5);

            // Create mini-batches to speed learning.
            int[]        groups  = Accord.Statistics.Classes.Random(inputs.Length, batchCount);
            double[][][] batches = inputs.Subgroups(groups);
            // Learning data for the specified layer.
            double[][][] layerData;

            // Unsupervised learning on each hidden layer, except for the output layer.
            for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
            {
                teacher.LayerIndex = layerIndex;
                layerData          = teacher.GetLayerInput(batches);

                for (int i = 0; i < (inputs.Length / 2); i++)
                {
                    double error = teacher.RunEpoch(layerData) / inputs.Length;

                    writer?.WriteOutput(i + ", Error = " + error);
                }
            }

            var teacher2 = new BackPropagationLearning(network)
            {
                LearningRate = 0.1,
                Momentum     = 0.5
            };

            // Run supervised learning.
            for (int i = 0; i < inputs.Length; i++)
            {
                double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
                writer?.WriteOutput(i + ", Error = " + error);
            }
        }
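
`DeepLearningNetworkBuilder.CreateAndGetNetwork` is not shown either. A plausible sketch following the construction pattern used throughout these examples; the Bernoulli activation and the 10/10 hidden layer sizes are placeholders, not the project's actual topology:

        public static class DeepLearningNetworkBuilder
        {
            public static DeepBeliefNetwork CreateAndGetNetwork(int inputsCount)
            {
                // Hypothetical builder: layer sizes here are illustrative only.
                var network = new DeepBeliefNetwork(new BernoulliFunction(), inputsCount, 10, 10);
                new GaussianWeights(network, 0.1).Randomize();
                network.UpdateVisibleWeights();
                return(network);
            }
        }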
Example #16
        private static void Learn(string networkFile, double[][] inputs, double[][] outputs, double trainRate = 0.8)
        {
            var count          = inputs.Length;
            var n              = (int)(count * trainRate);
            var trainedInputs  = inputs.Take(n).ToArray();
            var trainedOutputs = outputs.Take(n).ToArray();
            var testInputs     = inputs.Skip(n).ToArray();
            var testOutputs    = outputs.Skip(n).ToArray();

            Console.WriteLine($"trained items: {trainedInputs.Length}, tested items: {testInputs.Length}");

            var network = new DeepBeliefNetwork(trainedInputs.First().Length, 10, trainedOutputs.First().Length);

            new GaussianWeights(network, 0.1).Randomize();
            network.UpdateVisibleWeights();

            // Setup the learning algorithm.
            var teacher = new DeepBeliefNetworkLearning(network)
            {
                Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
                {
                    LearningRate = 0.1,
                    Momentum     = 0.5,
                    Decay        = 0.001,
                }
            };
            // Setup batches of input for learning.
            int batchCount = Math.Max(1, trainedInputs.Length / 100);

            // Create mini-batches to speed learning.
            int[]        groups  = Classes.Random(trainedInputs.Length, batchCount);
            double[][][] batches = trainedInputs.Subgroups(groups);
            // Learning data for the specified layer.
            double[][][] layerData;

            // Unsupervised learning on each hidden layer, except for the output layer.
            for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
            {
                teacher.LayerIndex = layerIndex;
                layerData          = teacher.GetLayerInput(batches);
                for (int i = 0; i < 200; i++)
                {
                    double error = teacher.RunEpoch(layerData) / trainedInputs.Length;
                    if (i % 10 == 0)
                    {
                        Console.WriteLine(i + ", Error = " + error);
                    }
                }
            }


            // Supervised learning on entire network, to provide output classification.
            var teacher2 = new BackPropagationLearning(network)
            {
                LearningRate = 0.1,
                Momentum     = 0.5
            };

            // Run supervised learning.
            for (int i = 0; i < Math.Min(2000, n); i++)
            {
                double error = teacher2.RunEpoch(trainedInputs, trainedOutputs) / trainedInputs.Length;
                if (i % 10 == 0)
                {
                    Console.WriteLine(i + ", Error = " + error);
                }
            }
            network.Save(networkFile);
            Console.WriteLine($"save network: {networkFile}");

            // Test the resulting accuracy.
            Test(networkFile, testInputs, testOutputs);
        }
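
The `Test` helper called on the last line is not shown. A minimal sketch, assuming it reloads the saved network and scores exact argmax matches, in the style of the accuracy loops in Examples #9 and #11:

        private static void Test(string networkFile, double[][] testInputs, double[][] testOutputs)
        {
            // Hypothetical sketch: reload the persisted network and count how often
            // the strongest output neuron matches the position of the label's maximum.
            var network = DeepBeliefNetwork.Load(networkFile);
            int correct = 0;

            for (int i = 0; i < testInputs.Length; i++)
            {
                double[] outputValues = network.Compute(testInputs[i]);
                if (Array.IndexOf(outputValues, outputValues.Max()) ==
                    Array.IndexOf(testOutputs[i], testOutputs[i].Max()))
                {
                    correct++;
                }
            }

            Console.WriteLine($"correct: {correct}/{testInputs.Length}, " +
                              $"{Math.Round((double)correct / testInputs.Length * 100, 2)}%");
        }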
Example #17
        public static void test()
        {
            //double[][] inputs;
            //double[][] outputs;
            //double[][] testInputs;
            //double[][] testOutputs;

            //// Load ascii digits dataset.
            //inputs = DataManager.Load(@"../../../data/data.txt", out outputs);

            //// The first 500 data rows will be for training. The rest will be for testing.
            //testInputs = inputs.Skip(500).ToArray();
            //testOutputs = outputs.Skip(500).ToArray();
            //inputs = inputs.Take(500).ToArray();
            //outputs = outputs.Take(500).ToArray();
            //double[][] inputs = new double[4][] {
            //    new double[] {0, 0}, new double[] {0, 1},
            //    new double[] {1, 0}, new double[] {1, 1}
            //};
            //double[][] outputs = new double[4][] {
            //    new double[] {1, 0}, new double[] {0, 1},
            //    new double[] {0, 1}, new double[] {1, 0}
            //};

            double[][] inputs =
            {
                //               input         output
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 0, 0, 1, 0 }, //  0
                new double[] { 0, 1, 1, 0 }, //  0
                new double[] { 0, 1, 0, 0 }, //  0
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 0 }, //  1
                new double[] { 1, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 0, 0, 0, 1 }, //  1
                new double[] { 1, 1, 1, 1 }, //  2
                new double[] { 1, 0, 1, 1 }, //  2
                new double[] { 1, 1, 0, 1 }, //  2
                new double[] { 0, 1, 1, 1 }, //  2
                new double[] { 1, 1, 1, 1 }, //  2
            };

            double[][] outputs = // those are the class labels
            {
                new double[] { 1, 0, 0 },
                new double[] { 1, 0, 0 },
                new double[] { 1, 0, 0 },
                new double[] { 1, 0, 0 },
                new double[] { 1, 0, 0 },
                new double[] { 0, 1, 0 },
                new double[] { 0, 1, 0 },
                new double[] { 0, 1, 0 },
                new double[] { 0, 1, 0 },
                new double[] { 0, 1, 0 },
                new double[] { 0, 0, 1 },
                new double[] { 0, 0, 1 },
                new double[] { 0, 0, 1 },
                new double[] { 0, 0, 1 },
                new double[] { 0, 0, 1 },
            };


            // Setup the deep belief network and initialize with random weights.
            Console.WriteLine(inputs.First().Length);
            DeepBeliefNetwork network = new DeepBeliefNetwork(inputs.First().Length, 2, outputs.First().Length);

            new GaussianWeights(network, 0.1).Randomize();
            network.UpdateVisibleWeights();

            // Setup the learning algorithm.
            DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network)
            {
                Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
                {
                    LearningRate = 0.1,
                    Momentum     = 0.5,
                    Decay        = 0.001,
                }
            };

            // Setup batches of input for learning.
            int batchCount = Math.Max(1, inputs.Length / 100);

            // Create mini-batches to speed learning.
            int[]        groups  = Accord.Statistics.Tools.RandomGroups(inputs.Length, batchCount);
            double[][][] batches = inputs.Subgroups(groups);
            // Learning data for the specified layer.
            double[][][] layerData;

            // Unsupervised learning on each hidden layer, except for the output layer.
            for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
            {
                teacher.LayerIndex = layerIndex;
                layerData          = teacher.GetLayerInput(batches);
                for (int i = 0; i < 50000; i++)
                {
                    double error = teacher.RunEpoch(layerData) / inputs.Length;
                    //if (i % 10 == 0)
                    //{
                    //    Console.WriteLine(i + ", Error = " + error);
                    //}
                }
            }

            // Supervised learning on entire network, to provide output classification.
            var teacher2 = new Accord.Neuro.Learning.BackPropagationLearning(network)
            {
                LearningRate = 0.1,
                Momentum     = 0.5
            };

            // Run supervised learning.
            for (int i = 0; i < 50000; i++)
            {
                double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
                //if (i % 10 == 0)
                //{
                //    Console.WriteLine(i + ", Error = " + error);
                //}
            }

            // Test the resulting accuracy.
            //int correct = 0;
            //for (int i = 0; i < inputs.Length; i++)
            //{
            //    double[] outputValues = network.Compute(testInputs[i]);
            //    if (DataManager.FormatOutputResult(outputValues) == DataManager.FormatOutputResult(testOutputs[i]))
            //    {
            //        correct++;
            //    }
            //}

            //Console.WriteLine("Correct " + correct + "/" + inputs.Length + ", " + Math.Round(((double)correct / (double)inputs.Length * 100), 2) + "%");

            //double[] probs = network.GenerateOutput(new double[] { 0, 0 });
            //foreach (double p in probs)
            //{
            //    Console.Write(p + ", ");
            //}
            for (int i = 0; i < inputs.Length; i++)
            {
                double[] output = network.Compute(inputs[i]);
                int imax;
                output.Max(out imax);
                Console.Write(imax + " -- ");
                foreach (double p in output)
                {
                    Console.Write(p + ", ");
                }
                Console.WriteLine("\n------------------");
            }
        }
Example #18
        public static void Excute2()
        {
            double[][] inputs;
            double[][] outputs;
            double[][] testInputs;
            double[][] testOutputs;

            // Load ascii digits dataset.
            inputs = DataManager.Load(@"data.txt", out outputs);

            // The first 500 data rows will be for training. The rest will be for testing.
            testInputs  = inputs.Skip(500).ToArray();
            testOutputs = outputs.Skip(500).ToArray();
            inputs      = inputs.Take(500).ToArray();
            outputs     = outputs.Take(500).ToArray();

            // Setup the deep belief network and initialize with random weights.
            DeepBeliefNetwork network = new DeepBeliefNetwork(inputs.First().Length, 10, 10); // input count = Length; 10 hidden neurons in each of the two layers

            new GaussianWeights(network, 0.1).Randomize();                                    // Gaussian weights take a standard deviation, typically in the 0.001-0.1 range; the default is 0.1.
            // Randomize draws the network's weights from a Gaussian distribution.
            network.UpdateVisibleWeights();                                                   // Updates the visible layer's weights by copying the transpose of the hidden layer's weights.

            // Setup the learning algorithm.
            DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network);

            // Configuration function that specifies and creates the learning
            // algorithm for each layer of the deep network.
            teacher.Algorithm = (h, v, i) => {
                return(new ContrastiveDivergenceLearning(h, v)
                {
                    LearningRate = 0.1, // learning rate
                    Momentum = 0.5,     // momentum
                    Decay = 0.001,      // weight decay
                });
            };

            // Setup batches of input for learning.
            int batchCount = System.Math.Max(1, inputs.Length / 100); // number of mini-batches

            // Create mini-batches to speed learning.
            int[]        groups  = Accord.Statistics.Classes.Random(inputs.Length, batchCount);
            double[][][] batches = inputs.Separate(groups);
            // Learning data for the specified layer.
            double[][][] layerData;

            // Unsupervised learning on each hidden layer, except for the output layer.除了输出层之外,在每个隐藏层上进行无监督学习。
            //network.Machines.Count 在这个深网络的每一层上得到受限制的玻尔兹曼机器。
            for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
            {
                teacher.LayerIndex = layerIndex;

                /*
                 * Gets the learning data needed to train the specified layer.
                 * The value returned by this call should be passed to
                 * teacher.RunEpoch(double[][][]) to run one learning epoch.
                 */
                layerData = teacher.GetLayerInput(batches);
                for (int i = 0; i < 200; i++) // 200 unsupervised epochs
                {
                    var    learningResult = teacher.RunEpoch(layerData);
                    double error          = learningResult / inputs.Length; // RunEpoch returns the summed learning error; divide by the sample count for the mean
                    if (i % 10 == 0)
                    {
                        Console.WriteLine(i + ", Error = " + error);
                    }
                }
            }

            // Supervised learning on the entire network, to provide output classification.
            var teacher2 = new Neuro.Learning.BackPropagationLearning(network)
            {
                LearningRate = 0.1, // learning rate
                Momentum     = 0.5  // momentum
            };

            // Run supervised learning.
            for (int i = 0; i < 500; i++) // 500 supervised epochs
            {
                double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
                if (i % 10 == 0)
                {
                    Console.WriteLine(i + ", Error = " + error);
                }
            }

            // Test the resulting accuracy.
            int correct = 0;

            for (int i = 0; i < testInputs.Length; i++)
            {
                // Compute the network answer for each held-out test sample.
                // (The original concatenated testInputs[i] and testInputs[i + 1],
                // which doubled the input length and indexed past the array.)
                double[] outputValues = network.Compute(testInputs[i]);
                if (DataManager.FormatOutputResult(outputValues) == DataManager.FormatOutputResult(testOutputs[i]))
                {
                    correct++;
                }
            }

            Console.WriteLine("Correct " + correct + "/" + inputs.Length + ", " + System.Math.Round(((double)correct / (double)inputs.Length * 100), 2) + "%");
            Console.Write("Press any key to quit ..");
            Console.ReadKey();
        }
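
For reference, the random grouping done by `Accord.Statistics.Classes.Random` and `Separate` above can also be sketched in plain LINQ. This is a hedged illustration of the idea, not the Accord implementation; `MakeBatches` and `rng` are assumed names, and `System` plus `System.Linq` are assumed to be imported:

        // Illustrative sketch: randomly partition samples into batchCount mini-batches.
        // Note that a batch may come out empty for small sample counts.
        static double[][][] MakeBatches(double[][] samples, int batchCount, Random rng)
        {
            // Assign each sample a random group index in [0, batchCount).
            int[] groups = samples.Select(_ => rng.Next(batchCount)).ToArray();

            // Collect the samples of each group into its own batch.
            return Enumerable.Range(0, batchCount)
                .Select(g => samples.Where((s, i) => groups[i] == g).ToArray())
                .ToArray();
        }
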
        private void train_Click(object sender, EventArgs e)
        {
            double[][] inputs;
            double[][] outputs;
            double[][] testInputs;
            double[][] testOutputs;
            GetData(out inputs, out outputs, out testInputs, out testOutputs);

            Stopwatch sw = Stopwatch.StartNew();

            // Setup the deep belief network and initialize with random weights.
            _network = new DeepBeliefNetwork(inputs.First().Length, LAYERS);
            new GaussianWeights(_network, 0.1).Randomize();
            _network.UpdateVisibleWeights();

            // Setup the learning algorithm.
            DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(_network)
            {
                Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
                {
                    LearningRate = 0.1,
                    Momentum     = 0.5,
                    Decay        = 0.001,
                }
            };

            // Setup batches of input for learning.
            int batchCount = Math.Max(1, inputs.Length / 100);

            // Create mini-batches to speed learning.
            int[]        groups  = Accord.Statistics.Tools.RandomGroups(inputs.Length, batchCount);
            double[][][] batches = inputs.Subgroups(groups);
            // Learning data for the specified layer.
            double[][][] layerData;

            // Unsupervised learning on each hidden layer, except for the output layer.
            for (int layerIndex = 0; layerIndex < _network.Machines.Count - 1; layerIndex++)
            {
                teacher.LayerIndex = layerIndex;
                layerData          = teacher.GetLayerInput(batches);
                for (int i = 0; i < UNSUPERVISED_EPOCHS; i++)
                {
                    double error = teacher.RunEpoch(layerData) / inputs.Length;
                    if (i % 10 == 0)
                    {
                        label1.Text = "Layer: " + layerIndex + " Epoch: " + i + ", Error: " + error;
                        label1.Refresh();
                    }
                }
            }

            // Supervised learning on entire network, to provide output classification.
            var teacher2 = new BackPropagationLearning(_network)
            {
                LearningRate = 0.1,
                Momentum     = 0.5
            };

            // Run supervised learning.
            for (int i = 0; i < SUPERVISED_EPOCHS; i++)
            {
                double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
                if (i % 10 == 0)
                {
                    label1.Text = "Supervised: " + i + ", Error = " + error;
                    label1.Refresh();
                }
            }

            // Test the resulting accuracy.
            label1.Text = "";
            int correct = 0;

            for (int i = 0; i < testInputs.Length; i++)
            {
                double[] outputValues = _network.Compute(testInputs[i]);
                int      y            = GetResult(outputValues);
                int      t            = GetResult(testOutputs[i]);
                label1.Text += "predicted: " + y + " actual: " + t + "\n";
                label1.Refresh();
                if (y == t)
                {
                    correct++;
                }
            }
            sw.Stop();

            label1.Text  = "Correct " + correct + "/" + testInputs.Length + ", " + Math.Round(((double)correct / (double)testInputs.Length * 100), 2) + "%";
            label1.Text += "\nElapsed train+test time: " + sw.Elapsed;
            label1.Refresh();
        }
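
Two notes on the handler above. `GetResult` is not shown in this snippet; a plausible implementation is an argmax over the output vector, like the `ArgMax` sketch earlier. Also, all of the training runs on the UI thread (hence the repeated `label1.Refresh()` calls), so the form freezes until training finishes. A hedged alternative sketch moves the work to a background task and reports progress through `IProgress<T>`; `TrainNetwork` is a hypothetical refactoring of the loops above, and `System.Threading.Tasks` is assumed to be imported:

        // Illustrative sketch (assumption, not the original code): keep the UI
        // responsive by training on a background task.
        private async void train_Click(object sender, EventArgs e)
        {
            // Progress<T> marshals Report calls back onto the UI thread.
            var progress = new Progress<string>(text => label1.Text = text);
            await Task.Run(() => TrainNetwork(progress));
        }

        // Hypothetical refactoring: the training loops above, with each
        // "label1.Text = ...; label1.Refresh();" pair replaced by progress.Report(...).
        private void TrainNetwork(IProgress<string> progress)
        {
            // ... unsupervised and supervised loops go here ...
        }
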
Example #20
0
        public void ExampleTest1()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // We'll use a simple XOR function as input.

            double[][] inputs =
            {
                new double[] { 0, 0 }, // 0 xor 0
                new double[] { 0, 1 }, // 0 xor 1
                new double[] { 1, 0 }, // 1 xor 0
                new double[] { 1, 1 }, // 1 xor 1
            };

            // XOR output, corresponding to the input.
            double[][] outputs =
            {
                new double[] { 0 }, // 0 xor 0 = 0
                new double[] { 1 }, // 0 xor 1 = 1
                new double[] { 1 }, // 1 xor 0 = 1
                new double[] { 0 }, // 1 xor 1 = 0
            };

            // Setup the deep belief network (2 inputs, 3 hidden, 1 output)
            DeepBeliefNetwork network = new DeepBeliefNetwork(2, 3, 1);

            // Initialize the network with Gaussian weights
            new GaussianWeights(network, 0.1).Randomize();

            // Update the visible layer with the new weights
            network.UpdateVisibleWeights();


            // Setup the learning algorithm.
            DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network)
            {
                Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
                {
                    LearningRate = 0.1,
                    Momentum     = 0.5,
                    Decay        = 0.001,
                }
            };



            // Unsupervised learning on each hidden layer, except for the output.
            for (int i = 0; i < network.Layers.Length - 1; i++)
            {
                teacher.LayerIndex = i;

                // Compute the learning data that should be used
                var layerInput = teacher.GetLayerInput(inputs);

                // Train the layer iteratively
                for (int j = 0; j < 5000; j++)
                {
                    teacher.RunEpoch(layerInput);
                }
            }



            // Supervised learning on entire network, to provide output classification.
            var backpropagation = new BackPropagationLearning(network)
            {
                LearningRate = 0.1,
                Momentum     = 0.5
            };

            // Run supervised learning.
            for (int i = 0; i < 5000; i++)
            {
                backpropagation.RunEpoch(inputs, outputs);
            }


            // Test the resulting accuracy.
            int correct = 0;

            for (int i = 0; i < inputs.Length; i++)
            {
                double[] outputValues = network.Compute(inputs[i]);
                double   outputResult = outputValues.First() >= 0.5 ? 1 : 0;

                if (outputResult == outputs[i].First())
                {
                    correct++;
                }
            }

            Assert.AreEqual(4, correct);
        }
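
Once the assertion passes, individual predictions can be read straight off the trained network; a minimal usage sketch, using the same 0.5 threshold as the test above:

            // Query the trained XOR network for a single input pair.
            double[] answer = network.Compute(new double[] { 1, 0 });
            Console.WriteLine(answer.First() >= 0.5 ? 1 : 0); // expected: 1
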
Example #21
0
        public override double Train(Data info, float lr, float mom)
        {
            // Setup the layer-wise trainer using contrastive divergence
            // (the original comment said backpropagation, but that phase comes later).
            DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(DeepAccordNetwork)
            {
                Algorithm = (h, v, _) => new ContrastiveDivergenceLearning(h, v)
                {
                    LearningRate = lr,
                    Momentum     = mom,
                    Decay        = 0.001,
                }
            };

            // Setup batches of input for learning.
            int batchCount = Math.Max(1, info.InputData.Length / 100);

            // Create mini-batches to speed learning.
            int[]        groups  = Accord.Statistics.Classes.Random(info.InputData.Length, batchCount); //Tools.RandomGroups(info.InputData.Length, batchCount);
            double[][][] batches = Accord.Statistics.Classes.Separate(info.InputData, groups);          //info.InputData.Subgroups(groups);
            // Learning data for the specified layer.
            double[][][] layerData;

            double error = double.PositiveInfinity;
            double lastError;

            // Unsupervised learning on each hidden layer, except for the output.
            for (int layerIndex = 0; layerIndex < DeepAccordNetwork.Machines.Count - 1; layerIndex++)
            {
                teacher.LayerIndex = layerIndex;
                layerData          = teacher.GetLayerInput(batches);

                // Reset the running error so one layer's converged error does
                // not end the next layer's loop prematurely.
                error = double.PositiveInfinity;

                do
                {
                    lastError = error;
                    int i = 0;

                    while (i < 100)
                    {
                        error = teacher.RunEpoch(layerData);
                        i++;
                    }
                } while (lastError - error > 0.0000001);
            }

            //Setup trainer using backpropagation.
            BackPropagationLearning teacher2 = new BackPropagationLearning(DeepAccordNetwork)
            {
                LearningRate = lr,
                Momentum     = mom
            };

            // Reset again before the supervised phase, for the same reason.
            error = double.PositiveInfinity;
            do
            {
                lastError = error;
                int i = 0;

                while (i < 1000)
                {
                    error = teacher2.RunEpoch(info.InputData, info.OutputData);
                    i++;
                }
            } while (lastError - error > 0.0000001);

            return(error);
        }
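
The two do/while loops above stop only when a 100-epoch (respectively 1000-epoch) chunk improves the summed error by less than 1e-7, so a slowly plateauing run can train for a long time. A hedged variant of the layer loop with an explicit chunk budget; `maxChunks` is an assumed safety cap, and `teacher`/`layerData` are the names from the method above:

            // Illustrative sketch: the same convergence loop, bounded by a chunk budget.
            double error = double.PositiveInfinity, lastError;
            int chunks = 0, maxChunks = 50; // assumed cap, not from the original code
            do
            {
                lastError = error;
                for (int i = 0; i < 100; i++)
                {
                    error = teacher.RunEpoch(layerData);
                }
                chunks++;
            } while (lastError - error > 0.0000001 && chunks < maxChunks);
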