private double UpdateMeanSquaredError(WaveData waveData)
{
    var input = waveData.WaveInputs.ToArray();
    var outputData = waveData.WaveScores.ToArray();

    var error = 0.0;
    var avmError = 0.0;

    for (var i = 0; i < outputData.Length; i++)
    {
        var prediction = network.Compute(input[i])[0];
        var av = i == 0 ? 0 : outputData.Take(i).Average(d => d);
        var actual = outputData[i];

        error += Math.Pow(prediction - actual, 2);
        avmError += Math.Pow(av - actual, 2);
    }

    error /= outputData.Length;
    avmError /= outputData.Length;

    LastSampleSize = outputData.Length;
    LastMSE = error;
    AVMMSE = avmError;

    return error;
}
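In the notation below (added for reference, not part of the original code), the two quantities this method tracks are the network's mean squared error and the same measure for a naive predictor that always outputs the running mean of the scores seen so far (the AVM baseline):

$$\mathrm{MSE} = \frac{1}{n}\sum_{i=0}^{n-1}\left(\hat{y}_i - y_i\right)^2, \qquad \mathrm{MSE}_{\mathrm{AVM}} = \frac{1}{n}\sum_{i=0}^{n-1}\left(\bar{y}_{<i} - y_i\right)^2,$$

where $\hat{y}_i$ is the network's prediction for sample $i$, $y_i$ is the observed score, and $\bar{y}_{<i}$ is the mean of all scores before sample $i$ (taken as 0 for $i = 0$). Comparing the two indicates whether the network beats the trivial running-mean predictor.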
/// <summary>
///   Gets the learning data needed to train the <see cref="LayerIndex">currently
///   selected layer</see>. The return of this function should then be passed to
///   <see cref="RunEpoch(double[][])"/> to actually run a learning epoch.
/// </summary>
///
/// <param name="batches">The mini-batches of input data.</param>
///
/// <returns>The learning data for the current layer.</returns>
///
public double[][][] GetLayerInput(double[][][] batches)
{
    if (layerIndex == 0)
    {
        return batches;
    }

    var outputBatches = new double[batches.Length][][];

    for (int j = 0; j < batches.Length; j++)
    {
        int batchSize = batches[j].Length;

        double[][] inputs = batches[j];
        double[][] outputs = new double[batchSize][];

        for (int i = 0; i < inputs.Length; i++)
        {
            // Propagate the input, then read the hidden activations
            // of the layer below the currently selected one.
            network.Compute(inputs[i]);
            outputs[i] = network.Machines[layerIndex - 1].Hidden.Output;
        }

        outputBatches[j] = outputs;
    }

    return outputBatches;
}
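A minimal usage sketch for the method above (hypothetical variable names; it assumes the Accord.NET-style DeepBeliefNetworkLearning teacher that the other snippets in this section use): each layer's training data comes from GetLayerInput and is passed straight to RunEpoch.

// Sketch only: greedy layer-wise pre-training. Assumes `network` is a
// DeepBeliefNetwork and `batches` holds the mini-batched inputs (double[][][]).
var teacher = new DeepBeliefNetworkLearning(network)
{
    Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
};

for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
{
    teacher.LayerIndex = layerIndex;

    // Inputs for this layer: the raw batches for layer 0, the previous
    // layer's hidden activations otherwise.
    double[][][] layerData = teacher.GetLayerInput(batches);

    for (int epoch = 0; epoch < 200; epoch++)
        teacher.RunEpoch(layerData);
}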
public void ComputeNetwork(double[][] input, double[][] output)
{
    int err = 0;

    for (int i = 0; i < input.Length; i++)
    {
        var networkOutput = _network.Compute(input[i]);

        // Count a miss whenever the arg-max of the prediction differs
        // from the arg-max of the expected output.
        if (Array.IndexOf(networkOutput, networkOutput.Max()) != Array.IndexOf(output[i], output[i].Max()))
        {
            err++;
        }
    }

    Console.WriteLine($"Total:{input.Length} \n Err:{err} \n Err(%) ={((double)err / (double)input.Length) * 100}%");
}
public int DoTesting(DeepBeliefNetwork network, string[] images, IOutputWriter writer)
{
    int totalCount = images.Length;
    double[][] testInputs;
    double[][] testOutputs;

    string binaryFilePath = Path.Combine(ApplicationPath, TrainingFileName);

    // Binary file generation.
    ImageUtil util = new ImageUtil(ImageDimentionConstant.WIDTH, ImageDimentionConstant.HEIGHT);
    util.GenerateBinaryFile(binaryFilePath, images);

    // Load dataset.
    testInputs = DataManager.Load(binaryFilePath, out testOutputs);

    int correct = 0;
    for (int i = 0; i < totalCount; i++)
    {
        double[] outputValues = network.Compute(testInputs[i]);

        if (DataManager.FormatOutputResult(outputValues) == DataManager.FormatOutputResult(testOutputs[i]))
        {
            correct++;
        }
    }

    writer?.WriteOutput("Correct " + correct + "/" + totalCount + ", " +
        Math.Round(((double)correct / (double)totalCount * 100), 2) + "%");

    return correct;
}
public NeutralNetwork(double[][] input, double[][] output, double[][] testInput, double[][] testOutput)
{
    var network = new DeepBeliefNetwork(28 * 28, new int[] { 1000, 10 });
    new GaussianWeights(network).Randomize();
    network.UpdateVisibleWeights();

    var teacher = new DeepNeuralNetworkLearning(network)
    {
        Algorithm = (ann, i) => new ParallelResilientBackpropagationLearning(ann),
        LayerIndex = network.Machines.Count - 1,
    };

    var layerData = teacher.GetLayerInput(input);

    for (int i = 0; i < 5000; i++)
    {
        teacher.RunEpoch(layerData, output);
    }

    network.UpdateVisibleWeights();

    // Copy the first test sample into a fresh buffer and classify it.
    var inputArr = new double[28 * 28];
    for (int i = 0; i < 28 * 28; i++)
    {
        inputArr[i] = testInput[0][i];
    }

    var a = network.Compute(inputArr);
    Console.WriteLine(Array.IndexOf(a, a.Max()));
}
public void Compute()
{
    if (!CanCompute)
    {
        return;
    }

    double[] input = UserInput;
    DeepBeliefNetwork network = Main.Network;
    IDatabase database = Main.Database;

    database.Normalize(input);

    {
        double[] output = network.GenerateOutput(input);
        double[] reconstruction = network.Reconstruct(output);
        NetworkOutput = database.ToBitmap(reconstruction).ToBitmapImage();
    }

    if (Main.CanClassify)
    {
        double[] output = network.Compute(input);

        int imax;
        output.Max(out imax);

        Classification = imax;
    }
}
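A condensed sketch of the round trip above (hypothetical names; the semantics are an assumption based on the Accord.NET-style API these snippets appear to target, not stated in the code): GenerateOutput carries the input up to the deepest hidden layer, Reconstruct maps such a representation back to a visible-layer vector, and Compute is the plain feed-forward pass used for classification.

// Encode the input to a top-layer representation, then decode it back
// to a visible-layer reconstruction (assumed encode/decode semantics).
double[] code = network.GenerateOutput(input);
double[] reconstruction = network.Reconstruct(code);

// Deterministic feed-forward pass; Max(out imax) yields the arg-max index.
double[] scores = network.Compute(input);
int imax;
scores.Max(out imax);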
// Advanced analysis: determine which attack technique the observed behavior belongs to.
private static List<(string, int)> DeepAnalysis(DataFlowStatistics[] FlowStatistics)
{
    List<(double[], string)> Inputs = DeepLearningTools.FlowStatisticsToLearningData(FlowStatistics);
    List<(string, int)> Result = new List<(string, int)>();

    for (int i = 0; i < Inputs.Count; i++)
    {
        double[] outputValues = DBNetwork.Compute(Inputs[i].Item1);
        Result.Add((Inputs[i].Item2, Convert.ToInt32(DeepLearningTools.FormatOutputResult(outputValues))));
    }

    return Result;
}
public void predict(double[] test)
{
    var pre = network.Compute(test);

    int imax;
    pre.Max(out imax);
    Console.WriteLine("class : {0}", imax);

    foreach (var item in pre)
    {
        Console.WriteLine("{0}", item);
    }
}
private void Classify(object sender, EventArgs e)
{
    if (_imageToClassify == null)
    {
        label1.Text = "You didn't choose an image!\n";
        label1.Refresh();
        return;
    }

    double[] input;
    _itoa.Convert(_imageToClassify, out input);

    double[] output = _network.Compute(input);

    label1.Text = "Prediction: " + _categories[GetResult(output)];
    label1.Refresh();
}
public CategoryClassification ClassifyToCategory(double[] dataToClassify)
{
    var categories = _configuration.Categories;

    var output = _network.Compute(dataToClassify);
    var categoryIndex = GetIndexOfResult(output);
    var predictedCategory = categories.Single(x => x.Index == categoryIndex);

    _guiLogger.LogWriteLine($"Prediction: {predictedCategory}");

    var result = new CategoryClassification(predictedCategory, output.Max());
    return result;
}
public void CreateActivationNetworkTest()
{
    double[][] inputs =
    {
        new double[] { 1, 1, 1, 0, 0, 0 },
        new double[] { 1, 0, 1, 0, 0, 0 },
        new double[] { 1, 1, 1, 0, 0, 0 },
        new double[] { 0, 0, 1, 1, 1, 0 },
        new double[] { 0, 0, 1, 1, 0, 0 },
        new double[] { 0, 0, 1, 1, 1, 0 }
    };

    double[][] outputs =
    {
        new double[] { 0 },
        new double[] { 0 },
        new double[] { 0 },
        new double[] { 1 },
        new double[] { 1 },
        new double[] { 1 },
    };

    DeepBeliefNetwork network = createNetwork(inputs);

    ParallelResilientBackpropagationLearning teacher = new ParallelResilientBackpropagationLearning(network);

    for (int i = 0; i < 100; i++)
    {
        teacher.RunEpoch(inputs, outputs);
    }

    double[] actual = new double[outputs.Length];
    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = network.Compute(inputs[i])[0];
    }

    Assert.AreEqual(0, actual[0], 1e-10);
    Assert.AreEqual(0, actual[1], 1e-10);
    Assert.AreEqual(0, actual[2], 1e-10);
    Assert.AreEqual(1, actual[3], 1e-10);
    Assert.AreEqual(1, actual[4], 1e-10);
    Assert.AreEqual(1, actual[5], 1e-10);
}
/// <summary>
///   Evaluate the currently selected color.
/// </summary>
public void Button3_Click()
{
    double[] output = network.Compute(selectedColor);

    // Map the single bipolar output to a class label.
    string result = "";
    switch (System.Convert.ToInt32(output[0]))
    {
        case 1:
            result = "Warm Color";
            break;

        case -1:
            result = "Cold Color";
            break;
    }

    // Threshold-based alternative:
    //string result = output[0] >= 0 ? "Warm Color" : "Cold Color";

    label1.Text = result;
}
public double[] GenerateOutput(double[] inputs)
{
    return network.Compute(inputs);
}
static void Main(string[] args)
{
    // Generate the training data.
    int keySize = 64;
    int messageSize = 64;
    int trainingSetSize = 100;
    List<Triplet> trainingSet = GenerateDESDataset(trainingSetSize, keySize, messageSize);
    double[][] inputTraining, outputTraining;
    Triplet.Transform2IO(trainingSet, out inputTraining, out outputTraining);

    // Generate the test data.
    List<Triplet> testSet = GenerateDESDataset(trainingSetSize, keySize, messageSize);
    double[][] inputTest, outputTest;
    Triplet.Transform2IO(testSet, out inputTest, out outputTest);

    // Find the right sizes, not sure why I have to do that :-/
    int inputSize = trainingSet.First().original.Count() + trainingSet.First().encrypted.Count();
    int outputSize = trainingSet.First().key.Count();

    // Create a network.
    var function = new SigmoidFunction(2.0);
    //ActivationNetwork network = new ActivationNetwork(function, inputSize, 25, outputSize);
    //ParallelResilientBackpropagationLearning teacher = new ParallelResilientBackpropagationLearning(network);
    DeepBeliefNetwork network = new DeepBeliefNetwork(inputSize, 10, outputSize);
    Accord.Neuro.Learning.DeepNeuralNetworkLearning teacher = new DeepNeuralNetworkLearning(network);

    // Train the network.
    int epoch = 0;
    double stopError = 0.1;
    int resets = 0;
    double minimumErrorReached = double.PositiveInfinity;

    while (minimumErrorReached > stopError && resets < 1)
    {
        network.Randomize();
        //teacher.Reset(0.0125);

        double errorTrain = double.PositiveInfinity;
        for (epoch = 0; epoch < 500000 && errorTrain > stopError; epoch++)
        {
            errorTrain = teacher.RunEpoch(inputTraining, outputTraining) / (double)trainingSetSize;
            //Console.WriteLine("Epoch " + epoch + " = \t" + error);

            if (errorTrain < minimumErrorReached)
            {
                minimumErrorReached = errorTrain;
                network.Save("cryptoDESNetwork.mlp");
            }

            Console.Clear();
            Console.WriteLine("Epoch : " + epoch);
            Console.WriteLine("Train Set Error : " + errorTrain.ToString("N2"));

            double errorTest = teacher.ComputeError(inputTest, outputTest) / (double)inputTest.Count();
            Console.WriteLine("Test Set Error : " + errorTest.ToString("N2"));
        }

        //Console.Write("Reset (" + error + ")->");
        resets++;
    }
    Console.WriteLine();

    // Compute the real error.
    foreach (Triplet tReal in testSet)
    {
        double[] rIn, rOut, pOut;
        byte[] brMsg, brEncrypted, brKey;
        tReal.ToBytes(out brMsg, out brEncrypted, out brKey);
        tReal.ToIO(out rIn, out rOut);

        pOut = network.Compute(rIn);
        Triplet tPredicted = new Triplet(rIn, pOut, messageSize);
        byte[] bpMsg, bpEncrypted, bpKey;
        tPredicted.ToBytes(out bpMsg, out bpEncrypted, out bpKey);

        int wrongBytes = 0;
        for (int i = 0; i < keySize / 8; i++)
        {
            if (brKey[i] != bpKey[i])
            {
                wrongBytes++;
            }
        }

        Console.WriteLine("Wrong bytes = " + wrongBytes);
        //Console.WriteLine("REAL = \n" + tReal.GetBytesForm());
        //Console.WriteLine("Predicted = \n" + tPredicted.GetBytesForm());
    }

    Console.ReadKey();
}
public static void Learn(double[][] inputs, double[][] outputs)
{
    var n = (int)(count * 0.8);

    var testInputs = inputs.Skip(n).ToArray();
    var testOutputs = outputs.Skip(n).ToArray();
    inputs = inputs.Take(n).ToArray();
    outputs = outputs.Take(n).ToArray();

    var network = new DeepBeliefNetwork(inputs.First().Length, 10, 10);
    new GaussianWeights(network, 0.1).Randomize();
    network.UpdateVisibleWeights();

    // Setup the learning algorithm.
    var teacher = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    // Setup batches of input for learning.
    int batchCount = Math.Max(1, inputs.Length / 100);

    // Create mini-batches to speed learning.
    int[] groups = Classes.Random(inputs.Length, batchCount);
    double[][][] batches = inputs.Subgroups(groups);

    // Learning data for the specified layer.
    double[][][] layerData;

    // Unsupervised learning on each hidden layer, except for the output layer.
    for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
    {
        teacher.LayerIndex = layerIndex;
        layerData = teacher.GetLayerInput(batches);

        for (int i = 0; i < 200; i++)
        {
            double error = teacher.RunEpoch(layerData) / inputs.Length;
            if (i % 10 == 0)
            {
                Console.WriteLine(i + ", Error = " + error);
            }
        }
    }

    // Supervised learning on entire network, to provide output classification.
    var teacher2 = new BackPropagationLearning(network)
    {
        LearningRate = 0.1,
        Momentum = 0.5
    };

    // Run supervised learning.
    for (int i = 0; i < n; i++)
    {
        double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
        if (i % 10 == 0)
        {
            Console.WriteLine(i + ", Error = " + error);
        }
    }

    // Test the resulting accuracy.
    int correct = 0;
    for (int i = 0; i < testInputs.Length; i++)
    {
        double[] outputValues = network.Compute(testInputs[i]);
        if (Compare(outputValues, testOutputs[i]))
        {
            correct++;
        }
    }

    network.Save("deeplearning-countbits.net");

    Console.WriteLine("Correct " + correct + "/" + testInputs.Length + ", " +
        Math.Round(((double)correct / (double)testInputs.Length * 100), 2) + "%");
}
public override double[] propagateToEnd(double[] inputVec, double[] storage = null)
{
    return classifier.Compute(inputVec);
}
static void Main(string[] args)
{
    double[][] inputs;
    double[][] outputs;
    double[][] testInputs;
    double[][] testOutputs;

    const int SampleTrainingCount = 120;
    const int SampleTestCount = 30;

    // Load the Iris dataset.
    inputs = DataManager.LoadCSV(@"../../../data/iris.data", out outputs);
    //inputs = DataManager.Load(@"../../../data/data.txt", out outputs);

    // The first SampleTrainingCount data rows will be for training. The rest will be for testing.
    testInputs = inputs.Skip(SampleTrainingCount).ToArray();
    testOutputs = outputs.Skip(SampleTrainingCount).ToArray();
    inputs = inputs.Take(SampleTrainingCount).ToArray();
    outputs = outputs.Take(SampleTrainingCount).ToArray();

    // Setup the deep belief network and initialize with random weights.
    DeepBeliefNetwork network = new DeepBeliefNetwork(inputs.First().Length, 10, 1);
    new GaussianWeights(network, 0.1).Randomize();
    network.UpdateVisibleWeights();

    // Setup the learning algorithm.
    DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    // Setup batches of input for learning.
    int batchCount = Math.Max(1, inputs.Length / 100);

    // Create mini-batches to speed learning.
    int[] groups = Accord.Statistics.Tools.RandomGroups(inputs.Length, batchCount);
    double[][][] batches = inputs.Subgroups(groups);

    // Learning data for the specified layer.
    double[][][] layerData;

    // Unsupervised learning on each hidden layer, except for the output layer.
    for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
    {
        teacher.LayerIndex = layerIndex;
        layerData = teacher.GetLayerInput(batches);

        for (int i = 0; i < 200; i++)
        {
            double error = teacher.RunEpoch(layerData) / inputs.Length;
            if (i % 10 == 0)
            {
                Console.WriteLine(i + ", Error = " + error);
            }
        }
    }

    // Supervised learning on entire network, to provide output classification.
    var teacher2 = new BackPropagationLearning(network)
    {
        LearningRate = 0.1,
        Momentum = 0.5
    };

    // Run supervised learning.
    for (int i = 0; i < SampleTrainingCount; i++)
    {
        double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
        if (i % 10 == 0)
        {
            Console.WriteLine(i + ", Error = " + error);
        }
    }

    // Test the resulting accuracy on the SampleTestCount held-out rows.
    int correct = 0;
    for (int i = 0; i < SampleTestCount; i++)
    {
        double[] outputValues = network.Compute(testInputs[i]);
        if (DataManager.FormatOutputResult(outputValues) == DataManager.FormatOutputResult(testOutputs[i]))
        {
            correct++;
        }
    }

    Console.WriteLine("Correct " + correct + "/" + SampleTestCount + ", " +
        Math.Round(((double)correct / (double)SampleTestCount * 100), 2) + "%");
    Console.Write("Press any key to quit ..");
    Console.ReadKey();
}
private void train_Click(object sender, EventArgs e)
{
    double[][] inputs;
    double[][] outputs;
    double[][] testInputs;
    double[][] testOutputs;

    GetData(out inputs, out outputs, out testInputs, out testOutputs);

    Stopwatch sw = Stopwatch.StartNew();

    // Setup the deep belief network and initialize with random weights.
    _network = new DeepBeliefNetwork(inputs.First().Length, LAYERS);
    new GaussianWeights(_network, 0.1).Randomize();
    _network.UpdateVisibleWeights();

    // Setup the learning algorithm.
    DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(_network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    // Setup batches of input for learning.
    int batchCount = Math.Max(1, inputs.Length / 100);

    // Create mini-batches to speed learning.
    int[] groups = Accord.Statistics.Tools.RandomGroups(inputs.Length, batchCount);
    double[][][] batches = inputs.Subgroups(groups);

    // Learning data for the specified layer.
    double[][][] layerData;

    // Unsupervised learning on each hidden layer, except for the output layer.
    for (int layerIndex = 0; layerIndex < _network.Machines.Count - 1; layerIndex++)
    {
        teacher.LayerIndex = layerIndex;
        layerData = teacher.GetLayerInput(batches);

        for (int i = 0; i < UNSUPERVISED_EPOCHS; i++)
        {
            double error = teacher.RunEpoch(layerData) / inputs.Length;
            if (i % 10 == 0)
            {
                label1.Text = "Layer: " + layerIndex + " Epoch: " + i + ", Error: " + error;
                label1.Refresh();
            }
        }
    }

    // Supervised learning on entire network, to provide output classification.
    var teacher2 = new BackPropagationLearning(_network)
    {
        LearningRate = 0.1,
        Momentum = 0.5
    };

    // Run supervised learning.
    for (int i = 0; i < SUPERVISED_EPOCHS; i++)
    {
        double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
        if (i % 10 == 0)
        {
            label1.Text = "Supervised: " + i + ", Error = " + error;
            label1.Refresh();
        }
    }

    // Test the resulting accuracy.
    label1.Text = "";
    int correct = 0;
    for (int i = 0; i < testInputs.Length; i++)
    {
        double[] outputValues = _network.Compute(testInputs[i]);
        int y = GetResult(outputValues);
        int t = GetResult(testOutputs[i]);

        label1.Text += "predicted: " + y + " actual: " + t + "\n";
        label1.Refresh();

        if (y == t)
        {
            correct++;
        }
    }

    sw.Stop();

    label1.Text = "Correct " + correct + "/" + testInputs.Length + ", " +
        Math.Round(((double)correct / (double)testInputs.Length * 100), 2) + "%";
    label1.Text += "\nElapsed train+test time: " + sw.Elapsed;
    label1.Refresh();
}
public static double[][] Predict(DeepBeliefNetwork network, double[][] data)
{
    return (from input in data select network.Compute(input)).ToArray();
}
public static void Excute2()
{
    double[][] inputs;
    double[][] outputs;
    double[][] testInputs;
    double[][] testOutputs;

    // Load ascii digits dataset.
    inputs = DataManager.Load(@"data.txt", out outputs);

    // The first 500 data rows will be for training. The rest will be for testing.
    testInputs = inputs.Skip(500).ToArray();
    testOutputs = outputs.Skip(500).ToArray();
    inputs = inputs.Take(500).ToArray();
    outputs = outputs.Take(500).ToArray();

    // Setup the deep belief network and initialize with random weights.
    // Constructor arguments: the number of network inputs (Length), then the
    // number of hidden neurons in each layer (10, 10).
    DeepBeliefNetwork network = new DeepBeliefNetwork(inputs.First().Length, 10, 10);

    // Gaussian weights use the given standard deviation; typical values fall in
    // the 0.001-0.1 range, and the default is 0.1.
    // Randomize draws the network weights from a Gaussian distribution.
    new GaussianWeights(network, 0.1).Randomize();

    // Update the visible layer's weights by copying the transpose of the hidden layer's weights.
    network.UpdateVisibleWeights();

    // Setup the learning algorithm for the deep network.
    DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network);

    // Configuration function that creates the learning algorithm for each layer of the deep network.
    teacher.Algorithm = (h, v, i) =>
    {
        return new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001, // weight decay
        };
    };
    //teacher.Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
    //{
    //    LearningRate = 0.1,
    //    Momentum = 0.5,
    //    Decay = 0.001,
    //};

    // Setup batches of input for learning.
    int batchCount = System.Math.Max(1, inputs.Length / 100);

    // Create mini-batches to speed learning.
    int[] groups = Accord.Statistics.Classes.Random(inputs.Length, batchCount);
    double[][][] batches = inputs.Separate(groups);

    // Learning data for the specified layer.
    double[][][] layerData;

    // Unsupervised learning on each hidden layer, except for the output layer.
    // network.Machines holds the restricted Boltzmann machine at each layer of the deep network.
    for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
    {
        teacher.LayerIndex = layerIndex;

        // Gets the learning data needed to train the selected layer. The return of this
        // function should then be passed to RunEpoch(double[][][]) to run a learning epoch.
        layerData = teacher.GetLayerInput(batches);

        for (int i = 0; i < 200; i++) // 200 epochs
        {
            // RunEpoch runs one learning epoch and returns the sum of learning errors.
            var learningResult = teacher.RunEpoch(layerData);
            double error = learningResult / inputs.Length;
            if (i % 10 == 0)
            {
                Console.WriteLine(i + ", Error = " + error);
            }
        }
    }

    // Supervised learning on entire network, to provide output classification.
    var teacher2 = new Neuro.Learning.BackPropagationLearning(network)
    {
        LearningRate = 0.1, // learning rate
        Momentum = 0.5      // momentum
    };

    // Run supervised learning.
    for (int i = 0; i < 500; i++) // 500 epochs
    {
        double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
        if (i % 10 == 0)
        {
            Console.WriteLine(i + ", Error = " + error);
        }
    }

    // Test the resulting accuracy.
    int correct = 0;
    for (int i = 0; i < testInputs.Length; i++)
    {
        double[] outputValues = network.Compute(testInputs[i]);
        if (DataManager.FormatOutputResult(outputValues) == DataManager.FormatOutputResult(testOutputs[i]))
        {
            correct++;
        }
    }

    Console.WriteLine("Correct " + correct + "/" + testInputs.Length + ", " +
        System.Math.Round(((double)correct / (double)testInputs.Length * 100), 2) + "%");
    Console.Write("Press any key to quit ..");
    Console.ReadKey();
}
public int Predict(double[] input)
{
    double[] result = _neuralNetwork.Compute(input);
    return DictionaryTools.DoublesToInt(result);
}
public void ExampleTest1()
{
    Accord.Math.Tools.SetupGenerator(0);

    // We'll use a simple XOR function as input.
    double[][] inputs =
    {
        new double[] { 0, 0 }, // 0 xor 0
        new double[] { 0, 1 }, // 0 xor 1
        new double[] { 1, 0 }, // 1 xor 0
        new double[] { 1, 1 }, // 1 xor 1
    };

    // XOR output, corresponding to the input.
    double[][] outputs =
    {
        new double[] { 0 }, // 0 xor 0 = 0
        new double[] { 1 }, // 0 xor 1 = 1
        new double[] { 1 }, // 1 xor 0 = 1
        new double[] { 0 }, // 1 xor 1 = 0
    };

    // Setup the deep belief network (2 inputs, 3 hidden, 1 output)
    DeepBeliefNetwork network = new DeepBeliefNetwork(2, 3, 1);

    // Initialize the network with Gaussian weights
    new GaussianWeights(network, 0.1).Randomize();

    // Update the visible layer with the new weights
    network.UpdateVisibleWeights();

    // Setup the learning algorithm.
    DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    // Unsupervised learning on each hidden layer, except for the output.
    for (int i = 0; i < network.Layers.Length - 1; i++)
    {
        teacher.LayerIndex = i;

        // Compute the learning data that should be used
        var layerInput = teacher.GetLayerInput(inputs);

        // Train the layer iteratively
        for (int j = 0; j < 5000; j++)
        {
            teacher.RunEpoch(layerInput);
        }
    }

    // Supervised learning on entire network, to provide output classification.
    var backpropagation = new BackPropagationLearning(network)
    {
        LearningRate = 0.1,
        Momentum = 0.5
    };

    // Run supervised learning.
    for (int i = 0; i < 5000; i++)
    {
        backpropagation.RunEpoch(inputs, outputs);
    }

    // Test the resulting accuracy.
    int correct = 0;
    for (int i = 0; i < inputs.Length; i++)
    {
        double[] outputValues = network.Compute(inputs[i]);
        double outputResult = outputValues.First() >= 0.5 ? 1 : 0;

        if (outputResult == outputs[i].First())
        {
            correct++;
        }
    }

    Assert.AreEqual(4, correct);
}
static double Neural_Network(bool show)
{
    double error = 0;
    DataTable entireData = DataController.MakeDataTable("../../drug_consumption.txt");
    Codification codebook = new Codification(entireData);

    // Candidate target columns: "Alcohol", "Amfet", "Amyl", "Benzos", "Cofeine", "Cannabis",
    // "Chocolate", "Coke", "Crac", "Ecstasy", "Heroine", "Ketamine", "LegalH", "LSD", "Meth",
    // "Mushrooms", "Nicotine", "Semeron", "VSA".
    string LookingFor = "Heroine";
    int good = 0;

    string[][] outputs;
    string[][] inputs = DataController.MakeString("../../drug_consumption_500.txt", out outputs);
    string[][] testOutputs;
    string[][] testInputs = DataController.MakeString("../../drug_consumption_500.txt", out testOutputs);

    DataTable outputs1 = DataController.MakeDataFromString(outputs, "output");
    DataTable inputs1 = DataController.MakeDataFromString(inputs, "input");
    DataTable testOutputs1 = DataController.MakeDataFromString(testOutputs, "output");
    DataTable testInputs1 = DataController.MakeDataFromString(testInputs, "input");

    DataTable Isymbols = codebook.Apply(inputs1);
    DataTable Osymbols = codebook.Apply(outputs1);
    DataTable TIsymbols = codebook.Apply(testInputs1);
    DataTable TOsymbols = codebook.Apply(testOutputs1);

    double[][] inputsD = Isymbols.ToJagged<double>("Age", "Gender", "Education", "Country", "Eticnity",
        "Nscore", "Escore", "Oscore", "Ascore", "Cscore", "Impulsive", "SS");
    double[][] outputsD = Osymbols.ToJagged<double>(LookingFor);
    outputsD = DataController.convertDT(outputsD);

    double[][] inputsT = TIsymbols.ToJagged<double>("Age", "Gender", "Education", "Country", "Eticnity",
        "Nscore", "Escore", "Oscore", "Ascore", "Cscore", "Impulsive", "SS");
    double[][] outputsT = TOsymbols.ToJagged<double>(LookingFor);
    outputsT = DataController.convertDT(outputsT);

    DeepBeliefNetwork network = new DeepBeliefNetwork(inputs.First().Length, 10, 7);
    new GaussianWeights(network, 0.1).Randomize();
    network.UpdateVisibleWeights();

    DeepBeliefNetworkLearning FirstLearner = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    int batchCount = Math.Max(1, inputs.Length / 100);
    int[] groupsNew = Accord.Statistics.Classes.Random(inputsD.Length, batchCount);
    double[][][] batchesNew = Accord.Statistics.Classes.Separate(inputsD, groupsNew);
    double[][][] layerData;

    for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
    {
        FirstLearner.LayerIndex = layerIndex;
        layerData = FirstLearner.GetLayerInput(batchesNew);

        for (int i = 0; i < 500; i++)
        {
            error = FirstLearner.RunEpoch(layerData) / inputsD.Length;
            if (i % 10 == 0 && show == true)
            {
                Console.WriteLine("Error value(" + LookingFor + ", test: " + i + ") = " + error);
            }
        }
    }

    var SecondLearner = new BackPropagationLearning(network)
    {
        LearningRate = 0.15,
        Momentum = 0.7
    };

    EvolutionaryLearning teacher = new EvolutionaryLearning(network, 100);

    for (int i = 0; i < 800; i++)
    {
        error = teacher.RunEpoch(inputsD, outputsD) / inputsD.Length;
        if (i % 50 == 0 && show == true)
        {
            Console.WriteLine("Error value(" + LookingFor + ", test: " + i + ") = " + error);
        }
    }

    for (int i = 0; i < 800; i++)
    {
        error = SecondLearner.RunEpoch(inputsD, outputsD) / inputsD.Length;
        if (i % 10 == 0 && show == true)
        {
            Console.WriteLine("Error value(" + LookingFor + ", test: " + i + ") = " + error);
        }
    }

    for (int i = 0; i < inputsD.Length; i++)
    {
        double[] outputValues = network.Compute(inputsT[i]);
        if (outputValues.ToList().IndexOf(outputValues.Max()) == outputsT[i].ToList().IndexOf(outputsT[i].Max()))
        {
            good++;
        }
    }

    if (show == true)
    {
        Console.WriteLine("Accuracy - " + Math.Round(((double)good / (double)inputsD.Length * 100), 4) + "%");
        Console.ReadKey();
    }

    return error;
}
static void Main(string[] args)
{
#if Cluster
    // Output file.
    List<string> outputLines = new List<string>();
    DateTime timeStart = DateTime.Now;

    // Some example documents.
    string[] documents = new GetTweets().GetTweetsFromExcelFile("Train_NN.xlsx");

    // Apply TF*IDF to the documents and get the resulting vectors.
    double[][] inputs = TFIDF.Transform(documents, 0);
    Console.WriteLine("time to transformation " + (DateTime.Now - timeStart));
    outputLines.Add("time to transformation " + (DateTime.Now - timeStart));
    Console.WriteLine("TFIDF transformation done...");

    inputs = TFIDF.Normalize(inputs);
    Console.WriteLine("time to Normalization " + (DateTime.Now - timeStart));
    outputLines.Add("time to Normalization " + (DateTime.Now - timeStart));
    Console.WriteLine("TFIDF Normalization done...");
    //inputs = Accord.Math.Norm.Norm2(inputs);

    string[] topics = TFIDF.Topics(documents, 5);
    Console.WriteLine("time to topics " + (DateTime.Now - timeStart));
    outputLines.Add("time to topics " + (DateTime.Now - timeStart));
    Console.WriteLine("Topics gathered...");

    //Random random = new Random();
    //double[][] rand = new double[inputs.Length][];
    //for (int i = 0; i < inputs.Length; i++)
    //{
    //    rand[i] = new double[inputs[i].Length];
    //    for (int j = 0; j < inputs[i].Length; j++)
    //    {
    //        rand[i][j] = random.NextDouble();
    //    }
    //}
    //Console.WriteLine("time to generate random numbers " + (DateTime.Now - timeStart));
    //Console.WriteLine("Randoms generated...");

    KMeans cluster = new KMeans(topics.Length, Distance.Cosine);
    //cluster.MaxIterations = 1;
    //cluster.Randomize(rand);
    int[] index = cluster.Compute(inputs);
    Console.WriteLine("time to cluster " + (DateTime.Now - timeStart));
    outputLines.Add("time to cluster " + (DateTime.Now - timeStart));
    Console.WriteLine("Clustering done...");

    //Accord.Statistics.Analysis.PrincipalComponentAnalysis pca = new Accord.Statistics.Analysis.PrincipalComponentAnalysis(inputs, Accord.Statistics.Analysis.AnalysisMethod.Center);
    //pca.Compute();
    //double[][] newinput = pca.Transform(inputs, 2);
    //ScatterplotBox.Show("KMeans Clustering of Tweets", newinput, index).Hold();

    // Write each cluster's tweets under its most representative topic word.
    // (Cluster labels returned by Compute are zero-based.)
    for (int i = 0; i < topics.Length; i++)
    {
        outputLines.Add(Convert.ToString(i + 1));

        List<string> topicDecider = new List<string>();
        string[] topicString;
        int j = 0;
        foreach (int x in index)
        {
            if (x == i)
            {
                topicDecider.Add(documents[j]);
            }
            j++;
        }

        topicString = TFIDF.Topics(topicDecider.ToArray(), topicDecider.Count / 2);

        if (topicString.Length == 0)
        {
            outputLines.Add("--------------------------------------------------------");
            outputLines.Add("TOPIC: other");
            outputLines.Add("--------------------------------------------------------");
        }
        else
        {
            outputLines.Add("--------------------------------------------------------");
            outputLines.Add("TOPIC: " + topicString[0]);
            outputLines.Add("--------------------------------------------------------");
        }

        j = 0;
        foreach (int x in index)
        {
            if (x == i)
            {
                outputLines.Add("Tweet ID " + j + ":\t" + documents[j]);
            }
            j++;
        }

        outputLines.Add("");
        outputLines.Add("");
        outputLines.Add("");
        outputLines.Add("");
    }

    System.IO.File.WriteAllLines(@"Train_NN_2.txt", outputLines.ToArray());
    Console.WriteLine("Output is written...");
#else
    // Output file.
    List<string> outputLines = new List<string>();
    DateTime timeStart = DateTime.Now;

    // Some example documents.
    string[] documents_Train = new GetTweets().GetTweetsFromExcelFile("Train_NN.xlsx");
    double[][] Train_Labels = new GetTweets().GetLabelsFromExcelFile("Train_Labels.xlsx");

    // Apply TF*IDF to the documents and get the resulting vectors.
    double[][] inputs = TFIDF.Transform(documents_Train, 0);
    Console.WriteLine("time to transformation " + (DateTime.Now - timeStart));
    outputLines.Add("time to transformation " + (DateTime.Now - timeStart));
    Console.WriteLine("TFIDF transformation done...");

    inputs = TFIDF.Normalize(inputs);
    Console.WriteLine("time to Normalization " + (DateTime.Now - timeStart));
    outputLines.Add("time to Normalization " + (DateTime.Now - timeStart));
    Console.WriteLine("TFIDF Normalization done...");

    // The first 140 rows are labeled training tweets; the remaining 860 are for testing.
    double[][] train_input = new double[140][];
    double[][] testInputs = new double[1000 - 140][];
    double[][] testOutputs = new double[1000 - 140][];

    for (int i = 0; i < 140; i++)
    {
        train_input[i] = new double[inputs[i].Length];
        for (int j = 0; j < inputs[i].Length; j++)
        {
            train_input[i][j] = inputs[i][j];
        }
    }

    // Copy the remaining rows (offset by the 140 training rows) into the test set.
    for (int i = 0; i < 1000 - 140; i++)
    {
        testInputs[i] = new double[inputs[i + 140].Length];
        for (int j = 0; j < inputs[i + 140].Length; j++)
        {
            testInputs[i][j] = inputs[i + 140][j];
        }
    }

    //testInputs = inputs.Skip(500).ToArray();
    //testOutputs = outputs.Skip(500).ToArray();
    //inputs = inputs.Take(500).ToArray();
    //outputs = outputs.Take(500).ToArray();

    // Setup the deep belief network and initialize with random weights.
    DeepBeliefNetwork network = new DeepBeliefNetwork(train_input.First().Length, 7);
    new GaussianWeights(network, 0.1).Randomize();
    network.UpdateVisibleWeights();

    // Setup the learning algorithm.
    DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    // Setup batches of input for learning.
    int batchCount = Math.Max(1, train_input.Length / 100);

    // Create mini-batches to speed learning.
    int[] groups = Accord.Statistics.Tools.RandomGroups(train_input.Length, batchCount);
    double[][][] batches = train_input.Subgroups(groups);

    // Learning data for the specified layer.
    double[][][] layerData;

    // Unsupervised learning on each hidden layer, except for the output layer.
    for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
    {
        teacher.LayerIndex = layerIndex;
        layerData = teacher.GetLayerInput(batches);

        for (int i = 0; i < 200; i++)
        {
            double error = teacher.RunEpoch(layerData) / train_input.Length;
            if (i % 10 == 0)
            {
                Console.WriteLine(i + ", Error = " + error);
            }
        }
    }

    // Supervised learning on entire network, to provide output classification.
    var teacher2 = new BackPropagationLearning(network)
    {
        LearningRate = 0.1,
        Momentum = 0.5
    };

    // Transpose the label matrix so each row holds the 7 class indicators for one tweet.
    double[][] Train_Labels_T = new double[140][];
    for (int i = 0; i < 140; i++)
    {
        Train_Labels_T[i] = new double[7];
        for (int j = 0; j < 7; j++)
        {
            Train_Labels_T[i][j] = Train_Labels[j][i];
        }
    }

    // Run supervised learning.
    for (int i = 0; i < 500; i++)
    {
        double error = teacher2.RunEpoch(train_input, Train_Labels_T) / train_input.Length;
        if (i % 10 == 0)
        {
            Console.WriteLine(i + ", Error = " + error);
        }
    }

    outputLines.Add("time to Training " + (DateTime.Now - timeStart));

    // Test the resulting accuracy.
    double[][] outputValues = new double[testInputs.Length][];
    for (int i = 0; i < testInputs.Length; i++)
    {
        outputValues[i] = network.Compute(testInputs[i]);
    }

    outputLines.Add("time to Testing/clustering " + (DateTime.Now - timeStart));
    outputLines.Add("");
    outputLines.Add("");
    outputLines.Add("");

    List<string> class1 = new List<string>();
    List<string> class2 = new List<string>();
    List<string> class3 = new List<string>();
    List<string> class4 = new List<string>();
    List<string> class5 = new List<string>();
    List<string> class6 = new List<string>();
    List<string> class7 = new List<string>();

    // Creating the output file: the first 140 tweets are grouped by their known
    // topics; the rest are assigned to the class with the highest network output.
    for (int i = 0; i < documents_Train.Length; i++)
    {
        if (i < 10 && i > -1)
        {
            if (i == 0)
            {
                class1.Add("-------------------------------");
                class1.Add("TOPIC: WEATHER");
                class1.Add("-------------------------------");
            }
            class1.Add("Training_Tweet:\t" + documents_Train[i]);
        }
        if (i < 20 && i > 9)
        {
            if (i == 10)
            {
                class2.Add("-------------------------------");
                class2.Add("TOPIC: MUSIC");
                class2.Add("-------------------------------");
            }
            class2.Add("Training_Tweet:\t" + documents_Train[i]);
        }
        if (i < 30 && i > 19)
        {
            if (i == 20)
            {
                class3.Add("-------------------------------");
                class3.Add("TOPIC: ITALY");
                class3.Add("-------------------------------");
            }
            class3.Add("Training_Tweet:\t" + documents_Train[i]);
        }
        if (i < 40 && i > 29)
        {
            if (i == 30)
            {
                class4.Add("-------------------------------");
                class4.Add("TOPIC: FOOD");
                class4.Add("-------------------------------");
            }
            class4.Add("Training_Tweet:\t" + documents_Train[i]);
        }
        if (i < 50 && i > 39)
        {
            if (i == 40)
            {
                class5.Add("-------------------------------");
                class5.Add("TOPIC: FASHION");
                class5.Add("-------------------------------");
            }
            class5.Add("Training_Tweet:\t" + documents_Train[i]);
        }
        if (i < 60 && i > 49)
        {
            if (i == 50)
            {
                class6.Add("-------------------------------");
                class6.Add("TOPIC: FOOTBALL");
                class6.Add("-------------------------------");
            }
            class6.Add("Training_Tweet:\t" + documents_Train[i]);
        }
        if (i < 140 && i > 59)
        {
            if (i == 60)
            {
                class7.Add("-------------------------------");
                class7.Add("TOPIC: OTHER");
                class7.Add("-------------------------------");
            }
            class7.Add("Training_Tweet:\t" + documents_Train[i]);
        }
        if (i >= 140)
        {
            int what = outputValues[i - 140].IndexOf(outputValues[i - 140].Max());
            switch (what)
            {
                case 0: class1.Add("Test_Tweet:\t" + documents_Train[i]); break;
                case 1: class2.Add("Test_Tweet:\t" + documents_Train[i]); break;
                case 2: class3.Add("Test_Tweet:\t" + documents_Train[i]); break;
                case 3: class4.Add("Test_Tweet:\t" + documents_Train[i]); break;
                case 4: class5.Add("Test_Tweet:\t" + documents_Train[i]); break;
                case 5: class6.Add("Test_Tweet:\t" + documents_Train[i]); break;
                case 6: class7.Add("Test_Tweet:\t" + documents_Train[i]); break;
            }
        }
    }

    // Emit each class block, separated by blank lines (same layout as before).
    foreach (var cls in new[] { class1, class2, class3, class4, class5, class6, class7 })
    {
        outputLines.Add("");
        outputLines.Add("");
        outputLines.Add("");
        outputLines.AddRange(cls);
    }
    outputLines.Add("");
    outputLines.Add("");
    outputLines.Add("");

    System.IO.File.WriteAllLines(@"Train_NN_With_Test_2.txt", outputLines.ToArray());
    Console.Write("Press any key to quit ..");
#endif
    Console.ReadKey();
}
public static void test()
{
    //double[][] inputs;
    //double[][] outputs;
    //double[][] testInputs;
    //double[][] testOutputs;

    //// Load ascii digits dataset.
    //inputs = DataManager.Load(@"../../../data/data.txt", out outputs);

    //// The first 500 data rows will be for training. The rest will be for testing.
    //testInputs = inputs.Skip(500).ToArray();
    //testOutputs = outputs.Skip(500).ToArray();
    //inputs = inputs.Take(500).ToArray();
    //outputs = outputs.Take(500).ToArray();

    //double[][] inputs = new double[4][] {
    //    new double[] {0, 0}, new double[] {0, 1},
    //    new double[] {1, 0}, new double[] {1, 1}
    //};
    //double[][] outputs = new double[4][] {
    //    new double[] {1, 0}, new double[] {0, 1},
    //    new double[] {0, 1}, new double[] {1, 0}
    //};

    double[][] inputs =
    {
        //     input                 output
        new double[] { 0, 1, 1, 0 }, // 0
        new double[] { 0, 1, 0, 0 }, // 0
        new double[] { 0, 0, 1, 0 }, // 0
        new double[] { 0, 1, 1, 0 }, // 0
        new double[] { 0, 1, 0, 0 }, // 0
        new double[] { 1, 0, 0, 0 }, // 1
        new double[] { 1, 0, 0, 0 }, // 1
        new double[] { 1, 0, 0, 1 }, // 1
        new double[] { 0, 0, 0, 1 }, // 1
        new double[] { 0, 0, 0, 1 }, // 1
        new double[] { 1, 1, 1, 1 }, // 2
        new double[] { 1, 0, 1, 1 }, // 2
        new double[] { 1, 1, 0, 1 }, // 2
        new double[] { 0, 1, 1, 1 }, // 2
        new double[] { 1, 1, 1, 1 }, // 2
    };

    double[][] outputs = // those are the class labels
    {
        new double[] { 1, 0, 0 },
        new double[] { 1, 0, 0 },
        new double[] { 1, 0, 0 },
        new double[] { 1, 0, 0 },
        new double[] { 1, 0, 0 },
        new double[] { 0, 1, 0 },
        new double[] { 0, 1, 0 },
        new double[] { 0, 1, 0 },
        new double[] { 0, 1, 0 },
        new double[] { 0, 1, 0 },
        new double[] { 0, 0, 1 },
        new double[] { 0, 0, 1 },
        new double[] { 0, 0, 1 },
        new double[] { 0, 0, 1 },
        new double[] { 0, 0, 1 },
    };

    // Setup the deep belief network and initialize with random weights.
    Console.WriteLine(inputs.First().Length);
    DeepBeliefNetwork network = new DeepBeliefNetwork(inputs.First().Length, 2, outputs.First().Length);
    new GaussianWeights(network, 0.1).Randomize();
    network.UpdateVisibleWeights();

    // Setup the learning algorithm.
    DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    // Setup batches of input for learning.
    int batchCount = Math.Max(1, inputs.Length / 100);

    // Create mini-batches to speed learning.
    int[] groups = Accord.Statistics.Tools.RandomGroups(inputs.Length, batchCount);
    double[][][] batches = inputs.Subgroups(groups);

    // Learning data for the specified layer.
    double[][][] layerData;

    // Unsupervised learning on each hidden layer, except for the output layer.
    for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
    {
        teacher.LayerIndex = layerIndex;
        layerData = teacher.GetLayerInput(batches);

        for (int i = 0; i < 50000; i++)
        {
            double error = teacher.RunEpoch(layerData) / inputs.Length;
            //if (i % 10 == 0)
            //{
            //    Console.WriteLine(i + ", Error = " + error);
            //}
        }
    }

    // Supervised learning on entire network, to provide output classification.
    var teacher2 = new Accord.Neuro.Learning.BackPropagationLearning(network)
    {
        LearningRate = 0.1,
        Momentum = 0.5
    };

    // Run supervised learning.
    for (int i = 0; i < 50000; i++)
    {
        double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
        //if (i % 10 == 0)
        //{
        //    Console.WriteLine(i + ", Error = " + error);
        //}
    }

    // Test the resulting accuracy.
    //int correct = 0;
    //for (int i = 0; i < inputs.Length; i++)
    //{
    //    double[] outputValues = network.Compute(testInputs[i]);
    //    if (DataManager.FormatOutputResult(outputValues) == DataManager.FormatOutputResult(testOutputs[i]))
    //    {
    //        correct++;
    //    }
    //}
    //Console.WriteLine("Correct " + correct + "/" + inputs.Length + ", " + Math.Round(((double)correct / (double)inputs.Length * 100), 2) + "%");

    //double[] probs = network.GenerateOutput(new double[] { 0, 0 });
    //foreach (double p in probs)
    //{
    //    Console.Write(p + ", ");
    //}

    for (int i = 0; i < inputs.Length; i++)
    {
        double[] output = network.Compute(inputs[i]);

        int imax;
        output.Max(out imax);
        Console.Write(imax + " -- ");

        foreach (double p in output)
        {
            Console.Write(p + ", ");
        }
        Console.WriteLine("\n------------------");
    }
}