/// <summary>
/// Handles the save button: prompts the user for a destination file and
/// serializes the trained network to it.
/// </summary>
private void saveButton_Clicked(object sender, EventArgs e)
{
    if (saveFileDialog1.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    // 'using' guarantees the stream is disposed even if Save throws;
    // the original only closed it on the happy path and leaked otherwise.
    using (Stream stream = saveFileDialog1.OpenFile())
    {
        if (stream != null)
        {
            _network.Save(stream);
        }
    }
}
/// <summary>
/// Persists the trained network to the given file path.
/// </summary>
/// <param name="filePath">Destination file for the serialized network.</param>
public void SaveNetwork(string filePath) => network.Save(filePath);
/// <summary>
/// Trains a deep belief network to recover a DES key from
/// (plaintext, ciphertext) pairs. Checkpoints the best network seen so far
/// to disk, then reports the number of wrongly predicted key bytes per
/// test triplet.
/// </summary>
static void Main(string[] args)
{
    // Generate the training data.
    int keySize = 64;
    int messageSize = 64;
    int trainingSetSize = 100;
    List<Triplet> trainingSet = GenerateDESDataset(trainingSetSize, keySize, messageSize);
    double[][] inputTraining, outputTraining;
    Triplet.Transform2IO(trainingSet, out inputTraining, out outputTraining);

    // Generate an independent test set of the same size.
    List<Triplet> testSet = GenerateDESDataset(trainingSetSize, keySize, messageSize);
    double[][] inputTest, outputTest;
    Triplet.Transform2IO(testSet, out inputTest, out outputTest);

    // Derive layer sizes from the data: the network maps
    // (plaintext bits + ciphertext bits) -> key bits.
    int inputSize = trainingSet.First().original.Count() + trainingSet.First().encrypted.Count();
    int outputSize = trainingSet.First().key.Count();

    // Create the network and its supervised trainer.
    // (The unused SigmoidFunction left over from an earlier ActivationNetwork
    // experiment has been removed.)
    DeepBeliefNetwork network = new DeepBeliefNetwork(inputSize, 10, outputSize);
    DeepNeuralNetworkLearning teacher = new DeepNeuralNetworkLearning(network);

    // Train until the error drops below the threshold or restarts run out
    // (resets < 1 means a single training run — no actual restarts).
    int epoch = 0;
    double stopError = 0.1;
    int resets = 0;
    double minimumErrorReached = double.PositiveInfinity;
    while (minimumErrorReached > stopError && resets < 1)
    {
        network.Randomize();

        double errorTrain = double.PositiveInfinity;
        for (epoch = 0; epoch < 500000 && errorTrain > stopError; epoch++)
        {
            errorTrain = teacher.RunEpoch(inputTraining, outputTraining) / (double)trainingSetSize;

            // Checkpoint whenever the training error improves.
            if (errorTrain < minimumErrorReached)
            {
                minimumErrorReached = errorTrain;
                network.Save("cryptoDESNetwork.mlp");
            }

            Console.Clear();
            Console.WriteLine("Epoch : " + epoch);
            Console.WriteLine("Train Set Error : " + errorTrain.ToString("N2"));
            double errorTest = teacher.ComputeError(inputTest, outputTest) / (double)inputTest.Length;
            Console.WriteLine("Test Set Error : " + errorTest.ToString("N2"));
        }
        resets++;
    }
    Console.WriteLine();

    // Evaluate on the test set: count how many key bytes are predicted wrong.
    foreach (Triplet tReal in testSet)
    {
        double[] rIn, rOut, pOut;
        byte[] brMsg, brEncrypted, brKey;
        tReal.ToBytes(out brMsg, out brEncrypted, out brKey);
        tReal.ToIO(out rIn, out rOut);

        pOut = network.Compute(rIn);
        Triplet tPredicted = new Triplet(rIn, pOut, messageSize);
        byte[] bpMsg, bpEncrypted, bpKey;
        tPredicted.ToBytes(out bpMsg, out bpEncrypted, out bpKey);

        int wrongBytes = 0;
        for (int i = 0; i < keySize / 8; i++)
        {
            if (brKey[i] != bpKey[i])
            {
                wrongBytes++;
            }
        }
        Console.WriteLine("Wrong bytes = " + wrongBytes);
    }
    Console.ReadKey();
}
/// <summary>
/// Trains a deep belief network on an 80/20 train/test split of the given
/// samples: layer-wise unsupervised pre-training (contrastive divergence)
/// followed by supervised back-propagation, then saves the network and
/// prints the test-set accuracy.
/// </summary>
/// <param name="inputs">Input vectors, one per sample.</param>
/// <param name="outputs">Expected output vectors, aligned with inputs.</param>
public static void Learn(double[][] inputs, double[][] outputs)
{
    // BUG FIX: 'count' was referenced but never declared in the original
    // (the sibling overload declares it as inputs.Length); without it the
    // 80/20 split cannot be computed.
    var count = inputs.Length;
    var n = (int)(count * 0.8);
    var testInputs = inputs.Skip(n).ToArray();
    var testOutputs = outputs.Skip(n).ToArray();
    inputs = inputs.Take(n).ToArray();
    outputs = outputs.Take(n).ToArray();

    var network = new DeepBeliefNetwork(inputs.First().Length, 10, 10);
    new GaussianWeights(network, 0.1).Randomize();
    network.UpdateVisibleWeights();

    // Setup the learning algorithm.
    var teacher = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    // Setup batches of input for learning.
    int batchCount = Math.Max(1, inputs.Length / 100);
    // Create mini-batches to speed learning.
    int[] groups = Classes.Random(inputs.Length, batchCount);
    double[][][] batches = inputs.Subgroups(groups);
    // Learning data for the specified layer.
    double[][][] layerData;

    // Unsupervised learning on each hidden layer, except for the output layer.
    for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
    {
        teacher.LayerIndex = layerIndex;
        layerData = teacher.GetLayerInput(batches);
        for (int i = 0; i < 200; i++)
        {
            double error = teacher.RunEpoch(layerData) / inputs.Length;
            if (i % 10 == 0)
            {
                Console.WriteLine(i + ", Error = " + error);
            }
        }
    }

    // Supervised learning on entire network, to provide output classification.
    var teacher2 = new BackPropagationLearning(network)
    {
        LearningRate = 0.1,
        Momentum = 0.5
    };

    // Run supervised learning. NOTE(review): the epoch count here is 'n'
    // (the training-set size), unlike the overload that caps it at 2000 —
    // kept as-is to preserve behavior; confirm whether a cap is intended.
    for (int i = 0; i < n; i++)
    {
        double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
        if (i % 10 == 0)
        {
            Console.WriteLine(i + ", Error = " + error);
        }
    }

    // Test the resulting accuracy.
    int correct = 0;
    for (int i = 0; i < testInputs.Length; i++)
    {
        double[] outputValues = network.Compute(testInputs[i]);
        if (Compare(outputValues, testOutputs[i]))
        {
            correct++;
        }
    }

    network.Save("deeplearning-countbits.net");
    Console.WriteLine("Correct " + correct + "/" + testInputs.Length + ", "
        + Math.Round(((double)correct / (double)testInputs.Length * 100), 2) + "%");
}
// Start learning.
/// <summary>
/// Trains the deep belief network (DBNetwork) from flow samples read from
/// the database: unsupervised layer-wise pre-training with contrastive
/// divergence, then supervised resilient-backprop fine-tuning, saving the
/// network to Path after every supervised epoch (checkpointing).
/// </summary>
/// <returns>true when training completed and the network was saved; false if any exception occurred.</returns>
public bool Run()
{
    bool IsDone = false;
    try
    {
        // Load training samples; rows with BehaviorNumber == 0 are excluded
        // (presumably "no behavior" records — confirm against the schema).
        FlowDatas db = new FlowDatas();
        (double[][] Inputs, double[][] Outputs) =
            DeepLearningTools.FlowSampleToLearningData(
                db.FlowSampleStatistics.Where(c => c.BehaviorNumber != 0).ToArray());
        db.Dispose();
        // Create the DBN: two hidden layers sized from the input/output widths.
        DBNetwork = new DeepBeliefNetwork(Inputs.First().Length,
            (int)((Inputs.First().Length + Outputs.First().Length) / 1.5),
            (int)((Inputs.First().Length + Outputs.First().Length) / 2),
            Outputs.First().Length);
        // Randomize all network weights.
        new GaussianWeights(DBNetwork, 0.1).Randomize();
        DBNetwork.UpdateVisibleWeights();
        // Configure the unsupervised learning stage.
        DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(DBNetwork)
        {
            Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
            {
                LearningRate = 0.01,
                Momentum = 0.5,
                Decay = 0.001,
            }
        };
        // Set up batched input for learning.
        int batchCount1 = Math.Max(1, Inputs.Length / 10);
        // Create mini-batches to speed up learning.
        int[] groups1 = Accord.Statistics.Classes.Random(Inputs.Length, batchCount1);
        double[][][] batches = Inputs.Subgroups(groups1);
        // Learning data for the specified layer.
        double[][][] layerData;
        // Run unsupervised learning on each hidden layer except the output layer.
        for (int layerIndex = 0; layerIndex < DBNetwork.Machines.Count - 1; layerIndex++)
        {
            teacher.LayerIndex = layerIndex;
            layerData = teacher.GetLayerInput(batches);
            for (int i = 0; i < 200; i++)
            {
                double error = teacher.RunEpoch(layerData) / Inputs.Length;
                if (i % 10 == 0)
                {
                    Console.WriteLine(i + ", Error = " + error);
                }
            }
        }
        // Supervised learning on the entire network, to provide output classification.
        var teacher2 = new ParallelResilientBackpropagationLearning(DBNetwork);
        double error1 = double.MaxValue;
        // Run supervised learning; the network is saved after every epoch
        // so a partial result survives interruption.
        for (int i = 0; i < 500; i++)
        {
            error1 = teacher2.RunEpoch(Inputs, Outputs) / Inputs.Length;
            Console.WriteLine(i + ", Error = " + error1);
            DBNetwork.Save(Path);
            Console.WriteLine("Save Done");
        }
        // Final save after the loop completes.
        DBNetwork.Save(Path);
        Console.WriteLine("Save Done");
        IsDone = true;
    }
    catch (Exception ex)
    {
        // Best-effort: log the failure and report it via the return value.
        Debug.Write(ex.ToString());
    }
    return (IsDone);
}
/// <summary>
/// Splits the samples into train/test partitions, pre-trains each hidden
/// layer of a deep belief network with contrastive divergence, fine-tunes
/// the whole network with back-propagation, saves it to disk, and finally
/// evaluates it on the held-out partition.
/// </summary>
/// <param name="networkFile">File the trained network is written to.</param>
/// <param name="inputs">Input vectors, one per sample.</param>
/// <param name="outputs">Expected output vectors, aligned with inputs.</param>
/// <param name="trainRate">Fraction of the samples used for training.</param>
private static void Learn(string networkFile, double[][] inputs, double[][] outputs, double trainRate = 0.8)
{
    // Partition the data: the first trainCount samples train, the rest test.
    var trainCount = (int)(inputs.Length * trainRate);
    var trainX = inputs.Take(trainCount).ToArray();
    var trainY = outputs.Take(trainCount).ToArray();
    var testX = inputs.Skip(trainCount).ToArray();
    var testY = outputs.Skip(trainCount).ToArray();
    Console.WriteLine($"trained items: {trainX.Length}, tested items: {testX.Length}");

    var network = new DeepBeliefNetwork(trainX.First().Length, 10, trainY.First().Length);
    new GaussianWeights(network, 0.1).Randomize();
    network.UpdateVisibleWeights();

    // Unsupervised stage: contrastive divergence over random mini-batches.
    var pretrainer = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    var batchCount = Math.Max(1, trainX.Length / 100);
    var membership = Classes.Random(trainX.Length, batchCount);
    var batches = trainX.Subgroups(membership);

    // Pre-train every hidden layer except the output layer.
    for (var layer = 0; layer < network.Machines.Count - 1; layer++)
    {
        pretrainer.LayerIndex = layer;
        var layerData = pretrainer.GetLayerInput(batches);
        for (var epoch = 0; epoch < 200; epoch++)
        {
            var error = pretrainer.RunEpoch(layerData) / trainX.Length;
            if (epoch % 10 == 0)
            {
                Console.WriteLine(epoch + ", Error = " + error);
            }
        }
    }

    // Supervised stage: fine-tune the full network with back-propagation.
    var finetuner = new BackPropagationLearning(network)
    {
        LearningRate = 0.1,
        Momentum = 0.5
    };

    var epochLimit = Math.Min(2000, trainCount);
    for (var epoch = 0; epoch < epochLimit; epoch++)
    {
        var error = finetuner.RunEpoch(trainX, trainY) / trainX.Length;
        if (epoch % 10 == 0)
        {
            Console.WriteLine(epoch + ", Error = " + error);
        }
    }

    network.Save(networkFile);
    Console.WriteLine($"save network: {networkFile}");

    // Measure the resulting accuracy on the held-out samples.
    Test(networkFile, testX, testY);
}
/// <summary>
/// Saves the DeepBeliefNetwork to a timestamped, versioned file under the
/// configured learner save path.
/// </summary>
/// <param name="network">DeepBeliefNetwork to serialize.</param>
/// <param name="version">AI player version embedded in the file name.</param>
public void SaveDeepBeliefNetwork(DeepBeliefNetwork network, AiPlayer.Version version)
{
    // BUG FIX: "hh" is the 12-hour clock, so a file saved at 15:00 got the
    // same "03" hour as one saved at 03:00 — names could collide and did not
    // sort chronologically. "HH" is the 24-hour hour specifier.
    var filePath = string.Format(LearningConfig.LearnerSavePath + @"/{0}_{1}_{2}.bin",
        DeepBeliefNetworkFileName,
        DateTime.Now.ToString("yyyyMMddHHmmss"),
        version.ToString().ToLower());
    network.Save(filePath);
}
/// <summary>
/// Serializes the network to the given file system path.
/// </summary>
/// <param name="path">Destination path for the saved network.</param>
public void SaveNetwork(String path) => network.Save(path);
/// <summary>
/// Writes the wrapped network to the file identified by <paramref name="name"/>.
/// </summary>
/// <param name="name">Destination file name for the saved network.</param>
public void SaveNetwork(string name) => _network.Save(name);