/// <summary>
/// Loads the dataset and defines the input/output pairs.
/// </summary>
public void DataSetLoadAndInOutResult()
{
    #region Dataset loading - input/output definition
    trainingSet.Add(new TrainingSample(MyDataSet.A, new double[5] { Constansts.ones, Constansts.zeros, Constansts.zeros, Constansts.zeros, Constansts.zeros }));
    lstLog.Items.Insert(Constansts.zeros, Constansts.A);
    trainingSet.Add(new TrainingSample(MyDataSet.B, new double[5] { Constansts.zeros, Constansts.ones, Constansts.zeros, Constansts.zeros, Constansts.zeros }));
    lstLog.Items.Insert(Constansts.zeros, Constansts.B);
    trainingSet.Add(new TrainingSample(MyDataSet.C, new double[5] { Constansts.zeros, Constansts.zeros, Constansts.ones, Constansts.zeros, Constansts.zeros }));
    lstLog.Items.Insert(Constansts.zeros, Constansts.C);
    trainingSet.Add(new TrainingSample(MyDataSet.D, new double[5] { Constansts.zeros, Constansts.zeros, Constansts.zeros, Constansts.ones, Constansts.zeros }));
    lstLog.Items.Insert(Constansts.zeros, Constansts.D);
    trainingSet.Add(new TrainingSample(MyDataSet.E, new double[5] { Constansts.zeros, Constansts.zeros, Constansts.zeros, Constansts.zeros, Constansts.ones }));
    lstLog.Items.Insert(Constansts.zeros, Constansts.E);
    #endregion
}
public void Run()
{
    // Create the training set (logical XOR function).
    TrainingSet trainingSet = new TrainingSet(2, 1);
    trainingSet.Add(new SupervisedTrainingElement(new double[] { 0, 0 }, new double[] { 0 }));
    trainingSet.Add(new SupervisedTrainingElement(new double[] { 0, 1 }, new double[] { 1 }));
    trainingSet.Add(new SupervisedTrainingElement(new double[] { 1, 0 }, new double[] { 1 }));
    trainingSet.Add(new SupervisedTrainingElement(new double[] { 1, 1 }, new double[] { 0 }));

    // Create a multilayer perceptron with a 2-3-1 topology.
    MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.TANH, 2, 3, 1);

    // Learn the training set.
    Console.WriteLine("Training neural network...");
    myMlPerceptron.LearnInSameThread(trainingSet);

    // Test the trained perceptron.
    Console.WriteLine("Testing trained neural network");
    TestNeuralNetwork(myMlPerceptron, trainingSet);

    // Save the trained neural network.
    myMlPerceptron.Save("myMlPerceptron.nnet");

    // Load the saved neural network and, optionally, test it again.
    NeuralNetwork loadedMlPerceptron = NeuralNetwork.Load("myMlPerceptron.nnet");
    //Console.WriteLine("Testing loaded neural network");
    //TestNeuralNetwork(loadedMlPerceptron, trainingSet);
}
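The snippet above calls a TestNeuralNetwork helper that is not shown. A minimal sketch of what such a helper could look like, assuming the Java Neuroph-style API (SetInput/Calculate/Output and a TrainingElements collection) carries over to this port; the member names are assumptions, not confirmed parts of the original source:

// Hypothetical helper, not part of the original source: runs every training
// element through the network and prints input -> actual output.
private static void TestNeuralNetwork(NeuralNetwork network, TrainingSet trainingSet)
{
    foreach (SupervisedTrainingElement element in trainingSet.TrainingElements)
    {
        network.SetInput(element.Input);
        network.Calculate();
        double[] output = network.Output;
        Console.WriteLine("Input: " + string.Join(", ", element.Input) +
                          " Output: " + string.Join(", ", output));
    }
}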
public void LabTest1()
{
    var inputLayer = new LinearLayer(5);
    var hiddenLayer = new TanhLayer(neuronCount);
    var outputLayer = new TanhLayer(2);
    new BackpropagationConnector(inputLayer, hiddenLayer);
    new BackpropagationConnector(hiddenLayer, outputLayer);
    _xorNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
    _xorNetwork.SetLearningRate(learningRate);

    // Note: a TanhLayer outputs values in (-1, 1), so targets such as { 3, 3 }
    // below can never be reached exactly; scale the targets into (-1, 1) or use
    // a linear output layer if exact values are required.
    var trainingSet = new TrainingSet(5, 2);
    trainingSet.Add(new TrainingSample(new double[] { 0, 0, 0, 0, 0 }, new double[] { 0, 0 }));
    trainingSet.Add(new TrainingSample(new double[] { 0, 0, 0, 1, 0 }, new double[] { 3, 3 }));
    trainingSet.Add(new TrainingSample(new double[] { 0, 0, 1, 0, 0 }, new double[] { 2, 2 }));
    trainingSet.Add(new TrainingSample(new double[] { 0, 0, 1, 1, 0 }, new double[] { 2, 3 }));
    trainingSet.Add(new TrainingSample(new double[] { 0, 1, 0, 0, 0 }, new double[] { 1, 1 }));
    trainingSet.Add(new TrainingSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 3 }));
    trainingSet.Add(new TrainingSample(new double[] { 0, 1, 1, 0, 0 }, new double[] { 1, 2 }));
    trainingSet.Add(new TrainingSample(new double[] { 0, 1, 1, 1, 0 }, new double[] { 1, 3 }));
    // This sample sits far outside the range of the others and will dominate training.
    trainingSet.Add(new TrainingSample(new double[] { 22, 1, 1, 1, 22 }, new double[] { 1, 3 }));

    _errorList = new double[cycles];
    //_xorNetwork.EndEpochEvent += EndEpochEvent;
    _xorNetwork.Learn(trainingSet, cycles);
    var result = _xorNetwork.Run(new double[] { 0, 0, 1, 1, 0 });
}
void CreateTrainingSet()
{
    if (NetworkManager.Instance._neuralNetwork == null)
    {
        Debug.Log("You need to create a network first!");
        return;
    }
    if (NetworkManager.Instance.trainingSetInputs == null || NetworkManager.Instance.trainingSetInputs.Count == 0)
    {
        Debug.Log("You need to add training cases first!");
        return;
    }

    TrainingSet trainingSet = new TrainingSet(NetworkManager.Instance.neuronCount, outputNum);
    List<double[]> tempInputs = NetworkManager.Instance.trainingSetInputs;
    List<double> tempOutput = NetworkManager.Instance.trainingSetOutputs;

    // One-hot (-1/1) encode the class label into the output vector.
    for (int i = 0; i < tempInputs.Count; i++)
    {
        if (tempOutput[i] == 0)
        {
            trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] { 1, -1, -1, -1 }));
        }
        else if (tempOutput[i] == 1)
        {
            trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] { -1, 1, -1, -1 }));
        }
        else if (tempOutput[i] == 2)
        {
            trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] { -1, -1, 1, -1 }));
        }
        else if (tempOutput[i] == 3)
        {
            trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] { -1, -1, -1, 1 }));
        }
    }

    Debug.Log("All training cases added successfully");
    NetworkManager.Instance.setTrainingSet(trainingSet);
    NetworkManager.Instance.TrainNetwork(epochs);
}
/// <summary>
/// Builds a training set.
/// </summary>
/// <param name="lags">Offsets (into the past) used to build each input vector.</param>
/// <param name="leaps">Offsets (into the future) used to build each output vector.</param>
/// <returns>
/// The training set.
/// </returns>
public TrainingSet BuildTrainingSet(int[] lags, int[] leaps)
{
    TrainingSet trainingSet = new TrainingSet(lags.Length, leaps.Length);

    // The following assumes that lags and leaps are ordered in ascending fashion,
    // with lags holding negative offsets (so lags[0] is the farthest look-back).
    int maxLag = lags[0];
    int maxLeap = leaps[leaps.Length - 1];

    // Add training patterns to the training set.
    for (int i = -maxLag; i < Length - maxLeap; i++)
    {
        // Build the input vector.
        double[] inputVector = new double[lags.Length];
        for (int j = 0; j < inputVector.Length; j++)
        {
            inputVector[j] = data[i + lags[j]];
        }

        // Build the output vector.
        double[] outputVector = new double[leaps.Length];
        for (int j = 0; j < outputVector.Length; j++)
        {
            outputVector[j] = data[i + leaps[j]];
        }

        // Build a training pattern and add it to the training set.
        TrainingPattern trainingPattern = new TrainingPattern(inputVector, outputVector);
        trainingSet.Add(trainingPattern);
    }
    return trainingSet;
}
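A hedged usage sketch for BuildTrainingSet: the receiver name timeSeries and the concrete offsets are illustrative assumptions, chosen to match the ordering convention the method documents.

// Illustrative only: three past values predict one step ahead.
// Lags are negative (past) offsets in ascending order; leaps are positive.
int[] lags = { -3, -2, -1 };
int[] leaps = { 1 };
TrainingSet trainingSet = timeSeries.BuildTrainingSet(lags, leaps);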
/// <summary>
/// Brings the whole AI list together into a training set to do some killer stuff.
/// </summary>
/// <returns>Compilation of a single training set.</returns>
private TrainingSet CompileTrainingSet(List<CoastalRaidersFuedalResourceManager> rawMgxStats)
{
    if (rawMgxStats.Count == 0)
    {
        Program.Logger.Error("There are currently no stats available in the system to build a database.");
        Program.Logger.Error("Attempting to generate new entry....");
        // Generate a brand new AI entry here to test the auto data collection capability.
    }

    TrainingSet tset = new TrainingSet(
        rawMgxStats[0].GetInputParams.Length * 2,
        rawMgxStats[0].GetOutputParams.Length * 2);

    // Samples are consumed in pairs (player1, player2), so only loop while a full pair remains.
    for (int i = 0; i + 1 < rawMgxStats.Count; i += 2)
    {
        var player1 = rawMgxStats[i].GenerateAnnSample();
        var player2 = rawMgxStats[i + 1].GenerateAnnSample();
        var trainingSample = new TrainingSample(
            player1.InputVector.Concat(player2.InputVector).ToArray(),
            player1.OutputVector.Concat(player2.OutputVector).ToArray());
        tset.Add(trainingSample);
    }
    return tset;
}
public void LoadData(string fileName, double limitTraining)
{
    var buffer = System.IO.File.ReadAllLines(fileName);
    int limit = (int)(buffer.Length * limitTraining);
    HashSet<double> set = new HashSet<double>();

    foreach (var item in buffer)
    {
        var values = item.Split(',');
        Vector<double> cur = new Vector<double>();
        foreach (var val in values)
        {
            cur.Add(double.Parse(val));
        }

        // The last column is the class label; collect the distinct labels.
        set.Add(cur[cur.Count - 1]);
        if (TrainingSet.Count < limit)
        {
            TrainingSet.Add(cur);
        }
        else
        {
            TestSet.Add(cur);
        }
    }

    Count = set.Count;
    c = set.ToArray();
}
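A hedged usage sketch for LoadData: the file name and the 0.8 split fraction are illustrative; per the code above, the first limit rows land in TrainingSet, the rest in TestSet, and Count ends up holding the number of distinct labels.

// Illustrative only: 80% of rows go to TrainingSet, the rest to TestSet.
LoadData("iris.csv", 0.8);
Console.WriteLine("Training rows: " + TrainingSet.Count +
                  ", test rows: " + TestSet.Count +
                  ", classes: " + Count);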
protected override void SolveInstance(IGH_DataAccess DA)
{
    CrowNetBP net = new CrowNetBP();
    if (!networkLoaded)
    {
        int cycles = 1000;
        GH_Structure<GH_Number> tiv = new GH_Structure<GH_Number>();
        GH_Structure<GH_Number> tov = new GH_Structure<GH_Number>();
        DA.GetData(0, ref cycles);
        DA.GetData(1, ref net);
        DA.GetDataTree(2, out tiv);
        DA.GetDataTree(3, out tov);

        double[][] trainInVectors = Utils.GHTreeToMultidimensionalArray(tiv);
        double[][] trainOutVectors = Utils.GHTreeToMultidimensionalArray(tov);
        int trainVectorCount = trainInVectors.Length;
        if (trainVectorCount != trainOutVectors.Length)
        {
            AddRuntimeMessage(GH_RuntimeMessageLevel.Error, "Please supply an equal amount of input and output training vectors!");
            return; // Abort: mismatched counts would corrupt the training set below.
        }

        int trainInVectorDimension = trainInVectors[0].Length;
        int trainOutVectorDimension = trainOutVectors[0].Length;
        BackpropagationNetwork network = net.network(trainInVectorDimension, trainOutVectorDimension);

        // Build the training set.
        TrainingSet trainingSet = new TrainingSet(trainInVectorDimension, trainOutVectorDimension);
        for (int i = 0; i < trainVectorCount; i++)
        {
            trainingSet.Add(new TrainingSample(trainInVectors[i], trainOutVectors[i]));
        }

        // Train.
        network.Learn(trainingSet, cycles);
        this.Network = network;
    }

    if (this.Network != null)
    {
        DA.SetData(0, this.Network.MeanSquaredError.ToString("0.0000000000"));
        CrowNetBPP nn = new CrowNetBPP(this.Network);
        nn.hiddenLayerList = net.hiddenLayerList;
        nn.layerStructure = net.layerStructure;
        nn.neuronCount = net.neuronCount;
        DA.SetData(1, nn);
    }
    networkLoaded = false;
}
public void Train(int cpi, int cs = 5, int seed = -1)
{
    if (seed == -1)
    {
        seed = Environment.TickCount;
    }
    Random r = new Random(seed);

    int ai = 0;
    TrainingSet ts = new TrainingSet(Inputs, W * H * 3);
    foreach (var i in InImgs)
    {
        double[] iv = new double[Inputs];
        double[] ov = new double[W * H * 3];

        // Copy the RGB triplets of the input image into the input vector.
        int ic = 0;
        for (int y = 0; y < i.H; y++)
        {
            for (int x = 0; x < i.W; x++)
            {
                iv[ic] = GV(i.Dat[ic++]);
                iv[ic] = GV(i.Dat[ic++]);
                iv[ic] = GV(i.Dat[ic++]);
            }
        }

        // Copy the RGB triplets of the matching output image into the output
        // vector. (The original read from the input image `i` here, which
        // looks like a bug; the output image `oi` is what the vector is for.)
        Image oi = OutImgs[ai];
        int vv = 0;
        for (int y = 0; y < i.H; y++)
        {
            for (int x = 0; x < i.W; x++)
            {
                ov[vv] = GV(oi.Dat[vv++]);
                ov[vv] = GV(oi.Dat[vv++]);
                ov[vv] = GV(oi.Dat[vv++]);
            }
        }
        ai++;

        // Add the sample cpi times to weight it within the set.
        TrainingSample s = new TrainingSample(iv, ov);
        for (int xc = 0; xc < cpi; xc++)
        {
            ts.Add(s);
        }
    }

    Ready = false;
    //for (int t = 0; t < cs; t++)
    //{
    //net.BeginEpochEvent += TrainE;
    net.EndEpochEvent += EndE;
    net.Learn(ts, cs);
    net.StopLearning();
    Console.WriteLine("Done training mind.");
}
/// <summary>
/// Moves an example from the testing set to
/// the training set. If the supplied example number
/// is out of range, no example is moved.
/// </summary>
/// <param name="exampleNum">The example to transfer to the training set.</param>
public void moveToTrainingSet(int exampleNum)
{
    if (exampleNum < 0 || exampleNum > (TestingSet.Count - 1))
    {
        return;
    }
    TrainingSet.Add((int[])TestingSet.ElementAt(exampleNum));
    TestingSet.RemoveAt(exampleNum);
}
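The class presumably has a mirror operation going the other way; a minimal sketch of what that counterpart could look like under the same conventions. This method is an assumption for illustration, not part of the original source:

/// <summary>
/// Hypothetical counterpart: moves an example from the training set to the testing set.
/// </summary>
public void moveToTestingSet(int exampleNum)
{
    if (exampleNum < 0 || exampleNum > (TrainingSet.Count - 1))
    {
        return;
    }
    TestingSet.Add((int[])TrainingSet.ElementAt(exampleNum));
    TrainingSet.RemoveAt(exampleNum);
}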
void CreateTrainingSet()
{
    if (trainingSetInputs == null || trainingSetInputs.Count == 0)
    {
        Debug.Log("You need to add training cases first!");
        return;
    }

    trainingSet = new TrainingSet(neurons, outputNum);
    List<double[]> tempInputs = trainingSetInputs;
    List<double> tempOutput = trainingSetOutputs;

    // One-hot (-1/1) encode the class label into the output vector.
    for (int i = 0; i < tempInputs.Count; i++)
    {
        if (tempOutput[i] == 0)
        {
            trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] { 1, -1, -1, -1 }));
        }
        else if (tempOutput[i] == 1)
        {
            trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] { -1, 1, -1, -1 }));
        }
        else if (tempOutput[i] == 2)
        {
            trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] { -1, -1, 1, -1 }));
        }
        else if (tempOutput[i] == 3)
        {
            trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] { -1, -1, -1, 1 }));
        }
    }

    neuralNetwork.Learn(this.trainingSet, epochs);
}
public static TrainingSet ConvertToUnSupervisedTrainingSet(IForecastingDataSets sets)
{
    // Unsupervised samples carry an input vector only (no target output).
    TrainingSet trainingset = new TrainingSet(sets.InputData[0].Length);
    for (int i = 0; i < sets.InputData.Length; i++)
    {
        TrainingSample ts = new TrainingSample(sets.InputData[i]);
        trainingset.Add(ts);
    }
    return trainingset;
}
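A hedged usage sketch: an unsupervised set like the one produced above is the kind of input a self-organizing map consumes (compare the Kohonen Solve() examples further down). The names dataSets and somNetwork, and the cycle count, are illustrative assumptions.

// Illustrative only: train a self-organizing map on the unsupervised set.
TrainingSet unsupervised = ConvertToUnSupervisedTrainingSet(dataSets);
somNetwork.Learn(unsupervised, 1000); // e.g., 1000 training cycles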
/// <summary>
/// Brings the whole AI list together into a training set to do some killer stuff.
/// </summary>
/// <returns>The compiled training set.</returns>
private TrainingSet CompileTrainingSet()
{
    TrainingSet tset = new TrainingSet(_currentStats.GetInputParams.Length, _currentStats.GetOutputParams.Length);
    foreach (var tsample in _rawMgxStats)
    {
        tset.Add(tsample.GenerateAnnSample());
    }
    return tset;
}
private void button37_Click(object sender, EventArgs e)
{
    // 35-element input vectors, 5 one-hot outputs (letters A-E).
    TrainingSet egitimseti = new TrainingSet(35, 5);
    egitimseti.Add(new TrainingSample(VeriSeti.A, new double[5] { 1, 0, 0, 0, 0 }));
    egitimseti.Add(new TrainingSample(VeriSeti.A1, new double[5] { 1, 0, 0, 0, 0 }));
    egitimseti.Add(new TrainingSample(VeriSeti.B, new double[5] { 0, 1, 0, 0, 0 }));
    egitimseti.Add(new TrainingSample(VeriSeti.C, new double[5] { 0, 0, 1, 0, 0 }));
    egitimseti.Add(new TrainingSample(VeriSeti.D, new double[5] { 0, 0, 0, 1, 0 }));
    egitimseti.Add(new TrainingSample(VeriSeti.E, new double[5] { 0, 0, 0, 0, 1 }));

    ag.SetLearningRate(Convert.ToDouble(txt_ogrenmekatsayisi.Text)); // learning rate
    ag.Learn(egitimseti, Convert.ToInt32(txt_ogrenmehizi.Text));     // training cycles

    txt_ogrenmekatsayisi.Enabled = false;
    txt_ogrenmehizi.Enabled = false;
    lbl_hata.Text = ag.MeanSquaredError.ToString();
    button37.Enabled = false;
    MessageBox.Show("Yapay Sinir Ağı Eğitildi.", "Bilgi"); // "The neural network has been trained." / "Info"
}
/// <summary>
/// Handles a click on the Calculate button.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void tsmiCalculate_Click(object sender, EventArgs e)
{
    // Create the input, hidden, and output layers.
    ActivationLayer inputLayer = GetLayer(cboInputLayerType.SelectedItem.ToString(), 2);
    ActivationLayer hiddenLayer = GetLayer(cboHiddenLayerType.SelectedItem.ToString(), int.Parse(txtHiddenLayerCount.Text));
    ActivationLayer outputLayer = GetLayer(cboOutputLayerType.SelectedItem.ToString(), 1);

    // Create the connections between layers.
    new BackpropagationConnector(inputLayer, hiddenLayer, ConnectionMode.Complete).Initializer = new RandomFunction(0, 0.3);
    new BackpropagationConnector(hiddenLayer, outputLayer, ConnectionMode.Complete).Initializer = new RandomFunction(0, 0.3);

    // Create the neural network.
    var network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(double.Parse(txtInitialLearningRate.Text), double.Parse(txtFinalLearningRate.Text));

    // Train. Note: the SetLearningRate(0.3, 0.1) call below overrides the
    // user-supplied rates read from the text boxes above.
    var trainingSet = new TrainingSet(2, 1);
    for (var i = 0; i < 17; i++)
    {
        var x1 = data[i, 0];
        var x2 = data[i, 1];
        var y = data[i, 2];
        var inputVector = new double[] { x1, x2 };
        var outputVector = new double[] { y };
        var trainingSample = new TrainingSample(inputVector, outputVector);
        trainingSet.Add(trainingSample);
    }
    network.SetLearningRate(0.3, 0.1);
    network.Learn(trainingSet, int.Parse(txtTrainingEpochs.Text));
    network.StopLearning();

    // Predict, and report the absolute and relative error per row.
    for (var i = 0; i < 17; i++)
    {
        var x1 = data[i, 0];
        var x2 = data[i, 1];
        var y = data[i, 2];
        var testInput = new double[] { x1, x2 };
        var testOutput = network.Run(testInput)[0];
        var absolute = testOutput - y;
        var relative = Math.Abs((testOutput - y) / testOutput);
        dgvData.Rows[i].Cells[3].Value = testOutput.ToString("f3");
        dgvData.Rows[i].Cells[4].Value = absolute.ToString("f3");
        dgvData.Rows[i].Cells[5].Value = (relative * 100).ToString("f1") + "%";
    }
}
// Train button handler.
private void btnTrain_Click(object sender, EventArgs e)
{
    TrainingSet trainingSet = new TrainingSet(35, 5);
    trainingSet.Add(new TrainingSample(Dataset.Letters.A, new double[5] { 1, 0, 0, 0, 0 }));
    trainingSet.Add(new TrainingSample(Dataset.Letters.B, new double[5] { 0, 1, 0, 0, 0 }));
    trainingSet.Add(new TrainingSample(Dataset.Letters.C, new double[5] { 0, 0, 1, 0, 0 }));
    trainingSet.Add(new TrainingSample(Dataset.Letters.D, new double[5] { 0, 0, 0, 1, 0 }));
    trainingSet.Add(new TrainingSample(Dataset.Letters.E, new double[5] { 0, 0, 0, 0, 1 }));

    neuralNetwork.SetLearningRate(0.3);
    neuralNetwork.Learn(trainingSet, 5000);

    btnTrain.Enabled = false;
    btnGetResults.Enabled = true;
}
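After training a one-hot letter classifier like the ones above, classification is just an argmax over the network's output vector. A minimal sketch, assuming the Run(double[]) API used throughout these snippets; feeding Dataset.Letters.A back in is purely illustrative:

// Illustrative only: pick the letter whose output neuron fired strongest.
double[] output = neuralNetwork.Run(Dataset.Letters.A);
int best = 0;
for (int i = 1; i < output.Length; i++)
{
    if (output[i] > output[best]) { best = i; }
}
Console.WriteLine("Recognized letter: " + (char)('A' + best));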
async Task LearnNetworkAsync()
{
    _network = new BackpropagationNetwork(_inputLayer, _outputLayer);
    _network.Initialize();

    var trainingSet = new TrainingSet(1, 3);
    foreach (var b in from bomb in Enum.GetValues(typeof(BombTypes)).Cast<BombTypes>()
                      where bomb != BombTypes.Mine
                      select BombFabric.CreateBomb(bomb) into b
                      where b != null
                      select b)
    {
        trainingSet.Add(new TrainingSample(
            new double[] { b.BeepsLevel },
            new double[] { (int)b.FirstStageDisarming, (int)b.SecondStageDisarming, (int)b.ThirdStageDisarming }));
    }

    // Learn is synchronous; run it on the thread pool so the method is truly
    // asynchronous (the original had no await, so it ran entirely synchronously).
    await Task.Run(() => _network.Learn(trainingSet, 100000));
}
private void setData2Training()
{
    double[] input, output;
    maxVal = 0d;
    maxVol = 0d;
    minVol = 999999d;
    minVal = 9999999d;

    // First find the min and max values in the list.
    foreach (var data in ValuesList)
    {
        for (int i = 0; i < data.Length; i++)
        {
            if (((i + 1) % 5) == 0)
            {
                maxVol = Math.Max(maxVol, data[i]);
                minVol = Math.Min(minVol, data[i]);
            }
            else
            {
                maxVal = Math.Max(maxVal, data[i]);
                minVal = Math.Min(minVal, data[i]);
            }
        }
    }

    // Then rescale the data. (Note: classic min-max scaling would divide by
    // (max - min); dividing by max as below still bounds the values in [0, 1).)
    foreach (var data in ValuesList)
    {
        input = new double[data.Length - 1];
        output = new double[1];

        // -1 because the last number is the target, so it goes into a separate array.
        for (int i = 0; i < data.Length - 1; i++)
        {
            if (((i + 1) % 5) == 0)
            {
                data[i] = (data[i] - minVol) / maxVol;
            }
            else
            {
                data[i] = (data[i] - minVal) / maxVal;
            }
            input.SetValue(data[i], i);
        }

        data[data.Length - 1] = (data[data.Length - 1] - minVal) / maxVal;
        output.SetValue(data[data.Length - 1], 0);
        trainingSet.Add(new TrainingSample(input, output));
    }
}
public void button4_Click(object sender, EventArgs e)
{
    var openWin = new OpenFileDialog();
    openWin.DefaultExt = "txt";
    openWin.ShowDialog();
    string path = openWin.FileName;

    int nInput = Convert.ToInt32(textBox3.Text);
    int nOut = Convert.ToInt32(textBox5.Text);
    TrainingSet train = new TrainingSet(nInput, nOut);

    string[] lines = System.IO.File.ReadAllLines(path);
    string[] trainData = new string[nInput + nOut];
    // Note: if TrainingSample stores these arrays by reference, fresh arrays
    // should be allocated per line instead of reusing these two buffers.
    double[] trainInput = new double[nInput];
    double[] trainOut = new double[nOut];

    foreach (string line in lines)
    {
        trainData = line.Split(' ');
        for (int i = 0; i < nInput; i++)
        {
            trainInput[i] = Convert.ToDouble(trainData[i]);
        }
        // The original loop ran while i < nOut, which skips the outputs
        // whenever nOut <= nInput; the bound must be nInput + nOut.
        for (int i = nInput; i < nInput + nOut; i++)
        {
            trainOut[i - nInput] = Convert.ToDouble(trainData[i]);
        }
        train.Add(new TrainingSample(trainInput, trainOut));
    }

    network.Learn(train, Convert.ToInt32(textBox6.Text));
    MessageBox.Show("Training OK");
}
/// <summary>
/// Parses samples from a data file path.
/// </summary>
public void parseDataFile(string dataFile)
{
    FileParsers parser = new FileParsers(dataFile, DATAFILE);
    int numSample = parser.DataLines.Count;
    String[] rawSample;

    for (int i = 0; i < numSample - 1; i++)
    {
        rawSample = parser.extractDataSample(i);
        int[] dataSample = new int[Attributes.Count];
        try
        {
            // 2.a. Deal with all the attributes. There should be a 1-to-1
            // ordering between the internal attributes vector and the raw
            // sample vector.
            for (int j = 0; j < rawSample.Length; j++)
            {
                Attribute currAtt = (Attribute)Attributes.ElementAt(j);
                int attPos = currAtt.getAttributeValuePosition((String)rawSample.ElementAt(j));
                dataSample[j] = attPos;
                if (j == 0)
                {
                    TargetSums[attPos]++;
                }
            }
        }
        catch (Exception e)
        {
            // Swallowing the exception silently hides malformed lines;
            // at minimum the failure should be logged.
        }
        TrainingSet.Add(dataSample);
    }
}
/// <summary>
/// Created to test the custom neural network with binary inputs.
/// </summary>
/// <param name="file">Path to the CSV training data.</param>
public static void Test(
    string file,
    int numberOfInputNeurons,
    int numberOfHiddenNeurons,
    int numberOfOutputNeurons,
    int numberOfCycles = 50000,
    double learningRate = 0.25)
{
    TrainingSample sample = new TrainingSample(new double[] { }, new double[] { }); // We might make a GUI for this later.
    double[] errorList = new double[numberOfCycles];
    int totalNumberOfNeurons = numberOfInputNeurons + numberOfOutputNeurons;

    LinearLayer inputLayer = new LinearLayer(numberOfInputNeurons);
    SigmoidLayer hiddenLayer = new SigmoidLayer(numberOfHiddenNeurons);
    SigmoidLayer outputLayer = new SigmoidLayer(numberOfOutputNeurons);

    // The connectors act as event handlers that fire when output is generated, hence backpropagation.
    BackpropagationConnector conn1 = new BackpropagationConnector(inputLayer, hiddenLayer);
    BackpropagationConnector conn2 = new BackpropagationConnector(hiddenLayer, outputLayer);

    BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(learningRate);

    TrainingSet trainingSet = new TrainingSet(10, 8);

    // Read the CSV training data. Columns 0-9 feed the input vector and
    // columns 11-18 the output vector (column 10 is skipped).
    var inDefaule = Console.In;
    using (StreamReader reader = new StreamReader(file))
    {
        Console.SetIn(reader);
        String line = "";
        while ((line = reader.ReadLine()) != null)
        {
            String[] array = line.Split(',');
            double[] inputArray = new double[10];
            double[] outputArray = new double[8];
            for (int i = 0; i < 10; i++)
            {
                inputArray[i] = Convert.ToDouble(array[i]);
            }
            for (int i = 0; i < 8; i++)
            {
                outputArray[i] = Convert.ToDouble(array[i + 11]);
            }
            trainingSet.Add(new TrainingSample(inputArray, outputArray));
        }
    }

    double max = 0;
    // Anonymous handler: capture the error value of each iteration.
    network.EndEpochEvent += delegate(object networkInput, TrainingEpochEventArgs args)
    {
        errorList[args.TrainingIteration] = network.MeanSquaredError;
        max = Math.Max(max, network.MeanSquaredError);
        // PercentComplete = args.TrainingIteration * 100 / numberOfCycles;
    };
    network.Learn(trainingSet, numberOfCycles);

    // Dump the per-epoch error values for offline evaluation/plotting of the learning curve.
    StreamUtilities.DumpData("dumpErrorValues.txt", errorList);

    double[] outputResult = network.OutputLayer.GetOutput();
    outputResult = network.Run(new double[] { 0.47, 0.41, 0.12, 0.05, 0.1, 0.5, 0.1, 0.1, 0.05, 0.1 });
    foreach (var d in outputResult)
    {
        Console.WriteLine("output: " + d);
    }
}
static void Main(string[] args)
{
    Console.WriteLine("{0:.10}, {1}", "Hello", "World");

    // Step 1 (alternative A): Building a training set manually
    // ---------------------------------------------------------
    int inputVectorLength = 2;
    int outputVectorLength = 1;
    TrainingSet trainingSet = new TrainingSet(inputVectorLength, outputVectorLength);
    TrainingPattern trainingPattern = new TrainingPattern(new double[2] { 0.0, 0.0 }, new double[1] { 0.0 });
    trainingSet.Add(trainingPattern);
    trainingPattern = new TrainingPattern(new double[2] { 0.0, 1.0 }, new double[1] { 1.0 });
    trainingSet.Add(trainingPattern);
    trainingPattern = new TrainingPattern(new double[2] { 1.0, 0.0 }, new double[1] { 1.0 });
    trainingSet.Add(trainingPattern);
    trainingPattern = new TrainingPattern(new double[2] { 1.0, 1.0 }, new double[1] { 0.0 });
    trainingSet.Add(trainingPattern);

    // Step 2: Building a blueprint of a network
    // ------------------------------------------
    LayerBlueprint inputLayerBlueprint = new LayerBlueprint(inputVectorLength);
    ActivationLayerBlueprint[] hiddenLayerBlueprints = new ActivationLayerBlueprint[1];
    hiddenLayerBlueprints[0] = new ActivationLayerBlueprint(2, new LogisticActivationFunction());
    ActivationLayerBlueprint outputLayerBlueprint = new ActivationLayerBlueprint(outputVectorLength, new LogisticActivationFunction());
    NetworkBlueprint networkBlueprint = new NetworkBlueprint(inputLayerBlueprint, hiddenLayerBlueprints, outputLayerBlueprint);

    // Step 3: Building a network
    // ---------------------------
    Network network = new Network(networkBlueprint);
    Console.WriteLine(network.ToString());

    // Step 4: Building a teacher
    // ---------------------------
    ITeacher teacher = new AntColonyOptimizationTeacher(trainingSet, null, null);

    // Step 5: Training the network
    // -----------------------------
    int maxIterationCount = 10000;
    double maxTolerableNetworkError = 1e-3;
    TrainingLog trainingLog = teacher.Train(network, maxIterationCount, maxTolerableNetworkError);
    Console.WriteLine("Number of runs used : " + trainingLog.RunCount);
    Console.WriteLine("Number of iterations used : " + trainingLog.IterationCount);
    Console.WriteLine("Minimum network error achieved : " + trainingLog.NetworkError);

    // Step 6: Using the trained network
    // ----------------------------------
    foreach (TrainingPattern tp in trainingSet.TrainingPatterns)
    {
        double[] inputVector = tp.InputVector;
        double[] outputVector = network.Evaluate(inputVector);
        Console.WriteLine(tp.ToString() + " -> " + TrainingPattern.VectorToString(outputVector));
    }
}
private void Start(object sender, EventArgs e)
{
    CleanseGraph();
    EnableControls(false);
    curve.Color = enabledColor;

    // Parse the UI parameters, falling back to defaults on bad input.
    if (!int.TryParse(txtCycles.Text, out cycles)) { cycles = 10000; }
    if (!double.TryParse(txtLearningRate.Text, out learningRate)) { learningRate = 0.25d; }
    if (!int.TryParse(txtNeuronCount.Text, out neuronCount)) { neuronCount = 10; }
    if (cycles <= 0) { cycles = 10000; }
    if (learningRate < 0 || learningRate > 1) { learningRate = 0.25d; }
    if (neuronCount <= 0) { neuronCount = 10; }
    txtCycles.Text = cycles.ToString();
    txtLearningRate.Text = learningRate.ToString();
    txtNeuronCount.Text = neuronCount.ToString();

    LinearLayer inputLayer = new LinearLayer(1);
    SigmoidLayer hiddenLayer = new SigmoidLayer(neuronCount);
    SigmoidLayer outputLayer = new SigmoidLayer(1);
    new BackpropagationConnector(inputLayer, hiddenLayer).Initializer = new RandomFunction(0d, 0.3d);
    new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
    network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(learningRate);

    // For each curve point, add samples in a small window around its x value.
    TrainingSet trainingSet = new TrainingSet(1, 1);
    for (int i = 0; i < curve.Points.Count; i++)
    {
        double xVal = curve.Points[i].X;
        for (double input = xVal - 0.05; input < xVal + 0.06; input += 0.01)
        {
            trainingSet.Add(new TrainingSample(new double[] { input }, new double[] { curve.Points[i].Y }));
        }
    }

    network.EndEpochEvent += new TrainingEpochEventHandler(
        delegate(object senderNetwork, TrainingEpochEventArgs args)
        {
            trainingProgressBar.Value = (int)(args.TrainingIteration * 100d / cycles);
            Application.DoEvents();
        });
    network.Learn(trainingSet, cycles);
    StopLearning(this, EventArgs.Empty);
}
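Once training in the snippet above finishes, the learned curve approximation can be sampled pointwise with Run. A minimal sketch; the [0, 1] domain and the 0.01 step are illustrative assumptions, not values from the original source:

// Illustrative only: sample the trained 1-input/1-output network across [0, 1].
for (double x = 0d; x <= 1d; x += 0.01)
{
    double y = network.Run(new double[] { x })[0];
    Console.WriteLine(x.ToString("0.00") + " -> " + y.ToString("0.000"));
}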
/// <summary>
/// Constructs a training procedure for standard backpropagation techniques.
/// More advanced ones will be used, as seen in the example.
/// </summary>
/// <param name="writer"></param>
public TestingNdn(StreamWriter writer)
{
    TrainingSample sample = new TrainingSample(new double[] { }, new double[] { }); // We might make a GUI for this later.

    int numberOfNeurons = 3;
    double learningRate = 0.5;
    int numberOfCycles = 10000;
    double[] errorList = new double[numberOfCycles];

    LinearLayer inputLayer = new LinearLayer(2);
    SigmoidLayer hiddenLayer = new SigmoidLayer(numberOfNeurons);
    SigmoidLayer outputLayer = new SigmoidLayer(1);

    // The connectors act as event handlers that fire when output is generated, hence backpropagation.
    BackpropagationConnector conn1 = new BackpropagationConnector(inputLayer, hiddenLayer);
    BackpropagationConnector conn2 = new BackpropagationConnector(hiddenLayer, outputLayer);

    BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(learningRate);

    // XOR training set.
    TrainingSet trainingSet = new TrainingSet(2, 1);
    trainingSet.Add(new TrainingSample(new double[2] { 0, 0 }, new double[1] { 0 }));
    trainingSet.Add(new TrainingSample(new double[2] { 0, 1 }, new double[1] { 1 }));
    trainingSet.Add(new TrainingSample(new double[2] { 1, 0 }, new double[1] { 1 }));
    trainingSet.Add(new TrainingSample(new double[2] { 1, 1 }, new double[1] { 0 }));

    double max = 0;
    // Anonymous handler: capture the error value of each iteration and report the percent complete.
    network.EndEpochEvent += delegate(object networkInput, TrainingEpochEventArgs args)
    {
        errorList[args.TrainingIteration] = network.MeanSquaredError;
        max = Math.Max(max, network.MeanSquaredError);
        PercentComplete = args.TrainingIteration * 100 / numberOfCycles;
    };
    network.Learn(trainingSet, numberOfCycles);

    // errorList now holds the per-epoch error, e.g. for plotting the learning curve.
    double[] outputResult = network.OutputLayer.GetOutput();
    Console.WriteLine("final output");
    double[] r1 = new double[] { 0, 0 };
    double[] r2 = new double[] { 0, 1 };
    double[] r3 = new double[] { 1, 0 };
    double[] r4 = new double[] { 1, 1 };
    Console.WriteLine(" 0 0 => " + network.Run(r1)[0]);
    Console.WriteLine(" 0 1 => " + network.Run(r2)[0]);
    Console.WriteLine(" 1 0 => " + network.Run(r3)[0]);
    Console.WriteLine(" 1 1 => " + network.Run(r4)[0]);
}
void Solve()
{
    CrowNetP NetP = new CrowNetP();
    if (netUP.netType == "som")
    {
        #region self organizing maps

        #region prepare and assign
        trainingSet.Clear();
        int trainVectorDimension = 3;
        if (trainDataArePoints)
        {
            for (int i = 0; i < pointsList.Count; i++)
            {
                trainingSet.Add(new TrainingSample(new double[] { pointsList[i].Value.X, pointsList[i].Value.Y, pointsList[i].Value.Z }));
            }
        }
        else
        {
            trainVectorDimension = trainingVectorTree.Branches[0].Count;
            trainingSet = new TrainingSet(trainVectorDimension);
            for (int i = 0; i < trainingVectorTree.Branches.Count; i++)
            {
                double[] values = new double[trainVectorDimension];
                for (int j = 0; j < trainVectorDimension; j++)
                {
                    values[j] = trainingVectorTree.Branches[i][j].Value;
                }
                trainingSet.Add(new TrainingSample(values));
            }
        }

        // Process: start learning.
        int learningRadius = Math.Max(layerWidth, layerHeight) / 2;
        INeighborhoodFunction neighborhoodFunction = new GaussianFunction(learningRadius, netUP.neighborDistance) as INeighborhoodFunction;
        if (neighborhood)
        {
            neighborhoodFunction = new MexicanHatFunction(learningRadius) as INeighborhoodFunction;
        }
        LatticeTopology topology = LatticeTopology.Rectangular;
        if (latticeTopology)
        {
            topology = LatticeTopology.Hexagonal;
        }

        KohonenLayer inputLayer = new KohonenLayer(trainVectorDimension);
        KohonenLayer outputLayer = new KohonenLayer(new Size(layerWidth, layerHeight), neighborhoodFunction, topology);
        KohonenConnector connector = new KohonenConnector(inputLayer, outputLayer);
        connector.Initializer = randomizer;
        outputLayer.SetLearningRate(learningRate, 0.05d);
        outputLayer.IsRowCircular = isCircularRows;
        outputLayer.IsColumnCircular = isCircularColumns;
        network = new KohonenNetwork(inputLayer, outputLayer);
        network.useRandomTrainingOrder = opt.UseRandomTraining;
        #endregion

        #region delegates
        network.BeginEpochEvent += new TrainingEpochEventHandler(
            delegate(object senderNetwork, TrainingEpochEventArgs args)
            {
                #region TrainingCycle
                if (network == null || !GO) { return; }
                int iPrev = layerWidth - 1;
                allValuesTree = new GH_Structure<GH_Number>();
                for (int i = 0; i < layerWidth; i++)
                {
                    for (int j = 0; j < layerHeight; j++)
                    {
                        IList<ISynapse> synapses = (network.OutputLayer as KohonenLayer)[i, j].SourceSynapses;
                        double x = synapses[0].Weight;
                        double y = synapses[1].Weight;
                        double z = synapses[2].Weight;
                        for (int k = 0; k < trainVectorDimension; k++)
                        {
                            allValuesTree.Append(new GH_Number(synapses[k].Weight), new GH_Path(i, j));
                        }
                        rowX[j][i] = x;
                        rowY[j][i] = y;
                        rowZ[j][i] = z;
                        columnX[i][j] = x;
                        columnY[i][j] = y;
                        columnZ[i][j] = z;
                        if (j % 2 == 1)
                        {
                            hexagonalX[i][j] = x;
                            hexagonalY[i][j] = y;
                            hexagonalZ[i][j] = z;
                        }
                        else
                        {
                            hexagonalX[iPrev][j] = x;
                            hexagonalY[iPrev][j] = y;
                            hexagonalZ[iPrev][j] = z;
                        }
                    }
                    iPrev = i;
                }
                if (isCircularRows)
                {
                    for (int i = 0; i < layerHeight; i++)
                    {
                        rowX[i][layerWidth] = rowX[i][0];
                        rowY[i][layerWidth] = rowY[i][0];
                        rowZ[i][layerWidth] = rowZ[i][0];
                    }
                }
                if (isCircularColumns)
                {
                    for (int i = 0; i < layerWidth; i++)
                    {
                        columnX[i][layerHeight] = columnX[i][0];
                        columnY[i][layerHeight] = columnY[i][0];
                        columnZ[i][layerHeight] = columnZ[i][0];
                        hexagonalX[i][layerHeight] = hexagonalX[i][0];
                        hexagonalY[i][layerHeight] = hexagonalY[i][0];
                        hexagonalZ[i][layerHeight] = hexagonalZ[i][0];
                    }
                }
                Array.Clear(isWinner, 0, layerHeight * layerWidth);
                #endregion
                NetP = new CrowNetP("som", layerWidth, layerHeight, isCircularRows, isCircularColumns, latticeTopology, neighborhood, isWinner, rowX, rowY, rowZ, columnX, columnY, columnZ, hexagonalX, hexagonalY, hexagonalZ, allValuesTree);
                counter++;
            });
        network.EndSampleEvent += new TrainingSampleEventHandler(
            delegate(object senderNetwork, TrainingSampleEventArgs args)
            {
                isWinner[network.Winner.Coordinate.X, network.Winner.Coordinate.Y] = true;
            });
        #endregion

        #endregion
    }
    network.Learn(trainingSet, cycles);
}
void Solve()
{
    #region prepare and assign
    trainingSet.Clear();
    for (int i = 0; i < trainVectorCount; i++)
    {
        List<double> dl = new List<double>();
        for (int j = 0; j < trainVectorDimension; j++)
        {
            dl.Add(trainVectors[i][j]);
        }
        trainingSet.Add(new TrainingSample(dl.ToArray()));
    }

    // Process: start learning.
    // Get the learning radius for the neighborhood function.
    int learningRadius = 0;
    for (int i = 0; i < dimension; i++)
    {
        if (size[i] > learningRadius) { learningRadius = size[i]; }
    }
    learningRadius /= 2;

    INeighborhoodFunction neighborhoodFunction = new GaussianFunction(learningRadius, netUP.neighborDistance) as INeighborhoodFunction;
    if (neighborhood)
    {
        neighborhoodFunction = new MexicanHatFunction(learningRadius) as INeighborhoodFunction;
    }
    LatticeTopology topology = LatticeTopology.Rectangular;
    if (latticeTopology)
    {
        topology = LatticeTopology.Hexagonal;
    }

    // Instantiate the relevant network layers.
    KohonenLayer inputLayer = new KohonenLayer(trainVectorDimension);
    KohonenLayerND outputLayer = new KohonenLayerND(size, neighborhoodFunction, topology);
    KohonenConnectorND connector = new KohonenConnectorND(inputLayer, outputLayer, netUP.initialNodes);
    if (netUP.initialNodes.Length != 0)
    {
        connector.Initializer = new GivenInput(netUP.initialNodes);
    }
    else
    {
        connector.Initializer = new RandomFunction(0.0, 1.0);
    }
    outputLayer.SetLearningRate(learningRate, 0.05d);
    outputLayer.IsDimensionCircular = isDimensionCircular;
    network = new KohonenNetworkND(inputLayer, outputLayer);
    network.useRandomTrainingOrder = randomTrainingOrder;
    inputLayer.ParallelComputation = false;
    outputLayer.ParallelComputation = parallelComputing;
    #endregion

    #region delegates
    network.BeginEpochEvent += new TrainingEpochEventHandler(
        delegate(object senderNetwork, TrainingEpochEventArgs args)
        {
            #region trainingCycle
            if (network == null || !GO) { return; }
            trainedVectors = new double[outputLayer.neuronCount, trainVectorDimension];
            for (int i = 0; i < outputLayer.neuronCount; i++)
            {
                IList<ISynapse> synapses = (network.OutputLayer as KohonenLayerND)[outputLayer.adressBook[i]].SourceSynapses;
                for (int j = 0; j < trainVectorDimension; j++)
                {
                    trainedVectors[i, j] = synapses[j].Weight;
                }
            }

            // Make the new net here.
            netP = new CrowNetSOMNDP(size, isDimensionCircular, latticeTopology, neighborhood, trainedVectors, outputLayer.adressBook);
            counter++;
            #endregion
        });

    network.EndSampleEvent += new TrainingSampleEventHandler(
        delegate(object senderNetwork, TrainingSampleEventArgs args)
        {
            netP.winner = outputLayer.WinnerND.CoordinateND;
        });
    #endregion

    network.Learn(trainingSet, cycles);
}
/// <summary>
/// Creates a random testing dataset. Calling this
/// method will destroy any previously built testing set.
/// </summary>
/// <param name="percentage">Percentage of the entire dataset to use for testing.</param>
/// <param name="balanced">Whether to create a balanced
/// testing set, where the testing set and the
/// remaining training set have the same proportion
/// of each class.</param>
public void createRndTestSet(int percentage, bool balanced)
{
    if (percentage < 0 || percentage > 100)
    {
        throw new Exception("Percentage value out of range.");
    }

    // Move any examples that are part of the current testing set back to the training set.
    for (int i = 0; i < TestingSet.Count; i++)
    {
        TrainingSet.Add((int[])TestingSet[i]);
    }
    TestingSet.Clear();

    // Calculate the number of examples that should be in the testing set.
    int totalNumExamples = TrainingSet.Count;
    int numTestingExamples = (int)Math.Round(totalNumExamples * ((float)percentage) / 100.0f);
    Random rand = new Random();

    if (!balanced)
    {
        // If the set doesn't have to be balanced, just pick examples at random.
        for (int i = 0; i < numTestingExamples; i++)
        {
            int temp = rand.Next(TrainingSet.Count);
            TestingSet.Add((int[])TrainingSet[temp]);
            TrainingSet.RemoveAt(temp);
        }
    }
    else
    {
        // We have the target value distribution for the dataset, so reference it.
        for (int i = 0; i < TargetSums.Length; i++)
        {
            int numExamplesToMove = (int)Math.Round(TargetSums[i] / ((float)totalNumExamples) * numTestingExamples);
            for (int j = 0; j < numExamplesToMove; j++)
            {
                // Randomly pick examples from the dataset that have the
                // required target classification.
                int[] example = null;
                while (true)
                {
                    example = (int[])TrainingSet[rand.Next(TrainingSet.Count)];
                    if (example[0] == i) { break; }
                }
                int temp = TrainingSet.IndexOf(example);
                TestingSet.Add((int[])TrainingSet[temp]);
                TrainingSet.RemoveAt(temp);
            }
        }
    }
}
private void Train(object sender, EventArgs e)
{
    // btnTrain.Enabled = false;
    int cycles = 200;
    // if (!int.TryParse(txtCycles.Text, out cycles)) { cycles = 200; }
    // txtCycles.Text = cycles.ToString();
    int currentCombination = 0;
    //int totalCombinations = Alphabet.LetterCount * (Alphabet.LetterCount - 1) / 2;

    // Train one pairwise discriminator network per letter pair.
    for (int i = 0; i < Alphabet.LetterCount; i++)
    {
        for (int j = i + 1; j < Alphabet.LetterCount; j++)
        {
            ActivationLayer inputLayer = new LinearLayer(400);
            ActivationLayer hiddenLayer = new SigmoidLayer(4);
            ActivationLayer outputLayer = new SigmoidLayer(2);
            new BackpropagationConnector(inputLayer, hiddenLayer);
            new BackpropagationConnector(hiddenLayer, outputLayer);
            BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);

            TrainingSet trainingSet = new TrainingSet(400, 2);
            Alphabet ithLetter = Alphabet.GetLetter(i);
            Alphabet jthLetter = Alphabet.GetLetter(j);
            foreach (Letter instance in ithLetter.Instances)
            {
                trainingSet.Add(new TrainingSample(instance.GetEquivalentVector(20, 20), new double[] { 1d, 0d }));
            }
            foreach (Letter instance in jthLetter.Instances)
            {
                trainingSet.Add(new TrainingSample(instance.GetEquivalentVector(20, 20), new double[] { 0d, 1d }));
            }
            //progressTraining.Value = 100 * currentCombination / totalCombinations;
            Application.DoEvents();

            // Retrain with doubled cycle counts until every sample separates
            // its two outputs by at least 0.4 (misclassified samples are
            // re-added so they weigh more in the next pass).
            bool correct = false;
            int currentCycles = 35;
            int count = trainingSet.TrainingSampleCount;
            while (correct == false && currentCycles <= cycles)
            {
                network.Initialize();
                network.Learn(trainingSet, currentCycles);
                correct = true;
                for (int sampleIndex = 0; sampleIndex < count; sampleIndex++)
                {
                    double[] op = network.Run(trainingSet[sampleIndex].InputVector);
                    if (((trainingSet[sampleIndex].OutputVector[0] > trainingSet[sampleIndex].OutputVector[1]) && op[0] - op[1] < 0.4) ||
                        ((trainingSet[sampleIndex].OutputVector[0] < trainingSet[sampleIndex].OutputVector[1]) && op[1] - op[0] < 0.4))
                    {
                        correct = false;
                        trainingSet.Add(trainingSet[sampleIndex]);
                    }
                }
                currentCycles *= 2;
            }
            //lstLog.Items.Add(cboAplhabet.Items[i] + " & " + cboAplhabet.Items[j] + " = " + network.MeanSquaredError.ToString("0.0000"));
            //lstLog.TopIndex = lstLog.Items.Count - (int)(lstLog.Height / lstLog.ItemHeight);

            // Serialize the trained network for this letter pair.
            try
            {
                using (Stream stream = File.Open(Application.StartupPath + @"\Networks\" + i.ToString("00") + j.ToString("00") + ".ndn", FileMode.Create))
                {
                    IFormatter formatter = new BinaryFormatter();
                    formatter.Serialize(stream, network);
                }
            }
            catch (Exception)
            {
                MessageBox.Show("Failed to save trained neural networks", "Critical Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }
            currentCombination++;
        }
    }
    // progressTraining.Value = 0;
    // btnTrain.Enabled = false;
}
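The pairwise networks serialized above can be restored with the same BinaryFormatter convention. A minimal sketch; the pair indices and the someLetter variable are illustrative assumptions:

// Illustrative only: deserialize the trained network for letter pair (00, 01)
// and classify a hypothetical letter instance.
using (Stream stream = File.Open(Application.StartupPath + @"\Networks\0001.ndn", FileMode.Open))
{
    IFormatter formatter = new BinaryFormatter();
    BackpropagationNetwork network = (BackpropagationNetwork)formatter.Deserialize(stream);
    double[] op = network.Run(someLetter.GetEquivalentVector(20, 20)); // someLetter is hypothetical
    Console.WriteLine(op[0] > op[1] ? "first letter of pair" : "second letter of pair");
}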
public static TrainingSet CsvFileToTrainingSet(string path, ref int inputCount, ref int outputCount)
{
    Regex split = new Regex(",");
    string line;
    string[] cols;
    bool headingPresent = false;
    double dbl;

    // Default the output count to 1.
    if (outputCount == 0)
    {
        outputCount = 1;
    }

    // If inputCount is 0, parse the file to derive it from the column count,
    // and also determine whether a heading row is present.
    using (StreamReader sr = new StreamReader(path))
    {
        line = sr.ReadLine();
        cols = split.Split(line);
        if (!double.TryParse(cols[0], out dbl))
        {
            headingPresent = true;
        }
        if (inputCount == 0)
        {
            inputCount = cols.Length - 1;
        }
    }

    // Declare a TrainingSet buffer.
    TrainingSet ts = new TrainingSet(inputCount, outputCount);

    // Loop over the contents and load them into the TrainingSet.
    using (StreamReader sr = new StreamReader(path))
    {
        // Skip the heading row if present.
        if (headingPresent)
        {
            line = sr.ReadLine();
        }

        // Read on and fill the TrainingSet.
        while ((line = sr.ReadLine()) != null)
        {
            cols = split.Split(line);

            // Check that we parsed enough columns.
            if (cols.Length < inputCount + outputCount)
            {
                throw new Exception("Not enough columns in the input data!");
            }

            // Move the string columns into fresh vectors. (Allocating inside the
            // loop matters: the original reused one array for every sample, so
            // all samples would alias the same data if TrainingSample keeps a
            // reference rather than a copy.)
            double[] inputVector = new double[inputCount];
            double[] outputVector = new double[outputCount];
            for (int index = 0; index < inputCount; index++)
            {
                inputVector[index] = double.Parse(cols[index]);
            }
            for (int index = 0; index < outputCount; index++)
            {
                outputVector[index] = double.Parse(cols[inputCount + index]);
            }

            // Add to the training set as a new TrainingSample.
            ts.Add(new TrainingSample(inputVector, outputVector));
        }
    }
    return ts;
}
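A hedged usage sketch for CsvFileToTrainingSet; the file name is illustrative. Passing zeros through the ref parameters lets the helper infer the input width from the CSV itself and default the output count to 1, as described above:

// Illustrative only: let the helper infer inputCount and default outputCount.
int inputCount = 0;
int outputCount = 0;
TrainingSet ts = CsvFileToTrainingSet("data.csv", ref inputCount, ref outputCount);
Console.WriteLine("Loaded " + inputCount + " inputs and " + outputCount + " output(s) per sample.");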
/// <summary>
/// Generates the training data for the training sunspot years.
/// </summary>
/// <returns>The training data.</returns>
public TrainingSet GenerateTraining()
{
    TrainingSet result = new TrainingSet(WINDOW_SIZE, 1);
    for (int year = TRAIN_START; year < TRAIN_END; year++)
    {
        double[] input = new double[WINDOW_SIZE];
        double[] ideal = new double[1];

        // The input window holds the WINDOW_SIZE normalized values preceding
        // `year`; the ideal output is the value for `year` itself.
        int index = 0;
        for (int i = year - WINDOW_SIZE; i < year; i++)
        {
            input[index++] = this.normalizedSunspots[i];
        }
        ideal[0] = this.normalizedSunspots[year];
        result.Add(new SupervisedTrainingElement(input, ideal));
    }
    return result;
}
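The evaluation half of such a split typically mirrors the same sliding window over the held-out years. A minimal sketch of a hypothetical counterpart under the same windowing convention; GenerateTest, TEST_START, and TEST_END are assumptions for illustration, not part of the original source:

/// <summary>
/// Hypothetical counterpart: generates evaluation data over the test years.
/// </summary>
public TrainingSet GenerateTest()
{
    TrainingSet result = new TrainingSet(WINDOW_SIZE, 1);
    for (int year = TEST_START; year < TEST_END; year++)
    {
        // Same convention as GenerateTraining: a WINDOW_SIZE look-back window
        // predicts the normalized value for `year`.
        double[] input = new double[WINDOW_SIZE];
        int index = 0;
        for (int i = year - WINDOW_SIZE; i < year; i++)
        {
            input[index++] = this.normalizedSunspots[i];
        }
        double[] ideal = { this.normalizedSunspots[year] };
        result.Add(new SupervisedTrainingElement(input, ideal));
    }
    return result;
}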