// Builds and trains the curve-fitting network: 1 linear input ->
// `neuronCount` sigmoid hidden -> 1 sigmoid output.
void InitNeuralNet()
{
    LinearLayer inLayer = new LinearLayer(1);
    SigmoidLayer hidLayer = new SigmoidLayer(neuronCount);
    SigmoidLayer outLayer = new SigmoidLayer(1);

    // Connect the layers; weights start as small random values in [0, 0.3).
    new BackpropagationConnector(inLayer, hidLayer).Initializer = new RandomFunction(0d, 0.3d);
    new BackpropagationConnector(hidLayer, outLayer).Initializer = new RandomFunction(0d, 0.3d);

    network = new BackpropagationNetwork(inLayer, outLayer);
    network.SetLearningRate(learningRate);

    // Training set: for every curve point, add samples in a small neighborhood
    // around its X (±0.05 in 0.01 steps), all mapping to the same Y.
    TrainingSet trainingSet = new TrainingSet(inputVectorSize, outVectorSize);
    for (int i = 0; i < curve.Points.Count; i++)
    {
        double xVal = curve.Points[i].X;
        for (double input = xVal - 0.05; input < xVal + 0.06; input += 0.01)
        {
            trainingSet.Add(new TrainingSample(new double[] { input }, new double[] { curve.Points[i].Y }));
        }
    }

    // Refresh the progress bar after each epoch.
    network.EndEpochEvent += new TrainingEpochEventHandler(
        delegate(object senderNetwork, TrainingEpochEventArgs args)
        {
            trainingProgressBar.Value = (int)(args.TrainingIteration * 100d / cycles);
            Application.DoEvents();
        });

    network.Learn(trainingSet, cycles);
}
/// <summary>
/// Trains a backpropagation network from V3 training samples built from the
/// given market data, excluding one cross-validation fold, then serializes the
/// trained network to disk as an .ndn file.
/// </summary>
/// <param name="papel">Stock symbol (not used in this body — TODO confirm).</param>
/// <param name="nomeRedeNeural">File name (without extension) for the saved network.</param>
/// <param name="dadosBE">Raw market data used to build the training samples.</param>
/// <param name="numeroNeuronios">Hidden-layer neuron count.</param>
/// <param name="taxaAprendizado">Learning rate.</param>
/// <param name="ciclos">Number of training cycles.</param>
/// <param name="numeroDivisoesCrossValidation">Fold count (not used in this body — TODO confirm).</param>
/// <param name="shift">Index of the cross-validation fold to hold out.</param>
/// <param name="versao">Sample-layout version passed to SelecionarTreinamentos_V3.</param>
public static void Treinar(string papel, string nomeRedeNeural, List<DadosBE> dadosBE, int numeroNeuronios, double taxaAprendizado, int ciclos, int numeroDivisoesCrossValidation, int shift, double versao)
{
    if (dadosBE.Count == 0)
        return;

    // Build training samples and drop the fold reserved for validation.
    List<Treinamento> treinamentos = DataBaseUtils.DataBaseUtils.SelecionarTreinamentos_V3(dadosBE, versao);
    treinamentos = treinamentos.Where(trein => trein.DivisaoCrossValidation != shift).ToList();

    // Layer sizes are inferred from the first sample's vector lengths.
    int inputLayerCount = treinamentos.First().Input.Count();
    int outputLayerCount = treinamentos.First().Output.Count();

    BackpropagationNetwork network;
    //int numeroNeuronios = 4;
    //double taxaAprendizado = 0.25d;
    ActivationLayer inputLayer = new LinearLayer(inputLayerCount);
    ActivationLayer hiddenLayer = new SigmoidLayer(numeroNeuronios);
    ActivationLayer outputLayer = new SigmoidLayer(outputLayerCount);

    // Small random initial weights in [0, 0.3).
    new BackpropagationConnector(inputLayer, hiddenLayer).Initializer = new RandomFunction(0d, 0.3d);
    new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
    network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(taxaAprendizado);

    TrainingSet trainingSet = new TrainingSet(inputLayerCount, outputLayerCount);
    //foreach (KeyValuePair<double[], double[]> kvp in dadosPorJanelamento)
    //{
    //    trainingSet.Add(new TrainingSample(kvp.Key, kvp.Value));
    //}
    foreach (Treinamento treinamento in treinamentos)
    {
        trainingSet.Add(new TrainingSample(treinamento.Input.ToArray(), treinamento.Output.ToArray()));
    }

    network.EndEpochEvent += new TrainingEpochEventHandler(
        delegate(object senderNetwork, TrainingEpochEventArgs argsNw)
        {
            //trainingProgressBar.Value = (int)(argsNw.TrainingIteration * 100d / cycles);
            //Application.DoEvents();
        });
    network.Learn(trainingSet, ciclos);

    // NOTE(review): the mean relative error below is computed but never used,
    // returned, or stored — dead computation; confirm intent before removing.
    double erroGeralRede = 0;
    foreach (Treinamento treinamento in treinamentos)
    {
        double[] previsao = network.Run(treinamento.Input.ToArray());
        // Relative error: 1 - min/max of (prediction, expected).
        double erroRede = 1 - Math.Min(previsao.First(), treinamento.Output.First()) /
            Math.Max(previsao.First(), treinamento.Output.First());
        erroGeralRede += erroRede;
    }
    erroGeralRede = erroGeralRede / treinamentos.Count;

    // Persist the trained network via BinaryFormatter (.ndn file).
    using (Stream stream = File.Open(diretorioRedes + "\\RedesPrevisaoFinanceira\\" + nomeRedeNeural + ".ndn", FileMode.Create))
    {
        IFormatter formatter = new BinaryFormatter();
        formatter.Serialize(stream, network);
    }
}
// Default constructor: builds the cell's fixed-topology network
// (linear input -> sigmoid hidden -> sigmoid output) and stamps the birth date.
public Cell()
{
    var input = new LinearLayer(INPUT_SIZE);
    var hidden = new SigmoidLayer(neuronCount);
    var output = new SigmoidLayer(OUTPUT_SIZE);

    // Small random initial weights in [0, 0.3).
    new BackpropagationConnector(input, hidden).Initializer = new RandomFunction(0d, 0.3d);
    new BackpropagationConnector(hidden, output).Initializer = new RandomFunction(0d, 0.3d);

    network = new BackpropagationNetwork(input, output);
    setBirthDate();
}
// Wires the (field-held) layers into a network and keeps a deep copy of the
// freshly-initialized state so it can be restored later.
private void createNetworks()
{
    var toHidden = new BackpropagationConnector(inputLayer, hiddenLayer);
    toHidden.Initializer = new RandomFunction(0d, 0.3d);

    var toOutput = new BackpropagationConnector(hiddenLayer, outputLayer);
    toOutput.Initializer = new RandomFunction(0d, 0.3d);

    network = new BackpropagationNetwork(inputLayer, outputLayer);
    copyOfNetwork = ObjectCopier.Clone<BackpropagationNetwork>(network);
}
// Builds `netamp`: linear input (npu units) -> one hidden layer per entry in
// `layers` (activation chosen by `valf`) -> sigmoid output (opt units).
// NOTE: `all` is not used in this body (kept for API compatibility).
// Removed the unused local `referee` list (it was built and never read).
public void shrit(bool all = true)
{
    LinearLayer inputLayer = new LinearLayer(npu);
    ActivationLayer last = null;
    for (int i = 0; i < layers.Count; i++)
    {
        ActivationLayer hiddenLayer = null;
        // BUG FIX: each hidden layer now uses its own size layers[i]; the
        // original used layers[0] everywhere, so every hidden layer got the
        // first entry's neuron count.
        switch (valf)
        {
            case acti.Sigmoid:
                hiddenLayer = new SigmoidLayer(layers[i]);
                break;
            case acti.tanh:
                hiddenLayer = new TanhLayer(layers[i]);
                break;
            case acti.Logarith:
                hiddenLayer = new LogarithmLayer(layers[i]);
                break;
            case acti.Sine:
                hiddenLayer = new SineLayer(layers[i]);
                break;
            case acti.Linear:
                hiddenLayer = new LinearLayer(layers[i]);
                break;
            default:
                break;
        }
        // Chain the layers: input -> first hidden, then hidden -> hidden.
        if (last == null)
        {
            new BackpropagationConnector(inputLayer, hiddenLayer);
        }
        else
        {
            new BackpropagationConnector(last, hiddenLayer);
        }
        last = hiddenLayer;
    }
    ActivationLayer outputLayer = new SigmoidLayer(opt);
    // With no hidden layers, connect input directly to output.
    if (last != null)
    {
        new BackpropagationConnector(last, outputLayer);
    }
    else
    {
        new BackpropagationConnector(inputLayer, outputLayer);
    }
    netamp = new BackpropagationNetwork(inputLayer, outputLayer);
}
// Configures `red` as a 4-3-3 network with tanh hidden and output layers and
// resets `datos` to a matching empty training set.
public void Tangensial()
{
    var capaEntrada = new LinearLayer(4);
    var capaOculta = new TanhLayer(3);
    var capaSalida = new TanhLayer(3);

    new BackpropagationConnector(capaEntrada, capaOculta);
    new BackpropagationConnector(capaOculta, capaSalida);

    red = new BackpropagationNetwork(capaEntrada, capaSalida);
    red.SetLearningRate(0.3);
    datos = new TrainingSet(4, 3);
}
// Configures `red` as a 4-3-3 network with sine hidden and output layers and
// resets `datos` to a matching empty training set.
public void Sine()
{
    var capaEntrada = new LinearLayer(4);
    var capaOculta = new SineLayer(3);
    //NeuronDotNet.Core.Backpropagation.
    var capaSalida = new SineLayer(3);

    new BackpropagationConnector(capaEntrada, capaOculta);
    new BackpropagationConnector(capaOculta, capaSalida);

    red = new BackpropagationNetwork(capaEntrada, capaSalida);
    red.SetLearningRate(0.3);
    datos = new TrainingSet(4, 3);
}
// Loads the serialized network (`red`) and its training set (`datos`) from
// disk, updating the progress bar and reporting the result to the user.
public void cargarredneuronal()
{
    try
    {
        pBar.Value = 0;
        //C:\ARCHIVOS ARFF
        // BUG FIX: both streams were opened and never disposed (file-handle
        // leak); each is now wrapped in a `using` block.
        // NOTE(review): BinaryFormatter is obsolete/insecure; kept because the
        // existing .ndn files were written with it.
        using (Stream stream = File.Open(@"C:\ARCHIVOSARFF\red\red.ndn", FileMode.Open))
        {
            //Stream stream = File.Open(@"C:\ARCHIVOS ARFF\iris-datos.txt", FileMode.Open);
            IFormatter formatter = new BinaryFormatter();
            red = (NeuronDotNet.Core.Backpropagation.BackpropagationNetwork)formatter.Deserialize(stream);
        }
        using (Stream stream = File.Open(@"C:\ARCHIVOSARFF\red\data.ndn", FileMode.Open))
        {
            //stream = File.Open(@"C:\ARCHIVOS ARFF\iris-datos-n.txt", FileMode.Open);
            IFormatter formatter = new BinaryFormatter();
            datos = (TrainingSet)formatter.Deserialize(stream);
        }
        // Animate the progress bar to 100%.
        for (int i = 1; i <= 100; i++)
        {
            pBar.Value = i;
        }
        MessageBox.Show("La red se ha cargado!");
    }
    catch (Exception)
    {
        MessageBox.Show("Error al cargar la red neuronal", "Error Crítico", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
// Copy constructor: the new cell takes the clone returned by the given cell's
// getCopyOfNetwork() and stamps its own birth date.
public Cell(CellInterface cell)
{
    this.network = cell.getCopyOfNetwork();
    setBirthDate();
}
/// <summary>
/// Trains a windowed time-series predictor: each sample maps `janela`
/// consecutive values to the next value. Trains in increments of ciclos/5
/// until every window is predicted within tolerance (or the budget runs out),
/// then serializes the network to network.ndn in the current directory.
/// </summary>
/// <param name="dadosTreinamento">Raw series values.</param>
/// <param name="janela">Input window size.</param>
/// <param name="ciclos">Maximum training cycles.</param>
static void Training(List<double> dadosTreinamento, int janela, int ciclos)
{
    if (dadosTreinamento.Count < janela)
        return;

    // Map windows of `janela` inputs to the following value as the target.
    List<KeyValuePair<double[], double>> dadosPorJanelamento = GetCotacoesPorJanelamento(dadosTreinamento, janela);

    BackpropagationNetwork network;
    int neuronCount = 4;
    double learningRate = 0.25d;
    ActivationLayer inputLayer = new LinearLayer(janela);
    SigmoidLayer hiddenLayer = new SigmoidLayer(neuronCount);
    SigmoidLayer outputLayer = new SigmoidLayer(1);
    new BackpropagationConnector(inputLayer, hiddenLayer).Initializer = new RandomFunction(0d, 0.3d);
    new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
    network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(learningRate);

    TrainingSet trainingSet = new TrainingSet(janela, 1);
    foreach (KeyValuePair<double[], double> kvp in dadosPorJanelamento)
    {
        trainingSet.Add(new TrainingSample(kvp.Key, new double[] { kvp.Value }));
    }

    network.EndEpochEvent += new TrainingEpochEventHandler(
        delegate(object senderNetwork, TrainingEpochEventArgs argsNw)
        {
            //trainingProgressBar.Value = (int)(argsNw.TrainingIteration * 100d / cycles);
            //Application.DoEvents();
        });

    bool correct = false;
    int currentCycles = ciclos / 5;
    while (correct == false && currentCycles <= ciclos)
    {
        network.Learn(trainingSet, currentCycles);
        // BUG FIX: `correct` now means "ALL samples within tolerance". The
        // original set it per sample (false on fail, true on pass), so only
        // the LAST sample's result decided whether the loop exited.
        correct = true;
        foreach (KeyValuePair<double[], double> kvp in dadosPorJanelamento)
        {
            double previsao = network.Run(kvp.Key)[0];
            // Tolerance is 0.5% of the target value (value/100 * 0.5); the
            // original comment claiming 5% did not match the code.
            if (Math.Abs(kvp.Value - previsao) > (kvp.Value / 100 * 0.5))
            {
                correct = false;
                // Re-add the failing sample so it is weighted more heavily.
                trainingSet.Add(new TrainingSample(kvp.Key, new double[] { kvp.Value }));
            }
        }
        currentCycles += ciclos / 5;
    }

    // Persist the trained network via BinaryFormatter.
    using (Stream stream = File.Open(System.IO.Directory.GetCurrentDirectory() + @"\network.ndn", FileMode.Create))
    {
        IFormatter formatter = new BinaryFormatter();
        formatter.Serialize(stream, network);
    }
}
/// <summary>
/// Trains a single-layer (input -> output, no hidden layer) network with early
/// stopping on stagnant MSE, serializes it, and returns the training-set
/// accuracy as a percentage (a hit = predicted and expected vectors peak at
/// the same index). Returns -1 when there are no samples.
/// </summary>
/// <param name="nomeRedeNeural">File name (without extension) for the saved network.</param>
/// <param name="treinamentos">Training samples.</param>
/// <param name="numeroNeuronios">Unused here — the hidden layer is commented out; kept for API compatibility.</param>
/// <param name="taxaAprendizado">Learning rate.</param>
/// <param name="ciclos">Maximum training cycles.</param>
public static double Treinar(string nomeRedeNeural, List<Treinamento> treinamentos, int numeroNeuronios, double taxaAprendizado, int ciclos)
{
    if (treinamentos.Count == 0)
        return -1;

    int inputLayerCount = treinamentos.First().Input.Count();
    int outputLayerCount = treinamentos.First().Output.Count();

    BackpropagationNetwork network;
    ActivationLayer inputLayer = new LinearLayer(inputLayerCount);
    //ActivationLayer hiddenLayer = new SigmoidLayer(numeroNeuronios);
    ActivationLayer outputLayer = new LinearLayer(outputLayerCount);
    outputLayer.UseFixedBiasValues = true;
    new BackpropagationConnector(inputLayer, outputLayer).Initializer = new RandomFunction(0, 0.3d);
    //new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0, 0.3d);
    network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(taxaAprendizado);

    TrainingSet trainingSet = new TrainingSet(inputLayerCount, outputLayerCount);
    foreach (Treinamento treinamento in treinamentos)
    {
        trainingSet.Add(new TrainingSample(treinamento.Input.ToArray(), treinamento.Output.ToArray()));
    }

    // Early stopping: every 100 epochs, stop when the MSE grew or improved by
    // less than 1% of its previous value (lastMeanSquareError / 100).
    double lastMeanSquareError = 1;
    network.EndEpochEvent += new TrainingEpochEventHandler(
        delegate(object senderNetwork, TrainingEpochEventArgs argsNw)
        {
            if (argsNw.TrainingIteration > 0 && argsNw.TrainingIteration % 100 == 0)
            {
                // `network` is captured by this closure and is the sender, so
                // the original reflection-based property lookup is unnecessary.
                double erroAt = network.MeanSquaredError;
                if (erroAt > lastMeanSquareError || Math.Abs(lastMeanSquareError - erroAt) < (lastMeanSquareError / 100))
                {
                    network.StopLearning();
                }
                else
                    lastMeanSquareError = erroAt;
            }
        });
    network.Learn(trainingSet, ciclos);

    int numeroAcertos = 0;
    foreach (Treinamento treinamento in treinamentos)
    {
        double[] previsao = network.Run(treinamento.Input.ToArray());
        if (ValoresMaximosNoMesmoIndice(previsao, treinamento.Output.ToArray()))
        {
            numeroAcertos++;
        }
    }
    // BUG FIX: the original `numeroAcertos / treinamentos.Count * 100` was
    // integer division, truncating the accuracy to 0 unless every sample hit.
    double acertoRede = numeroAcertos * 100.0 / treinamentos.Count;

    // Persist the trained network via BinaryFormatter (.ndn file).
    using (Stream stream = File.Open(diretorioRedes + nomeRedeNeural + ".ndn", FileMode.Create))
    {
        IFormatter formatter = new BinaryFormatter();
        formatter.Serialize(stream, network);
    }
    return acertoRede;
}
/// <summary>
/// Trains the network that picks the best network configuration for a symbol
/// on a given day, re-training in ciclos/5 increments until every (distinct)
/// sample's error is acceptable or the cycle budget runs out, then serializes
/// the network to disk.
/// </summary>
/// <param name="nomeRede">File name (without extension) for the saved network.</param>
/// <param name="trainingSet">Pre-built training set; failing samples are re-added to it to weight them more heavily.</param>
private static void TreinarRedeDiaria(string nomeRede, TrainingSet trainingSet)
{
    BackpropagationNetwork network;
    int numeroNeuronios = 4;
    double taxaAprendizado = 0.25d;
    ActivationLayer inputLayer = new LinearLayer(trainingSet.InputVectorLength);
    ActivationLayer hiddenLayer = new SigmoidLayer(numeroNeuronios);
    ActivationLayer outputLayer = new SigmoidLayer(trainingSet.OutputVectorLength);
    new BackpropagationConnector(inputLayer, hiddenLayer).Initializer = new RandomFunction(0d, 0.3d);
    new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
    network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(taxaAprendizado);

    network.EndEpochEvent += new TrainingEpochEventHandler(
        delegate(object senderNetwork, TrainingEpochEventArgs argsNw)
        {
            //trainingProgressBar.Value = (int)(argsNw.TrainingIteration * 100d / cycles);
            //Application.DoEvents();
        });

    bool erroAceito = false;
    int cicloAtual = ciclos / 5;
    while (erroAceito == false && cicloAtual <= ciclos)
    {
        network.Learn(trainingSet, cicloAtual);
        // BUG FIX: start each pass assuming success and flip to false on any
        // failing sample. The original wrote `erroAceito = erroAceito && true`,
        // which can never turn the initial false into true, so the early-exit
        // condition never fired and all increments always ran.
        erroAceito = true;
        foreach (TrainingSample treinamento in trainingSet.TrainingSamples.Distinct())
        {
            double[] previsao = network.Run(treinamento.InputVector);
            double erroAcumulado = 0;
            for (int indRede = 0; indRede < trainingSet.OutputVectorLength; indRede++)
            {
                // Relative error per component: 1 - min/max.
                erroAcumulado += 1 - Math.Min(previsao[indRede], treinamento.OutputVector[indRede]) / Math.Max(previsao[indRede], treinamento.OutputVector[indRede]);
            }
            // NOTE(review): dividing one sample's accumulated error by the
            // TOTAL sample count looks suspicious (a per-component mean would
            // divide by OutputVectorLength) — preserved as-is; confirm intent.
            double erroMedio = erroAcumulado / trainingSet.TrainingSampleCount;
            if (erroMedio > 0.3) // more than 30% error (the old "3%" comment did not match the code)
            {
                erroAceito = false;
                // Re-add the failing sample so it is weighted more heavily.
                trainingSet.Add(treinamento);
            }
        }
        cicloAtual += ciclos / 5;
    }

    // Persist the trained network via BinaryFormatter (.ndn file).
    using (Stream stream = File.Open(diretorioRedesCaptacao + nomeRede + ".ndn", FileMode.Create))
    {
        IFormatter formatter = new BinaryFormatter();
        formatter.Serialize(stream, network);
    }
}
//public void TreinarRedeNeural(List<DadosBE> dadosBE, string papel = "PETR4")
//{
//}
/// <summary>
/// Trains a backpropagation network from V2 training samples built with a
/// sliding input window, excluding one cross-validation fold, re-training in
/// ciclos/2 increments until the mean relative error is at most 1% (or the
/// cycle budget is exhausted), then serializes the network to disk.
/// </summary>
/// <param name="papel">Stock symbol (not used in this body — TODO confirm).</param>
/// <param name="nomeRedeNeural">File name (without extension) for the saved network.</param>
/// <param name="dadosBE">Raw market data used to build the training samples.</param>
/// <param name="janelaEntrada">Input window size (past quotes per sample).</param>
/// <param name="numeroNeuronios">Hidden-layer neuron count.</param>
/// <param name="taxaAprendizado">Learning rate.</param>
/// <param name="ciclos">Maximum training cycles.</param>
/// <param name="numeroDivisoesCrossValidation">Fold count (not used in this body — TODO confirm).</param>
/// <param name="shift">Index of the cross-validation fold to hold out.</param>
public static void Treinar(string papel, string nomeRedeNeural, List<DadosBE> dadosBE, int janelaEntrada, int numeroNeuronios, double taxaAprendizado, int ciclos, int numeroDivisoesCrossValidation, int shift)
{
    if (dadosBE.Count < janelaEntrada)
        return;

    /*
    // inputLayerCount would be janelaEntrada plus 2+n, where n is
    // floor((original input size - 5) / 5); i.e. for every 5 input days beyond
    // the initial 5, one more dollar quote is supplied.
    // E.g. quotes for days 1..9 (9 days) to predict day 10: TOE - 5 = 4;
    // 4 / 5 = 0.8, rounded down = 0, so only 2 dollar quotes are supplied
    // (day 1 and day 9). With 10-14 input days, 3 dollar quotes; with 15-19,
    // 4 dollar quotes; and so on.
    int inputLayerCount = janelaEntrada + 2;
    // Add 2 to the input window because the season of the year and the
    // Bollinger value are also supplied.
    inputLayerCount += 2;
    // The first output is the asset's next-day quote, the second the dollar's
    // next-day quote.
    int outputLayerCount = 2;
    */
    List<Treinamento> treinamentos = DataBaseUtils.DataBaseUtils.SelecionarTreinamentos_V2(dadosBE, janelaEntrada, 1);
    // Hold out the fold reserved for cross-validation.
    treinamentos = treinamentos.Where(trein => trein.DivisaoCrossValidation != shift).ToList();

    // Layer sizes are inferred from the first sample's vector lengths.
    int inputLayerCount = treinamentos.First().Input.Count();
    int outputLayerCount = treinamentos.First().Output.Count();

    BackpropagationNetwork network;
    //int numeroNeuronios = 4;
    //double taxaAprendizado = 0.25d;
    ActivationLayer inputLayer = new LinearLayer(inputLayerCount);
    ActivationLayer hiddenLayer = new SigmoidLayer(numeroNeuronios);
    ActivationLayer outputLayer = new SigmoidLayer(outputLayerCount);
    // Small random initial weights in [0, 0.3).
    new BackpropagationConnector(inputLayer, hiddenLayer).Initializer = new RandomFunction(0d, 0.3d);
    new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
    network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(taxaAprendizado);

    TrainingSet trainingSet = new TrainingSet(inputLayerCount, outputLayerCount);
    //foreach (KeyValuePair<double[], double[]> kvp in dadosPorJanelamento)
    //{
    //    trainingSet.Add(new TrainingSample(kvp.Key, kvp.Value));
    //}
    foreach (Treinamento treinamento in treinamentos)
    {
        trainingSet.Add(new TrainingSample(treinamento.Input.ToArray(), treinamento.Output.ToArray()));
    }

    network.EndEpochEvent += new TrainingEpochEventHandler(
        delegate(object senderNetwork, TrainingEpochEventArgs argsNw)
        {
            //trainingProgressBar.Value = (int)(argsNw.TrainingIteration * 100d / cycles);
            //Application.DoEvents();
        });

    // Train in ciclos/2 increments, re-adding badly-predicted samples to
    // weight them more heavily, until the mean relative error is <= 1%.
    bool erroAceito = false;
    int cicloAtual = ciclos / 2;
    while (erroAceito == false && cicloAtual <= ciclos)
    {
        erroAceito = true;
        network.Learn(trainingSet, cicloAtual);
        double erroGeralRede = 0;
        foreach (Treinamento treinamento in treinamentos)
        {
            double[] previsao = network.Run(treinamento.Input.ToArray());
            // Relative error: 1 - min/max of (prediction, expected).
            double erroRede = 1 - Math.Min(previsao.First(), treinamento.Output.First()) / Math.Max(previsao.First(), treinamento.Output.First());
            erroGeralRede += erroRede;
            if (erroRede > 0.01) // more than 1% error on this sample
            {
                trainingSet.Add(new TrainingSample(treinamento.Input.ToArray(), treinamento.Output.ToArray()));
            }
        }
        erroGeralRede = erroGeralRede / treinamentos.Count;
        if (erroGeralRede > 0.01)
            erroAceito = false;
        cicloAtual += ciclos / 2;
    }

    // Persist the trained network via BinaryFormatter (.ndn file).
    using (Stream stream = File.Open(diretorioRedes + "\\RedesPrevisaoFinanceira\\" + nomeRedeNeural + ".ndn", FileMode.Create))
    {
        IFormatter formatter = new BinaryFormatter();
        formatter.Serialize(stream, network);
    }
}
// Loads the previously trained network from ann.dat in the F-Spot base
// directory into `bpnet`. Returns false (after logging) when loading fails.
private bool LoadTrainedNetwork()
{
    Log.Debug("LoadTrainedNetwork called...");
    try
    {
        string path = Path.Combine (FSpot.Global.BaseDirectory, "ann.dat");
        bpnet = (BackpropagationNetwork)SerializeUtil.DeSerialize(path);
    }
    catch (Exception e)
    {
        Log.Exception(e);
        return false;
    }
    Log.Debug("LoadTrainedNetwork ended...");
    return true;
}
/// <summary>
/// Trains the face-recognition network on the eigen-value tags and saves it
/// to "ann.dat" under the F-Spot base directory, then records per-label
/// training counts in the face database.
/// </summary>
/// <param name="eigen">
/// A <see cref="EigenValueTags"/>
/// </param>
private static void TrainNetwork(EigenValueTags eigen)
{
    Log.Debug("================ Train Started ================ ");
    string[] dLabels = eigen.FacesLabel;
    int numInstances = eigen.eigenTaglist.Count;
    int inputNodes = eigen.eigenTaglist[0].val.Length; // one input per eigen component
    int outputNodes = dLabels.Length;                  // one output per known label
    int hiddenNodes = inputNodes+outputNodes;
    float[][] trainInput = new float[numInstances][];
    float[][] trainOutput = new float[numInstances][];
    //Random r = new Random();
    int numstrain = 0;
    // Target encoding: 0.9 for the instance's own label, 0.1 for every other
    // label (soft targets for sigmoid outputs).
    for(int i=0;i<numInstances;i++){
        trainInput[numstrain] = new float[inputNodes];
        trainOutput[numstrain] = new float[outputNodes];
        for(int j=0;j<dLabels.Length;j++){
            if(eigen.eigenTaglist[i].tag.Equals(dLabels[j]))
                trainOutput[numstrain][j] = 0.9f;
            else
                trainOutput[numstrain][j] = 0.1f;
        }
        for(int j=0;j<inputNodes;j++){
            trainInput[numstrain][j] = eigen.eigenTaglist[i].val[j];
        }
        numstrain++;
    }
    // convert to double (the training API consumes double arrays)
    Log.Debug("nums train = "+ numstrain);
    double[][] trainInputD = new double[numstrain][];
    double[][] trainOutputD = new double[numstrain][];
    for(int i=0;i<numstrain;i++){
        trainInputD[i] = new double[inputNodes];
        trainOutputD[i] = new double[outputNodes];
        for(int j=0;j<outputNodes;j++){
            trainOutputD[i][j] = trainOutput[i][j];
        }
        for(int j=0;j<inputNodes;j++){
            trainInputD[i][j] = trainInput[i][j];
        }
    }
    // TimeSpan tp = System.DateTime.Now.TimeOfDay;
    Log.Debug("#in = {0}, #hid = {1}, #out = {2}",inputNodes,hiddenNodes,outputNodes);
    NeuronDotNet.Core.Backpropagation.SigmoidLayer inputLayer = new NeuronDotNet.Core.Backpropagation.SigmoidLayer(inputNodes);
    NeuronDotNet.Core.Backpropagation.SigmoidLayer hiddenlayer = new NeuronDotNet.Core.Backpropagation.SigmoidLayer(hiddenNodes);
    NeuronDotNet.Core.Backpropagation.SigmoidLayer outputlayer = new NeuronDotNet.Core.Backpropagation.SigmoidLayer(outputNodes);
    Log.Debug("BackpropagationConnector input_hidden = new BackpropagationConnector(inputLayer, hiddenlayer);");
    BackpropagationConnector input_hidden = new BackpropagationConnector(inputLayer, hiddenlayer);
    BackpropagationConnector hidden_output = new BackpropagationConnector(hiddenlayer, outputlayer);
    input_hidden.Momentum = 0.3;
    hidden_output.Momentum = 0.3;
    Log.Debug("bpnet = new BackpropagationNetwork(inputLayer,outputlayer);");
    bpnet = new BackpropagationNetwork(inputLayer,outputlayer);
    Log.Debug("TrainingSet tset = new TrainingSet(inputNodes, outputNodes);");
    TrainingSet tset = new TrainingSet(inputNodes, outputNodes);
    for(int i=0;i<numstrain;i++)
        tset.Add(new TrainingSample(trainInputD[i], trainOutputD[i]));
    // prevent getting stuck in local minima
    bpnet.JitterNoiseLimit = 0.0001;
    bpnet.Initialize();
    int numEpoch = 200;
    bpnet.SetLearningRate(0.2);
    bpnet.Learn(tset, numEpoch);
    // Log.Debug("error = {0}",bpnet.MeanSquaredError);
    // string savepath = facedbPath + "object/";
    // if(!Directory.Exists(savepath))
    //     Directory.CreateDirectory(savepath);
    // Serialize the trained network next to the F-Spot data.
    string path = Path.Combine (FSpot.Global.BaseDirectory, "ann.dat");
    SerializeUtil.Serialize(path, bpnet);
    // Deserialize
    //BackpropagationNetwork testnet = (BackpropagationNetwork)SerializeUtil.DeSerialize("nn.dat");
    // Log.Debug("error = {0}",bpnet.MeanSquaredError);
    //bpnet = (BackpropagationNetwork)SerializeUtil.DeSerialize("/home/hyperjump/nn.dat");
    //Log.Debug("error = {0}",bpnet.MeanSquaredError);
    // test by using training data
    // int correct = 0;
    // for(int i=0;i<numInstances;i++){
    //     double[] v = new double[inputNodes];
    //     for(int j=0;j<v.Length;j++){
    //         v[j] = (double)eigen.eigenTaglist[i].val[j];
    //         //Console.Write("{0},",v[j]);
    //     }
    //     //Console.WriteLine();
    //     double[] netOutput = bpnet.Run(v);
    //     //Console.WriteLine("net out:");
    //     for(int j=0;j<netOutput.Length;j++)
    //         Console.Write("{0},",netOutput[j]);
    //     string result = FaceClassifier.Instance.AnalyseNetworkOutput(eigen, netOutput);
    //     if(eigen.eigenTaglist[i].tag.Equals(result))
    //         correct++;
    // }
    // Log.Debug("% correct = " + (float)correct/(float)numInstances * 100);
    //Save Train Status
    Log.Debug("Saving Train Status...");
    List<Tstate> tstateList = new List<Tstate>();
    int[] num = new int[dLabels.Length];
    Log.Debug("num length = {0}",num.Length);
    // Count how many training instances exist per label.
    foreach(VTag vt in eigen.eigenTaglist){
        for(int k=0;k<num.Length;k++)
            if(vt.tag.Equals(dLabels[k]))
                num[k]++;
    }
    for(int k=0;k<dLabels.Length;k++){
        tstateList.Add(new Tstate(dLabels[k], num[k]));
    }
    FaceSpotDb.Instance.TrainingData.Trainstat = tstateList;
    // Log.Debug("time ="+ System.DateTime.Now.TimeOfDay.Subtract(tp));
    Log.Debug("================ Train ended ================ ");
}
/// <summary>
/// "Train" button handler: reads cycle count, learning rate, and hidden-neuron
/// count from the form (with defaults and minimums), trains a 2-n-1 XOR
/// network, and plots the per-epoch mean squared error.
/// </summary>
private void Train(object sender, EventArgs e)
{
    EnableControls(false);
    // Parse the form fields, falling back to defaults on invalid input.
    if (!int.TryParse(txtCycles.Text.Trim(), out cycles)) { cycles = 5000; }
    if (!double.TryParse(txtLearningRate.Text.Trim(), out learningRate)) { learningRate = 0.25d; }
    if (!int.TryParse(txtNeuronCount.Text.Trim(), out neuronCount)) { neuronCount = 3; }
    // Clamp to sane minimums.
    if (cycles < 1) { cycles = 1; }
    if (learningRate < 0.01) { learningRate = 0.01; }
    if (neuronCount < 1) { neuronCount = 1; }
    // Echo the effective values back to the form.
    txtNeuronCount.Text = neuronCount.ToString();
    txtCycles.Text = cycles.ToString();
    txtLearningRate.Text = learningRate.ToString();
    errorList = new double[cycles];
    InitGraph();
    // 2 linear inputs -> neuronCount sigmoid hidden -> 1 sigmoid output.
    LinearLayer inputLayer = new LinearLayer(2);
    SigmoidLayer hiddenLayer = new SigmoidLayer(neuronCount);
    SigmoidLayer outputLayer = new SigmoidLayer(1);
    new BackpropagationConnector(inputLayer, hiddenLayer);
    new BackpropagationConnector(hiddenLayer, outputLayer);
    xorNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
    xorNetwork.SetLearningRate(learningRate);
    // The four rows of the XOR truth table.
    TrainingSet trainingSet = new TrainingSet(2, 1);
    trainingSet.Add(new TrainingSample(new double[2] { 0d, 0d }, new double[1] { 0d }));
    trainingSet.Add(new TrainingSample(new double[2] { 0d, 1d }, new double[1] { 1d }));
    trainingSet.Add(new TrainingSample(new double[2] { 1d, 0d }, new double[1] { 1d }));
    trainingSet.Add(new TrainingSample(new double[2] { 1d, 1d }, new double[1] { 0d }));
    double max = 0d;
    // Record each epoch's MSE for the plot and track the maximum for the
    // Y-axis scale (assumes args.TrainingIteration is 0-based — TODO confirm).
    xorNetwork.EndEpochEvent += delegate(object network, TrainingEpochEventArgs args)
    {
        errorList[args.TrainingIteration] = xorNetwork.MeanSquaredError;
        max = Math.Max(max, xorNetwork.MeanSquaredError);
        progressBar.Value = (int)(args.TrainingIteration * 100d / cycles);
    };
    xorNetwork.Learn(trainingSet, cycles);
    // Plot error vs. epoch index.
    double[] indices = new double[cycles];
    for (int i = 0; i < cycles; i++) { indices[i] = i; }
    lblTrainErrorVal.Text = xorNetwork.MeanSquaredError.ToString("0.000000");
    LineItem errorCurve = new LineItem("Error Dynamics", indices, errorList, Color.Tomato, SymbolType.None, 1.5f);
    errorGraph.GraphPane.YAxis.Scale.Max = max;
    errorGraph.GraphPane.CurveList.Add(errorCurve);
    errorGraph.Invalidate();
    EnableControls(true);
}
/// <summary>
/// Trains a 2-n-4 network that classifies agencies into zones from their
/// normalized (latitude, longitude), training in ciclos/5 increments until
/// 99% of agencies are classified correctly or the cycle budget runs out,
/// then serializes and returns the network.
/// </summary>
/// <param name="numeroNeuronios">Hidden-layer neuron count.</param>
/// <param name="taxaAprendizado">Learning rate.</param>
/// <param name="ciclos">Maximum training cycles.</param>
/// <returns>The trained network.</returns>
private BackpropagationNetwork Treinar(int numeroNeuronios, double taxaAprendizado, int ciclos)
{
    List<Agencia> agencias = Agencia.PegarTodas();

    // Input: normalized (latitude, longitude); output: zone encoded as 4 doubles.
    List<KeyValuePair<double[], double[]>> dadosEntrada = new List<KeyValuePair<double[], double[]>>();
    foreach (Agencia agencia in agencias)
    {
        KeyValuePair<double[], double[]> kvp = new KeyValuePair<double[], double[]>(
            new double[] { NormalizarLatitude(agencia.Latitude), NormalizarLongitude(agencia.Longitude) },
            ConverterZonaParaArrayDouble(agencia.Zona));
        dadosEntrada.Add(kvp);
    }

    BackpropagationNetwork network;
    //int numeroNeuronios = 4;
    //double taxaAprendizado = 0.25d;
    ActivationLayer inputLayer = new LinearLayer(2);
    ActivationLayer hiddenLayer = new SigmoidLayer(numeroNeuronios);
    ActivationLayer outputLayer = new SigmoidLayer(4);
    new BackpropagationConnector(inputLayer, hiddenLayer).Initializer = new RandomFunction(0d, 0.3d);
    new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
    network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(taxaAprendizado);

    TrainingSet trainingSet = new TrainingSet(2, 4);
    foreach (KeyValuePair<double[], double[]> kvp in dadosEntrada)
    {
        trainingSet.Add(new TrainingSample(kvp.Key, kvp.Value));
    }

    network.EndEpochEvent += new TrainingEpochEventHandler(
        delegate(object senderNetwork, TrainingEpochEventArgs argsNw)
        {
            //trainingProgressBar.Value = (int)(argsNw.TrainingIteration * 100d / cycles);
            //Application.DoEvents();
        });

    //network.Learn(trainingSet, ciclos);
    int cicloAtual = ciclos / 5;
    int acertos = 0;
    // BUG FIX: `acertos / agencias.Count` was integer division (only 0 or 1),
    // so the 0.99 threshold effectively required a perfect score; cast to
    // double so the 99% accuracy target works as intended.
    while (cicloAtual <= ciclos && (double)acertos / agencias.Count < 0.99)
    {
        network.Learn(trainingSet, cicloAtual);
        acertos = 0;
        foreach (Agencia agencia in agencias)
        {
            double[] input = new double[] { NormalizarLatitude(agencia.Latitude), NormalizarLongitude(agencia.Longitude) };
            double[] resultado = network.Run(input);
            // Misclassified agencies are re-added to weight them more heavily.
            if (ConverterArrayDoubleParaZona(resultado) != agencia.Zona)
                trainingSet.Add(new TrainingSample(input, ConverterZonaParaArrayDouble(agencia.Zona)));
            else
                acertos++;
        }
        cicloAtual += ciclos / 5;
    }

    //using (Stream stream = File.Open(System.IO.Directory.GetCurrentDirectory() + "\\" + nomeRedeNeural + ".ndn", FileMode.Create))
    using (Stream stream = File.Open("..\\Pasta\\" + nomeRede + ".ndn", FileMode.Create))
    {
        IFormatter formatter = new BinaryFormatter();
        formatter.Serialize(stream, network);
    }
    return network;
}
/// <summary>
/// Copy constructor: builds a new supervised backpropagation network over the
/// given network's input and output layers (layers are shared, not cloned —
/// this body adds nothing beyond the base-class call).
/// </summary>
public BackpropagationNetwork(BackpropagationNetwork network)
    : base(network.inputLayer, network.outputLayer, TrainingMethod.Supervised)
{
}