/// <summary>
/// Builds a 1-n-1 backpropagation network and trains it to fit the curve's points.
/// Each curve point is expanded into 11 training samples at x-offsets -0.05..+0.05
/// (step 0.01), all sharing the point's Y value, to smooth the fitted function.
/// </summary>
void InitNeuralNet()
{
    LinearLayer inputLayer = new LinearLayer(1);
    SigmoidLayer hiddenLayer = new SigmoidLayer(neuronCount);
    SigmoidLayer outputLayer = new SigmoidLayer(1);

    // Small random initial weights in [0, 0.3).
    new BackpropagationConnector(inputLayer, hiddenLayer).Initializer = new RandomFunction(0d, 0.3d);
    new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);

    network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(learningRate);

    TrainingSet trainingSet = new TrainingSet(inputVectorSize, outVectorSize);
    for (int i = 0; i < curve.Points.Count; i++)
    {
        double xVal = curve.Points[i].X;
        // FIX: the original used a floating-point loop counter
        // (for (double input = xVal - 0.05; input < xVal + 0.06; input += 0.01)),
        // whose accumulated rounding error could produce a varying number of
        // samples per point. An integer counter yields exactly 11 samples.
        for (int offset = -5; offset <= 5; offset++)
        {
            double input = xVal + offset * 0.01;
            trainingSet.Add(new TrainingSample(new double[] { input }, new double[] { curve.Points[i].Y }));
        }
    }

    network.EndEpochEvent += new TrainingEpochEventHandler(
        delegate(object senderNetwork, TrainingEpochEventArgs args)
        {
            trainingProgressBar.Value = (int)(args.TrainingIteration * 100d / cycles);
            Application.DoEvents(); // keep the UI responsive while Learn() blocks this thread
        });

    network.Learn(trainingSet, cycles);
}
/// <summary>
/// Trains a 3-layer backpropagation network on the V3 training samples built from
/// <paramref name="dadosBE"/>, excluding cross-validation partition <paramref name="shift"/>,
/// computes the mean relative error over the training data, and serializes the
/// trained network to disk.
/// </summary>
public static void Treinar(string papel, string nomeRedeNeural, List<DadosBE> dadosBE, int numeroNeuronios, double taxaAprendizado, int ciclos, int numeroDivisoesCrossValidation, int shift, double versao)
{
    if (dadosBE.Count == 0) return;

    List<Treinamento> treinamentos = DataBaseUtils.DataBaseUtils.SelecionarTreinamentos_V3(dadosBE, versao);
    // Hold out the 'shift' partition for cross-validation.
    treinamentos = treinamentos.Where(trein => trein.DivisaoCrossValidation != shift).ToList();
    // FIX: guard the empty case — First() below threw when every sample
    // belonged to the held-out partition.
    if (treinamentos.Count == 0) return;

    int inputLayerCount = treinamentos.First().Input.Count();
    int outputLayerCount = treinamentos.First().Output.Count();

    BackpropagationNetwork network;
    ActivationLayer inputLayer = new LinearLayer(inputLayerCount);
    ActivationLayer hiddenLayer = new SigmoidLayer(numeroNeuronios);
    ActivationLayer outputLayer = new SigmoidLayer(outputLayerCount);
    new BackpropagationConnector(inputLayer, hiddenLayer).Initializer = new RandomFunction(0d, 0.3d);
    new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
    network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(taxaAprendizado);

    TrainingSet trainingSet = new TrainingSet(inputLayerCount, outputLayerCount);
    foreach (Treinamento treinamento in treinamentos)
    {
        trainingSet.Add(new TrainingSample(treinamento.Input.ToArray(), treinamento.Output.ToArray()));
    }

    network.Learn(trainingSet, ciclos);

    // Mean relative error over the training set: 1 - min/max of (prediction, target).
    double erroGeralRede = 0;
    foreach (Treinamento treinamento in treinamentos)
    {
        double[] previsao = network.Run(treinamento.Input.ToArray());
        double maior = Math.Max(previsao.First(), treinamento.Output.First());
        // FIX: when both prediction and target are 0 the original divided by zero,
        // producing NaN that poisoned the accumulated error. Identical values mean
        // zero error.
        double erroRede = maior == 0 ? 0 : 1 - Math.Min(previsao.First(), treinamento.Output.First()) / maior;
        erroGeralRede += erroRede;
    }
    erroGeralRede = erroGeralRede / treinamentos.Count;

    // SECURITY NOTE: BinaryFormatter is obsolete and unsafe for untrusted input;
    // kept only because the NeuronDotNet network type is persisted with it.
    using (Stream stream = File.Open(diretorioRedes + "\\RedesPrevisaoFinanceira\\" + nomeRedeNeural + ".ndn", FileMode.Create))
    {
        IFormatter formatter = new BinaryFormatter();
        formatter.Serialize(stream, network);
    }
}
/// <summary>
/// Trains MainWindow.Network on the MNIST-style training images for the given number
/// of generations and returns the per-epoch mean squared error curve.
/// </summary>
/// <param name="generations">Number of training epochs.</param>
/// <param name="trainingFileLocation">Path of the image file containing the training digits.</param>
/// <returns>Array of length <paramref name="generations"/> with the MSE after each epoch.</returns>
public async Task<double[]> Start(int generations, string trainingFileLocation)
{
    // FIX: 'results' is fully assigned by the out parameter; the original
    // pre-allocated a 10000-element array that was immediately discarded.
    int[] results;
    var trainingData = CreateTrainingDataFromImage(
        trainingFileLocation,
        @"C:\Users\pruss\Documents\Visual Studio 2015\Projects\WrittenTextRecognition\Training Data\train-labels.idx1-ubyte", // TODO: make the label path configurable
        out results);
    var resultsConverted = NumbersToBitRow(results);

    // One sample per 28x28 image, one-hot encoded 10-way label.
    var trainingSet = new TrainingSet(28 * 28, 10);
    for (int i = 0; i < results.Length; i++)
    {
        trainingSet.Add(new TrainingSample(trainingData[i], resultsConverted[i]));
    }

    var errorList = new double[generations];
    int j = 1;
    // NOTE(review): this handler is never unsubscribed, so repeated calls to
    // Start() stack handlers on the shared network — confirm intended.
    MainWindow.Network.EndEpochEvent += delegate (object network, TrainingEpochEventArgs args)
    {
        errorList[args.TrainingIteration] = MainWindow.Network.MeanSquaredError;
        UpdateProgressBar((int)(j / (generations / 100.0)));
        j++;
    };

    // FIX: Task.Run is the idiomatic way to offload CPU-bound work; awaiting it
    // directly replaces the redundant Task.WhenAll over a single task.
    await Task.Run(() => MainWindow.Network.Learn(trainingSet, generations));

    UpdateProgressBar(0);
    return errorList;
}
/// <summary>
/// Trains the neural network for the given training set (Batch Training)
/// </summary>
/// <param name="trainingSet">
/// The training set to use
/// </param>
/// <param name="trainingEpochs">
/// Number of training epochs. (All samples are trained in some random order, in every
/// training epoch)
/// </param>
/// <exception cref="ArgumentNullException">
/// if <c>trainingSet</c> is <c>null</c>
/// </exception>
/// <exception cref="ArgumentException">
/// if <c>trainingEpochs</c> is zero or negative
/// </exception>
public virtual void Learn(TrainingSet trainingSet, int trainingEpochs)
{
    // Validate: the set's vector sizes must match the network topology for the
    // active training method (supervised needs matching outputs; unsupervised none).
    Helper.ValidateNotNull(trainingSet, "trainingSet");
    Helper.ValidatePositive(trainingEpochs, "trainingEpochs");
    if ((trainingSet.InputVectorLength != inputLayer.NeuronCount)
        || (trainingMethod == TrainingMethod.Supervised && trainingSet.OutputVectorLength != outputLayer.NeuronCount)
        || (trainingMethod == TrainingMethod.Unsupervised && trainingSet.OutputVectorLength != 0))
    {
        throw new ArgumentException("Invalid training set");
    }

    // Reset isStopping so a StopLearning() from a previous run cannot abort this one.
    isStopping = false;

    // Re-Initialize the network. NOTE(review): this appears to reset the network
    // state, meaning repeated Learn() calls retrain from scratch — confirm.
    Initialize();

    for (int currentIteration = 0; currentIteration < trainingEpochs; currentIteration++)
    {
        // New random sample ordering for every epoch.
        int[] randomOrder = Helper.GetRandomOrder(trainingSet.TrainingSampleCount);

        // Beginning a new training epoch
        OnBeginEpoch(currentIteration, trainingSet);

        // Check for Jitter Epoch: every jitterEpoch-th epoch (including epoch 0),
        // add bounded noise to all connector weights to escape local minima.
        if (jitterEpoch > 0 && currentIteration % jitterEpoch == 0)
        {
            for (int i = 0; i < connectors.Count; i++)
            {
                connectors[i].Jitter(jitterNoiseLimit);
            }
        }

        for (int index = 0; index < trainingSet.TrainingSampleCount; index++)
        {
            TrainingSample randomSample = trainingSet[randomOrder[index]];

            // Learn a random training sample, bracketed by the per-sample events.
            OnBeginSample(currentIteration, randomSample);
            LearnSample(trainingSet[randomOrder[index]], currentIteration, trainingEpochs);
            OnEndSample(currentIteration, randomSample);

            // Check if we need to stop: a handler may have called StopLearning();
            // the flag is consumed (reset) before returning.
            if (isStopping) { isStopping = false; return; }
        }

        // Training Epoch successfully complete
        OnEndEpoch(currentIteration, trainingSet);

        // Check if we need to stop (StopLearning() may be called from an
        // EndEpoch handler as well).
        if (isStopping) { isStopping = false; return; }
    }
}
/// <summary>
/// Loads every optimiser setting from the data store at <paramref name="filename"/>:
/// preprocessor filters, the enabled trainer (backpropagation OR particle-swarm,
/// mutually exclusive), its hyper-parameters; builds the corresponding network and
/// the training set from the stored source images, then initializes the network.
/// </summary>
/// <param name="filename">Path of the parameter/data store read through DataAccess.</param>
/// <exception cref="NotImplementedException">
/// Thrown when both BP and PSO are enabled in the data file.
/// </exception>
public Optimiser(string filename)
{
    Utils.Logger.Log("Loading stopwatch... ");
    stopWatch = new Stopwatch();
    ResultCounter = new Stopwatch();
    this.filename = filename;

    // --- Preprocessor configuration, read parameter-by-parameter from the store ---
    Utils.Logger.Log("Loading preprocessor parameters from " + filename);
    dataAccess = new DataAccess(filename);
    preprocessor = new Preprocessor();
    preprocessor.ImageSize = new Size(Convert.ToInt32(dataAccess.GetParameter("Master_Width")), Convert.ToInt32(dataAccess.GetParameter("Master_Height")));
    preprocessor.KeepAspectRatio = Convert.ToBoolean(dataAccess.GetParameter("Master_Aspect"));
    preprocessor.ScalingMethod = (ScalingMethods)Convert.ToInt32(dataAccess.GetParameter("Master_Resize"));
    preprocessor.ContrastStretch = Convert.ToBoolean(dataAccess.GetParameter("Filter_Stretch"));
    preprocessor.Histogram = Convert.ToBoolean(dataAccess.GetParameter("Filter_Histo"));
    preprocessor.Gaussian = Convert.ToBoolean(dataAccess.GetParameter("Filter_Gaussian"));
    preprocessor.GaussianStrength = Convert.ToInt32(dataAccess.GetParameter("Filter_BlurStr"));
    preprocessor.ContrastAdjustment = Convert.ToBoolean(dataAccess.GetParameter("Filter_Contrast"));
    preprocessor.ContrastStrength = Convert.ToDecimal(dataAccess.GetParameter("Filter_ContrastStr"));
    preprocessor.Greyscale = Convert.ToBoolean(dataAccess.GetParameter("Filter_Greyscale"));
    preprocessor.Bradley = Convert.ToBoolean(dataAccess.GetParameter("Filter_Bradley"));
    preprocessor.Threshold = Convert.ToBoolean(dataAccess.GetParameter("Filter_Threshold"));
    preprocessor.ThresholdStrength = Convert.ToDecimal(dataAccess.GetParameter("Filter_ThresholdStr"));
    /*
    dataAccess.SetParameter("Opt_Bp_LearningType", cmbLearningRateType.SelectedItem.ToString());
    dataAccess.SetParameter("Opt_Bp_InitialLearnRate", txtInitialRate.Text);
    dataAccess.SetParameter("Opt_Bp_FinalLearnRate", txtFinalRate.Text);
    dataAccess.SetParameter("Opt_Bp_JitterEpoch", txtJitterEpoch.Text);
    dataAccess.SetParameter("Opt_Bp_JitterNoiseLimit", txtJitterNoiseLimit.Text);
    dataAccess.SetParameter("Opt_Bp_MaxIterations", txtMaxIterations.Text);
    dataAccess.SetParameter("Opt_Bp_MinError", txtMinimumError.Text);
    */

    // --- Which trainer is enabled? A missing/unreadable flag is logged and treated as off. ---
    bool usePSO = false;
    bool useBP = false;
    try
    {
        useBP = Convert.ToBoolean(dataAccess.GetParameter("Opt_Bp_Enabled"));
    }
    catch (Exception)
    {
        Utils.Logger.Log("Warning unable to read BP params");
    }
    try
    {
        usePSO = Convert.ToBoolean(dataAccess.GetParameter("Opt_Pso_Enabled"));
    }
    catch (Exception)
    {
        Utils.Logger.Log("Warning unable to read PSO params");
    }
    if (usePSO && useBP)
    {
        throw new NotImplementedException("At this current time you cannot use both BP and PSO");
    }

    InputGroup[] inputGroups = dataAccess.GetInputGroups();
    SourceItem[] sourceItems = dataAccess.GetSourceItems();
    /*
    Utils.Logger.Log("Preprocessing images...");
    foreach (SourceItem item in sourceItems)
    {
        Utils.Logger.Log("Preprocessing item {0} ", item.Filename);
        item.InternalImage = preprocessor.Process((Bitmap)item.InternalImage);
    }
    */

    // Hidden-layer size: grid groups contribute segments^2 neurons, other groups
    // contribute segments neurons.
    int total = 0;
    foreach (InputGroup inputGroup in inputGroups)
    {
        if (inputGroup.InputGroupType == InputGroupType.Grid)
        {
            total += (inputGroup.Segments) * (inputGroup.Segments);
        }
        else
        {
            total += inputGroup.Segments;
        }
    }

    // --- Global stopping criteria and result buffer ---
    maxIterations = Convert.ToInt32(dataAccess.GetParameter("Opt_Global_MaxIterations"));
    minError = Convert.ToDouble(dataAccess.GetParameter("Opt_Global_MinError"));
    maxTime = Convert.ToInt32(dataAccess.GetParameter("Opt_Global_MaxTime"));
    results = new float[Convert.ToInt32(dataAccess.GetParameter("Opt_Global_BufferSize"))];

    if (useBP)
    {
        // --- Backpropagation network: image pixels -> grouped hidden layer -> single output ---
        int learningRateFunction = Convert.ToInt32(dataAccess.GetParameter("Opt_Bp_LearningType"));
        double initialLR = Convert.ToDouble(dataAccess.GetParameter("Opt_Bp_InitialLearnRate"));
        double finalLR = Convert.ToDouble(dataAccess.GetParameter("Opt_Bp_FinalLearnRate"));
        int jitterEpoch = Convert.ToInt32(dataAccess.GetParameter("Opt_Bp_JitterEpoch"));
        double jitterNoiseLimit = Convert.ToDouble(dataAccess.GetParameter("Opt_Bp_JitterNoiseLimit"));
        NeuronDotNet.Core.Backpropagation.LinearLayer inputLayer = new NeuronDotNet.Core.Backpropagation.LinearLayer(preprocessor.ImageSize.Width * preprocessor.ImageSize.Height);
        NeuronDotNet.Core.Backpropagation.SigmoidLayer hiddenLayer = new NeuronDotNet.Core.Backpropagation.SigmoidLayer(total);
        hiddenLayer.InputGroups = inputGroups.Length;
        NeuronDotNet.Core.Backpropagation.SigmoidLayer outputLayer = new NeuronDotNet.Core.Backpropagation.SigmoidLayer(1);
        hiddenLayer.Initializer = new NguyenWidrowFunction();
        new BackpropagationConnector(
            inputLayer,
            hiddenLayer,
            inputGroups,
            preprocessor.ImageSize.Width,
            preprocessor.ImageSize.Height
            );
        new BackpropagationConnector(hiddenLayer, outputLayer);
        network = new BackpropagationNetwork(inputLayer, outputLayer);
        // Learning-rate schedule selected by index: 0 fixed, 1 exponential,
        // 2 hyperbolic, 3 linear (initialLR decaying towards finalLR).
        switch (learningRateFunction)
        {
            case 0:
                network.SetLearningRate(initialLR);
                break;
            case 1:
                network.SetLearningRate(new NeuronDotNet.Core.LearningRateFunctions.ExponentialFunction(initialLR, finalLR));//exp
                break;
            case 2:
                network.SetLearningRate(new NeuronDotNet.Core.LearningRateFunctions.HyperbolicFunction(initialLR, finalLR));//hyp
                break;
            case 3:
                network.SetLearningRate(new NeuronDotNet.Core.LearningRateFunctions.LinearFunction(initialLR, finalLR));//lin
                break;
            default:
                throw new ArgumentOutOfRangeException("The learning rate index is out of range.\n");
        }
        network.JitterEpoch = jitterEpoch;
        network.JitterNoiseLimit = jitterNoiseLimit;
    }

    if (usePSO)
    {
        // --- Particle-swarm trainer: read SPSO hyper-parameters from the store ---
        double minP = Convert.ToDouble(dataAccess.GetParameter("Opt_Pso_MinP"));
        double maxP = Convert.ToDouble(dataAccess.GetParameter("Opt_Pso_MaxP"));
        double minI = Convert.ToDouble(dataAccess.GetParameter("Opt_Pso_MinI"));
        double maxI = Convert.ToDouble(dataAccess.GetParameter("Opt_Pso_MaxI"));
        double quant = Convert.ToDouble(dataAccess.GetParameter("Opt_Pso_Quant"));
        double vMax = Convert.ToDouble(dataAccess.GetParameter("Opt_Pso_vMax"));
        int clamping = Convert.ToInt32(dataAccess.GetParameter("Opt_Pso_Clamping"));
        int initLinks = Convert.ToInt32(dataAccess.GetParameter("Opt_Pso_InitLinks"));
        int randomness = Convert.ToInt32(dataAccess.GetParameter("Opt_Pso_Randomness"));
        int randOrder = Convert.ToInt32(dataAccess.GetParameter("Opt_Pso_ParticleOrder"));
        int rotation = Convert.ToInt32(dataAccess.GetParameter("Opt_Pso_Rotation"));
        // NOTE(review): 'dimensions' is read but never used below — confirm whether
        // it should feed the PSO problem setup.
        int dimensions = Convert.ToInt32(dataAccess.GetParameter("Opt_Pso_Dimensions"));
        int swarmSize = Convert.ToInt32(dataAccess.GetParameter("Opt_Pso_Particles"));
        double k = Convert.ToDouble(dataAccess.GetParameter("Opt_Pso_k"));
        double p = Convert.ToDouble(dataAccess.GetParameter("Opt_Pso_p"));
        double w = Convert.ToDouble(dataAccess.GetParameter("Opt_Pso_w"));
        double c = Convert.ToDouble(dataAccess.GetParameter("Opt_Pso_c"));
        Parameters param = new Parameters();
        param.vMax = vMax;
        param.clamping = clamping;
        // 0 => no clamping AND no evaluation. WARNING: the program
        // may NEVER stop (in particular with option move 20 (jumps)) 1
        // *1 => classical. Set to bounds, and velocity to zero
        param.initLink = initLinks;
        // 0 => re-init links after each unsuccessful iteration
        // 1 => re-init links after each successful iteration
        param.rand = randomness;
        // 0 => Use KISS as random number generator.
        // Any other value => use the system one
        param.randOrder = randOrder;
        // 0 => at each iteration, particles are modified
        // always according to the same order 0..S-1
        //*1 => at each iteration, particles numbers are
        // randomly permutated
        param.rotation = rotation;
        // WARNING. Experimental code, completely valid only for dimension 2
        // 0 => sensitive to rotation of the system of coordinates
        // 1 => non sensitive (except side effects),
        // by using a rotated hypercube for the probability distribution
        // WARNING. Quite time consuming!
        param.stop = 0;
        // Stop criterion
        // 0 => error < pb.epsilon
        // 1 => eval >= pb.evalMax
        // 2 => ||x-solution|| < pb.epsilon
        // ===========================================================
        // RUNs
        // Initialize some objects
        //pb = new Problem(function);
        // You may "manipulate" S, p, w and c
        // but here are the suggested values
        param.S = swarmSize;
        if (param.S > 910) param.S = 910; // upper bound on swarm size
        param.K = (int)k;
        param.p = p;
        // (to simulate the global best PSO, set param.p=1)
        //param.p=1;
        param.w = w;
        param.c = c;
        NeuronDotNet.Core.PSO.LinearLayer inputLayer = new NeuronDotNet.Core.PSO.LinearLayer(preprocessor.ImageSize.Width * preprocessor.ImageSize.Height);
        NeuronDotNet.Core.PSO.SigmoidLayer hiddenLayer = new NeuronDotNet.Core.PSO.SigmoidLayer(total);
        hiddenLayer.InputGroups = inputGroups.Length;
        NeuronDotNet.Core.PSO.SigmoidLayer outputLayer = new NeuronDotNet.Core.PSO.SigmoidLayer(1);
        hiddenLayer.Initializer = new NguyenWidrowFunction();
        new PSOConnector(
            inputLayer,
            hiddenLayer,
            inputGroups,
            preprocessor.ImageSize.Width,
            preprocessor.ImageSize.Height
            );
        new PSOConnector(hiddenLayer, outputLayer);
        PSONetwork n = new PSONetwork(inputLayer, outputLayer);
        n.PsoParameters = param;
        n.PsoProblem.MaxI = maxI;
        n.PsoProblem.MinI = minI;
        n.PsoProblem.MaxP = maxP;
        n.PsoProblem.MinP = minP;
        n.PsoProblem.Quantisation = quant;
        network = n;
    }

    // --- Training set: one sample per source image, expected output = its sample type ---
    set = new TrainingSet(preprocessor.ImageSize.Width * preprocessor.ImageSize.Height, 1);
    foreach (SourceItem item in sourceItems)
    {
        double[] weights = Utils.getImageWeights(item.InternalImage, inputGroups);
        set.Add(new TrainingSample(weights, new double[] { (double)item.SampleType }));
    }
    network.EndEpochEvent += new TrainingEpochEventHandler(network_EndEpochEvent);
    network.Initialize();
}
/// <summary>
/// Creates a new instance of training epoch event arguments.
/// </summary>
/// <param name="trainingIteration">
/// Current training iteration
/// </param>
/// <param name="trainingSet">
/// The training set associated with the event
/// </param>
public TrainingEpochEventArgs(int trainingIteration, TrainingSet trainingSet)
{
    this.trainingIteration = trainingIteration;
    this.trainingSet = trainingSet;
}
/// <summary>
/// Reads the training parameters from the UI, trains a 2-n-1 backpropagation
/// network on the XOR truth table, and plots the per-epoch mean squared error.
/// </summary>
private void Train(object sender, EventArgs e)
{
    EnableControls(false);

    // Parse the UI fields, falling back to defaults and clamping to sane minimums.
    if (!int.TryParse(txtCycles.Text.Trim(), out cycles)) { cycles = 5000; }
    if (!double.TryParse(txtLearningRate.Text.Trim(), out learningRate)) { learningRate = 0.25d; }
    if (!int.TryParse(txtNeuronCount.Text.Trim(), out neuronCount)) { neuronCount = 3; }
    if (cycles < 1) { cycles = 1; }
    if (learningRate < 0.01) { learningRate = 0.01; }
    if (neuronCount < 1) { neuronCount = 1; }

    // Echo the effective values back into the text boxes.
    txtNeuronCount.Text = neuronCount.ToString();
    txtCycles.Text = cycles.ToString();
    txtLearningRate.Text = learningRate.ToString();

    errorList = new double[cycles];
    InitGraph();

    // Two inputs, configurable hidden layer, single sigmoid output.
    LinearLayer inputLayer = new LinearLayer(2);
    SigmoidLayer hiddenLayer = new SigmoidLayer(neuronCount);
    SigmoidLayer outputLayer = new SigmoidLayer(1);
    new BackpropagationConnector(inputLayer, hiddenLayer);
    new BackpropagationConnector(hiddenLayer, outputLayer);
    xorNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
    xorNetwork.SetLearningRate(learningRate);

    // The four XOR truth-table rows.
    TrainingSet trainingSet = new TrainingSet(2, 1);
    trainingSet.Add(new TrainingSample(new double[2] { 0d, 0d }, new double[1] { 0d }));
    trainingSet.Add(new TrainingSample(new double[2] { 0d, 1d }, new double[1] { 1d }));
    trainingSet.Add(new TrainingSample(new double[2] { 1d, 0d }, new double[1] { 1d }));
    trainingSet.Add(new TrainingSample(new double[2] { 1d, 1d }, new double[1] { 0d }));

    // Record the MSE of every epoch, track its maximum for the Y axis, and
    // advance the progress bar.
    double max = 0d;
    xorNetwork.EndEpochEvent += delegate(object network, TrainingEpochEventArgs args)
    {
        double mse = xorNetwork.MeanSquaredError;
        errorList[args.TrainingIteration] = mse;
        if (mse > max) { max = mse; }
        progressBar.Value = (int)(args.TrainingIteration * 100d / cycles);
    };

    xorNetwork.Learn(trainingSet, cycles);

    // X axis: epoch index 0..cycles-1.
    double[] indices = new double[cycles];
    for (int i = 0; i < cycles; i++) { indices[i] = i; }

    lblTrainErrorVal.Text = xorNetwork.MeanSquaredError.ToString("0.000000");
    LineItem errorCurve = new LineItem("Error Dynamics", indices, errorList, Color.Tomato, SymbolType.None, 1.5f);
    errorGraph.GraphPane.YAxis.Scale.Max = max;
    errorGraph.GraphPane.CurveList.Add(errorCurve);
    errorGraph.Invalidate();
    EnableControls(true);
}
//public void TreinarRedeNeural(List<DadosBE> dadosBE, string papel = "PETR4")
//{
//}
/// <summary>
/// Trains a 3-layer backpropagation network on V2 windowed training samples built
/// from the raw data, excluding cross-validation partition 'shift'. Training is
/// repeated with a growing cycle budget until the mean relative error drops to 1%
/// or the budget is exhausted; misclassified samples are duplicated into the
/// training set to weight them more heavily. The network is then serialized to disk.
/// </summary>
public static void Treinar(string papel, string nomeRedeNeural, List<DadosBE> dadosBE, int janelaEntrada, int numeroNeuronios, double taxaAprendizado, int ciclos, int numeroDivisoesCrossValidation, int shift)
{
    if (dadosBE.Count < janelaEntrada) return;

    /* Superseded sizing logic, kept for reference:
       inputLayerCount would be the input window plus 2+n, where n is
       floor((original input size - 5) / 5); i.e. for every 5 days supplied beyond
       the first 5, one extra dollar quote is included.
       E.g. quotes for days 1..9 (9 days) predict day 10: 9 - 5 = 4;
       4 / 5 = 0.8, rounded down = 0, so only 2 dollar quotes are supplied
       (day 1 and day 9). 10..14 input days -> 3 dollar quotes; 15..19 -> 4;
       and so on.
       int inputLayerCount = janelaEntrada + 2;
       // +2 more because the season of the year and the Bollinger value are also supplied
       inputLayerCount += 2;
       // First output is the asset's next-day quote, second the dollar's next-day quote
       int outputLayerCount = 2;
    */
    List<Treinamento> treinamentos = DataBaseUtils.DataBaseUtils.SelecionarTreinamentos_V2(dadosBE, janelaEntrada, 1);
    // Hold out the 'shift' partition for cross-validation.
    treinamentos = treinamentos.Where(trein => trein.DivisaoCrossValidation != shift).ToList();
    // NOTE(review): First() throws if every sample fell in the held-out partition.
    int inputLayerCount = treinamentos.First().Input.Count();
    int outputLayerCount = treinamentos.First().Output.Count();

    BackpropagationNetwork network;
    //int numeroNeuronios = 4;
    //double taxaAprendizado = 0.25d;
    ActivationLayer inputLayer = new LinearLayer(inputLayerCount);
    ActivationLayer hiddenLayer = new SigmoidLayer(numeroNeuronios);
    ActivationLayer outputLayer = new SigmoidLayer(outputLayerCount);
    // Small random initial weights in [0, 0.3).
    new BackpropagationConnector(inputLayer, hiddenLayer).Initializer = new RandomFunction(0d, 0.3d);
    new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
    network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(taxaAprendizado);

    TrainingSet trainingSet = new TrainingSet(inputLayerCount, outputLayerCount);
    //foreach (KeyValuePair<double[], double[]> kvp in dadosPorJanelamento)
    //{
    //    trainingSet.Add(new TrainingSample(kvp.Key, kvp.Value));
    //}
    foreach (Treinamento treinamento in treinamentos)
    {
        trainingSet.Add(new TrainingSample(treinamento.Input.ToArray(), treinamento.Output.ToArray()));
    }

    // Progress reporting is disabled in this batch variant.
    network.EndEpochEvent += new TrainingEpochEventHandler(
        delegate(object senderNetwork, TrainingEpochEventArgs argsNw)
        {
            //trainingProgressBar.Value = (int)(argsNw.TrainingIteration * 100d / cycles);
            //Application.DoEvents();
        });

    bool erroAceito = false;
    int cicloAtual = ciclos / 2;
    // NOTE(review): each Learn() call appears to restart training from scratch on
    // the (augmented) training set — confirm that is intended.
    while (erroAceito == false && cicloAtual <= ciclos)
    {
        erroAceito = true;
        network.Learn(trainingSet, cicloAtual);
        double erroGeralRede = 0;
        foreach (Treinamento treinamento in treinamentos)
        {
            double[] previsao = network.Run(treinamento.Input.ToArray());
            // Relative error; NOTE(review): divides by zero (NaN) when both values are 0.
            double erroRede = 1 - Math.Min(previsao.First(), treinamento.Output.First()) / Math.Max(previsao.First(), treinamento.Output.First());
            erroGeralRede += erroRede;
            if (erroRede > 0.01)//more than 1% error: duplicate the sample to re-weight it
            {
                trainingSet.Add(new TrainingSample(treinamento.Input.ToArray(), treinamento.Output.ToArray()));
            }
        }
        erroGeralRede = erroGeralRede / treinamentos.Count;
        if (erroGeralRede > 0.01) erroAceito = false;
        cicloAtual += ciclos / 2;
    }

    // Persist the trained network (BinaryFormatter: unsafe for untrusted data).
    using (Stream stream = File.Open(diretorioRedes + "\\RedesPrevisaoFinanceira\\" + nomeRedeNeural + ".ndn", FileMode.Create))
    {
        IFormatter formatter = new BinaryFormatter();
        formatter.Serialize(stream, network);
    }
}
/// <summary>
/// Builds a 4-3-3 backpropagation network with sine activation layers
/// (learning rate 0.3) and an empty 4-input / 3-output training set.
/// </summary>
public void Sine()
{
    LinearLayer inputLayer = new LinearLayer(4);
    SineLayer hiddenLayer = new SineLayer(3);
    SineLayer outputLayer = new SineLayer(3);

    new BackpropagationConnector(inputLayer, hiddenLayer);
    new BackpropagationConnector(hiddenLayer, outputLayer);

    red = new BackpropagationNetwork(inputLayer, outputLayer);
    red.SetLearningRate(0.3);
    datos = new TrainingSet(4, 3);
}
/// <summary>
/// Loads the serialized backpropagation network and its training set from disk,
/// fills the progress bar, and shows an error dialog on any failure.
/// </summary>
public void cargarredneuronal()
{
    try
    {
        pBar.Value = 0;
        // FIX: the original opened both files without ever disposing the streams
        // (the first handle was simply lost when the variable was reassigned).
        // 'using' guarantees each file is closed even if deserialization throws.
        IFormatter formatter = new BinaryFormatter();
        using (Stream stream = File.Open(@"C:\ARCHIVOSARFF\red\red.ndn", FileMode.Open))
        {
            red = (NeuronDotNet.Core.Backpropagation.BackpropagationNetwork)formatter.Deserialize(stream);
        }
        using (Stream stream = File.Open(@"C:\ARCHIVOSARFF\red\data.ndn", FileMode.Open))
        {
            datos = (TrainingSet)formatter.Deserialize(stream);
        }
        // Fill the progress bar to 100%.
        for (int i = 1; i <= 100; i++)
        {
            pBar.Value = i;
        }
        MessageBox.Show("La red se ha cargado!");
    }
    catch (Exception)
    {
        MessageBox.Show("Error al cargar la red neuronal", "Error Crítico", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
/// <summary>
/// Trains a windowed time-series prediction network on the given price history and
/// serializes it to "network.ndn" in the current directory. Training repeats with a
/// growing cycle budget until every sample predicts within 0.5% of its target or
/// the budget is exhausted; failing samples are duplicated to re-weight them.
/// </summary>
/// <param name="dadosTreinamento">Price series used to build the sliding-window samples.</param>
/// <param name="janela">Input window size (network input vector length).</param>
/// <param name="ciclos">Total training-cycle budget.</param>
static void Training(List<double> dadosTreinamento, int janela, int ciclos)
{
    if (dadosTreinamento.Count < janela) return;

    // Map each input window to the value that follows it.
    List<KeyValuePair<double[], double>> dadosPorJanelamento = GetCotacoesPorJanelamento(dadosTreinamento, janela);

    BackpropagationNetwork network;
    int neuronCount = 4;
    double learningRate = 0.25d;
    ActivationLayer inputLayer = new LinearLayer(janela);
    SigmoidLayer hiddenLayer = new SigmoidLayer(neuronCount);
    SigmoidLayer outputLayer = new SigmoidLayer(1);
    new BackpropagationConnector(inputLayer, hiddenLayer).Initializer = new RandomFunction(0d, 0.3d);
    new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
    network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(learningRate);

    TrainingSet trainingSet = new TrainingSet(janela, 1);
    foreach (KeyValuePair<double[], double> kvp in dadosPorJanelamento)
    {
        trainingSet.Add(new TrainingSample(kvp.Key, new double[] { kvp.Value }));
    }

    bool correct = false;
    int currentCycles = ciclos / 5;
    while (correct == false && currentCycles <= ciclos)
    {
        network.Learn(trainingSet, currentCycles);

        // FIX: the original assigned 'correct' per sample, so only the LAST sample
        // decided whether training stopped. A round now counts as correct only when
        // EVERY sample is within tolerance.
        correct = true;
        foreach (KeyValuePair<double[], double> kvp in dadosPorJanelamento)
        {
            double previsao = network.Run(kvp.Key)[0];
            if (Math.Abs(kvp.Value - previsao) > (kvp.Value / 100 * 0.5)) // more than 0.5% error
            {
                correct = false;
                // Duplicate the failing sample so it is weighted more heavily next round.
                trainingSet.Add(new TrainingSample(kvp.Key, new double[] { kvp.Value }));
            }
        }
        currentCycles += ciclos / 5;
    }

    // Persist the trained network (BinaryFormatter: unsafe for untrusted data).
    using (Stream stream = File.Open(System.IO.Directory.GetCurrentDirectory() + @"\network.ndn", FileMode.Create))
    {
        IFormatter formatter = new BinaryFormatter();
        formatter.Serialize(stream, network);
    }
}
/// <summary>
/// Trains a single-layer (linear input -> linear output, fixed bias) backpropagation
/// network with early stopping on stagnating MSE, serializes it to disk, and returns
/// the hit rate over the training data.
/// </summary>
/// <returns>
/// Percentage (0-100) of samples whose predicted maximum index matches the expected
/// one, or -1 when there is nothing to train on.
/// </returns>
public static double Treinar(string nomeRedeNeural, List<Treinamento> treinamentos, int numeroNeuronios, double taxaAprendizado, int ciclos)
{
    if (treinamentos.Count == 0) return -1;

    int inputLayerCount = treinamentos.First().Input.Count();
    int outputLayerCount = treinamentos.First().Output.Count();

    BackpropagationNetwork network;
    ActivationLayer inputLayer = new LinearLayer(inputLayerCount);
    ActivationLayer outputLayer = new LinearLayer(outputLayerCount);
    outputLayer.UseFixedBiasValues = true;
    new BackpropagationConnector(inputLayer, outputLayer).Initializer = new RandomFunction(0, 0.3d);
    network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(taxaAprendizado);

    TrainingSet trainingSet = new TrainingSet(inputLayerCount, outputLayerCount);
    foreach (Treinamento treinamento in treinamentos)
    {
        trainingSet.Add(new TrainingSample(treinamento.Input.ToArray(), treinamento.Output.ToArray()));
    }

    // Early stopping: every 100 epochs, stop when the MSE got worse or improved by
    // less than 1% relative to the last checkpoint.
    double lastMeanSquareError = 1;
    network.EndEpochEvent += new TrainingEpochEventHandler(
        delegate(object senderNetwork, TrainingEpochEventArgs argsNw)
        {
            if (argsNw.TrainingIteration > 0 && argsNw.TrainingIteration % 100 == 0)
            {
                // FIX: read the property through the captured network reference
                // instead of reflection (GetType().GetProperty(...).GetValue(...)) —
                // same value, no reflection overhead.
                double erroAt = network.MeanSquaredError;
                if (erroAt > lastMeanSquareError || Math.Abs(lastMeanSquareError - erroAt) < (lastMeanSquareError / 100))
                {
                    network.StopLearning();
                }
                else lastMeanSquareError = erroAt;
            }
        });

    network.Learn(trainingSet, ciclos);

    int numeroAcertos = 0;
    foreach (Treinamento treinamento in treinamentos)
    {
        double[] previsao = network.Run(treinamento.Input.ToArray());
        if (ValoresMaximosNoMesmoIndice(previsao, treinamento.Output.ToArray()))
        {
            numeroAcertos++;
        }
    }

    // FIX: 'numeroAcertos / treinamentos.Count * 100' performed integer division and
    // returned 0 for anything below a 100% hit rate. Force floating-point math.
    double acertoRede = numeroAcertos * 100.0 / treinamentos.Count;

    // Persist the trained network (BinaryFormatter: unsafe for untrusted data).
    using (Stream stream = File.Open(diretorioRedes + nomeRedeNeural + ".ndn", FileMode.Create))
    {
        IFormatter formatter = new BinaryFormatter();
        formatter.Serialize(stream, network);
    }
    return acertoRede;
}
/// <summary>
/// Trains the network that selects the best network configuration for a ticker on a
/// given day, then serializes it to disk. Training repeats with a growing cycle
/// budget until every distinct sample is within tolerance or the budget runs out;
/// failing samples are duplicated to re-weight them.
/// </summary>
/// <param name="nomeRede">Name used for the serialized network file.</param>
/// <param name="trainingSet">Prepared training samples (also mutated: failing samples are re-added).</param>
private static void TreinarRedeDiaria(string nomeRede, TrainingSet trainingSet)
{
    BackpropagationNetwork network;
    int numeroNeuronios = 4;
    double taxaAprendizado = 0.25d;
    ActivationLayer inputLayer = new LinearLayer(trainingSet.InputVectorLength);
    ActivationLayer hiddenLayer = new SigmoidLayer(numeroNeuronios);
    ActivationLayer outputLayer = new SigmoidLayer(trainingSet.OutputVectorLength);
    new BackpropagationConnector(inputLayer, hiddenLayer).Initializer = new RandomFunction(0d, 0.3d);
    new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
    network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(taxaAprendizado);

    bool erroAceito = false;
    int cicloAtual = ciclos / 5;
    while (erroAceito == false && cicloAtual <= ciclos)
    {
        network.Learn(trainingSet, cicloAtual);

        // FIX: start each round assuming success. The original used
        // 'erroAceito = erroAceito && true', which can never turn the initial
        // 'false' into 'true', so the loop always exhausted the full cycle budget.
        erroAceito = true;

        // FIX: snapshot the samples before iterating — the loop body adds to
        // 'trainingSet', and mutating a collection while lazily enumerating it
        // throws InvalidOperationException.
        foreach (TrainingSample treinamento in trainingSet.TrainingSamples.Distinct().ToList())
        {
            double[] previsao = network.Run(treinamento.InputVector);
            double erroAcumulado = 0;
            for (int indRede = 0; indRede < trainingSet.OutputVectorLength; indRede++)
            {
                erroAcumulado += 1 - Math.Min(previsao[indRede], treinamento.OutputVector[indRede]) / Math.Max(previsao[indRede], treinamento.OutputVector[indRede]);
            }
            // NOTE(review): dividing by TrainingSampleCount (not OutputVectorLength)
            // looks suspicious for a per-sample mean — confirm intended.
            double erroMedio = erroAcumulado / trainingSet.TrainingSampleCount;
            if (erroMedio > 0.3) // more than 30% mean error: re-weight this sample
            {
                erroAceito = false;
                trainingSet.Add(treinamento);
            }
        }
        cicloAtual += ciclos / 5;
    }

    // Persist the trained network (BinaryFormatter: unsafe for untrusted data).
    using (Stream stream = File.Open(diretorioRedesCaptacao + nomeRede + ".ndn", FileMode.Create))
    {
        IFormatter formatter = new BinaryFormatter();
        formatter.Serialize(stream, network);
    }
}
/// <summary>
/// Invokes EndEpochEvent
/// </summary>
/// <param name="currentIteration">
/// Current training iteration
/// </param>
/// <param name="trainingSet">
/// Training set which got trained successfully this epoch
/// </param>
protected virtual void OnEndEpoch(int currentIteration, TrainingSet trainingSet)
{
    // Copy the delegate to a local before the null check and invocation.
    TrainingEpochEventHandler handler = EndEpochEvent;
    if (handler != null)
    {
        handler(this, new TrainingEpochEventArgs(currentIteration, trainingSet));
    }
}
/// <summary>
/// Builds a 4-3-3 backpropagation network with hyperbolic-tangent activation layers
/// (learning rate 0.3) and an empty 4-input / 3-output training set.
/// </summary>
public void Tangensial()
{
    LinearLayer inputLayer = new LinearLayer(4);
    TanhLayer hiddenLayer = new TanhLayer(3);
    TanhLayer outputLayer = new TanhLayer(3);

    new BackpropagationConnector(inputLayer, hiddenLayer);
    new BackpropagationConnector(hiddenLayer, outputLayer);

    red = new BackpropagationNetwork(inputLayer, outputLayer);
    red.SetLearningRate(0.3);
    datos = new TrainingSet(4, 3);
}
/// <summary>
/// Treina uma rede que mapeia (latitude, longitude) normalizadas para a zona da
/// agência. Trains in increments of ciclos/5, re-adding misclassified agencies to
/// the training set, until 99% accuracy is reached or the cycle budget is spent;
/// the resulting network is serialized to disk and returned.
/// </summary>
/// <param name="numeroNeuronios">Number of neurons in the hidden sigmoid layer.</param>
/// <param name="taxaAprendizado">Backpropagation learning rate.</param>
/// <param name="ciclos">Total training-cycle budget.</param>
/// <returns>The trained network.</returns>
private BackpropagationNetwork Treinar(int numeroNeuronios, double taxaAprendizado, int ciclos)
{
    List<Agencia> agencias = Agencia.PegarTodas();

    // Build (normalized lat/lon) -> zone-vector training pairs.
    List<KeyValuePair<double[], double[]>> dadosEntrada = new List<KeyValuePair<double[], double[]>>();
    foreach (Agencia agencia in agencias)
    {
        KeyValuePair<double[], double[]> kvp = new KeyValuePair<double[], double[]>(
            new double[] { NormalizarLatitude(agencia.Latitude), NormalizarLongitude(agencia.Longitude) },
            ConverterZonaParaArrayDouble(agencia.Zona));
        dadosEntrada.Add(kvp);
    }

    // Topology: 2 linear inputs -> sigmoid hidden -> 4 sigmoid outputs,
    // connector weights randomized in [0, 0.3).
    BackpropagationNetwork network;
    ActivationLayer inputLayer = new LinearLayer(2);
    ActivationLayer hiddenLayer = new SigmoidLayer(numeroNeuronios);
    ActivationLayer outputLayer = new SigmoidLayer(4);
    new BackpropagationConnector(inputLayer, hiddenLayer).Initializer = new RandomFunction(0d, 0.3d);
    new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
    network = new BackpropagationNetwork(inputLayer, outputLayer);
    network.SetLearningRate(taxaAprendizado);

    TrainingSet trainingSet = new TrainingSet(2, 4);
    foreach (KeyValuePair<double[], double[]> kvp in dadosEntrada)
    {
        trainingSet.Add(new TrainingSample(kvp.Key, kvp.Value));
    }

    network.EndEpochEvent += new TrainingEpochEventHandler(
        delegate(object senderNetwork, TrainingEpochEventArgs argsNw)
        {
            // Progress reporting intentionally disabled (no UI in this context).
        });

    int cicloAtual = ciclos / 5;
    int acertos = 0;
    // BUG FIX: 'acertos / agencias.Count' was INTEGER division, which is 0 until
    // every single agency is classified correctly — the intended 99% threshold
    // silently became 100%. Cast to double. Also guard against an empty agency
    // list, where the old condition threw DivideByZeroException.
    while (cicloAtual <= ciclos
           && agencias.Count > 0
           && (double)acertos / agencias.Count < 0.99)
    {
        network.Learn(trainingSet, cicloAtual);
        acertos = 0;
        foreach (Agencia agencia in agencias)
        {
            double[] input = new double[] { NormalizarLatitude(agencia.Latitude), NormalizarLongitude(agencia.Longitude) };
            double[] resultado = network.Run(input);
            if (ConverterArrayDoubleParaZona(resultado) != agencia.Zona)
                // Over-represent misclassified agencies in the next pass.
                trainingSet.Add(new TrainingSample(input, ConverterZonaParaArrayDouble(agencia.Zona)));
            else
                acertos++;
        }
        cicloAtual += ciclos / 5;
    }

    // SECURITY NOTE: BinaryFormatter is obsolete and unsafe on untrusted data;
    // kept for on-disk compatibility. 'nomeRede' is a member of the enclosing type.
    //using (Stream stream = File.Open(System.IO.Directory.GetCurrentDirectory() + "\\" + nomeRedeNeural + ".ndn", FileMode.Create))
    using (Stream stream = File.Open("..\\Pasta\\" + nomeRede + ".ndn", FileMode.Create))
    {
        IFormatter formatter = new BinaryFormatter();
        formatter.Serialize(stream, network);
    }

    return network;
}
/// <summary>
/// Trains the face-recognition backpropagation network from eigenvalue/tag data
/// and saves it to the specified path ("ann.dat" under the F-Spot base directory),
/// then records per-label training counts into the FaceSpot database.
/// </summary>
/// <param name="eigen">
/// A <see cref="EigenValueTags"/> holding the eigen vectors and their face labels.
/// </param>
private static void TrainNetwork(EigenValueTags eigen) { Log.Debug("================ Train Started ================ ");
string[] dLabels = eigen.FacesLabel;
int numInstances = eigen.eigenTaglist.Count;
int inputNodes = eigen.eigenTaglist[0].val.Length; // eigen vector length drives the input layer size
int outputNodes = dLabels.Length;                  // one output neuron per known face label
int hiddenNodes = inputNodes+outputNodes;
float[][] trainInput = new float[numInstances][];
float[][] trainOutput = new float[numInstances][];
//Random r = new Random();
// Build the target vectors: 0.9 for the neuron matching the sample's tag,
// 0.1 for all other neurons (soft one-hot targets for sigmoid outputs).
// NOTE(review): numstrain is incremented every iteration, so it always ends
// equal to numInstances — presumably a leftover from a filtering/sampling step.
int numstrain = 0;
for(int i=0;i<numInstances;i++){
trainInput[numstrain] = new float[inputNodes];
trainOutput[numstrain] = new float[outputNodes];
for(int j=0;j<dLabels.Length;j++){
if(eigen.eigenTaglist[i].tag.Equals(dLabels[j]))
trainOutput[numstrain][j] = 0.9f;
else
trainOutput[numstrain][j] = 0.1f;
}
for(int j=0;j<inputNodes;j++){
trainInput[numstrain][j] = eigen.eigenTaglist[i].val[j];
}
numstrain++;
}
// convert to double (TrainingSample takes double[] vectors, source data is float)
Log.Debug("nums train = "+ numstrain);
double[][] trainInputD = new double[numstrain][];
double[][] trainOutputD = new double[numstrain][];
for(int i=0;i<numstrain;i++){
trainInputD[i] = new double[inputNodes];
trainOutputD[i] = new double[outputNodes];
for(int j=0;j<outputNodes;j++){
trainOutputD[i][j] = trainOutput[i][j];
}
for(int j=0;j<inputNodes;j++){
trainInputD[i][j] = trainInput[i][j];
}
}
// TimeSpan tp = System.DateTime.Now.TimeOfDay;
Log.Debug("#in = {0}, #hid = {1}, #out = {2}",inputNodes,hiddenNodes,outputNodes);
// Fully-qualified layer names to disambiguate from other SigmoidLayer types in scope.
NeuronDotNet.Core.Backpropagation.SigmoidLayer inputLayer = new NeuronDotNet.Core.Backpropagation.SigmoidLayer(inputNodes);
NeuronDotNet.Core.Backpropagation.SigmoidLayer hiddenlayer = new NeuronDotNet.Core.Backpropagation.SigmoidLayer(hiddenNodes);
NeuronDotNet.Core.Backpropagation.SigmoidLayer outputlayer = new NeuronDotNet.Core.Backpropagation.SigmoidLayer(outputNodes);
Log.Debug("BackpropagationConnector input_hidden = new BackpropagationConnector(inputLayer, hiddenlayer);");
BackpropagationConnector input_hidden = new BackpropagationConnector(inputLayer, hiddenlayer);
BackpropagationConnector hidden_output = new BackpropagationConnector(hiddenlayer, outputlayer);
input_hidden.Momentum = 0.3;
hidden_output.Momentum = 0.3;
Log.Debug("bpnet = new BackpropagationNetwork(inputLayer,outputlayer);");
// bpnet is a member of the enclosing type, reused later for classification.
bpnet = new BackpropagationNetwork(inputLayer,outputlayer);
Log.Debug("TrainingSet tset = new TrainingSet(inputNodes, outputNodes);");
TrainingSet tset = new TrainingSet(inputNodes, outputNodes);
for(int i=0;i<numstrain;i++)
tset.Add(new TrainingSample(trainInputD[i], trainOutputD[i]));
// prevent getting stuck in local minima
bpnet.JitterNoiseLimit = 0.0001;
bpnet.Initialize();
int numEpoch = 200;
bpnet.SetLearningRate(0.2);
bpnet.Learn(tset, numEpoch);
// Log.Debug("error = {0}",bpnet.MeanSquaredError);
// string savepath = facedbPath + "object/";
// if(!Directory.Exists(savepath))
// Directory.CreateDirectory(savepath);
// Serialize the trained network next to the F-Spot data.
string path = Path.Combine (FSpot.Global.BaseDirectory, "ann.dat");
SerializeUtil.Serialize(path, bpnet);
// Deserialize
//BackpropagationNetwork testnet = (BackpropagationNetwork)SerializeUtil.DeSerialize("nn.dat");
// Log.Debug("error = {0}",bpnet.MeanSquaredError);
//bpnet = (BackpropagationNetwork)SerializeUtil.DeSerialize("/home/hyperjump/nn.dat");
//Log.Debug("error = {0}",bpnet.MeanSquaredError);
// test by using training data
// int correct = 0;
// for(int i=0;i<numInstances;i++){
//
// double[] v = new double[inputNodes];
// for(int j=0;j<v.Length;j++){
// v[j] = (double)eigen.eigenTaglist[i].val[j];
//Console.Write("{0},",v[j]);
// }
//Console.WriteLine();
// double[] netOutput = bpnet.Run(v);
//Console.WriteLine("net out:");
// for(int j=0;j<netOutput.Length;j++)
// Console.Write("{0},",netOutput[j]);
// string result = FaceClassifier.Instance.AnalyseNetworkOutput(eigen, netOutput);
// if(eigen.eigenTaglist[i].tag.Equals(result))
// correct++;
// }
// Log.Debug("% correct = " + (float)correct/(float)numInstances * 100);
//Save Train Status
Log.Debug("Saving Train Status...");
// Count how many training samples each label has and persist the counts.
List<Tstate> tstateList = new List<Tstate>();
int[] num = new int[dLabels.Length];
Log.Debug("num length = {0}",num.Length);
foreach(VTag vt in eigen.eigenTaglist){
for(int k=0;k<num.Length;k++)
if(vt.tag.Equals(dLabels[k]))
num[k]++;
}
for(int k=0;k<dLabels.Length;k++){
tstateList.Add(new Tstate(dLabels[k], num[k]));
}
FaceSpotDb.Instance.TrainingData.Trainstat = tstateList;
// Log.Debug("time ="+ System.DateTime.Now.TimeOfDay.Subtract(tp));
Log.Debug("================ Train ended ================ "); }