Example #1
        public override void Train(IForecastingDataSets datasets)
        {
            OnStartRunning(new ComponentRunEventArgs(datasets));
            AnnModelParameter para = mParameter as AnnModelParameter;

            LinearLayer inputLayer = new LinearLayer(datasets.InputData[0].Length);

            SigmoidLayer hiddenLayer = new SigmoidLayer(para.HiddenNeuronsCount[0]);
            SigmoidLayer outputLayer = new SigmoidLayer(1);

            new BackpropagationConnector(inputLayer, hiddenLayer).Initializer  = new RandomFunction(0d, 0.3d);
            new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.SetLearningRate(para.LearningRate);
            network.JitterEpoch      = para.JitterEpoch;
            network.JitterNoiseLimit = para.JitterNoiseLimit;
            network.EndEpochEvent   += new TrainingEpochEventHandler(
                delegate(object senderNetwork, TrainingEpochEventArgs args)
            {
                // TODO: the training error still needs to be calculated; 0 is passed as a placeholder below
                OnRunningEpoch(new AnnModelRunEpochEventArgs(args.TrainingIteration + 1, 0));
            });

            network.Learn(ForecastingDataSets.ConvertToTrainingSet(datasets), para.Iterations);

            datasets.ForecastedData = new double[datasets.InputData.Length][];
            for (int i = 0; i < datasets.InputData.Length; i++)
            {
                datasets.ForecastedData[i]    = new double[1];
                datasets.ForecastedData[i][0] = Forecast(datasets.InputData[i]);
            }
            OnFinishRunning(new ComponentRunEventArgs(datasets));
        }
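The epoch handler above leaves the reported error at 0 (see the TODO). A minimal sketch of one way to close that gap, assuming the surrounding class keeps the trained BackpropagationNetwork in the `network` field as shown: report the network's MeanSquaredError, the same property the other examples on this page read after training.

            // Hedged sketch, not part of the original example: pass the current
            // mean squared error instead of the hard-coded 0.
            network.EndEpochEvent += new TrainingEpochEventHandler(
                delegate(object senderNetwork, TrainingEpochEventArgs args)
            {
                OnRunningEpoch(new AnnModelRunEpochEventArgs(
                    args.TrainingIteration + 1,
                    network.MeanSquaredError));
            });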
Example #2
        private void button37_Click(object sender, EventArgs e)
        {
            TrainingSet egitimseti = new TrainingSet(35, 5);

            egitimseti.Add(new TrainingSample(VeriSeti.A, new double[5] {
                1, 0, 0, 0, 0
            }));
            egitimseti.Add(new TrainingSample(VeriSeti.A1, new double[5] {
                1, 0, 0, 0, 0
            }));
            egitimseti.Add(new TrainingSample(VeriSeti.B, new double[5] {
                0, 1, 0, 0, 0
            }));
            egitimseti.Add(new TrainingSample(VeriSeti.C, new double[5] {
                0, 0, 1, 0, 0
            }));
            egitimseti.Add(new TrainingSample(VeriSeti.D, new double[5] {
                0, 0, 0, 1, 0
            }));
            egitimseti.Add(new TrainingSample(VeriSeti.E, new double[5] {
                0, 0, 0, 0, 1
            }));
            ag.SetLearningRate(Convert.ToDouble(txt_ogrenmekatsayisi.Text)); // learning rate from the "learning coefficient" text box
            ag.Learn(egitimseti, Convert.ToInt32(txt_ogrenmehizi.Text));     // epoch count from the "learning speed" text box
            txt_ogrenmekatsayisi.Enabled = false;
            txt_ogrenmehizi.Enabled      = false;
            lbl_hata.Text    = ag.MeanSquaredError.ToString();
            button37.Enabled = false;
            MessageBox.Show("Yapay Sinir Ağı Eğitildi.", "Bilgi");
        }
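The handler above only trains the network; a hypothetical companion step (a sketch reusing the `ag` field from this example; `Classify` is an illustrative name, not part of the original code) could run a 35-value letter pattern through the trained network and pick the strongest of the five outputs:

        // Hypothetical helper, not in the original example: classify a 35-value
        // letter pattern with the trained network `ag` by taking the most active output.
        private int Classify(double[] pattern)
        {
            double[] output = ag.Run(pattern); // five outputs, one per letter (A, B, C, D, E)
            int best = 0;
            for (int i = 1; i < output.Length; i++)
            {
                if (output[i] > output[best])
                {
                    best = i;
                }
            }
            return best; // index of the winning letter
        }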
Example #3
        public void TestMethod1()
        {
            // Create the input, hidden, and output layers
            var inputLayer  = new LinearLayer(1);
            var hiddenLayer = new LinearLayer(5);
            var outputLayer = new LinearLayer(1);

            // Create the connections between the layers
            new BackpropagationConnector(inputLayer, hiddenLayer, ConnectionMode.Complete);
            new BackpropagationConnector(hiddenLayer, outputLayer, ConnectionMode.Complete);

            // Create the neural network
            var network = new BackpropagationNetwork(inputLayer, outputLayer);

            //network.SetLearningRate(new LinearFunction(0.1, 0.6));
            network.Initialize();

            // Train
            var ran = new Random();

            for (var i = 0; i < 100; i++)
            {
                var inputVector    = new double[] { i };
                var outputVector   = new double[] { Math.PI * i };
                var trainingSample = new TrainingSample(inputVector, outputVector);
                network.Learn(trainingSample, i, 100);
            }

            // Predict
            var testInput  = new double[] { 1 };
            var testOutput = network.Run(testInput);

            Console.WriteLine(testOutput[0]);
        }
Example #4
        public void LabTest1()
        {
            var inputLayer  = new LinearLayer(5);
            var hiddenLayer = new TanhLayer(neuronCount);
            var outputLayer = new TanhLayer(2);

            new BackpropagationConnector(inputLayer, hiddenLayer);
            new BackpropagationConnector(hiddenLayer, outputLayer);
            _xorNetwork = new BackpropagationNetwork(inputLayer, outputLayer);
            _xorNetwork.SetLearningRate(learningRate);

            var trainingSet = new TrainingSet(5, 2);
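            // Note: TanhLayer outputs lie in (-1, 1), so target values such as 2 or 3 below cannot be matched exactly.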

            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 0, 0, 0 }, new double[] { 0, 0 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 0, 1, 0 }, new double[] { 3, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 1, 0, 0 }, new double[] { 2, 2 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 0, 1, 1, 0 }, new double[] { 2, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 0, 0, 0 }, new double[] { 1, 1 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 0, 1, 0 }, new double[] { 1, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 1, 0, 0 }, new double[] { 1, 2 }));
            trainingSet.Add(new TrainingSample(new double[] { 0, 1, 1, 1, 0 }, new double[] { 1, 3 }));
            trainingSet.Add(new TrainingSample(new double[] { 22, 1, 1, 1, 22 }, new double[] { 1, 3 }));

            _errorList = new double[cycles];

            //_xorNetwork.EndEpochEvent += EndEpochEvent;
            _xorNetwork.Learn(trainingSet, cycles);

            var result = _xorNetwork.Run(new double[] { 0, 0, 1, 1, 0 });
        }
Example #5
        protected override void SolveInstance(IGH_DataAccess DA)
        {
            CrowNetBP net = new CrowNetBP();

            if (!networkLoaded)
            {
                int cycles = 1000;

                GH_Structure<GH_Number> tiv = new GH_Structure<GH_Number>();
                GH_Structure<GH_Number> tov = new GH_Structure<GH_Number>();

                DA.GetData(0, ref cycles);
                DA.GetData(1, ref net);
                DA.GetDataTree(2, out tiv);
                DA.GetDataTree(3, out tov);

                double[][] trainInVectors  = Utils.GHTreeToMultidimensionalArray(tiv);
                double[][] trainOutVectors = Utils.GHTreeToMultidimensionalArray(tov);


                int trainVectorCount = trainInVectors.Length;
                if (trainVectorCount != trainOutVectors.Length)
                {
                    AddRuntimeMessage(GH_RuntimeMessageLevel.Error, "Please supply an equal amount of input and output training vectors!");
                    return; // abort: training on mismatched data would fail below
                }

                int trainInVectorDimension  = trainInVectors[0].Length;
                int trainOutVectorDimension = trainOutVectors[0].Length;

                BackpropagationNetwork network = net.network(trainInVectorDimension, trainOutVectorDimension);


                // Set up the training set
                TrainingSet trainingSet = new TrainingSet(trainInVectorDimension, trainOutVectorDimension);

                for (int i = 0; i < trainVectorCount; i++)
                {
                    trainingSet.Add(new TrainingSample(trainInVectors[i], trainOutVectors[i]));
                }

                // train
                network.Learn(trainingSet, cycles);
                this.Network = network;
            }
            if (this.Network != null)
            {
                DA.SetData(0, this.Network.MeanSquaredError.ToString("0.0000000000"));

                CrowNetBPP nn = new CrowNetBPP(this.Network);
                nn.hiddenLayerList = net.hiddenLayerList;
                nn.layerStructure  = net.layerStructure;
                nn.neuronCount     = net.neuronCount;
                DA.SetData(1, nn);
            }

            networkLoaded = false;
        }
Example #6
        public void Train(int cpi, int cs = 5, int seed = -1)
        {
            if (seed == -1)
            {
                seed = Environment.TickCount;
            }
            Random      r  = new Random(seed);
            int         ai = 0;
            TrainingSet ts = new TrainingSet(Inputs, W * H * 3);

            foreach (var i in InImgs)
            {
                double[] iv = new double[Inputs];
                double[] ov = new double[W * H * 3];

                int ic = 0;
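                // Flatten the input image's RGB values into the input vector.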
                for (int y = 0; y < i.H; y++)
                {
                    for (int x = 0; x < i.W; x++)
                    {
                        iv[ic] = GV(i.Dat[ic++]);
                        iv[ic] = GV(i.Dat[ic++]);
                        iv[ic] = GV(i.Dat[ic++]);
                    }
                }
                Image oi = OutImgs[ai];
                int   vv = 0;
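                // Flatten the target (output) image's RGB values into the expected output vector.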
                for (int y = 0; y < i.H; y++)
                {
                    for (int x = 0; x < i.W; x++)
                    {
                        //int l = (i.H * y * 3) + (x * 3);
                        ov[vv] = GV(oi.Dat[vv++]); // read from the output image (oi), not the input image
                        ov[vv] = GV(oi.Dat[vv++]);
                        ov[vv] = GV(oi.Dat[vv++]);
                    }
                }

                ai++;

                TrainingSample s = new TrainingSample(iv, ov);
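                // Add cpi copies of the sample so each image pair is weighted cpi times in the training set.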
                for (int xc = 0; xc < cpi; xc++)
                {
                    ts.Add(s);
                }
            }
            Ready = false;
            //for(int t = 0; t < cs; t++)
            //{
            //            net.BeginEpochEvent += TrainE;
            net.EndEpochEvent += EndE;

            net.Learn(ts, cs);
            net.StopLearning();
            Console.WriteLine("Done training mind.");
        }
Example #7
        /// <summary>
        /// Click handler for the Calculate button
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void tsmiCalculate_Click(object sender, EventArgs e)
        {
            // Create the input, hidden, and output layers
            ActivationLayer inputLayer  = GetLayer(cboInputLayerType.SelectedItem.ToString(), 2);
            ActivationLayer hiddenLayer = GetLayer(cboHiddenLayerType.SelectedItem.ToString(), int.Parse(txtHiddenLayerCount.Text));
            ActivationLayer outputLayer = GetLayer(cboOutputLayerType.SelectedItem.ToString(), 1);

            // Create the connections between the layers
            new BackpropagationConnector(inputLayer, hiddenLayer, ConnectionMode.Complete).Initializer  = new RandomFunction(0, 0.3);
            new BackpropagationConnector(hiddenLayer, outputLayer, ConnectionMode.Complete).Initializer = new RandomFunction(0, 0.3);

            // Create the neural network
            var network = new BackpropagationNetwork(inputLayer, outputLayer);

            network.SetLearningRate(double.Parse(txtInitialLearningRate.Text), double.Parse(txtFinalLearningRate.Text));

            // Train the network
            var trainingSet = new TrainingSet(2, 1);

            for (var i = 0; i < 17; i++)
            {
                var x1 = data[i, 0];
                var x2 = data[i, 1];
                var y  = data[i, 2];

                var inputVector    = new double[] { x1, x2 };
                var outputVector   = new double[] { y };
                var trainingSample = new TrainingSample(inputVector, outputVector);
                trainingSet.Add(trainingSample);
            }
            network.SetLearningRate(0.3, 0.1); // note: this overrides the learning rates read from the text boxes above
            network.Learn(trainingSet, int.Parse(txtTrainingEpochs.Text));
            network.StopLearning();

            // Run predictions
            for (var i = 0; i < 17; i++)
            {
                var x1 = data[i, 0];
                var x2 = data[i, 1];
                var y  = data[i, 2];

                var testInput  = new double[] { x1, x2 };
                var testOutput = network.Run(testInput)[0];

                var absolute = testOutput - y;
                var relative = Math.Abs((testOutput - y) / testOutput);

                dgvData.Rows[i].Cells[3].Value = testOutput.ToString("f3");
                dgvData.Rows[i].Cells[4].Value = absolute.ToString("f3");
                dgvData.Rows[i].Cells[5].Value = (relative * 100).ToString("f1") + "%";
            }
        }
Example #8
        async Task LearnNetworkAsync()
        {
            _network = new BackpropagationNetwork(_inputLayer, _outputLayer);
            _network.Initialize();
            var trainingSet = new TrainingSet(1, 3);

            foreach (var b in from bomb in Enum.GetValues(typeof(BombTypes)).Cast<BombTypes>()
                              where bomb != BombTypes.Mine
                              select BombFabric.CreateBomb(bomb) into b
                              where b != null
                              select b)
            {
                trainingSet.Add(new TrainingSample(new double[] { b.BeepsLevel },
                                                   new double[] {
                    (int)b.FirstStageDisarming,
                    (int)b.SecondStageDisarming,
                    (int)b.ThirdStageDisarming
                }));
            }
            // Run the long training loop on a worker thread so the async method does not block the caller.
            await Task.Run(() => _network.Learn(trainingSet, 100000));
        }
Example #9
    void CreateTrainingSet()
    {
        if (trainingSetInputs == null || trainingSetInputs.Count == 0)
        {
            Debug.Log("You need to add training cases first!");
            return;
        }

        trainingSet = new TrainingSet(neurons, outputNum);
        List<double[]> tempInputs = trainingSetInputs;
        List<double>   tempOutput = trainingSetOutputs;

        for (int i = 0; i < tempInputs.Count; i++)
        {
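            // Encode each class label (0-3) as a -1/1 target vector; this assumes outputNum == 4.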
            if (tempOutput[i] == 0)
            {
                trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] {
                    1, -1, -1, -1
                }));
            }
            else if (tempOutput[i] == 1)
            {
                trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] {
                    -1, 1, -1, -1
                }));
            }
            else if (tempOutput[i] == 2)
            {
                trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] {
                    -1, -1, 1, -1
                }));
            }
            else if (tempOutput[i] == 3)
            {
                trainingSet.Add(new TrainingSample(tempInputs[i], new double[outputNum] {
                    -1, -1, -1, 1
                }));
            }
        }

        neuralNetwork.Learn(this.trainingSet, epochs);
    }
Example #10
        public void button4_Click(object sender, EventArgs e)
        {
            var openWin = new OpenFileDialog();

            openWin.DefaultExt = "txt";
            openWin.ShowDialog();
            string path = openWin.FileName;

            int nInput = Convert.ToInt32(textBox3.Text);
            int nOut   = Convert.ToInt32(textBox5.Text);

            TrainingSet train = new TrainingSet(nInput, nOut);

            string[] lines      = System.IO.File.ReadAllLines(path);
            string[] trainData;

            foreach (string line in lines)
            {
                trainData = line.Split(' ');
                // Allocate fresh vectors for each line so every TrainingSample keeps its own data.
                double[] trainInput = new double[nInput];
                double[] trainOut   = new double[nOut];

                for (int i = 0; i < nInput; i++)
                {
                    trainInput[i] = Convert.ToDouble(trainData[i]);
                }

                for (int i = nInput; i < nInput + nOut; i++)
                {
                    trainOut[i - nInput] = Convert.ToDouble(trainData[i]);
                }


                train.Add(new TrainingSample(trainInput, trainOut));
            }

            network.Learn(train, Convert.ToInt32(textBox6.Text));
            MessageBox.Show("Training OK");
        }
Example #11
        // Train button
        private void btnTrain_Click(object sender, EventArgs e)
        {
            TrainingSet trainingSet = new TrainingSet(35, 5);

            trainingSet.Add(new TrainingSample(Dataset.Letters.A, new double[5] {
                1, 0, 0, 0, 0
            }));
            trainingSet.Add(new TrainingSample(Dataset.Letters.B, new double[5] {
                0, 1, 0, 0, 0
            }));
            trainingSet.Add(new TrainingSample(Dataset.Letters.C, new double[5] {
                0, 0, 1, 0, 0
            }));
            trainingSet.Add(new TrainingSample(Dataset.Letters.D, new double[5] {
                0, 0, 0, 1, 0
            }));
            trainingSet.Add(new TrainingSample(Dataset.Letters.E, new double[5] {
                0, 0, 0, 0, 1
            }));
            neuralNetwork.SetLearningRate(0.3);
            neuralNetwork.Learn(trainingSet, 5000);
            btnTrain.Enabled      = false;
            btnGetResults.Enabled = true;
        }
Example #12
        private void Start(object sender, EventArgs e)
        {
            CleanseGraph();
            EnableControls(false);
            curve.Color = enabledColor;

            if (!int.TryParse(txtCycles.Text, out cycles))
            {
                cycles = 10000;
            }
            if (!double.TryParse(txtLearningRate.Text, out learningRate))
            {
                learningRate = 0.25d;
            }
            if (!int.TryParse(txtNeuronCount.Text, out neuronCount))
            {
                neuronCount = 10;
            }

            if (cycles <= 0)
            {
                cycles = 10000;
            }
            if (learningRate < 0 || learningRate > 1)
            {
                learningRate = 0.25d;
            }
            if (neuronCount <= 0)
            {
                neuronCount = 10;
            }

            txtCycles.Text       = cycles.ToString();
            txtLearningRate.Text = learningRate.ToString();
            txtNeuronCount.Text  = neuronCount.ToString();

            LinearLayer  inputLayer  = new LinearLayer(1);
            SigmoidLayer hiddenLayer = new SigmoidLayer(neuronCount);
            SigmoidLayer outputLayer = new SigmoidLayer(1);

            new BackpropagationConnector(inputLayer, hiddenLayer).Initializer  = new RandomFunction(0d, 0.3d);
            new BackpropagationConnector(hiddenLayer, outputLayer).Initializer = new RandomFunction(0d, 0.3d);
            network = new BackpropagationNetwork(inputLayer, outputLayer);
            network.SetLearningRate(learningRate);

            TrainingSet trainingSet = new TrainingSet(1, 1);

            for (int i = 0; i < curve.Points.Count; i++)
            {
                double xVal = curve.Points[i].X;
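                // Sample a small neighborhood (±0.05 in steps of 0.01) around each curve point, all mapped to the same Y value.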
                for (double input = xVal - 0.05; input < xVal + 0.06; input += 0.01)
                {
                    trainingSet.Add(new TrainingSample(new double[] { input }, new double[] { curve.Points[i].Y }));
                }
            }

            network.EndEpochEvent += new TrainingEpochEventHandler(
                delegate(object senderNetwork, TrainingEpochEventArgs args)
            {
                trainingProgressBar.Value = (int)(args.TrainingIteration * 100d / cycles);
                Application.DoEvents();
            });
            network.Learn(trainingSet, cycles);
            StopLearning(this, EventArgs.Empty);
        }
Example #13
        /// <summary>
        /// This constructs a training procedure for standard backpropagation techniques.
        /// More advanced ones will be used as seen in the example.
        /// </summary>
        /// <param name="writer"></param>
        public TestingNdn(StreamWriter writer)
        {
            TrainingSample sample = new TrainingSample(
                new double[] { },
                new double[] { });

            //We might make a gui for this later.
            int    numberOfNeurons = 3;
            double learningRate    = 0.5;
            int    numberOfCycles  = 10000;

            double[] errorList = new double[numberOfCycles];

            LinearLayer  inputLayer  = new LinearLayer(2);
            SigmoidLayer hiddenLayer = new SigmoidLayer(numberOfNeurons);
            SigmoidLayer outputLayer = new SigmoidLayer(1);

            // These connectors link the layers and carry the error signals backward during backpropagation training.
            BackpropagationConnector conn1 = new BackpropagationConnector(inputLayer, hiddenLayer);
            BackpropagationConnector conn2 = new BackpropagationConnector(hiddenLayer, outputLayer);

            BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);

            network.SetLearningRate(learningRate);

            TrainingSet trainingSet = new TrainingSet(2, 1);

            trainingSet.Add(new TrainingSample(new double[2] {
                0, 0
            }, new double[1] {
                0
            }));
            trainingSet.Add(new TrainingSample(new double[2] {
                0, 1
            }, new double[1] {
                1
            }));
            trainingSet.Add(new TrainingSample(new double[2] {
                1, 0
            }, new double[1] {
                1
            }));
            trainingSet.Add(new TrainingSample(new double[2] {
                1, 1
            }, new double[1] {
                0
            }));

            double max = 0;

            // create an anonymous function to capture the error value of each iteration, and report back the percent of completion.
            network.EndEpochEvent +=
                delegate(object networkInput, TrainingEpochEventArgs args)
            {
                errorList[args.TrainingIteration] = network.MeanSquaredError;
                max             = Math.Max(max, network.MeanSquaredError);
                PercentComplete = args.TrainingIteration * 100 / numberOfCycles;
            };

            network.Learn(trainingSet, numberOfCycles);

            double[] indices = new double[numberOfCycles];
            // for (int i = 0; i < numberOfCycles; i++) { indices[i] = i; }  // only needed when graphing the learning curve

            // errorList holds the per-epoch mean squared error and can be used to plot the learning curve.
            for (int i = 0; i < numberOfCycles; i++)
            {
                //Console.WriteLine(errorList[i]);
            }

            double[] outputResult = network.OutputLayer.GetOutput();
            Console.WriteLine("final output");

            double[] r1 = new double[] { 0, 0 };
            double[] r2 = new double[] { 0, 1 };
            double[] r3 = new double[] { 1, 0 };
            double[] r4 = new double[] { 1, 1 };

            Console.WriteLine(" 0 0 => " + network.Run(r1)[0]);
            Console.WriteLine(" 0 1 => " + network.Run(r2)[0]);
            Console.WriteLine(" 1 0 => " + network.Run(r3)[0]);
            Console.WriteLine(" 1 1 => " + network.Run(r4)[0]);
        }
Example #14
        private void Train(object sender, EventArgs e)
        {
            // btnTrain.Enabled = false;

            int cycles = 200;
            // if (!int.TryParse(txtCycles.Text, out cycles)) { cycles = 200; }
            // txtCycles.Text = cycles.ToString();

            int currentCombination = 0;

            //int totalCombinations = Alphabet.LetterCount * (Alphabet.LetterCount - 1) / 2;
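            // Train one small network for every pair of letters (a one-vs-one scheme) and save each network to disk.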

            for (int i = 0; i < Alphabet.LetterCount; i++)
            {
                for (int j = i + 1; j < Alphabet.LetterCount; j++)
                {
                    ActivationLayer inputLayer  = new LinearLayer(400);
                    ActivationLayer hiddenLayer = new SigmoidLayer(4);
                    ActivationLayer outputLayer = new SigmoidLayer(2);
                    new BackpropagationConnector(inputLayer, hiddenLayer);
                    new BackpropagationConnector(hiddenLayer, outputLayer);
                    BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);

                    TrainingSet trainingSet = new TrainingSet(400, 2);
                    Alphabet    ithLetter   = Alphabet.GetLetter(i);
                    Alphabet    jthLetter   = Alphabet.GetLetter(j);
                    foreach (Letter instance in ithLetter.Instances)
                    {
                        trainingSet.Add(new TrainingSample(instance.GetEquivalentVector(20, 20), new double[] { 1d, 0d }));
                    }
                    foreach (Letter instance in jthLetter.Instances)
                    {
                        trainingSet.Add(new TrainingSample(instance.GetEquivalentVector(20, 20), new double[] { 0d, 1d }));
                    }

                    //progressTraining.Value = 100 * currentCombination / totalCombinations;

                    Application.DoEvents();

                    bool correct = false;

                    int currentCycles = 35;
                    int count         = trainingSet.TrainingSampleCount;

                    while (!correct && currentCycles <= cycles)
                    {
                        network.Initialize();
                        network.Learn(trainingSet, currentCycles);
                        correct = true;
                        for (int sampleIndex = 0; sampleIndex < count; sampleIndex++)
                        {
                            double[] op = network.Run(trainingSet[sampleIndex].InputVector);
                            if (((trainingSet[sampleIndex].OutputVector[0] > trainingSet[sampleIndex].OutputVector[1]) && op[0] - op[1] < 0.4) ||
                                ((trainingSet[sampleIndex].OutputVector[0] < trainingSet[sampleIndex].OutputVector[1]) && op[1] - op[0] < 0.4))
                            {
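                                // Misclassified: re-add this sample so it carries extra weight in the next training round.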
                                correct = false;
                                trainingSet.Add(trainingSet[sampleIndex]);
                            }
                        }
                        currentCycles *= 2;
                    }

                    //lstLog.Items.Add(cboAplhabet.Items[i] + " & " + cboAplhabet.Items[j] + " = " + network.MeanSquaredError.ToString("0.0000"));
                    // lstLog.TopIndex = lstLog.Items.Count - (int)(lstLog.Height / lstLog.ItemHeight);
                    try
                    {
                        using (Stream stream = File.Open(Application.StartupPath + @"\Networks\" + i.ToString("00") + j.ToString("00") + ".ndn", FileMode.Create))
                        {
                            IFormatter formatter = new BinaryFormatter();
                            formatter.Serialize(stream, network);
                        }
                    }
                    catch (Exception)
                    {
                        MessageBox.Show("Failed to save trained neural networks", "Critical Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                        return;
                    }
                    currentCombination++;
                }
            }
            //  progressTraining.Value = 0;
            //  btnTrain.Enabled = false;
        }
Example #15
        public void startTraining(string filepath)
        {
            initTrening();
            try
            {   // Open the text file using a stream reader.
                using (StreamReader sr = new StreamReader(filepath))
                {
                    int iter  = 0;
                    int index = 0;
                    // Read the stream to a string, and write the string to the console.
                    String   line = sr.ReadLine();           // skip the first line, it contains the header
                    String[] tmp;                            // holds the fields of each split line
                    String[] lineMem;
                    for (int i = 0; i < daysToRead - 1; i++) // the last two
                    {
                        line = sr.ReadLine();
                        tmp  = line.Split(',');

                        SampleInput.SetValue(Convert.ToDouble(tmp[2], System.Globalization.CultureInfo.InvariantCulture), index++); // opening price
                        SampleInput.SetValue(Convert.ToDouble(tmp[3], System.Globalization.CultureInfo.InvariantCulture), index++); // max
                        SampleInput.SetValue(Convert.ToDouble(tmp[4], System.Globalization.CultureInfo.InvariantCulture), index++); // min
                        SampleInput.SetValue(Convert.ToDouble(tmp[5], System.Globalization.CultureInfo.InvariantCulture), index++); // closing price
                        SampleInput.SetValue(Convert.ToDouble(tmp[6], System.Globalization.CultureInfo.InvariantCulture), index++); // volume
                    }

                    while (!sr.EndOfStream && iter < trainingIterator)
                    {
                        // we need to read data for the first, second and third day; the third day's data becomes the first day's data in the next iteration

                        line    = sr.ReadLine();
                        lineMem = line.Split(',');
                        SampleInput.SetValue(Convert.ToDouble(lineMem[2], System.Globalization.CultureInfo.InvariantCulture), index);     // opening price of the last day
                        SampleInput.SetValue(Convert.ToDouble(lineMem[5], System.Globalization.CultureInfo.InvariantCulture), index + 1); // closing price of the last day as the expected output value
                        ValuesList.Add(SampleInput);
                        SampleInput = RollValInTable(SampleInput, lineMem);                                                               // shift the values in the table so day two's data becomes day one's data, etc.

                        //SampleOutput[0] = Double.Parse(tmp[5], System.Globalization.CultureInfo.InvariantCulture) / 100;
                        // parse the data read from the file into the input / output arrays
                        //tmp = line1st.Split(',');

                        //SampleInput[0] = Convert.ToDouble(tmp[2], System.Globalization.CultureInfo.InvariantCulture) / 100;
                        //SampleInput[1] = Double.Parse(tmp[3], System.Globalization.CultureInfo.InvariantCulture) / 100;
                        //SampleInput[2] = Double.Parse(tmp[4], System.Globalization.CultureInfo.InvariantCulture) / 100;
                        //SampleInput[3] = Double.Parse(tmp[5], System.Globalization.CultureInfo.InvariantCulture) / 100;

                        //tmp = line2nd.Split(',');
                        //SampleInput[4] = Double.Parse(tmp[2], System.Globalization.CultureInfo.InvariantCulture) / 100;
                        //SampleInput[5] = Double.Parse(tmp[3], System.Globalization.CultureInfo.InvariantCulture) / 100;
                        //SampleInput[6] = Double.Parse(tmp[4], System.Globalization.CultureInfo.InvariantCulture) / 100;
                        //SampleInput[7] = Double.Parse(tmp[5], System.Globalization.CultureInfo.InvariantCulture) / 100;

                        //tmp = line3rd.Split(',');
                        //SampleInput[8] = Double.Parse(tmp[2], System.Globalization.CultureInfo.InvariantCulture) / 100;

                        //SampleOutput[0] = Double.Parse(tmp[5], System.Globalization.CultureInfo.InvariantCulture) / 100;
                        // end of parsing

                        //trainingSet.Add(new TrainingSample(SampleInput, SampleOutput));

                        iter++;
                    }
                    setData2Training();
                    network.Learn(trainingSet, cycles);
                    //Console.WriteLine(line);
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("The file could not be read:");
                Console.WriteLine(e.Message);
            }
        }
        /// <summary>
        /// Created this to test the custom neuron network with binary inputs.
        /// </summary>
        public static void Test(
            string file,
            int numberOfInputNeurons,
            int numberOfHiddenNeurons,
            int numberOfOutputNeurons,
            int numberOfCycles  = 50000,
            double learningRate = 0.25)
        {
            TrainingSample sample = new TrainingSample(
                new double[] { },
                new double[] { });

            //We might make a gui for this later.

            double[] errorList = new double[numberOfCycles];

            int totalNumberOfNeurons = numberOfInputNeurons + numberOfOutputNeurons;

            LinearLayer  inputLayer  = new LinearLayer(numberOfInputNeurons);
            SigmoidLayer hiddenLayer = new SigmoidLayer(numberOfHiddenNeurons);
            SigmoidLayer outputLayer = new SigmoidLayer(numberOfOutputNeurons);

            // These connectors link the layers and carry the error signals backward during backpropagation training.
            BackpropagationConnector conn1 = new BackpropagationConnector(inputLayer, hiddenLayer);
            BackpropagationConnector conn2 = new BackpropagationConnector(hiddenLayer, outputLayer);

            BackpropagationNetwork network = new BackpropagationNetwork(inputLayer, outputLayer);

            network.SetLearningRate(learningRate);

            TrainingSet trainingSet = new TrainingSet(10, 8); // note: vector sizes are hard-coded to 10 inputs / 8 outputs here and below, ignoring the method parameters

            // A file stream reader.
            var inDefaule = Console.In;

            using (StreamReader reader = new StreamReader(file))
            {
                Console.SetIn(reader);
                String line = "";
                //trainingSet.Add(new TrainingSample(new double[] { 0, 0, 0, 0, 1 }, new double[1] { 1 }));
                while ((line = reader.ReadLine()) != null)
                {
                    String[] array       = line.Split(',');
                    double[] inputArray  = new double[10];
                    double[] outputArray = new double[8];

                    for (int i = 0; i < 10; i++)
                    {
                        inputArray[i] = Convert.ToDouble(array[i]);
                    }

                    for (int i = 0; i < 8; i++)
                    {
                        outputArray[i] = Convert.ToDouble(array[i + 11]);
                    }

                    trainingSet.Add(new TrainingSample(inputArray, outputArray));
                }
            }

            double max = 0;

            // create an anonymous function to capture the error value of each iteration, and report back the percent of completion.
            network.EndEpochEvent +=
                delegate(object networkInput, TrainingEpochEventArgs args)
            {
                errorList[args.TrainingIteration] = network.MeanSquaredError;
                max = Math.Max(max, network.MeanSquaredError);
                // PercentComplete = args.TrainingIteration * 100 / numberOfCycles;
            };

            network.Learn(trainingSet, numberOfCycles);

            double[] indices = new double[numberOfCycles];
            // for (int i = 0; i < numberOfCycles; i++) { indices[i] = i; }  // only needed when graphing the learning curve

            // errorList holds the per-epoch mean squared error and can be used to plot the learning curve.
            // for (int i = 0; i < numberOfCycles; i++)
            // {
            //Console.WriteLine(errorList[i]);
            //  }

            // print out the error list for scientific evaluation.
            StreamUtilities.DumpData("dumpErrorValues.txt", errorList);

            double[] outputResult = network.OutputLayer.GetOutput();


            outputResult = network.Run(new double[] { 0.47, 0.41, 0.12, 0.05, 0.1, 0.5, 0.1, 0.1, 0.05, 0.1 });

            foreach (var d in outputResult)
            {
                Console.WriteLine("output: " + d);
            }

            // Console.WriteLine("final output");
        }