void backgroundWorkerTrainer_DoWork(object sender, DoWorkEventArgs e)
        {
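            // train repeatedly until the 'work' flag is cleared elsewhere (e.g. by the UI)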
            while (work)
            {
                // run epoch of learning procedure
                double error = teacher.RunEpoch(input, output);
                // check error value to see if we need to stop
                // ...

                // scale the error for charting and history
                var c = error * 10;
                gerrror.Add(Convert.ToInt32(c));

                SetText(error.ToString());
                chartdata.Add(Convert.ToSingle(c));
                listPointsOne.Add((double)watch.ElapsedMilliseconds, error);
                axisChangeZedGraph(zedGraphControl1);
                Thread.Sleep(2);
                if (logger)
                {
                    SetTextLogger(error.ToString());
                }
                if (watch.IsRunning)
                {
                    SetTextTime(watch.ElapsedMilliseconds.ToString());
                }
            }
        }
        void TrainNetworkForWeight()
        {
            inputWeight = LoadFromFile("Ideal_Input_Weight.cfg");

            weightNetwork.Randomize();

            ResilientBackpropagationLearning learning = new ResilientBackpropagationLearning(weightNetwork);

            // for Rprop, the learning rate seeds the per-weight step sizes
            learning.LearningRate = 0.5;

            outputWeight = LoadFromFile("Ideal_Output_Weight.cfg");

            bool needToStop = false;
            int  iteration  = 0;

            while (!needToStop)
            {
                double error = learning.RunEpoch(inputWeight, outputWeight);

                // an exact zero is rare for a double; the 50000-iteration cap below is the practical stop
                if (error == 0)
                {
                    break;
                }
                else if (iteration < 50000)
                {
                    iteration++;
                }
                else
                {
                    needToStop = true;
                }
            }
        }
Example No. 3
        public double TrainNetwork(int epochsCount, IProgress <int> progress, bool isInputNormalized = false)
        {
            if (!isInitialized)
            {
                return(-1);
            }

            input  = CreateEpochInput();
            output = CreateEpochOutput();

            if (isInputNormalized)
            {
                NormalizeInput(input);
            }

            double err = -1;

            PrepareFile();
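            // PrepareFile presumably opens or clears the log written by SaveErrorToFile below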

            for (int i = 0; i < epochsCount; i++)
            {
                err = teacher.RunEpoch(input, output);
                SaveErrorToFile(i, err);
                progress.Report(((i + 1) * 100) / epochsCount);
            }
            return(err);
        }
Example No. 4
        public void Train()
        {
            bool   b      = true;
            double error  = 0;
            int    cnt_it = 0;

            while (b)
            {
                // run epoch of learning procedure
                error = teacher.RunEpoch(
                    input.ConvertAll(x => x.ToArray()).ToArray(),
                    output.ConvertAll(x => x.ToArray()).ToArray());

                //teacher.LearningRate *= 0.99;

                // check error value to see if we need to stop
                // ...
                b = ((error > 0.1) && (cnt_it < cnt_epochs));

                // cnt_it % 1 is always 0, so progress is reported on every iteration
                PutCurrInfo(cnt_it, error);

                ++cnt_it;
            }
            double accuracy = CheckAccuracy();

            total_cnt_epochs += (cnt_it - 1);
            PutTotalInfo(cnt_it - 1, error, accuracy);
        }
Example No. 5
        public void ExecuteFold(int k)
        {
            int LengthOfInput  = this.FormattedData[0].Input.Count();
            int LengthOfOutput = this.FormattedData[0].Output.Count();

            ActivationNetwork NeuralNetwork = new ActivationNetwork(
                new SigmoidFunction(2),
                LengthOfInput,
                this.NumberOfHiddenLayerNeurons(LengthOfInput, LengthOfOutput),
                LengthOfOutput);

            NguyenWidrow weights = new NguyenWidrow(NeuralNetwork);
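            // Nguyen-Widrow spreads the initial weights across the input space for faster convergence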

            weights.Randomize();

            ResilientBackpropagationLearning BackProp = new ResilientBackpropagationLearning(NeuralNetwork);

            BackProp.LearningRate = this.LearningRate;
            //BackProp.Momentum = 0.5;

            List <NetIO> TrainingData   = new List <NetIO>();
            List <NetIO> ValidationData = new List <NetIO>();

            ReadWrite.RemoveKFold(this.FormattedData, ref TrainingData, ref ValidationData, k);

            // for each epoch
            int epoch     = 0;
            int maxEpochs = int.MaxValue;
            EarlyStoppingTools netError = new EarlyStoppingTools(this.patience);
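            // early stopping: training ends once the validation error has gone 'patience' epochs without improving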

            do
            {
                ++epoch;

                double internalError = BackProp.RunEpoch(TrainingData.Select(l => l.Input.ToArray()).ToArray(),
                                                         TrainingData.Select(l => l.Output.ToArray()).ToArray());

                this.RssError = EarlyStoppingTools.RssError(NeuralNetwork,
                                                            ValidationData.Select(l => l.Input.ToArray()).ToArray(),
                                                            ValidationData.Select(l => l.Output.ToArray()).ToArray());

                //Console.WriteLine("Epochs: " + epoch);
                //Console.WriteLine("Training error: " + internalError);
                //Console.WriteLine("CV Error: " + this.RssError);
            } while (!netError.ExceedsPatience(RssError) && epoch < maxEpochs);

            Console.Write("Target: ");
            ValidationData[0].Output.ForEach(i => Console.Write(i));
            Console.WriteLine();
            Console.WriteLine("Result: " + string.Join(",", NeuralNetwork.Compute(ValidationData[0].Input.ToArray())));

            this.NumberOfEpochs = epoch;

            Console.WriteLine("Epochs required: " + epoch);
            Console.WriteLine("Error: " + RssError);
        }
Example No. 6
        public Network Learn(double[][] features, double[] targets, int?hiddenLayerCount = null)
        {
            Debug.Assert(features.Length > 0);
            var inputsCount = features[0].Length;

            var outputs      = targets.Select(item => new[] { item }).ToArray();
            var outputsCount = 1;

            // Create an activation network with a bipolar sigmoid function and one hidden layer
            var network = new ActivationNetwork(
                new BipolarSigmoidFunction(),
                inputsCount,
                hiddenLayerCount.GetValueOrDefault(2 * inputsCount),
                outputsCount);

            // Randomly initialize the network
            new NguyenWidrow(network).Randomize();

            // Teach the network using parallel Rprop:
            var teacher = new ResilientBackpropagationLearning(network);
            // var teacher = new LevenbergMarquardtLearning(network);
            //var teacher = new EvolutionaryLearning(network, 100);

            // Iterate until stop criteria is met
            var    error = teacher.RunEpoch(features, outputs);
            double previous;
            var    iteration = 0;

            do
            {
                previous = error;
                iteration++;
                // Compute one learning iteration
                error = teacher.RunEpoch(features, outputs);
            } while (Math.Abs(previous - error) > 1e-9 * previous);

            // collect per-sample results for later inspection
            var results = new (double, double, double)[features.Length];

            return network;
        }
Example No. 7
        public override void Initialize()
        {
            var history = History("SPY", TimeSpan.FromDays(1000), Resolution.Daily);

            var highestClose  = history.Max(h => h.Close);
            var lowestClose   = history.Min(h => h.Close);
            var highestVolume = history.Max(h => h.Volume);
            var lowestVolume  = history.Min(h => h.Volume);

            var inputs = history.Select(h =>
                                        new[]
            {
                (double)((h.Close - lowestClose) / (highestClose - lowestClose)),
                (double)(h.Volume - lowestVolume) / (highestVolume - lowestVolume)
            }).ToArray();

            // label each day from the next day's normalized close: 0 = down, 1 = flat, 2 = up
            var classes = inputs.Take(inputs.Length - 1).Zip(inputs.Skip(1), (a, b) => b[0] < a[0] ? 0 : b[0] > a[0] ? 2 : 1).ToArray();

            var outputs = Jagged.OneHot(classes);
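            // Jagged.OneHot expands the class indices {0, 1, 2} into three-column target vectors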

            // three output neurons, one per one-hot class
            var network = new ActivationNetwork(new SigmoidFunction(), 2, 3, 3);

            new NguyenWidrow(network).Randomize();

            var teacher2 = new ResilientBackpropagationLearning(network);

            // Run supervised learning until the epoch error stops improving.
            var error    = teacher2.RunEpoch(inputs, outputs);
            var previous = double.MaxValue;

            while (error < previous)
            {
                previous = error;
                error    = teacher2.RunEpoch(inputs, outputs);
            }

            // Checks if the network has learned
            for (var i = 0; i < inputs.Length; i++)
            {
                var answer = network.Compute(inputs[i]);

                var expected = classes[i];
                int actual;
                answer.Max(out actual);
                // actual should be equal to expected
            }
        }
Example No. 8
        public double Train(double[][] input, double[][] outputs)
        {
            var teacher = new ResilientBackpropagationLearning(network);

            teacher.LearningRate = topResults.TrainingSpeed;

            double error = 0;

            for (int iteration = 0; iteration < topResults.Iterations; iteration++)
            {
                error = teacher.RunEpoch(input, outputs);
            }

            return(error);
        }
Example No. 9
        private void TrainSupervised(double[][] batchInputs, double[][] batchOutputs, int iterations, Action <double, int, string> progressCallback)
        {
            foreach (int i in Enumerable.Range(1, iterations))
            {
                // RunEpoch returns the summed error over the batch; divide for a per-sample figure
                var error = supervisedTeacher.RunEpoch(batchInputs, batchOutputs) / batchInputs.Length;
                if (progressCallback != null)
                {
                    progressCallback(error, i, "Supervised");
                }
                if (this.ShouldStopTraning)
                {
                    this.ShouldStopTraning = false;
                    break;
                }
            }
        }
Example No. 10
        public double Train(double[][] input, double[][] outputs, int iterations, float rate)
        {
            var teacher = new ResilientBackpropagationLearning(network);

            teacher.LearningRate = rate;
            double error = 0;

            for (int iteration = 0; iteration < iterations; iteration++)
            {
                error = teacher.RunEpoch(input, outputs);
            }

            return(error);
        }
Example No. 11
        public void RBPBuild(List <train> datalist)
        {
            double[][] inputs;
            double[][] outputs;
            double[][] matrix;

            GetData(out inputs, out outputs, out matrix, datalist);
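            // GetData (defined elsewhere) is assumed to unpack the training list into jagged input/output arrays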

            // create a 9-3-1 neural network
            network_rbp = new ActivationNetwork(
                new BipolarSigmoidFunction(),
                9, // nine inputs
                3, // three neurons in the hidden layer
                1  // one neuron in the output layer
                );
            // Randomly initialize the network
            new NguyenWidrow(network_rbp).Randomize();

            // create teacher
            teacher_rbp = new ResilientBackpropagationLearning(network_rbp);

            int times = 0;

            // run 50 epochs of the learning procedure
            while (times++ < 50)
            {
                // run epoch of learning procedure
                double error = teacher_rbp.RunEpoch(inputs, outputs);
                // check error value to see if we need to stop
                // ...
            }


            // Checks if the network has learned
            /*
            for (int i = 0; i < inputs.Length; i++)
            {
                double[] answer = network.Compute(inputs[i]);

                log(answer[0].ToString());

                // actual should be equal to expected
            }
            */
        }
Example No. 12
            public IForecastingModel TrainNewModel(double[][] iInput, double[][] iOutput)
            {
                int inputSize = iInput[0].Length, samplesNum = iOutput.Length;

                if (samplesNum != iInput.Length)
                {
                    throw new ArgumentException();
                }

                for (int i = 0; i < samplesNum; ++i)
                {
                    if (iInput[i].Length != inputSize || iOutput[i].Length != 1) //iInput isn't a square matrix or iOutput isn't a vector
                    {
                        throw new ArgumentException();
                    }
                }

                int[]  neuronsCount       = (int[])ModelParametersDict[NeuronsInLayersKey];
                string activationFunction = (string)ModelParametersDict[ActivationFunctionKey];
                long   maxIterNum         = (long)ModelParametersDict[MaxIterationsNumberKey];
                double stopError          = (double)ModelParametersDict[StopErrorKey];

                ActivationNetwork   netToTrain = new ActivationNetwork(ActivationFunctionsDict[activationFunction], inputSize, neuronsCount);
                DataNormalizer      normalizer = new DataNormalizer(iInput.Concat(iOutput).ToArray());
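                // fitting the normalizer on inputs and outputs together puts both on one scale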
                IForecastingModel   aModel     = new ANNforecastingModel(netToTrain, normalizer);
                ISupervisedLearning teacher    = new ResilientBackpropagationLearning(netToTrain);

                double[][] trainInputSet, trainOutputSet;
                TrainingSubsetGenerator.GenerateRandomly(iInput, iOutput, out trainInputSet, out trainOutputSet, iMultiplier: TrainSubsetMultiplier);

                trainInputSet  = normalizer.Normalize(trainInputSet);
                trainOutputSet = normalizer.Normalize(trainOutputSet);

                long   epochsCount = 0;
                double nextError = ErrorCalculator.CalculateMSE(aModel, iInput, iOutput), prevError;
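                // iterate until the change in full-set MSE drops below stopError or maxIterNum is reached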

                do
                {
                    prevError = nextError;
                    teacher.RunEpoch(trainInputSet, trainOutputSet);
                    nextError = ErrorCalculator.CalculateMSE(aModel, iInput, iOutput);
                }while (epochsCount++ <= maxIterNum && Math.Abs(prevError - nextError) >= stopError);
                return(aModel);
            }
Example No. 14
        /// <summary>
        /// <inheritdoc />
        /// </summary>
        public override void Train()
        {
            var inputs = data.GetSelectedInput(features);

            var outputs = data.GetExpectedClassificationOutput();

            network = new ActivationNetwork(new SigmoidFunction(), inputs[0].Length, 25, 1);
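            // one input layer, 25 hidden neurons, and a single output for the class label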

            var initialization = new NguyenWidrow(network);

            initialization.Randomize();

            var teacher = new ResilientBackpropagationLearning(network);

            var NetworkOutputs = new double[inputs.Length][];

            for (int i = 0; i < NetworkOutputs.Length; i++)
            {
                // wrap each scalar class label in a one-element array for the network
                NetworkOutputs[i] = new double[1] { outputs[i] };
            }

            double error = double.PositiveInfinity;

            int epoch = 0;

            // train until the summed epoch error drops below 2.5 or 5000 epochs pass
            while (error > 2.5 && epoch < 5000)
            {
                error = teacher.RunEpoch(inputs, NetworkOutputs);

                epoch++;
            }

            Save();
        }
Example No. 15
        void backgroundWorkerTrainer_DoWork(object sender, DoWorkEventArgs e)
        {
            while (work)
            {
                double error = 0;

                // run an epoch of the algorithm selected in the UI
                if (selected_algorythm == 0)
                {
                    error = teacher.RunEpoch(input, output);
                }
                else if (selected_algorythm == 1)
                {
                    error = reprop.RunEpoch(input, output);
                }
                else if (selected_algorythm == 2)
                {
                    error = evteacher.RunEpoch(input, output);
                }
                else if (selected_algorythm == 3)
                {
                    error = lbteacher.RunEpoch(input, output);
                }
                else if (selected_algorythm == 4)
                {
                    error = delta.RunEpoch(input, output);
                }
                else if (selected_algorythm == 5)
                {
                    error = perceptron.RunEpoch(input, output);
                }

                listPointsOne.Add((double)watch1.ElapsedMilliseconds, error);

                // check error value to see if we need to stop
                // ...

                // scale the error for charting
                var c = error * 10;

                SetText(error.ToString());
                chartdata.Add(Convert.ToSingle(c));

                axisChangeZedGraph(zedGraphControl1);
                Thread.Sleep(2);
                if (logger)
                {
                    SetTextLogger(error.ToString());
                }
                if (watch1.IsRunning)
                {
                    SetTextTime(watch1.ElapsedMilliseconds.ToString());
                }
                Thread.Sleep(sleeptime);
            }
        }
Example No. 16
        private void listBox4_SelectedIndexChanged(object sender, EventArgs e)
        {
            if (Global.Model == null || listBox4.SelectedItem == null)
            {
                return;
            }

            var selected = ((DailyPrice)listBox4.SelectedItem).StockCode;

            toolStripStatusLabel3.Text = $"Predicting {((DailyPrice)listBox4.SelectedItem).StockCode}";
            IEnumerable <DailyPrice> priceForDate = Global.DataList
                                                    .Where(x => x.StockCode == selected)
                                                    .OrderBy(x => x.CloseDate);

            var currentList = priceForDate.ToList();

            chart2.Series[0].Points.DataBindY(priceForDate.Select(x => x.ClosePrice).ToArray());

            ActivationNetwork _model = null;

            //Training a new model for the stock
            if (checkBox1.Checked)
            {
                _model = new ActivationNetwork(new BipolarSigmoidFunction(2),
                                               Global.FeaturesCount, 15, 1); //hardcoded neurons count
                var _iterations  = 1000;
                var _teacher     = new ResilientBackpropagationLearning(_model);
                var _initializer = new NguyenWidrow(_model);
                _initializer.Randomize();

                var _inputs  = DataHelper.DataHelper.GetInputArray(currentList);
                var _outputs = DataHelper.DataHelper.GetOutputArray(currentList);

                for (int i = 0; i < _iterations; i++)
                {
                    var trainingError = _teacher.RunEpoch(_inputs, _outputs);
                    toolStripStatusLabel3.Text = $"Predicting {((DailyPrice)listBox4.SelectedItem).StockCode} {i}/{_iterations} | e={trainingError / _inputs.Length}";
                    Application.DoEvents();
                }
            }

            //end of training
            var firstItem  = priceForDate.Select(x => x.ClosePrice).FirstOrDefault();
            var lastItem   = priceForDate.Select(x => x.ClosePrice).LastOrDefault();
            var firstStock = priceForDate.FirstOrDefault();

            lbStockCurrent.Text = $"{selected} | {priceForDate.Count()} days: Profit: {lastItem - firstItem} VND | {Math.Round((lastItem / firstItem - 1) * 100, 2)}%\n" +
                                  $"Volatility: {Math.Round(firstStock.Volatility * 100, 2)}%\n";
            linkLabel1.Text = firstStock.StockCode;
            linkLabel1.Links.Clear();
            linkLabel1.Links.Add(new LinkLabel.Link()
            {
                LinkData = firstStock.URL
            });

            var    listPredict = priceForDate.ToList();
            int    dayCount    = listPredict.Count;
            double error       = 0.0;

            if (listPredict.Count > 1)
            {
                for (int i = 0; i < listPredict.Count - 1; i++)
                {
                    var previous = currentList[i];
                    var original = currentList[i + 1];
                    listPredict[i + 1] = PredictSingle(previous, Utils.GetNextDay(previous.CloseDate), _model);
                    // absolute deviation (the square root of a square is just the magnitude)
                    error += Math.Abs(listPredict[i + 1].Profit - original.Profit);
                }
                error /= dayCount;
                string predictedDetails = String.Empty;

                for (int i = 0; i < 5; i++)
                {
                    var previous     = listPredict.LastOrDefault();
                    var predictValue = PredictSingle(previous, Utils.GetNextDay(previous.CloseDate));
                    listPredict.Add(predictValue);
                    predictedDetails += $"{predictValue.CloseDate} - {predictValue.ClosePrice} | {predictValue.ProfitPretified}%\n";
                }

                chart2.Series[1].Points.DataBindY(listPredict.Select(x => x.ClosePrice).ToArray());

                var firstPredict = listPredict[dayCount - 1].ClosePrice;
                var lastPredict  = Math.Round(listPredict.Select(x => x.ClosePrice).LastOrDefault());

                lbForecast.Text = $"Average error: {error}\n" +
                                  $"Last predicted price: {lastPredict}\n" +
                                  $"Change from now: {lastPredict - firstPredict} VND | {Math.Round((lastPredict / firstPredict - 1) * 100, 2)}%\n\n" +
                                  predictedDetails;
            }

            //moving average (4 days) as baseline
            int numDays     = 4;
            var averageList = new List <double>();

            for (int i = 0; i < listPredict.Count; i++)
            {
                if (i >= (numDays - 1))
                {
                    var average = 0.0d;
                    for (int a = 0; a < numDays; a++)
                    {
                        if (i < currentList.Count)
                        {
                            average += currentList[i - a].ClosePrice / numDays;
                        }
                        else
                        {
                            average += listPredict[i - a].ClosePrice / numDays;
                        }
                    }
                    averageList.Add(average);
                }
                else
                {
                    averageList.Add(double.NaN);
                }
            }
            chart2.Series[2].Points.DataBindY(averageList.ToArray());

            toolStripStatusLabel3.Text = "Ready";
            checkBox1.Checked          = false;
        }
Example No. 17
        /// <summary>
        /// <inheritdoc />
        /// </summary>
        public override void Train()
        {
            var inputsOriginal  = data.GetSelectedInput(features);
            var outputsOriginal = data.GetExpectedRegressionOutput();

            var tempInputs  = new List <double[]>();
            var tempOutputs = new List <double>();

            for (int i = 0; i < inputsOriginal.Length; i++)
            {
                // filter the samples by the sign of the expected output
                if ((positive && outputsOriginal[i] < 0.0) || (!positive && outputsOriginal[i] > 0.0))
                {
                    tempInputs.Add(inputsOriginal[i]);
                    tempOutputs.Add(outputsOriginal[i]);
                }
            }

            var inputs  = tempInputs.ToArray();
            var outputs = tempOutputs.ToArray();

            var function = new SigmoidFunction();

            network = new ActivationNetwork(function, inputs[0].Length, 5, 1);
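            // inputs[0].Length input neurons, a 5-neuron hidden layer, and one regression output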

            var teacher = new ResilientBackpropagationLearning(network);

            var initialization = new NguyenWidrow(network);

            initialization.Randomize();

            // sigmoid outputs lie in (0, 1), so scale the regression targets into that range
            var scaledOutputs = Vector.Scale(outputs, range, new DoubleRange(0.0, 1.0));

            var outputsNetwork = new double[outputs.Length][];

            for (int i = 0; i < outputs.Length; i++)
            {
                outputsNetwork[i] = new double[1] { scaledOutputs[i] };
            }

            double error = Double.PositiveInfinity;

            // the error threshold scales with the number of training samples
            double maxError = outputs.Length / 5e2;

            int epoch = 0;

            while (error > maxError && epoch < 5000)
            {
                error = teacher.RunEpoch(inputs, outputsNetwork);

                epoch++;
            }

            Save();
        }
Example No. 18
        public void Train()
        {
            var samples = GenerateSamples(category.Compositions);
            double[][] inputs = new double[samples.Length][];
            double[][] outputs = new double[samples.Length][];
            for (int i = 0; i < samples.Length; i++)
            {
                inputs[i] = samples[i].Inputs;
                outputs[i] = samples[i].Outputs;
            }
            // A Bernoulli activation function was tried first; a sigmoid is used instead
            //var function = new BernoulliFunction(alpha: 0.5);
            var function = new SigmoidFunction(2);
            // Create a feed-forward network with one hidden layer of 11 neurons
            //network = new RestrictedBoltzmannMachine(function, inputsCount: MAX_INPUTS, hiddenNeurons: MAX_OUTPUTS);
            network = new ActivationNetwork(function, MAX_INPUTS, 11, MAX_OUTPUTS);

            // The Contrastive Divergence learner for RBMs was an earlier experiment:
            /*
            var teacher = new ContrastiveDivergenceLearning(network)
            {
                Momentum = 0.1,
                LearningRate = 0.02
            };
            */

            // An earlier, smaller network configuration:
            /*
            network = new ActivationNetwork(
                new SigmoidFunction(2),
                2,  // two inputs in the network
                10, // ten neurons in the first layer
                2); // two neurons in the second layer
            */

            var teacher = new ResilientBackpropagationLearning(network as ActivationNetwork);

            // train for the configured number of epochs, logging percent progress and error

            for (int i = 0; i < Epochs; i++)
            {
                var e = teacher.RunEpoch(inputs, outputs);

                Console.WriteLine("{0} : {1}", i / (double)Epochs * 100, e);
            }

            Save();
        }
Example No. 19
        async Task <KFoldData> kfold(int inputSize, int outputSize, int breadth, int depth, double trainingweights)
        {
            // yield immediately so the k-fold search continues off the caller's context
            await Task.Delay(1).ConfigureAwait(false);

            double    bestKVal = double.MaxValue;
            KFoldData bestVal  = new KFoldData(0, 0, 0, 0, 0);

            for (int iterations = 10; iterations < 10000; iterations = iterations * 10)
            {
                int[] nodeArray = new int[depth + 1];
                for (int fillVal = 0; fillVal <= depth; fillVal++)
                {
                    // 'depth' hidden layers of 'breadth' neurons, with the output layer last
                    nodeArray[fillVal] = (fillVal == depth) ? outputSize : breadth;
                }

                double kSumAvg = 0;
                for (int i = 0; i < 5; i++)
                {
                    var testNet     = new ActivationNetwork(new SigmoidFunction(), inputSize, nodeArray);
                    var testLearner = new ResilientBackpropagationLearning(testNet);
                    testLearner.LearningRate = trainingweights;

                    int length = dataset_in.GetLength(0) / 5;

                    var trainingArrayIn  = new double[dataset_in.GetLength(0) * 4 / 5][];
                    var trainingArrayOut = new double[dataset_out.GetLength(0) * 4 / 5][];

                    // fold i is held out for testing; the samples before and after it form the training set
                    dataset_in.Take(i * length).ToArray().CopyTo(trainingArrayIn, 0);
                    dataset_in.Skip((i * length) + length).Take((length * 5) - (i * length + length)).ToArray().CopyTo(trainingArrayIn, i * length);

                    var testingArrayIn = dataset_in.Skip(i * length).Take(length).ToArray();

                    dataset_out.Take(i * length).ToArray().CopyTo(trainingArrayOut, 0);
                    dataset_out.Skip((i * length) + length).Take((length * 5) - (i * length + length)).ToArray().CopyTo(trainingArrayOut, i * length);

                    var testingArrayOut = dataset_out.Skip(i * length).Take(length).ToArray();


                    for (int iteration = 0; iteration < iterations; iteration++)
                    {
                        testLearner.RunEpoch(trainingArrayIn, trainingArrayOut);
                    }

                    // sum the absolute error over every output of every held-out sample
                    double kSum = 0;
                    for (int k = 0; k < testingArrayIn.GetLength(0); k++)
                    {
                        var testResults = testNet.Compute(testingArrayIn[k]);
                        for (int j = 0; j < testResults.Length; j++)
                        {
                            kSum += Math.Abs(testResults[j] - testingArrayOut[k][j]);
                        }
                    }
                    kSumAvg += kSum;
                }

                kSumAvg = kSumAvg / dataset_in.GetLength(0);
                if (kSumAvg == 0) // a perfect fold score cannot be improved, so stop early
                {
                    bestKVal = kSumAvg;
                    bestVal  = new KFoldData(breadth, depth, trainingweights, iterations, bestKVal);
                    Console.WriteLine("Thread Complete " + breadth + " " + depth + " " + trainingweights + " " + iterations + " " + bestKVal);
                    return(bestVal);
                }

                if (kSumAvg < bestKVal)
                {
                    bestKVal = kSumAvg;
                    bestVal  = new KFoldData(breadth, depth, trainingweights, iterations, bestKVal);
                }
            }
            Console.WriteLine("Thread Complete " + breadth + " " + depth + " " + trainingweights + " " + bestVal.Iterations + " " + bestKVal);

            return(bestVal);
            //return new Task<KFoldData>(() => helperFunction(inputSize, outputSize, breadth,depth,trainingweights));
        }