Example #1
        public static Perceptron Train(DataSet.DataSet dataSet, int batching, int epochs, double learningRate, int hiddenLayers, int hiddenLayersSize, ActivationFunction.ActivationFunction activationFunction, ErrorFunction.ErrorFunction errorFunction)
        {
            Perceptron p = new Perceptron(batching, learningRate, errorFunction);

            p.Layer(dataSet.FeatureSize, activationFunction);
            for (int i = 0; i < hiddenLayers; i++)
            {
                p.Layer(hiddenLayersSize, activationFunction);
            }
            p.Layer(dataSet.LabelSize, activationFunction);

            p.Train2(dataSet, epochs);

            return p;
        }
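A call to this factory might look like the sketch below. This is illustrative only: the hyperparameter values are made up, Train is assumed to be declared on Perceptron, and SigmoidFunction/MeanSquaredError are borrowed from Example #6 on the assumption that they implement the expected ActivationFunction and ErrorFunction types.

        //Hypothetical usage sketch; values and concrete classes are assumptions.
        DataSet.DataSet dataSet = new DataSet.DataSet(@"data.csv", true);
        Perceptron trained = Perceptron.Train(
            dataSet,
            32,                       //batching: mini-batch size
            500,                      //epochs
            0.01,                     //learningRate
            2,                        //hiddenLayers
            5,                        //hiddenLayersSize
            new SigmoidFunction(),    //activationFunction
            new MeanSquaredError());  //errorFunction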
Example #2
        public double CalculateMeanErrorOverDataSet(DataSet.DataSet dataSet)
        {
            // Sum the per-row error, then average over the whole data set.
            double sum = 0;

            foreach (var row in dataSet.DataRows)
            {
                var err = this.FeedForward(row.GetFeatures())
                          .Zip(row.GetLabels(), (e, l) => ErrorFunction.GetValue(l, e))
                          .Sum();

                sum += err;
            }
            return sum / dataSet.DataRows.Count;
        }
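Example #4 below computes what looks like the same mean in a single LINQ expression, assuming its CalculateExampleLost helper wraps the same zipped error sum.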
Example #3
        public void Train3(DataSet.DataSet dataSet, int epochs)
        {
            for (int i = 0; i < epochs; i++)
            {
                var miniBatch = dataSet.NextBatch(this.Batching);
                foreach (var example in miniBatch)
                {
                    double   error  = CalculateExampleLost(example);
                    double[] labels = example.Labels;

                    Layers.Last().BackPropagate(error, labels, ErrorFunction);
                }
                TakeGradientDescentStep(miniBatch.Count);
                // Log progress roughly ten times per run (guard against epochs < 10).
                if (epochs >= 10 && (i + 1) % (epochs / 10) == 0)
                {
                    Console.WriteLine("Epoch " + (i + 1) + ", Avg.Loss:" + CalculateMeanErrorOverDataSet(dataSet));
                }
            }
        }
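Note how Train3 differs from Train2 (Example #5): it delegates the backward pass to Layers.Last().BackPropagate instead of inlining the layer-by-layer derivative loops, and it draws one mini-batch per epoch without reshuffling the data set.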
Example #4
 public double MeanLossOverDataSet(DataSet.DataSet dataSet)
 {
     return dataSet.DataRows.Select(i => CalculateExampleLost(i)).Average();
 }
Example #5
        public void Train2(DataSet.DataSet dataSet, int epochs)
        {
            Console.WriteLine("Initial Loss:" + CalculateMeanErrorOverDataSet(dataSet));
            for (int i = 0; i < epochs; i++)
            {
                dataSet.Shuffle();
                List <DataRow> batch = dataSet.NextBatch(this.Batching);

                foreach (DataRow example in batch)
                {
                    double[] result = this.FeedForward(example.GetFeatures());
                    double[] labels = example.GetLabels();
                    if (result.Length != labels.Length)
                    {
                        throw new Exception("Inconsistent array sizes; incorrect implementation.");
                    }
                    else
                    {
                        // Walk backwards from the output layer, accumulating
                        // cost derivatives layer by layer via the chain rule.
                        for (int l = this.Layers.Count - 1; l > 0; l--)
                        {
                            if (l == this.Layers.Count - 1)
                            {
                                // Output layer: derivative of the error w.r.t. each activation.
                                for (int j = 0; j < this.Layers[l].CostDerivatives.Length; j++)
                                {
                                    this.Layers[l].CostDerivatives[j] = ErrorFunction.GetDerivativeValue(labels[j], this.Layers[l].Activations[j]);
                                }
                            }
                            else
                            {
                                // Hidden layers: pull the next layer's derivatives back
                                // through its weights and activation derivative.
                                for (int j = 0; j < this.Layers[l].CostDerivatives.Length; j++)
                                {
                                    double acum = 0;
                                    for (int j2 = 0; j2 < Layers[l + 1].Size; j2++)
                                    {
                                        acum += Layers[l + 1].WeightMatrix[j2, j] * this.Layers[l + 1].ActivationFunction.GetDerivativeValue(Layers[l + 1].WeightedSum[j2]) * Layers[l + 1].CostDerivatives[j2];
                                    }
                                    this.Layers[l].CostDerivatives[j] = acum;
                                }
                            }

                            // Accumulate bias and weight gradients for this mini-batch.
                            for (int j = 0; j < this.Layers[l].Activations.Length; j++)
                            {
                                this.Layers[l].BiasVectorChangeRecord[j] += this.Layers[l].ActivationFunction.GetDerivativeValue(Layers[l].WeightedSum[j]) * Layers[l].CostDerivatives[j];
                                for (int k = 0; k < Layers[l].WeightMatrix.GetLength(1); k++)
                                {
                                    this.Layers[l].WeightMatrixChangeRecord[j, k] += Layers[l - 1].Activations[k]
                                                                                     * this.Layers[l].ActivationFunction.GetDerivativeValue(Layers[l].WeightedSum[j])
                                                                                     * Layers[l].CostDerivatives[j];
                                }
                            }
                        }
                    }
                }
                TakeGradientDescentStep(batch.Count);

                if (epochs >= 10 && (i + 1) % (epochs / 10) == 0)
                {
                    Console.WriteLine("Epoch " + (i + 1) + ", Avg.Loss:" + CalculateMeanErrorOverDataSet(dataSet));
                }
            }
        }
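The nested loops in Train2 are the standard backpropagation recurrence: the output layer's cost derivative comes directly from the error function, and each hidden layer's derivative is the next layer's derivatives pulled back through that layer's weights and activation derivative. The following minimal, self-contained sketch reproduces the recurrence on a toy 2-to-1 layer slice with hard-coded values; the sigmoid and all variable names here are assumptions for illustration, not the Perceptron API.

        using System;

        class BackpropSketch
        {
            static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-x));
            static double SigmoidPrime(double x) { var s = Sigmoid(x); return s * (1.0 - s); }

            static void Main()
            {
                //Toy slice: 2 hidden neurons feeding 1 output neuron (values made up).
                double[] zHidden = { 0.5, -0.3 };   //hidden weighted sums
                double   zOut    = 0.8;             //output weighted sum
                double   aOut    = Sigmoid(zOut);   //output activation
                double   label   = 1.0;
                double[] wOut    = { 0.7, -0.2 };   //output weights, one per hidden neuron

                //Output layer, as in Train2: dC/da = 2(a - y) for squared error.
                double costDerivOut = 2.0 * (aOut - label);

                //Hidden layer, Train2's "acum" loop: pull the derivative back
                //through the output weights and activation derivative.
                double[] costDerivHidden = new double[zHidden.Length];
                for (int j = 0; j < zHidden.Length; j++)
                {
                    costDerivHidden[j] = wOut[j] * SigmoidPrime(zOut) * costDerivOut;
                }

                //Gradient accumulation, mirroring BiasVectorChangeRecord:
                //biasGrad_j = f'(z_j) * dC/da_j.
                for (int j = 0; j < zHidden.Length; j++)
                {
                    double biasGrad = SigmoidPrime(zHidden[j]) * costDerivHidden[j];
                    Console.WriteLine($"hidden {j}: dC/da = {costDerivHidden[j]:F4}, bias grad = {biasGrad:F4}");
                }
            }
        }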
Example #6
        static void Main(string[] args)
        {
            //Set the path of the file containing the data set
            //string dataFilePath = @"C:\Users\kevin\Desktop\squaredtest.csv"; NutrioxDataset
            string dataFilePath = @"C:\Users\Bruker\Desktop\NutrioxDataset.csv";

            //string dataFilePath = @"C:\Users\Bruker\Desktop\-5to5-200Rows.csv";

            //Create a new data set
            DataSet.DataSet dataSet = new DataSet.DataSet(dataFilePath, true);

            //Apply desired data preprocessing to the data set
            dataSet.PreProcessDataSet(NormalizationType.MinMax, 2, EncodingType.None, null);

            //Create a model hyperparameter layer structure
            LayerStructure layerStructure = new LayerStructure()
            {
                numberOfInputNodes  = 2,
                HiddenLayerList     = new List <int> { 5, 5 },
                numberOfOutputNodes = 1
            };

            //Create an instance of the desired optimization strategy to use

            var regularizationStrategyFactory = new RegularizationStrategyFactory();
            StochasticGradientDescent SGD     = new StochasticGradientDescent(new SigmoidFunction(), new IdentityFunction(), new MeanSquaredError(), RegularizationType.None, regularizationStrategyFactory);

            //Create training hyperparameters
            TrainingParameters trainingParams = new TrainingParameters()
            {
                epochs = 500, learningRate = 0.01, momentum = 0.01, RegularizationLambda = 0.00
            };

            //Create an instance of a neural network
            //ArtificialNeuralNetwork ann = new ArtificialNeuralNetwork(layerStructure, trainingParams, dataSet, SGD, new GaussianDistribution());

            //Or Load a Network from XML

            XML xml = new XML();

            ArtificialNeuralNetwork ann = xml.LoadNetwork(@"C:\Users\Bruker\Desktop\BestNet.xml", dataSet) as ArtificialNeuralNetwork;

            //Apply the desired training/test data set split ratio.
            ann.SplitDataSetIntoTrainAndTestSets(0.7);

            //Initiate network training
            //ann.TrainNetwork();

            var crossValidationStrategyFactory = new CrossValidationStrategyFactory();
            NetworkEvaluator evaluator         = new NetworkEvaluator(ann);
            CrossValidator   crossValidator    = new CrossValidator(ann, evaluator, crossValidationStrategyFactory);

            //Cross-validate the fitted model
            //crossValidator.KFold(10, 0.007);

            //Evaluate the fitted model on the test set
            evaluator.EvaluateNetwork(0.007);


            //--Optional--//

            //Serialize and save the fitted model

            //XML xml = new XML();
            //xml.SaveNetwork(dataFilePath, ann);

            //Extract model information

            //ann.SaveListOfErrors();

            //ann.GetApproximatedFunction(ann.SavePath + "/Function.txt");

            Console.ReadLine();
        }
Example #7
        private void Train(DataSet.DataSet dataSet, int epochs)
        {
            Console.WriteLine("MSE:" + CalculateMeanErrorOverDataSet(dataSet));
            for (int i = 0; i < epochs; i++)
            {
                dataSet.Shuffle();
                List <List <DataRow> > batch = dataSet.Batch(this.Batching);
                int step = 0;
                foreach (List <DataRow> row in batch)
                {
                    foreach (DataRow example in row)
                    {
                        double[] result = this.FeedForward(example.GetFeatures());
                        double[] labels = example.GetLabels();
                        if (result.Length != labels.Length)
                        {
                            throw new Exception("Inconsistent array size, Incorrect implementation.");
                        }
                        else
                        {
                            double error = labels.Zip(result, (x, y) => Math.Pow(x - y, 2)).Sum();
                            for (int l = this.Layers.Count - 1; l > 0; l--)
                            {
                                if (l == this.Layers.Count - 1)
                                {
                                    for (int j = 0; j < this.Layers[l].CostDerivatives.Length; j++)
                                    {
                                        this.Layers[l].CostDerivatives[j] = 2.0 * (this.Layers[l].Activations[j] - labels[j]);
                                    }
                                }
                                else
                                {
                                    for (int j = 0; j < this.Layers[l].CostDerivatives.Length; j++)
                                    {
                                        double acum = 0;
                                        for (int j2 = 0; j2 < Layers[l + 1].Size; j2++)
                                        {
                                            acum += Layers[l + 1].WeightMatrix[j2, j] * Layers[l + 1].ActivationFunction.GetDerivativeValue(Layers[l + 1].WeightedSum[j2]) * Layers[l + 1].CostDerivatives[j2];
                                        }
                                        this.Layers[l].CostDerivatives[j] = acum;
                                    }
                                }

                                for (int j = 0; j < this.Layers[l].Activations.Length; j++)
                                {
                                    this.Layers[l].BiasVectorChangeRecord[j] += Layers[l].ActivationFunction.GetDerivativeValue(Layers[l].WeightedSum[j]) * Layers[l].CostDerivatives[j];
                                    for (int k = 0; k < Layers[l].WeightMatrix.GetLength(1); k++)
                                    {
                                        this.Layers[l].WeightMatrixChangeRecord[j, k] += Layers[l - 1].Activations[k]
                                                                                         * Layers[l].ActivationFunction.GetDerivativeValue(Layers[l].WeightedSum[j])
                                                                                         * Layers[l].CostDerivatives[j];
                                    }
                                }
                            }
                        }
                    }
                    // Console.WriteLine("Step "+step);
                    step++;
                    TakeGradientDescentStep(row.Count);

                    //
                }
                Console.WriteLine(i + ":" + CalculateMeanErrorOverDataSet(dataSet));
            }
        }
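This private Train appears to be an earlier variant of Train2 (Example #5): the squared error and its derivative 2.0 * (activation - label) are hard-coded instead of going through ErrorFunction, but the backward pass and gradient accumulation are otherwise identical.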
Example #8
 /**
  * <summary> Constructor that sets the dataSet.</summary>
  *
  * <param name="dataSet">DataSet that will bu used.</param>
  */
 public TrainedFeatureFilter(DataSet.DataSet dataSet) : base(dataSet)
 {
 }
Example #9
 /**
  * <summary> Constructor for normalize feature filter. It calculates and stores the mean (m) and standard deviation (s) of
  * the sample.</summary>
  *
  * <param name="dataSet">Instances whose continuous attribute values will be normalized.</param>
  */
 public Normalize(DataSet.DataSet dataSet) : base(dataSet)
 {
     _averageInstance           = dataSet.GetInstanceList().Average();
     _standardDeviationInstance = dataSet.GetInstanceList().StandardDeviation();
 }
 /**
  * <summary> Constructor that sets the dataSet and dimension. Then calls train method.</summary>
  *
  * <param name="dataSet">           DataSet that will bu used.</param>
  * <param name="numberOfDimensions">Dimension number.</param>
  */
 public Pca(DataSet.DataSet dataSet, int numberOfDimensions) : base(dataSet)
 {
     this._numberOfDimensions = numberOfDimensions;
     Train();
 }
 /**
  * <summary> Constructor that sets the dataSet and dimension. Then calls train method.</summary>
  *
  * <param name="dataSet">DataSet that will bu used.</param>
  */
 public Pca(DataSet.DataSet dataSet) : base(dataSet)
 {
     Train();
 }
 /**
  * <summary> Constructor that sets the dataSet and covariance explained. Then calls train method.</summary>
  *
  * <param name="dataSet">            DataSet that will bu used.</param>
  * <param name="covarianceExplained">Number that shows the explained covariance.</param>
  */
 public Pca(DataSet.DataSet dataSet, double covarianceExplained) : base(dataSet)
 {
     this._covarianceExplained = covarianceExplained;
     Train();
 }
 /**
  * <summary> Constructor that sets the dataSet and all the attributes distributions.</summary>
  *
  * <param name="dataSet">DataSet that will be used.</param>
  */
 public LaryFilter(DataSet.DataSet dataSet) : base(dataSet)
 {
     attributeDistributions = dataSet.GetInstanceList().AllAttributesDistribution();
 }
 /**
  * <summary> Constructor for discrete to indexed filter.</summary>
  *
  * <param name="dataSet">The dataSet whose instances whose discrete attributes will be converted to indexed attributes</param>
  */
 public DiscreteToIndexed(DataSet.DataSet dataSet) : base(dataSet)
 {
 }
Example #15
 /**
  * <summary> Constructor for L-ary discrete to binary discrete filter.</summary>
  *
  * <param name="dataSet">The instances whose L-ary discrete attributes will be converted to binary discrete attributes.</param>
  */
 public LaryToBinary(DataSet.DataSet dataSet) : base(dataSet)
 {
 }
Example #16
 /**
  * <summary> Constructor for discrete to continuous filter.</summary>
  *
  * <param name="dataSet">The dataSet whose instances whose discrete attributes will be converted to continuous attributes using
  *                1-of-L encoding.</param>
  */
 public DiscreteToContinuous(DataSet.DataSet dataSet) : base(dataSet)
 {
 }
Example #17
 /**
  * <summary> Constructor that sets the dataSet.</summary>
  *
  * <param name="dataSet">DataSet that will bu used.</param>
  */
 public FeatureFilter(DataSet.DataSet dataSet)
 {
     this.dataSet = dataSet;
 }
 /**
  * <summary> Constructor for a specific machine learning experiment</summary>
  * <param name="classifier">Classifier used in the machine learning experiment</param>
  * <param name="parameter">Parameter(s) of the classifier.</param>
  * <param name="dataSet">DataSet on which the classifier is run.</param>
  */
 public Experiment(Classifier.Classifier classifier, Parameter.Parameter parameter, DataSet.DataSet dataSet)
 {
     this._classifier = classifier;
     this._parameter  = parameter;
     this._dataSet    = dataSet;
 }