Example 1
public EditFeatureVector(FeatureVector fv)
{
    InitializeComponent();

    // fall back to a generated placeholder name for unnamed vectors;
    // count is assumed to be a static counter on the form class
    textBox1.Text = string.IsNullOrEmpty(fv.name) ? "Incognito " + count : fv.name;
    numericUpDown1.Value = fv.size;
    count++;
}
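
The FeatureVector type itself is not included in these examples. A minimal sketch, assuming only the members the snippets actually use (the type name ActivationType is a guess, not from the original source):

public class FeatureVector
{
    public string name;         // lookup key used throughout the examples
    public int size;            // neuron count shown in the edit dialog
    public Matrix state;        // activations as a (size x 1) column matrix
    public ActivationType type; // hypothetical: exposes Func(Matrix) as used in Example 3
    public LayerType layer;     // e.g. LayerType.OUTPUT, tested in Example 3
}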
Example 2
        static FeatureVector[] StateFromConfiguration(string config)
        {
            // config has the form "name=v1,v2,...;name2=w1,w2,...", one
            // semicolon-separated entry per feature vector
            var configs = config.Split(';');
            FeatureVector[] res = new FeatureVector[configs.Length];

            for (int i = 0; i < res.Length; i++)
            {
                var toks = configs[i].Split('=');
                var name = toks[0];
                var values = toks[1].Split(',').Select(double.Parse).ToArray();

                // each vector's state is an (n x 1) column matrix
                res[i] = new FeatureVector();
                res[i].name = name;
                res[i].state = new Matrix(values.Length, 1);

                for (int c = 0; c < values.Length; c++)
                {
                    res[i].state[c, 0] = values[c];
                }
            }

            return res;
        }
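
For illustration, a hedged usage sketch of the parser above (the sample configuration string is hypothetical):

        // "in" maps to a 2x1 column matrix {0.5, 1.0}, "out" to a 1x1 matrix {0.25}
        FeatureVector[] vectors = StateFromConfiguration("in=0.5,1.0;out=0.25");
        Console.WriteLine(vectors[0].name);        // "in"
        Console.WriteLine(vectors[0].state[1, 0]); // 1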
Example 3
        private void TrainFor(int iterations, TrainingType type, double learningRate, Action<double> callbackWithError)
        {
            OneToManyMap<GraphMap<FeatureVector, WeightMatrix>.ILinkable, Pair<GraphMap<FeatureVector, WeightMatrix>.ILinkable, GraphMap<FeatureVector, WeightMatrix>.Link<WeightMatrix>>> backwards;
            Queue<GraphMap<FeatureVector, WeightMatrix>.ILinkable> inputVectors;
            Queue<GraphMap<FeatureVector, WeightMatrix>.ILinkable> outputVectors;

            GetGraphOrder(out backwards, out inputVectors, out outputVectors);

            List<Dictionary<string, FeatureVector>> trainingData = new List<Dictionary<string, FeatureVector>>();

            // Get training data
            foreach (DataGridViewRow row in data_training.Rows)
            {
                // skip DataGridView's uncommitted "new row" placeholder
                if (row.IsNewRow) continue;

                string config = row.Cells[2].Value.ToString();
                trainingData.Add(StateFromConfiguration(config).ToDictionary(x => x.name));
            }

            int iterationCounter = 0;

            double averager     = 1.0 / trainingData.Count;
            double learningrate = learningRate;

            // begin training...
            while (true)
            {
                double squaredTrainingError     = 0;
                int    totalTrainingNeuronCount = 0;

                // Matrix weight gradients
                Dictionary<GraphMap<FeatureVector, WeightMatrix>.Link<WeightMatrix>, Matrix> dw = new Dictionary<GraphMap<FeatureVector, WeightMatrix>.Link<WeightMatrix>, Matrix>();

                foreach (var inputVector in inputVectors)
                {
                    foreach (var edge in inputVector.Edges)
                    {
                        // make space to store the weight gradients
                        dw[edge.Value] = new Matrix(edge.Value.Data.weights.RowCount, edge.Value.Data.weights.ColumnCount);
                    }
                }

                foreach (var trainingCase in trainingData)
                {
                    double perTrainingSquaredError = 0;
                    int    perTrainingNeuronCount  = 0;

                    // error signal for each feature vector, keyed by vector name
                    Dictionary<string, Matrix> dy = new Dictionary<string, Matrix>();

                    // set all feature vectors to a training case
                    foreach (var feature in trainingCase)
                    {
                        if (vectors[feature.Key].Data.layer != LayerType.OUTPUT)
                        {
                            vectors[feature.Key].Data.state = feature.Value.state;
                        }
                    }

                    // forward prop: visit vectors in graph order, summing each one's
                    // weighted inputs before applying its activation function
                    foreach (var outputVector in outputVectors)
                    {
                        var sources = backwards[outputVector];
                        outputVector.Data.state = new Matrix(outputVector.Data.state.RowCount, outputVector.Data.state.ColumnCount);
                        foreach (var source in sources)
                        {
                            var x = Training.AddBiasTerm(source.a.Data.state);
                            outputVector.Data.state += ((BaseMatrix)source.b.Data.weights * (BaseMatrix)x);
                        }
                        outputVector.Data.state = outputVector.Data.type.Func(outputVector.Data.state);
                    }

                    // Calculate errors
                    foreach (var output in outputs)
                    {
                        dy[output.Data.name] = -(trainingCase[output.Data.name].state - output.Data.state);
                        for (int i = 0; i < output.Data.state.RowCount; i++)
                        {
                            double error = dy[output.Data.name][i, 0];
                            perTrainingSquaredError += error * error;
                            perTrainingNeuronCount++;
                        }
                    }

                    squaredTrainingError     += perTrainingSquaredError;
                    totalTrainingNeuronCount += perTrainingNeuronCount;

                    // Establish space for the input vectors
                    foreach (var inputVec in inputVectors)
                    {
                        dy[inputVec.Data.name] = new Matrix(inputVec.Data.state.RowCount, inputVec.Data.state.ColumnCount);
                    }

                    // backprop and add to weight gradients
                    foreach (var inputVec in inputVectors)
                    {
                        foreach (var edge in inputVec.Edges)
                        {
                            Matrix dHidden;
                            Matrix dWeights;
                            var x = Training.AddBiasTerm(inputVec.Data.state);

                            // the vector at the far end of this edge supplies the error signal
                            GraphMap<FeatureVector, WeightMatrix>.ILinkable target = edge.Key;
                            FeatureVector data = target.Data;

                            Training.BackpropLayer(dy[data.name], x, edge.Value.Data.weights, data.type, out dHidden, out dWeights);
                            dy[inputVec.Data.name] += Training.RemoveBiasTerm(dHidden);

                            if (type == TrainingType.Batch)
                            {
                                dw[edge.Value] -= dWeights;
                            }
                            else if (type == TrainingType.Online)
                            {
                                dw[edge.Value] = -dWeights;
                            }
                        }
                    }

                    // update weights
                    if (type == TrainingType.Online)
                    {
                        foreach (var inputVec in inputVectors)
                        {
                            foreach (var edge in inputVec.Edges)
                            {
                                edge.Value.Data.weights = (edge.Value.Data.weights + dw[edge.Value] * learningrate);
                            }
                        }
                    }
                }


                if (type == TrainingType.Batch)
                {
                    // update weights
                    foreach (var inputVec in inputVectors)
                    {
                        foreach (var edge in inputVec.Edges)
                        {
                            edge.Value.Data.weights = (edge.Value.Data.weights + dw[edge.Value] * averager * learningrate);
                        }
                    }
                }

                // report the error metric: root of the summed squared error,
                // normalized by the total number of output neurons seen
                double totalError = Math.Sqrt(squaredTrainingError) / totalTrainingNeuronCount;
                callbackWithError(totalError);
                //Debug.WriteLine(totalError);

                iterationCounter++;

                // stop once the requested number of iterations has run
                if (iterationCounter == iterations)
                {
                    break;
                }
            }
        }
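
A minimal call-site sketch for the method above, assuming it runs on the same form (the argument values are illustrative; as the branches above show, TrainingType.Batch accumulates gradients over all training cases and applies them once per pass, while TrainingType.Online applies them after every case):

        // hypothetical invocation, not part of the original source
        TrainFor(
            iterations: 1000,
            type: TrainingType.Batch,
            learningRate: 0.01,
            callbackWithError: err => Debug.WriteLine("error: " + err));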