Code example #1
        // check the accuracy so far
        private double GetAccuracy(int instance, VMatrix features, VMatrix labels)
        {
            var eCount = 0;

            for (var row = 0; row < features.Rows(); row++)
            {
                double net = 0;

                for (var col = 0; col < features.Cols(); col++)
                {
                    net += m_weights[instance][col] * features.Row(row)[col];
                }

                // add the bias
                net += m_weights[instance][m_weights[instance].Length - 1];

                var z = (net > 0 ? 1.0 : 0);
                var t = labels.Row(row)[0];
                if (m_count > 2)
                {
                    t = (t == instance) ? 1.0 : 0;
                }

                if (t != z)
                {
                    eCount++;
                }
            }

            return(1.0 - (1.0 * eCount / features.Rows()));
        }
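GetAccuracy above returns the fraction of rows whose thresholded net value (weighted sum plus a trailing bias weight) matches the target, treating class `instance` one-vs-rest when there are more than two classes. A minimal standalone sketch of the same computation, assuming plain arrays in place of VMatrix and a hypothetical Accuracy helper name (not code from the repository):

        // Hypothetical sketch: accuracy of a single perceptron whose last
        // weight is the bias term; rows[r].Length is assumed to equal
        // weights.Length - 1.
        static double Accuracy(double[] weights, double[][] rows, double[] targets)
        {
            var errors = 0;
            for (var r = 0; r < rows.Length; r++)
            {
                var net = weights[weights.Length - 1];              // bias
                for (var c = 0; c < rows[r].Length; c++)
                {
                    net += weights[c] * rows[r][c];
                }
                var predicted = net > 0 ? 1.0 : 0.0;                // threshold at 0
                if (predicted != targets[r])
                {
                    errors++;
                }
            }
            return 1.0 - (double)errors / rows.Length;
        }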
Code example #2
File: SOM.cs Project: robertst01/MLSystemManager
        private void TrainEpoch(VMatrix features, VMatrix labels)
        {
            double minDistance;                     // distance to the best matching unit found so far
            Node   bmu;                             // best matching unit (BMU) for the current sample
            object lo = new object();               // guards the BMU comparison inside the parallel loop

            Console.Write("TrainEpoch ");
            int cl = Console.CursorLeft;

            if (m_iterations < 1)
            {
                m_iterations = features.Rows() * 10;
            }

            double mapRadius    = (double)m_gridSize / 2;
            double timeConstant = (double)m_iterations / Math.Log(mapRadius);

            for (int iteration = 0; iteration < m_iterations; iteration++)
            {
                int row = m_rand.Next(features.Rows());
                minDistance = double.MaxValue;
                bmu         = null;

                if (((iteration % 100) == 0) || (iteration == (m_iterations - 1)))
                {
                    Console.SetCursorPosition(cl, Console.CursorTop);
                    Console.Write(iteration);
                }

                // calculate the distance
#if parallel
                Parallel.ForEach(m_layers[0], node =>
#else
                foreach (var node in m_layers[0])
#endif
                {
                    node.distance = 0;

                    // calculate the distance
                    for (var w = 0; w < node.weights.Length; w++)
                    {
                        node.distance += (features.Get(row, w) - node.weights[w]) * (features.Get(row, w) - node.weights[w]);
                    }

                    lock (lo)
                    {
                        if (node.distance < minDistance)
                        {
                            minDistance = node.distance;
                            bmu         = node;
                        }
                    }
#if parallel
                });
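The listing is cut off inside the BMU search, so the weight-update half of the epoch is not shown. The two constants computed at the top (mapRadius = gridSize / 2 and timeConstant = iterations / ln(mapRadius)) are the standard ingredients of an exponentially shrinking SOM neighbourhood; a typical continuation (an assumption, not code shown in SOM.cs) would derive the radius for the current iteration as:

        // Hypothetical sketch: exponentially decaying neighbourhood radius
        // driven by the mapRadius / timeConstant values computed above.
        static double NeighbourhoodRadius(double mapRadius, double timeConstant, int iteration)
        {
            return mapRadius * Math.Exp(-iteration / timeConstant);
        }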
Code example #3
            public Cluster(int number, VMatrix features, int row, List <int> ignore)
            {
                Number   = number;
                Features = features;
                Centroid = new double[features.Cols()];
                for (var col = 0; col < Centroid.Length; col++)
                {
                    Centroid[col] = features.Get(row, col);
                }

                Instances = new List <int>();
                Ignore    = ignore;
            }
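This constructor seeds a cluster's centroid from a single feature row and remembers which columns to ignore. A hedged sketch of the companion step, a k-means style recomputation of the centroid from member rows, written to sit alongside the constructor above; it assumes Instances holds row indices into Features and is not shown in the original listing:

            // Hypothetical sketch: recompute the centroid as the mean of the
            // member rows, skipping ignored columns.
            public void Recompute()
            {
                if (Instances.Count == 0)
                {
                    return;
                }

                for (var col = 0; col < Centroid.Length; col++)
                {
                    if (Ignore.Contains(col))
                    {
                        continue;
                    }

                    double sum = 0;
                    foreach (var row in Instances)
                    {
                        sum += Features.Get(row, col);
                    }
                    Centroid[col] = sum / Instances.Count;
                }
            }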
Code example #4
        private double TrainEpoch(int epoch, VMatrix features, VMatrix labels)
        {
            double sse = 0;
            object lo  = new object();

            Console.Write("TrainEpoch ");
            int cl = Console.CursorLeft;

            for (var row = 0; row < features.Rows(); row++)
            {
                if (((row % 100) == 0) || (row == (features.Rows() - 1)))
                {
                    Console.SetCursorPosition(cl, Console.CursorTop);
                    Console.Write(row);
                }

                // calculate the output
                for (var layer = 0; layer < m_layers.Count; layer++)
                {
#if parallel
                    Parallel.ForEach(m_layers[layer], node =>
#else
                    foreach (var node in m_layers[layer])
#endif
                    {
                        node.net = 0;

                        // calculate the net value
                        for (var w = 0; w < node.weights.Length - 1; w++)
                        {
                            if (layer == 0)
                            {
                                node.net += node.weights[w] * features.Get(row, w);
                            }
                            else
                            {
                                node.net += node.weights[w] * m_layers[layer - 1][w].output;
                            }
                        }
                        // add the bias
                        node.net += node.weights[node.weights.Length - 1];

                        node.output = Activation(node.net);
#if parallel
                    });
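This excerpt also ends inside the parallel forward pass; Activation(node.net) is defined elsewhere in the class. Example #6 below writes out the activation variants used in this project (leaky relu, softsign, softplus, sigmoid) and example #10 computes the logistic sigmoid inline, so a minimal sigmoid-only sketch (an assumption about the default case, not the actual method) is:

        // Hypothetical sketch of Activation for the default (sigmoid) case only;
        // the real method also handles the relu/softsign/softplus parameters.
        private double Activation(double net)
        {
            return 1.0 / (1.0 + Math.Exp(-net));
        }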
Code example #5
        public override void VTrain(VMatrix features, VMatrix labels)
        {
            if (m_hidden.Length < 1)
            {
                m_hidden = new int[1] {
                    features.Cols() * 2
                };
            }

            // add the input nodes
            List <Node> iNodes = new List <Node>();

            for (var i = 0; i < features.Cols(); i++)
            {
                iNodes.Add(new InputNode(i, i, m_rand));
            }

            m_layers.Add(iNodes);
            int prevNodes = iNodes.Count + 1;

            // add the hidden nodes
            for (var layer = 0; layer < m_hidden.Length; layer++)
            {
                List <Node> hNodes = new List <Node>();

                for (var n = 0; n < m_hidden[layer]; n++)
                {
                    var node = new HiddenNode(n, prevNodes, m_rand);
                    if (m_activation == "relu")
                    {
                        if (m_actRandom)
                        {
                            node.alpha     = m_actAlpha * m_rand.NextDouble();
                            node.threshold = m_actThreshold * m_rand.NextDouble();
                            node.beta      = ((m_actBeta - 1.0) * m_rand.NextDouble()) + 1.0;
                        }
                        else
                        {
                            node.alpha     = m_actAlpha;
                            node.threshold = m_actThreshold;
                            node.beta      = m_actBeta;
                        }
                    }
                    hNodes.Add(node);
                }

                m_layers.Add(hNodes);
                prevNodes = hNodes.Count + 1;
            }

            // add the output nodes
            List <Node> oNodes = new List <Node>();

            for (var col = 0; col < labels.Cols(); col++)
            {
                var labelValueCount = labels.ValueCount(col);

                if (labelValueCount < 2)
                {
                    // continuous
                    var node = new OutputNode(oNodes.Count, prevNodes, true, col, -1, m_rand);
                    if (m_activation == "relu")
                    {
                        if (m_actRandom)
                        {
                            node.alpha     = m_actAlpha * m_rand.NextDouble();
                            node.threshold = m_actThreshold * m_rand.NextDouble();
                            node.beta      = ((m_actBeta - 1.0) * m_rand.NextDouble()) + 1.0;
                        }
                        else
                        {
                            node.alpha     = m_actAlpha;
                            node.threshold = m_actThreshold;
                            node.beta      = m_actBeta;
                        }
                    }
                    oNodes.Add(node);
                }
                else
                {
                    for (var n = 0; n < labelValueCount; n++)
                    {
                        var node = new OutputNode(oNodes.Count, prevNodes, false, col, n, m_rand);
                        if (m_activation == "relu")
                        {
                            if (m_actRandom)
                            {
                                node.alpha     = m_actAlpha * m_rand.NextDouble();
                                node.threshold = m_actThreshold * m_rand.NextDouble();
                                node.beta      = ((m_actBeta - 1.0) * m_rand.NextDouble()) + 1.0;
                            }
                            else
                            {
                                node.alpha     = m_actAlpha;
                                node.threshold = m_actThreshold;
                                node.beta      = m_actBeta;
                            }
                        }
                        oNodes.Add(node);
                    }
                }
            }

            m_layers.Add(oNodes);

            int     trainSize          = (int)(0.75 * features.Rows());
            VMatrix trainFeatures      = new VMatrix(features, 0, 0, trainSize, features.Cols());
            VMatrix trainLabels        = new VMatrix(labels, 0, 0, trainSize, labels.Cols());
            VMatrix validationFeatures = new VMatrix(features, trainSize, 0, features.Rows() - trainSize, features.Cols());
            VMatrix validationLabels   = new VMatrix(labels, trainSize, 0, labels.Rows() - trainSize, labels.Cols());

            Console.Write("Layers: ");
            Console.Write(iNodes.Count);
            Console.Write('x');
            for (var l = 0; l < m_hidden.Length; l++)
            {
                Console.Write(m_hidden[l]);
                Console.Write('x');
            }
            Console.WriteLine(oNodes.Count);

            Console.WriteLine("AF: " + m_activation);
            Console.WriteLine(string.Format("AParam: {0},{1},{2},{3}", m_actAlpha, m_actThreshold, m_actBeta, m_actRandom));
            Console.WriteLine("Boost: " + m_boost);

            Console.WriteLine("Epoch\tMSE (validation)");
            if (m_outputFile != null)
            {
                m_outputFile.Write("Layers: ");
                m_outputFile.Write(iNodes.Count);
                m_outputFile.Write('x');
                for (var l = 0; l < m_hidden.Length; l++)
                {
                    m_outputFile.Write(m_hidden[l]);
                    m_outputFile.Write('x');
                }
                m_outputFile.WriteLine(oNodes.Count);

                m_outputFile.WriteLine("Momentum: " + m_momentum);
                m_outputFile.WriteLine("AF: " + m_activation);
                m_outputFile.WriteLine(string.Format("AParam: {0},{1},{2},{3}", m_actAlpha, m_actThreshold, m_actBeta, m_actRandom));
                m_outputFile.WriteLine("Boost: " + m_boost);
                m_outputFile.WriteLine();
                m_outputFile.WriteLine("Weights");
                PrintWeights();
                m_outputFile.WriteLine("Epoch\tMSE (validation)");
            }

            for (int round = 1; round < m_layers.Count; round++)
            {
                int    epoch      = 0;                                  // current epoch number
                int    bestEpoch  = 0;                                  // epoch number of best MSE
                int    eCount     = 0;                                  // number of epochs since the best MSE
                bool   checkDone  = false;                              // if true, check to see if we're done
                double initialMSE = double.MaxValue;                    // MSE for first epoch
                double bestMSE    = double.MaxValue;                    // best validation MSE so far

                for (; ;)
                {
                    // shuffle the training set
                    trainFeatures.Shuffle(m_rand, trainLabels);

                    TrainEpoch(++epoch, trainFeatures, trainLabels, round);

                    // check the MSE after this epoch
                    double mse = VGetMSE(validationFeatures, validationLabels);

                    Console.WriteLine(string.Format("{0}:{1}-{2}\t{3}", round, epoch, eCount, mse));
                    if (m_outputFile != null)
                    {
                        m_outputFile.WriteLine(string.Format("{0}:{1}-{2}\t{3}", round, epoch, eCount, mse));
                        m_outputFile.Flush();
                    }

                    if ((mse == 0.0) || (epoch > 5000))
                    {
                        break;
                    }
                    else if ((epoch == 1) || (mse < bestMSE))
                    {
                        if (epoch == 1)
                        {
                            // save the initial MSE
                            initialMSE = mse;
                        }
                        else if (!checkDone && (mse < initialMSE * 0.9))
                        {
                            checkDone = true;
                        }
                        eCount = 0;

                        // save the best for later
                        bestMSE   = mse;
                        bestEpoch = epoch;
                        for (var layer = 1; layer < m_layers.Count; layer++)
                        {
                            foreach (var node in m_layers[layer])
                            {
                                node.SaveBestWeights();
                            }
                        }
                    }
                    else if (checkDone)
                    {
                        // check to see if we're done
                        eCount++;
                        if (eCount >= 20)
                        {
                            break;
                        }
                    }

                    if ((bestEpoch > 0) && (bestEpoch != epoch))
                    {
                        for (var layer = round; layer < m_layers.Count; layer++)
                        {
                            foreach (var node in m_layers[layer])
                            {
                                node.RestoreBestWeights();
                                node.InitDeltas();
                            }
                        }
                        if (m_outputFile != null)
                        {
                            m_outputFile.WriteLine();
                            m_outputFile.WriteLine(string.Format("Best Weights (from Epoch {0}, valMSE={1})", bestEpoch, bestMSE));
                            PrintWeights();
                        }
                    }
                }
            }

            if (m_outputFile != null)
            {
                m_outputFile.WriteLine();
                m_outputFile.WriteLine("Weights");
                PrintWeights();
                m_outputFile.Close();
            }
        }
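Like the other VTrain methods on this page, this one holds out the last 25% of rows for validation by wrapping the original matrices in VMatrix views. Judging from the calls, the view constructor takes (source, rowStart, colStart, rowCount, colCount); a small helper restating the split under that assumption (hypothetical name, not repository code):

        // Hypothetical helper mirroring the 75/25 split used in these examples.
        // The VMatrix constructor signature is inferred from the calls above.
        static void SplitTrainValidation(VMatrix features, VMatrix labels,
            out VMatrix trainFeatures, out VMatrix trainLabels,
            out VMatrix validationFeatures, out VMatrix validationLabels)
        {
            var trainSize = (int)(0.75 * features.Rows());
            trainFeatures      = new VMatrix(features, 0, 0, trainSize, features.Cols());
            trainLabels        = new VMatrix(labels, 0, 0, trainSize, labels.Cols());
            validationFeatures = new VMatrix(features, trainSize, 0, features.Rows() - trainSize, features.Cols());
            validationLabels   = new VMatrix(labels, trainSize, 0, labels.Rows() - trainSize, labels.Cols());
        }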
Code example #6
        private void TrainBatch(VMatrix features, VMatrix labels, int startIdx, int count)
        {
            for (var idx = 0; idx < count; idx++)
            {
                var row = startIdx + idx;
                if (row > (features.Rows() - 1))
                {
                    row = features.Rows() - 1;
                }

                // calculate the output
                foreach (var layer in Layers)
                {
#if parallel
                    Parallel.ForEach(layer.Nodes, node =>
#else
                    foreach (var node in layer.Nodes)
#endif
                    {
                        node.Net    = 0;
                        node.Output = 0;
                        node.Error  = 0;

                        if (layer.Type == LayerType.Input)
                        {
                            // input node
                            node.Net    = features.Get(row, node.Index);
                            node.Output = node.Net;
                        }
                        else
                        {
                            // calculate the net value
                            for (var w = 0; w < node.Weights.Length - 1; w++)
                            {
                                node.Net += node.Weights[w] * layer.Previous.Nodes[w].Output;
                            }
                            // add the bias
                            node.Net += node.Weights[node.Weights.Length - 1];

                            // calculate the output
                            switch (Parameters.Activation)
                            {
                            case "relu":
                                node.Output = node.Net < 0 ? 0.01 * node.Net : node.Net;
                                break;

                            case "softsign":
                                node.Output = (node.Net / (1.0 + Math.Abs(node.Net)));
                                break;

                            case "softplus":
                                node.Output = Math.Log(1.0 + Math.Exp(node.Net));
                                break;

                            default:
                                node.Output = 1.0 / (1.0 + Math.Exp(-node.Net));
                                break;
                            }
                        }
#if parallel
                    });
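The switch above is the only place in this listing where the activation functions are written out explicitly: "relu" is a leaky ReLU with a fixed 0.01 slope for negative inputs, "softsign" is x / (1 + |x|), "softplus" is ln(1 + e^x), and anything else falls back to the logistic sigmoid. Restated as a standalone function (hypothetical helper name, same formulas):

        // Standalone restatement of the activation switch above; the name and
        // the string keys follow the snippet, not necessarily the full class.
        static double Activate(string activation, double net)
        {
            switch (activation)
            {
            case "relu":
                return net < 0 ? 0.01 * net : net;                  // leaky ReLU
            case "softsign":
                return net / (1.0 + Math.Abs(net));
            case "softplus":
                return Math.Log(1.0 + Math.Exp(net));
            default:
                return 1.0 / (1.0 + Math.Exp(-net));                // sigmoid
            }
        }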
Code example #7
File: SAE.cs Project: robertst01/MLSystemManager
        private double TrainEpoch(int epoch, VMatrix features, VMatrix labels, bool isDAE, bool trainAll)
        {
            double sse = 0;
            object lo  = new object();

            Console.Write("TrainEpoch ");
            int cl = Console.CursorLeft;

            StreamWriter aFile = null;

            if (!isDAE && (epoch == 1))
            {
                aFile = File.CreateText("dbnTrain.arff");
                aFile.WriteLine("@RELATION DAE");
                aFile.WriteLine();
                for (var i = 1; i <= m_layers[m_layers.Count - 3].Count; i++)
                {
                    aFile.WriteLine($"@ATTRIBUTE hn{i}	real");
                }
                aFile.WriteLine("@ATTRIBUTE class	{0,1,2,3,4,5,6,7,8,9}");
                aFile.WriteLine();
                aFile.WriteLine("@DATA");
            }

            for (var row = 0; row < features.Rows(); row++)
            {
                if (((row % 100) == 0) || (row == (features.Rows() - 1)))
                {
                    Console.SetCursorPosition(cl, Console.CursorTop);
                    Console.Write(row);
                }

                // calculate the output
                for (var layer = 0; layer < m_layers.Count; layer++)
                {
#if parallel
                    Parallel.ForEach(m_layers[layer], node =>
#else
                    foreach (var node in m_layers[layer])
#endif
                    {
                        node.net     = 0;
                        node.output  = 0;
                        node.output2 = 0;
                        node.error   = 0;

                        if (layer == 0)
                        {
                            // input node
                            node.output  = features.Get(row, node.index);
                            node.output2 = node.output;
                        }
                        else
                        {
                            // calculate the net value
                            for (var w = 0; w < node.weights.Length - 1; w++)
                            {
                                node.net += node.weights[w] * m_layers[layer - 1][w].output;
                            }
                            // add the bias
                            node.net += node.weights[node.weights.Length - 1];

                            // calculate the output
                            node.output  = Activation(node.net);
                            node.output2 = node.output;
                        }

                        if (isDAE && (layer == m_layers.Count - 3) && (node.output != 0))
                        {
                            lock (lo)
                            {
                                // corrupt the output
                                if (m_rand.NextDouble() < m_corruptLevel)
                                {
                                    node.output = 0;
                                }
                            }
                        }
#if parallel
                    });
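The excerpt stops inside the forward pass, but the lock-protected block already shows the denoising step: when training the DAE (isDAE), outputs of layer m_layers.Count - 3 are zeroed with probability m_corruptLevel, i.e. masking noise. As a standalone sketch (hypothetical helper, not in SAE.cs):

        // Hypothetical sketch of the masking-noise corruption used above: keep a
        // unit's output with probability (1 - corruptLevel), otherwise zero it.
        static double Corrupt(Random rand, double output, double corruptLevel)
        {
            return rand.NextDouble() < corruptLevel ? 0.0 : output;
        }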
Code example #8
File: DMLP.cs Project: robertst01/MLSystemManager
        public override void VTrain(VMatrix features, VMatrix labels)
        {
            if (m_hidden.Length < 1)
            {
                m_hidden = new int[1] {
                    features.Cols() * 2
                };
            }

            // create output nodes
            List <Node> oNodes = new List <Node>();

            // figure out how many outputs we need
            for (var col = 0; col < labels.Cols(); col++)
            {
                var labelValueCount = labels.ValueCount(col);

                if (labelValueCount < 2)
                {
                    // continuous
                    oNodes.Add(new OutputNode(m_hidden[m_hidden.Length - 1] + 1, true, col, -1, m_rand));
                }
                else
                {
                    for (var n = 0; n < labelValueCount; n++)
                    {
                        oNodes.Add(new OutputNode(m_hidden[m_hidden.Length - 1] + 1, false, col, n, m_rand));
                    }
                }
            }

            int oCount = oNodes.Count;

            for (var plane = 0; plane < oCount; plane++)
            {
                m_layers.Add(new List <List <Node> >());

                // add the input nodes
                List <Node> iNodes = new List <Node>();
                for (var i = 0; i < features.Cols(); i++)
                {
                    iNodes.Add(new InputNode(i, 0, m_rand));
                }

                m_layers[plane].Add(iNodes);

                int prevNodes = iNodes.Count + 1;

                for (var layer = 0; layer <= m_hidden.Length; layer++)
                {
                    if (layer < m_hidden.Length)
                    {
                        // add hidden nodes
                        List <Node> hNodes = new List <Node>();

                        for (var n = 0; n < m_hidden[layer]; n++)
                        {
                            hNodes.Add(new HiddenNode(prevNodes, m_rand));
                        }

                        m_layers[plane].Add(hNodes);

                        prevNodes = hNodes.Count + 1;
                    }
                    else
                    {
                        // add output node
                        m_layers[plane].Add(new List <Node>()
                        {
                            oNodes[plane]
                        });
                    }
                }
            }

            InitNodes();

            int     trainSize          = (int)(0.75 * features.Rows());
            VMatrix trainFeatures      = new VMatrix(features, 0, 0, trainSize, features.Cols());
            VMatrix trainLabels        = new VMatrix(labels, 0, 0, trainSize, labels.Cols());
            VMatrix validationFeatures = new VMatrix(features, trainSize, 0, features.Rows() - trainSize, features.Cols());
            VMatrix validationLabels   = new VMatrix(labels, trainSize, 0, labels.Rows() - trainSize, labels.Cols());

            Console.WriteLine(string.Format("Planes: {0}", oCount));
            Console.Write(string.Format("Layers: {0}x", features.Cols()));
            for (var l = 0; l < m_hidden.Length; l++)
            {
                Console.Write(m_hidden[l]);
                Console.Write('x');
            }
            Console.WriteLine("1");
            Console.WriteLine("Momentum: " + m_momentum);
            Console.WriteLine("AF: " + m_activation);
            Console.WriteLine(string.Format("AParam: {0},{1},{2},{3}", m_actLeak, m_actThreshold, m_actSlope, m_actRandom));
            Console.WriteLine("P-R-C\tMSE (validation)");
            if (m_outputFile != null)
            {
                m_outputFile.WriteLine(string.Format("Planes: {0}", oCount));
                m_outputFile.Write(string.Format("Layers: {0}x", features.Cols()));
                for (var l = 0; l < m_hidden.Length; l++)
                {
                    m_outputFile.Write(m_hidden[l]);
                    m_outputFile.Write('x');
                }
                m_outputFile.WriteLine("1");
                m_outputFile.WriteLine("Momentum: " + m_momentum);
                m_outputFile.WriteLine("AF: " + m_activation);
                m_outputFile.WriteLine(string.Format("AParam: {0},{1},{2},{3}", m_actLeak, m_actThreshold, m_actSlope, m_actRandom));
                m_outputFile.WriteLine();
                m_outputFile.WriteLine("P-R-C\tMSE (validation)");
            }

            // train the net, one plane at a time
            for (var plane = 0; plane < oCount; plane++)
            {
                int    epoch      = 0;                                  // current epoch number
                int    bestEpoch  = 0;                                  // epoch number of best MSE
                int    eCount     = 0;                                  // number of epochs since the best MSE
                bool   checkDone  = false;                              // if true, check to see if we're done
                double initialMSE = double.MaxValue;                    // MSE for first epoch
                double bestMSE    = double.MaxValue;                    // best validation MSE so far

                for (; ;)
                {
                    // shuffle the training set
                    trainFeatures.Shuffle(m_rand, trainLabels);
                    TrainEpoch(plane, ++epoch, trainFeatures, trainLabels);

                    // check the MSE after this epoch
                    double mse = PGetMSE(plane, validationFeatures, validationLabels);

                    Console.WriteLine(string.Format("{0}-{1}-{2}\t{3}", plane, epoch, eCount, mse));
                    if (m_outputFile != null)
                    {
                        m_outputFile.WriteLine(string.Format("{0}-{1}-{3}\t{3}", plane, epoch, eCount, mse));
                        m_outputFile.Flush();
                    }

                    if ((mse == 0.0) || (epoch > 10000))
                    {
                        break;
                    }
                    else if ((epoch == 1) || (mse < bestMSE))
                    {
                        if (epoch == 1)
                        {
                            // save the initial MSE
                            initialMSE = mse;
                        }
                        else if (!checkDone && (mse < initialMSE * 0.9))
                        {
                            checkDone = true;
                        }
                        eCount = 0;

                        // save the best for later
                        bestMSE   = mse;
                        bestEpoch = epoch;
                        for (var layer = 0; layer < m_layers[plane].Count - 1; layer++)
                        {
                            foreach (var node in m_layers[plane][layer])
                            {
                                node.SaveBestWeights();
                            }
                        }
                    }
                    else if (checkDone)
                    {
                        // check to see if we're done
                        eCount++;
                        if (eCount >= 20)
                        {
                            break;
                        }
                    }
                    else if ((epoch > 100) && /*(mse < initialMSE) &&*/ (mse > ((bestMSE + initialMSE) / 2)))
                    {
                        checkDone = true;
                    }
                }

                if ((bestEpoch > 0) && (bestEpoch != epoch))
                {
                    for (var layer = 0; layer < m_layers[plane].Count - 1; layer++)
                    {
                        foreach (var node in m_layers[plane][layer])
                        {
                            node.RestoreBestWeights();
                        }
                    }

                    Console.WriteLine(string.Format("Best Weights (from Epoch {0}, valMSE={1})", bestEpoch, bestMSE));
                    if (m_outputFile != null)
                    {
                        m_outputFile.WriteLine();
                        m_outputFile.WriteLine(string.Format("Best Weights (from Epoch {0}, valMSE={1})", bestEpoch, bestMSE));
                        m_outputFile.Flush();
                    }
                }
            }

            if (m_outputFile != null)
            {
                m_outputFile.WriteLine();
                m_outputFile.WriteLine("Weights");
                PrintWeights();
            }

            if (m_outputFile != null)
            {
                m_outputFile.Close();
            }
        }
Code example #9
        public override void VTrain(VMatrix features, VMatrix labels)
        {
            if (m_hidden.Length < 1)
            {
                m_hidden = new int[1] {
                    features.Cols() * 2
                };
            }

            int prevNodes = features.Cols() + 1;
            int wIdx      = 0;                                                          // index into the weights array

            for (var layer = 0; layer <= m_hidden.Length; layer++)
            {
                // add the nodes for this layer
                List <Node> nodes = new List <Node>();

                if (layer < m_hidden.Length)
                {
                    // hidden layer
                    for (var n = 0; n < m_hidden[layer]; n++)
                    {
                        if (m_weights != null)
                        {
                            nodes.Add(new Node(prevNodes, false, 0, 0, m_rand, m_weights[wIdx++]));
                        }
                        else
                        {
                            nodes.Add(new Node(prevNodes, false, 0, 0, m_rand, null));
                        }
                    }
                }
                else
                {
                    // output layer - figure out how many outputs we need
                    for (var col = 0; col < labels.Cols(); col++)
                    {
                        var labelValueCount = labels.ValueCount(col);

                        if (labelValueCount < 2)
                        {
                            // continuous
                            if (m_weights != null)
                            {
                                nodes.Add(new Node(prevNodes, true, col, -1, m_rand, m_weights[wIdx++]));
                            }
                            else
                            {
                                nodes.Add(new Node(prevNodes, true, col, -1, m_rand, null));
                            }
                        }
                        else
                        {
                            for (var n = 0; n < labelValueCount; n++)
                            {
                                if (m_weights != null)
                                {
                                    nodes.Add(new Node(prevNodes, false, col, n, m_rand, m_weights[wIdx++]));
                                }
                                else
                                {
                                    nodes.Add(new Node(prevNodes, false, col, n, m_rand, null));
                                }
                            }
                        }
                    }
                }

                prevNodes = nodes.Count + 1;

                m_layers.Add(nodes);
            }

            InitNodes();

            int     trainSize          = (int)(0.75 * features.Rows());
            VMatrix trainFeatures      = new VMatrix(features, 0, 0, trainSize, features.Cols());
            VMatrix trainLabels        = new VMatrix(labels, 0, 0, trainSize, labels.Cols());
            VMatrix validationFeatures = new VMatrix(features, trainSize, 0, features.Rows() - trainSize, features.Cols());
            VMatrix validationLabels   = new VMatrix(labels, trainSize, 0, labels.Rows() - trainSize, labels.Cols());

            int    epoch        = 0;                            // current epoch number
            double bestTrainMSE = double.MaxValue;              // best training MSE so far
            double bestMSE      = double.MaxValue;              // best validation MSE so far
            double bestAccuracy = double.MaxValue;              // best validation accuracy so far
            double initialMSE   = double.MaxValue;              // MSE for first epoch
            int    eCount       = 0;                            // number of epochs since the best MSE
            int    bestEpoch    = 0;                            // epoch number of best MSE
            bool   done         = false;
            bool   checkDone    = false;                        // if true, check to see if we're done

            Console.WriteLine("Epoch\tMSE (training)\t\tMSE (validation)\taccuracy (validation)");
            if (m_outputFile != null)
            {
                m_outputFile.WriteLine(string.Format("{0} layers, {1} output nodes", m_layers.Count, m_layers[m_layers.Count - 1].Count));
                m_outputFile.WriteLine("Momentum: " + m_momentum);
                m_outputFile.WriteLine();
                m_outputFile.WriteLine("Weights");
                PrintWeights();
                m_outputFile.WriteLine("Epoch\tMSE (training)\t\tMSE (validation)\taccuracy (validation)");
            }

            do
            {
                // shuffle the training set
                trainFeatures.Shuffle(m_rand, trainLabels);

                double trainMSE;

                if (m_weights != null)
                {
                    // not training
                    trainMSE = VGetMSE(trainFeatures, trainLabels);
                    epoch++;
                }
                else
                {
                    trainMSE = TrainEpoch(++epoch, trainFeatures, trainLabels);
                }

                // check the MSE after this epoch
                double mse = VGetMSE(validationFeatures, validationLabels);

                // check the validation accuracy after this epoch
                double accuracy = VMeasureAccuracy(validationFeatures, validationLabels, null);

                Console.WriteLine(string.Format("{0}\t{1}\t{2}\t{3}", epoch, trainMSE, mse, accuracy));
                if (m_outputFile != null)
                {
                    m_outputFile.WriteLine(string.Format("{0}\t{1}\t{2}\t{3}", epoch, trainMSE, mse, accuracy));
                }

                if (m_weights != null)
                {
                    // not really training
                    done = true;
                }
                else if (mse == 0.0)
                {
                    // can't get better than this
                    done = true;
                }
                else if ((epoch == 1) || (mse <= bestMSE))
                {
                    if (epoch == 1)
                    {
                        // save the initial MSE
                        initialMSE = mse;
                    }
                    else if (!checkDone && (mse < initialMSE * 0.9))
                    {
                        checkDone = true;
                    }

                    // save the best for later
                    bestTrainMSE = trainMSE;
                    bestMSE      = mse;
                    bestAccuracy = accuracy;
                    bestEpoch    = epoch;
                    eCount       = 0;
                    for (var layer = 0; layer < m_layers.Count - 1; layer++)
                    {
                        foreach (var node in m_layers[layer])
                        {
                            node.SaveBestWeights();
                        }
                    }
                }
                else if (checkDone)
                {
                    // check to see if we're done
                    eCount++;
                    if (eCount >= 20)
                    {
                        done = true;
                    }
                }
                else if (mse > initialMSE * 1.1)
                {
                    // are we getting really worse?
                    checkDone = true;
                }
                else if (epoch >= 10000)
                {
                    // time to stop
                    done = true;
                }
            } while (!done);

            if (m_outputFile != null)
            {
                m_outputFile.WriteLine();
                m_outputFile.WriteLine("Weights");
                PrintWeights();
            }

            if ((bestEpoch > 0) && (bestEpoch != epoch))
            {
                for (var layer = 0; layer < m_layers.Count - 1; layer++)
                {
                    foreach (var node in m_layers[layer])
                    {
                        node.RestoreBestWeights();
                    }
                }
                if (m_outputFile != null)
                {
                    m_outputFile.WriteLine();
                    m_outputFile.WriteLine(string.Format("Best Weights (from Epoch {0}, trainMSE={1}, valMSE={2}, valAcc={3})", bestEpoch, bestTrainMSE, bestMSE, bestAccuracy));
                    PrintWeights();
                }
            }

            if (m_outputFile != null)
            {
                m_outputFile.Close();
            }
        }
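Examples #5, #8, #9 and #12 all use the same early-stopping scheme: track the best validation MSE (saving the weights that produced it), arm the stop check once the MSE has fallen below 90% of the first epoch's value, and stop after 20 consecutive epochs without a new best (or at a hard epoch cap, or when the MSE reaches zero). A compact sketch of just that bookkeeping, written here to summarise the pattern rather than copied from the repository:

        // Hypothetical tracker paraphrasing the early-stopping pattern used by
        // the VTrain methods on this page.
        class EarlyStopping
        {
            private readonly int _patience;
            private double _initialMse = double.MaxValue;
            private double _bestMse = double.MaxValue;
            private int _epochsSinceBest;
            private bool _armed;

            public EarlyStopping(int patience = 20)
            {
                _patience = patience;
            }

            public bool ShouldStop(int epoch, double validationMse)
            {
                if (validationMse == 0.0)
                {
                    return true;                                    // cannot improve on zero error
                }

                if (epoch == 1)
                {
                    _initialMse = validationMse;
                }

                if (epoch == 1 || validationMse < _bestMse)
                {
                    // new best: arm the check once the MSE has clearly improved,
                    // remember the best value and reset the patience counter
                    if (epoch > 1 && !_armed && validationMse < _initialMse * 0.9)
                    {
                        _armed = true;
                    }
                    _bestMse = validationMse;
                    _epochsSinceBest = 0;
                    return false;
                }

                if (!_armed)
                {
                    return false;
                }

                _epochsSinceBest++;
                return _epochsSinceBest >= _patience;
            }
        }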
Code example #10
        private double TrainEpoch(int epoch, VMatrix features, VMatrix labels)
        {
            double sse = 0;
            var    lo  = new object();
            var    cl  = 0;

            if (Parameters.Verbose)
            {
                Console.Write("TrainEpoch ");
                cl = Console.CursorLeft;
            }

            for (var rowCount = 1; rowCount <= features.Rows(); rowCount++)
            {
                if (Parameters.Verbose)
                {
                    Console.SetCursorPosition(cl, Console.CursorTop);
                    Console.Write(rowCount);
                }

                var row = m_rand.Next(features.Rows() - m_k + 1) + m_k - 1;

                for (var r = row - m_k + 1; r <= row; r++)
                {
                    SetInputs(features, r);
                }

                // calculate the output
                for (var layer = 1; layer < m_layers.Count; layer++)
                {
                    Parallel.ForEach(m_layers[layer], node =>
                    {
                        if (!(node is InputNode))
                        {
                            node.net    = 0;
                            node.output = 0;
                            node.error  = 0;

                            // calculate the net value
                            for (var w = 0; w < node.weights.Length - 1; w++)
                            {
                                var nNode = m_layers[layer - 1][w];
                                node.net += node.weights[w] * nNode.output;
                            }
                            // add the bias
                            node.net += node.weights[node.weights.Length - 1];

                            // calculate the output
                            node.output = 1.0 / (1.0 + Math.Exp(-node.net));
                        }
                    });
                }

                // calculate the error and weight changes
                for (var layer = m_layers.Count - 1; layer > 0; layer--)
                {
                    Parallel.ForEach(m_layers[layer], node =>
                    {
                        if (!(node is InputNode))
                        {
                            var fPrime = node.output * (1.0 - node.output);
                            if (node is OutputNode)
                            {
                                // output layer
                                var oNode  = node as OutputNode;
                                var target = labels.Get(row, oNode.labelCol);
                                if (!oNode.isContinuous)
                                {
                                    // nominal
                                    if (target == oNode.labelVal)
                                    {
                                        target = 0.9;
                                    }
                                    else
                                    {
                                        target = 0.1;
                                    }
                                }

                                var error  = target - node.output;
                                node.error = error * fPrime;
                                lock (lo) { sse += error * error; }
                            }
                            else
                            {
                                // hidden layer
                                double sum = 0;
                                foreach (var tn in m_layers[layer + 1])
                                {
                                    if (!(tn is InputNode))
                                    {
                                        sum += tn.error * tn.weights[node.index];
                                    }
                                }
                                node.error = sum * fPrime;
                            }

                            // calculate the weight changes
                            double delta;
                            for (var w = 0; w < node.weights.Length - 1; w++)
                            {
                                var dNode      = m_layers[layer - 1][w];
                                delta          = m_rate * node.error * dNode.output;
                                delta         += m_momentum * node.deltas[w];
                                node.deltas[w] = delta;
                            }

                            // calculate the bias weight change
                            delta  = m_rate * node.error;
                            delta += m_momentum * node.deltas[node.weights.Length - 1];
                            node.deltas[node.weights.Length - 1] = delta;
                        }
                    });
                }

                // update the weights
                for (var layer = 1; layer < m_layers.Count; layer++)
                {
                    var idx = m_inputs;
                    foreach (var node in m_layers[layer])
                    {
                        if (node is OutputNode)
                        {
                            for (var w = 0; w < node.weights.Length; w++)
                            {
                                node.weights[w] += node.deltas[w];
                            }
                        }
                        else if (node is HiddenNode)
                        {
                            var dNode = m_layers[1][idx++] as HiddenNode;
                            for (var w = 0; w < node.weights.Length; w++)
                            {
                                dNode.weights[w] += node.deltas[w];
                            }
                        }
                    }
                }

                CopyWeights();
            }

            if (Parameters.Verbose)
            {
                Console.WriteLine();
            }

            return(sse / features.Rows());
        }
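The weight-change computation in this example is the usual backpropagation delta with momentum: the new delta is the learning rate times the node's error times the upstream output, plus the momentum term times the previous delta (the bias uses an implicit upstream output of 1). As a one-line restatement (hypothetical helper name):

        // delta = rate * error * upstreamOutput + momentum * previousDelta
        static double MomentumDelta(double rate, double error, double upstreamOutput,
            double momentum, double previousDelta)
        {
            return rate * error * upstreamOutput + momentum * previousDelta;
        }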
Code example #11
        private double TrainDBN(int hLayer, int epoch, VMatrix features, VMatrix labels)
        {
            double sse;
            double sseAccum = 0;
            object lo       = new object();

            Console.Write(string.Format("TrainDBN {0} - ", hLayer));
            int cl = Console.CursorLeft;

//			if ((hLayer == 1) && (epoch == 1))
//			{
//				m_layers[0][0].weights[0] = 0;
//				m_layers[0][1].weights[0] = 0;
//				m_layers[1][0].weights[0] = 0.6;
//				m_layers[1][0].weights[1] = 0.4;
//				m_layers[1][0].weights[2] = 0;
//				//m_layers[1][0].weights[3] = 0;
//				m_layers[1][1].weights[0] = 0.5;
//				m_layers[1][1].weights[1] = -0.1;
//				m_layers[1][1].weights[2] = 0;
//				//m_layers[1][1].weights[3] = 0;
//			}

            for (var row = 0; row < features.Rows(); row++)
            {
                sse = 0;

                Console.SetCursorPosition(cl, Console.CursorTop);
                Console.Write(row);

                //DropNodes();

                // calculate the output
                for (var layer = 0; layer <= hLayer; layer++)
                {
#if parallel
                    Parallel.ForEach(m_layers[layer], node =>
#else
                    foreach (var node in m_layers[layer])
#endif
                    {
                        node.net     = 0;
                        node.output  = 0;
                        node.sample  = 0;
                        node.net2    = 0;
                        node.output2 = 0;
                        node.sample2 = 0;
                        node.error   = 0;

                        if (node.isActive)
                        {
                            if (layer == 0)
                            {
                                // input node
                                node.output = features.Get(row, node.index);
                                node.sample = node.output;
                            }
                            else
                            {
                                // calculate the net value
                                int wCount = m_layers[layer - 1].Count;
                                for (var w = 0; w < wCount; w++)
                                {
                                    var nNode = m_layers[layer - 1][w];
                                    if (nNode.isActive)
                                    {
                                        if (m_sample)
                                        {
                                            node.net += node.weights[w] * nNode.sample;
                                        }
                                        else
                                        {
                                            node.net += node.weights[w] * nNode.output;
                                        }
                                    }
                                }
                                // add the bias
                                node.net += node.weights[wCount];

                                // calculate the output
                                node.output = 1.0 / (1.0 + Math.Exp(-node.net));

                                // sample
                                lock (lo) { node.sample = (m_rand.NextDouble() < node.output ? 1 : 0); }
                            }
                        }
#if parallel
                    });
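This excerpt ends inside the upward pass of the DBN pre-training step. The lock-protected line implements a stochastic binary unit: the node's sample is 1 with probability equal to its sigmoid output, which is the usual sampling step in RBM-style layer-wise pre-training. Restated as a hypothetical helper:

        // Hypothetical restatement of the sampling step above: fire with
        // probability equal to the unit's activation, otherwise output 0.
        static double SampleBinary(Random rand, double activationProbability)
        {
            return rand.NextDouble() < activationProbability ? 1.0 : 0.0;
        }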
Code example #12
        public override void VTrain(VMatrix features, VMatrix labels)
        {
            if (m_hidden.Length < 1)
            {
                m_hidden = new int[1] {
                    features.Cols() * 2
                };
            }

            // add the input nodes
            List <Node> iNodes = new List <Node>();

            for (var i = 0; i < features.Cols(); i++)
            {
                iNodes.Add(new InputNode(i, 0, m_rand));
            }

            m_layers.Add(iNodes);

            int prevNodes = iNodes.Count + 1;
            int wIdx      = 0;                                                          // index into the weights array

            // add the hidden nodes
            for (var layer = 0; layer < m_hidden.Length; layer++)
            {
                // add the nodes for this layer
                List <Node> hNodes = new List <Node>();

                // if not the last 2 hidden layers, add c bias weight
                if (layer < m_hidden.Length - 2)
                {
                    prevNodes++;
                }

                for (var n = 0; n < m_hidden[layer]; n++)
                {
                    if (m_weights != null)
                    {
                        hNodes.Add(new HiddenNode(prevNodes, m_rand, m_weights[wIdx++]));
                    }
                    else
                    {
                        hNodes.Add(new HiddenNode(prevNodes, m_rand, null));
                    }
                }

                prevNodes = hNodes.Count + 1;
                m_layers.Add(hNodes);
            }

            // add the output nodes - figure out how many outputs we need
            List <Node> oNodes = new List <Node>();

            for (var col = 0; col < labels.Cols(); col++)
            {
                var labelValueCount = labels.ValueCount(col);

                if (labelValueCount < 2)
                {
                    // continuous
                    if (m_weights != null)
                    {
                        oNodes.Add(new OutputNode(prevNodes, true, col, -1, m_rand, m_weights[wIdx++]));
                    }
                    else
                    {
                        oNodes.Add(new OutputNode(prevNodes, true, col, -1, m_rand, null));
                    }
                }
                else
                {
                    for (var n = 0; n < labelValueCount; n++)
                    {
                        if (m_weights != null)
                        {
                            oNodes.Add(new OutputNode(prevNodes, false, col, n, m_rand, m_weights[wIdx++]));
                        }
                        else
                        {
                            oNodes.Add(new OutputNode(prevNodes, false, col, n, m_rand, null));
                        }
                    }
                }
            }

            m_layers.Add(oNodes);

            InitNodes();

            int     trainSize          = (int)(0.75 * features.Rows());
            VMatrix trainFeatures      = new VMatrix(features, 0, 0, trainSize, features.Cols());
            VMatrix trainLabels        = new VMatrix(labels, 0, 0, trainSize, labels.Cols());
            VMatrix validationFeatures = new VMatrix(features, trainSize, 0, features.Rows() - trainSize, features.Cols());
            VMatrix validationLabels   = new VMatrix(labels, trainSize, 0, labels.Rows() - trainSize, labels.Cols());

            int    epoch     = 0;                               // current epoch number
            int    bestEpoch = 0;                               // epoch number of best MSE
            int    eCount;                                      // number of epochs since the best MSE
            bool   checkDone;                                   // if true, check to see if we're done
            double bestTrainMSE = double.MaxValue;              // best training MSE so far
            double bestMSE      = double.MaxValue;              // best validation MSE so far
            double bestAccuracy = double.MaxValue;              // best validation accuracy so far

            Console.WriteLine("Epoch\tMSE (training)\t\tMSE (validation)\taccuracy (validation)");
            if (m_outputFile != null)
            {
                m_outputFile.WriteLine(string.Format("{0} layers, {1} output nodes", m_layers.Count, m_layers[m_layers.Count - 1].Count));
                m_outputFile.WriteLine("Momentum: " + m_momentum);
                m_outputFile.WriteLine();
                m_outputFile.WriteLine("Weights");
                PrintWeights();
                m_outputFile.WriteLine("Epoch\tMSE (training)\t\tMSE (validation)\taccuracy (validation)");
            }

            if (m_weights != null)
            {
                // not training
                double trainMSE = VGetMSE(trainFeatures, trainLabels);
                epoch = 1;
                double mse      = VGetMSE(validationFeatures, validationLabels);
                double accuracy = VMeasureAccuracy(validationFeatures, validationLabels, null);
                Console.WriteLine(string.Format("{0}\t{1}\t{2}\t{3}", epoch, trainMSE, mse, accuracy));
                if (m_outputFile != null)
                {
                    m_outputFile.WriteLine(string.Format("{0}\t{1}\t{2}\t{3}", epoch, trainMSE, mse, accuracy));
                }
            }
            else
            {
                for (int hLayer = 1; hLayer <= m_hidden.Length; hLayer++)
                {
                    if (hLayer < m_hidden.Length)
                    {
                        // dbn layer
                        epoch     = 0;
                        eCount    = 0;
                        checkDone = false;
                        double wDelta    = 0;
                        double lastDelta = 0;
                        double bestDelta = double.MaxValue;
                        int    maxEpochs = 500000 / trainFeatures.Rows();
                        if (maxEpochs < 10)
                        {
                            maxEpochs = 10;
                        }

                        for (; ;)
                        {
                            // shuffle the training set
                            trainFeatures.Shuffle(m_rand, trainLabels);
                            wDelta = TrainDBN(hLayer, ++epoch, trainFeatures, trainLabels);

                            Console.WriteLine(string.Format("{0}\t{1}", epoch, wDelta));
                            if (m_outputFile != null)
                            {
                                m_outputFile.WriteLine(string.Format("{0}\t{1}", epoch, wDelta));
                            }

                            if (epoch > maxEpochs)
                            {
                                break;
                            }
                            else if (epoch == 1)
                            {
                                bestDelta = wDelta;
                            }
                            else if ((wDelta / lastDelta) >= 0.99)
                            {
                                if (!checkDone)
                                {
                                    checkDone = true;
                                    eCount    = 0;
                                }
                            }
                            else if (wDelta < bestDelta)
                            {
                                checkDone = false;
                            }
                            else if (!checkDone)
                            {
                                checkDone = true;
                                eCount    = 0;
                            }

                            if (checkDone)
                            {
                                // check to see if we're done
                                eCount++;
                                if (eCount >= 5)
                                {
                                    break;
                                }
                            }

                            if (wDelta < bestDelta)
                            {
                                bestDelta = wDelta;
                            }
                            lastDelta = wDelta;
                        }
                    }
                    else
                    {
                        // final hidden layer
                        epoch     = 0;
                        eCount    = 0;
                        checkDone = false;

                        double initialMSE = double.MaxValue;                            // MSE for first epoch

                        for (; ;)
                        {
                            // shuffle the training set
                            trainFeatures.Shuffle(m_rand, trainLabels);
                            double trainMSE = TrainEpoch(++epoch, trainFeatures, trainLabels);

                            // check the MSE after this epoch
                            double mse = VGetMSE(validationFeatures, validationLabels);

                            // check the validation accuracy after this epoch
                            double accuracy = VMeasureAccuracy(validationFeatures, validationLabels, null);

                            Console.WriteLine(string.Format("{0}\t{1}\t{2}\t{3}", epoch, trainMSE, mse, accuracy));
                            if (m_outputFile != null)
                            {
                                m_outputFile.WriteLine(string.Format("{0}\t{1}\t{2}\t{3}", epoch, trainMSE, mse, accuracy));
                            }

                            if (mse == 0.0)
                            {
                                // can't get better than this
                                break;
                            }
                            else if ((epoch == 1) || (mse <= bestMSE))
                            {
                                if (epoch == 1)
                                {
                                    // save the initial MSE
                                    initialMSE = mse;
                                }
                                else if (!checkDone && (mse < initialMSE * 0.9))
                                {
                                    checkDone = true;
                                }

                                // save the best for later
                                bestTrainMSE = trainMSE;
                                bestMSE      = mse;
                                bestAccuracy = accuracy;
                                bestEpoch    = epoch;
                                eCount       = 0;
                                for (var layer = 0; layer < m_layers.Count - 1; layer++)
                                {
                                    foreach (var node in m_layers[layer])
                                    {
                                        node.SaveBestWeights();
                                    }
                                }
                            }
                            else if (checkDone)
                            {
                                // check to see if we're done
                                eCount++;
                                if (eCount >= 20)
                                {
                                    break;
                                }
                            }
                            else if (mse > initialMSE * 1.1)
                            {
                                // is the validation error getting significantly worse?
                                checkDone = true;
                            }
                            else if (epoch >= 10000)
                            {
                                // time to stop
                                break;
                            }
                        }
                    }
                }
            }

            if (m_outputFile != null)
            {
                m_outputFile.WriteLine();
                m_outputFile.WriteLine("Weights");
                PrintWeights();
            }

            if ((bestEpoch > 0) && (bestEpoch != epoch))
            {
                for (var layer = 0; layer < m_layers.Count - 1; layer++)
                {
                    foreach (var node in m_layers[layer])
                    {
                        node.RestoreBestWeights();
                    }
                }
                if (m_outputFile != null)
                {
                    m_outputFile.WriteLine();
                    m_outputFile.WriteLine(string.Format("Best Weights (from Epoch {0}, trainMSE={1}, valMSE={2}, valAcc={3})", bestEpoch, bestTrainMSE, bestMSE, bestAccuracy));
                    PrintWeights();
                }
            }

            if (m_outputFile != null)
            {
                m_outputFile.Close();
            }
        }
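
A side note on the stopping rules above: both loops track the best validation metric, flip a checkDone flag once improvement stalls, and then allow a fixed number of extra epochs before breaking out. The sketch below shows that patience-style early stopping in isolation (the checkDone trigger tied to the initial MSE is omitted for brevity); trainOneEpoch and validate are hypothetical delegates standing in for the TrainDBN/TrainEpoch and VGetMSE calls used above.

using System;

public static class EarlyStoppingSketch
{
    // Train until the validation MSE fails to improve for `patience` consecutive
    // epochs (or maxEpochs is reached); returns the epoch with the best MSE.
    public static int Train(Action<int> trainOneEpoch, Func<double> validate,
                            int patience = 20, int maxEpochs = 10000)
    {
        var bestMse = double.MaxValue;
        var bestEpoch = 0;
        var sinceBest = 0;

        for (var epoch = 1; epoch <= maxEpochs; epoch++)
        {
            trainOneEpoch(epoch);      // one pass over the (shuffled) training set
            var mse = validate();      // MSE on the held-out validation split

            if (mse < bestMse)
            {
                bestMse = mse;
                bestEpoch = epoch;
                sinceBest = 0;         // the real code also calls SaveBestWeights() here
            }
            else if (++sinceBest >= patience)
            {
                break;                 // no improvement for `patience` epochs
            }
        }

        return bestEpoch;              // the real code restores the weights from this epoch
    }
}
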
Code Example #13
        public override void VTrain(VMatrix features, VMatrix labels)
        {
            if (labels.ValueCount(0) > 2)
            {
                m_count = labels.ValueCount(0);
            }

            // create one set of weights for each output
            m_weights = new List <double[]>();
            for (var p = 0; p < m_count; p++)
            {
                var weights = new double[features.Cols() + 1];
                for (var i = 0; i < weights.Length; i++)
                {
                    weights[i] = 1.0 - (m_rand.NextDouble() * 2.0);
                }
                m_weights.Add(weights);
            }

            // iterate through each of the instances
            for (var instance = 0; instance < m_count; instance++)
            {
                double error;                                          // error rate for the current epoch
                var    bestError    = 1.0;                             // best (smallest) error rate so far
                var    eCount       = 0;                               // number of epochs since the best error
                var    epoch        = 0;                               // current epoch number
                var    done         = false;
                double bestAccuracy = 0;                               // best accuracy so far
                var    bestEpoch    = 0;                               // epoch number of best accuracy
                var    bestWeights  = new double[features.Cols() + 1]; // best weights

                if (m_outputFile != null)
                {
                    m_outputFile.WriteLine("Instance " + instance);
                    m_outputFile.WriteLine("Epoch\tError Rate");
                }

                do
                {
                    // shuffle the training set
                    features.Shuffle(m_rand, labels);

                    error = TrainEpoch(instance, ++epoch, features, labels);

                    // check the accuracy after this epoch
                    var accuracy = GetAccuracy(instance, features, labels);
                    if (accuracy > bestAccuracy)
                    {
                        // save the best for later
                        bestAccuracy = accuracy;
                        bestEpoch    = epoch;
                        for (var i = 0; i < bestWeights.Length; i++)
                        {
                            bestWeights[i] = m_weights[instance][i];
                        }
                    }

                    if (error == 0.0)
                    {
                        // can't get better than this
                        done = true;
                    }
                    else if ((epoch == 1) || (error <= bestError))
                    {
                        // save the best error so far
                        bestError = error;
                        eCount    = 0;
                    }
                    else
                    {
                        // check to see if we're done
                        eCount++;
                        if (eCount >= 10)
                        {
                            done = true;
                        }
                    }
                } while (!done);

                if (m_outputFile != null)
                {
                    m_outputFile.WriteLine();
                    m_outputFile.WriteLine("Weights");
                    for (var i = 0; i < m_weights[instance].Length - 1; i++)
                    {
                        m_outputFile.Write(string.Format("{0}\t", m_weights[instance][i]));
                    }
                    m_outputFile.WriteLine(string.Format("{0}\t", m_weights[instance][m_weights[instance].Length - 1]));
                    m_outputFile.WriteLine();
                }

                if (bestEpoch != epoch)
                {
                    for (var i = 0; i < bestWeights.Length; i++)
                    {
                        m_weights[instance][i] = bestWeights[i];
                    }
                    if (m_outputFile != null)
                    {
                        m_outputFile.WriteLine();
                        m_outputFile.WriteLine(string.Format("Best Weights (from Epoch {0}, accuracy={1})", bestEpoch, bestAccuracy));
                        for (var i = 0; i < m_weights[instance].Length - 1; i++)
                        {
                            m_outputFile.Write(string.Format("{0}\t", m_weights[instance][i]));
                        }
                        m_outputFile.WriteLine(string.Format("{0}\t", m_weights[instance][m_weights[instance].Length - 1]));
                        m_outputFile.WriteLine();
                    }
                }
            }

            if (m_outputFile != null)
            {
                m_outputFile.Close();
            }
        }
Code Example #14
        private double TrainEpoch(int instance, int epoch, VMatrix features, VMatrix labels)
        {
            if (m_outputFile == null)
            {
                Console.WriteLine(epoch);
            }

            var eCount = 0;

            for (var row = 0; row < features.Rows(); row++)
            {
                double net = 0;

                // calculate the net value
                for (var col = 0; col < features.Cols(); col++)
                {
                    net += m_weights[instance][col] * features.Row(row)[col];
                }

                // add the bias
                net += m_weights[instance][m_weights[instance].Length - 1];

                var z = (net > 0 ? 1.0 : 0);
                var t = labels.Row(row)[0];
                if (m_count > 2)
                {
                    t = (t == instance) ? 1.0 : 0;
                }

                // check to see if the predicted matches the actual
                if (z != t)
                {
                    eCount++;
                    double delta;

                    // adjust the weights
                    for (var i = 0; i < m_weights[instance].Length - 1; i++)
                    {
                        delta = (t - z) * m_rate * features.Row(row)[i];
                        //Console.Write(string.Format("{0}\t", delta));
                        m_weights[instance][i] += delta;
                    }
                    // adjust the bias weight
                    delta = (t - z) * m_rate;
                    //Console.WriteLine(delta);
                    m_weights[instance][m_weights[instance].Length - 1] += delta;
                }
            }

            // print the new weights
            if (m_outputFile == null)
            {
                for (var i = 0; i < m_weights[instance].Length - 1; i++)
                {
                    Console.Write(string.Format("{0}\t", m_weights[instance][i]));
                }
                Console.WriteLine(m_weights[instance][m_weights[instance].Length - 1]);
            }

            var error = 1.0 * eCount / features.Rows();

            if (m_outputFile == null)
            {
                Console.WriteLine(error);
                Console.WriteLine();
            }
            else
            {
                m_outputFile.WriteLine(string.Format("{0}\t{1}", epoch, error));
            }

            return(error);
        }
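
The error-correction step above is the classic perceptron delta rule: when the thresholded output z disagrees with the target t, every weight moves by rate * (t - z) * x_i and the bias (stored as the last weight) moves by rate * (t - z). A standalone sketch of that update for a single training row, using plain arrays instead of VMatrix:

public static class PerceptronSketch
{
    // Delta-rule update for one row; `weights` holds one extra slot at the end
    // for the bias, whose input is implicitly 1.
    public static void UpdateRow(double[] weights, double[] x, double target, double rate)
    {
        var net = weights[weights.Length - 1];      // start with the bias
        for (var i = 0; i < x.Length; i++)
        {
            net += weights[i] * x[i];
        }

        var z = net > 0 ? 1.0 : 0.0;                // threshold activation
        if (z == target)
        {
            return;                                 // correct prediction: no update
        }

        for (var i = 0; i < x.Length; i++)
        {
            weights[i] += rate * (target - z) * x[i];
        }
        weights[weights.Length - 1] += rate * (target - z);
    }
}
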
Code Example #15
File: SOM.cs Project: robertst01/MLSystemManager
        public override void VTrain(VMatrix features, VMatrix labels)
        {
            int numWeights = features.Cols();

            // add the nodes
            List <Node> nodes = new List <Node>();

            for (int row = 0; row < m_gridSize; row++)
            {
                for (int col = 0; col < m_gridSize; col++)
                {
                    var labelValueCount = labels.ValueCount(0);

                    if (labelValueCount < 2)
                    {
                        // continuous
                        throw new Exception("Output must be nominal");
                    }
                    else
                    {
                        nodes.Add(new Node(row, col, numWeights, labelValueCount, m_rand));
                    }
                }
            }

            m_layers.Add(nodes);

            if (m_outputFile != null)
            {
                m_outputFile.WriteLine(string.Format("Grid size: {0}", m_gridSize));
                m_outputFile.WriteLine(string.Format("Iterations: {0}", m_iterations));
                m_outputFile.WriteLine();
                m_outputFile.WriteLine("Weights");
                PrintWeights();
            }

            TrainEpoch(features, labels);

            //Console.WriteLine();
            //for (int row = 0; row < m_gridSize; row++)
            //{
            //	for (int col = 0; col < m_gridSize; col++)
            //	{
            //		int n = (row * m_gridSize) + col;
            //		int output = (int)m_layers[0][n].output;
            //		if (output >= 0)
            //		{
            //			Console.Write(output);
            //		}
            //		else
            //		{
            //			Console.Write('_');
            //		}
            //	}
            //	Console.WriteLine();
            //}

            FixOutputs();

            Console.WriteLine();
            for (int row = 0; row < m_gridSize; row++)
            {
                for (int col = 0; col < m_gridSize; col++)
                {
                    int n      = (row * m_gridSize) + col;
                    int output = (int)m_layers[0][n].output;
                    if (output >= 0)
                    {
                        Console.Write(output);
                    }
                    else
                    {
                        Console.Write('_');
                    }
                }
                Console.WriteLine();
            }

            if (m_outputFile != null)
            {
                m_outputFile.WriteLine();
                for (int row = 0; row < m_gridSize; row++)
                {
                    for (int col = 0; col < m_gridSize; col++)
                    {
                        int n      = (row * m_gridSize) + col;
                        int output = (int)m_layers[0][n].output;
                        if (output >= 0)
                        {
                            m_outputFile.Write(output);
                        }
                        else
                        {
                            m_outputFile.Write('_');
                        }
                    }
                    m_outputFile.WriteLine();
                }

                m_outputFile.WriteLine();
                m_outputFile.WriteLine("Weights");
                PrintWeights();
            }

            if (m_outputFile != null)
            {
                m_outputFile.Close();
            }
        }
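
The SOM code above repeatedly picks the best matching unit (BMU) for a random row and pulls nearby grid nodes toward that input. The sketch below shows one such iteration over plain arrays; the Gaussian neighbourhood and the flat row-major grid layout are standard SOM choices used here for illustration and may differ from the exact decay schedule in SOM.cs.

using System;

public static class SomSketch
{
    // One SOM iteration over a flat grid: grid[n] is the weight vector of node n,
    // laid out row-major so node n sits at (n / gridSize, n % gridSize).
    public static void Iterate(double[][] grid, int gridSize, double[] input,
                               double learningRate, double radius)
    {
        // 1. find the best matching unit (smallest squared distance to the input)
        var bmu = 0;
        var best = double.MaxValue;
        for (var n = 0; n < grid.Length; n++)
        {
            double d = 0;
            for (var w = 0; w < input.Length; w++)
            {
                var diff = input[w] - grid[n][w];
                d += diff * diff;
            }
            if (d < best)
            {
                best = d;
                bmu = n;
            }
        }

        // 2. pull every node toward the input, weighted by a Gaussian of its
        //    grid distance to the BMU
        int bmuRow = bmu / gridSize, bmuCol = bmu % gridSize;
        for (var n = 0; n < grid.Length; n++)
        {
            int row = n / gridSize, col = n % gridSize;
            double gridDist2 = (row - bmuRow) * (row - bmuRow) + (col - bmuCol) * (col - bmuCol);
            var influence = Math.Exp(-gridDist2 / (2 * radius * radius));
            for (var w = 0; w < input.Length; w++)
            {
                grid[n][w] += learningRate * influence * (input[w] - grid[n][w]);
            }
        }
    }
}
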
Code Example #16
 // Move the inputs down one slot
 private void SetInputs(VMatrix features, int row)
 {
     SetInputs(features.Row(row));
 }
Code Example #17
 /// <summary>Copies values from the given matrix object.</summary>
 /// <param name="sourceMatrix">The matrix to copy values from.</param>
 public void Set(VMatrix sourceMatrix) => CallVoid(nameof(Set), sourceMatrix);
Code Example #18
        // Calculate the MSE
        public override double VGetMSE(VMatrix features, VMatrix labels)
        {
            double sse = 0;
            var    cl  = 0;

            if (Parameters.Verbose)
            {
                Console.Write("VGetMSE ");
                cl = Console.CursorLeft;
            }

            for (var row = 0; row < features.Rows(); row++)
            {
                if (Parameters.Verbose)
                {
                    Console.SetCursorPosition(cl, Console.CursorTop);
                    Console.Write(row);
                }

                SetInputs(features, row);

                if (row >= m_k - 1)
                {
                    // calculate the output
                    for (var layer = 1; layer < m_layers.Count; layer++)
                    {
                        Parallel.ForEach(m_layers[layer], node =>
                        {
                            if (!(node is InputNode))
                            {
                                node.net    = 0;
                                node.output = 0;

                                // calculate the net value
                                for (var w = 0; w < node.weights.Length - 1; w++)
                                {
                                    node.net += node.weights[w] * m_layers[layer - 1][w].output;
                                }
                                // add the bias
                                node.net += node.weights[node.weights.Length - 1];

                                node.output = 1.0 / (1.0 + Math.Exp(-node.net));
                            }
                        });
                    }

                    // calculate the error of the output layer
                    foreach (OutputNode node in m_layers[m_layers.Count - 1])
                    {
                        var target = labels.Get(row, node.labelCol);
                        if (!node.isContinuous)
                        {
                            // nominal
                            if (target == node.labelVal)
                            {
                                target = 0.9;
                            }
                            else
                            {
                                target = 0.1;
                            }
                        }
                        var error = target - node.output;

                        // update the error
                        sse += error * error;
                    }
                }
            }

            if (Parameters.Verbose)
            {
                Console.WriteLine();
            }

            return(sse / (features.Rows() - m_k + 1));
        }
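
VGetMSE above encodes nominal targets as 0.9 for the matching label value and 0.1 otherwise before summing squared errors over the output nodes. A condensed sketch of that per-row accumulation, assuming the node outputs have already been computed into an array:

public static class MseSketch
{
    // Sum of squared errors for one row, using the 0.9 / 0.1 target encoding
    // for a nominal label whose correct class index is `targetClass`.
    public static double RowSse(double[] outputs, int targetClass)
    {
        double sse = 0;
        for (var k = 0; k < outputs.Length; k++)
        {
            var target = (k == targetClass) ? 0.9 : 0.1;
            var error = target - outputs[k];
            sse += error * error;
        }
        return sse;
    }
}
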
Code Example #19
        public double VMeasureAccuracy(VMatrix features, VMatrix labels, Matrix confusion)
        {
            if (features.Rows() != labels.Rows())
            {
                throw (new Exception("Expected the features and labels to have the same number of rows"));
            }
            if (labels.Cols() != 1)
            {
                throw (new Exception("Sorry, this method currently only supports one-dimensional labels"));
            }
            if (features.Rows() == 0)
            {
                throw (new Exception("Expected at least one row"));
            }

            var cl = 0;

            if (Parameters.Verbose)
            {
                Console.Write("VMeasureAccuracy ");
                cl = Console.CursorLeft;
            }

            var count  = features.Rows();
            var begRow = 0;

            if (this is BPTT)
            {
                var learner = this as BPTT;
                begRow = learner.m_k - 1;
                count -= begRow;
            }

            var labelValues = labels.ValueCount(0);

            if (labelValues == 0)             // If the label is continuous...
            {
                // The label is continuous, so measure root mean squared error
                var pred = new double[1];
                var sse  = 0.0;
                for (var i = 0; i < features.Rows(); i++)
                {
                    if (Parameters.Verbose)
                    {
                        Console.SetCursorPosition(cl, Console.CursorTop);
                        Console.Write(i);
                    }

                    var feat = features.Row(i);
                    var targ = labels.Row(i);
                    pred[0] = 0.0;                     // make sure the prediction is not biased by a previous prediction
                    Predict(feat, pred);
                    if (i >= begRow)
                    {
                        var delta = targ[0] - pred[0];
                        sse += (delta * delta);
                    }
                }

                if (Parameters.Verbose)
                {
                    Console.WriteLine();
                }

                return(Math.Sqrt(sse / count));
            }
            else
            {
                // The label is nominal, so measure predictive accuracy
                if (confusion != null)
                {
                    confusion.SetSize(labelValues, labelValues);
                    for (var i = 0; i < labelValues; i++)
                    {
                        confusion.SetAttrName(i, labels.AttrValue(0, i));
                    }
                }
                var correctCount = 0;
                var prediction   = new double[1];
                for (var i = 0; i < features.Rows(); i++)
                {
                    if (Parameters.Verbose)
                    {
                        Console.SetCursorPosition(cl, Console.CursorTop);
                        Console.Write(i);
                    }

                    var feat = features.Row(i);
                    var lab  = labels.Get(i, 0);
                    if (lab != Matrix.MISSING)
                    {
                        var targ = (int)lab;
                        if (targ >= labelValues)
                        {
                            throw new Exception("The label is out of range");
                        }
                        Predict(feat, prediction);
                        if (i >= begRow)
                        {
                            var pred = (int)prediction[0];
                            if (confusion != null)
                            {
                                confusion.Set(targ, pred, confusion.Get(targ, pred) + 1);
                            }
                            if (pred == targ)
                            {
                                correctCount++;
                            }
                        }
                    }
                    else
                    {
                        count--;
                    }
                }

                if (Parameters.Verbose)
                {
                    Console.WriteLine();
                }

                return((double)correctCount / count);
            }
        }
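
For nominal labels, the method above counts exact matches and optionally tallies a confusion matrix indexed by [target, predicted]. A plain-array sketch of that branch, with the project's Matrix type replaced by an int[,] purely for illustration:

public static class AccuracySketch
{
    // Fraction of predictions equal to the targets; if `confusion` is supplied
    // (sized [classCount, classCount]), confusion[target, predicted] is incremented.
    public static double MeasureAccuracy(int[] predicted, int[] targets, int[,] confusion = null)
    {
        var correct = 0;
        for (var i = 0; i < predicted.Length; i++)
        {
            if (confusion != null)
            {
                confusion[targets[i], predicted[i]]++;
            }
            if (predicted[i] == targets[i])
            {
                correct++;
            }
        }
        return (double)correct / predicted.Length;
    }
}
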
Code Example #20
 public abstract void VTrain(VMatrix features, VMatrix labels);
Code Example #21
 public virtual double VGetMSE(VMatrix features, VMatrix labels)
 {
     return(0);
 }
Code Example #22
File: SAE.cs Project: robertst01/MLSystemManager
        public override void VTrain(VMatrix features, VMatrix labels)
        {
            if (m_hidden.Length < 1)
            {
                m_hidden = new[] { features.Cols() * 2 };
            }

            int wIdx = 0;                                                               // index into the weights array

            // add the input nodes
            List <Node> iNodes = new List <Node>();

            for (var i = 0; i < features.Cols(); i++)
            {
                iNodes.Add(new InputNode(i, 0, m_rand));
            }

            m_layers.Add(iNodes);

            int     trainSize          = (int)(0.75 * features.Rows());
            VMatrix trainFeatures      = new VMatrix(features, 0, 0, trainSize, features.Cols());
            VMatrix trainLabels        = new VMatrix(labels, 0, 0, trainSize, labels.Cols());
            VMatrix validationFeatures = new VMatrix(features, trainSize, 0, features.Rows() - trainSize, features.Cols());
            VMatrix validationLabels   = new VMatrix(labels, trainSize, 0, labels.Rows() - trainSize, labels.Cols());

            Console.WriteLine("R-E-C\tMSE (training)\t\tMSE (validation)\taccuracy (validation)");
            if (m_outputFile != null)
            {
                m_outputFile.WriteLine("Momentum: " + m_momentum);
                m_outputFile.WriteLine();
                m_outputFile.WriteLine("Weights");
                PrintWeights();
                m_outputFile.WriteLine("R-E-C\tMSE (training)\t\tMSE (validation)\taccuracy (validation)");
            }

            if (m_weights != null)
            {
                // not training
                double trainMSE = VGetMSE(trainFeatures, trainLabels);
                double mse      = VGetMSE(validationFeatures, validationLabels);
                double accuracy = VMeasureAccuracy(validationFeatures, validationLabels, null);
                Console.WriteLine($"1\t{trainMSE}\t{mse}\t{accuracy}");
                if (m_outputFile != null)
                {
                    m_outputFile.WriteLine($"1\t{trainMSE}\t{mse}\t{accuracy}");
                }
            }
            else
            {
                for (int round = 1; round <= m_hidden.Length + 1; round++)
                {
                    if (round <= m_hidden.Length)
                    {
                        // add hidden nodes
                        int         prevNodes = m_layers[m_layers.Count - 1].Count + 1;
                        List <Node> hNodes    = new List <Node>();

                        for (var n = 0; n < m_hidden[m_layers.Count - 1]; n++)
                        {
                            if (m_weights != null)
                            {
                                hNodes.Add(new HiddenNode(prevNodes, m_rand, m_weights[wIdx++]));
                            }
                            else
                            {
                                hNodes.Add(new HiddenNode(prevNodes, m_rand, null));
                            }
                        }

                        m_layers.Add(hNodes);

                        prevNodes = hNodes.Count + 1;

                        // add output nodes
                        List <Node> oNodes = new List <Node>();

                        if (round < m_hidden.Length)
                        {
                            // dae layer - add output nodes to match inputs
                            for (var col = 0; col < m_layers[m_layers.Count - 2].Count; col++)
                            {
                                oNodes.Add(new OutputNode(prevNodes, true, col, -1, m_rand, null));
                            }
                        }
                        else
                        {
                            // final layer - figure out how many outputs we need
                            for (var col = 0; col < labels.Cols(); col++)
                            {
                                var labelValueCount = labels.ValueCount(col);

                                if (labelValueCount < 2)
                                {
                                    // continuous
                                    if (m_weights != null)
                                    {
                                        oNodes.Add(new OutputNode(prevNodes, true, col, -1, m_rand, m_weights[wIdx++]));
                                    }
                                    else
                                    {
                                        oNodes.Add(new OutputNode(prevNodes, true, col, -1, m_rand, null));
                                    }
                                }
                                else
                                {
                                    for (var n = 0; n < labelValueCount; n++)
                                    {
                                        if (m_weights != null)
                                        {
                                            oNodes.Add(new OutputNode(prevNodes, false, col, n, m_rand, m_weights[wIdx++]));
                                        }
                                        else
                                        {
                                            oNodes.Add(new OutputNode(prevNodes, false, col, n, m_rand, null));
                                        }
                                    }
                                }
                            }
                        }

                        m_layers.Add(oNodes);

                        InitNodes();
                    }

                    int    epoch        = 0;                                    // current epoch number
                    int    bestEpoch    = 0;                                    // epoch number of best MSE
                    int    eCount       = 0;                                    // number of epochs since the best MSE
                    bool   checkDone    = false;                                // if true, check to see if we're done
                    double mse          = 0;                                    // validation MSE
                    double bestTrainMSE = double.MaxValue;                      // best training MSE so far
                    double bestMSE      = double.MaxValue;                      // best validation MSE so far
                    double accuracy     = 0;                                    // validation accuracy
                    double bestAccuracy = double.MaxValue;                      // best validation accuracy so far

                    for (; ;)
                    {
                        // shuffle the training set
                        trainFeatures.Shuffle(m_rand, trainLabels);
                        double trainMSE = TrainEpoch(++epoch, trainFeatures, trainLabels, round < m_hidden.Length, round > m_hidden.Length);

                        // check the MSE after this epoch
                        if (round < m_hidden.Length)
                        {
                            mse      = IGetMSE(validationFeatures);
                            accuracy = 0;
                        }
                        else
                        {
                            mse = VGetMSE(validationFeatures, validationLabels);

                            // check the validation accuracy
                            accuracy = VMeasureAccuracy(validationFeatures, validationLabels, null);
                        }

                        Console.WriteLine($"{round}-{epoch}-{eCount}\t{trainMSE}\t{mse}\t{accuracy}");
                        if (m_outputFile != null)
                        {
                            m_outputFile.WriteLine($"{round}-{epoch}-{eCount}\t{trainMSE}\t{mse}\t{accuracy}");
                            m_outputFile.Flush();
                        }

                        if ((mse == 0.0) || (epoch > 10000))
                        {
                            break;
                        }
                        else if ((epoch == 1) || (mse < bestMSE))
                        {
                            if (epoch == 1)
                            {
                                // save the initial MSE
                                bestMSE = mse;
                            }
                            else if ((mse / bestMSE) > 0.99)
                            {
                                if (!checkDone)
                                {
                                    checkDone = true;
                                    eCount    = 0;
                                }
                            }
                            else
                            {
                                checkDone = false;
                                eCount    = 0;
                            }

                            // save the best for later
                            bestTrainMSE = trainMSE;
                            bestMSE      = mse;
                            bestAccuracy = accuracy;
                            bestEpoch    = epoch;
                            for (var layer = 0; layer < m_layers.Count - 1; layer++)
                            {
                                foreach (var node in m_layers[layer])
                                {
                                    node.SaveBestWeights();
                                }
                            }
                        }
                        else if (!checkDone)
                        {
                            checkDone = true;
                            eCount    = 0;
                        }

                        if (checkDone)
                        {
                            // check to see if we're done
                            eCount++;
                            if (eCount >= 20)
                            {
                                break;
                            }
                        }
                    }

                    if (round < m_hidden.Length)
                    {
                        // delete the output layer
                        m_layers.RemoveAt(m_layers.Count - 1);
                    }

                    if ((bestEpoch > 0) && (bestEpoch != epoch))
                    {
                        for (var layer = 0; layer < m_layers.Count - 1; layer++)
                        {
                            foreach (var node in m_layers[layer])
                            {
                                node.RestoreBestWeights();
                            }
                        }

                        Console.WriteLine($"Best Weights (from Epoch {bestEpoch}, trainMSE={bestTrainMSE}, valMSE={bestMSE})");
                        if (m_outputFile != null)
                        {
                            m_outputFile.WriteLine();
                            m_outputFile.WriteLine($"Best Weights (from Epoch {bestEpoch}, trainMSE={bestTrainMSE}, valMSE={bestMSE})");
                            m_outputFile.Flush();
                        }
                    }
                }
            }

            if (m_outputFile != null)
            {
                m_outputFile.WriteLine();
                m_outputFile.WriteLine("Weights");
                PrintWeights();
            }

            if (m_outputFile != null)
            {
                m_outputFile.Close();
            }
        }
Code Example #24
        public override void VTrain(VMatrix features, VMatrix labels)
        {
            if (Layers.Count < 1)
            {
                // create the layers
                if (Parameters.Hidden.Length < 1)
                {
                    Parameters.Hidden = new[] { features.Cols() * 2 };
                }

                // add the input nodes
                var iNodes = new List <Node>();
                for (var i = 0; i < features.Cols(); i++)
                {
                    iNodes.Add(new InputNode(i, i, _rand));
                }

                var iLayer = new Layer()
                {
                    Type     = LayerType.Input,
                    Nodes    = iNodes,
                    Previous = null,
                    Next     = null
                };
                Layers.Add(iLayer);

                var prevLayer = iLayer;

                // add the hidden nodes
                foreach (var t in Parameters.Hidden)
                {
                    var hNodes = new List <Node>();

                    for (var n = 0; n < t; n++)
                    {
                        var node = new HiddenNode(n, prevLayer.Nodes.Count + 1, t, _rand);
                        hNodes.Add(node);
                    }

                    var hLayer = new Layer()
                    {
                        Type     = LayerType.Hidden,
                        Nodes    = hNodes,
                        Previous = prevLayer,
                        Next     = null
                    };
                    Layers.Add(hLayer);

                    prevLayer.Next = hLayer;
                    prevLayer      = hLayer;
                }

                // add the output nodes
                var oNodes = new List <Node>();
                var oCount = 0;
                for (var col = 0; col < labels.Cols(); col++)
                {
                    var labelValueCount = labels.ValueCount(col);

                    if (labelValueCount < 2)
                    {
                        // continuous
                        oCount++;
                    }
                    else
                    {
                        oCount += labelValueCount;
                    }
                }

                for (var col = 0; col < labels.Cols(); col++)
                {
                    var labelValueCount = labels.ValueCount(col);

                    if (labelValueCount < 2)
                    {
                        // continuous
                        var node = new OutputNode(oNodes.Count, true, col, -1, prevLayer.Nodes.Count + 1, oCount, _rand);
                        oNodes.Add(node);
                    }
                    else
                    {
                        for (var n = 0; n < labelValueCount; n++)
                        {
                            var node = new OutputNode(oNodes.Count, false, col, n, prevLayer.Nodes.Count + 1, oCount, _rand);
                            oNodes.Add(node);
                        }
                    }
                }

                var oLayer = new Layer()
                {
                    Type     = LayerType.Output,
                    Nodes    = oNodes,
                    Previous = prevLayer
                };
                Layers.Add(oLayer);

                prevLayer.Next = oLayer;
            }

            var trainSize          = (int)(0.75 * features.Rows());
            var trainFeatures      = new VMatrix(features, 0, 0, trainSize, features.Cols());
            var trainLabels        = new VMatrix(labels, 0, 0, trainSize, labels.Cols());
            var validationFeatures = new VMatrix(features, trainSize, 0, features.Rows() - trainSize, features.Cols());
            var validationLabels   = new VMatrix(labels, trainSize, 0, labels.Rows() - trainSize, labels.Cols());

            Console.Write("Layers: ");
            foreach (var layer in Layers)
            {
                Console.Write(layer.Nodes.Count);
                if (layer.Type == LayerType.Output)
                {
                    Console.WriteLine();
                }
                else
                {
                    Console.Write('x');
                }
            }

            Console.WriteLine("AF: " + Parameters.Activation);

            Console.WriteLine("Epoch\tMSE (validation)");

            int    epoch;                                 // current epoch number
            var    bestEpoch     = 0;                     // epoch number of best MSE
            var    eCount        = 0;                     // number of epochs since the best MSE
            var    checkDone     = false;                 // if true, check to see if we're done
            var    initialMse    = Parameters.InitialMse; // MSE for first epoch
            var    bestMse       = Parameters.StartMse;   // best validation MSE so far
            double bestAccuracy  = 0;
            var    batchCount    = (trainFeatures.Rows() + Parameters.BatchSize - 1) / Parameters.BatchSize;
            int    countInterval = batchCount / 10;

            if (countInterval < 1)
            {
                countInterval = 1;
            }
            var startEpoch = Parameters.StartEpoch + 1;

            for (epoch = startEpoch;; epoch++)
            {
                // shuffle the training set
                trainFeatures.Shuffle(_rand, trainLabels);
                var cl = Console.CursorLeft;

                for (var batch = 0; batch < batchCount; batch++)
                {
                    var startIdx = batch * Parameters.BatchSize;
                    var count    = Parameters.BatchSize;
                    if ((startIdx + count) > trainFeatures.Rows())
                    {
                        count = trainFeatures.Rows() - startIdx;
                    }
                    TrainBatch(trainFeatures, trainLabels, startIdx, count);

                    if ((((batch + 1) % countInterval) == 0) || (batch == (batchCount - 1)))
                    {
                        Console.SetCursorPosition(cl, Console.CursorTop);
                        Console.Write(batch + 1);
                    }
                }

                Console.WriteLine();

                // check the MSE
                var mse = VGetMSE(validationFeatures, validationLabels);
                if ((epoch == startEpoch) && (initialMse == 0))
                {
                    // save the initial MSE
                    initialMse = mse;
                }
                var accuracy = VMeasureAccuracy(validationFeatures, validationLabels, null);

                if ((epoch % Parameters.SnapshotInterval) == 0)
                {
                    SaveSnapshot(epoch, mse, initialMse, accuracy);
                }

                Console.WriteLine($"{epoch}-{eCount}\t{mse}");

                if ((mse == 0) || (epoch > 5000))
                {
                    break;
                }

                if ((epoch == startEpoch) || (mse < bestMse))
                {
                    if ((epoch != startEpoch) && !checkDone && (mse < initialMse * 0.9))
                    {
                        checkDone = true;
                    }
                    eCount = 0;

                    // save the best for later
                    bestMse      = mse;
                    bestEpoch    = epoch;
                    bestAccuracy = accuracy;
                    foreach (var layer in Layers)
                    {
                        foreach (var node in layer.Nodes)
                        {
                            node.SaveBestWeights();
                        }
                    }
                }
                else if (checkDone)
                {
                    // check to see if we're done
                    eCount++;
                    if (eCount >= 20)
                    {
                        break;
                    }
                }
            }

            if ((bestEpoch > 0) && (bestEpoch != epoch))
            {
                foreach (var layer in Layers)
                {
                    foreach (var node in layer.Nodes)
                    {
                        node.RestoreBestWeights();
                    }
                }
            }

            SaveSnapshot(bestEpoch, bestMse, initialMse, bestAccuracy, true);
        }
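
The batch loop above derives the number of batches with ceiling division, (rows + BatchSize - 1) / BatchSize, and shortens the final batch when the row count is not an exact multiple of the batch size. A tiny sketch of that partitioning:

using System;

public static class BatchSketch
{
    // Prints the (startIdx, count) pairs the training loop above would use.
    public static void ShowBatches(int rows, int batchSize)
    {
        var batchCount = (rows + batchSize - 1) / batchSize;    // ceiling division
        for (var batch = 0; batch < batchCount; batch++)
        {
            var startIdx = batch * batchSize;
            var count = Math.Min(batchSize, rows - startIdx);   // last batch may be short
            Console.WriteLine($"batch {batch}: start={startIdx}, count={count}");
        }
    }
}

// Example: ShowBatches(103, 32) prints (0,32), (32,32), (64,32), (96,7).
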
Code Example #25
        public override void VTrain(VMatrix features, VMatrix labels)
        {
            if (m_hidden.Length < 1)
            {
                m_hidden = new[] { features.Cols() * 2 };
            }

            // add the input nodes
            var iNodes = new List <Node>();

            for (var i = 0; i < features.Cols(); i++)
            {
                iNodes.Add(new InputNode(i, 0, m_rand));
            }

            m_layers.Add(iNodes);

            // figure out how many outputs we need
            var oCount = 0;

            for (var col = 0; col < labels.Cols(); col++)
            {
                var labelValueCount = labels.ValueCount(col);

                if (labelValueCount < 2)
                {
                    // continuous
                    oCount++;
                }
                else
                {
                    oCount += labelValueCount;
                }
            }

            var trainSize          = (int)(0.75 * features.Rows());
            var trainFeatures      = new VMatrix(features, 0, 0, trainSize, features.Cols());
            var trainLabels        = new VMatrix(labels, 0, 0, trainSize, labels.Cols());
            var validationFeatures = new VMatrix(features, trainSize, 0, features.Rows() - trainSize, features.Cols());
            var validationLabels   = new VMatrix(labels, trainSize, 0, labels.Rows() - trainSize, labels.Cols());

            Console.Write("Layers: ");
            Console.Write(features.Cols());
            Console.Write('x');
            for (var l = 0; l < m_hidden.Length; l++)
            {
                Console.Write(m_hidden[l]);
                Console.Write('x');
            }
            Console.WriteLine(oCount);
            Console.WriteLine("Momentum: " + m_momentum);
            Console.WriteLine("C: " + m_corruptLevel);
            Console.WriteLine("AF: " + m_activation);
            Console.WriteLine($"AParam: {m_actLeak},{m_actThreshold},{m_actSlope},{m_actRandom}");
            Console.WriteLine("TrainAll: " + m_trainAll);
            Console.WriteLine("R-E-C\tMSE (validation)");
            if (m_outputFile != null)
            {
                m_outputFile.Write("Layers: ");
                m_outputFile.Write(features.Cols());
                m_outputFile.Write('x');
                for (var l = 0; l < m_hidden.Length; l++)
                {
                    m_outputFile.Write(m_hidden[l]);
                    m_outputFile.Write('x');
                }
                m_outputFile.WriteLine(oCount);
                m_outputFile.WriteLine("Momentum: " + m_momentum);
                m_outputFile.WriteLine("C: " + m_corruptLevel);
                m_outputFile.WriteLine("AF: " + m_activation);
                m_outputFile.WriteLine($"AParam: {m_actLeak},{m_actThreshold},{m_actSlope},{m_actRandom}");
                m_outputFile.WriteLine("TrainAll: " + m_trainAll);
                m_outputFile.WriteLine();
                m_outputFile.WriteLine("R-E-C\tMSE (validation)");
            }

            var maxRounds = (m_trainAll ? m_hidden.Length : m_hidden.Length + 1);

            for (var round = 1; round <= maxRounds; round++)
            {
                if (round <= m_hidden.Length)
                {
                    // add hidden nodes
                    var prevNodes = m_layers[m_layers.Count - 1].Count + 1;
                    var hNodes    = new List <Node>();

                    for (var n = 0; n < m_hidden[m_layers.Count - 1]; n++)
                    {
                        hNodes.Add(new HiddenNode(prevNodes, m_rand));
                    }

                    m_layers.Add(hNodes);

                    prevNodes = hNodes.Count + 1;

                    // add output nodes
                    var oNodes = new List <Node>();

                    // figure out how many outputs we need
                    for (var col = 0; col < labels.Cols(); col++)
                    {
                        var labelValueCount = labels.ValueCount(col);

                        if (labelValueCount < 2)
                        {
                            // continuous
                            oNodes.Add(new OutputNode(prevNodes, true, col, -1, m_rand));
                        }
                        else
                        {
                            for (var n = 0; n < labelValueCount; n++)
                            {
                                oNodes.Add(new OutputNode(prevNodes, false, col, n, m_rand));
                            }
                        }
                    }

                    m_layers.Add(oNodes);

                    InitNodes();
                }

                var epoch      = 0;                                     // current epoch number
                var bestEpoch  = 0;                                     // epoch number of best MSE
                var eCount     = 0;                                     // number of epochs since the best MSE
                var checkDone  = false;                                 // if true, check to see if we're done
                var initialMSE = double.MaxValue;                       // MSE for first epoch
                var bestMSE    = double.MaxValue;                       // best validation MSE so far

                for (; ;)
                {
                    // shuffle the training set
                    trainFeatures.Shuffle(m_rand, trainLabels);
                    TrainEpoch(++epoch, trainFeatures, trainLabels, round < m_hidden.Length, m_trainAll || (round > m_hidden.Length));

                    // check the MSE after this epoch
                    var mse = VGetMSE(validationFeatures, validationLabels);

                    Console.WriteLine($"{round}-{epoch}-{eCount}\t{mse}");
                    if (m_outputFile != null)
                    {
                        m_outputFile.WriteLine($"{round}-{epoch}-{eCount}\t{mse}");
                        m_outputFile.Flush();
                    }

                    if ((mse == 0.0) || (epoch > 10000))
                    {
                        break;
                    }
                    else if ((epoch == 1) || (mse < bestMSE))
                    {
                        if (epoch == 1)
                        {
                            // save the initial MSE
                            initialMSE = mse;
                        }
                        else if (!checkDone && (mse < initialMSE * 0.9))
                        {
                            checkDone = true;
                        }
                        eCount = 0;

                        // save the best for later
                        bestMSE   = mse;
                        bestEpoch = epoch;
                        for (var layer = 0; layer < m_layers.Count - 1; layer++)
                        {
                            foreach (var node in m_layers[layer])
                            {
                                node.SaveBestWeights();
                            }
                        }
                    }
                    else if (checkDone)
                    {
                        // check to see if we're done
                        eCount++;
                        if (eCount >= 20)
                        {
                            break;
                        }
                    }
                    else if ((epoch > 100) && /*(mse < initialMSE) &&*/ (mse > ((bestMSE + initialMSE) / 2)))
                    {
                        checkDone = true;
                    }
                }

                if (round < m_hidden.Length)
                {
                    // delete the output layer
                    m_layers.RemoveAt(m_layers.Count - 1);
                }

                if ((bestEpoch > 0) && (bestEpoch != epoch))
                {
                    for (var layer = 0; layer < m_layers.Count - 1; layer++)
                    {
                        foreach (var node in m_layers[layer])
                        {
                            node.RestoreBestWeights();
                        }
                    }

                    Console.WriteLine($"Best Weights (from Epoch {bestEpoch}, valMSE={bestMSE})");
                    if (m_outputFile != null)
                    {
                        m_outputFile.WriteLine();
                        m_outputFile.WriteLine($"Best Weights (from Epoch {bestEpoch}, valMSE={bestMSE})");
                        m_outputFile.Flush();
                    }
                }
            }

            if (m_outputFile != null)
            {
                m_outputFile.WriteLine();
                m_outputFile.WriteLine("Weights");
                PrintWeights();
            }

            if (m_outputFile != null)
            {
                m_outputFile.Close();
            }
        }
Code Example #26
        public override void VTrain(VMatrix features, VMatrix labels)
        {
            _features = new VMatrix(features, 0, 0, features.Rows(), features.Cols());
            if (labels.Data != null)
            {
                _labels = new VMatrix(labels, 0, 0, labels.Rows(), labels.Cols());
            }
            _clusters = new List <Cluster>();

            Console.Write("Algorithm: ");

            if (_algorithm == "k")
            {
                Console.WriteLine("k-means (k = " + _k + ")");

//				Features.Shuffle(Rand, Labels);

                // create the initial clusters
                for (var k = 0; k < _k; k++)
                {
                    var cluster = new Cluster(k, _features, k, _ignore);
                    _clusters.Add(cluster);
                    if (_outputFile != null)
                    {
                        cluster.PrintCentroid(_outputFile);
                    }
                }

                double lastSsd = double.MinValue;

                for (;;)
                {
                    var ssd = TrainK();
                    if (_outputFile != null)
                    {
                        _outputFile.WriteLine(string.Format("Sum squared-distance of each row with its centroid={0}", ssd));
                    }

                    if (ssd != lastSsd)
                    {
                        lastSsd = ssd;
                        if (_outputFile != null)
                        {
                            _outputFile.WriteLine("Recomputing the centroids of each cluster...");
                        }
                        foreach (var cluster in _clusters)
                        {
                            cluster.Recalculate();
                            cluster.ClearInstances();
                            if (_outputFile != null)
                            {
                                cluster.PrintCentroid(_outputFile);
                            }
                        }
                    }
                    else
                    {
                        break;
                    }
                }
            }
            else if (_algorithm == "single")
            {
                if (_outputFile != null)
                {
                    _outputFile.WriteLine("HAC single (k = " + _k + ")");
                }

                // create the initial clusters
                for (var row = 0; row < _features.Rows(); row++)
                {
                    var cluster = new Cluster(0, _features, row, _ignore);
                    cluster.AddInstance(row);
                    _clusters.Add(cluster);
                }

                // create the distance matrix
                _distances = new double[_features.Rows(), _features.Rows()];

                for (var row = 0; row < _features.Rows(); row++)
                {
                    for (var row2 = row; row2 < _features.Rows(); row2++)
                    {
                        double distance = 0;
                        if (row2 > row)
                        {
                            distance = _clusters[row].GetDistance(_features.Row(row2));
                        }
                        _distances[row, row2] = distance;
                        if (row != row2)
                        {
                            _distances[row2, row] = distance;
                        }
                    }
                }

                int iteration = 0;

                do
                {
                    TrainSingle(iteration++);
                } while (_clusters.Count > _k);
            }
            else if (_algorithm == "complete")
            {
                if (_outputFile != null)
                {
                    _outputFile.WriteLine("HAC complete (k = " + _k + ")");
                }

                // create the initial clusters
                for (var row = 0; row < _features.Rows(); row++)
                {
                    var cluster = new Cluster(0, _features, row, _ignore);
                    cluster.AddInstance(row);
                    _clusters.Add(cluster);
                }

                // create the distance matrix
                _distances = new double[_features.Rows(), _features.Rows()];

                for (var row = 0; row < _features.Rows(); row++)
                {
                    for (var row2 = row; row2 < _features.Rows(); row2++)
                    {
                        double distance = 0;
                        if (row2 > row)
                        {
                            distance = _clusters[row].GetDistance(_features.Row(row2));
                        }
                        _distances[row, row2] = distance;
                        if (row != row2)
                        {
                            _distances[row2, row] = distance;
                        }
                    }
                }

                int iteration = 0;

                do
                {
                    TrainComplete(iteration++);
                } while (_clusters.Count > _k);
            }
            else if (_algorithm == "average")
            {
                if (_outputFile != null)
                {
                    _outputFile.WriteLine("HAC average (k = " + _k + ")");
                }

                // create the initial clusters
                for (var row = 0; row < _features.Rows(); row++)
                {
                    var cluster = new Cluster(0, _features, row, _ignore);
                    cluster.AddInstance(row);
                    _clusters.Add(cluster);
                }

                // create the distance matrix
                _distances = new double[_features.Rows(), _features.Rows()];

                for (var row = 0; row < _features.Rows(); row++)
                {
                    for (var row2 = row; row2 < _features.Rows(); row2++)
                    {
                        double distance = 0;
                        if (row2 > row)
                        {
                            distance = _clusters[row].GetDistance(_features.Row(row2));
                        }
                        _distances[row, row2] = distance;
                        if (row != row2)
                        {
                            _distances[row2, row] = distance;
                        }
                    }
                }

                int iteration = 0;

                do
                {
                    TrainAverage(iteration++);
                } while (_clusters.Count > _k);
            }
            else
            {
                throw new Exception("Inavlid Algorithm - " + _algorithm);
            }

            if (_outputFile != null)
            {
                _outputFile.WriteLine();
                _outputFile.WriteLine("Cluster centroids:");

                _outputFile.Write("Cluster#\t\t\t");
                for (var c = 0; c < _clusters.Count; c++)
                {
                    _outputFile.Write("\t\t" + c);
                }
                _outputFile.WriteLine();

                _outputFile.Write("# of instances:\t\t\t");
                for (var c = 0; c < _clusters.Count; c++)
                {
                    _outputFile.Write("\t\t" + _clusters[c].Instances.Count);
                }
                _outputFile.WriteLine();

                _outputFile.WriteLine("==========================================================================================================");
                for (var col = 0; col < _features.Cols(); col++)
                {
                    if (!_ignore.Contains(col))
                    {
                        _outputFile.Write(_features.AttrName(col));
                        foreach (var cluster in _clusters)
                        {
                            if (cluster.Centroid[col] == Matrix.MISSING)
                            {
                                _outputFile.Write("\t?");
                            }
                            else if (_features.ValueCount(col) < 2)
                            {
                                // continuous
                                _outputFile.Write(string.Format("\t{0:0.#####}", cluster.Centroid[col]));
                            }
                            else
                            {
                                _outputFile.Write("\t" + _features.AttrValue(col, (int)cluster.Centroid[col]));
                            }
                        }
                        _outputFile.WriteLine();
                    }
                }

                double sse = 0;
                _outputFile.Write("Sum squared error:\t");
                foreach (var cluster in _clusters)
                {
                    var error = cluster.GetSSE();
                    sse += error;
                    _outputFile.Write(string.Format("\t{0:0.#####}", error));
                }
                _outputFile.WriteLine();

                _outputFile.WriteLine("Number of clusters: " + _clusters.Count);
                _outputFile.WriteLine(string.Format("Total sum squared error: {0:0.#####}", sse));
                _outputFile.WriteLine(string.Format("DBI: {0}", GetDBI()));
            }

            if (_outputFile != null)
            {
                _outputFile.Close();
            }
        }
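The TrainSingle, TrainComplete and TrainAverage helpers called above are not included in this excerpt. As a rough illustration only, the sketch below shows what one single-link merge step could look like, assuming clusters are held as plain lists of row indices and distances is the symmetric row-by-row matrix built above; the class and method names are hypothetical, not taken from the project.

using System.Collections.Generic;

public static class SingleLinkSketch
{
    // merge the two clusters whose closest pair of member rows is nearest
    public static void MergeClosestPair(List<List<int>> clusters, double[,] distances)
    {
        if (clusters.Count < 2)
        {
            return;   // nothing left to merge
        }

        var bestA = 0;
        var bestB = 1;
        var bestDistance = double.MaxValue;

        // single link: cluster-to-cluster distance is the smallest distance
        // between any member of one cluster and any member of the other
        for (var a = 0; a < clusters.Count - 1; a++)
        {
            for (var b = a + 1; b < clusters.Count; b++)
            {
                foreach (var i in clusters[a])
                {
                    foreach (var j in clusters[b])
                    {
                        if (distances[i, j] < bestDistance)
                        {
                            bestDistance = distances[i, j];
                            bestA = a;
                            bestB = b;
                        }
                    }
                }
            }
        }

        // bestB is always greater than bestA, so removing it does not shift bestA
        clusters[bestA].AddRange(clusters[bestB]);
        clusters.RemoveAt(bestB);
    }
}

Complete link would take the largest pairwise distance between members instead of the smallest, and average link the mean; that choice is the only thing that differs between the three branches above.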
Code example #27
        private void TrainEpoch(int epoch, VMatrix features, VMatrix labels, bool corrupt, bool trainAll)
        {
            var lo = new object();

            Console.Write("TrainEpoch ");
            var cl = Console.CursorLeft;

            for (var row = 0; row < features.Rows(); row++)
            {
                if (((row % 100) == 0) || (row == (features.Rows() - 1)))
                {
                    Console.SetCursorPosition(cl, Console.CursorTop);
                    Console.Write(row);
                }

                // calculate the output
                for (var layer = 0; layer < m_layers.Count; layer++)
                {
#if parallel
                    Parallel.ForEach(m_layers[layer], node =>
#else
                    foreach (var node in m_layers[layer])
#endif
                    {
                        node.net    = 0;
                        node.output = 0;
                        node.error  = 0;

                        if (layer == 0)
                        {
                            // input node
                            node.output = features.Get(row, node.index);
                        }
                        else
                        {
                            // calculate the net value
                            for (var w = 0; w < node.weights.Length - 1; w++)
                            {
                                node.net += node.weights[w] * m_layers[layer - 1][w].output;
                            }
                            // add the bias
                            node.net += node.weights[node.weights.Length - 1];

                            // calculate the output
                            if (m_activation == "relu")
                            {
                                node.output = (node.net < node.threshold ? ((node.net - node.threshold) * m_actLeak) + node.threshold : node.net * m_actSlope);
                            }
                            else if (m_activation == "softsign")
                            {
                                node.output = (node.net / (1.0 + Math.Abs(node.net)));
                            }
                            else if (m_activation == "softplus")
                            {
                                node.output = Math.Log(1.0 + Math.Exp(node.net));
                            }
                            else
                            {
                                node.output = 1.0 / (1.0 + Math.Exp(-node.net));
                            }
                        }

                        if (corrupt && (m_corruptLevel > 0) && (layer == m_layers.Count - 3) && (node.output != 0))
                        {
                            lock (lo)
                            {
                                // corrupt the output
                                if (m_rand.NextDouble() < m_corruptLevel)
                                {
                                    node.output = 0;
                                }
                            }
                        }
#if parallel
                    });
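The excerpt above is cut off before the backward pass, but the corruption branch (zeroing a node's output with probability m_corruptLevel) is the masking-noise step of a denoising autoencoder. Below is a minimal standalone sketch of the same idea applied to an input vector; CorruptionSketch and its parameters are illustrative names, not part of the class above.

using System;

public static class CorruptionSketch
{
    // zero each value with probability corruptLevel (masking noise), keep it otherwise
    public static double[] Corrupt(double[] input, double corruptLevel, Random rand)
    {
        var corrupted = new double[input.Length];
        for (var i = 0; i < input.Length; i++)
        {
            corrupted[i] = rand.NextDouble() < corruptLevel ? 0.0 : input[i];
        }
        return corrupted;
    }
}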
Code example #28
 public override void VTrain(VMatrix features, VMatrix labels)
 {
 }
Code example #29
        public override void VTrain(VMatrix features, VMatrix labels)
        {
            if (m_hidden < 1)
            {
                m_hidden = features.Cols() * 2;
            }

            if (m_k < 2)
            {
                m_k = 2;
            }

            // add the input nodes
            var iNodes = new List <Node>();

            m_inputs = features.Cols();
            for (var i = 0; i < m_inputs; i++)
            {
                iNodes.Add(new InputNode(i, 0, m_rand));
            }

            // add the pseudo-hidden nodes
            for (var n = 0; n < m_hidden; n++)
            {
                iNodes.Add(new HiddenNode(0, m_rand, null));
            }

            m_layers.Add(iNodes);

            var prevNodes = iNodes.Count + 1;
            var wIdx      = 0;                                                          // index into the weights array

            for (var k = 0; k < m_k; k++)
            {
                // add the nodes for this layer
                var hNodes = new List <Node>();

                if (k < m_k - 1)
                {
                    // add the input nodes
                    for (var i = 0; i < m_inputs; i++)
                    {
                        hNodes.Add(new InputNode(i, 0, m_rand));
                    }
                }

                // add the hidden nodes
                for (var n = 0; n < m_hidden; n++)
                {
                    if (m_weights != null)
                    {
                        hNodes.Add(new HiddenNode(prevNodes, m_rand, m_weights[wIdx++]));
                    }
                    else
                    {
                        hNodes.Add(new HiddenNode(prevNodes, m_rand, null));
                    }
                }

                prevNodes = hNodes.Count + 1;
                m_layers.Add(hNodes);
            }

            // add the output nodes - figure out how many outputs we need
            var oNodes = new List <Node>();

            for (var col = 0; col < labels.Cols(); col++)
            {
                var labelValueCount = labels.ValueCount(col);

                if (labelValueCount < 2)
                {
                    // continuous
                    if (m_weights != null)
                    {
                        oNodes.Add(new OutputNode(prevNodes, true, col, -1, m_rand, m_weights[wIdx++]));
                    }
                    else
                    {
                        oNodes.Add(new OutputNode(prevNodes, true, col, -1, m_rand, null));
                    }
                }
                else
                {
                    for (var n = 0; n < labelValueCount; n++)
                    {
                        if (m_weights != null)
                        {
                            oNodes.Add(new OutputNode(prevNodes, false, col, n, m_rand, m_weights[wIdx++]));
                        }
                        else
                        {
                            oNodes.Add(new OutputNode(prevNodes, false, col, n, m_rand, null));
                        }
                    }
                }
            }

            m_layers.Add(oNodes);

            CopyWeights();

            InitNodes();

            var trainSize          = (int)(0.75 * features.Rows());
            var trainFeatures      = new VMatrix(features, 0, 0, trainSize, features.Cols());
            var trainLabels        = new VMatrix(labels, 0, 0, trainSize, labels.Cols());
            var validationFeatures = new VMatrix(features, trainSize, 0, features.Rows() - trainSize, features.Cols());
            var validationLabels   = new VMatrix(labels, trainSize, 0, labels.Rows() - trainSize, labels.Cols());

            var    epoch        = 0;                            // current epoch number
            var    bestTrainMSE = double.MaxValue;              // best training MSE so far
            var    bestMSE      = double.MaxValue;              // best validation MSE so far
            var    bestAccuracy = double.MaxValue;              // best validation accuracy so far
            double firstMse     = 0;                            // first epoch's MSE / 1000 (stopping threshold)
            var    eCount       = 0;                            // epochs with MSE below firstMse (or perfect accuracy)
            var    bestEpoch    = 0;                            // epoch number of best MSE
            var    done         = false;

            Console.WriteLine("Epoch\tMSE (training)\t\tMSE (validation)\taccuracy (validation)");
            if (m_outputFile != null)
            {
                m_outputFile.WriteLine($"{m_layers.Count} layers, {m_layers[m_layers.Count - 1].Count} output nodes");
                m_outputFile.WriteLine("Momentum: " + m_momentum);
                m_outputFile.WriteLine();
                m_outputFile.WriteLine("Weights");
                PrintWeights();
                m_outputFile.WriteLine("Epoch\tMSE (training)\t\tMSE (validation)\taccuracy (validation)");
            }

            do
            {
                double trainMSE;

                if (m_weights != null)
                {
                    // not training
                    trainMSE = VGetMSE(trainFeatures, trainLabels);
                    epoch++;
                }
                else
                {
                    trainMSE = TrainEpoch(++epoch, trainFeatures, trainLabels);
                }

                // check the MSE after this epoch
                var mse = VGetMSE(validationFeatures, validationLabels);

                // check the validation accuracy after this epoch
                var accuracy = VMeasureAccuracy(validationFeatures, validationLabels, null);

                Console.WriteLine($"{epoch}\t{trainMSE}\t{mse}\t{accuracy}");
                if (m_outputFile != null)
                {
                    m_outputFile.WriteLine($"{epoch}\t{trainMSE}\t{mse}\t{accuracy}");
                }

                if (m_weights != null)
                {
                    // not really training
                    done = true;
                }
                else if (mse == 0.0)
                {
                    // can't get better than this
                    done = true;
                }
                else
                {
                    if ((epoch == 1) || (mse < bestMSE))
                    {
                        // save the best for later
                        bestTrainMSE = trainMSE;
                        bestMSE      = mse;
                        if (epoch == 1)
                        {
                            firstMse = mse / 1000;
                        }
                        bestAccuracy = accuracy;
                        bestEpoch    = epoch;
                        for (var layer = 0; layer < m_layers.Count - 1; layer++)
                        {
                            foreach (var node in m_layers[layer])
                            {
                                node.SaveBestWeights();
                            }
                        }
                    }

                    if (epoch > 1)
                    {
                        if ((mse < firstMse) || (accuracy == 1.0))
                        {
                            eCount++;
                            if (eCount > 10)
                            {
                                done = true;
                            }
                        }
                    }

                    if (epoch >= 10000)
                    {
                        // time to stop
                        done = true;
                    }
                }
            } while (!done);

            if (m_outputFile != null)
            {
                m_outputFile.WriteLine();
                m_outputFile.WriteLine("Weights");
                PrintWeights();
            }

            if ((bestEpoch > 0) && (bestEpoch != epoch))
            {
                for (var layer = 0; layer < m_layers.Count - 1; layer++)
                {
                    foreach (var node in m_layers[layer])
                    {
                        node.RestoreBestWeights();
                    }
                }
                if (m_outputFile != null)
                {
                    m_outputFile.WriteLine();
                    m_outputFile.WriteLine(
                        $"Best Weights (from Epoch {bestEpoch}, trainMSE={bestTrainMSE}, valMSE={bestMSE}, valAcc={bestAccuracy})");
                    PrintWeights();
                }
            }

            if (m_outputFile != null)
            {
                m_outputFile.Close();
            }
        }
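The loop above holds out the last 25% of the rows for validation, remembers the weights from the epoch with the lowest validation MSE, and stops when the validation MSE has dropped below a thousandth of the first epoch's MSE (or accuracy hits 1.0) for more than ten epochs, or after 10,000 epochs. For comparison, here is a generic patience-based stopping helper; it is only an illustration of the usual "no improvement for N epochs" convention, not the rule used above.

public sealed class EarlyStopping
{
    private readonly int _patience;
    private int _epochsWithoutImprovement;

    public double BestMse { get; private set; } = double.MaxValue;
    public int BestEpoch { get; private set; }

    public EarlyStopping(int patience)
    {
        _patience = patience;
    }

    // call once per epoch with the validation MSE; returns true when training should stop
    public bool Update(int epoch, double validationMse)
    {
        if (validationMse < BestMse)
        {
            BestMse = validationMse;       // new best: remember it and reset the counter
            BestEpoch = epoch;
            _epochsWithoutImprovement = 0;
        }
        else
        {
            _epochsWithoutImprovement++;
        }
        return _epochsWithoutImprovement >= _patience;
    }
}

A caller would also snapshot the weights whenever Update reports a new best epoch, much as the loop above does with SaveBestWeights and RestoreBestWeights.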
Code example #30
        private void TrainEpoch(int epoch, VMatrix features, VMatrix labels, int currLayer)
        {
            object lo = new object();

            Console.Write("TrainEpoch ");
            int cl = Console.CursorLeft;

            for (var row = 0; row < features.Rows(); row++)
            {
                if (((row % 100) == 0) || (row == (features.Rows() - 1)))
                {
                    Console.SetCursorPosition(cl, Console.CursorTop);
                    Console.Write(row);
                }

                // calculate the output
                for (var layer = 0; layer < m_layers.Count; layer++)
                {
#if parallel
                    Parallel.ForEach(m_layers[layer], node =>
#else
                    foreach (var node in m_layers[layer])
#endif
                    {
                        node.net    = 0;
                        node.output = 0;
                        node.error  = 0;

                        if (layer == 0)
                        {
                            // input node
                            node.net    = features.Get(row, node.index);
                            node.output = node.net;
                        }
                        else
                        {
                            // calculate the net value
                            for (var w = 0; w < node.weights.Length - 1; w++)
                            {
                                node.net += node.weights[w] * m_layers[layer - 1][w].output;
                            }
                            // add the bias
                            node.net += node.weights[node.weights.Length - 1];

                            // calculate the output
                            if (m_activation == "relu")
                            {
                                if (node.net <= node.threshold)
                                {
                                    node.output = (node.net - node.threshold) * node.alpha;
                                }
                                else
                                {
                                    node.output = (node.net - node.threshold) * node.beta;
                                }
                            }
                            else if (m_activation == "softsign")
                            {
                                node.output = (node.net / (1.0 + Math.Abs(node.net)));
                            }
                            else if (m_activation == "softplus")
                            {
                                node.output = Math.Log(1.0 + Math.Exp(node.net));
                            }
                            else
                            {
                                node.output = 1.0 / (1.0 + Math.Exp(-node.net));
                            }
                        }
#if parallel
                    });
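The relu branch above is a thresholded, two-slope activation: below node.threshold the unit responds with slope alpha, above it with slope beta. Pulled out as a standalone function it might look like the sketch below; the function and parameter names are only for illustration.

public static class ActivationSketch
{
    // alpha = 0 gives a shifted ReLU, 0 < alpha < 1 a leaky variant
    public static double ThresholdedRelu(double net, double threshold, double alpha, double beta)
    {
        return net <= threshold
            ? (net - threshold) * alpha
            : (net - threshold) * beta;
    }
}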
Code example #31
        // Calculate the MSE
        public override double VGetMSE(VMatrix features, VMatrix labels)
        {
            double sse = 0;

            Console.Write("VGetMSE ");
            var cl = Console.CursorLeft;

            for (var row = 0; row < features.Rows(); row++)
            {
                Console.SetCursorPosition(cl, Console.CursorTop);
                Console.Write(row);

                // calculate the output
                for (var layer = 0; layer < m_layers.Count; layer++)
                {
                    Parallel.ForEach(m_layers[layer], node =>
                    {
                        node.net    = 0;
                        node.output = 0;

                        if (layer == 0)
                        {
                            // input node
                            node.output = features.Get(row, node.index);
                        }
                        else
                        {
                            // calculate the net value
                            for (var w = 0; w < node.weights.Length - 1; w++)
                            {
                                var weight = node.weights[w];
                                if (layer == 1)
                                {
                                    weight *= m_pi;
                                }
                                else
                                {
                                    weight *= m_ph;
                                }
                                node.net += weight * m_layers[layer - 1][w].output;
                            }
                            // add the bias
                            node.net += node.weights[node.weights.Length - 1];

                            node.output = 1.0 / (1.0 + Math.Exp(-node.net));
                        }
                    });
                }

                // calculate the error of the output layer
                for (var n = 0; n < m_layers[m_layers.Count - 1].Count; n++)
                {
                    var node   = m_layers[m_layers.Count - 1][n] as OutputNode;
                    var target = labels.Get(row, node.labelCol);
                    if (!node.isContinuous)
                    {
                        // nominal
                        if (target == node.labelVal)
                        {
                            target = 0.9;
                        }
                        else
                        {
                            target = 0.1;
                        }
                    }
                    var error = target - node.output;

                    // update the error
                    sse += error * error;
                }
            }

            Console.WriteLine();

            return(sse / features.Rows());
        }
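In the net-value loop above, first-layer weights are scaled by m_pi and deeper weights by m_ph at prediction time. That looks like the standard dropout rescaling, where each weight is multiplied by the keep probability of the layer feeding it so the expected net input matches training; reading m_pi and m_ph as keep probabilities is an assumption on my part. The sketch below only illustrates the rescaled net-input computation, with hypothetical names.

public static class DropoutInferenceSketch
{
    // weights carries one extra trailing entry for the bias, as in the code above;
    // every non-bias weight is scaled by the keep probability of the feeding layer
    public static double NetInput(double[] weights, double[] prevOutputs, double keepProbability)
    {
        double net = 0;
        for (var w = 0; w < weights.Length - 1; w++)
        {
            net += keepProbability * weights[w] * prevOutputs[w];
        }
        net += weights[weights.Length - 1];   // the bias is not scaled
        return net;
    }
}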