public RowArrayPtr <SparseRowValue> getRowPtr(int row)
 {
     // Returns a pointer over the stored entries of the given row, or the
     // shared empty pointer when the row has no entries.
     // Use TryGetValue: the original did ContainsKey + two indexer reads,
     // i.e. three hash lookups for one row fetch.
     if (val.TryGetValue(row, out var rowData))
     {
         return(new RowArrayPtr <SparseRowValue> (rowData, 0, rowData.Length - 1));
     }
     return(RowArrayPtr <SparseRowValue> .Empty());
 }
 public RowArrayPtr <SparseRowValue> getRowPtr(int row)
 {
     if (rowLength(row) > 0)
     {
         return(new RowArrayPtr <SparseRowValue>(val, rowPtr[row], rowPtr[row + 1] - 1));
     }
     else
     {
         return(RowArrayPtr <SparseRowValue> .Empty());
     }
 }
 public RowArrayPtr <SparseRowValue> getRowPtr(int row)
 {
     if (val[row] != null)
     {
         return(new RowArrayPtr <SparseRowValue>(val[row], 0, val[row].Length - 1));
     }
     else
     {
         return(RowArrayPtr <SparseRowValue> .Empty());
     }
 }
Example #4
0
        public RowArrayPtr <SparseRowValue> getRowPtr(int row)
        {
            // Valid rows are 0 .. rowCount - 1. The previous check used
            // "row <= rowCount", which accepted row == rowCount and would
            // read past the end of the row table (cf. rowPtr[row + 1]
            // access in the CSR variant of this method).
            ContractAssertions.Requires <ArgumentOutOfRangeException> (row >= 0 && row < rowCount, "Array Index out of bounds");
            int length = rowLength(row);   // evaluate once instead of twice
            if (length > 0)
            {
                var v = getRow(row);

                return(new RowArrayPtr <SparseRowValue> (v.ToArray(), 0, length - 1));
            }
            else
            {
                return(RowArrayPtr <SparseRowValue> .Empty());
            }
        }
Example #5
0
        double predict_probability(Model model_, RowArrayPtr <SparseRowValue> x, double[] prob_estimates)
        {
            // Probability estimates are only defined for probability-capable
            // solvers; for anything else report 0 without touching the output.
            if (!model_.param.check_probability_model())
            {
                return(0);
            }

            int nr_class = model_.nr_class;
            // Binary models store a single weight vector, so only one raw
            // decision value is produced.
            int nr_w = (nr_class == 2) ? 1 : nr_class;

            double label = predict_values(model_, x, prob_estimates);

            // Map each raw decision value through the logistic sigmoid.
            for (int i = 0; i < nr_w; i++)
            {
                prob_estimates[i] = 1 / (1 + Math.Exp(-prob_estimates[i]));
            }

            if (nr_class == 2) // for binary classification
            {
                // The second class probability is the complement of the first.
                prob_estimates[1] = 1.0 - prob_estimates[0];
            }
            else
            {
                // Multi-class: normalise so the probabilities sum to one.
                double sum = 0;
                for (int i = 0; i < nr_class; i++)
                {
                    sum += prob_estimates[i];
                }

                for (int i = 0; i < nr_class; i++)
                {
                    prob_estimates[i] /= sum;
                }
            }

            return(label);
        }
        /// <summary>
        /// Dual coordinate-descent solver; the structure matches LIBLINEAR's
        /// solve_mcsvm_cs (Crammer-Singer multi-class SVM) — TODO confirm
        /// against the declaring class. On return <paramref name="w"/> holds
        /// the learned weights, addressed as w[featureIndex * nr_class + class]
        /// (see the w_i_idx computations below).
        /// </summary>
        /// <param name="w">Output weight vector; cleared and filled in place.
        /// Assumed to hold at least w_size * nr_class elements.</param>
        public void Solve(double[] w)
        {
            int i, m, s;
            int l_nrclass = l * nr_class;
            int iter      = 0;

            // alpha: dual variables, nr_class per training row.
            // alpha_index / active_size_i: per-row permutation of class
            // indices used to shrink classes out of a row's active set.
            double[] alpha       = new double[l_nrclass];
            int[]    alpha_index = new int[l_nrclass];
            double[] alpha_new   = new double[nr_class];
            int[]    index       = new int[l];
            double[] QD          = new double[l];
            int[]    d_ind       = new int[nr_class];
            double[] d_val       = new double[nr_class];

            int[] y_index     = new int[l];
            int   active_size = l;

            byte[] active_size_i  = new byte[l];
            double eps_shrink     = Math.Max(10.0 * eps, 1.0); // stopping tolerance for shrinking
            bool   start_from_all = true;

            // Initial alpha can be set here. Note that
            // sum_m alpha[i*nr_class+m] = 0, for all i=1,...,l-1
            // alpha[i*nr_class+m] <= C[GETI(i)] if prob->y[i] == m
            // alpha[i*nr_class+m] <= 0 if prob->y[i] != m
            // If initial alpha isn't zero, uncomment the for loop below to initialize w

            Array.Clear(alpha, 0, l_nrclass);

            Array.Clear(w, 0, w_size * nr_class);

            // Parallel.For(0,l, value  => {
            //     for(int m2 = 0; m2 < nr_class; m2++)
            //         alpha_index[(value * nr_class) + m2] = m2;

            //     QD[value] = prob.x.nrm2_sq(value);

            //     active_size_i[value] = (byte)nr_class;
            //     y_index[value] = (int)prob.y[value];
            //     index[value] = value;
            //  });

            // Per-row setup: identity class permutation, squared row norm
            // (QD), the row's true class, and the sweep order.
            for (i = 0; i < l; i++)
            {
                for (m = 0; m < nr_class; m++)
                {
                    alpha_index[(i * nr_class) + m] = m;
                }

                QD[i] = prob.x.nrm2_sq(i);

                active_size_i[i] = (byte)nr_class;
                y_index[i]       = (int)prob.y[i];
                index[i]         = i;
            }

            Codetimer ct = new Codetimer();
            double    QDi, stopping, minG, maxG, d;
            int       yi, inProbY, w_i_idx, nz_d, iNrclass, asi;

            while (iter < max_iter)
            {
                stopping = double.NegativeInfinity;
                // Fisher-Yates shuffle of the active rows so each outer
                // iteration sweeps them in a fresh random order.
                for (i = 0; i < active_size; i++)
                {
                    //int j = i + (int)(rand.genrand_real1()*remSize) ;
                    int j = i + rand.Next(active_size - i);
                    Helper.swap(index, i, j);
                }
                for (s = 0; s < active_size; s++)
                {
                    i        = index[s];
                    QDi      = QD[i];
                    yi       = y_index[i];
                    inProbY  = (int)prob.y[i];
                    iNrclass = i * nr_class;
                    asi      = active_size_i[i];

                    if (QDi > 0)
                    {
                        // G[m]: gradient for active class m — starts at 1 for
                        // wrong classes, 0 for the true class, then gains
                        // w_m . x_i from the sparse row below.
                        for (m = 0; m < asi; m++)
                        {
                            G[m] = 1;
                        }

                        if (yi < asi)
                        {
                            G[yi] = 0;
                        }

                        // Accumulate the dot products for every active class;
                        // the inner loop is unrolled two classes at a time
                        // with a scalar tail.
                        RowArrayPtr <SparseRowValue> rap = prob.x.getRowPtr(i);
                        for (int j = 0; j < rap.Length; j++)
                        {
                            w_i_idx = rap.get(j).index *nr_class;
                            for (m = 0; m < asi - 1; m += 2)
                            {
                                G[m]     += w[w_i_idx + alpha_index[iNrclass + m]] * rap.get(j).value;
                                G[m + 1] += w[w_i_idx + alpha_index[iNrclass + m + 1]] * rap.get(j).value;
                            }
                            if (m < asi)
                            {
                                G[m] += w[w_i_idx + alpha_index[iNrclass + m]] * rap.get(j).value;
                            }
                        }

                        // Gradient range over classes whose alpha is strictly
                        // below its upper bound; maxG - minG measures the KKT
                        // violation for this row.
                        minG = Double.PositiveInfinity;
                        maxG = Double.NegativeInfinity;
                        for (m = 0; m < asi; m++)
                        {
                            if (G[m] < minG && alpha[iNrclass + alpha_index[iNrclass + m]] < 0)
                            {
                                minG = G[m];
                            }
                            if (G[m] > maxG)
                            {
                                maxG = G[m];
                            }
                        }

                        if (yi < asi)
                        {
                            // NOTE(review): "G[yi] < minG" is tested twice;
                            // the duplicate is redundant (but harmless) and
                            // one occurrence can be dropped.
                            if (G[yi] < minG && G[yi] < minG && alpha[iNrclass + inProbY] < C[inProbY])
                            {
                                minG = G[yi];
                            }
                        }


                        // Shrinking: classes satisfying be_shrunk are swapped
                        // to the inactive tail of this row's permutation.
                        for (m = 0; m < asi; m++)
                        {
                            if (be_shrunk(i, m, yi, alpha[iNrclass + alpha_index[iNrclass + m]], minG))
                            {
                                asi--;

                                while (asi > m)
                                {
                                    if (!be_shrunk(i, asi, yi,
                                                   alpha[iNrclass + alpha_index[iNrclass + asi]], minG))
                                    {
                                        Helper.swap(alpha_index, iNrclass + m, iNrclass + asi);
                                        Helper.swap(G, m, asi);
                                        // Keep y_index pointing at the true
                                        // class after the swap.
                                        if (yi == asi)
                                        {
                                            yi = y_index[i] = m;
                                        }
                                        else if (yi == m)
                                        {
                                            yi = y_index[i] = asi;
                                        }
                                        break;
                                    }
                                    asi--;
                                }
                            }
                        }
                        active_size_i[i] = (byte)asi;

                        // A row with at most one active class cannot change
                        // any alpha; drop it from the active row set.
                        if (asi <= 1)
                        {
                            active_size--;
                            Helper.swap(index, s, active_size);
                            s--;
                            continue;
                        }

                        if (maxG - minG > CONSTANTS.SMALL_ERR)
                        {
                            stopping = Math.Max(maxG - minG, stopping);

                            // B[m] = G[m] - QDi * alpha[m]: input to the
                            // per-row sub-problem solver.
                            for (m = 0; m < asi; m++)
                            {
                                B[m] = G[m] - QDi * alpha[iNrclass + alpha_index[iNrclass + m]];
                            }

                            solve_sub_problem(QDi, yi, C[inProbY], asi, alpha_new);

                            // Commit the new alphas, recording in d_ind/d_val
                            // only the classes whose value actually moved.
                            nz_d = 0;
                            for (m = 0; m < asi; m++)
                            {
                                d = alpha_new[m] - alpha[iNrclass + alpha_index[iNrclass + m]];
                                alpha[iNrclass + alpha_index[iNrclass + m]] = alpha_new[m];
                                if (Math.Abs(d) > CONSTANTS.SMALL_ERR)
                                {
                                    d_ind[nz_d] = alpha_index[iNrclass + m];
                                    d_val[nz_d] = d;
                                    nz_d++;
                                }
                            }

                            // Apply the alpha deltas to w:
                            // w[feature, class] += d_class * x[feature].
                            rap = prob.x.getRowPtr(i);
                            for (int j = 0; j < rap.Length; j++)
                            {
                                w_i_idx = rap.get(j).index *nr_class;
                                for (m = 0; m < nz_d; m++)
                                {
                                    w[w_i_idx + d_ind[m]] += d_val[m] * rap.get(j).value;
                                }
                            }
                        }
                    }
                }
                iter++;
                _logger.LogInformation("Loop Time {0}ms", ct.getTime());
                ct.reset();

                if (iter % 10 == 0)
                {
                    Console.Write(".");
                    //_logger.LogInformation(".");
                }

                // Converged under the (looser) shrinking tolerance: either we
                // are truly done (sweeping all rows and under eps), or we
                // re-activate everything and halve eps_shrink toward eps.
                if (stopping < eps_shrink)
                {
                    if (stopping < eps && start_from_all == true)
                    {
                        break;
                    }
                    else
                    {
                        active_size = l;
                        // for(i = 0; i < l; i++)
                        //     active_size_i[i] = nr_class;
                        Util.Memset(active_size_i, (byte)nr_class, l);
                        Console.Write("*");
                        //_logger.LogInformation("*");
                        eps_shrink     = Math.Max(eps_shrink / 2, eps);
                        start_from_all = true;
                    }
                }
                else
                {
                    start_from_all = false;
                }
            }

            Console.WriteLine();
            _logger.LogInformation("optimization finished, #iter = {0}", iter);
            if (iter >= max_iter)
            {
                _logger.LogInformation("WARNING: reaching max number of iterations");
            }

            // Calculate the objective value 0.5*||w||^2 + sum(alpha) minus
            // the true-class alphas; nSV counts nonzero dual variables.
            double v   = 0;
            int    nSV = 0;

            for (i = 0; i < w_size * nr_class; i++)
            {
                v += w[i] * w[i];
            }
            v = 0.5 * v;
            for (i = 0; i < l_nrclass; i++)
            {
                v += alpha[i];
                if (Math.Abs(alpha[i]) > 0)
                {
                    nSV++;
                }
            }

            int nr_class_ctr = 0;

            for (i = 0; i < l; i++, nr_class_ctr += nr_class)
            {
                v -= alpha[nr_class_ctr + (int)prob.y[i]];
            }
            _logger.LogInformation("Objective value = {0}", v);
            _logger.LogInformation("nSV = {0}", nSV);
        }
Example #7
0
 public double predict(Model model, RowArrayPtr <SparseRowValue> x)
 {
     // Convenience wrapper: allocate scratch space for the per-class
     // decision values and return only the winning label.
     return(predict_values(model, x, new double[model.nr_class]));
 }
Example #8
0
        public double predict_values(Model model, RowArrayPtr <SparseRowValue> x, double[] dec_values)
        {
            // Computes per-class decision values for sample x into dec_values
            // and returns the predicted label (or the raw decision value for
            // regression models). dec_values must hold at least nr_w entries.

            // n = number of weight rows: one per feature, plus one extra row
            // for the bias term when the model was trained with bias >= 0.
            int n;

            if (model.bias >= 0)
            {
                n = model.nr_feature + 1;
            }
            else
            {
                n = model.nr_feature;
            }

            double[] w = model.w;

            // Binary models (except MCSVM_CS) store a single weight vector.
            int nr_w;

            if (model.nr_class == 2 && model.param.solver_type != SOLVER_TYPE.MCSVM_CS)
            {
                nr_w = 1;
            }
            else
            {
                nr_w = model.nr_class;
            }

            for (int i = 0; i < nr_w; i++)
            {
                dec_values[i] = 0;
            }

            for (int i = 0; i < x.Length; i++)
            {
                var entry = x.get(i);   // fetch once instead of twice per entry
                double val = entry.value;
                int    idx = entry.index;

                // The dimension of testing data may exceed that of training.
                // Feature indices are 0-based in this port (w is addressed as
                // idx * nr_w with no -1, consistent with Solve), so valid
                // indices are 0..n-1; the previous "idx <= n" guard let
                // idx == n index past the end of w.
                if (idx < n)
                {
                    idx *= nr_w;
                    for (int j = 0; j < nr_w; j++)
                    {
                        dec_values[j] += w[idx + j] * val;
                    }
                }
            }

            if (model.nr_class == 2)
            {
                if (model.param.check_regression_model())
                {
                    return(dec_values[0]);
                }
                else
                {
                    // Binary classification: sign of the single decision value.
                    return((dec_values[0] > 0) ? model.label[0] : model.label[1]);
                }
            }
            else
            {
                // Multi-class: pick the label with the largest decision value.
                int dec_max_idx = 0;
                for (int i = 1; i < model.nr_class; i++)
                {
                    if (dec_values[i] > dec_values[dec_max_idx])
                    {
                        dec_max_idx = i;
                    }
                }
                return(model.label[dec_max_idx]);
            }
        }