Example 1
 /// <summary>
 /// Indicator (feature) function.
 /// </summary>
 /// <param name="feature">Feature vector X; the dimension tested is the index given at construction.</param>
 /// <param name="label">Label Y.</param>
 /// <returns>1 if feature[index] equals value and label equals this.label, otherwise 0.</returns>
 public int Apply(MLFeature <FeatureT> feature, LabelT label)
 {
     if (feature[index].Equals(value) && label.Equals(this.label))
     {
         return(1);
     }
     return(0);
 }
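The method above is the standard binary indicator used as a maximum-entropy feature function. As a plain restatement of its semantics (an int[] stands in for MLFeature<int>; the parameter names mirror the index, value, and label fields referenced in the snippet, so this is an illustrative sketch rather than the original class):

 // Fires (returns 1) only when the chosen feature dimension carries the expected
 // value and the label matches; otherwise returns 0.
 static int IndicatorApply(int[] feature, int index, int value, int label, int y)
 {
     return(feature[index] == value && y == label ? 1 : 0);
 }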
Example 2
        /// <summary>
        /// Solves for delta_i, the IIS update step for feature fi, using Newton's method.
        /// </summary>
        /// <param name="empirical_e">Empirical expectation of f_i.</param>
        /// <param name="fi">Key of f_i in the feature-function dictionary.</param>
        /// <returns>The update delta, or NaN if the iteration does not converge.</returns>
        //private double iis_solve_delta(double empirical_e, int fi)
        private double iis_solve_delta(double empirical_e, string fi)
        {
            double delta = 0;
            double f_newton, df_newton;

            double[,] p_yx = calc_prob_y_given_x();

            int iters = 0;

            while (iters < 50)                                  // Newton's method
            {
                f_newton = df_newton = 0;
                for (int i = 0; i < instances.Count; i++)
                {
                    MLInstance <int, int> instance = instances[i];
                    MLFeature <int>       tfeature = instance.Feature;
                    Feature feature = new Feature(tfeature);
                    int     index   = features.IndexOf(feature);
                    if (index == -1)
                    {
                        index = feature.getIndex(instance.Feature.Count);
                        if (string.Join("", features[index]) == string.Join("", feature))
                        {
                            features[index] = feature;
                        }
                    }
                    for (int y = minY; y <= maxY; y++)
                    {
                        int    f_sharp = apply_f_sharp(feature, y);
                        double prod    = p_yx[index, y] * functions[fi].Apply(feature, y) * Math.Exp(delta * f_sharp);
                        f_newton  += prod;
                        df_newton += prod * f_sharp;
                    }
                }
                f_newton  = empirical_e - f_newton / N;     // g
                df_newton = -df_newton / N;                 // derivative of g

                if (Math.Abs(f_newton) < 0.0000001)
                {
                    return(delta);
                }

                double ratio = f_newton / df_newton;

                delta -= ratio;
                if (Math.Abs(ratio) < EPSILON)
                {
                    return(delta);
                }
                iters++;
            }
            return(double.NaN);                          // did not converge within 50 iterations: return NaN
        }
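The loop above is a one-dimensional Newton iteration: it solves g(delta) = 0 via the update delta -= g(delta) / g'(delta), stopping when either the residual g or the step size becomes small, and giving up (returning NaN) after 50 iterations. A minimal, self-contained sketch of that same pattern, using hypothetical Func-based delegates rather than anything from the original class (assumes using System):

        // Generic Newton root-finder mirroring the stopping rules used above:
        // stop on a small residual |g(x)|, stop on a small step |g(x)/g'(x)|,
        // or give up after maxIters iterations and report NaN.
        private static double NewtonSolve(Func<double, double> g, Func<double, double> gPrime,
                                          double x0 = 0.0, double tol = 1e-7, int maxIters = 50)
        {
            double x = x0;
            for (int iters = 0; iters < maxIters; iters++)
            {
                double gx = g(x);
                if (Math.Abs(gx) < tol)
                {
                    return(x);
                }
                double step = gx / gPrime(x);
                x -= step;
                if (Math.Abs(step) < tol)
                {
                    return(x);
                }
            }
            return(double.NaN);
        }

For example, NewtonSolve(d => d * d - 2, d => 2 * d, 1.0) converges to sqrt(2) ≈ 1.41421 within a handful of iterations.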
Example 3
 /// <summary>
 /// Computes the empirical expectation of each feature function over the training data.
 /// </summary>
 private void calc_empirical_expects()
 {
     foreach (MLInstance <int, int> instance in instances)
     {
         int             y       = instance.Label;
         MLFeature <int> feature = instance.Feature;
         //for (int i = 0; i < functions.Count; i++)
         int i = 0;
         foreach (string key in functions.Keys)
         {
             empirical_expects[i] += functions[key].Apply(feature, y);
             i++;
         }
     }
     for (int i = 0; i < functions.Count; i++)
     {
         empirical_expects[i] /= 1.0 * N;
     }
     //if (DEBUG)  System.out.println(Arrays.toString(empirical_expects));
 }
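In other words, the empirical expectation of each indicator f_i is simply its average value over the N training instances. A self-contained toy version of the same computation, with value tuples standing in for MLInstance/MLFeature purely for illustration (assumes using System, System.Collections.Generic, and System.Linq):

 // Average an indicator feature over three (feature vector, label) training pairs.
 var instances = new List<(int[] x, int y)>
 {
     (new[] { 1, 0 }, 1),
     (new[] { 1, 1 }, 0),
     (new[] { 0, 0 }, 1),
 };
 // f(x, y) = 1 if x[0] == 1 and y == 1, else 0
 Func<int[], int, int> f = (x, y) => x[0] == 1 && y == 1 ? 1 : 0;
 double empirical = instances.Sum(inst => f(inst.x, inst.y)) / (double)instances.Count;
 // empirical == 1.0 / 3.0: the feature fires on exactly one of the three instances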
Example 4
 /// <summary>Creates an instance from an existing feature vector, with no label.</summary>
 public MLInstance(MLFeature <FeatureT> ft)
 {
     Feature = ft;
 }
Example 5
 /// <summary>Creates a labelled instance from a raw feature list.</summary>
 public MLInstance(LabelT label, List <FeatureT> flist)
 {
     Feature = new MLFeature <FeatureT>(flist);
     Label   = label;
 }
Example 6
 /// <summary>Creates an unlabelled instance by wrapping a raw feature list in an MLFeature.</summary>
 public MLInstance(List <FeatureT> flist)
 {
     Feature = new MLFeature <FeatureT>(flist);
 }
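Taken together, the three constructors cover the usual call sites. A brief usage sketch, assuming the MLInstance/MLFeature types from these examples are available and that Feature and Label are readable properties (as Examples 2 and 3 suggest); with both type arguments set to int, the generic parameter order does not matter here:

 var flist    = new List<int> { 1, 0, 1 };
 var labelled = new MLInstance <int, int>(0, flist);       // Example 5: label plus raw feature list
 var bare     = new MLInstance <int, int>(flist);          // Example 6: raw feature list only
 var wrapped  = new MLInstance <int, int>(bare.Feature);   // Example 4: reuse an existing MLFeature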