Пример #1
0
 /// <summary>
 /// Lifts a FiniteDist&lt;A&gt; into a sampleable PrimitiveDist&lt;A&gt;: each sample
 /// draws a uniform value in [0, 1] and picks the matching outcome from the
 /// finite distribution via <c>dist.Pick</c>.
 /// </summary>
 /// <param name="dist">The finite distribution to sample from.</param>
 /// <returns>A lazily sampling distribution backed by <paramref name="dist"/>.</returns>
 public static PrimitiveDist<A> ToSampleDist<A>(this FiniteDist<A> dist)
 {
     // FIX: hoist the uniform source out of the sampling lambda. The original
     // allocated a new ContinuousUniform on EVERY sample; besides the wasted
     // allocation, on MathNet versions where each distribution gets its own
     // freshly seeded RNG this produced identical/correlated draws when
     // samples were taken in quick succession.
     var uniform = new MathNet.Numerics.Distributions.ContinuousUniform();
     return new SampleDist<A>(() => dist.Pick(Prob(uniform.Sample())));
 }
Пример #2
0
        /// <summary>
        /// Pairwise ranking training loop (Algorithm 1 of the referenced paper).
        /// Learns <c>questionSimWeights</c> and <c>tagSimWeights</c> by gradient
        /// descent on a pairwise loss over (related tag, unrelated tag) pairs:
        /// whenever a related tag scores below an unrelated one, the score gap is
        /// added to the loss and gradients are accumulated.
        /// </summary>
        /// <param name="QFeatureNum">Number of question-similarity features (dimension of questionSimWeights).</param>
        /// <param name="TFeatureNum">Number of tag-similarity features (dimension of tagSimWeights).</param>
        /// <param name="MaxIter">Maximum number of passes over the question set.</param>
        /// <param name="LearningRate">Base gradient-descent step size.</param>
        /// <param name="Lamda">Stored in this.lamda; its use is not visible in this method (presumably regularization — verify in the helpers).</param>
        /// <param name="StopGap">Loss-improvement threshold; note the early-stop break below is currently commented out, so it only gates updating lastLoss.</param>
        /// <param name="decay">Learning-rate decay base; effective rate at iteration t is LearningRate * decay^t (default 1.0 = no decay).</param>
        public void Train(int QFeatureNum, int TFeatureNum, int MaxIter, double LearningRate, double Lamda, double StopGap
            ,double decay = 1.0)
        {
            this.QUESTIONFEATURENUM = QFeatureNum;
            this.TAGFEATURENUM = TFeatureNum;
            this.learningrate = LearningRate;
            this.lamda = Lamda;
            this.MAX_Iter = MaxIter;
            this.Stop_Gap = StopGap;

            // NOTE(review): the local is named "normal" but this is a Uniform(0,1)
            // distribution — both weight vectors are uniformly initialised.
            MathNet.Numerics.Distributions.ContinuousUniform normal = new MathNet.Numerics.Distributions.ContinuousUniform();
            tagSimWeights = (Vector)Vector.Build.Random(TAGFEATURENUM, normal);
            questionSimWeights = (Vector)Vector.Build.Random(QUESTIONFEATURENUM, normal);
            for (int t = 0; t < this.MAX_Iter; t++)
            {
                loss = 0;
                int incorrect_pair = 0;
                // Gradient accumulators for the question- and tag-weight vectors;
                // reset after each question's update (see end of inner loop).
                DenseVector Deriviate_W = new DenseVector(QUESTIONFEATURENUM);
                DenseVector Deriviate_V = new DenseVector(TAGFEATURENUM);
                for (int i = 0; i < questions.Count; i++)
                {
                    tagFrequency.Clear();
                    var neighbours = questionNeighbours[i];
                    var question = questions[i];
                    candidates = instanceCandidates[i];

                    // First pass extracts and caches features (Init); later passes
                    // rebuild the per-instance similarity structures from the caches.
                    if(t==0)
                        Init(question, neighbours);
                    else
                    {
                        tagSim = new DenseMatrix(candidates.Count, candidates.Count);
                        questionsim = new DenseVector(neighbours.Count);
                        tagSimFeatures = instanceTagSimFeatures[i];
                        questionSimFeatures = instanceQuestionSimFeatures[i];

                        // For each candidate tag, build its question-feature vector as
                        // the element-wise max of questionSimFeatures over the
                        // neighbours that contain that tag.
                        tagQuestionFeature = new Dictionary<string, Vector>();
                        foreach (var candidate in candidates)
                        {
                            Vector tmp_v = new DenseVector(QUESTIONFEATURENUM);
                            for (int k = 0; k < QUESTIONFEATURENUM; k++)
                            {
                                for (int j = 0; j < neighbours.Count; j++)
                                {
                                    if (neighbours[j].RelatedTags.Contains(candidate.Key))
                                        tmp_v[k] = Math.Max(tmp_v[k], questionSimFeatures[k][j]);// We implement function g() by Max function
                                }
                            }
                            tagQuestionFeature.Add(candidate.Key, tmp_v);
                        }
                    }
                    // Recompute similarities/scores for this question under the
                    // current weights, then rank the candidate tags.
                    ComputeTagSim();
                    DerivateForPi = null;
                    ComputeTagSignificance(neighbours);
                    ComputeQuestionSim(question, neighbours);
                    ComputeQuestionTagSimHeriusticlly(question);
                    ComputeDerivateofH();
                    RankingTags(question, neighbours);

                    // Pairwise loss: every (related, unrelated) tag pair where the
                    // related tag is ranked lower contributes its score gap to the
                    // loss and its gradients to the accumulators.
                    foreach(var postag in question.RelatedTags)
                    {
                        if (!tagScore.ContainsKey(postag))
                            continue;
                        foreach(var negtag in question.UnRelatedTags)
                        {
                            if(tagScore[postag]<tagScore[negtag])
                            {
                                incorrect_pair += 1;
                                loss += -tagScore[postag] + tagScore[negtag];
                                Deriviate_W += DenseVector.OfVector(ComputeDeriviateOfW(postag, negtag));
                                Deriviate_V += DenseVector.OfVector(ComputeDeriviateOfV(postag, negtag));
                            }
                        }
                    }

                    // Stochastic update: weights are stepped after EACH question,
                    // with the learning rate decayed by decay^t.
                    for (int i2 = 0; i2 < QUESTIONFEATURENUM; i2++)
                    {
                        questionSimWeights[i2] -= (learningrate *(Math.Pow(decay,t)) * Deriviate_W[i2]);
                    }
                    for (int i2 = 0; i2 < TAGFEATURENUM; i2++)
                    {
                        tagSimWeights[i2] -= learningrate * (Math.Pow(decay, t))  * (Deriviate_V[i2]);
                    }
                    // Reset the accumulators for the next question.
                    Deriviate_W = new DenseVector(QUESTIONFEATURENUM);
                    Deriviate_V = new DenseVector(TAGFEATURENUM);
                }

                // Early stopping is DISABLED (the break is commented out), so the
                // loop always runs MAX_Iter iterations; lastLoss is only updated
                // while the loss is still improving by at least StopGap.
                if(lastLoss-loss<StopGap)
                {
                   // break;
                }
                else
                {
                    lastLoss = loss;
                }
                // Per-iteration progress dump: loss, inversion count, and both
                // weight vectors.
                Console.Write("loss:{0}\tIncorrect_Pair:{1}\t", loss,incorrect_pair);
                for (int i = 0; i < QUESTIONFEATURENUM; i++)
                { Console.Write("QuestionFeature{1}:{0}\t", questionSimWeights[i],i); }
                for (int i = 0; i < TAGFEATURENUM; i++)
                { Console.Write("TagFeature{1}:{0}\t", tagSimWeights[i],i); }
                Console.WriteLine();

            }
            PrintResult();
        }
Пример #3
0
        /// <summary>
        /// First-pass setup for a training instance: creates the feature
        /// extractor, draws Uniform(0,1) initial weight vectors, extracts the
        /// tag- and question-similarity features, and allocates fresh
        /// similarity containers sized to the instance.
        /// </summary>
        /// <param name="question">The question being trained on.</param>
        /// <param name="neighbours">Its neighbouring questions.</param>
        private void Init(Question question, List<Question> neighbours)
        {
            featureExtractor = new RandomFeatureExtractor();

            // Uniform(0,1) initialisation for both weight vectors.
            var uniform = new MathNet.Numerics.Distributions.ContinuousUniform();
            tagSimWeights = (Vector)Vector.Build.Random(TAGFEATURENUM, uniform);
            questionSimWeights = (Vector)Vector.Build.Random(QUESTIONFEATURENUM, uniform);

            // Per-instance feature extraction.
            tagSimFeatures = featureExtractor.ExtractTagSim(candidates.Keys.ToList());
            questionSimFeatures = featureExtractor.ExtractQuestionSim(question, neighbours);

            // Empty similarity structures sized for this instance.
            tagSim = new DenseMatrix(candidates.Count, candidates.Count);
            questionsim = new DenseVector(neighbours.Count);
        }
Пример #4
0
 /// <summary>
 /// Draws vent sizes uniformly from the interval [0.8, 1.5].
 /// </summary>
 /// <param name="count">
 /// Number of vent sizes to draw (default 1). NOTE(review): the original code
 /// had no count — confirm the number callers actually expect.
 /// </param>
 /// <returns>An array of <paramref name="count"/> uniform samples.</returns>
 public double[] GetVentSizes(int count = 1)
 {
     // BUG FIX: ContinuousUniform.Samples() yields an INFINITE lazy stream, so
     // the original `Samples().ToArray()` never terminated (it would run until
     // OutOfMemory). Take a finite number of samples before materialising.
     var uniformDistribution = new MathNet.Numerics.Distributions.ContinuousUniform(0.8, 1.5);
     return uniformDistribution.Samples().Take(count).ToArray();
 }
Пример #5
0
 /// <summary>
 /// Builds a rowcount-by-columncount matrix whose entries are drawn
 /// independently from a Uniform(0,1) distribution.
 /// </summary>
 /// <param name="rowcount">Number of rows.</param>
 /// <param name="columncount">Number of columns.</param>
 /// <returns>The randomly filled matrix.</returns>
 private Matrix RandomTagFeature(int rowcount, int columncount)
 {
     var uniform = new MathNet.Numerics.Distributions.ContinuousUniform();
     return (Matrix)Matrix.Build.Random(rowcount, columncount, uniform);
 }
Пример #6
0
 /// <summary>
 /// Builds a vector of the given length whose entries are drawn
 /// independently from a Uniform(0,1) distribution.
 /// </summary>
 /// <param name="count">Length of the vector.</param>
 /// <returns>The randomly filled vector.</returns>
 private Vector RandomQuestionFeature(int count)
 {
     var uniform = new MathNet.Numerics.Distributions.ContinuousUniform();
     return (Vector)Vector.Build.Random(count, uniform);
 }