Example #1
        // Render this nearest-neighbor exemplar for debugging: its feature vector,
        // the category label it votes for, its distance from the unknown vector, and
        // the original source line with a middle dot marking the character position.
        public virtual string ToString(FeatureMetaData[] FEATURES, IList<int> Y)
        {
            int[]         X        = corpus.featureVectors[corpusVectorIndex];
            InputDocument doc      = corpus.documentsPerExemplar[corpusVectorIndex];
            string        features = Trainer._toString(FEATURES, doc, X);
            int           line     = X[Trainer.INDEX_INFO_LINE];
            string        lineText = doc.getLine(line);
            int           col      = X[Trainer.INDEX_INFO_CHARPOS];

            // insert a dot right before char position
            if (!string.ReferenceEquals(lineText, null))
            {
                lineText = lineText.Substring(0, col) + '\u00B7' + lineText.Substring(col, lineText.Length - col);
            }
            int cat = Y[corpusVectorIndex]; // category label of this exemplar

            // low byte of cat is the category code; the upper bytes carry its parameters
            // (decoded only for the commented-out debug display below)
            int[] elements = Trainer.triple(cat);
            //		String display = String.format("%d|%d|%d", cat&0xFF, elements[0], elements[1]);
            string wsDisplay    = Formatter.getWSCategoryStr(cat);
            string alignDisplay = Formatter.getHPosCategoryStr(cat);
            // prefer the whitespace category string; fall back to the horizontal-position one
            string display      = !string.ReferenceEquals(wsDisplay, null) ? wsDisplay : alignDisplay;

            if (string.ReferenceEquals(display, null))
            {
                display = string.Format("{0,8}", "none");
            }
            return(string.Format("{0} ({1},d={2,1:F3}): {3}", features, display, distance, lineText));
        }
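
The ToString above is CodeBuff's debug rendering of a single nearest neighbor. A minimal usage sketch follows, assuming the kNN method from Example #2 is accessible and that classifier, unknown, FEATURES, and trainingLabels exist at the call site (those names are illustrative, not taken from the code above):

            // Hypothetical debugging loop: dump each nearest neighbor found for an
            // unknown feature vector. MAX_CONTEXT_DIFF_THRESHOLD2 is the relaxed
            // threshold referenced in Example #2, used here only for illustration.
            Neighbor[] nearest = classifier.kNN(unknown, 11, org.antlr.codebuff.Trainer.MAX_CONTEXT_DIFF_THRESHOLD2);
            foreach (Neighbor n in nearest)
            {
                Console.WriteLine(n.ToString(FEATURES, trainingLabels));
            }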
Example #2
        // Build a human-readable analysis of one prediction: find (or reuse cached)
        // nearest neighbors for the unknown feature vector, pick the winning category,
        // and return the feature header, the vote summary, and each neighbor's details.
        public virtual string getPredictionAnalysis(InputDocument doc, int k, int[] unknown, IList<int> Y, double distanceThreshold)
        {
            FeatureVectorAsObject key = new FeatureVectorAsObject(unknown, FEATURES);

            // reuse a cached kNN result for this feature vector when available
            Neighbor[] kNN = null;
            neighborCache.TryGetValue(key, out kNN);
            nNNCalls++;
            if (kNN == null)
            {
                kNN = this.kNN(unknown, k, distanceThreshold);
                neighborCache[key] = kNN;
            }
            else
            {
                nNNCacheHits++;
            }
            IDictionary <int, MutableDouble> similarities = getCategoryToSimilarityMap(kNN, k, Y);
            int cat = getCategoryWithMaxValue(similarities);

            if (cat == -1)
            {
                // try with less strict match threshold to get some indication of alignment
                kNN          = this.kNN(unknown, k, org.antlr.codebuff.Trainer.MAX_CONTEXT_DIFF_THRESHOLD2);
                similarities = getCategoryToSimilarityMap(kNN, k, Y);
                cat          = getCategoryWithMaxValue(similarities);
            }

            string displayCat;
            int    c = cat & 0xFF; // low byte holds the raw category code

            if (c == org.antlr.codebuff.Trainer.CAT_INJECT_NL || c == org.antlr.codebuff.Trainer.CAT_INJECT_WS)
            {
                displayCat = Formatter.getWSCategoryStr(cat);
            }
            else
            {
                displayCat = Formatter.getHPosCategoryStr(cat);
            }
            displayCat = !string.ReferenceEquals(displayCat, null) ? displayCat : "none";

            StringBuilder buf = new StringBuilder();

            buf.Append(Trainer.featureNameHeader(FEATURES));
            buf.Append(Trainer._toString(FEATURES, doc, unknown) + "->" + similarities + " predicts " + displayCat);
            buf.Append("\n");
            if (kNN.Length > 0)
            {
                // list at most the k closest neighbors, one per line
                kNN = kNN.Take(Math.Min(k, kNN.Length)).ToArray();
                foreach (Neighbor n in kNN)
                {
                    buf.Append(n.ToString(FEATURES, Y));
                    buf.Append("\n");
                }
            }
            return(buf.ToString());
        }
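
A hedged call-site sketch for getPredictionAnalysis; classifier, doc, unknown, and trainingLabels are assumed names, and k = 11 plus the relaxed threshold are arbitrary illustrative choices rather than values taken from the example above:

            // Hypothetical call site: ask the classifier to explain one prediction
            // and print the per-neighbor breakdown it returns.
            string analysis = classifier.getPredictionAnalysis(
                doc, 11, unknown, trainingLabels,
                org.antlr.codebuff.Trainer.MAX_CONTEXT_DIFF_THRESHOLD2);
            Console.WriteLine(analysis);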