public DependencyInstance(string[] sentence, string[] pos, FeatureVector fv)
 {
     Sentence = sentence;
     POS = pos;
     Fv = fv;
     Length = sentence.Length;
 }
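        // Inserts a pre-production item for span (s, s) into the K-best list stored at Chart[s, s, dir, 0, *],
        // keeping the K entries sorted by score; returns false if the score does not beat any of the current entries.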
        public bool Add(int s, int type, int dir, double score, FeatureVector fv)
        {
            bool added = false;

            if (Chart[s, s, dir, 0, 0] == null)
            {
                for (int i = 0; i < K; i++)
                    Chart[s, s, dir, 0, i] = new ParseForestItem(s, type, dir, double.NegativeInfinity, null);
            }

            if (Chart[s, s, dir, 0, K - 1].Prob > score)
                return false;

            for (int i = 0; i < K; i++)
            {
                if (Chart[s, s, dir, 0, i].Prob < score)
                {
                    ParseForestItem tmp = Chart[s, s, dir, 0, i];
                    Chart[s, s, dir, 0, i] = new ParseForestItem(s, type, dir, score, fv);
                    for (int j = i + 1; j < K && tmp.Prob != double.NegativeInfinity; j++)
                    {
                        ParseForestItem tmp1 = Chart[s, s, dir, 0, j];
                        Chart[s, s, dir, 0, j] = tmp;
                        tmp = tmp1;
                    }
                    added = true;
                    break;
                }
            }

            return added;
        }
Example #3
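 // Scores a feature vector as a sparse dot product with the learned parameter weights;
 // features with a negative index (not present in the alphabet) are skipped.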
 public double GetScore(FeatureVector fv)
 {
     double score = 0.0;
     foreach (Feature feature in fv.FVector)
     {
         if (feature.Index >= 0)
             score += parameters[feature.Index]*feature.Value;
     }
     return score;
 }
        public override FeatureVector CreateFeatureVector(string[] toks,
                                                 string[] pos, string[] labs,
                                                 int[] deps)
        {
            var posA = new string[pos.Length];
            for (int i = 0; i < pos.Length; i++)
            {
                posA[i] = pos[i].SubstringWithIndex(0, 1);
            }

            var fv = new FeatureVector();
            for (int i = 0; i < toks.Length; i++)
            {
                if (deps[i] == -1)
                    continue;
                int small = i < deps[i] ? i : deps[i];
                int large = i > deps[i] ? i : deps[i];
                bool attR = i >= deps[i];
                fv = CreateFeatureVector(toks, pos, posA, small, large, attR, fv);
                if (Labeled)
                {
                    fv = CreateFeatureVector(toks, pos, posA, i, labs[i], attR, true, fv);
                    fv = CreateFeatureVector(toks, pos, posA, deps[i], labs[i], attR, false, fv);
                }
            }
            // find all trip features
            for (int i = 0; i < toks.Length; i++)
            {
                if (deps[i] == -1 && i != 0) continue;
                // Right children
                int prev = i;
                for (int j = i + 1; j < toks.Length; j++)
                {
                    if (deps[j] == i)
                    {
                        fv = CreateFeatureVector(toks, pos, posA, i, prev, j, fv);
                        fv = CreateFeatureVectorSib(toks, pos, prev, j, prev == i, fv);
                        prev = j;
                    }
                }
                prev = i;
                for (int j = i - 1; j >= 0; j--)
                {
                    if (deps[j] == i)
                    {
                        fv = CreateFeatureVector(toks, pos, posA, i, prev, j, fv);
                        fv = CreateFeatureVectorSib(toks, pos, prev, j, prev == i, fv);
                        prev = j;
                    }
                }
            }

            return fv;
        }
        // preproductions
        public ParseForestItem(int s, int type, int dir, double prob, FeatureVector fv)
        {
            S = s;
            Dir = dir;
            Type = type;
            Length = 2;

            Prob = prob;
            FV = fv;

            Left = null;
            Right = null;
        }
        // productions
        public ParseForestItem(int i, int k, int j, int type,
                               int dir, int comp,
                               double prob, FeatureVector fv,
                               ParseForestItem left, ParseForestItem right)
        {
            S = i;
            R = k;
            T = j;
            Dir = dir;
            Comp = comp;
            Type = type;
            Length = 6;

            Prob = prob;
            FV = fv;

            Left = left;
            Right = right;
        }
        public FeatureVector CreateFeatureVector(string[] toks,
                                                 string[] pos,
                                                 string[] posA,
                                                 int par,
                                                 int ch1, int ch2,
                                                 FeatureVector fv)
        {
            // ch1 is always the closest to par
            string dir = par > ch2 ? "RA" : "LA";

            string parPOS = pos[par];
            string ch1POS = ch1 == par ? "STPOS" : pos[ch1];
            string ch2POS = pos[ch2];
            string ch1Word = ch1 == par ? "STWRD" : toks[ch1];
            string ch2Word = toks[ch2];

            string pTrip = parPOS + "_" + ch1POS + "_" + ch2POS;
            Add("POS_TRIP=" + pTrip + "_" + dir, 1.0, fv);
            Add("APOS_TRIP=" + pTrip, 1.0, fv);

            return fv;
        }
        // Non-projective decoding: decode the best projective parse first, then rearrange edges to allow crossing arcs.
        public object[,] DecodeNonProjective(DependencyInstance inst,
                                             FeatureVector[,,] fvs,
                                             double[,,] probs,
                                             FeatureVector[,,] fvsTrips,
                                             double[,,] probsTrips,
                                             FeatureVector[,,] fvsSibs,
                                             double[,,] probsSibs,
                                             FeatureVector[,,,] ntFvs,
                                             double[,,,] ntProbs, int K)
        {
            string[] toks = inst.Sentence;
            string[] pos = inst.POS;

            object[,] orig = DecodeProjective(inst, fvs, probs, fvsTrips, probsTrips, fvsSibs, probsSibs, ntFvs,
                                              ntProbs, 1);
            string[] o = ((string) orig[0, 1]).Split(' ');
            var par = new int[o.Length + 1];
            var labs = new int[o.Length + 1];
            labs[0] = 0;
            par[0] = -1;
            for (int i = 1; i < par.Length; i++)
            {
                par[i] = int.Parse(o[i - 1].Split("\\|".ToCharArray())[0]);
                labs[i] = m_pipe.Labeled ? int.Parse(o[i - 1].Split(':')[1]) : 0;
            }

            Rearrange(probs, probsTrips, probsSibs, ntProbs, par, labs);

            string pars = "";
            for (int i = 1; i < par.Length; i++)
                pars += par[i] + "|" + i + ":" + labs[i] + " ";

            orig[0, 0] = ((DependencyPipe2O) m_pipe).CreateFeatureVector(toks, pos, labs, par);
            orig[0, 1] = pars.Trim();

            return orig;
        }
        public void GetFeatureVector(DependencyInstance inst,
                                     FeatureVector[,,] fvs,
                                     double[,,] probs,
                                     FeatureVector[,,] fvsTrips,
                                     double[,,] probsTrips,
                                     FeatureVector[,,] fvsSibs,
                                     double[,,] probsSibs,
                                     FeatureVector[,,,] ntFvs,
                                     double[,,,] ntProbs, Parameters @params)
        {
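            // Precompute feature vectors and model scores for every first-order arc, every labeled arc
            // (when Labeled is set), every head/child/child sibling triple, and every sibling pair.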
            string[] toks = inst.Sentence;
            string[] pos = inst.POS;
            string[] labs = inst.Labs;

            var posA = new string[pos.Length];
            for (int i = 0; i < pos.Length; i++)
            {
                posA[i] = pos[i].SubstringWithIndex(0, 1);
            }

            // Get production crap.
            for (int w1 = 0; w1 < toks.Length; w1++)
            {
                for (int w2 = w1 + 1; w2 < toks.Length; w2++)
                {
                    for (int ph = 0; ph < 2; ph++)
                    {
                        bool attR = ph == 0;

                        int childInt = attR ? w2 : w1;
                        int parInt = attR ? w1 : w2;

                        FeatureVector prodFV = CreateFeatureVector(toks, pos, posA, w1, w2, attR,
                                                                   new FeatureVector());

                        double prodProb = @params.GetScore(prodFV);
                        fvs[w1, w2, ph] = prodFV;
                        probs[w1, w2, ph] = prodProb;
                    }
                }
            }

            if (Labeled)
            {
                for (int w1 = 0; w1 < toks.Length; w1++)
                {
                    for (int t = 0; t < Types.Length; t++)
                    {
                        string type = Types[t];

                        for (int ph = 0; ph < 2; ph++)
                        {
                            bool attR = ph == 0;

                            for (int ch = 0; ch < 2; ch++)
                            {
                                bool child = ch == 0;

                                FeatureVector prodFV = CreateFeatureVector(toks, pos, posA, w1,
                                                                           type, attR, child,
                                                                           new FeatureVector());

                                double ntProb = @params.GetScore(prodFV);
                                ntFvs[w1, t, ph, ch] = prodFV;
                                ntProbs[w1, t, ph, ch] = ntProb;
                            }
                        }
                    }
                }
            }

            for (int w1 = 0; w1 < toks.Length; w1++)
            {
                for (int w2 = w1; w2 < toks.Length; w2++)
                {
                    for (int w3 = w2 + 1; w3 < toks.Length; w3++)
                    {
                        FeatureVector prodFV = CreateFeatureVector(toks, pos, posA, w1, w2, w3,
                                                                   new FeatureVector());
                        double prodProb = @params.GetScore(prodFV);
                        fvsTrips[w1, w2, w3] = prodFV;
                        probsTrips[w1, w2, w3] = prodProb;
                    }
                }
                for (int w2 = w1; w2 >= 0; w2--)
                {
                    for (int w3 = w2 - 1; w3 >= 0; w3--)
                    {
                        FeatureVector prodFV = CreateFeatureVector(toks, pos, posA, w1, w2, w3,
                                                                   new FeatureVector());
                        double prodProb = @params.GetScore(prodFV);
                        fvsTrips[w1, w2, w3] = prodFV;
                        probsTrips[w1, w2, w3] = prodProb;
                    }
                }
            }

            for (int w1 = 0; w1 < toks.Length; w1++)
            {
                for (int w2 = 0; w2 < toks.Length; w2++)
                {
                    for (int wh = 0; wh < 2; wh++)
                    {
                        if (w1 != w2)
                        {
                            FeatureVector prodFV = CreateFeatureVectorSib(toks, pos, w1, w2, wh == 0,
                                                                          new FeatureVector());
                            double prodProb = @params.GetScore(prodFV);
                            fvsSibs[w1, w2, wh] = prodFV;
                            probsSibs[w1, w2, wh] = prodProb;
                        }
                    }
                }
            }
        }
        private static void TestLogisticRegressionUsingCrossValidation(FeatureVector training, FeatureVector test)
        {
            CrossValidator      cv      = new CrossValidator(new LogisticRegression(), new BinaryClassificationEvaluator(), 10);
            CrossValidatorModel cvModel = (CrossValidatorModel)cv.Fit(training);

            Console.WriteLine("10-fold cross validator accuracy: " + cv.Accuracy);
            FeatureVector predictions = cvModel.transform(test);

            PrintPredictionsAndEvaluate(predictions);
        }
        // TODO: sina: rename it to ReadFeatureVector
        public DependencyInstance GetFeatureVector(BinaryReader reader,
                                                   DependencyInstance inst,
                                                   FeatureVector[,,] fvs,
                                                   double[,,] probs,
                                                   FeatureVector[,,] fvsTrips,
                                                   double[,,] probsTrips,
                                                   FeatureVector[,,] fvsSibs,
                                                   double[,,] probsSibs,
                                                   FeatureVector[,,,] ntFvs,
                                                   double[,,,] ntProbs,
                                                   Parameters @params)
        {
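            // Same precomputation as above, but the feature indices are read back from a forest file produced
            // during feature extraction; the sentinels -2, -3, -4 and -1 mark feature-list and section boundaries.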
            int length = inst.Length;

            // Get production crap.
            for (int w1 = 0; w1 < length; w1++)
            {
                for (int w2 = w1 + 1; w2 < length; w2++)
                {
                    for (int ph = 0; ph < 2; ph++)
                    {
                        var prodFV = new FeatureVector();

                        int indx = reader.ReadInt32();
                        while (indx != -2)
                        {
                            AddNewFeature(indx, 1.0, prodFV);
                            indx = reader.ReadInt32();
                        }

                        double prodProb = @params.GetScore(prodFV);
                        fvs[w1, w2, ph] = prodFV;
                        probs[w1, w2, ph] = prodProb;
                    }
                }
            }
            int last = reader.ReadInt32();
            if (last != -3)
            {
                Console.WriteLine("Error reading file.");
                throw new Exception("Bad File Format");
            }

            if (Labeled)
            {
                for (int w1 = 0; w1 < length; w1++)
                {
                    for (int t = 0; t < Types.Length; t++)
                    {
                        string type = Types[t];

                        for (int ph = 0; ph < 2; ph++)
                        {
                            for (int ch = 0; ch < 2; ch++)
                            {
                                var prodFV = new FeatureVector();

                                int indx = reader.ReadInt32();
                                while (indx != -2)
                                {
                                    AddNewFeature(indx, 1.0, prodFV);
                                    indx = reader.ReadInt32();
                                }

                                double ntProb = @params.GetScore(prodFV);
                                ntFvs[w1, t, ph, ch] = prodFV;
                                ntProbs[w1, t, ph, ch] = ntProb;
                            }
                        }
                    }
                }
                last = reader.ReadInt32();
                if (last != -3)
                {
                    Console.WriteLine("Error reading file.");
                    throw new Exception("Bad File Format");
                }
            }

            for (int w1 = 0; w1 < length; w1++)
            {
                for (int w2 = w1; w2 < length; w2++)
                {
                    for (int w3 = w2 + 1; w3 < length; w3++)
                    {
                        var prodFV = new FeatureVector();

                        int indx = reader.ReadInt32();
                        while (indx != -2)
                        {
                            AddNewFeature(indx, 1.0, prodFV);
                            indx = reader.ReadInt32();
                        }

                        double prodProb = @params.GetScore(prodFV);
                        fvsTrips[w1, w2, w3] = prodFV;
                        probsTrips[w1, w2, w3] = prodProb;
                    }
                }
                for (int w2 = w1; w2 >= 0; w2--)
                {
                    for (int w3 = w2 - 1; w3 >= 0; w3--)
                    {
                        var prodFV = new FeatureVector();

                        int indx = reader.ReadInt32();
                        while (indx != -2)
                        {
                            AddNewFeature(indx, 1.0, prodFV);

                            indx = reader.ReadInt32();
                        }

                        double prodProb = @params.GetScore(prodFV);
                        fvsTrips[w1, w2, w3] = prodFV;
                        probsTrips[w1, w2, w3] = prodProb;
                    }
                }
            }

            last = reader.ReadInt32();
            if (last != -3)
            {
                Console.WriteLine("Error reading file.");
                throw new Exception("Bad File Format");
            }

            for (int w1 = 0; w1 < length; w1++)
            {
                for (int w2 = 0; w2 < length; w2++)
                {
                    for (int wh = 0; wh < 2; wh++)
                    {
                        if (w1 != w2)
                        {
                            var prodFV = new FeatureVector();

                            int indx = reader.ReadInt32();
                            while (indx != -2)
                            {
                                AddNewFeature(indx, 1.0, prodFV);
                                indx = reader.ReadInt32();
                            }

                            double prodProb = @params.GetScore(prodFV);
                            fvsSibs[w1, w2, wh] = prodFV;
                            probsSibs[w1, w2, wh] = prodProb;
                        }
                    }
                }
            }

            last = reader.ReadInt32();
            if (last != -3)
            {
                Console.WriteLine("Error reading file.");
                throw new Exception("Bad File Format");
            }

            var nfv = new FeatureVector();
            int next = reader.ReadInt32();
            while (next != -4)
            {
                AddNewFeature(next, 1.0, nfv);
                next = reader.ReadInt32();
            }

            string[] toks = null;
            string[] pos = null;
            string[] labs = null;
            string actParseTree = null;
            try
            {
                int len = reader.ReadInt32(); //Added by MSR
                toks = new string[len];
                for (int i = 0; i < len; i++)
                {
                    toks[i] = reader.ReadString();
                }
                //next = reader.ReadInt32();
                len = reader.ReadInt32(); //Added by MSR
                pos = new string[len];
                for (int i = 0; i < len; i++)
                {
                    pos[i] = reader.ReadString();
                }
                next = reader.ReadInt32();
                len = reader.ReadInt32(); //Added by MSR

                labs = new string[len];
                for (int i = 0; i < len; i++)
                {
                    labs[i] = reader.ReadString();
                }
                next = reader.ReadInt32();
                actParseTree = reader.ReadString();
                next = reader.ReadInt32();
            }
            catch (Exception e)
            {
                // TODO: sina: A library MUST NOT call Environment.Exit in any form
                // throw exception instead.
                Console.WriteLine("Error reading file.");
                throw new Exception("Bad File Format");
            }

            if (next != -1)
            {
                // TODO: sina: A library MUST NOT call Environment.Exit in any form
                // throw exception instead.
                Console.WriteLine("Error reading file.");
                throw new Exception("Bad File Format");
            }

            var pti = new DependencyInstance(toks, pos, labs, nfv);
            pti.ActParseTree = actParseTree;
            return pti;
        }
        public DependencyInstance ReadFeatureVector(BinaryReader reader,
                                                   DependencyInstance inst,
                                                   FeatureVector[,,] fvs,
                                                   double[,,] probs,
                                                   FeatureVector[,,,] ntFvs,
                                                   double[,,,] ntProbs,
                                                   Parameters parameters)
        {
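            // First-order counterpart of the reader above: only arc and labeled-arc feature sections are read,
            // with no sibling or triple sections.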
            int length = inst.Length;

            // Get production crap.
            for (int w1 = 0; w1 < length; w1++)
            {
                for (int w2 = w1 + 1; w2 < length; w2++)
                {
                    for (int ph = 0; ph < 2; ph++)
                    {
                        var prodFV = new FeatureVector();

                        int indx = reader.ReadInt32();
                        while (indx != -2)
                        {
                            AddNewFeature(indx, 1.0, prodFV);
                            indx = reader.ReadInt32();
                        }

                        double prodProb = parameters.GetScore(prodFV);
                        fvs[w1, w2, ph] = prodFV;
                        probs[w1, w2, ph] = prodProb;
                    }
                }
            }
            int last = reader.ReadInt32();
            if (last != -3)
            {
                Console.WriteLine("Error reading file.");

                throw new Exception("Bad File Format");
            }

            if (Labeled)
            {
                for (int w1 = 0; w1 < length; w1++)
                {
                    for (int t = 0; t < Types.Length; t++)
                    {
                        string type = Types[t];

                        for (int ph = 0; ph < 2; ph++)
                        {
                            for (int ch = 0; ch < 2; ch++)
                            {
                                var prodFV = new FeatureVector();

                                int indx = reader.ReadInt32();
                                while (indx != -2)
                                {
                                    AddNewFeature(indx, 1.0, prodFV);
                                    indx = reader.ReadInt32();
                                }

                                double ntProb = parameters.GetScore(prodFV);
                                ntFvs[w1, t, ph, ch] = prodFV;
                                ntProbs[w1, t, ph, ch] = ntProb;
                            }
                        }
                    }
                }
                last = reader.ReadInt32();
                if (last != -3)
                {
                    Console.WriteLine("Error reading file.");
                    throw new Exception("Bad File Format");
                }
            }

            var nfv = new FeatureVector();
            int next = reader.ReadInt32();
            while (next != -4)
            {
                AddNewFeature(next, 1.0, nfv);
                next = reader.ReadInt32();
            }

            string[] toks = null;
            string[] pos = null;
            string[] labs = null;
            string actParseTree = null;
            try
            {
                int len = reader.ReadInt32();
                toks = new string[len];
                for (int i = 0; i < len; i++)
                {
                    toks[i] = reader.ReadString();
                }
                next = reader.ReadInt32();
                len = reader.ReadInt32();
                pos = new string[len];
                for (int i = 0; i < len; i++)
                {
                    pos[i] = reader.ReadString();
                }
                next = reader.ReadInt32();
                len = reader.ReadInt32();
                labs = new string[len];
                for (int i = 0; i < len; i++)
                {
                    labs[i] = reader.ReadString();
                }
                next = reader.ReadInt32();
                actParseTree = reader.ReadString();
                next = reader.ReadInt32();
            }
            catch (Exception e)
            {
                Console.WriteLine("Error reading file.");
                throw new Exception("Bad File Format");
            }

            if (next != -1)
            {
                Console.WriteLine("Error reading file.");
                throw new Exception("Bad File Format");
            }

            var pti = new DependencyInstance(toks, pos, labs, nfv);
            pti.ActParseTree = actParseTree;
            return pti;
        }
Example #13
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: public boolean predict(org.maltparser.core.feature.FeatureModel featureModel, org.maltparser.parser.history.action.ComplexDecisionAction decision, boolean one_prediction) throws org.maltparser.core.exception.MaltChainedException
        public bool predict(FeatureModel featureModel, ComplexDecisionAction decision, bool one_prediction)
        {
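            // For each single decision, build a classifier key from the decision-model prefix (bdm/sdm/odm),
            // an optional divide-feature index, and the classifier name, then delegate prediction to that classifier.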
            if (decision.numberOfDecisions() > 2)
            {
                throw new MaltChainedException("Number of decisions is greater than two,  which is unsupported in the light-weight parser (lw.parser)");
            }
            featureModel.update();
            bool success = true;

            for (int i = 0; i < decision.numberOfDecisions(); i++)
            {
                LWClassifier classifier = null;
//JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
//ORIGINAL LINE: final org.maltparser.parser.history.action.SingleDecision singleDecision = decision.getSingleDecision(i);
                SingleDecision singleDecision = decision.getSingleDecision(i);
//JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
//ORIGINAL LINE: final StringBuilder classifierString = new StringBuilder();
                StringBuilder classifierString = new StringBuilder();

//JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
//ORIGINAL LINE: final StringBuilder decisionModelString = new StringBuilder();
                StringBuilder decisionModelString = new StringBuilder();
                if (singleDecision.RelationToNextDecision == RelationToNextDecision.BRANCHED)
                {
                    decisionModelString.Append("bdm");
                }
                else if (singleDecision.RelationToNextDecision == RelationToNextDecision.SEQUANTIAL)
                {
                    decisionModelString.Append("sdm");
                }
                else
                {
                    decisionModelString.Append("odm");
                }
                decisionModelString.Append(i);
                string decisionSymbol = "";
                if (i == 1 && singleDecision.RelationToNextDecision == RelationToNextDecision.BRANCHED)
                {
                    decisionSymbol = singleDecision.DecisionSymbol;
                    decisionModelString.Append(decisionSymbol);
                }
                decisionModelString.Append('.');
                FeatureVector featureVector = featureModel.getFeatureVector(decisionSymbol, singleDecision.TableContainer.TableContainerName);

                if (featureModel.hasDivideFeatureFunction())
                {
                    SingleFeatureValue featureValue = (SingleFeatureValue)featureModel.DivideFeatureFunction.FeatureValue;
                    classifierString.Append(decisionModelString);
                    classifierString.Append(string.Format("{0:D3}", featureValue.IndexCode));
                    classifierString.Append('.');
                    classifierString.Append(classifierName);
                    classifier = classifiers[classifierString.ToString()];
                    if (classifier != null)
                    {
                        FeatureVector dividefeatureVector = featureModel.getFeatureVector("/" + featureVector.SpecSubModel.SubModelName);
                        success = classifier.predict(dividefeatureVector, singleDecision, one_prediction) && success;
                        continue;
                    }
                    classifierString.Length = 0;
                }

                classifierString.Append(decisionModelString);
                classifierString.Append(classifierName);
                classifier = classifiers[classifierString.ToString()];
                if (classifier != null)
                {
                    success = classifier.predict(featureVector, singleDecision, one_prediction) && success;
                }
                else
                {
                    singleDecision.addDecision(1);
                }
                if (!singleDecision.continueWithNextDecision())
                {
                    break;
                }
            }
            return(success);
        }
Example #14
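        // Iterative Hildreth procedure: given constraint vectors a[i] and margins b[i], solves for multipliers
        // alpha[i] >= 0 by repeatedly updating the coordinate with the largest KKT violation (the small quadratic
        // program behind MIRA-style large-margin updates).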
        private double[] hildreth(FeatureVector[] a, double[] b)
        {
            int i;
            const int maxIter = 10000;
            const double eps = 0.00000001;
            const double zero = 0.000000000001;

            var alpha = new double[b.Length];

            var F = new double[b.Length];
            var kkt = new double[b.Length];
            double maxKkt = double.NegativeInfinity;

            int K = a.Length;

            var A = new double[K][];
            for (int j = 0; j < A.Length; j++)
            {
                A[j] = new double[K];
            }
            var isComputed = new bool[K];
            for (i = 0; i < K; i++)
            {
                A[i][i] = FeatureVector.DotProduct(a[i], a[i]);
                isComputed[i] = false;
            }

            int maxKktI = -1;

            for (i = 0; i < F.Length; i++)
            {
                F[i] = b[i];
                kkt[i] = F[i];
                if (kkt[i] > maxKkt)
                {
                    maxKkt = kkt[i];
                    maxKktI = i;
                }
            }

            int iter = 0;
            double diff_alpha;
            double try_alpha;
            double add_alpha;

            while (maxKkt >= eps && iter < maxIter)
            {
                diff_alpha = A[maxKktI][maxKktI] <= zero ? 0.0 : F[maxKktI]/A[maxKktI][maxKktI];
                try_alpha = alpha[maxKktI] + diff_alpha;
                add_alpha = 0.0;

                if (try_alpha < 0.0)
                    add_alpha = -1.0*alpha[maxKktI];
                else
                    add_alpha = diff_alpha;

                alpha[maxKktI] = alpha[maxKktI] + add_alpha;

                if (!isComputed[maxKktI])
                {
                    for (i = 0; i < K; i++)
                    {
                        A[i][maxKktI] = FeatureVector.DotProduct(a[i], a[maxKktI]); // for version 1
                        isComputed[maxKktI] = true;
                    }
                }

                for (i = 0; i < F.Length; i++)
                {
                    F[i] -= add_alpha*A[i][maxKktI];
                    kkt[i] = F[i];
                    if (alpha[i] > zero)
                        kkt[i] = Math.Abs(F[i]);
                }

                maxKkt = double.NegativeInfinity;
                maxKktI = -1;
                for (i = 0; i < F.Length; i++)
                    if (kkt[i] > maxKkt)
                    {
                        maxKkt = kkt[i];
                        maxKktI = i;
                    }

                iter++;
            }

            return alpha;
        }
 public void Add(string feat, double val,  FeatureVector fv)
 {
     int num = DataAlphabet.LookupIndex(feat);
     if (num >= 0)
         fv.FVector.AddFirst(new Feature(num, val));
 }
 public double Probabilty(FeatureVector featureVector, int landCoverClass)
 {
     return(Classifier.Probabilty(featureVector, landCoverClass));
 }
Example #17
 public State tick(ref PlayerAction action, FeatureVector vector, Brain brain)
 {
     action = PlayerAction.Prepare;
     return(this); //What this returns decides how the program behaves basically
 }
Example #18
        /// <summary>
        /// Returns the evaluation score: positive when the first player (sente) is ahead, negative when the second player (gote) is ahead, 0.0 when even.
        /// </summary>
        /// <param name="args"></param>
        /// <returns></returns>
        public override void Evaluate(
            out float out_score,
#if DEBUG || LEARN
            out KyHyokaMeisai_Koumoku out_meisaiKoumoku_orNull,
#endif
            SkyConst srcSky,
            FeatureVector fv
            )
        {
            float score_p1 = 0.0f;
            float score_p2 = 0.0f;// Note: for player 2, the more negative the better.



            srcSky.Foreach_Starlights((Finger finger, IMoveHalf light, ref bool toBreak) =>
            {
                RO_Starlight ms = (RO_Starlight)light;

                RO_Star koma = Util_Starlightable.AsKoma(ms.Now);

                // Score for this piece type
                float komaScore_temp = fv.Komawari[(int)koma.Komasyurui];

                // Pieces in hand get a higher value (a bonus), to keep the engine from dropping pieces carelessly in the opening.
                if (
                    (Okiba.Sente_Komadai | Okiba.Gote_Komadai).HasFlag(Conv_SyElement.ToOkiba(koma.Masu))
                    )
                {
                    //komaScore_temp *= 1.05f;// At 1.05x a knight was still given away for nothing into the opponent's knight's reach; probably not enough.
                    komaScore_temp *= 1.13f;
                    //komaScore_temp *= 1.25f;// At 1.25x the game ended in check while the golds and bishop in hand were never dropped; probably too big a bonus.
                }


                if (koma.Pside == Playerside.P1)
                {
                    score_p1 += komaScore_temp;
                }
                else
                {
                    // Unlike the other evaluation terms, material (komawari)
                    // is stored as a positive number for both player 1 and player 2,
                    // so flip the sign for player 2.
                    score_p2 += -komaScore_temp;
                }
            });

            //
            // Player 2's score is already negative (more negative = better for player 2),
            // so the two scores can simply be added.
            //
            out_score = score_p1 + score_p2;

            //----------------------------------------
            // Breakdown items
            //----------------------------------------
#if DEBUG || LEARN
            string utiwake = "";
            // Breakdown
            {
                StringBuilder sb = new StringBuilder();
                sb.Append("駒割");
                utiwake = sb.ToString();
            }

            // Breakdown item
            out_meisaiKoumoku_orNull = new KyHyokaMeisai_KoumokuImpl(utiwake, out_score);
#endif
        }
 public int Predict(FeatureVector featureVector)
 {
     return(Classifier.Predict(featureVector));
 }
    public void ChangeStats(FeatureVector statChanges)
    {
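        // Applies statChanges to the 20 sub-stats (clamped at zero), recomputes the four main stats as sums of
        // five sub-stats each, and notifies the GUI for every main-stat group with a non-zero net change.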
        for (int i = 0; i < 20; i++)
        {
            subStats[i] += statChanges[i];
            if (subStats[i] < 0)
            {
                subStats[i] = 0;
            }
        }

        int mainStat1Change = 0; // Collection of substats 1-5
        int mainStat2Change = 0; // Collection of substats 6-10
        int mainStat3Change = 0; // Collection of substats 11-15
        int mainStat4Change = 0; // Collection of substats 16-20

        for (int i = 0; i < 4; i++)
        {
            mainStats[i] = 0;
        }



        for (int i = 0; i < 5; i++)
        {
            mainStat1Change += statChanges[i];
            mainStats[0]    += subStats[i];
        }
        for (int i = 5; i < 10; i++)
        {
            mainStat2Change += statChanges[i];
            mainStats[1]    += subStats[i];
        }
        for (int i = 10; i < 15; i++)
        {
            mainStat3Change += statChanges[i];
            mainStats[2]    += subStats[i];
        }
        for (int i = 15; i < 20; i++)
        {
            mainStat4Change += statChanges[i];
            mainStats[3]    += subStats[i];
        }

        //mainStats[0] += mainStat1Change;
        //mainStats[1] += mainStat2Change;
        //mainStats[2] += mainStat3Change;
        //mainStats[3] += mainStat4Change;


        // Handle Change events

        if (mainStat1Change > 0)
        {
            // Do something to show the stat went up
            guiManager.HandleStatChangeGraphics(0, mainStat1Change, mainStats[0]);
        }
        else if (mainStat1Change < 0)
        {
            // Do something to show the stat went down
            guiManager.HandleStatChangeGraphics(0, mainStat1Change, mainStats[0]);
        }
        if (mainStat2Change > 0)
        {
            // Do something to show the stat went up
            guiManager.HandleStatChangeGraphics(1, mainStat2Change, mainStats[1]);
        }
        else if (mainStat2Change < 0)
        {
            // Do something to show the stat went down
            guiManager.HandleStatChangeGraphics(1, mainStat2Change, mainStats[1]);
        }
        if (mainStat3Change > 0)
        {
            // Do something to show the stat went up
            guiManager.HandleStatChangeGraphics(2, mainStat3Change, mainStats[2]);
        }
        else if (mainStat3Change < 0)
        {
            // Do something to show the stat went down
            guiManager.HandleStatChangeGraphics(2, mainStat3Change, mainStats[2]);
        }
        if (mainStat4Change > 0)
        {
            // Do something to show the stat went up
            guiManager.HandleStatChangeGraphics(3, mainStat4Change, mainStats[3]);
        }
        else if (mainStat4Change < 0)
        {
            // Do something to show the stat went down
            guiManager.HandleStatChangeGraphics(3, mainStat4Change, mainStats[3]);
        }
    }
Example #21
        /// <summary>
        /// Builds the text.
        /// </summary>
        /// <param name="fv"></param>
        /// <returns></returns>
        public static string Format_KK(FeatureVector fv)
        {
            StringBuilder sb = new StringBuilder();

            //
            // Comment header
            //
            sb.AppendLine("\"#紹介文\",");
            sb.AppendLine("\"#\",");
            sb.AppendLine("\"#ボナンザ6.0アレンジ式きふわらべ2駒関係\",");
            sb.AppendLine("\"#\",");
            sb.AppendLine("\"#----------------------------------------\",");
            sb.AppendLine("\"#KK表☆\",");
            sb.AppendLine("\"#----------------------------------------\",");
            sb.AppendLine("\"#\",");
            sb.AppendLine("\"#常に先手は正の数、後手は負の数の絶対値が大きい方が有利。0は互角。\",");

            //
            // Specification version
            //
            sb.AppendLine();
            sb.AppendLine("\"Version\",1.0,");
            sb.AppendLine();
            //----------------------------------------
            // Player 1's king (K): rank 1 through rank 9
            //----------------------------------------
            for (int k1dan = 1; k1dan < 10; k1dan++)
            {
                // Build the comment line.
                sb.Append("\"#KK");// 4 characters
                string danStr = Conv_Int.ToKanSuji(k1dan);
                for (int suji = 9; suji > 0; suji--)
                {
                    string sujiStr = Conv_Int.ToArabiaSuji(suji);
                    sb.Append(" ");
                    sb.Append(sujiStr);
                    sb.Append(danStr);
                    sb.Append("1P玉 vs2P玉   ");//15文字
                    for (int col = 0; col < 5; col++)
                    {
                        sb.Append("     "); //5文字
                    }
                    sb.Append("     ");     //5文字

                    if (Const_FeatureVectorFormat.PARAMETER_INDEX_OUTPUT)
                    {
                        sb.Append("                                             ");//調整
                    }
                }
                sb.AppendLine("\",");

                //----------------------------------------
                // Player 2's king (K): rank 1 through rank 9
                //----------------------------------------
                for (int k2dan = 1; k2dan < 10; k2dan++)
                {
                    // Start of the row
                    sb.Append("    ");// 4 characters
                    //----------------------------------------
                    // Player 1's king (K): file 9 through file 1
                    //----------------------------------------
                    for (int k1suji = 9; k1suji > 0; k1suji--)
                    {
                        int p1;
                        int p2;

                        //----------------------------------------
                        // Player 2's king (K): file 9 through file 1
                        //----------------------------------------
                        for (int k2suji = 9; k2suji > 0; k2suji--)
                        {
                            int k2masu = Util_Masu10.Handle_OkibaSujiDanToMasu(Okiba.ShogiBan, k2suji, k2dan);

                            Conv_FvKoumoku522.Converter_KK_to_PP(k1dan, k2dan, k1suji, k2suji, out p1, out p2);

                            if (Const_FeatureVectorFormat.PARAMETER_INDEX_OUTPUT)
                            {
                                sb.Append(string.Format("{0,4}_{1,4}", p1, p2));
                            }
                            else
                            {
                                // Undo the score scaling factor.
                                float scoreF = fv.NikomaKankeiPp_ForMemory[p1, p2] / fv.Bairitu_NikomaKankeiPp;
                                int   value  = (int)Math.Round(scoreF, 0);// Round off the fractional part.
                                sb.Append(string.Format("{0,4}", value));
                            }

                            sb.Append(",");
                        }

                        // Horizontal gap between the tables
                        sb.Append("    ");
                    }
                    // Move on to the next rank
                    sb.AppendLine();
                }
                // Gap between ranks
                sb.AppendLine();
            }

            return(sb.ToString());
        }
Example #22
 public CSVExporter(FeatureVector featureVector)
 {
     Vector = featureVector;
 }
        public void OutputParses(string[] words, string[] posTags, out string[] labels, out int[] deps)
        {
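            // Builds all per-arc (and, for the second-order pipe, sibling/triple) feature vectors and scores for
            // the instance, runs the configured projective or non-projective decoder, and fills deps/labels from
            // the best parse.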
            DependencyInstance il = m_pipe.CreateInstance(ref words,ref posTags, out labels,out deps);
            string[] toks = il.Sentence;

            int length = toks.Length;

            var fvs = new FeatureVector[toks.Length,toks.Length,2];
            var probs = new double[toks.Length,toks.Length,2];
            var ntFvs = new FeatureVector[toks.Length,m_pipe.Types.Length,2,2];
            var ntProbs = new double[toks.Length,m_pipe.Types.Length,2,2];
            var fvsTrips = new FeatureVector[length,length,length];
            var probsTrips = new double[length,length,length];
            var fvsSibs = new FeatureVector[length,length,2];
            var probsSibs = new double[length,length,2];
            if (SecondOrder)
                ((DependencyPipe2O) m_pipe).GetFeatureVector(il, fvs, probs,
                                                             fvsTrips, probsTrips,
                                                             fvsSibs, probsSibs,
                                                             ntFvs, ntProbs, m_params);
            else
                m_pipe.GetFeatureVector(il, fvs, probs, ntFvs, ntProbs, m_params);

            int K = TestK;
            object[,] d = null;
            if (DecodeType == ProjectiveTypes.Projective)
            {
                if (SecondOrder)
                    d = ((DependencyDecoder2O) m_decoder).DecodeProjective(il, fvs, probs,
                                                                           fvsTrips, probsTrips,
                                                                           fvsSibs, probsSibs,
                                                                           ntFvs, ntProbs, K);
                else
                    d = m_decoder.DecodeProjective(il, fvs, probs, ntFvs, ntProbs, K);
            }
            if (DecodeType == ProjectiveTypes.NonProjective)
            {
                if (SecondOrder)
                    d = ((DependencyDecoder2O) m_decoder).DecodeNonProjective(il, fvs, probs,
                                                                              fvsTrips, probsTrips,
                                                                              fvsSibs, probsSibs,
                                                                              ntFvs, ntProbs, K);
                else
                    d = m_decoder.decodeNonProjective(il, fvs, probs, ntFvs, ntProbs, K);
            }

            string[] res = ((string) d[0, 1]).Split(' ');
            string[] pos = il.POS;
            for (int j = 1; j < pos.Length; j++)
            {
                string[] trip = res[j - 1].Split("[\\|:]".ToCharArray());
                deps[j] = int.Parse(trip[0]);
                labels[j] = m_pipe.Types[int.Parse(trip[2])];
            }
        }
Example #24
 public BreakoutState(State nextState, FeatureVector vector, int ticks)
 {
     this.nextState = nextState;
     timeRun        = ticks;
 }
        //////////////////////////////////////////////////////
        // Get Best Parses ///////////////////////////////////
        //////////////////////////////////////////////////////
        public void OutputParses(string tFile, string file)
        {
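            // Reads instances from tFile one at a time, decodes each with the configured decoder, and writes the
            // predicted parse (words, POS, labels, heads) to the output file.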
            long start = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond; // elapsed time reported in milliseconds

            var pred = new StreamWriter(new FileStream(file, FileMode.Create), Encoding.UTF8);

            var in_ =
                new StreamReader(new FileStream(tFile, FileMode.Open), Encoding.UTF8);
            Console.Write("Processing Sentence: ");
            DependencyInstance il = m_pipe.CreateInstance(in_);
            int cnt = 0;
            while (il != null)
            {
                cnt++;
                Console.Write(cnt + " ");
                string[] toks = il.Sentence;

                int length = toks.Length;

                var fvs = new FeatureVector[toks.Length,toks.Length,2];
                var probs = new double[toks.Length,toks.Length,2];
                var ntFvs = new FeatureVector[toks.Length,m_pipe.Types.Length,2,2];
                var ntProbs = new double[toks.Length,m_pipe.Types.Length,2,2];
                var fvsTrips = new FeatureVector[length,length,length];
                var probsTrips = new double[length,length,length];
                var fvsSibs = new FeatureVector[length,length,2];
                var probsSibs = new double[length,length,2];
                if (SecondOrder)
                    ((DependencyPipe2O) m_pipe).GetFeatureVector(il, fvs, probs,
                                                               fvsTrips, probsTrips,
                                                               fvsSibs, probsSibs,
                                                               ntFvs, ntProbs, m_params);
                else
                    m_pipe.GetFeatureVector(il, fvs, probs, ntFvs, ntProbs, m_params);

                int K = TestK;
                object[,] d = null;
                if (DecodeType==ProjectiveTypes.Projective)
                {
                    if (SecondOrder)
                        d = ((DependencyDecoder2O) m_decoder).DecodeProjective(il, fvs, probs,
                                                                             fvsTrips, probsTrips,
                                                                             fvsSibs, probsSibs,
                                                                             ntFvs, ntProbs, K);
                    else
                        d = m_decoder.DecodeProjective(il, fvs, probs, ntFvs, ntProbs, K);
                }
                if (DecodeType == ProjectiveTypes.NonProjective)
                {
                    if (SecondOrder)
                        d = ((DependencyDecoder2O) m_decoder).DecodeNonProjective(il, fvs, probs,
                                                                                fvsTrips, probsTrips,
                                                                                fvsSibs, probsSibs,
                                                                                ntFvs, ntProbs, K);
                    else
                        d = m_decoder.decodeNonProjective(il, fvs, probs, ntFvs, ntProbs, K);
                }

                string[] res = ((string) d[0, 1]).Split(' ');
                string[] sent = il.Sentence;
                string[] pos = il.POS;
                var line1 = new StringBuilder();
                var line2 = new StringBuilder();
                var line3 = new StringBuilder();
                var line4 = new StringBuilder();
                for (int j = 1; j < pos.Length; j++)
                {
                    string[] trip = res[j - 1].Split("[\\|:]".ToCharArray());
                    line1.Append(sent[j] + "\t");
                    line2.Append(pos[j] + "\t");
                    line4.Append(trip[0] + "\t");
                    line3.Append(m_pipe.Types[int.Parse(trip[2])] + "\t");
                }
                var line=new StringBuilder();
                line.Append(line1.ToString().Trim()+"\n");
                line.Append(line2.ToString().Trim() + "\n");
                if(m_pipe.Labeled)
                    line.Append(line3.ToString().Trim() + "\n");
                line.Append(line4.ToString().Trim() + "\n\n");
                pred.Write(line.ToString());
                il = m_pipe.CreateInstance(in_);
            }
            Console.WriteLine();

            pred.Close();
            in_.Close();

            long end = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;
            Console.WriteLine("Took: " + (end - start) + " ms");
        }
Example #26
        /// <summary>
        /// Generates the 205 dimensional feature vector [Markov, abs(p), skew, kurt], p = {1,2,3} essential for classification
        /// *Applies weights (10x) to Kurtosis and Skewness
        /// </summary>
        /// <param name="mv">Markov Object</param>
        /// <param name="abs1">Absolute Central Moment, p = 1</param>
        /// <param name="abs2">Absolute Central Moment, p = 2</param>
        /// <param name="abs3">Absolute Central Moment, p = 3</param>
        /// <param name="skew">Skewness</param>
        /// <param name="kurt">Kurtosis</param>
        /// <returns></returns>
        static FeatureVector featureVector1(Markov mv, double abs1, double abs2, double abs3, double skew, double kurt, int id)
        {
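            // Layout (with markov_t = 10, giving the 205 entries noted above): [0..99] intra-block Markov,
            // [100..199] inter-block Markov, [200..202] absolute central moments p = 1..3, [203] 10*skew, [204] 10*kurt.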
            double[] vec = new double[2 * (markov_t * markov_t) + 5];
            for (int y = 0; y < markov_t; y++) //intra
                for (int x = 0; x < markov_t; x++)
                    vec[((markov_t) * y) + x] = mv.markov[x, y, 0];
            for (int y = 0; y < markov_t; y++) //inter
                for (int x = 0; x < markov_t; x++)
                    vec[((markov_t) * markov_t) + ((markov_t) * y) + x] = mv.markov[x, y, 1];

            vec[2 * (markov_t * markov_t) + 0] = abs1;
            vec[2 * (markov_t * markov_t) + 1] = abs2;
            vec[2 * (markov_t * markov_t) + 2] = abs3;
            vec[2 * (markov_t * markov_t) + 3] = 10 * skew;
            vec[2 * (markov_t * markov_t) + 4] = 10 * kurt;

            FeatureVector f = new FeatureVector(2 * (markov_t * markov_t) + 5, id); // dimension must match vec.Length (205 for markov_t = 10)

            f.vec = vec; // ur pushing the envelope on ambiguity here chief
            return f;
        }
        private static void RunConsoleApplication()
        {
            string filePath = (Environment.OSVersion.Platform == PlatformID.Unix || Environment.OSVersion.Platform == PlatformID.MacOSX) ? Environment.GetEnvironmentVariable("HOME") : Environment.ExpandEnvironmentVariables("%HOMEDRIVE%%HOMEPATH%");

            filePath = System.IO.Path.Combine(filePath, "indicatorOutput.txt"); // use the platform separator so the Unix/macOS branch above also works

            string   code       = "AKBNK";
            DateTime targetDate = new DateTime(2018, 11, 1).ToLocalTime();

            int numberOfData = 1000;

            var data = IndicatorService.GetData(code, targetDate, new string[] { "Tarih", "Kapanis" }, numberOfData + 1);

            double[] sma = MovingAverage.Simple(code, targetDate, 14, numberOfData);
            double[] wma = MovingAverage.Weighted(code, targetDate, 14, numberOfData);
            double[] ema = MovingAverage.Exponential(code, targetDate, 14, numberOfData);
            MovingAverageConvergenceDivergence macd = new MovingAverageConvergenceDivergence(code, targetDate, 12, 26, 9, numberOfData);

            double[]    rsi         = RelativeStrengthIndex.Rsi(code, targetDate, 14, numberOfData);
            double[]    williams    = WilliamsR.Wsr(code, targetDate, 14, numberOfData);
            Stochastics stochastics = new Stochastics(code, targetDate, 14, 3, 3, numberOfData);

            double[] closesOut      = IndicatorDataPreprocessor.GetClosesOut(numberOfData, data);
            double[] smaOut         = IndicatorDataPreprocessor.GetSMAOut(sma);
            double[] wmaOut         = IndicatorDataPreprocessor.GetWMAOut(wma);
            double[] emaOut         = IndicatorDataPreprocessor.GetEMAOut(ema);
            double[] macdOut        = IndicatorDataPreprocessor.GetMACDOut(macd);
            double[] rsiOut         = IndicatorDataPreprocessor.GetRSIOut(rsi);
            double[] williamsROut   = IndicatorDataPreprocessor.GetWilliamsROut(williams);
            double[] stochasticsOut = IndicatorDataPreprocessor.GetStochasticsOut(stochastics);

            int minRowCount;

            minRowCount = smaOut.Length;
            minRowCount = minRowCount < wmaOut.Length ? minRowCount : wmaOut.Length;
            minRowCount = minRowCount < emaOut.Length ? minRowCount : emaOut.Length;
            minRowCount = minRowCount < macdOut.Length ? minRowCount : macdOut.Length;
            minRowCount = minRowCount < rsiOut.Length ? minRowCount : rsiOut.Length;
            minRowCount = minRowCount < williamsROut.Length ? minRowCount : williamsROut.Length;
            minRowCount = minRowCount < stochasticsOut.Length ? minRowCount : stochasticsOut.Length;
            minRowCount = minRowCount < closesOut.Length ? minRowCount : closesOut.Length;
            FeatureVector vector = new FeatureVector();

            vector.AddColumn("SMA", smaOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            vector.AddColumn("WMA", wmaOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            vector.AddColumn("EMA", emaOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            vector.AddColumn("MACD", macdOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            vector.AddColumn("RSI", rsiOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            vector.AddColumn("WilliamsR", williamsROut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            vector.AddColumn("Stochastics", stochasticsOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            vector.AddColumn("label", closesOut.Select(p => (object)string.Format("{0:0.0}", p).ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());

            new LabeledPointExporter(vector).Export(filePath);

            int           count    = vector.Values[0].Length;
            FeatureVector training = new FeatureVector();

            for (int i = 0; i < vector.ColumnName.Count; i++)
            {
                training.AddColumn(vector.ColumnName[i], vector.Values[i].Take(count / 2).ToArray());
            }

            FeatureVector test = new FeatureVector();

            for (int i = 0; i < vector.ColumnName.Count; i++)
            {
                test.AddColumn(vector.ColumnName[i], vector.Values[i].Skip(count / 2).Take(count / 2).ToArray());
            }

            //TestNaiveBayes(training, test);
            //TestNaiveBayesUsingCrossValidation(training, test);
            //TestLinearRegression(training, test);
            //TestLinearRegressionUsingCrossValidation(training, test);
            //TestLogisticRegression(training, test);
            //TestLogisticRegressionUsingCrossValidation(training, test);
        }
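The indicator arrays above have different lengths because each indicator needs its own warm-up window, which is why every column is truncated to the shortest series before being added. A minimal, self-contained sketch of that alignment step (the SeriesAlignment helper below is hypothetical and not part of the original code):

        using System.Globalization;
        using System.Linq;

        static class SeriesAlignment
        {
            // Hypothetical helper: truncate every series to the shortest length so the
            // FeatureVector columns stay row-aligned, formatting with the invariant culture.
            public static string[][] AlignToShortest(params double[][] series)
            {
                int minRowCount = series.Min(s => s.Length);
                return series
                    .Select(s => s.Take(minRowCount)
                                  .Select(v => v.ToString(CultureInfo.InvariantCulture))
                                  .ToArray())
                    .ToArray();
            }
        }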
Example #28
        /// <summary>
        /// Random initialization k-means with cosine distance
        /// </summary>
        static List<FeatureVector>[] k_means_cosine(FeatureVector[] o, int k, int terminateIn)
        {
            //k means
            FeatureVector[] means = new FeatureVector[k];
            FeatureVector mean = new FeatureVector(markov_t, markov_t);
            List<FeatureVector>[] clusters = new List<FeatureVector>[k];
            double min = 0;

            bool notDone = true;

            //use random initialization
            o = randomShuffle(o);

            Console.WriteLine("\n ..Clustering with " + k + " randomly selected cluster centers");
            Console.WriteLine("First cluster center selected is: Feature Vector " + means[0].id);
            Console.WriteLine("Second cluster center selected is: Feature Vector " + means[1].id);

            //select k random observations and add them to the cluster centers
            for (int i = 0; i < k; i++)
            {
                clusters[i].Add(o[i]);
                means[i] = o[i];
            }

            //k means Algorithm
            for (int q = 0; q < terminateIn; q++)
            {
                Console.WriteLine("Iteration: " + q);
                notDone = false;

                //put each element of o[] into the cluster whose centroid it is closest to
                for (int i = 0; i < o.Length; i++)
                {
                    min = cosine(o[i], means[0]);

                    for (int kk = 0; kk < k; kk++)
                        min = Math.Min(cosine(o[i], means[kk]), min);

                    for (int kk = 0; kk < k; kk++)
                        if (cosine(o[i], means[kk]) == min)
                        {
                            //check if o[i] is not already in the proper cluster
                            if (!clusters[kk].Contains(o[i]))
                            {
                                //o[i] has found a new cluster, move on
                                Console.WriteLine("Feature Vector " + i + " has moved to cluster " + kk);
                                clusters[kk].Add(o[i]);

                                //remove o[i] from other clusters
                                for (int kkk = 0; kkk < k; kkk++)
                                    if (kkk != kk)
                                        clusters[kkk].Remove(o[i]);

                                //calculate the new centroid after inserting the new member
                                mean = centroid(clusters[kk]);

                                //if the centroid moved, another iteration is needed
                                for (int ui = 0; ui < mean.vec.Length; ui++)
                                    if (!(mean.vec[ui] == means[kk].vec[ui])) notDone = true;
                                means[kk] = mean;

                                continue;
                            }
                        }
                }

                for (int s = 0; s < k; s++)
                {
                    //Output..
                    Console.WriteLine("= Elements in cluster " + s + " are:");
                    for (int ss = 0; ss < clusters[s].Count(); ss++)
                        Console.WriteLine(ss + ". Feature Vector " + clusters[s][ss].id);
                    Console.WriteLine("= Centroid for cluster " + s + " is: \n <");

                    for (int ss = 0; ss < means[s].vec.Length; ss++)
                        Console.Write(means[s].vec[ss] + ", ");
                    Console.Write("> \n");
                }

                if (!notDone)
                {
                    Console.WriteLine("K means algorithm has been completed after " + q + " steps.");
                    return clusters;
                }
            }

            Console.WriteLine("Did not converge in specified steps: Terminating.");

            return clusters;
        }
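The method above relies on cosine() and centroid() helpers that this example does not show. A minimal sketch of what they might look like, assuming this example's FeatureVector exposes a public double[] field named vec (as the loops above imply), that System.Linq is available, and that cosine() returns a distance so that smaller means closer:

        static double cosine(FeatureVector a, FeatureVector b)
        {
            double dot = 0.0, na = 0.0, nb = 0.0;
            for (int i = 0; i < a.vec.Length; i++)
            {
                dot += a.vec[i] * b.vec[i];
                na  += a.vec[i] * a.vec[i];
                nb  += b.vec[i] * b.vec[i];
            }
            // 1 - cosine similarity, so that Math.Min selects the closest centroid.
            return 1.0 - dot / (Math.Sqrt(na) * Math.Sqrt(nb) + 1e-12);
        }

        static FeatureVector centroid(List<FeatureVector> cluster)
        {
            // Assumes the same FeatureVector(markov_t, markov_t) constructor used above.
            var mean = new FeatureVector(markov_t, markov_t);
            for (int i = 0; i < mean.vec.Length; i++)
                mean.vec[i] = cluster.Average(v => v.vec[i]);
            return mean;
        }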
 public void AddNewFeature(int index, double val, FeatureVector fv)
 {
     fv.FVector.AddFirst(new Feature(index, val));
 }
        // same as decode, except return K best
        public object[,] DecodeProjective(DependencyInstance inst,
                                          FeatureVector[,,] fvs,
                                          double[,,] probs,
                                          FeatureVector[,,] fvsTrips,
                                          double[,,] probsTrips,
                                          FeatureVector[,,] fvsSibs,
                                          double[,,] probsSibs,
                                          FeatureVector[,,,] ntFvs,
                                          double[,,,] ntProbs, int K)
        {
            string[] toks = inst.Sentence;
            string[] pos = inst.POS;

            int[,] staticTypes = null;
            if (m_pipe.Labeled)
            {
                staticTypes = GetTypes(ntProbs, toks.Length);
            }

            var pf = new KBestParseForest2O(0, toks.Length - 1, inst, K);

            for (int s = 0; s < toks.Length; s++)
            {
                pf.Add(s, -1, 0, 0.0, new FeatureVector());
                pf.Add(s, -1, 1, 0.0, new FeatureVector());
            }

            for (int j = 1; j < toks.Length; j++)
            {
                for (int s = 0; s < toks.Length && s + j < toks.Length; s++)
                {
                    int t = s + j;

                    FeatureVector prodFvSt = fvs[s, t, 0];
                    FeatureVector prodFvTs = fvs[s, t, 1];
                    double prodProbSt = probs[s, t, 0];
                    double prodProbTs = probs[s, t, 1];

                    int type1 = m_pipe.Labeled ? staticTypes[s, t] : 0;
                    int type2 = m_pipe.Labeled ? staticTypes[t, s] : 0;

                    FeatureVector ntFvS01 = ntFvs[s, type1, 0, 1];
                    FeatureVector ntFvS10 = ntFvs[s, type2, 1, 0];
                    FeatureVector ntFvT00 = ntFvs[t, type1, 0, 0];
                    FeatureVector ntFvT11 = ntFvs[t, type2, 1, 1];
                    double ntProbS01 = ntProbs[s, type1, 0, 1];
                    double ntProbS10 = ntProbs[s, type2, 1, 0];
                    double ntProbT00 = ntProbs[t, type1, 0, 0];
                    double ntProbT11 = ntProbs[t, type2, 1, 1];
                    double prodProb = 0.0;

                    if (true)
                    {
                        // case when R == S
                        ParseForestItem[] b1 = pf.GetItems(s, s, 0, 0);
                        ParseForestItem[] c1 = pf.GetItems(s + 1, t, 1, 0);
                        if (!(b1 == null || c1 == null))
                        {
                            FeatureVector prodFvSst = pf.Cat(fvsTrips[s, s, t], fvsSibs[s, t, 0]);
                            double prodProbSst = probsTrips[s, s, t] + probsSibs[s, t, 0];

                            int[,] pairs = pf.GetKBestPairs(b1, c1);

                            for (int k = 0; k < K; k++)
                            {
                                if (pairs[k, 0] == -1 || pairs[k, 1] == -1)
                                    break;

                                int comp1 = pairs[k, 0];
                                int comp2 = pairs[k, 1];

                                double bc = b1[comp1].Prob + c1[comp2].Prob;

                                // create sibling pair
                                // create parent pair: S->T and S->(start,T)
                                bc += prodProbSt + prodProbSst;

                                FeatureVector fvFin = pf.Cat(prodFvSt, prodFvSst);
                                if (m_pipe.Labeled)
                                {
                                    bc += ntProbS01 + ntProbT00;
                                    fvFin = FeatureVector.Cat(ntFvS01, FeatureVector.Cat(ntFvT00, fvFin));
                                }

                                pf.Add(s, s, t, type1, 0, 1, bc, fvFin, b1[comp1], c1[comp2]);
                            }
                        }

                        // case when R == T
                        b1 = pf.GetItems(s, t - 1, 0, 0);
                        c1 = pf.GetItems(t, t, 1, 0);
                        if (!(b1 == null || c1 == null))
                        {
                            FeatureVector prodFvStt = pf.Cat(fvsTrips[t, t, s], fvsSibs[t, s, 0]);
                            double prodProbStt = probsTrips[t, t, s] + probsSibs[t, s, 0];

                            int[,] pairs = pf.GetKBestPairs(b1, c1);

                            for (int k = 0; k < K; k++)
                            {
                                if (pairs[k, 0] == -1 || pairs[k, 1] == -1)
                                    break;

                                int comp1 = pairs[k, 0];
                                int comp2 = pairs[k, 1];

                                double bc = b1[comp1].Prob + c1[comp2].Prob;

                                // create sibling pair
                                // create parent pair: S->T and S->(start,T)
                                bc += prodProbTs + prodProbStt;

                                FeatureVector fvFin = pf.Cat(prodFvTs, prodFvStt);
                                if (m_pipe.Labeled)
                                {
                                    bc += ntProbT11 + ntProbS10;
                                    fvFin = FeatureVector.Cat(ntFvT11, FeatureVector.Cat(ntFvS10, fvFin));
                                }

                                pf.Add(s, t, t, type2, 1, 1, bc, fvFin, b1[comp1], c1[comp2]);
                            }
                        }
                    }

                    for (int r = s; r < t; r++)
                    {
                        // First case - create sibling
                        ParseForestItem[] b1 = pf.GetItems(s, r, 0, 0);
                        ParseForestItem[] c1 = pf.GetItems(r + 1, t, 1, 0);

                        if (!(b1 == null || c1 == null))
                        {
                            int[,] pairs = pf.GetKBestPairs(b1, c1);

                            for (int k = 0; k < K; k++)
                            {
                                if (pairs[k, 0] == -1 || pairs[k, 1] == -1)
                                    break;

                                int comp1 = pairs[k, 0];
                                int comp2 = pairs[k, 1];

                                double bc = b1[comp1].Prob + c1[comp2].Prob;

                                pf.Add(s, r, t, -1, 0, 2, bc, new FeatureVector(), b1[comp1], c1[comp2]);
                                pf.Add(s, r, t, -1, 1, 2, bc, new FeatureVector(), b1[comp1], c1[comp2]);
                            }
                        }
                    }

                    for (int r = s + 1; r < t; r++)
                    {
                        // S -> (R,T)
                        ParseForestItem[] b1 = pf.GetItems(s, r, 0, 1);
                        ParseForestItem[] c1 = pf.GetItems(r, t, 0, 2);

                        if (!(b1 == null || c1 == null))
                        {
                            int[,] pairs = pf.GetKBestPairs(b1, c1);

                            for (int k = 0; k < K; k++)
                            {
                                if (pairs[k, 0] == -1 || pairs[k, 1] == -1)
                                    break;

                                int comp1 = pairs[k, 0];
                                int comp2 = pairs[k, 1];

                                double bc = b1[comp1].Prob + c1[comp2].Prob;

                                bc += prodProbSt + probsTrips[s, r, t] + probsSibs[r, t, 1];
                                FeatureVector fv_fin = pf.Cat(prodFvSt, pf.Cat(fvsTrips[s, r, t], fvsSibs[r, t, 1]));

                                if (m_pipe.Labeled)
                                {
                                    bc += ntProbS01 + ntProbT00;
                                    fv_fin = FeatureVector.Cat(ntFvS01, FeatureVector.Cat(ntFvT00, fv_fin));
                                }

                                pf.Add(s, r, t, type1, 0, 1, bc, fv_fin, b1[comp1], c1[comp2]);
                            }
                        }

                        // T -> (R,S)
                        b1 = pf.GetItems(s, r, 1, 2);
                        c1 = pf.GetItems(r, t, 1, 1);

                        if (!(b1 == null || c1 == null))
                        {
                            int[,] pairs = pf.GetKBestPairs(b1, c1);

                            for (int k = 0; k < K; k++)
                            {
                                if (pairs[k, 0] == -1 || pairs[k, 1] == -1)
                                    break;

                                int comp1 = pairs[k, 0];
                                int comp2 = pairs[k, 1];

                                double bc = b1[comp1].Prob + c1[comp2].Prob;

                                bc += prodProbTs + probsTrips[t, r, s] + probsSibs[r, s, 1];

                                FeatureVector fvFin = pf.Cat(prodFvTs, pf.Cat(fvsTrips[t, r, s], fvsSibs[r, s, 1]));
                                if (m_pipe.Labeled)
                                {
                                    bc += ntProbT11 + ntProbS10;
                                    fvFin = FeatureVector.Cat(ntFvT11, FeatureVector.Cat(ntFvS10, fvFin));
                                }

                                pf.Add(s, r, t, type2, 1, 1, bc, fvFin, b1[comp1], c1[comp2]);
                            }
                        }
                    }

                    // Finish off pieces: Incomplete + Complete -> Complete
                    for (int r = s; r <= t; r++)
                    {
                        if (r != s)
                        {
                            ParseForestItem[] b1 = pf.GetItems(s, r, 0, 1);
                            ParseForestItem[] c1 = pf.GetItems(r, t, 0, 0);

                            if (!(b1 == null || c1 == null))
                            {
                                //continue;

                                int[,] pairs = pf.GetKBestPairs(b1, c1);
                                for (int k = 0; k < K; k++)
                                {
                                    if (pairs[k, 0] == -1 || pairs[k, 1] == -1)
                                        break;

                                    int comp1 = pairs[k, 0];
                                    int comp2 = pairs[k, 1];

                                    double bc = b1[comp1].Prob + c1[comp2].Prob;

                                    if (
                                        !pf.Add(s, r, t, -1, 0, 0, bc, new FeatureVector(), b1[comp1],
                                                c1[comp2]))
                                        break;
                                }
                            }
                        }

                        if (r != t)
                        {
                            ParseForestItem[] b1 = pf.GetItems(s, r, 1, 0);
                            ParseForestItem[] c1 = pf.GetItems(r, t, 1, 1);

                            if (!(b1 == null || c1 == null))
                            {
                                //continue;

                                int[,] pairs = pf.GetKBestPairs(b1, c1);
                                for (int k = 0; k < K; k++)
                                {
                                    if (pairs[k, 0] == -1 || pairs[k, 1] == -1)
                                        break;

                                    int comp1 = pairs[k, 0];
                                    int comp2 = pairs[k, 1];

                                    double bc = b1[comp1].Prob + c1[comp2].Prob;

                                    if (
                                        !pf.Add(s, r, t, -1, 1, 0, bc, new FeatureVector(), b1[comp1],
                                                c1[comp2]))
                                        break;
                                }
                            }
                        }
                    }
                }
            }

            return pf.GetBestParses();
        }
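A brief usage sketch of how the K-best output might be consumed; the decoder variable and the per-row layout (a FeatureVector in column 0 and a "head|index:label" parse string in column 1) are assumptions, consistent with decodeNonProjective and UpdateParamsMIRA shown later on this page:

        object[,] d = decoder.DecodeProjective(inst, fvs, probs, fvsTrips, probsTrips,
                                               fvsSibs, probsSibs, ntFvs, ntProbs, K);
        for (int k = 0; k < d.GetLength(0) && d[k, 0] != null; k++)
        {
            var parseFv   = (FeatureVector)d[k, 0];   // features of the k-th best parse
            var parseTree = (string)d[k, 1];          // "head|index:label" encoding
            Console.WriteLine(k + ": " + parseTree + " (" + parseFv.FVector.Count + " features)");
        }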
        public virtual FeatureVector CreateFeatureVector(string[] toks,
                                                 string[] pos,
                                                 string[] posA,
                                                 int small,
                                                 int large,
                                                 bool attR,
                                                 FeatureVector fv)
        {
            string att = "";
            att = attR ? "RA" : "LA";

            int dist = Math.Abs(large - small);
            string distBool = "0";
            if (dist > 10)
                distBool = "10";
            else if (dist > 5)
                distBool = "5";
            else if (dist > 4)
                distBool = "4";
            else if (dist > 3)
                distBool = "3";
            else if (dist > 2)
                distBool = "2";
            else if (dist > 1)
                distBool = "1";

            string attDist = "&" + att + "&" + distBool;

            string pLeft = small > 0 ? pos[small - 1] : "STR";
            string pRight = large < pos.Length - 1 ? pos[large + 1] : "END";
            string pLeftRight = small < large - 1 ? pos[small + 1] : "MID";
            string pRightLeft = large > small + 1 ? pos[large - 1] : "MID";
            string pLeftA = small > 0 ? posA[small - 1] : "STR";
            string pRightA = large < pos.Length - 1 ? posA[large + 1] : "END";
            string pLeftRightA = small < large - 1 ? posA[small + 1] : "MID";
            string pRightLeftA = large > small + 1 ? posA[large - 1] : "MID";

            // feature posR posMid posL
            for (int i = small + 1; i < large; i++)
            {
                string allPos = pos[small] + " " + pos[i] + " " + pos[large];
                string allPosA = posA[small] + " " + posA[i] + " " + posA[large];
                Add("PC=" + allPos + attDist, 1.0, fv);
                Add("1PC=" + allPos, 1.0, fv);
                Add("XPC=" + allPosA + attDist, 1.0, fv);
                Add("X1PC=" + allPosA, 1.0, fv);
            }

            // feature posL-1 posL posR posR+1
            Add("PT=" + pLeft + " " + pos[small] + " " + pos[large] + " " + pRight + attDist, 1.0, fv);
            Add("PT1=" + pos[small] + " " + pos[large] + " " + pRight + attDist, 1.0, fv);
            Add("PT2=" + pLeft + " " + pos[small] + " " + pos[large] + attDist, 1.0, fv);
            Add("PT3=" + pLeft + " " + pos[large] + " " + pRight + attDist, 1.0, fv);
            Add("PT4=" + pLeft + " " + pos[small] + " " + pRight + attDist, 1.0, fv);

            Add("1PT=" + pLeft + " " + pos[small] + " " + pos[large] + " " + pRight, 1.0, fv);
            Add("1PT1=" + pos[small] + " " + pos[large] + " " + pRight, 1.0, fv);
            Add("1PT2=" + pLeft + " " + pos[small] + " " + pos[large], 1.0, fv);
            Add("1PT3=" + pLeft + " " + pos[large] + " " + pRight, 1.0, fv);
            Add("1PT4=" + pLeft + " " + pos[small] + " " + pRight, 1.0, fv);

            Add("XPT=" + pLeftA + " " + posA[small] + " " + posA[large] + " " + pRightA + attDist, 1.0, fv);
            Add("XPT1=" + posA[small] + " " + posA[large] + " " + pRightA + attDist, 1.0, fv);
            Add("XPT2=" + pLeftA + " " + posA[small] + " " + posA[large] + attDist, 1.0, fv);
            Add("XPT3=" + pLeftA + " " + posA[large] + " " + pRightA + attDist, 1.0, fv);
            Add("XPT4=" + pLeftA + " " + posA[small] + " " + pRightA + attDist, 1.0, fv);

            Add("X1PT=" + pLeftA + " " + posA[small] + " " + posA[large] + " " + pRightA, 1.0, fv);
            Add("X1PT1=" + posA[small] + " " + posA[large] + " " + pRightA, 1.0, fv);
            Add("X1PT2=" + pLeftA + " " + posA[small] + " " + posA[large], 1.0, fv);
            Add("X1PT3=" + pLeftA + " " + posA[large] + " " + pRightA, 1.0, fv);
            Add("X1PT4=" + pLeftA + " " + posA[small] + " " + pRightA, 1.0, fv);

            // feature posL posL+1 posR-1 posR
            Add("APT=" + pos[small] + " " + pLeftRight + " "
                + pRightLeft + " " + pos[large] + attDist, 1.0, fv);
            Add("APT1=" + pos[small] + " " + pRightLeft + " " + pos[large] + attDist, 1.0, fv);
            Add("APT2=" + pos[small] + " " + pLeftRight + " " + pos[large] + attDist, 1.0, fv);
            Add("APT3=" + pLeftRight + " " + pRightLeft + " " + pos[large] + attDist, 1.0, fv);
            Add("APT4=" + pos[small] + " " + pLeftRight + " " + pRightLeft + attDist, 1.0, fv);

            Add("1APT=" + pos[small] + " " + pLeftRight + " "
                + pRightLeft + " " + pos[large], 1.0, fv);
            Add("1APT1=" + pos[small] + " " + pRightLeft + " " + pos[large], 1.0, fv);
            Add("1APT2=" + pos[small] + " " + pLeftRight + " " + pos[large], 1.0, fv);
            Add("1APT3=" + pLeftRight + " " + pRightLeft + " " + pos[large], 1.0, fv);
            Add("1APT4=" + pos[small] + " " + pLeftRight + " " + pRightLeft, 1.0, fv);

            Add("XAPT=" + posA[small] + " " + pLeftRightA + " "
                + pRightLeftA + " " + posA[large] + attDist, 1.0, fv);
            Add("XAPT1=" + posA[small] + " " + pRightLeftA + " " + posA[large] + attDist, 1.0, fv);
            Add("XAPT2=" + posA[small] + " " + pLeftRightA + " " + posA[large] + attDist, 1.0, fv);
            Add("XAPT3=" + pLeftRightA + " " + pRightLeftA + " " + posA[large] + attDist, 1.0, fv);
            Add("XAPT4=" + posA[small] + " " + pLeftRightA + " " + pRightLeftA + attDist, 1.0, fv);

            Add("X1APT=" + posA[small] + " " + pLeftRightA + " "
                + pRightLeftA + " " + posA[large], 1.0, fv);
            Add("X1APT1=" + posA[small] + " " + pRightLeftA + " " + posA[large], 1.0, fv);
            Add("X1APT2=" + posA[small] + " " + pLeftRightA + " " + posA[large], 1.0, fv);
            Add("X1APT3=" + pLeftRightA + " " + pRightLeftA + " " + posA[large], 1.0, fv);
            Add("X1APT4=" + posA[small] + " " + pLeftRightA + " " + pRightLeftA, 1.0, fv);

            // feature posL-1 posL posR-1 posR
            // feature posL posL+1 posR posR+1
            Add("BPT=" + pLeft + " " + pos[small] + " " + pRightLeft + " " + pos[large] + attDist, 1.0, fv);
            Add("1BPT=" + pLeft + " " + pos[small] + " " + pRightLeft + " " + pos[large], 1.0, fv);
            Add("CPT=" + pos[small] + " " + pLeftRight + " " + pos[large] + " " + pRight + attDist, 1.0, fv);
            Add("1CPT=" + pos[small] + " " + pLeftRight + " " + pos[large] + " " + pRight, 1.0, fv);

            Add("XBPT=" + pLeftA + " " + posA[small] + " " + pRightLeftA + " " + posA[large] + attDist, 1.0, fv);
            Add("X1BPT=" + pLeftA + " " + posA[small] + " " + pRightLeftA + " " + posA[large], 1.0, fv);
            Add("XCPT=" + posA[small] + " " + pLeftRightA + " " + posA[large] + " " + pRightA + attDist, 1.0, fv);
            Add("X1CPT=" + posA[small] + " " + pLeftRightA + " " + posA[large] + " " + pRightA, 1.0, fv);

            string head = attR ? toks[small] : toks[large];
            string headP = attR ? pos[small] : pos[large];
            string child = attR ? toks[large] : toks[small];
            string childP = attR ? pos[large] : pos[small];

            string all = head + " " + headP + " " + child + " " + childP;
            string hPos = headP + " " + child + " " + childP;
            string cPos = head + " " + headP + " " + childP;
            string hP = headP + " " + child;
            string cP = head + " " + childP;
            string oPos = headP + " " + childP;
            string oLex = head + " " + child;

            Add("A=" + all + attDist, 1.0, fv); //this
            Add("B=" + hPos + attDist, 1.0, fv);
            Add("C=" + cPos + attDist, 1.0, fv);
            Add("D=" + hP + attDist, 1.0, fv);
            Add("E=" + cP + attDist, 1.0, fv);
            Add("F=" + oLex + attDist, 1.0, fv); //this
            Add("G=" + oPos + attDist, 1.0, fv);
            Add("H=" + head + " " + headP + attDist, 1.0, fv);
            Add("I=" + headP + attDist, 1.0, fv);
            Add("J=" + head + attDist, 1.0, fv); //this
            Add("K=" + child + " " + childP + attDist, 1.0, fv);
            Add("L=" + childP + attDist, 1.0, fv);
            Add("M=" + child + attDist, 1.0, fv); //this

            Add("AA=" + all, 1.0, fv); //this
            Add("BB=" + hPos, 1.0, fv);
            Add("CC=" + cPos, 1.0, fv);
            Add("DD=" + hP, 1.0, fv);
            Add("EE=" + cP, 1.0, fv);
            Add("FF=" + oLex, 1.0, fv); //this
            Add("GG=" + oPos, 1.0, fv);
            Add("HH=" + head + " " + headP, 1.0, fv);
            Add("II=" + headP, 1.0, fv);
            Add("JJ=" + head, 1.0, fv); //this
            Add("KK=" + child + " " + childP, 1.0, fv);
            Add("LL=" + childP, 1.0, fv);
            Add("MM=" + child, 1.0, fv); //this

            if (head.Length > 5 || child.Length > 5)
            {
                int hL = head.Length;
                int cL = child.Length;

                head = hL > 5 ? head.SubstringWithIndex(0, 5) : head;
                child = cL > 5 ? child.SubstringWithIndex(0, 5) : child;

                all = head + " " + headP + " " + child + " " + childP;
                hPos = headP + " " + child + " " + childP;
                cPos = head + " " + headP + " " + childP;
                hP = headP + " " + child;
                cP = head + " " + childP;
                oPos = headP + " " + childP;
                oLex = head + " " + child;

                Add("SA=" + all + attDist, 1.0, fv); //this
                Add("SF=" + oLex + attDist, 1.0, fv); //this
                Add("SAA=" + all, 1.0, fv); //this
                Add("SFF=" + oLex, 1.0, fv); //this

                if (cL > 5)
                {
                    Add("SB=" + hPos + attDist, 1.0, fv);
                    Add("SD=" + hP + attDist, 1.0, fv);
                    Add("SK=" + child + " " + childP + attDist, 1.0, fv);
                    Add("SM=" + child + attDist, 1.0, fv); //this
                    Add("SBB=" + hPos, 1.0, fv);
                    Add("SDD=" + hP, 1.0, fv);
                    Add("SKK=" + child + " " + childP, 1.0, fv);
                    Add("SMM=" + child, 1.0, fv); //this
                }
                if (hL > 5)
                {
                    Add("SC=" + cPos + attDist, 1.0, fv);
                    Add("SE=" + cP + attDist, 1.0, fv);
                    Add("SH=" + head + " " + headP + attDist, 1.0, fv);
                    Add("SJ=" + head + attDist, 1.0, fv); //this

                    Add("SCC=" + cPos, 1.0, fv);
                    Add("SEE=" + cP, 1.0, fv);
                    Add("SHH=" + head + " " + headP, 1.0, fv);
                    Add("SJJ=" + head, 1.0, fv); //this
                }
            }

            return fv;
        }
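All of the feature templates above go through an Add(string, double, FeatureVector) helper that this example does not show. A minimal sketch of it, assuming the pipe holds a dataAlphabet member that maps feature strings to integer indices (the lookup type and its handling of unseen features are assumptions, not the original implementation):

        public void Add(string feat, double val, FeatureVector fv)
        {
            // Hypothetical alphabet lookup; assumed to return -1 for strings that
            // are not in the alphabet once it has stopped growing.
            int index = dataAlphabet.LookupIndex(feat);
            if (index >= 0)
                AddNewFeature(index, val, fv);   // prepends new Feature(index, val) to fv.FVector
        }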
Example #32
 public static void Write_KK(IEngineConf engineConf, FeatureVector fv, string fvDirectory)
 {
     File.WriteAllText(Path.Combine(fvDirectory, engineConf.GetResourceBasename("Fv01KKInFvDir")), Format_FeatureVector_KK.Format_KK(fv));
 }
 public TrainingExample(FeatureVector features, ClassMark expected)
 {
     Features = features;
     Expected = expected;
 }
        private static void TestLinearRegressionUsingCrossValidation(FeatureVector training, FeatureVector test)
        {
            CrossValidator      cv          = new CrossValidator(new LinearRegression(), new BinaryClassificationEvaluator(), 10);
            CrossValidatorModel cvModel     = (CrossValidatorModel)cv.Fit(training);
            FeatureVector       predictions = cvModel.transform(test);

            PrintPredictionsAndEvaluate(predictions);
        }
 public FeatureVector Cat(FeatureVector fv1, FeatureVector fv2)
 {
     return FeatureVector.Cat(fv1, fv2);
 }
        // Static type for each edge: runtime O(n^3 + T*n^2), where T is the number of types
        public object[,] DecodeProjective(DependencyInstance inst,
                                          FeatureVector[,,] fvs,
                                          double[,,] probs,
                                          FeatureVector[,,,] ntFvs,
                                          double[,,,] ntProbs, int K)
        {
            string[] toks = inst.Sentence;
            string[] pos = inst.POS;

            int[,] staticTypes = null;
            if (m_pipe.Labeled)
            {
                staticTypes = GetTypes(ntProbs, toks.Length);
            }

            var pf = new KBestParseForest(0, toks.Length - 1, inst, K);

            for (int s = 0; s < toks.Length; s++)
            {
                pf.Add(s, -1, 0, 0.0, new FeatureVector());
                pf.Add(s, -1, 1, 0.0, new FeatureVector());
            }

            for (int j = 1; j < toks.Length; j++)
            {
                for (int s = 0; s < toks.Length && s + j < toks.Length; s++)
                {
                    int t = s + j;

                    FeatureVector prodFvSt = fvs[s, t, 0];
                    FeatureVector prodFvTs = fvs[s, t, 1];
                    double prodProbSt = probs[s, t, 0];
                    double prodProbTs = probs[s, t, 1];

                    int type1 = m_pipe.Labeled ? staticTypes[s, t] : 0;
                    int type2 = m_pipe.Labeled ? staticTypes[t, s] : 0;

                    FeatureVector ntFvS01 = ntFvs[s, type1, 0, 1];
                    FeatureVector ntFvS10 = ntFvs[s, type2, 1, 0];
                    FeatureVector ntFvT00 = ntFvs[t, type1, 0, 0];
                    FeatureVector ntFvT11 = ntFvs[t, type2, 1, 1];
                    double ntProbS01 = ntProbs[s, type1, 0, 1];
                    double ntProbS10 = ntProbs[s, type2, 1, 0];
                    double ntProbT00 = ntProbs[t, type1, 0, 0];
                    double ntProbT11 = ntProbs[t, type2, 1, 1];

                    for (int r = s; r <= t; r++)
                    {
                        if (r != t)
                        {
                            ParseForestItem[] b1 = pf.GetItems(s, r, 0, 0);
                            ParseForestItem[] c1 = pf.GetItems(r + 1, t, 1, 0);

                            if (b1 != null && c1 != null)
                            {
                                int[,] pairs = pf.GetKBestPairs(b1, c1);
                                for (int k = 0; k < pairs.GetLength(0); k++)
                                {
                                    if (pairs[k, 0] == -1 || pairs[k, 1] == -1)
                                        break;

                                    int comp1 = pairs[k, 0];
                                    int comp2 = pairs[k, 1];

                                    double bc = b1[comp1].Prob + c1[comp2].Prob;

                                    double probFin = bc + prodProbSt;
                                    FeatureVector fv_fin = prodFvSt;
                                    if (m_pipe.Labeled)
                                    {
                                        fv_fin = FeatureVector.Cat(ntFvS01, FeatureVector.Cat(ntFvT00, fv_fin));
                                        probFin += ntProbS01 + ntProbT00;
                                    }
                                    pf.Add(s, r, t, type1, 0, 1, probFin, fv_fin, b1[comp1], c1[comp2]);

                                    probFin = bc + prodProbTs;
                                    fv_fin = prodFvTs;
                                    if (m_pipe.Labeled)
                                    {
                                        fv_fin = FeatureVector.Cat(ntFvT11, FeatureVector.Cat(ntFvS10, fv_fin));
                                        probFin += ntProbT11 + ntProbS10;
                                    }
                                    pf.Add(s, r, t, type2, 1, 1, probFin, fv_fin, b1[comp1], c1[comp2]);
                                }
                            }
                        }
                    }

                    for (int r = s; r <= t; r++)
                    {
                        if (r != s)
                        {
                            ParseForestItem[] b1 = pf.GetItems(s, r, 0, 1);
                            ParseForestItem[] c1 = pf.GetItems(r, t, 0, 0);
                            if (b1 != null && c1 != null)
                            {
                                int[,] pairs = pf.GetKBestPairs(b1, c1);
                                for (int k = 0; k < pairs.GetLength(0); k++)
                                {
                                    if (pairs[k, 0] == -1 || pairs[k, 1] == -1)
                                        break;

                                    int comp1 = pairs[k, 0];
                                    int comp2 = pairs[k, 1];

                                    double bc = b1[comp1].Prob + c1[comp2].Prob;

                                    if (!pf.Add(s, r, t, -1, 0, 0, bc,
                                                new FeatureVector(),
                                                b1[comp1], c1[comp2]))
                                    {
                                        break;
                                    }
                                }
                            }
                        }

                        if (r != t)
                        {
                            ParseForestItem[] b1 = pf.GetItems(s, r, 1, 0);
                            ParseForestItem[] c1 = pf.GetItems(r, t, 1, 1);
                            if (b1 != null && c1 != null)
                            {
                                int[,] pairs = pf.GetKBestPairs(b1, c1);
                                for (int k = 0; k < pairs.GetLength(0); k++)
                                {
                                    if (pairs[k, 0] == -1 || pairs[k, 1] == -1)
                                        break;

                                    int comp1 = pairs[k, 0];
                                    int comp2 = pairs[k, 1];

                                    double bc = b1[comp1].Prob + c1[comp2].Prob;

                                    if (!pf.Add(s, r, t, -1, 1, 0, bc,
                                                new FeatureVector(), b1[comp1], c1[comp2]))
                                        break;
                                }
                            }
                        }
                    }
                }
            }

            return pf.GetBestParses();
        }
        public FeatureVector CreateFeatureVectorSib(string[] toks,
                                                    string[] pos,
                                                    int ch1, int ch2,
                                                    bool isST,
                                                    FeatureVector fv)
        {
            // ch1 is always the child closest to the parent
            string dir = ch1 > ch2 ? "RA" : "LA";

            string ch1POS = isST ? "STPOS" : pos[ch1];
            string ch2POS = pos[ch2];
            string ch1Word = isST ? "STWRD" : toks[ch1];
            string ch2Word = toks[ch2];

            Add("CH_PAIR=" + ch1POS + "_" + ch2POS + "_" + dir, 1.0, fv);
            Add("CH_WPAIR=" + ch1Word + "_" + ch2Word + "_" + dir, 1.0, fv);
            Add("CH_WPAIRA=" + ch1Word + "_" + ch2POS + "_" + dir, 1.0, fv);
            Add("CH_WPAIRB=" + ch1POS + "_" + ch2Word + "_" + dir, 1.0, fv);
            Add("ACH_PAIR=" + ch1POS + "_" + ch2POS, 1.0, fv);
            Add("ACH_WPAIR=" + ch1Word + "_" + ch2Word, 1.0, fv);
            Add("ACH_WPAIRA=" + ch1Word + "_" + ch2POS, 1.0, fv);
            Add("ACH_WPAIRB=" + ch1POS + "_" + ch2Word, 1.0, fv);

            int dist = Math.Max(ch1, ch2) - Math.Min(ch1, ch2);
            string distBool = "0";
            if (dist > 1)
                distBool = "1";
            if (dist > 2)
                distBool = "2";
            if (dist > 3)
                distBool = "3";
            if (dist > 4)
                distBool = "4";
            if (dist > 5)
                distBool = "5";
            if (dist > 10)
                distBool = "10";
            Add("SIB_PAIR_DIST=" + distBool + "_" + dir, 1.0, fv);
            Add("ASIB_PAIR_DIST=" + distBool, 1.0, fv);
            Add("CH_PAIR_DIST=" + ch1POS + "_" + ch2POS + "_" + distBool + "_" + dir, 1.0, fv);
            Add("ACH_PAIR_DIST=" + ch1POS + "_" + ch2POS + "_" + distBool, 1.0, fv);

            return fv;
        }
        public object[,] decodeNonProjective(DependencyInstance inst,
                                             FeatureVector[,,] fvs,
                                             double[,,] probs,
                                             FeatureVector[,,,] nt_fvs,
                                             double[,,,] nt_probs, int K)
        {
            string[] pos = inst.POS;

            int numWords = inst.Sentence.Length;
            var oldI = new int[numWords,numWords];
            var oldO = new int[numWords,numWords];
            var scoreMatrix = new double[numWords,numWords];
            var orig_scoreMatrix = new double[numWords,numWords];
            var curr_nodes = new bool[numWords];
            var reps = new Dictionary<int, int>[numWords];

            int[,] static_types = null;
            if (m_pipe.Labeled)
            {
                static_types = GetTypes(nt_probs, pos.Length);
            }

            for (int i = 0; i < numWords; i++)
            {
                curr_nodes[i] = true;
                reps[i] = new Dictionary<int, int>();
                reps[i].Add(i, 0);
                for (int j = 0; j < numWords; j++)
                {
                    // score of edge (i,j) i --> j
                    scoreMatrix[i, j] = probs[i < j ? i : j, i < j ? j : i, i < j ? 0 : 1]
                                        + (m_pipe.Labeled
                                               ? nt_probs[i, static_types[i, j], i < j ? 0 : 1, 1]
                                                 + nt_probs[j, static_types[i, j], i < j ? 0 : 1, 0]
                                               : 0.0);
                    orig_scoreMatrix[i, j] = probs[i < j ? i : j, i < j ? j : i, i < j ? 0 : 1]
                                             + (m_pipe.Labeled
                                                    ? nt_probs[i, static_types[i, j], i < j ? 0 : 1, 1]
                                                      + nt_probs[j, static_types[i, j], i < j ? 0 : 1, 0]
                                                    : 0.0);
                    oldI[i, j] = i;
                    oldO[i, j] = j;

                    if (i == j || j == 0)
                        continue; // no self loops of i --> 0
                }
            }

            Dictionary<int, int> final_edges = chuLiuEdmonds(scoreMatrix, curr_nodes, oldI, oldO, false,
                                                             new Dictionary<int, int>(), reps);
            var par = new int[numWords];
            int[] ns = final_edges.Keys.ToArray();
            for (int i = 0; i < ns.Length; i++)
            {
                int ch = ns[i];
                int pr = final_edges[ns[i]];
                par[ch] = pr;
            }

            int[] n_par = getKChanges(par, orig_scoreMatrix, Math.Min(K, par.Length));
            int new_k = 1;
            for (int i = 0; i < n_par.Length; i++)
                if (n_par[i] > -1) new_k++;

            // Create Feature Vectors;
            var fin_par = new int[new_k,numWords];
            int fin_parFirstLen = new_k;
            int fin_par_secondLen = numWords;
            var fin_fv = new FeatureVector[new_k,numWords];
            int len = fin_par.GetLength(1);
            for (int i = 0; i < len; i++)
            {
                fin_par[0, i] = par[i];
            }
            int c = 1;
            for (int i = 0; i < n_par.Length; i++)
            {
                if (n_par[i] > -1)
                {
                    var t_par = new int[par.Length];
                    for (int j = 0; j < t_par.Length; j++)
                        t_par[j] = par[j];
                    t_par[i] = n_par[i];
                    len = t_par.Length;
                    for (int ct = 0; ct < len; ct++)
                    {
                        fin_par[c, ct] = t_par[ct];
                    }
                    c++;
                }
            }
            for (int k = 0; k < fin_parFirstLen; k++)
            {
                for (int i = 0; i < fin_par_secondLen; i++)
                {
                    int ch = i;
                    int pr = fin_par[k, i];
                    if (pr != -1)
                    {
                        fin_fv[k, ch] = fvs[ch < pr ? ch : pr, ch < pr ? pr : ch, ch < pr ? 1 : 0];
                        if (m_pipe.Labeled)
                        {
                            fin_fv[k, ch] = FeatureVector.Cat(fin_fv[k, ch],
                                      nt_fvs[ch, static_types[pr, ch], ch < pr ? 1 : 0, 0]);
                            fin_fv[k, ch] = FeatureVector.Cat(fin_fv[k, ch],
                                      nt_fvs[pr, static_types[pr, ch], ch < pr ? 1 : 0, 1]);
                        }
                    }
                    else
                    {
                        fin_fv[k, ch] = new FeatureVector();
                    }
                }
            }

            var fin = new FeatureVector[new_k];
            var result = new string[new_k];
            for (int k = 0; k < fin.Length; k++)
            {
                fin[k] = new FeatureVector();
                for (int i = 1; i < fin_fv.GetLength(1); i++) // second dimension is numWords
                    fin[k] = FeatureVector.Cat(fin_fv[k, i], fin[k]);
                result[k] = "";
                for (int i = 1; i < par.Length; i++)
                    result[k] += fin_par[k, i] + "|" + i + (m_pipe.Labeled ? ":" + static_types[fin_par[k, i], i] : ":0") + " ";
            }

            // Package the K-best results as (feature vector, parse string) pairs.
            var d = new object[new_k,2];

            for (int k = 0; k < new_k; k++)
            {
                d[k, 0] = fin[k];
                d[k, 1] = result[k].Trim();
            }

            return d;
        }
 internal static HandleRef getCPtr(FeatureVector obj) {
   return (obj == null) ? new HandleRef(null, IntPtr.Zero) : obj.swigCPtr;
 }
Example #40
 public LearningDataImpl(IEngineConf engineConf)
 {
     EngineConf = engineConf;
     this.Fv    = new FeatureVectorImpl();
 }
Example #41
        /// <summary>
        /// Saves the feature vector (FV).
        /// </summary>
        /// <param name="uc_Main"></param>
        public static void Do_Save(Uc_Main uc_Main)
        {
            FeatureVector fv = uc_Main.LearningData.Fv;


            // Path of the fv folder specified in the file chooser
            string fvFolderPath = Path.GetDirectoryName(uc_Main.TxtFvFilepath.Text);

            // Path of the Data folder specified in the file chooser (the parent of the fv folder)
            string dataFolderPath = Directory.GetParent(fvFolderPath).FullName;

            //----------------------------------------
            // Timestamp
            //----------------------------------------
            string ymd;
            string hms;

            {
                DateTime dt = DateTime.Now;

                // Year, month, day
                {
                    StringBuilder sb = new StringBuilder();
                    sb.Append(dt.Year);
                    sb.Append("-");
                    sb.Append(dt.Month);
                    sb.Append("-");
                    sb.Append(dt.Day);
                    ymd = sb.ToString();
                    uc_Main.TxtAutosaveYMD.Text = ymd;
                }

                // Hour, minute, second
                {
                    StringBuilder sb = new StringBuilder();
                    sb.Append(dt.Hour);
                    sb.Append("-");
                    sb.Append(dt.Minute);
                    sb.Append("-");
                    sb.Append(dt.Second);
                    hms = sb.ToString();
                    uc_Main.TxtAutosaveHMS.Text = hms;
                }
            }

            //----------------------------------------
            // Backup
            //----------------------------------------
            //
            // If this fails, continue without making a backup
            //
            {
                // Whether the backup failed
                bool backup_failure = false;

                // Rename the existing fv folder
                try
                {
                    string srcPath = Path.Combine(dataFolderPath, "fv");
                    string dstPath = Path.Combine(dataFolderPath, $"fv_{ymd}_{hms}");

                    Directory.Move(srcPath, dstPath);
                }
                catch (IOException)
                {
                    // This fails if the folder is open in Windows File Explorer, for example.
                    backup_failure = true;
                }

                if (!backup_failure)
                {
                    // Create a fresh fv folder
                    Directory.CreateDirectory(fvFolderPath);
                }
            }

            //----------------------------------------
            // Clamp to the -999..999 range
            //----------------------------------------
            Util_LearnFunctions.FvParamRange_PP(uc_Main.LearningData.Fv); // Automatically coerced into -999..999 (* bairitu).


            // Material values (komawari)
            File.WriteAllText(uc_Main.TxtFvFilepath.Text, Format_FeatureVector_Komawari.Format_Text(fv));
            // Scale
            Util_FeatureVectorOutput.Write_Scale(uc_Main.EngineConf, fv, fvFolderPath);
            // KK
            Util_FeatureVectorOutput.Write_KK(uc_Main.EngineConf, fv, fvFolderPath);
            // 1pKP, 2pKP
            Util_FeatureVectorOutput.Write_KP(uc_Main.EngineConf, fv, fvFolderPath);
            // PP: on the board
            Util_FeatureVectorOutput.Write_PP_Banjo(uc_Main.EngineConf, fv, fvFolderPath);
            // PP: 19 pieces in hand
            Util_FeatureVectorOutput.Write_PP_19Mai(uc_Main.EngineConf, fv, fvFolderPath);
            // PP: 5 pieces in hand and 3 pieces in hand
            Util_FeatureVectorOutput.Write_PP_5Mai(uc_Main.EngineConf, fv, fvFolderPath);
            Util_FeatureVectorOutput.Write_PP_3Mai(uc_Main.EngineConf, fv, fvFolderPath);
        }
Example #42
 public State tick(ref PlayerAction action, FeatureVector vector, StateController controller)
 {
     action = PlayerAction.Prepare;
     return(nextState);
 }
        private void TrainingIter(DependencyInstance[] il, string trainfile, string train_forest, int iter)
        {
            int numUpd = 0;
            var in_ = new BinaryReader(new FileStream(train_forest, FileMode.Open));
            bool evaluateI = true;

            for (int i = 0; i < il.Length; i++)
            {
                if ((i + 1)%100 == 0)
                    Console.WriteLine("  " + (i + 1) + " instances");

                DependencyInstance inst = il[i];

                int length = inst.Length;

                // Gather per-edge feature vectors and scores.
                var fvs = new FeatureVector[length,length,2];
                var probs = new double[length,length,2];
                var ntFvs = new FeatureVector[length,m_pipe.Types.Length,2,2];
                var ntProbs = new double[length,m_pipe.Types.Length,2,2];
                var fvsTrips = new FeatureVector[length,length,length];
                var probsTrips = new double[length,length,length];
                var fvsSibs = new FeatureVector[length,length,2];
                var probsSibs = new double[length,length,2];

                if (SecondOrder)
                    inst = ((DependencyPipe2O) m_pipe).GetFeatureVector(in_, inst, fvs, probs,
                                                                      fvsTrips, probsTrips,
                                                                      fvsSibs, probsSibs,
                                                                      ntFvs, ntProbs, m_params);
                else
                    inst = m_pipe.ReadFeatureVector(in_, inst, fvs, probs, ntFvs, ntProbs, m_params);

                var upd = (double) (NumIters*il.Length - (il.Length*(iter - 1) + (i + 1)) + 1);
                int K = TrainK;
                object[,] d = null;
                if (DecodeType==ProjectiveTypes.Projective)
                {
                    if (SecondOrder)
                        d = ((DependencyDecoder2O) m_decoder).DecodeProjective(inst, fvs, probs,
                                                                             fvsTrips, probsTrips,
                                                                             fvsSibs, probsSibs,
                                                                             ntFvs, ntProbs, K);
                    else
                        d = m_decoder.DecodeProjective(inst, fvs, probs, ntFvs, ntProbs, K);
                }
                if (DecodeType==ProjectiveTypes.NonProjective)
                {
                    if (SecondOrder)
                        d = ((DependencyDecoder2O) m_decoder).DecodeNonProjective(inst, fvs, probs,
                                                                                fvsTrips, probsTrips,
                                                                                fvsSibs, probsSibs,
                                                                                ntFvs, ntProbs, K);
                    else
                        d = m_decoder.decodeNonProjective(inst, fvs, probs, ntFvs, ntProbs, K);
                }
                m_params.UpdateParamsMIRA(inst, d, upd);
            }
            Console.WriteLine("");

            Console.WriteLine("  " + il.Length + " instances");

            in_.Close();
        }
        public virtual FeatureVector CreateFeatureVector(string[] toks,
                                                 string[] pos,
                                                 string[] posA,
                                                 int word,
                                                 string type,
                                                 bool attR,
                                                 bool childFeatures,
                                                 FeatureVector fv)
        {
            if (!Labeled) return fv;

            string att = "";
            if (attR)
                att = "RA";
            else
                att = "LA";

            att += "&" + childFeatures;

            string w = toks[word];
            string wP = pos[word];

            string wPm1 = word > 0 ? pos[word - 1] : "STR";
            string wPp1 = word < pos.Length - 1 ? pos[word + 1] : "END";

            Add("NTS1=" + type + "&" + att, 1.0, fv);
            Add("ANTS1=" + type, 1.0, fv);
            for (int i = 0; i < 2; i++)
            {
                string suff = i < 1 ? "&" + att : "";
                suff = "&" + type + suff;

                Add("NTH=" + w + " " + wP + suff, 1.0, fv);
                Add("NTI=" + wP + suff, 1.0, fv);
                Add("NTIA=" + wPm1 + " " + wP + suff, 1.0, fv);
                Add("NTIB=" + wP + " " + wPp1 + suff, 1.0, fv);
                Add("NTIC=" + wPm1 + " " + wP + " " + wPp1 + suff, 1.0, fv);
                Add("NTJ=" + w + suff, 1.0, fv); //this
            }

            return fv;
        }
Example #45
        public void UpdateParamsMIRA(DependencyInstance inst, object[,] d, double upd)
        {
            string actParseTree = inst.ActParseTree;
            FeatureVector actFV = inst.Fv;

            int K = 0;
            for (int i = 0; i < d.GetLength(0) && d[i, 0] != null; i++)
            {
                K = i + 1;
            }

            var b = new double[K];
            var lamDist = new double[K];
            var dist = new FeatureVector[K];

            for (int k = 0; k < K; k++)
            {
                lamDist[k] = GetScore(actFV)
                              - GetScore((FeatureVector) d[k, 0]);
                b[k] = NumErrors(inst, (string) d[k, 1], actParseTree);
                b[k] -= lamDist[k];
                dist[k] = FeatureVector.GetDistVector(actFV, (FeatureVector) d[k, 0]);
            }

            double[] alpha = hildreth(dist, b);

            FeatureVector fv = null;
            int res = 0;
            for (int k = 0; k < K; k++)
            {
                fv = dist[k];
                foreach (Feature feature in fv.FVector)
                {
                    if (feature.Index < 0)
                        continue;
                    parameters[feature.Index] += alpha[k]*feature.Value;
                    Total[feature.Index] += upd*alpha[k]*feature.Value;
                }
            }
        }
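hildreth() solves the small quadratic program that chooses the MIRA update weights alpha. As an illustration only, here is a minimal sketch of the degenerate single-constraint case (K == 1), where the closed form is alpha = max(0, b / ||dist||^2); the real solver iterates over all K constraints, and the helper name below is hypothetical:

        private static double[] HildrethSingleConstraint(FeatureVector[] dist, double[] b)
        {
            // Squared L2 norm of the difference vector. The same index can occur more
            // than once in the feature list, so accumulate per index before squaring.
            var sums = new Dictionary<int, double>();
            foreach (Feature f in dist[0].FVector)
            {
                if (f.Index < 0)
                    continue;
                sums[f.Index] = sums.TryGetValue(f.Index, out double v) ? v + f.Value : f.Value;
            }

            double norm = 0.0;
            foreach (double x in sums.Values)
                norm += x * x;

            double alpha = norm <= 0.0 ? 0.0 : Math.Max(0.0, b[0] / norm);
            return new[] { alpha };
        }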
        public virtual FeatureVector CreateFeatureVector(string[] toks,
                                                 string[] pos,
                                                 string[] labs,
                                                 int[] deps)
        {
            var posA = new string[pos.Length];
            for (int i = 0; i < pos.Length; i++)
            {
                posA[i] = pos[i].SubstringWithIndex(0, 1);
            }

            var fv = new FeatureVector();
            for (int i = 0; i < toks.Length; i++)
            {
                if (deps[i] == -1)
                    continue;
                int small = i < deps[i] ? i : deps[i];
                int large = i > deps[i] ? i : deps[i];
                bool attR = i < deps[i] ? false : true;
                fv = CreateFeatureVector(toks, pos, posA, small, large, attR, fv);
                if (Labeled)
                {
                    fv = CreateFeatureVector(toks, pos, posA, i, labs[i], attR, true, fv);
                    fv = CreateFeatureVector(toks, pos, posA, deps[i], labs[i], attR, false, fv);
                }
            }
            return fv;
        }
Example #47
 /// <summary>
 /// Normalises a single feature vector.
 /// </summary>
 /// <param name="vector">The feature vector.</param>
 /// <returns>The normalised feature vector.</returns>
 public static FeatureVector NormaliseVector(FeatureVector vector)
 {
     return(FeatureNormaliser.Instance.NormaliseVector(vector));
 }
        private static double CalculateAccuracy(List <int> indicators, int mlAlgorithm, bool isCrossValidationEnabled, int minRowCount, double trainingSetPercentage, double[] smaOut, double[] wmaOut, double[] emaOut, double[] macdOut, double[] rsiOut, double[] williamsROut, double[] stochasticsOut, double[] closesOut)
        {
            FeatureVector vector = new FeatureVector();

            if (indicators.Contains(IndicatorService.SMA))
            {
                vector.AddColumn("SMA", smaOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (indicators.Contains(IndicatorService.WMA))
            {
                vector.AddColumn("WMA", wmaOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (indicators.Contains(IndicatorService.EMA))
            {
                vector.AddColumn("EMA", emaOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (indicators.Contains(IndicatorService.MACD))
            {
                vector.AddColumn("MACD", macdOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (indicators.Contains(IndicatorService.RSI))
            {
                vector.AddColumn("RSI", rsiOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (indicators.Contains(IndicatorService.WilliamsR))
            {
                vector.AddColumn("WilliamsR", williamsROut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            if (indicators.Contains(IndicatorService.Stochastics))
            {
                vector.AddColumn("Stochastics", stochasticsOut.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());
            }
            vector.AddColumn("label", closesOut.Select(p => (object)string.Format("{0:0.0}", p).ToString(CultureInfo.InvariantCulture)).Take(minRowCount).ToArray());

            new CSVExporter(vector).Export("c:\\users\\yasin\\indicatorOutput.csv");
            int count            = vector.Values[0].Length;
            int trainingRowCount = (int)(count * trainingSetPercentage);

            // Chronological split: the first trainingSetPercentage of rows go to training ...
            FeatureVector training = new FeatureVector();

            for (int i = 0; i < vector.ColumnName.Count; i++)
            {
                training.AddColumn(vector.ColumnName[i], vector.Values[i].Take(trainingRowCount).ToArray());
            }

            // ... and the remaining rows form the test set.
            FeatureVector test = new FeatureVector();

            for (int i = 0; i < vector.ColumnName.Count; i++)
            {
                test.AddColumn(vector.ColumnName[i], vector.Values[i].Skip(trainingRowCount).ToArray());
            }

            double accuracy = 0;

            // Each branch fits the selected estimator (optionally under 10-fold
            // cross-validation) and reads the accuracy off the shared evaluator.
            if (mlAlgorithm == MLAService.LIN_REG)
            {
                var linReg = new LinearRegression();
                var bce    = new BinaryClassificationEvaluator();
                if (isCrossValidationEnabled)
                {
                    var cv          = new CrossValidator(linReg, bce, 10);
                    var cvModel     = (CrossValidatorModel)cv.Fit(training);
                    var predictions = cvModel.transform(test);
                    bce.evaluate(predictions);
                    accuracy = bce.Accuracy;
                }
                else
                {
                    var linRegModel = (LinearRegressionModel)linReg.Fit(training);
                    var predictions = linRegModel.transform(test);
                    bce.evaluate(predictions);
                    accuracy = bce.Accuracy;
                }
            }
            else if (mlAlgorithm == MLAService.LOG_REG)
            {
                var logReg = new LogisticRegression();
                var bce    = new BinaryClassificationEvaluator();
                if (isCrossValidationEnabled)
                {
                    var cv          = new CrossValidator(logReg, bce, 10);
                    var cvModel     = (CrossValidatorModel)cv.Fit(training);
                    var predictions = cvModel.transform(test);
                    bce.evaluate(predictions);
                    accuracy = bce.Accuracy;
                }
                else
                {
                    var logRegModel = (LogisticRegressionModel)logReg.Fit(training);
                    var predictions = logRegModel.transform(test);
                    bce.evaluate(predictions);
                    accuracy = bce.Accuracy;
                }
            }
            else if (mlAlgorithm == MLAService.NAI_BAY)
            {
                var naiBay = new NaiveBayes();
                var bce    = new BinaryClassificationEvaluator();
                if (isCrossValidationEnabled)
                {
                    var cv          = new CrossValidator(naiBay, bce, 10);
                    var cvModel     = (CrossValidatorModel)cv.Fit(training);
                    var predictions = cvModel.transform(test);
                    bce.evaluate(predictions);
                    accuracy = bce.Accuracy;
                }
                else
                {
                    var naiBayModel = (NaiveBayesModel)naiBay.Fit(training);
                    var predictions = naiBayModel.transform(test);
                    bce.evaluate(predictions);
                    accuracy = bce.Accuracy;
                }
            }
            return(accuracy);
        }
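A hedged call-site sketch for CalculateAccuracy: the price and indicator arrays are invented, and only SMA and RSI are selected so the remaining indicator arrays can be passed empty (their branches are skipped). Note that the method itself writes a CSV to a hardcoded, machine-specific path.

    // Illustrative data: five closing prices with matching indicator outputs.
    double[] closes = { 10.0, 10.2, 10.1, 10.4, 10.3 };
    double[] sma    = { 10.0, 10.1, 10.1, 10.2, 10.3 };
    double[] rsi    = { 50.0, 55.0, 48.0, 60.0, 57.0 };
    double[] none   = new double[0];

    var indicators = new List<int> { IndicatorService.SMA, IndicatorService.RSI };

    double accuracy = CalculateAccuracy(indicators, MLAService.LIN_REG,
                                        isCrossValidationEnabled: false, minRowCount: 5,
                                        trainingSetPercentage: 0.8,
                                        smaOut: sma, wmaOut: none, emaOut: none, macdOut: none,
                                        rsiOut: rsi, williamsROut: none, stochasticsOut: none,
                                        closesOut: closes);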