public bool CategorizePayments(int customerID)
        {
            // Trains a Naive Bayes model on the customer's already-categorized
            // operations and assigns a predicted tag to each uncategorized one.
            // Returns false when there is no categorized data to learn from.
            NaiveBayesModel<Operation> model = new NaiveBayesModel<Operation>();

            var operations = _operationServices.GetOperationsByCustomerID(customerID);

            // Materialize both queries once: the original deferred LINQ queries
            // would be re-evaluated (and potentially re-hit the data source) on
            // every enumeration below.
            var notCategorized = operations.Where(x => x.Tag == null || x.Tag.Name == "NotSet").ToList();
            var categorized    = operations.Where(x => x.Tag != null && x.Tag.Name != "NotSet").ToList();

            // Nothing to train on — predicting would be meaningless (and
            // Generate may fail on an empty training set).
            if (categorized.Count == 0)
            {
                return false;
            }

            var predictor = model.Generate(categorized);

            foreach (var operation in notCategorized)
            {
                var newOperation = predictor.Predict(operation);

                // The original dereferenced newOperation.Tag unconditionally and
                // would throw a NullReferenceException if the predictor produced
                // no tag for this operation.
                if (newOperation == null || newOperation.Tag == null)
                {
                    continue;
                }

                Debug.Write(newOperation.Tag.Name);

                // One transaction per update so a single failure does not roll
                // back previously categorized operations.
                using (TransactionScope scope = new TransactionScope())
                {
                    _repository.Update<Operation>(newOperation);
                    scope.Complete();
                }
            }

            return true;
        }
Example #2
0
        public bool CategorizePayments(int customerID)
        {
            // Trains a Naive Bayes model on the customer's already-categorized
            // operations and assigns a predicted tag to each uncategorized one.
            // Returns false when there is no categorized data to learn from.
            NaiveBayesModel<Operation> model = new NaiveBayesModel<Operation>();

            var operations = _operationServices.GetOperationsByCustomerID(customerID);

            // Materialize once: the deferred queries would otherwise be
            // re-evaluated on every enumeration below.
            var notCategorized = operations.Where(x => x.Tag == null || x.Tag.Name == "NotSet").ToList();
            var categorized = operations.Where(x => x.Tag != null && x.Tag.Name != "NotSet").ToList();

            // No training data — bail out instead of generating a useless model.
            if (categorized.Count == 0)
            {
                return false;
            }

            var predictor = model.Generate(categorized);

            foreach (var operation in notCategorized)
            {
                var newOperation = predictor.Predict(operation);

                // Guard against a prediction without a tag; the original would
                // throw a NullReferenceException on the Debug.Write line.
                if (newOperation == null || newOperation.Tag == null)
                {
                    continue;
                }

                Debug.Write(newOperation.Tag.Name);
                using (TransactionScope scope = new TransactionScope())
                {
                    _repository.Update<Operation>(newOperation);
                    scope.Complete();
                }
            }

            return true;
        }
        private static void TestNaiveBayes(FeatureVector training, FeatureVector test)
        {
            // Fit a Naive Bayes classifier on the training vector, score the
            // test vector and report/evaluate the resulting predictions.
            var classifier = new NaiveBayes();
            var fittedModel = (NaiveBayesModel)classifier.Fit(training);

            PrintPredictionsAndEvaluate(fittedModel.transform(test));
        }
Example #4
0
        public void NaiveBayes_Main_Test()
        {
            // End-to-end check: train a Naive Bayes model on sample payment
            // data and classify a grocery-store purchase.
            var data = Payment.GetData();

            NaiveBayesModel<Payment> model = new NaiveBayesModel<Payment>();
            var predictor = model.Generate(data);
            var item = predictor.Predict(new Payment { Amount = 110, Description = "Monop try it" });

            // MSTest convention: expected value first, actual second — the
            // original call had them reversed, which garbles failure messages.
            Assert.AreEqual("Household", item.Category);
        }
        public static string FindSentimentType(int sentimentCount, bool isRetweet, string language, List <TweetClassification> tweetClassificationList)
        {
            // Train a Naive Bayes classifier over the supplied tweet history,
            // then predict the sentiment type for a tweet with these features.
            var bayes      = new NaiveBayesModel<TweetClassification>();
            var classifier = bayes.Generate(tweetClassificationList);

            var sample = new TweetClassification
            {
                SentimentCount = sentimentCount,
                Language       = language,
                IsRetweet      = isRetweet
            };

            return classifier.Predict(sample).SentimentType;
        }
Example #6
0
        public void NaiveBayes_Main_Test()
        {
            // End-to-end check: train a Naive Bayes model on sample payment
            // data and classify a grocery-store purchase.
            var data = Payment.GetData();

            NaiveBayesModel<Payment> model = new NaiveBayesModel<Payment>();
            var predictor = model.Generate(data);
            var item = predictor.Predict(new Payment { Amount = 110, Description = "Monop try it" });

            // MSTest convention: expected value first, actual second — the
            // original call had them reversed, which garbles failure messages.
            Assert.AreEqual("Household", item.Category);
        }
Example #7
0
        /**
         * <summary> Training algorithm for Naive Bayes algorithm with a discrete data set.</summary>
         * <param name="priorDistribution">Probability distribution of classes P(C_i)</param>
         * <param name="classLists">Instances are divided into K lists, where each list contains only instances from a single class</param>
         */
        private void TrainDiscreteVersion(DiscreteDistribution priorDistribution, Partition classLists)
        {
            // Per-class attribute distributions, keyed by class label.
            var distributionsByClass = new Dictionary<string, List<DiscreteDistribution>>();

            for (var index = 0; index < classLists.Size(); index++)
            {
                var classList = classLists.Get(index);
                var label     = ((InstanceListOfSameClass)classList).GetClassLabel();

                distributionsByClass[label] = classList.AllAttributesDistribution();
            }

            model = new NaiveBayesModel(priorDistribution, distributionsByClass);
        }
Example #8
0
        /**
         * <summary> Training algorithm for Naive Bayes algorithm with a continuous data set.</summary>
         *
         * <param name="priorDistribution">Probability distribution of classes P(C_i)</param>
         * <param name="classLists">       Instances are divided into K lists, where each list contains only instances from a single class</param>
         */
        private void TrainContinuousVersion(DiscreteDistribution priorDistribution, Partition classLists)
        {
            // Per-class mean and standard-deviation vectors, keyed by class label.
            var meansByClass      = new Dictionary<string, Vector>();
            var deviationsByClass = new Dictionary<string, Vector>();

            for (var index = 0; index < classLists.Size(); index++)
            {
                var classList = classLists.Get(index);
                var label     = ((InstanceListOfSameClass)classList).GetClassLabel();

                meansByClass[label]      = classList.Average().ToVector();
                deviationsByClass[label] = classList.StandardDeviation().ToVector();
            }

            model = new NaiveBayesModel(priorDistribution, meansByClass, deviationsByClass);
        }
Example #9
0
        public void NaiveBayesPredictor_Serialization_Test()
        {
            // Round-trips the trained predictor through XML serialization and
            // verifies the reloaded predictor still classifies correctly.
            var data = Payment.GetData();

            NaiveBayesModel<Payment> model = new NaiveBayesModel<Payment>();
            var predictor = model.Generate(data);

            XmlSerializer ser = new XmlSerializer(predictor.GetType());

            using (var stream = new MemoryStream())
            {
                ser.Serialize(stream, predictor);

                // Rewind so Load reads from the start of the buffer.
                stream.Position = 0;

                var newPredictor = model.Load(stream);
                var item = newPredictor.Predict(new Payment { Amount = 110, Description = "Monop try it" });

                // MSTest convention: expected value first, actual second — the
                // original call had the arguments reversed.
                Assert.AreEqual("Household", item.Category);
            }
        }
Example #10
0
        public void NaiveBayesPredictor_Serialization_Test()
        {
            // Round-trips the trained predictor through XML serialization and
            // verifies the reloaded predictor still classifies correctly.
            var data = Payment.GetData();

            NaiveBayesModel <Payment> model = new NaiveBayesModel <Payment>();
            var predictor = model.Generate(data);

            XmlSerializer ser = new XmlSerializer(predictor.GetType());

            using (var stream = new MemoryStream())
            {
                ser.Serialize(stream, predictor);

                // Rewind so Load reads from the start of the buffer.
                stream.Position = 0;

                var newPredictor = model.Load(stream);
                var item         = newPredictor.Predict(new Payment {
                    Amount = 110, Description = "Monop try it"
                });

                // MSTest convention: expected value first, actual second — the
                // original call had the arguments reversed.
                Assert.AreEqual("Household", item.Category);
            }
        }
Example #11
0
        static void Main(string[] args)
        {
            // Interactive console loop: collects yes/no answers describing a
            // user's sign-in issue and predicts the most likely root cause with
            // a Naive Bayes model.
            Console.WriteLine("ICM PREDICTION MODEL");
            Console.WriteLine("=============================================");

            Console.WriteLine("Preparing model...");
            IUserDataProvider inMemDataProvider = new InMemoryUserDataProvider();
            IDataPointFactory dataPointFactory  = new DataPointFactory(inMemDataProvider);
            IModel            naiveBayesModel   = new NaiveBayesModel();

            ModelRunner runner = new ModelRunner(naiveBayesModel, dataPointFactory);

            runner.Prepare();

            Console.WriteLine("Model prepared");

            do
            {
                var userData = new UserData();

                Console.WriteLine("Enter Issue details: ");

                // The seven questions below all followed the same copy-pasted
                // prompt/ReadKey/convert/echo pattern; AskYesNo factors it out
                // with identical console output.
                userData.IsPartner           = AskYesNo("\tIs it a partner user (y/n): ");
                userData.ProfileExistsInAAD  = AskYesNo("\tDoes user have profile in AAD (y/n): ");
                userData.ProfileExistyInOP   = AskYesNo("\tDoes user have profile in One Profile (y/n): ");
                userData.ClaimsExists        = AskYesNo("\tDoes claims exists for user (y/n): ");
                userData.LoginExpiredMessage = AskYesNo("\tIs the Login Expired Windows appearing continously for the user (y/n): ");
                userData.ResultCodeZero      = AskYesNo("\tDoes application insights have a lot of result code 0 logged for the user (y/n): ");
                userData.AADSTS525Issue      = AskYesNo("\tIs the error AADSTS525 logged in Application Insights for the user (y/n): ");

                Console.WriteLine("Is this correct (y/n)?");
                Console.WriteLine(JsonConvert.SerializeObject(userData));
                ConsoleKeyInfo key = Console.ReadKey();
                if (!ConvertKeyToBool(key))
                {
                    continue;
                }

                Console.WriteLine();
                Console.WriteLine("Predicting issue...");
                string issue = runner.PredictIssue(userData);
                Console.WriteLine($"PREDICTED ISSUE: {issue}");

                Console.WriteLine("=============================================");

                Console.WriteLine("Do you want to continue");
                key = Console.ReadKey();
                if (!ConvertKeyToBool(key))
                {
                    break;
                }
            } while (true);
        }

        /// <summary>
        /// Writes <paramref name="prompt"/>, reads a single key, echoes a
        /// newline, and converts the key to a boolean via ConvertKeyToBool.
        /// </summary>
        private static bool AskYesNo(string prompt)
        {
            Console.Write(prompt);
            ConsoleKeyInfo key = Console.ReadKey();
            Console.WriteLine();
            return ConvertKeyToBool(key);
        }
Example #12
0
        private void buttonForDataSplitNext_Click(object sender, EventArgs e)
        {
            // Builds a feature vector from the user-selected technical
            // indicators, splits it into training/test portions and evaluates
            // three models (linear regression, logistic regression, naive
            // Bayes), using k-fold cross validation when numFolds > 0.
            trainingSetPercentage = (double)numericUpDownForTrainingSetPercent.Value / 100.0;
            numFolds = (int)numericUpDownForNumFolds.Value;

            double[] smaOut         = null;
            double[] wmaOut         = null;
            double[] emaOut         = null;
            double[] macdOut        = null;
            double[] stochasticsOut = null;
            double[] williamsROut   = null;
            double[] rsiOut         = null;

            var data = IndicatorService.GetData(code, targetDate, new string[] { "Tarih", "Kapanis" }, numberOfData + 1);

            if (isSMAChecked)
            {
                smaOut = IndicatorDataPreprocessor.GetSMAOut(MovingAverage.Simple(code, targetDate, smaPeriod, numberOfData));
            }
            if (isWMAChecked)
            {
                wmaOut = IndicatorDataPreprocessor.GetWMAOut(MovingAverage.Weighted(code, targetDate, wmaPeriod, numberOfData));
            }
            if (isEMAChecked)
            {
                emaOut = IndicatorDataPreprocessor.GetEMAOut(MovingAverage.Exponential(code, targetDate, emaPeriod, numberOfData));
            }
            if (isMACDChecked)
            {
                macdOut = IndicatorDataPreprocessor.GetMACDOut(new MovingAverageConvergenceDivergence(code, targetDate, firstPeriod, secondPeriod, triggerPeriod, numberOfData));
            }
            if (isStochasticsChecked)
            {
                stochasticsOut = IndicatorDataPreprocessor.GetStochasticsOut(new Stochastics(code, targetDate, fastKPeriod, fastDPeriod, slowDPeriod, numberOfData));
            }
            if (isWilliamsRChecked)
            {
                williamsROut = IndicatorDataPreprocessor.GetWilliamsROut(WilliamsR.Wsr(code, targetDate, williamsRPeriod, numberOfData));
            }
            if (isRSIChecked)
            {
                rsiOut = IndicatorDataPreprocessor.GetRSIOut(RelativeStrengthIndex.Rsi(code, targetDate, rsiPeriod, numberOfData));
            }
            double[] closesOut = IndicatorDataPreprocessor.GetClosesOut(numberOfData, data);

            // Every column must be truncated to a common length so the feature
            // vector stays rectangular; take the shortest produced series.
            // (Replaces the original eight copy-pasted null/min checks.)
            int minRowCount = 1000000;
            foreach (var series in new[] { smaOut, wmaOut, emaOut, macdOut, rsiOut, williamsROut, stochasticsOut, closesOut })
            {
                if (series != null && series.Length < minRowCount)
                {
                    minRowCount = series.Length;
                }
            }

            var fv = new FeatureVector();

            if (isSMAChecked)
            {
                AddSeriesColumn(fv, "SMA", smaOut, minRowCount);
            }
            if (isWMAChecked)
            {
                AddSeriesColumn(fv, "WMA", wmaOut, minRowCount);
            }
            if (isEMAChecked)
            {
                AddSeriesColumn(fv, "EMA", emaOut, minRowCount);
            }
            if (isMACDChecked)
            {
                AddSeriesColumn(fv, "MACD", macdOut, minRowCount);
            }
            if (isRSIChecked)
            {
                AddSeriesColumn(fv, "RSI", rsiOut, minRowCount);
            }
            if (isWilliamsRChecked)
            {
                AddSeriesColumn(fv, "WilliamsR", williamsROut, minRowCount);
            }
            if (isStochasticsChecked)
            {
                AddSeriesColumn(fv, "Stochastics", stochasticsOut, minRowCount);
            }
            // BUG FIX: the original formatted the label with the *current*
            // culture (string.Format without a provider) and then called
            // ToString(InvariantCulture) on the already-built string, which is
            // a no-op. Format with the invariant culture directly so the
            // decimal separator is stable regardless of OS locale.
            fv.AddColumn("label", closesOut.Select(p => (object)string.Format(CultureInfo.InvariantCulture, "{0:0.0}", p)).Take(minRowCount).ToArray());

            var training = new FeatureVector();
            var test     = new FeatureVector();
            int count    = fv.Values[0].Length;

            for (int i = 0; i < fv.ColumnName.Count; i++)
            {
                training.AddColumn(fv.ColumnName[i], fv.Values[i].Take((int)(count * trainingSetPercentage)).ToArray());
            }

            for (int i = 0; i < fv.ColumnName.Count; i++)
            {
                // Take(count) after Skip takes all remaining rows (fewer than
                // count are left), i.e. the test set is the tail of the data.
                test.AddColumn(fv.ColumnName[i], fv.Values[i].Skip((int)(count * trainingSetPercentage)).Take(count).ToArray());
            }

            if (numFolds > 0)
            {
                // Cross-validated evaluation of each model.
                BinaryClassificationEvaluator bce1    = new BinaryClassificationEvaluator();
                LinearRegression    linearRegression  = new LinearRegression();
                CrossValidator      cvLinReg          = new CrossValidator(linearRegression, bce1, numFolds);
                CrossValidatorModel cvLinRegModel     = (CrossValidatorModel)cvLinReg.Fit(training);
                FeatureVector       linRegPredictions = cvLinRegModel.transform(test);
                bce1.evaluate(linRegPredictions);
                linRegAcc = bce1.Accuracy;

                BinaryClassificationEvaluator bce2 = new BinaryClassificationEvaluator();
                LogisticRegression            logisticRegression = new LogisticRegression();
                CrossValidator      cvLogReg          = new CrossValidator(logisticRegression, bce2, numFolds);
                CrossValidatorModel cvLogRegModel     = (CrossValidatorModel)cvLogReg.Fit(training);
                FeatureVector       logRegPredictions = cvLogRegModel.transform(test);
                bce2.evaluate(logRegPredictions);
                logRegAcc = bce2.Accuracy;

                BinaryClassificationEvaluator bce3    = new BinaryClassificationEvaluator();
                NaiveBayes          naiveBayes        = new NaiveBayes();
                CrossValidator      cvNaiBay          = new CrossValidator(naiveBayes, bce3, numFolds);
                CrossValidatorModel cvNaiBayModel     = (CrossValidatorModel)cvNaiBay.Fit(training);
                FeatureVector       naiBayPredictions = cvNaiBayModel.transform(test);
                bce3.evaluate(naiBayPredictions);
                naiBayAcc = bce3.Accuracy;
            }
            else
            {
                // Single train/test evaluation of each model.
                BinaryClassificationEvaluator bce1          = new BinaryClassificationEvaluator();
                LinearRegression      linearRegression      = new LinearRegression();
                LinearRegressionModel linearRegressionModel = (LinearRegressionModel)linearRegression.Fit(training);
                FeatureVector         linRegPredictions     = linearRegressionModel.transform(test);
                bce1.evaluate(linRegPredictions);
                linRegAcc = bce1.Accuracy;

                BinaryClassificationEvaluator bce2 = new BinaryClassificationEvaluator();
                LogisticRegression            logisticRegression      = new LogisticRegression();
                LogisticRegressionModel       logisticRegressionModel = (LogisticRegressionModel)logisticRegression.Fit(training);
                FeatureVector logRegPredictions = logisticRegressionModel.transform(test);
                bce2.evaluate(logRegPredictions);
                logRegAcc = bce2.Accuracy;

                BinaryClassificationEvaluator bce3 = new BinaryClassificationEvaluator();
                NaiveBayes      naiveBayes         = new NaiveBayes();
                NaiveBayesModel naiveBayesModel    = (NaiveBayesModel)naiveBayes.Fit(training);
                FeatureVector   naiBayPredictions  = naiveBayesModel.transform(test);
                bce3.evaluate(naiBayPredictions);
                naiBayAcc = bce3.Accuracy;
            }

            labelForLinRegAcc.Text = linRegAcc.ToString();
            labelForLogRegAcc.Text = logRegAcc.ToString();
            labelForNaiBayAcc.Text = naiBayAcc.ToString();

            panelForResults.BringToFront();
        }

        // Formats one indicator series with the invariant culture and appends
        // it to the feature vector, truncated to rowCount rows.
        private static void AddSeriesColumn(FeatureVector fv, string name, double[] series, int rowCount)
        {
            fv.AddColumn(name, series.Select(p => (object)p.ToString(CultureInfo.InvariantCulture)).Take(rowCount).ToArray());
        }