// Trains a 3-cluster KMeans model over per-chain speed-change features,
// persists it as the anomaly-severity classifier, and plots the clusters.
static void trainSeverityModel()
{
    MLService service = new MLService();
    var chain = service.GetAllSegments_SegmentChain7_AnomalySpreaded_OnlyANomaly();

    // Features: max/min speed change of each anomalous segment chain.
    double[][] inputs = chain.Select(sc => new double[] { sc.MaxSpeedChange.Value, sc.MinSpeedChange.Value }).ToArray();

    // FIX: the KMeans declaration had been commented out while
    // kmeans.Learn(inputs) below was still referenced — a compile error.
    // The declaration is restored here.
    KMeans kmeans = new KMeans(k: 3);

    // Compute and retrieve the data centroids, then persist the model.
    var clusters = kmeans.Learn(inputs);
    Serializer.Save(clusters, AnomalyClassifierBySeverityModelFileName);

    // Use the centroids to partition all the data.
    int[] labels = clusters.Decide(inputs);

    // Per-cluster membership lists — kept for inspecting cluster balance
    // in the debugger; not otherwise consumed.
    var zeros = labels.Where(l => l == 0).ToList();
    var ones = labels.Where(l => l == 1).ToList();
    var twos = labels.Where(l => l == 2).ToList();

    ScatterplotBox.Show("Severity Clusters", inputs, labels);
}
// Wires up the ML service, the observable input collection, and the two
// async commands; both commands are disabled while a request is in flight.
public MLViewModel()
{
    mLService = new MLService();
    _input1 = new ObservableCollection<Input1>();

    Func<bool> canExecute = () => !IsBusy;
    EnviarCommand = new Command(async () => await ExecuteEnviarCommand(), canExecute);
    ServicoCommand = new Command(async () => await ExecuteServicoCommand(), canExecute);
}
// Walks every image under SourceFolder (one sub-directory per category),
// queries the cognitive service for emotions/faces/objects, and persists an
// aggregated ImageInfoDTO per image that has at least one detected emotion.
private void ComputeAllCharacteristicsFromFolder()
{
    if (!VerifyWebConfigVariables())
    {
        return;
    }

    var emotionMapper = new MLMapper();
    var sourceFolder = ConfigurationManager.AppSettings["SourceFolder"].ToString();
    var saveFolder = ConfigurationManager.AppSettings["SaveFolder"].ToString();
    string[] dirs = Directory.GetDirectories(sourceFolder);

    foreach (var dir in dirs)
    {
        string[] files = Directory.GetFiles(dir);

        // Bounded parallelism: one MLService per worker body, throttled to 4
        // concurrent calls against the remote API.
        Parallel.ForEach(files, new ParallelOptions { MaxDegreeOfParallelism = 4 }, (file) =>
        {
            var mlService = new MLService(new ServiceProxyCognitiveAzure());
            // Category is taken from the image's parent folder name.
            string lastFolderName = Path.GetFileName(Path.GetDirectoryName(file));
            string fileName = Path.GetFileName(file);
            Console.WriteLine(string.Format("Processing {0}", file));

            byte[] imgdata = System.IO.File.ReadAllBytes(file);
            FaceEmotionDTO[] emotionDTOs = mlService.GetEmotionsFromImage(imgdata);

            // FIX: use the array's Length property instead of LINQ Count().
            if (emotionDTOs != null && emotionDTOs.Length > 0)
            {
                FaceInfoDTO[] faceInfo = mlService.GetFacesFromImage(imgdata);
                ObjectInfoDTO objectsDTOs = mlService.GetObjectsFromImage(imgdata);

                // Average each emotion score across all detected faces,
                // treating faces with no score object as zero.
                var imageinfo = new ImageInfoDTO()
                {
                    Emotions = emotionDTOs,
                    Faces = faceInfo,
                    Objects = objectsDTOs,
                    Category = lastFolderName,
                    ImageUrl = fileName,
                    Anger = emotionDTOs.Average(x => x.Scores != null ? x.Scores.Anger : 0),
                    Contempt = emotionDTOs.Average(x => x.Scores != null ? x.Scores.Contempt : 0),
                    Disgust = emotionDTOs.Average(x => x.Scores != null ? x.Scores.Disgust : 0),
                    Fear = emotionDTOs.Average(x => x.Scores != null ? x.Scores.Fear : 0),
                    Happiness = emotionDTOs.Average(x => x.Scores != null ? x.Scores.Happiness : 0),
                    Neutral = emotionDTOs.Average(x => x.Scores != null ? x.Scores.Neutral : 0),
                    Sadness = emotionDTOs.Average(x => x.Scores != null ? x.Scores.Sadness : 0),
                    Surprise = emotionDTOs.Average(x => x.Scores != null ? x.Scores.Surprise : 0)
                };

                // FIX: removed the always-true "imageinfo != null" guard —
                // an object-initializer expression can never yield null.
                _emotionService.AddEmotion(imageinfo);

                // Pause between requests — presumably to stay under the
                // cognitive API rate limit (TODO confirm _pauseParameter intent).
                Thread.Sleep(_pauseParameter);
            }
        });
    }
}
// Verifies that a positive phrase sent through SentimentFromLink yields a
// positive prediction.
public async System.Threading.Tasks.Task SentimentFromLink_PositiveSentiment_1()
{
    // Arrange
    MLService mL = new MLService(new System.Net.Http.HttpClient());

    // Act
    // FIX: await the call instead of blocking on .Result — blocking wraps
    // failures in AggregateException and risks deadlock; the sibling tests
    // in this file are already async.
    JObject sentiment = await mL.SentimentFromLink("it is good");

    // Assert
    // FIX: Assert.AreEqual takes (expected, actual) — the original had them
    // reversed and compared a raw JToken against an int. Compare the token's
    // boolean value, matching SentimentFromLink_PositiveSentiment_True.
    Assert.AreEqual(true, (bool)sentiment["prediction"], sentiment.ToString());
}
// Verifies that a clearly negative phrase is classified as not positive.
public async System.Threading.Tasks.Task SentimentFromtext_NegativeSentiment_False()
{
    // Arrange
    var service = new MLService(new System.Net.Http.HttpClient());
    const string negativeText = "it is not good";

    // Act
    var sentiment = await service.SentimentFromText(negativeText);

    // Assert — include the full response in the failure message for triage.
    bool prediction = (bool)sentiment["prediction"];
    Assert.AreEqual(false, prediction, sentiment.ToString());
}
// Verifies that a clearly positive phrase is classified as positive.
public async System.Threading.Tasks.Task SentimentFromLink_PositiveSentiment_True()
{
    // Arrange
    var service = new MLService(new System.Net.Http.HttpClient());
    const string positiveText = "it is good";

    // Act
    var sentiment = await service.SentimentFromLink(positiveText);

    // Assert — include the full response in the failure message for triage.
    bool prediction = (bool)sentiment["prediction"];
    Assert.AreEqual(true, prediction, sentiment.ToString());
}
// Cross-validates a decision tree over the 13 aggregate chain features,
// restricted to chains that actually contain an anomaly.
static void trainAnomalyDetecorDTModel()
{
    var service = new MLService();
    var chain = service.GetAllSegments_SegmentChain7_AnomalySpreaded_OnlyANomaly().ToList();

    // Feature vector: speed extremes/range, time aggregates, and the
    // speed/time variance aggregates, in the order expected downstream.
    double[][] inputs = chain
        .Select(sc => new double[]
        {
            sc.MaxSpeed.Value, sc.MinSpeed.Value, sc.SpeedRange.Value,
            sc.TotalTime.Value, sc.MinTime.Value, sc.MaxTime.Value, sc.TimeRange.Value,
            sc.MaxSpeedVar.Value, sc.MinSpeedVar.Value, sc.SpeedVarRange.Value,
            sc.MaxTimeVar.Value, sc.MinTimeVar.Value, sc.TimeVarRange.Value
        })
        .ToArray();

    // Labels: the anomaly type of each chain.
    int[] outputs = chain.Select(sc => sc.AnomalyType).ToArray();

    DecisionTree_crossValidation(inputs, outputs);
    Console.ReadKey();
}
// Verifies that a batch containing a positive phrase yields a positive prediction.
public async System.Threading.Tasks.Task SentimentFromMultiple_PositiveSentiment_True()
{
    // Arrange
    MLService mL = new MLService(new System.Net.Http.HttpClient());
    // FIX: the list holds a positive phrase but was named "negativeTexts";
    // renamed to match its content.
    List<string> positiveTexts = new List<string>() { "it is good" };

    // Act
    var sentiment = await mL.SentimentFromMultiple(positiveTexts);

    // Assert — include the full response in the failure message for triage.
    Assert.AreEqual(true, (bool)(sentiment)["prediction"], sentiment.ToString());
}
// Trains a two-class model (anomaly vs. no anomaly) on all 7-segment
// chains, using the 13 aggregate speed/time features.
static void trainAnomalyDetectionTwoClassModel()
{
    var service = new MLService();
    var chain = service.GetAllSegments_SegmentChain7_AnomalySpreaded_All();

    double[][] inputs = chain
        .Select(sc => new double[]
        {
            sc.MaxSpeed.Value, sc.MinSpeed.Value, sc.SpeedRange.Value,
            sc.TotalTime.Value, sc.MinTime.Value, sc.MaxTime.Value, sc.TimeRange.Value,
            sc.MaxSpeedVar.Value, sc.MinSpeedVar.Value, sc.SpeedVarRange.Value,
            sc.MaxTimeVar.Value, sc.MinTimeVar.Value, sc.TimeVarRange.Value
        })
        .ToArray();

    // Labels: 0/1 flag for whether each chain contains an anomaly.
    int[] outputs = chain.Select(sc => sc.ContainAnomaly).ToArray();

    trainTwoClass(inputs, outputs);
    Console.ReadKey();
}
// Loads the trained anomaly-detection and anomaly-classification SVMs, runs
// every 7-segment chain through them, and persists a SegmentChain row (with
// the predicted anomaly type) for each chain flagged as anomalous.
// PredictedAnomalyType encoding: 0 = none, 1 = anomaly (non-pothole), 2 = pothole.
static void detectAnomalies() {
    // Pre-trained Gaussian-kernel SVMs serialized by the training routines.
    var anomalyDetectionModel = Serializer.Load <SupportVectorMachine <Gaussian> >(AnomalyDetectionModelFileName);
    var anomalyClassificationModel = Serializer.Load <SupportVectorMachine <Gaussian> >(AnomalyClassificationModelFileName);
    MLService service = new MLService();
    SegmentService serviceService = new SegmentService();
    var chains = service.GetAllSegments_SegmentChain7_AnomalySpreaded();
    // Running tallies — inspectable in the debugger; not otherwise reported.
    int numberOfAnomalies = 0;
    int numberOfPotholes = 0;
    foreach (var chain in chains) {
        // Same 13 aggregate features, in the same order used at training time.
        double[] inputs = new double[] { chain.MaxSpeed.Value, chain.MinSpeed.Value, chain.SpeedRange.Value, chain.TotalTime.Value, chain.MinTime.Value, chain.MaxTime.Value, chain.TimeRange.Value, chain.MaxSpeedVar.Value, chain.MinSpeedVar.Value, chain.SpeedVarRange.Value, chain.MaxTimeVar.Value, chain.MinTimeVar.Value, chain.TimeVarRange.Value };
        // Stage 1: does this chain contain any anomaly at all?
        bool isContainAnomaly = anomalyDetectionModel.Decide(inputs);
        bool isPothole = false;
        if (isContainAnomaly) {
            numberOfAnomalies++;
            // Stage 2: classify the anomaly — pothole vs. other.
            isPothole = anomalyClassificationModel.Decide(inputs);
            if (isPothole) { numberOfPotholes++; }
            // Persist the prediction; only anomalous chains are stored.
            // NOTE(review): isContainAnomaly is always true inside this branch,
            // so the ternary below effectively reduces to (isPothole ? 2 : 1).
            SegmentChain sc = new SegmentChain() { Segment1Id = chain.Segment1Id, Segment2Id = chain.Segment2Id, Segment3Id = chain.Segment3Id, Segment4Id = chain.Segment4Id, Segment5Id = chain.Segment5Id, Segment6Id = chain.Segment6Id, Segment7Id = chain.Segment7Id, PredictedAnomalyType = (short)(isContainAnomaly? (isPothole?2:1):0), CreationDate = DateTime.Now };
            serviceService.CreateSegmentChain(sc);
        }
    }
}
// Trains the two-class anomaly-detection SVM over the 13 aggregate features
// of 7-segment chains; SVM_TrainAnomalyDetectionModel performs the actual
// training (and any persistence/reporting it implements).
//
// NOTE(review): this method previously carried ~100 lines of commented-out
// feature-selection, PCA, KMeans and SMO experiments that buried the six
// live statements below. The dead code was removed — recover it from source
// control if those experiments are ever needed again.
static void trainAnomalyDetectionModel()
{
    MLService service = new MLService();
    var chain = service.GetAllSegments_SegmentChain7_AnomalySpreaded();

    // Feature vector: speed extremes/range, time aggregates, and the
    // speed/time variance aggregates — same order as the other trainers.
    double[][] inputs = chain.Select(sc => new double[]
    {
        sc.MaxSpeed.Value, sc.MinSpeed.Value, sc.SpeedRange.Value,
        sc.TotalTime.Value, sc.MinTime.Value, sc.MaxTime.Value, sc.TimeRange.Value,
        sc.MaxSpeedVar.Value, sc.MinSpeedVar.Value, sc.SpeedVarRange.Value,
        sc.MaxTimeVar.Value, sc.MinTimeVar.Value, sc.TimeVarRange.Value
    }).ToArray();

    // Labels: 0/1 flag for whether each chain contains an anomaly.
    int[] outputs = chain.Select(sc => sc.ContainAnomaly).ToArray();

    SVM_TrainAnomalyDetectionModel(inputs, outputs);
    Console.ReadKey();
}