protected override async Task ExecuteAsync(CancellationToken stoppingToken) { Console.WriteLine($"Building sentiment classifier..."); var sw = new Stopwatch(); sw.Start(); SentimentClassifierBuilder analyser = new SentimentClassifierBuilder(); var engine = await analyser.BuildEngine(); sw.Stop(); Console.WriteLine($"Finished in {(int)sw.Elapsed.TotalSeconds}s"); string[] searches = new string[] { "microsoft", "apple", "google", "amazon", "facebook", "ibm", "dell", "sony", "panasonic", "intel", "hp" }; while (!stoppingToken.IsCancellationRequested) { foreach (var line in searches) { HashSet <string> allTweets = new HashSet <string>(); ConcurrentDictionary <DateTime, DailySentiment> dailySentiments = new ConcurrentDictionary <DateTime, DailySentiment>(); var dates = new DateTime[] { DateTime.Today.AddDays(-6), DateTime.Today.AddDays(-5), DateTime.Today.AddDays(-4), DateTime.Today.AddDays(-3), DateTime.Today.AddDays(-2), DateTime.Today.AddDays(-1), DateTime.Today, }; foreach (var date in dates) { if (File.Exists(GetFileName(line, date.AddDays(-1)))) { continue; } await foreach (var statuses in _twitterSearch.Search(line, date.AddDays(-1), date)) { foreach (var status in statuses) { if (!allTweets.Add(status.Text)) { continue; } var result = engine.Predict(status.Text); var dailySentiment = dailySentiments.GetOrAdd(status.Created_At.Date, new DailySentiment { SearchTerm = line, Date = status.Created_At.Date }); if (result.sentiment == Sentiment.Good) { dailySentiment.PositiveCount++; } else { dailySentiment.NegativeCount++; } } } foreach (var dailySentiment in dailySentiments) { string text = JsonConvert.SerializeObject(dailySentiment.Value); string fileName = GetFileName(line, dailySentiment.Key); File.WriteAllText(fileName, text); Console.WriteLine($"Saved {fileName}"); } } string GetFileName(string search, DateTime date) { return($"data/{search}_{date:yyyyMMdd}.json"); } } } }
protected override async Task ExecuteAsync(CancellationToken stoppingToken) { Console.WriteLine($"Building sentiment classifier..."); var sw = new Stopwatch(); sw.Start(); SentimentClassifierBuilder analyser = new SentimentClassifierBuilder(); var engine = await analyser.BuildEngine(); sw.Stop(); Console.WriteLine($"Finished in {(int)sw.Elapsed.TotalSeconds}s"); while (!stoppingToken.IsCancellationRequested) { Console.WriteLine($"Please enter search term."); string line = Console.ReadLine(); int bad = 0; int good = 0; int maxSamples = 3; List <string> badSamples = new List <string>(); List <string> goodSamples = new List <string>(); await foreach (var statuses in _twitterSearch.Search(line, DateTime.UtcNow.AddDays(-1))) { foreach (var status in statuses) { var result = engine.Predict(status.Text); if (result.sentiment == Sentiment.Good) { good++; if (goodSamples.Count < maxSamples) { goodSamples.Add($"Probability:{result.probability}{Environment.NewLine}Score:{result.score}{Environment.NewLine}{status.Text}{Environment.NewLine}"); } } else { bad++; if (badSamples.Count < maxSamples) { badSamples.Add($"Probability:{result.probability}{Environment.NewLine}Score:{result.score}{Environment.NewLine}{status.Text}{Environment.NewLine}"); } } } } Console.WriteLine("****** Sample bad tweets ******"); foreach (string tweet in badSamples) { Console.WriteLine(tweet); } Console.WriteLine("****** Sample good tweets ******"); foreach (string tweet in goodSamples) { Console.WriteLine(tweet); } Console.WriteLine($"Total: {bad} bad and {good} good"); } }