/// <summary>
/// Adds an analyzed match to the in-memory collection and persists the full
/// collection to a local Json file.
/// </summary>
/// <param name="analyzedMatch">The analyzed match to record. Must not already be present.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when <paramref name="analyzedMatch"/> is already in the collection.
/// </exception>
public static void AddAnalyzedMatch(MatchMetricGroup analyzedMatch)
{
    if (AnalyzedMatches.Contains(analyzedMatch))
    {
        // Duplicates indicate a caller bug; surface it with a specific exception type
        // rather than a bare Exception so callers can catch it meaningfully.
        throw new InvalidOperationException($"{nameof(analyzedMatch)} was added where a duplicate already exists.");
    }

    AnalyzedMatches.Add(analyzedMatch);

    // Persist the whole collection each time so the file stays in sync with memory.
    File.WriteAllText(AnalyzedMatchesPath + AnalyzedMatchesJson, JsonConvert.SerializeObject(AnalyzedMatches));
}
/// <summary>
/// Uses the Tensorflow model to predict the most exciting moment.
/// Returns metadata about the highlight.
/// </summary>
/// <param name="matchMetricGroup">Metrics for the match to analyze.</param>
/// <param name="testConfiguration">When true, loads the model script from the alternative scripts path.</param>
/// <returns>Highlight start offset (with slippage compensation), duration, and excitement score.</returns>
public HighlightInfo GetHighlightPeriod(MatchMetricGroup matchMetricGroup, bool testConfiguration = false)
{
    if (testConfiguration)
    {
        _deepLearnerScriptPath = ConfigurationManager.AppSettings["AltScriptsPath"] + "DeepLearningModel.py";
    }

    // Packages matchMetricGroup info into a chunked up form that Tensorflow can understand
    // and creates a csv file for the Tensorflow python script to reference.
    var (matchPath, predictedDataPath) = PrepareMatchForTensorFlow(matchMetricGroup, false);

    // Runs the python script that outputs a csv file for the predictions the Tensorflow
    // model made about the excitement level at a particular time in the match.
    GetHighlightInfo(matchPath, predictedDataPath);

    // Load in the predictions made by the model.
    List<string> predictedDataRaw;
    try
    {
        predictedDataRaw = File.ReadAllLines(_tensorflowPath + "Predictions\\" + predictedDataPath).ToList();
    }
    catch (Exception)
    {
        // Very rare edge case when multiple parallel uses of the model can cause a failure
        // to predict. Retry once; if the retry also fails, the exception propagates.
        GetHighlightInfo(matchPath, predictedDataPath);
        predictedDataRaw = File.ReadAllLines(_tensorflowPath + "Predictions\\" + predictedDataPath).ToList();
    }

    Console.WriteLine(predictedDataPath + " Complete.");

    var predictedData = new List<double>();
    var matchOffset = new List<double>();
    var matchAnalyzer = new MatchAnalyzer();

    // Parse predicted data into relevant objects. Reference is via an offset from the
    // original raw Broadcast video start time. The predictions file is produced by the
    // Python script with '.' as decimal separator, so parse with the invariant culture
    // (current-culture parsing breaks on ','-decimal locales).
    var counter = 0.0;
    foreach (var line in predictedDataRaw)
    {
        predictedData.Add(double.Parse(line, System.Globalization.CultureInfo.InvariantCulture));
        // Each prediction covers a 15-second window of video time.
        matchOffset.Add(matchAnalyzer.ConvertVideoTimeToMatchOffset(counter * 15, matchMetricGroup.Match));
        counter += 1;
    }

    // Find the most exciting period in the match and its score.
    var highestScore = 0.0;
    var highestScoreIndex = 0;
    for (var i = 0; i < predictedData.Count; i++)
    {
        if (predictedData[i] > highestScore)
        {
            highestScore = predictedData[i];
            highestScoreIndex = i;
        }
    }

    // We offset the start by an additional 90 seconds to account for time slippage.
    return new HighlightInfo(matchOffset[highestScoreIndex] + 90, 40, highestScore);
}
/// <summary>
/// Formats the MatchMetricGroup for a single match into a form that Tensorflow can process.
/// Metrics are grouped into discrete time chunks and written to csv files.
/// Can be configured to create a csv using manual training data which corresponds to the input match.
/// </summary>
/// <param name="matchMetricGroup">Metrics for the match being prepared.</param>
/// <param name="training">If manual training data exists, process it for the Tensorflow model.</param>
/// <returns>The location of the match csv and the corresponding expected location of the scripts output predictions.</returns>
public (string, string) PrepareMatchForTensorFlow(MatchMetricGroup matchMetricGroup, bool training)
{
    // Each metric is grouped into a series of set time chunks.
    var chunkedKillDifferences = DivideIntoTimeChunks(matchMetricGroup.KillDifferences, _secondsChunkSize);
    var chunkedUltimateUsage = DivideIntoTimeChunks(matchMetricGroup.UltimateUsage, _secondsChunkSize);
    var chunkedTurretKills = DivideIntoTimeChunks(matchMetricGroup.TurretKills, _secondsChunkSize);
    var chunkedBaronKills = DivideIntoTimeChunks(matchMetricGroup.BaronKills, _secondsChunkSize);
    var chunkedDragonKills = DivideIntoTimeChunks(matchMetricGroup.DragonKills, _secondsChunkSize);
    var chunkedInhibitorKills = DivideIntoTimeChunks(matchMetricGroup.InhibitorKills, _secondsChunkSize);

    // Discovering the longest list for ensuring that the output for Tensorflow has
    // metric chunks of equal length.
    var highestListCount = Math.Max(chunkedKillDifferences.Count,
        Math.Max(chunkedUltimateUsage.Count,
            Math.Max(chunkedTurretKills.Count,
                Math.Max(chunkedBaronKills.Count,
                    Math.Max(chunkedDragonKills.Count, chunkedInhibitorKills.Count)))));

    // Group chat rate into time chunks.
    var chunkedChatRate = DivideChatIntoTimeChunks(matchMetricGroup.ChatRate, highestListCount);

    // Fill our output list for Tensorflow. Ensure that shorter metric lists are padded
    // with 0's to ensure all lists are equal length.
    var output = new List<List<double>>();
    for (int i = 0; i < highestListCount; i++)
    {
        var row = new List<double>
        {
            i < chunkedKillDifferences.Count ? chunkedKillDifferences[i] : 0.0,
            i < chunkedUltimateUsage.Count ? chunkedUltimateUsage[i] : 0.0,
            i < chunkedTurretKills.Count ? chunkedTurretKills[i] : 0.0,
            i < chunkedBaronKills.Count ? chunkedBaronKills[i] : 0.0,
            i < chunkedDragonKills.Count ? chunkedDragonKills[i] : 0.0,
            i < chunkedInhibitorKills.Count ? chunkedInhibitorKills[i] : 0.0,
            i < chunkedChatRate.Count ? chunkedChatRate[i] : 0.0
        };
        output.Add(row);
    }

    // Write 2D list out to csv. Doubles are formatted with the invariant culture so
    // the Python consumer always sees '.' as the decimal separator regardless of locale.
    var matchCsv = new StringBuilder();
    foreach (var line in output)
    {
        matchCsv.AppendLine(string.Join(",", line.Select(v => v.ToString(System.Globalization.CultureInfo.InvariantCulture))));
    }

    var fileNameStem = matchMetricGroup.Match.BroadcastId + matchMetricGroup.Match.GetFileName(false);

    // Save location modifier.
    if (training)
    {
        // Extra csv file created for training data labels.
        var outputCount = output.Count;

        // Load and parse label data into a Tensorflow friendly format.
        var trainingDataRaw = LoadTrainingData(matchMetricGroup.Match);
        var trainingData = DivideTrainingDataIntoTimeChunks(trainingDataRaw, _secondsChunkSize);

        // Align data: pad labels with 0's (or trim excess) until the same length as the
        // matchMetric data.
        while (outputCount > trainingData.Count)
        {
            trainingData.Add(0.0);
        }
        while (outputCount < trainingData.Count)
        {
            trainingData.RemoveAt(trainingData.Count - 1);
        }

        // Write labels to their own csv builder. BUG FIX: the original reused the match
        // csv builder here, which caused the match-metric csv below to be written with
        // label contents instead of the metric rows.
        var labelCsv = new StringBuilder();
        foreach (var label in trainingData)
        {
            labelCsv.AppendLine(label.ToString(System.Globalization.CultureInfo.InvariantCulture));
        }

        // Labels.
        File.WriteAllText(_tensorflowPath + "TrainingData\\" + fileNameStem + "_training.csv", labelCsv.ToString());
        // MatchMetricGroup.
        File.WriteAllText(_tensorflowPath + "TrainingData\\" + fileNameStem + ".csv", matchCsv.ToString());
    }
    else
    {
        // MatchMetricGroup.
        File.WriteAllText(_tensorflowPath + "EvaluationData\\" + fileNameStem + ".csv", matchCsv.ToString());
    }

    // Locations of relevant files required by the Tensorflow Python script.
    var matchPath = fileNameStem + ".csv";
    var predictedDataPath = fileNameStem + "_prediction" + ".csv";
    return (matchPath, predictedDataPath);
}