protected override void InitializeTests()
{
    // Always compute training L1/L2 errors.
    TrainTest = new BinaryClassificationTest(ConstructScoreTracker(TrainSet), _trainSetLabels, Args.LearningRates);
    Tests.Add(TrainTest);

    if (ValidSet != null)
    {
        ValidTest = new BinaryClassificationTest(ConstructScoreTracker(ValidSet),
            GetClassificationLabelsFromRatings(ValidSet).ToArray(), Args.LearningRates);
        Tests.Add(ValidTest);
    }

    // If the external label is missing, use the Rating column for the L1/L2 error.
    // The values may not make much sense if the regression value is not an actual label value.
    if (TestSets != null)
    {
        for (int t = 0; t < TestSets.Length; ++t)
        {
            bool[] labels = GetClassificationLabelsFromRatings(TestSets[t]).ToArray();
            Tests.Add(new BinaryClassificationTest(ConstructScoreTracker(TestSets[t]), labels, Args.LearningRates));
        }
    }

    if (Args.EnablePruning && ValidSet != null)
    {
        if (!Args.UseTolerantPruning)
        {
            // Use the simple early-stopping condition.
            PruningTest = new TestHistory(ValidTest, 0);
        }
        else
        {
            // Use the tolerant stopping condition.
            PruningTest = new TestWindowWithTolerance(ValidTest, 0, Args.PruningWindowSize, Args.PruningThreshold);
        }
    }
}
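// The two pruning paths above select between a simple early-stopping check (TestHistory) and a
// windowed, tolerance-based one (TestWindowWithTolerance). Their exact semantics are not shown in
// this section; the sketch below is only a hypothetical illustration of a windowed stopping rule,
// and the names TolerantStoppingSketch, ShouldStop, and its parameters are assumptions made for
// illustration, not the actual TestWindowWithTolerance API.
using System.Collections.Generic;
using System.Linq;

public sealed class TolerantStoppingSketch
{
    private readonly int _windowSize;      // how many recent iterations to inspect
    private readonly double _threshold;    // degradation tolerated before stopping
    private readonly List<double> _validationErrors = new List<double>();

    public TolerantStoppingSketch(int windowSize, double threshold)
    {
        _windowSize = windowSize;
        _threshold = threshold;
    }

    // Record the latest validation error and report whether training should stop.
    public bool ShouldStop(double validationError)
    {
        _validationErrors.Add(validationError);
        if (_validationErrors.Count < _windowSize)
            return false;

        // Best error seen anywhere in the history, and best error within the most recent window.
        double bestOverall = _validationErrors.Min();
        double bestInWindow = _validationErrors.Skip(_validationErrors.Count - _windowSize).Min();

        // Stop once the recent window never gets within the tolerance of the overall best error.
        return bestInWindow - bestOverall > _threshold;
    }
}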
protected virtual void AddFullNDCGTests()
{
    Tests.Add(new NDCGTest(ConstructScoreTracker(TrainSet), TrainSet.Ratings, _args.sortingAlgorithm));

    if (ValidSet != null)
    {
        Test test = new NDCGTest(ConstructScoreTracker(ValidSet), ValidSet.Ratings, _args.sortingAlgorithm);
        Tests.Add(test);
    }

    if (TestSets != null)
    {
        for (int t = 0; t < TestSets.Length; ++t)
        {
            Test test = new NDCGTest(ConstructScoreTracker(TestSets[t]), TestSets[t].Ratings, _args.sortingAlgorithm);
            if (t == 0)
            {
                _firstTestSetHistory = new TestHistory(test, 0);
            }
            Tests.Add(test);
        }
    }
}
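// NDCGTest above evaluates ranking quality against the Ratings of each dataset. The helper below is
// a generic, self-contained sketch of the NDCG metric itself (the standard 2^rel - 1 gain with a
// log2 position discount); it is not the NDCGTest implementation, and the names NdcgSketch,
// ComputeNdcg, and their parameters are assumptions made for illustration.
using System;
using System.Linq;

public static class NdcgSketch
{
    // scores: model scores for the documents of one query; ratings: their relevance labels.
    public static double ComputeNdcg(double[] scores, int[] ratings, int truncation)
    {
        // Rank documents by descending model score and accumulate the discounted gains.
        int[] rankedRatings = ratings
            .Select((rating, index) => (rating, score: scores[index]))
            .OrderByDescending(pair => pair.score)
            .Select(pair => pair.rating)
            .ToArray();
        double dcg = DiscountedGain(rankedRatings, truncation);

        // The ideal ordering sorts by the true ratings; its DCG normalizes the result into [0, 1].
        double idealDcg = DiscountedGain(ratings.OrderByDescending(r => r).ToArray(), truncation);
        return idealDcg > 0 ? dcg / idealDcg : 0.0;
    }

    private static double DiscountedGain(int[] orderedRatings, int truncation)
    {
        double sum = 0.0;
        for (int i = 0; i < Math.Min(truncation, orderedRatings.Length); i++)
            sum += (Math.Pow(2, orderedRatings[i]) - 1) / Math.Log(i + 2, 2);
        return sum;
    }
}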
private void RunHistory()
{
    History = _history = new TestHistory(this);
    Indicator = _indicator = new TestIndicator();
}
public LogTest()
    : base(new TestIndicator())
{
    History = new TestHistory(this);
}
private void Executer()
{
    int exitCode = 0;
    TestHistory testHistory = null;

    for (;;)
    {
        Thread.Sleep(ExecutionDelay);

        // If there are items waiting on the queue, remove the first item and begin execution...
        if (ExecutionQueue.Count > 0)
        {
            updateMutex.WaitOne(); // Start waiting for the mutex...

            // Pull out the queue entry...
            QueueEntry entry = (QueueEntry)ExecutionQueue.Dequeue();

            // Set a flag on the RunItem to show that it is out of the queue and currently running.
            foreach (RunItem item in RunItems)
            {
                if (item.TestID == entry.TestInformation.ID)
                {
                    item.IsRunning = true;
                }
            }

            updateMutex.ReleaseMutex(); // Release the mutex when done...

            // Add a new history entry to record that the test is now starting...
            testHistory = new TestHistory();
            testHistory.TestID = entry.TestInformation.ID;
            testHistory.Runtime = DateTime.Now;
            testHistory.Status = "STARTED";
            dbConnection.AddTestHistory(testHistory);

            // Update the current state of the test...
            UpdateTestState(entry.TestInformation.ID, "RUNNING");

            // Now, outside the main mutex, perform the concurrent action of running the executable,
            // but under a different mutex. This ensures that no more than one application runs at a
            // time, without blocking threads that only need to update shared state.
            executionMutex.WaitOne();
            exitCode = ExecuteQueueEntry(entry);
            executionMutex.ReleaseMutex();

            // We must grab the mutex again to modify the RunItem structure...
            updateMutex.WaitOne();

            // Since the position of the RunItem in memory cannot be guaranteed (the mutex was
            // released for the duration of the run), we must go back and find it again...
            bool found = false;
            foreach (RunItem item in RunItems)
            {
                if (item.TestID != entry.TestInformation.ID)
                {
                    continue;
                }
                found = true;

                // The application is no longer running, so update that state in the in-memory structure...
                item.IsRunning = false;

                // Rebuild the RunFrequency tracking information, using the current time as the start time...
                RunFrequency runFrequency = new RunFrequency(item.Frequency);
                runFrequency.StartDate = DateTime.Now;

                // If this test is supposed to run a fixed number of times, decrement the counter...
                if (!runFrequency.RunInfinite)
                {
                    runFrequency.RunNumberOfTimes--;
                }

                // Update the item already in memory with the new changes...
                item.Frequency = runFrequency.GetFrequencyString();

                // Now, update the SQL database with this information...
                UpdateRunItemFrequency(item.TestID, runFrequency.GetFrequencyString());

                // Add a new history entry to record the results of the test...
                testHistory = new TestHistory();
                testHistory.TestID = item.TestID;
                testHistory.Runtime = runFrequency.StartDate;
                if (exitCode == 0)
                {
                    testHistory.Status = "SUCCESS";
                    SendEmail(item, true);
                }
                else
                {
                    testHistory.Status = "FAILURE (" + exitCode.ToString() + ")";
                    SendEmail(item, false);
                }

                // Update the current state of the test: SUCCESS or FAILURE (either way, the test is
                // no longer running).
                UpdateTestState(item.TestID, testHistory.Status);
                dbConnection.AddTestHistory(testHistory);
            }

            if (!found)
            {
                // We should NEVER get here...
                eventLog.WriteEntry("An item existed in the execution queue which didn't exist in the database!",
                    System.Diagnostics.EventLogEntryType.Error);
            }

            // Release the mutex so that further threads may modify the RunItem structures...
            updateMutex.ReleaseMutex();
        }
    }
}
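// Executer relies on two separate mutexes: updateMutex guards the shared queue and RunItem state,
// while executionMutex only serializes the (potentially long) run of the external executable, so
// state updates are never blocked behind a running test. The fragment below is a stripped-down
// sketch of that pattern under assumed names (TwoMutexSketch, ProcessNext, RunExecutable are
// illustrative, not part of the original service).
using System.Collections.Generic;
using System.Threading;

public sealed class TwoMutexSketch
{
    private readonly Mutex _updateMutex = new Mutex();     // guards _queue and shared bookkeeping
    private readonly Mutex _executionMutex = new Mutex();  // serializes the actual process runs
    private readonly Queue<string> _queue = new Queue<string>();

    public void ProcessNext()
    {
        string workItem = null;

        // Take the next item while holding the state mutex only briefly.
        _updateMutex.WaitOne();
        try
        {
            if (_queue.Count > 0)
                workItem = _queue.Dequeue();
        }
        finally
        {
            _updateMutex.ReleaseMutex();
        }

        if (workItem == null)
            return;

        // Run the long-lived work under the separate execution mutex, so other threads can still
        // enqueue items and update shared state while this runs.
        _executionMutex.WaitOne();
        try
        {
            RunExecutable(workItem); // placeholder for ExecuteQueueEntry-style work
        }
        finally
        {
            _executionMutex.ReleaseMutex();
        }
    }

    private void RunExecutable(string workItem)
    {
        // Illustrative stub; the real service launches a process and records history rows here.
    }
}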