/// <summary>Test cleaning of old history files.</summary>
/// <remarks>
/// Test cleaning of old history files. Files should be deleted after 1 week by
/// default; here the max history age is forced to -1 so that a subsequent
/// <c>Clean()</c> must delete the file immediately.
/// </remarks>
/// <exception cref="System.Exception"/>
public virtual void TestDeleteFileInfo()
{
    Log.Info("STARTING testDeleteFileInfo");
    try
    {
        Configuration conf = new Configuration();
        conf.SetClass(CommonConfigurationKeysPublic.NetTopologyNodeSwitchMappingImplKey,
            typeof(TestJobHistoryParsing.MyResolver), typeof(DNSToSwitchMapping));
        RackResolver.Init(conf);
        // Run a tiny 1-map/1-reduce job to completion so history files get written.
        MRApp app = new TestJobHistoryEvents.MRAppWithHistory(1, 1, true, this.GetType()
            .FullName, true);
        app.Submit(conf);
        Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = app.GetContext().GetAllJobs().Values
            .GetEnumerator().Next();
        JobId jobId = job.GetID();
        app.WaitForState(job, JobState.Succeeded);
        // make sure all events are flushed
        app.WaitForState(Service.STATE.Stopped);
        HistoryFileManager hfm = new HistoryFileManager();
        hfm.Init(conf);
        HistoryFileManager.HistoryFileInfo fileInfo = hfm.GetFileInfo(jobId);
        hfm.InitExisting();
        // Wait (bounded) for the files to move from the done_intermediate
        // directory to the done directory. The previous unbounded loop could
        // hang the suite forever if the move never completed; mirror the
        // timeout pattern used by testScanningOldDirs.
        int msecPerSleep = 300;
        int msecToSleep = 60 * 1000;
        while (fileInfo.IsMovePending() && msecToSleep > 0)
        {
            msecToSleep -= msecPerSleep;
            Sharpen.Thread.Sleep(msecPerSleep);
        }
        NUnit.Framework.Assert.IsTrue("Timeout waiting for history move", msecToSleep > 0
            );
        NUnit.Framework.Assert.IsNotNull(hfm.jobListCache.Values());
        // Try to remove fileInfo while it is still within its retention period.
        hfm.Clean();
        // Verify that fileInfo has NOT been deleted yet.
        NUnit.Framework.Assert.IsFalse(fileInfo.IsDeleted());
        // Force the retention age so the next Clean() must delete it.
        hfm.SetMaxHistoryAge(-1);
        hfm.Clean();
        hfm.Stop();
        NUnit.Framework.Assert.IsTrue("Thread pool shutdown", hfm.moveToDoneExecutor.IsTerminated
            ());
        // should be deleted !
        NUnit.Framework.Assert.IsTrue("file should be deleted ", fileInfo.IsDeleted());
    }
    finally
    {
        Log.Info("FINISHED testDeleteFileInfo");
    }
}
/// <summary>
/// Verifies that a job evicted from the job-list cache can still be located
/// by scanning the old (done) history directories after its files have moved.
/// </summary>
/// <exception cref="System.Exception"/>
public virtual void TestScanningOldDirs()
{
    Log.Info("STARTING testScanningOldDirs");
    try
    {
        Configuration conf = new Configuration();
        conf.SetClass(CommonConfigurationKeysPublic.NetTopologyNodeSwitchMappingImplKey,
            typeof(TestJobHistoryParsing.MyResolver), typeof(DNSToSwitchMapping));
        RackResolver.Init(conf);
        // Run a small 1-map/1-reduce job with history enabled.
        MRApp app = new TestJobHistoryEvents.MRAppWithHistory(1, 1, true, this.GetType()
            .FullName, true);
        app.Submit(conf);
        Org.Apache.Hadoop.Mapreduce.V2.App.Job.Job job = app.GetContext().GetAllJobs().Values
            .GetEnumerator().Next();
        JobId jobId = job.GetID();
        Log.Info("JOBID is " + TypeConverter.FromYarn(jobId).ToString());
        app.WaitForState(job, JobState.Succeeded);
        // make sure all events are flushed
        app.WaitForState(Service.STATE.Stopped);
        TestJobHistoryParsing.HistoryFileManagerForTest hfm = new TestJobHistoryParsing.HistoryFileManagerForTest
            ();
        hfm.Init(conf);
        HistoryFileManager.HistoryFileInfo fileInfo = hfm.GetFileInfo(jobId);
        NUnit.Framework.Assert.IsNotNull("Unable to locate job history", fileInfo);
        // force the manager to "forget" the job
        hfm.DeleteJobFromJobListCache(fileInfo);
        // Poll (bounded) until the history files finish moving to done.
        int msecPerSleep = 10;
        int msecToSleep = 10 * 1000;
        for (; fileInfo.IsMovePending() && msecToSleep > 0; msecToSleep -= msecPerSleep)
        {
            NUnit.Framework.Assert.IsTrue(!fileInfo.DidMoveFail());
            Sharpen.Thread.Sleep(msecPerSleep);
        }
        NUnit.Framework.Assert.IsTrue("Timeout waiting for history move", msecToSleep > 0
            );
        // Re-resolve: the manager must find the job again by scanning old dirs.
        fileInfo = hfm.GetFileInfo(jobId);
        hfm.Stop();
        NUnit.Framework.Assert.IsNotNull("Unable to locate old job history", fileInfo);
        NUnit.Framework.Assert.IsTrue("HistoryFileManager not shutdown properly", hfm.moveToDoneExecutor
            .IsTerminated());
    }
    finally
    {
        Log.Info("FINISHED testScanningOldDirs");
    }
}