// Worker thread body: blocks on the shared event gate, then launches the
// critical-section regression job selected by the boxed thread index.
private static void ThreadProc(object num)
{
    // The boxed argument selects which regressionTest_criticalSection<N>.xml to run.
    int index = (int)num;
    // Wait until the coordinating thread releases all workers simultaneously.
    evt.WaitOne();
    string command = exe + @" exec regressionTest_criticalSection" + index.ToString() + ".xml \"" + dumpfn + "\"";
    Exec.Shell(command);
}
// Verifies that "Qizmt packetsniff" still functions while the admin command
// lock is held; the lock is always released in the finally block.
private static void TestPacketSniffAdminCommandLock(string[] args)
{
    Console.WriteLine("-");
    Console.WriteLine("Testing packet sniff under admin command lock...");
    string exe = Exec.GetQizmtExe();
    try
    {
        MySpace.DataMining.AELight.AELight.EnterAdminCmd();
        Console.WriteLine("Admin command lock obtained...");
        Console.WriteLine("Running packet sniff...");
        string output = Exec.Shell(exe + " packetsniff", false);
        // The sniffer announces itself with the word "sniffing" in its output.
        string expected = "sniffing";
        bool found = output.IndexOf(expected, StringComparison.OrdinalIgnoreCase) > -1;
        Console.WriteLine((found ? "[PASSED] - " : "[FAILED] - ") + string.Join(" ", args));
    }
    finally
    {
        // Always release the admin command lock, even if the sniff failed.
        MySpace.DataMining.AELight.AELight.CleanThisExecQ();
        Console.WriteLine("Admin command lock released...");
    }
}
// Checks that files extracted via "Qizmt getbinary" are written with full
// control granted to the current user account.
public static void TestDSpaceGetBinaryFilePermissions(string[] args)
{
    string exe = Exec.GetQizmtExe();
    string user = Environment.UserName.ToLower();
    string domain = Environment.UserDomainName.ToLower();
    string account = domain + @"\" + user;
    Console.WriteLine();
    Console.WriteLine("-");
    Console.WriteLine("Testing: Qizmt getbinary file permissions...");
    // Start from a clean slate and generate two DFS input files.
    Exec.Shell(exe + " del TestDSpaceGetBinaryFilePermissions*", false);
    Exec.Shell(exe + " gen TestDSpaceGetBinaryFilePermissions1.txt 40MB", false);
    Exec.Shell(exe + " gen TestDSpaceGetBinaryFilePermissions2.txt 40MB", false);
    string stage = Environment.CurrentDirectory + @"\TestDSpaceGetBinaryFilePermissions";
    if (System.IO.Directory.Exists(stage))
    {
        System.IO.Directory.Delete(stage, true);
    }
    System.IO.Directory.CreateDirectory(stage);
    // Re-express the local directory as an administrative network share path.
    stage = stage.Replace(":", "$");
    stage = @"\\" + System.Net.Dns.GetHostName() + @"\" + stage;
    string extract = stage + "2";
    if (System.IO.Directory.Exists(extract))
    {
        System.IO.Directory.Delete(extract, true);
    }
    System.IO.Directory.CreateDirectory(extract);
    stage = stage + @"\";
    extract = extract + @"\";
    // Pull the generated files down, pack them into a blob, then unpack it.
    Exec.Shell(exe + " get TestDSpaceGetBinaryFilePermissions*.txt " + stage, false);
    Exec.Shell(exe + " putbinary " + stage + "*.* TestDSpaceGetBinaryFilePermissions.blob", false);
    Exec.Shell(exe + " getbinary TestDSpaceGetBinaryFilePermissions.blob " + extract);
    // The extraction directory must grant the current account full control.
    if (HasFullControl(account, extract))
    {
        Console.WriteLine("[PASSED] - " + string.Join(" ", args));
    }
    else
    {
        Console.WriteLine("[FAILED] - " + string.Join(" ", args));
    }
    System.IO.Directory.Delete(stage, true);
    System.IO.Directory.Delete(extract, true);
    Exec.Shell(exe + " del TestDSpaceGetBinaryFilePermissions*", false);
}
// Imports and runs the map/reduce job that produces the regression-test cache
// on the given surrogate, then verifies the cache actually exists.
private static bool CreateCache(string surrogate, string jobdir)
{
    //Create mr job, run to create cache.
    Exec.Shell(@"Qizmt @=" + surrogate + " del regressionTest_removeMachineClearCache.xml");
    Exec.Shell(@"Qizmt @=" + surrogate + " importdir " + jobdir);
    Exec.Shell(@"Qizmt @=" + surrogate + " exec regressionTest_removeMachineClearCache.xml");
    //Check cache exists.
    if (CacheExists(surrogate))
    {
        return true;
    }
    Console.Error.WriteLine("Cache is not found after job executed.");
    return false;
}
// Parses the output of "qizmt replicationfactorview" and returns the
// configured replication factor, or -1 when the expected line is absent.
static int GetReplicationFactor()
{
    string view = Exec.Shell("qizmt replicationfactorview");
    string[] lines = view.Trim().Split(new char[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);
    foreach (string line in lines)
    {
        if (!line.StartsWith("Replication factor is set to", StringComparison.OrdinalIgnoreCase))
        {
            continue;
        }
        // The factor is the final space-delimited token of the line.
        int lastSpace = line.Trim().LastIndexOf(' ');
        return Int32.Parse(line.Substring(lastSpace + 1));
    }
    return -1;
}
// Checks that files fetched via "Qizmt get" — whole files, wildcards, and
// part ranges, both plain and gzipped — are written with full control for
// the current user account.
public static void TestDSpaceGetFilePermissions(string[] args)
{
    string exe = Exec.GetQizmtExe();
    string user = Environment.UserName.ToLower();
    string domain = Environment.UserDomainName.ToLower();
    string account = domain + @"\" + user;
    Console.WriteLine();
    Console.WriteLine("-");
    Console.WriteLine("Testing: Qizmt get file permissions...");
    // Clean slate, then generate four DFS files to fetch in various ways.
    Exec.Shell(exe + " del TestDSpaceGetFilePermissions*", false);
    Exec.Shell(exe + " gen TestDSpaceGetFilePermissionsA1.txt 100MB", false);
    Exec.Shell(exe + " gen TestDSpaceGetFilePermissionsA2.txt 100MB", false);
    Exec.Shell(exe + " gen TestDSpaceGetFilePermissionsB1.txt 100MB", false);
    Exec.Shell(exe + " gen TestDSpaceGetFilePermissionsB2.txt 100MB", false);
    string target = Environment.CurrentDirectory + @"\TestDSpaceGetFilePermissions\";
    if (System.IO.Directory.Exists(target))
    {
        System.IO.Directory.Delete(target, true);
    }
    System.IO.Directory.CreateDirectory(target);
    // Re-express the local directory as an administrative share path.
    target = target.Replace(":", "$");
    target = @"\\" + System.Net.Dns.GetHostName() + @"\" + target;
    // Whole-file gets: explicit name, gzip output, and wildcard.
    Exec.Shell(exe + " get TestDSpaceGetFilePermissionsA1.txt " + target + "A1.txt", false);
    Exec.Shell(exe + " get TestDSpaceGetFilePermissionsA1.txt " + target + "A1.gz", false);
    Exec.Shell(exe + " get TestDSpaceGetFilePermissionsA*.txt " + target, false);
    // Part-range gets: explicit name, gzip output, and wildcard.
    Exec.Shell(exe + " get parts=0-1 TestDSpaceGetFilePermissionsB1.txt " + target + "B1.txt", false);
    Exec.Shell(exe + " get parts=0-1 TestDSpaceGetFilePermissionsB1.txt " + target + "B1.gz", false);
    Exec.Shell(exe + " get parts=0-1 TestDSpaceGetFilePermissionsB*.txt " + target, false);
    if (HasFullControl(account, target))
    {
        Console.WriteLine("[PASSED] - " + string.Join(" ", args));
    }
    else
    {
        Console.WriteLine("[FAILED] - " + string.Join(" ", args));
    }
    System.IO.Directory.Delete(target, true);
    Exec.Shell(exe + " del TestDSpaceGetFilePermissions*", false);
}
// Returns the host names reported by "qizmt slaveinstalls".  qizmtdir
// receives the share-relative install directory parsed from the last
// listed entry.
static string[] GetQizmtHosts(out string qizmtdir)
{
    string listing = Exec.Shell("qizmt slaveinstalls");
    string[] lines = listing.Trim().Split(new char[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);
    string[] hosts = new string[lines.Length];
    qizmtdir = null;
    for (int i = 0; i < lines.Length; i++)
    {
        // Each line is of the form "<host> <netpath>".
        string entry = lines[i];
        int space = entry.IndexOf(' ');
        hosts[i] = entry.Substring(0, space);
        string netpath = entry.Substring(space + 1);
        // Strip the leading \\host\ prefix, keeping the share-relative directory.
        int share = netpath.IndexOf(@"\", 2);
        qizmtdir = netpath.Substring(share + 1);
    }
    return hosts;
}
// Issues a remote restart for the given host and blocks until the machine
// stops answering ping (i.e. the shutdown has actually begun).  Gives up
// after roughly two minutes (24 polls at 5 seconds each).
static void RestartHost(string host)
{
    Exec.Shell(@"Shutdown /m \\" + host + " /r");
    int pollsLeft = 24;
    while (true)
    {
        Console.Write(".");
        System.Threading.Thread.Sleep(5000);
        string pingOutput = Exec.Shell("ping " + host + " -n 1");
        bool stillUp = pingOutput.IndexOf("Reply from ", StringComparison.OrdinalIgnoreCase) != -1;
        if (!stillUp)
        {
            break;
        }
        if (--pollsLeft <= 0)
        {
            throw new Exception("Still getting reply from host " + host + " after shutdown.");
        }
    }
}
// Generates a tiny DFS file and resolves the physical network path of its
// first file node via dfs.xml.  Returns false (after printing an error) on
// any failure; on success fpath receives the \\host\share path of the chunk.
private static bool GenerateDfsFile(string fname, string dfspath, string dspacedir, ref string fpath)
{
    string exe = Exec.GetQizmtExe();
    Exec.Shell(exe + @" del " + fname);
    Exec.Shell(exe + @" gen " + fname + " 100B");
    System.Xml.XmlDocument dfs = new System.Xml.XmlDocument();
    try
    {
        dfs.Load(dfspath);
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("Error loading dfs.xml: {0}", e.Message);
        return false;
    }
    // Locate the first physical chunk of the file we just generated.
    System.Xml.XmlNode fnode = dfs.SelectSingleNode(@"//DfsFile[Name='" + fname + @"']/Nodes/FileNode[1]");
    if (fnode == null)
    {
        Console.Error.WriteLine("File node of generated file not found in dfs.xml");
        return false;
    }
    string chunkname = fnode["Name"].InnerText;
    // The Host element may hold a replica list; the first entry is used.
    string chunkhost = fnode["Host"].InnerText.Split(';')[0];
    fpath = @"\\" + chunkhost + @"\" + dspacedir + chunkname;
    if (!System.IO.File.Exists(fpath))
    {
        Console.Error.WriteLine("Cannot find the file that was just generated.");
        return false;
    }
    return true;
}
// Regression test: simulate a hard-drive failure and a machine restart before
// the map phase starts, then verify the job recovers under speculative
// computing with both InputOrder=next and InputOrder=shuffle.
// Job files carry the GUID suffix 10DF6995-B6C1-4c9a-9770-C46B6DF6DE26.
public static void TestHDFailureBeforeMapStarts(string[] args)
{
    Console.WriteLine("====TestHDFailureBeforeMapStarts====");
    string qizmtdir = null;
    string[] hosts = GetQizmtHosts(out qizmtdir);
    if (hosts.Length < 4)
    {
        // Fixed message: the guard requires 4 or more machines (the old text
        // claimed "more than 4" and misspelled Qizmt).
        throw new Exception("There must be at least 4 machines in the Qizmt cluster to test.");
    }
    int replication = GetReplicationFactor();
    if (replication < 2)
    {
        // Fixed message: the guard requires a factor of 2 or more (the old
        // text claimed "greater than 2").
        throw new Exception("Replication factor must be at least 2.");
    }
    if (!IsClusterHealthy())
    {
        throw new Exception("Cluster must be 100% healthy to begin with.");
    }
    Console.WriteLine("Importing jobs...");
    ImportJobs();
    Console.WriteLine("Done");
    Console.WriteLine("Running job in normal mode...");
    Exec.Shell("qizmt exec reg_job1_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26.xml");
    Console.WriteLine("Done");
    // The same failure/restart/verify cycle runs once per InputOrder mode.
    //Speculative computing with InputOrder = next
    RunSpeculativeHDFailureCycle("next", hosts, qizmtdir);
    //Speculative computing with InputOrder = shuffle
    RunSpeculativeHDFailureCycle("shuffle", hosts, qizmtdir);
    Console.WriteLine("[PASSED] - " + string.Join(" ", args));
}

// One failure/recovery cycle: simulate a disk failure on hosts[1] and restart
// hosts[2], run the job in speculative computing mode with the given
// InputOrder, confirm the failure and the exclusion were reported, then undo
// the simulated failure and validate the job output ("error count=0").
// Throws on any unmet expectation.
private static void RunSpeculativeHDFailureCycle(string inputOrder, string[] hosts, string qizmtdir)
{
    //Simulate hd failure
    string fhost = hosts[1];
    Console.WriteLine("Simulating HDF at {0}...", fhost);
    SimulateHDFailure(fhost);
    Console.WriteLine("Done");
    //Restart a machine
    string rhost = hosts[2];
    Console.WriteLine("Restarting host {0}...", rhost);
    RestartHost(rhost);
    Console.WriteLine("Done");
    {
        Console.WriteLine("Running job in speculative computing mode with inputOrder = " + inputOrder + "...");
        string output = Exec.Shell("qizmt exec \"//Job[@Name='mr']/Computing/InputOrder=" + inputOrder + "\" reg_job2_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26.xml");
        Console.WriteLine("Done");
        if (output.IndexOf("HWFailure:Recovered:" + fhost + ":", StringComparison.OrdinalIgnoreCase) == -1)
        {
            throw new Exception("HWFailure at host " + fhost + " was not captured.");
        }
        if (output.IndexOf("Warning: excluding '" + rhost, StringComparison.OrdinalIgnoreCase) == -1)
        {
            throw new Exception("Excluded host " + rhost + " was not captured.");
        }
    }
    {
        UnsimulateHDFailure(fhost);
        Console.WriteLine("Checking results...");
        string output = Exec.Shell("qizmt exec reg_job3_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26.xml \"" + qizmtdir + "\"");
        Console.WriteLine("Done");
        if (output.IndexOf("error count=0", StringComparison.OrdinalIgnoreCase) == -1)
        {
            throw new Exception("Test failed");
        }
    }
}
// True when "Qizmt info" on the given surrogate reports the
// regression-test cache (any non-empty output counts as present).
private static bool CacheExists(string surrogate)
{
    string info = Exec.Shell(@"Qizmt @=" + surrogate + " info regressionTest_removeMachineClearCache_Cache", true);
    return info.Length > 0;
}
// Tests that a local job can add a DFS DLL reference when the surrogate is a
// non-participating machine.  If the surrogate currently appears in the
// SlaveList, dfs.xml is backed up and the surrogate is removed from the list
// for the duration of the test; the backup is restored in the finally block.
// args[1] is the network path (\\host\share\...) to the cluster's dfs.xml.
public static void TestAddRefNonParticipatingCluster(string[] args)
{
    if (args.Length < 2)
    {
        Console.Error.WriteLine("Error: LocalJobAddRefNonParticipatingCluster command needs argument: <dfsXmlPath>");
        return;
    }
    string dfspath = args[1];
    if (!dfspath.StartsWith(@"\\"))
    {
        Console.Error.WriteLine("Argument: <dfsXmlPath> must be a network path");
        return;
    }
    // The surrogate machine name is the host portion of the UNC path.
    string surrogate = dfspath.Substring(2, dfspath.IndexOf(@"\", 2) - 2).ToUpper();
    System.Xml.XmlDocument dfs = new System.Xml.XmlDocument();
    string slavelist = null;
    try
    {
        dfs.Load(dfspath);
        System.Xml.XmlNode node = dfs.SelectSingleNode("//SlaveList");
        if (node == null)
        {
            Console.Error.WriteLine("SlaveList node is not found in dfs.xml");
            return;
        }
        slavelist = node.InnerText.ToUpper();
        string[] parts = slavelist.Split(new char[] { ',', ';' });
        if (parts.Length < 2)
        {
            Console.Error.WriteLine("Must have at least 2 hosts in SlaveList tag in dfs.xml");
            return;
        }
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("Error loading dfs.xml: {0}", e.Message);
        return;
    }
    string dfsback = null;
    bool ok = false;
    try
    {
        if (slavelist.IndexOf(surrogate, StringComparison.OrdinalIgnoreCase) > -1)
        {
            if (!DFSUtils.MakeFileBackup(dfspath, ref dfsback))
            {
                Console.Error.WriteLine("Error while backing up dfs.xml.");
                return;
            }
            string newslavelist = "";
            string[] hosts = slavelist.Split(new char[] { ',', ';' });
            foreach (string host in hosts)
            {
                if (string.Compare(surrogate, host, true) != 0)
                {
                    // BUGFIX: accumulate every non-surrogate host.  The original
                    // used plain assignment (=), which discarded all but the
                    // LAST host when the cluster had 3 or more machines.
                    newslavelist += ";" + host;
                }
            }
            newslavelist = newslavelist.Trim(';');
            DFSUtils.ChangeDFSXMLSlaveList(dfs, dfspath, newslavelist);
        }
        // Job file (backticks become double quotes): delete any stale DLL,
        // write an embedded test DLL (base64-encoded) into DFS, run a local
        // job that references it and logs TestClass.SayHi(), then clean up.
        string mr = @"<SourceCode> <Jobs> <Job> <Narrative> <Name>regression_test_put_dll_Preprocessing</Name> <Custodian></Custodian> <email></email> </Narrative> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt del regressionTestLocalJobAddRef4A30DE94_testdll.dll`); } ]]> </Local> </Job> <Job> <Narrative> <Name>regression_test_put_dll PUT DLL</Name> <Custodian></Custodian> <email></email> </Narrative> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { string localdir = IOUtils.GetTempDirectory(); string fn = `regressionTestLocalJobAddRef4A30DE94_testdll.dll`; string localfn = localdir + @`\` + Guid.NewGuid().ToString() + fn; string testdlldatab64 = `TVqQAAMAAAAEAAAA//8AALgAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAA4fug4AtAnNIbgBTM0hVGhpcyBwcm9ncmFtIGNhbm5vdCBiZSBydW4gaW4gRE9TIG1vZGUuDQ0KJAAAAAAAAABQRQAATAEDAOMDJ0oAAAAAAAAAAOAAAiELAQgAAAgAAAAIAAAAAAAALicAAAAgAAAAQAAAAABAAAAgAAAAAgAABAAAAAAAAAAEAAAAAAAAAACAAAAAAgAAAAAAAAMAQIUAABAAABAAAAAAEAAAEAAAAAAAABAAAAAAAAAAAAAAAOAmAABLAAAAAEAAACgEAAAAAAAAAAAAAAAAAAAAAAAAAGAAAAwAAAAsJgAAHAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAACAAAAAAAAAAAAAAACCAAAEgAAAAAAAAAAAAAAC50ZXh0AAAANAcAAAAgAAAACAAAAAIAAAAAAAAAAAAAAAAAACAAAGAucnNyYwAAACgEAAAAQAAAAAYAAAAKAAAAAAAAAAAAAAAAAABAAABALnJlbG9jAAAMAAAAAGAAAAACAAAAEAAAAAAAAAAAAAAAAAAAQAAAQgAAAAAAAAAAAAAAAAAAAAAQJwAAAAAAAEgAAAACAAUAcCAAALwFAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABMwAQALAAAAAQAAEQByAQAAcAorAAYqHgIoEAAACioAQlNKQgEAAQAAAAAADAAAAHYyLjAuNTA3MjcAAAAABQBsAAAAwAEAACN+AAAsAgAAiAIAACNTdHJpbmdzAAAAALQEAAAUAAAAI1VTAMgEAAAQAAAAI0dVSUQAAADYBAAA5AAAACNCbG9iAAAAAAAAAAIAAAFHFAIACQAAAAD6ATMAFgAAAQAAABEAAAACAAAAAgAAABAAAAANAAAAAQAAAAEAAAABAAAAAAAKAAEAAAAAAAYAggB7AAYApwCVAAYAvgCVAAYA2wCVAAYA+gCVAAYAEwGVAAYALAGVAAYARwGVAAYAYgGVAAYAmgF7AQYArgF7AQYAvAGVAAYA1QGVAAYABQLyATsAGQIAAAYASAIoAgYAaAIoAgAAAAABAAAAAAABAAEAAQAQADsARQAFAAEAAQBQIAAAAACWAIkACgABAGcgAAAAAIYYjwAOAAEAEQCPABIAGQCPABIAIQCPABIAKQCPABIAMQCPABIAOQCPABIAQQCPABIASQCPABIAUQCPABcAWQCPABIAYQCPABIAaQCPABIAcQCPABwAgQCPACIAiQCPAA4ACQCPAA4ALgALACsALgATAF0ALgAbAF0ALgAjAF0ALgArACsALgAzAGMALgA7AF0ALgBLAF0ALgBTAHsALgBjAKUALgBrALIALgBzALsALgB7AMQAJwAEgAAAAQAAAAAAAAAAAAAAAABFAAAAAgAAAAAAAAAAAAAAAQByAAAAAAAAAAA8TW9kdWxlPgByZWdyZXNzaW9uVGVzdExvY2FsSm9iQWRkUmVmNEEzMERFOTRfdGVzdGRsbC5kbGwAVGVzdENsYXNzAHJlZ3Jlc3Npb25UZXN0TG9jYWxKb2JBZGRSZWY0QTMwREU5NF90ZXN0ZGxsAG1zY29ybGliAFN5c3RlbQBPYmplY3QAU2F5SGkALmN0b3IAU3lzdGVtLlJlZmxlY3Rpb24AQXNzZW1ibHlUaXRsZUF0dHJpYnV0ZQBBc3NlbWJseURlc2NyaXB0aW9uQXR0cmlidXRlAEFzc2VtYmx5Q29uZmlndXJhdGlvbkF0dHJpYnV0ZQBBc3NlbWJseUNvbXBhbnlBdHRyaWJ1dGUAQXNzZW1ibHlQcm9kdWN0QXR0cmlidXRlAEFzc2VtYmx5Q29weXJpZ2h0QXR0cmlidXRlAEFzc2VtYmx5VHJhZGVtYXJrQXR0cmlidXRlAEFzc2VtYmx5Q3VsdHVyZUF0dHJpYnV0ZQBTeXN0ZW0uUnVudGltZS5JbnRlcm9wU2VydmljZXMAQ29tVmlzaWJsZUF0dHJpYnV0ZQBHdWlkQXR0cmlidXRlAEFzc2VtYmx5VmVyc2lvbkF0dHJpYnV0ZQBBc3NlbWJseUZpbGVWZXJzaW9uQXR0cmlidXRlAFN5c3RlbS5EaWFnbm9zdGljcwBEZWJ1Z2dhYmxlQXR0cmlidXRlAERlYnVnZ2luZ01vZGVzAFN5c3RlbS5SdW50aW1lLkNvbXBpbGVyU2VydmljZXMAQ29tcGlsYXRpb25SZWxheGF0aW9uc0F0dHJpYnV0ZQBSdW50aW1lQ29tcGF0aWJpbGl0eUF0dHJpYnV0ZQAAAAARaABpACAAdABoAGUAcgBlAAAA+LohWe+Eo0GsrsHvHDwd5wAIt3pcVhk04IkDAAAOAyAAAQQgAQEOBCABAQIFIAEBET0EIAEBCAMHAQ4xAQAscmVncmVzc2lvblRlc3RMb2NhbEpvYkFkZFJlZjRBMzBERTk0X3Rlc3RkbGwAAAUBAAAAABcBABJDb3B5cmlnaHQgwqkgIDIwMDkAACkBACQwN2Y1Njc4Zi04YzJlLTQ3OWUtOTg1Ny1iMzBhNjFkZDczN2UAAAwBAAcxLjAuMC4wAAAIAQAHAQAAAAAIAQAIAAAAAAAeAQABAFQCFldyYXBOb25FeGNlcHRpb25UaHJvd3MBAAAAAADjAydKAAAAAAIAAACWAAAASCYAAEgIAABSU0RTJqyw+VFRU0yJX+9RNtIOogEAAABDOlxzb3VyY2VcY29uc29sZUFwcHNccmVncmVzc2lvblRlc3RMb2NhbEpvYkFkZFJlZjRBMzBERTk0X3Rlc3RkbGxcb2JqXERlYnVnXHJlZ3Jlc3Npb25UZXN0TG9jYWxKb2JBZGRSZWY0QTMwREU5NF90ZXN0ZGxsLnBkYgAAAAgnAAAAAAAAAAAAAB4nAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQJwAAAAAAAAAAX0NvckRsbE1haW4AbXNjb3JlZS5kbGwAAAAAAP8lACBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAQAAAAGAAAgAAAAAAAAAAAAAAAAAAAAQABAAAAMAAAgAAAAAAAAAAAAAAAAAAAAQAAAAAASAAAAFhAAADQAwAAAAAAAAAAAADQAzQAAABWAFMAXwBWAEUAUgBTAEkATwBOAF8ASQBOAEYATwAAAAAAvQTv/gAAAQAAAAEAAAAAAAAAAQAAAAAAPwAAAAAAAAAEAAAAAgAAAAAAAAAAAAAAAAAAAEQAAAABAFYAYQByAEYAaQBsAGUASQBuAGYAbwAAAAAAJAAEAAAAVAByAGEAbgBzAGwAYQB0AGkAbwBuAAAAAAAAALAEMAMAAAEAUwB0AHIAaQBuAGcARgBpAGwAZQBJAG4AZgBvAAAADAMAAAEAMAAwADAAMAAwADQAYgAwAAAAhAAtAAEARgBpAGwAZQBEAGUAcwBjAHIAaQBwAHQAaQBvAG4AAAAAAHIAZQBnAHIAZQBzAHMAaQBvAG4AVABlAHMAdABMAG8AYwBhAGwASgBvAGIAQQBkAGQAUgBlAGYANABBADMAMABEAEUAOQA0AF8AdABlAHMAdABkAGwAbAAAAAAAMAAIAAEARgBpAGwAZQBWAGUAcgBzAGkAbwBuAAAAAAAxAC4AMAAuADAALgAwAAAAhAAxAAEASQBuAHQAZQByAG4AYQBsAE4AYQBtAGUAAAByAGUAZwByAGUAcwBzAGkAbwBuAFQAZQBzAHQATABvAGMAYQBsAEoAbwBiAEEAZABkAFIAZQBmADQAQQAzADAARABFADkANABfAHQAZQBzAHQAZABsAGwALgBkAGwAbAAAAAAASAASAAEATABlAGcAYQBsAEMAbwBwAHkAcgBpAGcAaAB0AAAAQwBvAHAAeQByAGkAZwBoAHQAIACpACAAIAAyADAAMAA5AAAAjAAxAAEATwByAGkAZwBpAG4AYQBsAEYAaQBsAGUAbgBhAG0AZQAAAHIAZQBnAHIAZQBzAHMAaQBvAG4AVABlAHMAdABMAG8AYwBhAGwASgBvAGIAQQBkAGQAUgBlAGYANABBADMAMABEAEUAOQA0AF8AdABlAHMAdABkAGwAbAAuAGQAbABsAAAAAAB8AC0AAQBQAHIAbwBkAHUAYwB0AE4AYQBtAGUAAAAAAHIAZQBnAHIAZQBzAHMAaQBvAG4AVABlAHMAdABMAG8AYwBhAGwASgBvAGIAQQBkAGQAUgBlAGYANABBADMAMABEAEUAOQA0AF8AdABlAHMAdABkAGwAbAAAAAAANAAIAAEAUAByAG8AZAB1AGMAdABWAGUAcgBzAGkAbwBuAAAAMQAuADAALgAwAC4AMAAAADgACAABAEEAcwBzAGUAbQBiAGwAeQAgAFYAZQByAHMAaQBvAG4AAAAxAC4AMAAuADAALgAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAMAAAAMDcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA`; byte[] testdlldata = System.Convert.FromBase64String(testdlldatab64); System.IO.File.WriteAllBytes(localfn, testdlldata); try { Shell(@`Qizmt dfs put ` + localfn + ` ` + fn); } finally { System.IO.File.Delete(localfn); } } ]]> </Local> </Job> <Job Name=`testdll` Custodian=`` email=``> <Add Reference=`regressionTestLocalJobAddRef4A30DE94_testdll.dll` Type=`dfs` /> <Using>regressionTestLocalJobAddRef4A30DE94_testdll</Using> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Qizmt_Log(TestClass.SayHi()); } ]]> </Local> </Job> <Job Name=`testdll` Custodian=`` email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt del regressionTestLocalJobAddRef4A30DE94_testdll.dll`); } ]]> </Local> </Job> </Jobs> </SourceCode> ".Replace('`', '"');
        string dir = @"\\" + System.Net.Dns.GetHostName() + @"\" + Environment.CurrentDirectory.Replace(':', '$') + @"\RegressionTest\LocalJobAddRefTest\";
        if (System.IO.Directory.Exists(dir))
        {
            System.IO.Directory.Delete(dir, true);
        }
        System.IO.Directory.CreateDirectory(dir);
        System.IO.File.WriteAllText(dir + "regressionTest_localJobAddRef.xml", mr);
        string exe = Exec.GetQizmtExe();
        Exec.Shell(exe + " del regressionTest_localJobAddRef.xml");
        Exec.Shell(exe + " importdir " + dir);
        Console.WriteLine("-");
        Console.WriteLine("Testing add reference in a local job on a non-participating cluster...");
        string output = Exec.Shell(exe + " exec regressionTest_localJobAddRef.xml");
        // The referenced DLL's TestClass.SayHi() logs "hi there" on success.
        string expected = "hi there";
        if (output.IndexOf(expected, StringComparison.OrdinalIgnoreCase) > -1)
        {
            Console.WriteLine("[PASSED] - " + string.Join(" ", args));
        }
        else
        {
            Console.WriteLine("[FAILED] - " + string.Join(" ", args));
        }
        //Clean up
        Exec.Shell(exe + " del regressionTest_localJobAddRef.xml");
        System.IO.Directory.Delete(dir, true);
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("Error during local job add reference testing: {0}", e.Message);
    }
    finally
    {
        // Restore the original dfs.xml if it was modified.
        if (dfsback != null)
        {
            ok = DFSUtils.UndoFileChanges(dfspath, dfsback);
        }
    }
    if (dfsback != null && ok)
    {
        System.IO.File.Delete(dfsback);
    }
}
// Verifies the <Host> tag of a local job: when the tag names a specific
// (non-surrogate) host the job must run there; when the tag is absent the
// job must run on the first host of the SlaveList.
// args[1] is the network path (\\host\share\...) to the cluster's dfs.xml.
public static void TestHost(string[] args)
{
    if (args.Length < 2)
    {
        Console.Error.WriteLine("Error: LocalJobHost command needs argument: <dfsXmlPath>");
        return;
    }
    string dfspath = args[1];
    if (!dfspath.StartsWith(@"\\"))
    {
        Console.Error.WriteLine("Argument: <dfsXmlPath> must be a network path");
        return;
    }
    // The surrogate machine name is the host portion of the UNC path.
    string surrogate = dfspath.Substring(2, dfspath.IndexOf(@"\", 2) - 2).ToUpper();
    System.Xml.XmlDocument dfs = new System.Xml.XmlDocument();
    string slavelist = null;
    try
    {
        dfs.Load(dfspath);
        System.Xml.XmlNode node = dfs.SelectSingleNode("//SlaveList");
        if (node == null)
        {
            Console.Error.WriteLine("SlaveList node is not found in dfs.xml");
            return;
        }
        slavelist = node.InnerText.ToUpper();
        string[] parts = slavelist.Split(new char[] { ',', ';' });
        if (parts.Length < 2)
        {
            Console.Error.WriteLine("Must have at least 2 hosts in SlaveList tag in dfs.xml");
            return;
        }
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("Error loading dfs.xml: {0}", e.Message);
        return;
    }
    string exe = Exec.GetQizmtExe();
    //Test Host tag, empty and non-empty.
    {
        string[] hosts = slavelist.Split(new char[] { ',', ';' });
        string nonsurrogate = null;
        string firsthost = hosts[0];
        // Pick any host other than the surrogate to name in the <Host> tag.
        foreach (string host in hosts)
        {
            if (string.Compare(surrogate, host, true) != 0)
            {
                nonsurrogate = host;
                break;
            }
        }
        if (nonsurrogate == null)
        {
            Console.Error.WriteLine("Non-surrogate is not found from dfs.xml");
            return;
        }
        // Job with a non-empty <Host>: "xxx" is replaced by the chosen host
        // and backticks become double quotes.  The job logs DNS=<machine>.
        string mr = @"<SourceCode> <Jobs> <Job Name=`mash_Preprocessing` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> <Host>xxx</Host> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Qizmt_Log(`DNS=` + System.Net.Dns.GetHostName()); } ]]> </Local> </Job> </Jobs> </SourceCode>".Replace('`', '"').Replace("xxx", nonsurrogate);
        string dir = @"\\" + System.Net.Dns.GetHostName() + @"\" + Environment.CurrentDirectory.Replace(':', '$') + @"\RegressionTest\LocalJobHostTest\";
        if (System.IO.Directory.Exists(dir))
        {
            System.IO.Directory.Delete(dir, true);
        }
        System.IO.Directory.CreateDirectory(dir);
        System.IO.File.WriteAllText(dir + "regressionTest_localJobHost_nonEmptyHost.xml", mr);
        // Same job without a <Host> tag; expected to default to the first
        // SlaveList host.
        mr = @"<SourceCode> <Jobs> <Job Name=`mash_Preprocessing` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Qizmt_Log(`DNS=` + System.Net.Dns.GetHostName()); } ]]> </Local> </Job> </Jobs> </SourceCode>".Replace('`', '"');
        System.IO.File.WriteAllText(dir + "regressionTest_localJobHost_emptyHost.xml", mr);
        Exec.Shell(exe + " del regressionTest_localJobHost_nonEmptyHost.xml");
        Exec.Shell(exe + " del regressionTest_localJobHost_emptyHost.xml");
        Exec.Shell(exe + " importdir " + dir);
        Console.WriteLine("-");
        Console.WriteLine("Testing non-empty host in local job...");
        string output = Exec.Shell(exe + " exec regressionTest_localJobHost_nonEmptyHost.xml");
        // The job logged its own DNS name; it must match the named host.
        string expected = "DNS=" + nonsurrogate;
        if (output.IndexOf(expected, StringComparison.OrdinalIgnoreCase) > -1)
        {
            Console.WriteLine("[PASSED] - " + string.Join(" ", args));
        }
        else
        {
            Console.WriteLine("[FAILED] - " + string.Join(" ", args));
        }
        Console.WriteLine("-");
        Console.WriteLine("Testing empty host in local job...");
        output = Exec.Shell(exe + " exec regressionTest_localJobHost_emptyHost.xml");
        // With no <Host> tag the job is expected on the first SlaveList host.
        expected = "DNS=" + firsthost;
        if (output.IndexOf(expected, StringComparison.OrdinalIgnoreCase) > -1)
        {
            Console.WriteLine("[PASSED] - " + string.Join(" ", args));
        }
        else
        {
            Console.WriteLine("[FAILED] - " + string.Join(" ", args));
        }
        //Clean up
        System.IO.Directory.Delete(dir, true);
        Exec.Shell(exe + " del regressionTest_localJobHost_nonEmptyHost.xml");
        Exec.Shell(exe + " del regressionTest_localJobHost_emptyHost.xml");
    }
}
// Exercises cache invalidation across cluster membership changes: removing a
// non-surrogate machine, adding a machine back, removing a non-responsive
// machine, removing a participating surrogate, and removing a
// non-participating surrogate.  After each change the regression-test cache
// is checked via CacheExists.
// args[1] is the network path (\\host\share\...) to the cluster's dfs.xml.
// Must be run on the surrogate machine itself (checked below).
public static void TestAddRemoveMachineClearCache(string[] args)
{
    if (args.Length < 2)
    {
        Console.Error.WriteLine("Error: AddRemoveMachineClearCache command needs argument: <dfsXmlPath>");
        return;
    }
    string dfspath = args[1];
    if (!dfspath.StartsWith(@"\\"))
    {
        Console.Error.WriteLine("Argument: <dfsXmlPath> must be a network path");
        return;
    }
    string slavelist = null;
    try
    {
        System.Xml.XmlDocument dfs = new System.Xml.XmlDocument();
        dfs.Load(dfspath);
        System.Xml.XmlNode node = dfs.SelectSingleNode("//SlaveList");
        if (node == null)
        {
            Console.Error.WriteLine("SlaveList node is not found in dfs.xml");
            return;
        }
        slavelist = node.InnerText.ToUpper();
        string[] parts = slavelist.Split(';');
        if (parts.Length < 2)
        {
            Console.Error.WriteLine("Must have at least 2 machines in SlaveList in dfs.xml to test.");
            return;
        }
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("Error loading dfs.xml: {0}", e.Message);
        return;
    }
    // Surrogate host name and share-relative data directory from the UNC path.
    string surrogate = dfspath.Substring(2, dfspath.IndexOf(@"\", 2) - 2).ToUpper();
    int si = 2 + surrogate.Length + 1;
    string dspacedir = dfspath.Substring(si, dfspath.LastIndexOf(@"\") - si) + @"\";
    if (string.Compare(surrogate, System.Net.Dns.GetHostName(), true) != 0)
    {
        Console.Error.WriteLine("AddRemoveMachineClearCache test must be run from the surrogate.");
        return;
    }
    string jobdir = @"\\" + System.Net.Dns.GetHostName() + @"\" + Environment.CurrentDirectory.Replace(':', '$') + @"\RegressionTest\removeMachineClearCacheTest\";
    {
        // Job file (backticks become quotes): a local setup job plus a
        // mapreduce job declaring a <Delta> cache over the generated input.
        string mr = @"<SourceCode> <Jobs> <Job Name=`local` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt del regressionTest_removeMachineClearCache_Input.txt`); Shell(@`Qizmt del regressionTest_removeMachineClearCache_Output.txt`); Shell(@`Qizmt del regressionTest_removeMachineClearCache_Cache`); Shell(@`Qizmt gen regressionTest_removeMachineClearCache_Input.txt 200B`); } ]]> </Local> </Job> <Job Name=`mr` Custodian=`` Email=``> <Delta> <Name>regressionTest_removeMachineClearCache_Cache</Name> <DFSInput>dfs://regressionTest_removeMachineClearCache_Input*.txt</DFSInput> </Delta> <IOSettings> <JobType>mapreduce</JobType> <KeyLength>100</KeyLength> <DFSInput></DFSInput> <DFSOutput>dfs://regressionTest_removeMachineClearCache_Output.txt</DFSOutput> <OutputMethod>grouped</OutputMethod> </IOSettings> <MapReduce> <Map> <![CDATA[ public virtual void Map(ByteSlice line, MapOutput output) { output.Add(line, ByteSlice.Prepare()); } ]]> </Map> <Reduce> <![CDATA[ public override void Reduce(ByteSlice key, ByteSliceList values, ReduceOutput output) { for(int i = 0; i < values.Length; i++) { output.Add(values[i].Value); } } ]]> </Reduce> </MapReduce> </Job> </Jobs> </SourceCode>".Replace('`', '"');
        if (System.IO.Directory.Exists(jobdir))
        {
            System.IO.Directory.Delete(jobdir, true);
        }
        System.IO.Directory.CreateDirectory(jobdir);
        System.IO.File.WriteAllText(jobdir + "regressionTest_removeMachineClearCache.xml", mr);
    }
    string nonsurrogate = null;
    {
        // Pick any host other than the surrogate for add/remove operations.
        string[] hosts = slavelist.Split(';');
        foreach (string host in hosts)
        {
            if (string.Compare(host, surrogate, true) != 0)
            {
                nonsurrogate = host;
                break;
            }
        }
        if (nonsurrogate == null)
        {
            Console.Error.WriteLine("Non-surrogate is not found from SlaveList in dfs.xml");
            return;
        }
    }
    //Remove a non-surrogate machine.
    {
        Console.WriteLine("-");
        Console.WriteLine("Testing removal of a non-surrogate machine...");
        if (!CreateCache(surrogate, jobdir))
        {
            return;
        }
        Exec.Shell(@"Qizmt removemachine " + nonsurrogate);
        // Removing a machine is expected to clear the cache.
        if (CacheExists(surrogate))
        {
            Console.WriteLine("[FAILED] - " + string.Join(" ", args));
        }
        else
        {
            Console.WriteLine("[PASSED] - " + string.Join(" ", args));
        }
    }
    //Add the machine back.
    {
        Console.WriteLine("-");
        Console.WriteLine("Testing adding of a machine...");
        if (!CreateCache(surrogate, jobdir))
        {
            return;
        }
        Exec.Shell(@"Qizmt addmachine " + nonsurrogate);
        // Adding a machine is likewise expected to clear the cache.
        if (CacheExists(surrogate))
        {
            Console.WriteLine("[FAILED] - " + string.Join(" ", args));
        }
        else
        {
            Console.WriteLine("[PASSED] - " + string.Join(" ", args));
        }
    }
    //Removal of a non-surrogate and non-responsive machine.
    {
        Console.WriteLine("-");
        Console.WriteLine("Testing removal of a non-surrogate and non-responsive machine...");
        //Format dfs to its original state.
        Exec.Shell(@"Qizmt @format slaves=" + slavelist);
        if (!CreateCache(surrogate, jobdir))
        {
            return;
        }
        // Swap a real host name for one that does not resolve, so the
        // removemachine command has to handle an unreachable machine.
        string dummymachine = nonsurrogate + "dummy";
        string newslavelist = slavelist.Replace(nonsurrogate, dummymachine);
        DFSUtils.ChangeDFSXMLSlaveList(dfspath, newslavelist);
        Exec.Shell(@"Qizmt removemachine " + dummymachine, true);
        if (CacheExists(surrogate))
        {
            Console.WriteLine("[FAILED] - " + string.Join(" ", args));
        }
        else
        {
            Console.WriteLine("[PASSED] - " + string.Join(" ", args));
        }
    }
    //Remove a participating surrogate machine.
    {
        Console.WriteLine("-");
        Console.WriteLine("Testing removal of a participating surrogate...");
        //Participating surrogate.
        string newslavelist = slavelist;
        if (slavelist.IndexOf(surrogate, StringComparison.OrdinalIgnoreCase) == -1)
        {
            // Ensure the surrogate participates before removing it.
            newslavelist += ";" + surrogate;
        }
        Exec.Shell(@"Qizmt @format slaves=" + newslavelist);
        if (!CreateCache(surrogate, jobdir))
        {
            return;
        }
        Exec.Shell(@"Qizmt removemachine " + surrogate);
        //Who is the new surrogate?
        string newsurrogate = LocateSurrogate(dspacedir, nonsurrogate);
        if (newsurrogate == null)
        {
            Console.Error.WriteLine("After the removal of surrogate, cannot locate the new surrogate.");
            return;
        }
        if (CacheExists(newsurrogate))
        {
            Console.WriteLine("[FAILED] - " + string.Join(" ", args));
        }
        else
        {
            Console.WriteLine("[PASSED] - " + string.Join(" ", args));
        }
    }
    //Removal of a non-participating surrogate.
    {
        Console.WriteLine("-");
        Console.WriteLine("Testing removal of a non-participating surrogate...");
        string newslavelist = "";
        if (slavelist.IndexOf(surrogate, StringComparison.OrdinalIgnoreCase) > -1)
        {
            // Build a slave list with the surrogate filtered out.
            string[] hosts = slavelist.Split(';');
            foreach (string host in hosts)
            {
                if (string.Compare(host, surrogate, true) != 0)
                {
                    newslavelist += ";" + host;
                }
            }
            newslavelist = newslavelist.Trim(new char[] { ';' });
        }
        else
        {
            newslavelist = slavelist;
        }
        Exec.Shell(@"Qizmt @format slaves=" + newslavelist);
        if (!CreateCache(surrogate, jobdir))
        {
            return;
        }
        Exec.Shell(@"Qizmt removemachine " + surrogate);
        string newsurrogate = LocateSurrogate(dspacedir, nonsurrogate);
        if (newsurrogate == null)
        {
            Console.Error.WriteLine("Cannot locate new surrogate after the removal of surrogate machine.");
            return;
        }
        // NOTE(review): unlike the earlier sections, this phase passes when the
        // cache SURVIVES — presumably because removing a non-participating
        // surrogate should not disturb cluster data; confirm this is the
        // intended expectation.
        if (CacheExists(newsurrogate))
        {
            Console.WriteLine("[PASSED] - " + string.Join(" ", args));
        }
        else
        {
            Console.WriteLine("[FAILED] - " + string.Join(" ", args));
        }
    }
    // Restore the original cluster membership.
    Exec.Shell(@"Qizmt @format slaves=" + slavelist);
}
// Verifies the "@format metaonly=1" switch: a metadata-only format must leave
// physical chunk files on disk, while a normal format must delete them.
// args[1] is the network path (\\host\share\...) to the cluster's dfs.xml.
public static void TestFormatMetaOnlySwitch(string[] args)
{
    if (args.Length < 2)
    {
        Console.Error.WriteLine("Error: FormatMetaOnlySwitch command needs argument: <dfsXmlPath>");
        return;
    }
    string dfspath = args[1];
    if (!dfspath.StartsWith(@"\\"))
    {
        Console.Error.WriteLine("Argument: <dfsXmlPath> must be a network path");
        return;
    }
    System.Xml.XmlDocument dfs = new System.Xml.XmlDocument();
    string slavelist = null;
    try
    {
        dfs.Load(dfspath);
        System.Xml.XmlNode node = dfs.SelectSingleNode("//SlaveList");
        if (node == null)
        {
            Console.Error.WriteLine("SlaveList node is not found in dfs.xml");
            return;
        }
        slavelist = node.InnerText.ToUpper();
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("Error loading dfs.xml: {0}", e.Message);
        return;
    }
    // Share-relative data directory: the dfs.xml directory minus \\host\.
    string dspacedir = "";
    {
        int cut = dfspath.LastIndexOf(@"\");
        dspacedir = dfspath.Substring(0, cut + 1);
        cut = dspacedir.IndexOf(@"\", 2);
        dspacedir = dspacedir.Substring(cut + 1);
    }
    Console.WriteLine("-");
    Console.WriteLine("Testing: Qizmt format metaOnly switch...");
    string fpath = "";
    if (!GenerateDfsFile("regressionTest_formatMetaOnlySwitchTest.txt", dfspath, dspacedir, ref fpath))
    {
        return;
    }
    //format with metaonly=1
    string exe = Exec.GetQizmtExe();
    Exec.Shell(exe + " @format slaves=" + slavelist + " metaonly=1");
    bool ok = false;
    // A metadata-only format must have left the physical chunk in place.
    if (System.IO.File.Exists(fpath))
    {
        if (!GenerateDfsFile("regressionTest_formatMetaOnlySwitchTest.txt", dfspath, dspacedir, ref fpath))
        {
            return;
        }
        //format with metaonly=0
        Exec.Shell(exe + " @format slaves=" + slavelist);
        // A full format must have removed the physical chunk.
        ok = !System.IO.File.Exists(fpath);
    }
    Console.WriteLine((ok ? "[PASSED] - " : "[FAILED] - ") + string.Join(" ", args));
}
/// <summary>
/// Regression test for StaticGlobals.Qizmt_Hosts: imports and runs a job whose
/// local, remote, map and reduce phases each log "&lt;phase&gt;:&lt;semicolon-joined Qizmt_Hosts&gt;:",
/// then verifies every phase reported exactly the SlaveList read from dfs.xml.
/// </summary>
/// <param name="args">args[1] must be the UNC path to the cluster's dfs.xml.</param>
public static void TestDSpaceHosts(string[] args)
{
    // Validate arguments: need a network path to dfs.xml.
    if (args.Length < 2) { Console.Error.WriteLine("Error: QizmtHosts command needs argument: <dfsXmlPath>"); return; }
    string dfspath = args[1];
    if (!dfspath.StartsWith(@"\\")) { Console.Error.WriteLine("Argument: <dfsXmlPath> must be a network path"); return; }
    // Read the expected host list (SlaveList) from dfs.xml, upper-cased for comparison with job log output.
    System.Xml.XmlDocument dfs = new System.Xml.XmlDocument();
    string slavelist = null;
    try { dfs.Load(dfspath); System.Xml.XmlNode node = dfs.SelectSingleNode("//SlaveList"); if (node == null) { Console.Error.WriteLine("SlaveList node is not found in dfs.xml"); return; } slavelist = node.InnerText.ToUpper(); } catch (Exception e) { Console.Error.WriteLine("Error loading dfs.xml: {0}", e.Message); return; }
    // Embedded job source. Backticks stand in for double quotes and are swapped by the trailing Replace,
    // so the job XML can live inside this verbatim string.
    string mr = @"<SourceCode> <Jobs> <Job Name=`Preprocessing` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt del regression_test_Qizmt_Hosts_Input.txt`); Shell(@`Qizmt del regression_test_Qizmt_Hosts_Output.txt`); string hosts = ``; for(int i = 0; i < StaticGlobals.Qizmt_Hosts.Length; i++) { if(i != 0) { hosts += `;`; } hosts += StaticGlobals.Qizmt_Hosts[i]; } Qizmt_Log(`local:` + hosts + `:`); } ]]> </Local> </Job> <Job Name=`CreateSampleData` Custodian=`` Email=``> <IOSettings> <JobType>remote</JobType> <DFS_IO> <DFSReader></DFSReader> <DFSWriter>dfs://regression_test_Qizmt_Hosts_Input.txt</DFSWriter> </DFS_IO> </IOSettings> <Remote> <![CDATA[ public virtual void Remote(RemoteInputStream dfsinput, RemoteOutputStream dfsoutput) { //Create sample data. 
dfsoutput.WriteLine(`1`); string hosts = ``; for(int i = 0; i < StaticGlobals.Qizmt_Hosts.Length; i++) { if(i != 0) { hosts += `;`; } hosts += StaticGlobals.Qizmt_Hosts[i]; } Qizmt_Log(`remote:` + hosts + `:`); } ]]> </Remote> </Job> <Job Name=`mr` Custodian=`` Email=``> <IOSettings> <JobType>mapreduce</JobType> <KeyLength>1</KeyLength> <DFSInput>dfs://regression_test_Qizmt_Hosts_Input.txt</DFSInput> <DFSOutput>dfs://regression_test_Qizmt_Hosts_Output.txt</DFSOutput> <OutputMethod>grouped</OutputMethod> </IOSettings> <MapReduce> <Map> <![CDATA[ public virtual void Map(ByteSlice line, MapOutput output) { output.Add(line, line); string hosts = ``; for(int i = 0; i < StaticGlobals.Qizmt_Hosts.Length; i++) { if(i != 0) { hosts += `;`; } hosts += StaticGlobals.Qizmt_Hosts[i]; } Qizmt_Log(`map:` + hosts + `:`); } ]]> </Map> <Reduce> <![CDATA[ public override void Reduce(ByteSlice key, ByteSliceList values, ReduceOutput output) { string hosts = ``; for(int i = 0; i < StaticGlobals.Qizmt_Hosts.Length; i++) { if(i != 0) { hosts += `;`; } hosts += StaticGlobals.Qizmt_Hosts[i]; } Qizmt_Log(`reduce:` + hosts + `:`); } ]]> </Reduce> </MapReduce> </Job> <Job Name=`Post-processing` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt del regression_test_Qizmt_Hosts_Input.txt`); Shell(@`Qizmt del regression_test_Qizmt_Hosts_Output.txt`); } ]]> </Local> </Job> </Jobs> </SourceCode>".Replace('`', '"');
    Console.WriteLine("-");
    Console.WriteLine("Testing Qizmt_Hosts...");
    // Stage the job XML in a fresh UNC-visible work directory on this machine.
    string dir = @"\\" + System.Net.Dns.GetHostName() + @"\" + Environment.CurrentDirectory.Replace(':', '$') + @"\RegressionTest\DSpaceHostsTest\";
    if (System.IO.Directory.Exists(dir)) { System.IO.Directory.Delete(dir, true); }
    System.IO.Directory.CreateDirectory(dir);
    System.IO.File.WriteAllText(dir + @"regressionTest_dspaceHosts.xml", mr);
    string exe = Exec.GetQizmtExe();
    // Drop any stale copy of the job, then import and run the staged one.
    Exec.Shell(exe + @" del regressionTest_dspaceHosts.xml");
    Exec.Shell(exe + @" importdir " + dir);
    string results = Exec.Shell(exe + @" exec regressionTest_dspaceHosts.xml");
    // Every phase must have logged the full SlaveList verbatim.
    bool ok = true;
    string[] expected = new string[] { "local:" + slavelist + ":", "remote:" + slavelist + ":", "map:" + slavelist + ":", "reduce:" + slavelist + ":" };
    foreach (string exp in expected)
    {
        if (results.IndexOf(exp, StringComparison.OrdinalIgnoreCase) == -1) { ok = false; break; }
    }
    if (ok) { Console.WriteLine("[PASSED] - " + string.Join(" ", args)); } else { Console.WriteLine("[FAILED] - " + string.Join(" ", args)); }
    // Cleanup: remove the imported job and the staging directory.
    Exec.Shell(exe + @" del regressionTest_dspaceHosts.xml");
    System.IO.Directory.Delete(dir, true);
}
/// <summary>
/// Regression test for "Qizmt clustercheck": deliberately breaks the cluster
/// configuration in several ways — each mutation undone from backup in a
/// finally block — and verifies clustercheck reports the expected condition
/// (participating / non-participating surrogate, inaccessible host, bad meta
/// backup, broken surrogate, uninstalled host, orphaned worker).
/// </summary>
/// <param name="args">args[1] = UNC path to dfs.xml; optional args[2] = "verbose" to echo clustercheck output.</param>
public static void TestClusterCheck(string[] args)
{
    if (args.Length < 2) { Console.Error.WriteLine("Error: ClusterCheck command needs argument: <dfsXmlPath>"); return; }
    string dfspath = args[1];
    if (!dfspath.StartsWith(@"\\")) { Console.Error.WriteLine("Argument: <dfsXmlPath> must be a network path"); return; }
    bool verbose = false;
    if (args.Length > 2 && string.Compare(args[2], "verbose", true) == 0) { verbose = true; }
#if DEBUG
    verbose = true;
#endif
    // Split \\SURROGATE\dir\dfs.xml into the surrogate host name and the share-relative directory.
    string surrogate = dfspath.Substring(2, dfspath.IndexOf(@"\", 2) - 2).ToUpper();
    int si = 2 + surrogate.Length + 1;
    string dir = dfspath.Substring(si, dfspath.LastIndexOf(@"\") - si);
    System.Xml.XmlDocument dfs = new System.Xml.XmlDocument();
    string slavelist = null;
    // cont is set by each finally block: true only if the mutated file was restored, gating the next case.
    bool cont = false;
    try
    {
        dfs.Load(dfspath);
        System.Xml.XmlNode node = dfs.SelectSingleNode("//SlaveList");
        if (node == null) { Console.Error.WriteLine("SlaveList node is not found in dfs.xml"); return; }
        slavelist = node.InnerText.ToUpper();
        string[] parts = slavelist.Split(new char[] { ',', ';' });
        if (parts.Length < 2) { Console.Error.WriteLine("Must have at least 2 hosts in SlaveList tag in dfs.xml"); return; }
    }
    catch (Exception e) { Console.Error.WriteLine("Error loading dfs.xml: {0}", e.Message); return; }
    // Back up dfs.xml so every destructive case below can be undone.
    string dfsback = null;
    if (!DFSUtils.MakeFileBackup(dfspath, ref dfsback)) { Console.Error.WriteLine("Error backing up dfs.xml"); return; }
    // allhosts = SlaveList plus the surrogate (if not already listed); passed to every clustercheck run.
    string allhosts = slavelist;
    if (slavelist.IndexOf(surrogate) == -1) { allhosts += ";" + surrogate; }
    string exe = Exec.GetQizmtExe();
    //Participating surrogate.
    try
    {
        // Ensure the surrogate is in the slave list; clustercheck must then call it participating.
        string sl = slavelist;
        if (sl.IndexOf(surrogate) == -1) { sl += ";" + surrogate; DFSUtils.ChangeDFSXMLSlaveList(dfs, dfspath, sl); }
        string result = Exec.Shell(exe + " clustercheck " + allhosts);
        if (verbose) { Console.Write(result); }
        Console.WriteLine();
        Console.WriteLine("-");
        Console.WriteLine("Test case: Participating surrogate:");
        string expResult = @"Participating surrogate: \\" + surrogate.ToUpper();
        if (result.IndexOf(expResult) > -1) { Console.WriteLine("[PASSED] - " + string.Join(" ", args)); } else { Console.WriteLine("[FAILED] - " + string.Join(" ", args)); }
    }
    catch (Exception e) { Console.Error.WriteLine("Error during participating surrogate test case: {0}", e.Message); }
    finally { cont = DFSUtils.UndoFileChanges(dfspath, dfsback); }
    if (!cont) { return; }
    //Non-particpating surrogate.
    try
    {
        // Remove the surrogate from the slave list; clustercheck must flag it as non-participating.
        if (slavelist.IndexOf(surrogate) > -1)
        {
            string[] parts = slavelist.Split(new char[] { ',', ';' });
            string sl = "";
            foreach (string p in parts) { if (string.Compare(p, surrogate, true) != 0) { sl += ";" + p; } }
            sl = sl.Trim(';');
            DFSUtils.ChangeDFSXMLSlaveList(dfs, dfspath, sl);
        }
        string result = Exec.Shell(exe + " clustercheck " + allhosts);
        if (verbose) { Console.Write(result); }
        Console.WriteLine();
        Console.WriteLine("-");
        Console.WriteLine("Test case: Non-participating surrogate:");
        string expResult = @"Non-participating surrogate: \\" + surrogate.ToUpper();
        if (result.IndexOf(expResult) > -1) { Console.WriteLine("[PASSED] - " + string.Join(" ", args)); } else { Console.WriteLine("[FAILED] - " + string.Join(" ", args)); }
    }
    catch (Exception e) { Console.Error.WriteLine("Error during non-participating surrogate test case: {0}", e.Message); }
    finally { cont = DFSUtils.UndoFileChanges(dfspath, dfsback); }
    if (!cont) { return; }
    //Inaccessible host.
    try
    {
        // Append a host that doesn't exist; clustercheck must report it as inaccessible.
        string sl = allhosts + ";" + "BOGUSHOSTNAME";
        DFSUtils.ChangeDFSXMLSlaveList(dfs, dfspath, sl);
        string result = Exec.Shell(exe + " clustercheck " + sl);
        if (verbose) { Console.Write(result); }
        Console.WriteLine();
        Console.WriteLine("-");
        Console.WriteLine("Test case: Inaccessible host:");
        string expResult = @"Inaccessible host: \\BOGUSHOSTNAME";
        if (result.IndexOf(expResult) > -1) { Console.WriteLine("[PASSED] - " + string.Join(" ", args)); } else { Console.WriteLine("[FAILED] - " + string.Join(" ", args)); }
    }
    catch (Exception e) { Console.Error.WriteLine("Error during inaccessible host test case: {0}", e.Message); }
    finally { cont = DFSUtils.UndoFileChanges(dfspath, dfsback); }
    if (!cont) { return; }
    //Bad meta data backup.
    try
    {
        {
            // Point MetaBackup at a random (nonexistent) directory on the surrogate.
            System.Xml.XmlDocument thisdoc = new System.Xml.XmlDocument();
            thisdoc.Load(dfspath);
            System.Xml.XmlNode node = thisdoc.SelectSingleNode("//MetaBackup");
            if (node == null) { node = thisdoc.CreateElement("MetaBackup"); thisdoc.DocumentElement.AppendChild(node); }
            node.InnerText = @"\\" + surrogate + @"\c$\" + Guid.NewGuid().ToString();
            thisdoc.Save(dfspath);
        }
        string result = Exec.Shell(exe + " clustercheck " + allhosts);
        if (verbose) { Console.Write(result); }
        Console.WriteLine();
        Console.WriteLine("-");
        Console.WriteLine("Test case: Bad meta data backup:");
        string expResult = @"Bad meta backup surrogate";
        if (result.IndexOf(expResult) > -1) { Console.WriteLine("[PASSED] - " + string.Join(" ", args)); } else { Console.WriteLine("[FAILED] - " + string.Join(" ", args)); }
    }
    catch (Exception e) { Console.Error.WriteLine("Error during bad meta data backup test case: {0}", e.Message); }
    finally { cont = DFSUtils.UndoFileChanges(dfspath, dfsback); }
    if (!cont) { return; }
    //Broken surrogate.
    // The remaining cases need a worker besides the surrogate, plus a backup of that worker's slave.dat.
    string slavedat = "";
    string slavedatback = "";
    string slave = "";
    {
        string[] parts = slavelist.Split(new char[] { ',', ';' });
        foreach (string p in parts) { if (string.Compare(p, surrogate, true) != 0) { slave = p; break; } }
        if (slave == "") { Console.Error.WriteLine("Cannot perform broken surrogate, uninstalled host, or orphaned worker tests: must have a host in the cluster besides the surrogate itself."); return; }
        slavedat = @"\\" + slave + @"\" + dir + @"\slave.dat";
        if (!DFSUtils.MakeFileBackup(slavedat, ref slavedatback)) { Console.Error.WriteLine("Cannot perform broken surrogate, uninstalled host, or orphaned worker tests. Error while backing up slave.dat: {0}", slavedat); return; }
    }
    try
    {
        // Point the worker's slave.dat at a bogus master; clustercheck must call the surrogate broken.
        System.IO.File.WriteAllText(slavedat, "master=BOGUSHOSTNAME");
        string result = Exec.Shell(exe + " clustercheck " + allhosts);
        if (verbose) { Console.Write(result); }
        Console.WriteLine();
        Console.WriteLine("-");
        Console.WriteLine("Test case: Broken surrogate:");
        string expResult = @"Broken surrogate: \\" + surrogate;
        if (result.IndexOf(expResult) > -1) { Console.WriteLine("[PASSED] - " + string.Join(" ", args)); } else { Console.WriteLine("[FAILED] - " + string.Join(" ", args)); }
    }
    catch (Exception e) { Console.Error.WriteLine("Error during broken surrogate test case: {0}", e.Message); }
    finally { cont = DFSUtils.UndoFileChanges(slavedat, slavedatback); }
    if (!cont) { return; }
    //Uninstalled host.
    try
    {
        // Delete slave.dat entirely; clustercheck must report the worker as uninstalled.
        System.IO.File.Delete(slavedat);
        string result = Exec.Shell(exe + " clustercheck " + allhosts);
        if (verbose) { Console.Write(result); }
        Console.WriteLine();
        Console.WriteLine("-");
        Console.WriteLine("Test case: Uninstalled host:");
        string expResult = @"Uninstalled host: \\" + slave;
        if (result.IndexOf(expResult) > -1) { Console.WriteLine("[PASSED] - " + string.Join(" ", args)); } else { Console.WriteLine("[FAILED] - " + string.Join(" ", args)); }
    }
    catch (Exception e) { Console.Error.WriteLine("Error during uninstalled host test case: {0}", e.Message); }
    finally { cont = DFSUtils.UndoFileChanges(slavedat, slavedatback); }
    if (!cont) { return; }
    //Orphaned worker.
    {
        // Drop the worker from the slave list (it still has slave.dat): clustercheck must call it orphaned.
        string[] parts = slavelist.Split(new char[] { ',', ';' });
        string sl = "";
        foreach (string p in parts) { if (string.Compare(p, slave, true) != 0) { sl += ";" + p; } }
        sl = sl.Trim(';');
        if (sl.Length == 0)
        {
            Console.Error.WriteLine("Cannot perform orphaned worker test. Must have at least 2 machines in Slavelist tag in dfs.xml.");
        }
        else
        {
            try
            {
                DFSUtils.ChangeDFSXMLSlaveList(dfs, dfspath, sl);
                string result = Exec.Shell(exe + " clustercheck " + allhosts);
                if (verbose) { Console.Write(result); }
                Console.WriteLine();
                Console.WriteLine("-");
                Console.WriteLine("Test case: Orphaned worker:");
                string expResult = @"Orphaned worker: \\" + slave;
                if (result.IndexOf(expResult) > -1) { Console.WriteLine("[PASSED] - " + string.Join(" ", args)); } else { Console.WriteLine("[FAILED] - " + string.Join(" ", args)); }
            }
            catch (Exception e) { Console.Error.WriteLine("Error during orphaned worker test case: {0}", e.Message); }
            finally { cont = DFSUtils.UndoFileChanges(dfspath, dfsback); }
        }
    }
    if (!cont) { return; }
    // All cases restored cleanly; discard the backups.
    System.IO.File.Delete(dfsback);
    System.IO.File.Delete(slavedatback);
}
/// <summary>
/// Regression test for fault-tolerant execution (FTE). Imports six jobs, runs a
/// baseline (non-FTE) mapreduce, then re-runs the same work with
/// FaultTolerantExecution enabled while injecting a failure at each phase
/// (map, exchangeremote, exchangeowned, sort, reduce, replication, and — unless
/// -skipsplitsort — splitsort and reducelargezblock) via the FTTest control
/// file, and after each run compares the FTE output against the baseline.
/// Requires a Qizmt build with TESTFAULTTOLERANT defined, replication factor
/// of at least 2, and a fully healthy cluster.
/// </summary>
/// <param name="args">Optional flags from index 1: -verbose (echo job output), -skipsplitsort (skip splitsort cases).</param>
static void FaultTolerantExecutionTest(string[] args)
{
    // Parse optional command-line flags.
    bool verbose = false;
    bool skipsplitsort = false;
    for (int ai = 1; ai < args.Length; ai++)
    {
        switch (args[ai].ToLower())
        {
            case "-verbose": verbose = true; Console.WriteLine("verbose=true"); break;
            case "-skipsplitsort": skipsplitsort = true; Console.WriteLine("skipsplitsort=true"); break;
            default: throw new Exception("Unknown argument: " + args[ai]);
        }
    }
    Console.WriteLine("====FaultTolerantExecutionTest====");
    // Preconditions: FTTest hooks compiled in, replication >= 2, healthy cluster.
    if (!MySpace.DataMining.AELight.FTTest.enabled) { throw new Exception("TESTFAULTTOLERANT is not #defined. Need Qizmt build with all #define TESTFAULTTOLERANT uncommented."); }
    if (GetReplicationFactor() < 2) { throw new Exception("Replication factor must be 2 or greater."); }
    if (!IsClusterHealthy()) { throw new Exception("Cluster must be 100% healthy to begin with."); }
    {
        // Stage the six job definitions in a temp UNC directory and import them.
        // In all job sources below, backticks stand in for double quotes (swapped by Replace).
        Console.WriteLine("Importing jobs...");
        string tempdir = @"\\" + System.Net.Dns.GetHostName() + @"\" + Environment.CurrentDirectory.Replace(':', '$') + @"\" + Guid.NewGuid().ToString();
        System.IO.Directory.CreateDirectory(tempdir);
        #region job1
        // job1: baseline (non-FTE) — generate two random inputs, combine them, mapreduce into Output_not_sp.
        string txt = @"<SourceCode> <Jobs> <Job Name=`Preprocessing` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt del oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input*`); Shell(@`Qizmt del oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Output*`); } ]]> </Local> </Job> <Job Name=`CreateSampleData` Custodian=`` Email=`` Description=``> <IOSettings> <JobType>remote</JobType> <DFS_IO_Multi> <DFSReader></DFSReader> <DFSWriter>oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input1_####.txt</DFSWriter> <Mode>ALL CORES</Mode> </DFS_IO_Multi> </IOSettings> <Remote> <![CDATA[ public virtual void Remote(RemoteInputStream dfsinput, RemoteOutputStream dfsoutput) { int max = 50000; Random rnd = new Random(System.DateTime.Now.Millisecond / 2 + System.Diagnostics.Process.GetCurrentProcess().Id / 2); for(int i = 0; i < max; i++) { if(i % 
Qizmt_ProcessCount == Qizmt_ProcessID) { int num = rnd.Next(Int32.MinValue, Int32.MaxValue); int valuescount = rnd.Next(10,50); for(int vi = 0; vi < valuescount; vi++) { int num2 = rnd.Next(Int32.MinValue, Int32.MaxValue); dfsoutput.WriteLine(num.ToString() + `,apple,` + num2.ToString() + `,lemon`); } } } } ]]> </Remote> </Job> <Job Name=`CreateSampleData` Custodian=`` Email=`` Description=``> <IOSettings> <JobType>remote</JobType> <DFS_IO_Multi> <DFSReader></DFSReader> <DFSWriter>oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input2_####.txt</DFSWriter> <Mode>ALL CORES</Mode> </DFS_IO_Multi> </IOSettings> <Remote> <![CDATA[ public virtual void Remote(RemoteInputStream dfsinput, RemoteOutputStream dfsoutput) { int max = 50000; Random rnd = new Random(System.DateTime.Now.Millisecond / 2 + System.Diagnostics.Process.GetCurrentProcess().Id / 2); for(int i = 0; i < max; i++) { if(i % Qizmt_ProcessCount == Qizmt_ProcessID) { int num = rnd.Next(Int32.MinValue, Int32.MaxValue); int valuescount = rnd.Next(10,50); for(int vi = 0; vi < valuescount; vi++) { int num2 = rnd.Next(Int32.MinValue, Int32.MaxValue); dfsoutput.WriteLine(num.ToString() + `,apple,` + num2.ToString() + `,lemon`); } } } } ]]> </Remote> </Job> <Job Name=`Preprocessing` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt combine oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input1_*.txt +oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input1.txt`); Shell(@`Qizmt combine oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input2_*.txt +oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input2.txt`); } ]]> </Local> </Job> <Job Name=`mr` Custodian=`` Email=``> <IOSettings> <JobType>mapreduce</JobType> <KeyLength>int</KeyLength> <DFSInput>dfs://oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input1.txt;oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input2.txt</DFSInput> <DFSOutput>dfs://oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Output_not_sp.txt</DFSOutput> 
<OutputMethod>grouped</OutputMethod> </IOSettings> <MapReduce> <Map> <![CDATA[ public virtual void Map(ByteSlice line, MapOutput output) { mstring sLine = mstring.Prepare(line); int num = sLine.NextItemToInt(','); mstring title = sLine.NextItemToString(','); int num2 = sLine.NextItemToInt(','); mstring title2 = sLine.NextItemToString(','); recordset rKey = recordset.Prepare(); rKey.PutInt(num); int num3 = -1; if(StaticGlobals.DSpace_InputFileName == `oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input1.txt`) { num3 = 1; } else if(StaticGlobals.DSpace_InputFileName == `oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input2.txt`) { num3 = 2; } else if(StaticGlobals.DSpace_InputFileName == `oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input3.txt`) { num3 = 3; } recordset rValue = recordset.Prepare(); rValue.PutInt(num2); rValue.PutString(title); rValue.PutString(title2); rValue.PutInt(num3); output.Add(rKey, rValue); } ]]> </Map> <Reduce> <![CDATA[ public override void Reduce(ByteSlice key, ByteSliceList values, ReduceOutput output) { recordset rKey = recordset.Prepare(key); int num = rKey.GetInt(); for(int i = 0; i < values.Length; i++) { recordset rValue = recordset.Prepare(values.Items[i]); int num2 = rValue.GetInt(); mstring title = rValue.GetString(); mstring title2 = rValue.GetString(); int num3 = rValue.GetInt(); mstring sLine = mstring.Prepare(num); sLine = sLine.AppendM(',') .AppendM(num2) .AppendM(',') .AppendM(title) .AppendM(',') .AppendM(title2) .AppendM(',') .AppendM(num3); output.Add(sLine); } } ]]> </Reduce> </MapReduce> </Job> </Jobs> </SourceCode>".Replace('`', '"');
        System.IO.File.WriteAllText(tempdir + @"\reg_job1_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml", txt);
        #endregion
        #region job2
        // job2: the same mapreduce with FaultTolerantExecution enabled (shuffled map input), writing Output_sp.
        txt = @"<SourceCode> <Jobs> <Job Name=`Preprocessing` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt del oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Output_sp.txt`); } ]]> </Local> </Job> <Job 
Name=`mr` Custodian=`` Email=``> <IOSettings> <JobType>mapreduce</JobType> <KeyLength>int</KeyLength> <DFSInput>dfs://oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input1.txt;oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input2.txt</DFSInput> <DFSOutput>dfs://oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Output_sp.txt</DFSOutput> <OutputMethod>grouped</OutputMethod> </IOSettings> <FaultTolerantExecution> <Mode>enabled</Mode> <MapInputOrder>shuffle</MapInputOrder> </FaultTolerantExecution> <MapReduce> <Map> <![CDATA[ public virtual void Map(ByteSlice line, MapOutput output) { mstring sLine = mstring.Prepare(line); int num = sLine.NextItemToInt(','); mstring title = sLine.NextItemToString(','); int num2 = sLine.NextItemToInt(','); mstring title2 = sLine.NextItemToString(','); recordset rKey = recordset.Prepare(); rKey.PutInt(num); int num3 = -1; if(StaticGlobals.DSpace_InputFileName == `oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input1.txt`) { num3 = 1; } else if(StaticGlobals.DSpace_InputFileName == `oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input2.txt`) { num3 = 2; } else if(StaticGlobals.DSpace_InputFileName == `oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Input3.txt`) { num3 = 3; } recordset rValue = recordset.Prepare(); rValue.PutInt(num2); rValue.PutString(title); rValue.PutString(title2); rValue.PutInt(num3); output.Add(rKey, rValue); } ]]> </Map> <Reduce> <![CDATA[ public override void Reduce(ByteSlice key, ByteSliceList values, ReduceOutput output) { recordset rKey = recordset.Prepare(key); int num = rKey.GetInt(); List<KeyValuePair<int, string>> list = new List<KeyValuePair<int, string>>(values.Length); for(int i = 0; i < values.Length; i++) { recordset rValue = recordset.Prepare(values.Items[i]); int num2 = rValue.GetInt(); mstring title = rValue.GetString(); mstring title2 = rValue.GetString(); int num3 = rValue.GetInt(); mstring sLine = mstring.Prepare(num); sLine = sLine.AppendM(',') .AppendM(num2) .AppendM(',') .AppendM(title) .AppendM(',') .AppendM(title2) .AppendM(',') 
.AppendM(num3); output.Add(sLine); } } ]]> </Reduce> </MapReduce> </Job> </Jobs> </SourceCode> ".Replace('`', '"');
        System.IO.File.WriteAllText(tempdir + @"\reg_job2_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml", txt);
        #endregion
        #region job3
        // job3: verifies Output_not_sp and Output_sp are identical (same line count, properly grouped keys,
        // same sorted content); logs "error count=0" on success, throws otherwise.
        txt = @"<SourceCode> <Jobs> <Job Name=`checkresults` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { string f1 = IOUtils.GetTempDirectory() + @`\` + Guid.NewGuid().ToString(); string f2 = IOUtils.GetTempDirectory() + @`\` + Guid.NewGuid().ToString(); Shell(@`qizmt get oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Output_not_sp.txt ` + f1); Shell(@`qizmt get oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_Output_sp.txt ` + f2); string[] lines1 = System.IO.File.ReadAllLines(f1); string[] lines2 = System.IO.File.ReadAllLines(f2); if(lines1.Length != lines2.Length) { throw new Exception(`lines counts are different. lines1.len=` + lines1.Length.ToString() + `; lines2.len=` + lines2.Length.ToString()); } if(!CheckGroupedKeys(lines1)) { throw new Exception(`lines1 not grouped properly`); } if(!CheckGroupedKeys(lines2)) { throw new Exception(`lines2 not grouped properly`); } { List<string> list1 = new List<string>(lines1); List<string> list2 = new List<string>(lines2); list1.Sort(); list2.Sort(); for(int i=0; i < list1.Count; i++) { if(list1[i] != list2[i]) { throw new Exception(`line different at: ` + i.ToString()); } } } System.IO.File.Delete(f1); System.IO.File.Delete(f2); Qizmt_Log(`error count=0`); } public bool CheckGroupedKeys(string[] lines) { //Make sure the keys are grouped. 
Dictionary<int, List<string>> dic = new Dictionary<int, List<string>>(lines.Length); int prevkey = -1; for(int i = 0; i < lines.Length; i++) { string line = lines[i]; string[] parts = line.Split(','); int key = Int32.Parse(parts[0]); bool keychanged = false; if(i == 0) { keychanged = true; } else if(prevkey != key) { keychanged = true; } if(keychanged) { prevkey = key; dic.Add(key, new List<string>()); //will error out if this key has been seen before. thus not grouped properly. 
} dic[key].Add(line); } return true; } ]]> </Local> </Job> </Jobs> </SourceCode> ".Replace('`', '"');
        System.IO.File.WriteAllText(tempdir + @"\reg_job3_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml", txt);
        #endregion
        #region job1splitsort
        // job1splitsort: baseline with a constant key so all values land in one reduce group, forcing splitsort.
        txt = @"<SourceCode> <Jobs> <Job Name=`Preprocessing` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt del oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Input*`); Shell(@`Qizmt del oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Output*`); } ]]> </Local> </Job> <Job Name=`CreateSampleData` Custodian=`` Email=`` Description=``> <IOSettings> <JobType>remote</JobType> <DFS_IO_Multi> <DFSReader></DFSReader> <DFSWriter>oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Input1_####.txt</DFSWriter> <Mode>ALL CORES</Mode> </DFS_IO_Multi> </IOSettings> <Remote> <![CDATA[ public virtual void Remote(RemoteInputStream dfsinput, RemoteOutputStream dfsoutput) { int max = 2097152; Random rnd = new Random(System.DateTime.Now.Millisecond / 2 + System.Diagnostics.Process.GetCurrentProcess().Id / 2); for(int i = 0; i < max; i++) { if(i % Qizmt_ProcessCount == Qizmt_ProcessID) { int num = 1; int valuescount = 1; for(int vi = 0; vi < valuescount; vi++) { int num2 = rnd.Next(0,9); dfsoutput.WriteLine(num.ToString() + `,` + num2.ToString()); } } } } ]]> </Remote> </Job> <Job Name=`CreateSampleData` Custodian=`` Email=`` Description=``> <IOSettings> <JobType>remote</JobType> <DFS_IO_Multi> 
<DFSReader></DFSReader> <DFSWriter>oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Input2_####.txt</DFSWriter> <Mode>ALL CORES</Mode> </DFS_IO_Multi> </IOSettings> <Remote> <![CDATA[ public virtual void Remote(RemoteInputStream dfsinput, RemoteOutputStream dfsoutput) { int max = 2097152; Random rnd = new Random(System.DateTime.Now.Millisecond / 2 + System.Diagnostics.Process.GetCurrentProcess().Id / 2); for(int i = 0; i < max; i++) { if(i % Qizmt_ProcessCount == Qizmt_ProcessID) { int num = 1; int valuescount = 1; for(int vi = 0; vi < valuescount; vi++) { int num2 = rnd.Next(0,9); dfsoutput.WriteLine(num.ToString() + `,` + num2.ToString()); } } } } ]]> </Remote> </Job> <Job Name=`Preprocessing` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt combine oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Input1_*.txt +oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Input1.txt`); Shell(@`Qizmt combine oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Input2_*.txt +oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Input2.txt`); } ]]> </Local> </Job> <Job Name=`mr` Custodian=`` Email=``> <IOSettings> <JobType>mapreduce</JobType> <KeyLength>int</KeyLength> <DFSInput>dfs://oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Input1.txt;oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Input2.txt</DFSInput> <DFSOutput>dfs://oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Output_not_sp.txt</DFSOutput> <OutputMethod>grouped</OutputMethod> </IOSettings> <MapReduce> <Map> <![CDATA[ public virtual void Map(ByteSlice line, MapOutput output) { mstring sLine = mstring.Prepare(line); int num = sLine.NextItemToInt(','); int num2 = sLine.NextItemToInt(','); recordset rKey = recordset.Prepare(); num = 1; // constant key to cause splitsort 
rKey.PutInt(num); recordset rValue = recordset.Prepare(); rValue.PutInt(num2); for(int i = 0; i < 32; i++) { output.Add(rKey, rValue); } } ]]> </Map> <Reduce> 
<![CDATA[ public override void Reduce(ByteSlice key, ByteSliceList values, ReduceOutput output) { recordset rKey = recordset.Prepare(key); int num = rKey.GetInt(); for(int i = 0; i < values.Length; i++) { recordset rValue = recordset.Prepare(values.Items[i]); int num2 = rValue.GetInt(); mstring sLine = mstring.Prepare(num); sLine = sLine.AppendM(',') .AppendM(num2); output.Add(sLine); } } ]]> </Reduce> </MapReduce> </Job> </Jobs> </SourceCode> ".Replace('`', '"');
        System.IO.File.WriteAllText(tempdir + @"\reg_splitsort_job1_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml", txt);
        #endregion
        #region job2splitsort
        // job2splitsort: the splitsort mapreduce with FaultTolerantExecution enabled, writing splitsort_Output_sp.
        txt = @"<SourceCode> <Jobs> <Job Name=`Preprocessing` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt del oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Output_sp.txt`); } ]]> </Local> </Job> <Job Name=`mr` Custodian=`` Email=``> <IOSettings> <JobType>mapreduce</JobType> <KeyLength>int</KeyLength> <DFSInput>dfs://oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Input1.txt;oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Input2.txt</DFSInput> <DFSOutput>dfs://oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Output_sp.txt</DFSOutput> <OutputMethod>grouped</OutputMethod> </IOSettings> <FaultTolerantExecution> <Mode>enabled</Mode> <MapInputOrder>shuffle</MapInputOrder> </FaultTolerantExecution> <MapReduce> <Map> <![CDATA[ public virtual void Map(ByteSlice line, MapOutput output) { mstring sLine = mstring.Prepare(line); int num = sLine.NextItemToInt(','); int num2 = sLine.NextItemToInt(','); recordset rKey = recordset.Prepare(); num = 1; // constant key to cause splitsort 
rKey.PutInt(num); recordset rValue = recordset.Prepare(); rValue.PutInt(num2); for(int i = 0; i < 32; i++) { output.Add(rKey, rValue); } } ]]> </Map> <Reduce> <![CDATA[ public override void Reduce(ByteSlice key, ByteSliceList values, ReduceOutput output) { recordset rKey = recordset.Prepare(key); int num = 
rKey.GetInt(); for(int i = 0; i < values.Length; i++) { recordset rValue = recordset.Prepare(values.Items[i]); int num2 = rValue.GetInt(); mstring sLine = mstring.Prepare(num); sLine = sLine.AppendM(',') .AppendM(num2); output.Add(sLine); } } ]]> </Reduce> </MapReduce> </Job> </Jobs> </SourceCode> ".Replace('`', '"');
        System.IO.File.WriteAllText(tempdir + @"\reg_splitsort_job2_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml", txt);
        #endregion
        #region job3splitsort
        // job3splitsort: verifies the two splitsort outputs match (count and sorted content); logs "error count=0".
        txt = @"<SourceCode> <Jobs> <Job Name=`checkresults` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { string f1 = IOUtils.GetTempDirectory() + @`\` + Guid.NewGuid().ToString(); string f2 = IOUtils.GetTempDirectory() + @`\` + Guid.NewGuid().ToString(); Shell(@`qizmt get oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Output_not_sp.txt ` + f1); Shell(@`qizmt get oo_D3FA879A-5AC7-48c5-80BC-0168550B9A11_splitsort_Output_sp.txt ` + f2); string[] lines1 = System.IO.File.ReadAllLines(f1); string[] lines2 = System.IO.File.ReadAllLines(f2); if(lines1.Length != lines2.Length) { throw new Exception(`lines counts are different. 
lines1.len=` + lines1.Length.ToString() + `; lines2.len=` + lines2.Length.ToString()); } { List<string> list1 = new List<string>(lines1); List<string> list2 = new List<string>(lines2); list1.Sort(); list2.Sort(); for(int i=0; i < list1.Count; i++) { if(list1[i] != list2[i]) { throw new Exception(`line different at: ` + i.ToString()); } } } System.IO.File.Delete(f1); System.IO.File.Delete(f2); Qizmt_Log(`error count=0`); } ]]> </Local> </Job> </Jobs> </SourceCode> ".Replace('`', '"');
        System.IO.File.WriteAllText(tempdir + @"\reg_splitsort_job3_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml", txt);
        #endregion
        // Replace any stale copies of these jobs, import the staged ones, and drop the staging dir.
        Exec.Shell("qizmt del reg_*job*_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
        Exec.Shell("qizmt importdir \"" + tempdir + "\"");
        System.IO.Directory.Delete(tempdir, true);
        Console.WriteLine("Done");
    }
    // The FTTest control file on the surrogate tells the TESTFAULTTOLERANT build where to inject a failure.
    string controlfile = @"\\" + MySpace.DataMining.AELight.Surrogate.MasterHost + @"\c$\temp\" + MySpace.DataMining.AELight.FTTest.controlfilename;
    try
    {
        // Baseline: run job1 without FTE to produce the reference output.
        Console.WriteLine("Running job in normal mode...");
        string output = Exec.Shell("qizmt exec reg_job1_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
        if (verbose) { Console.WriteLine(output); }
        Console.WriteLine("Done");
        //Test failure at map
        {
            string phase = "map";
            // NOTE(review): "{1}:" is written literally (WriteLine has no format args here) —
            // presumably part of the FTTest control-file syntax; confirm against the FTTest parser.
            using (System.IO.StreamWriter w = new System.IO.StreamWriter(controlfile)) { w.WriteLine("{1}:" + phase); }
            Console.WriteLine("Running job in FTE mode with failure at {0}...", phase);
            output = Exec.Shell("qizmt exec reg_job2_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
            if (verbose) { Console.WriteLine(output); }
            Console.WriteLine("Done");
            Console.WriteLine("Checking results...");
            output = Exec.Shell("qizmt exec reg_job3_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
            if (output.IndexOf("error count=0", StringComparison.OrdinalIgnoreCase) == -1) { throw new Exception("Test failed"); }
            if (verbose) { Console.WriteLine(output); }
            Console.WriteLine("Done");
        }
        //Test failure at exchangeremote
        {
            string phase = "exchangeremote";
            using (System.IO.StreamWriter w = new System.IO.StreamWriter(controlfile)) { w.WriteLine("{1}:" + phase); }
            Console.WriteLine("Running job in FTE mode with failure at {0}...", phase);
            output = Exec.Shell("qizmt exec reg_job2_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
            if (verbose) { Console.WriteLine(output); }
            Console.WriteLine("Done");
            Console.WriteLine("Checking results...");
            output = Exec.Shell("qizmt exec reg_job3_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
            if (output.IndexOf("error count=0", StringComparison.OrdinalIgnoreCase) == -1) { throw new Exception("Test failed"); }
            if (verbose) { Console.WriteLine(output); }
            Console.WriteLine("Done");
        }
        //Test failure at exchangeowned
        {
            string phase = "exchangeowned";
            using (System.IO.StreamWriter w = new System.IO.StreamWriter(controlfile)) { w.WriteLine("{1}:" + phase); }
            Console.WriteLine("Running job in FTE mode with failure at {0}...", phase);
            output = Exec.Shell("qizmt exec reg_job2_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
            if (verbose) { Console.WriteLine(output); }
            Console.WriteLine("Done");
            Console.WriteLine("Checking results...");
            output = Exec.Shell("qizmt exec reg_job3_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
            if (output.IndexOf("error count=0", StringComparison.OrdinalIgnoreCase) == -1) { throw new Exception("Test failed"); }
            if (verbose) { Console.WriteLine(output); }
            Console.WriteLine("Done");
        }
        //Test failure at sort
        {
            string phase = "sort";
            using (System.IO.StreamWriter w = new System.IO.StreamWriter(controlfile)) { w.WriteLine("{1}:" + phase); }
            Console.WriteLine("Running job in FTE mode with failure at {0}...", phase);
            output = Exec.Shell("qizmt exec reg_job2_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
            if (verbose) { Console.WriteLine(output); }
            Console.WriteLine("Done");
            Console.WriteLine("Checking results...");
            output = Exec.Shell("qizmt exec reg_job3_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
            if (output.IndexOf("error count=0", StringComparison.OrdinalIgnoreCase) == -1) { throw new Exception("Test failed"); }
            if (verbose) { Console.WriteLine(output); }
            Console.WriteLine("Done");
        }
        //Test failure at reduce
        {
            string phase = "reduce";
            using (System.IO.StreamWriter w = new System.IO.StreamWriter(controlfile)) { w.WriteLine("{1}:" + phase); }
            Console.WriteLine("Running job in FTE mode with failure at {0}...", phase);
            output = Exec.Shell("qizmt exec reg_job2_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
            if (verbose) { Console.WriteLine(output); }
            Console.WriteLine("Done");
            Console.WriteLine("Checking results...");
            output = Exec.Shell("qizmt exec reg_job3_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
            if (output.IndexOf("error count=0", StringComparison.OrdinalIgnoreCase) == -1) { throw new Exception("Test failed"); }
            if (verbose) { Console.WriteLine(output); }
            Console.WriteLine("Done");
        }
        //Test failure at replication
        {
            string phase = "replication";
            using (System.IO.StreamWriter w = new System.IO.StreamWriter(controlfile)) { w.WriteLine("{1}:" + phase); }
            Console.WriteLine("Running job in FTE mode with failure at {0}...", phase);
            output = Exec.Shell("qizmt exec reg_job2_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
            if (verbose) { Console.WriteLine(output); }
            Console.WriteLine("Done");
            Console.WriteLine("Checking results...");
            output = Exec.Shell("qizmt exec reg_job3_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
            if (output.IndexOf("error count=0", StringComparison.OrdinalIgnoreCase) == -1) { throw new Exception("Test failed"); }
            if (verbose) { Console.WriteLine(output); }
            Console.WriteLine("Done");
        }
        if (!skipsplitsort)
        {
            //! Remove the previous failure directive before the normal-mode splitsort baseline run.
            System.IO.File.Delete(controlfile);
            Console.WriteLine("Running job in normal mode with splitsort...");
            Exec.Shell("qizmt exec reg_splitsort_job1_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
            Console.WriteLine("Done");
            //Test failure at splitsort
            {
                string phase = "splitsort";
                using (System.IO.StreamWriter w = new System.IO.StreamWriter(controlfile)) { w.WriteLine("{1}:" + phase); }
                Console.WriteLine("Running job in FTE mode with failure at {0}...", phase);
                output = Exec.Shell("qizmt exec reg_splitsort_job2_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
                if (verbose) { Console.WriteLine(output); }
                Console.WriteLine("Done");
                Console.WriteLine("Checking results...");
                output = Exec.Shell("qizmt exec reg_splitsort_job3_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
                if (output.IndexOf("error count=0", StringComparison.OrdinalIgnoreCase) == -1) { throw new Exception("Test failed"); }
                if (verbose) { Console.WriteLine(output); }
                Console.WriteLine("Done");
            }
            //Test failure at reducelargezblock, reduce when splitsort occurrs.
            {
                string phase = "reducelargezblock";
                using (System.IO.StreamWriter w = new System.IO.StreamWriter(controlfile)) { w.WriteLine("{1}:" + phase); }
                Console.WriteLine("Running job in FTE mode with failure at {0}...", phase);
                output = Exec.Shell("qizmt exec reg_splitsort_job2_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
                if (verbose) { Console.WriteLine(output); }
                Console.WriteLine("Done");
                Console.WriteLine("Checking results...");
                output = Exec.Shell("qizmt exec reg_splitsort_job3_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
                if (output.IndexOf("error count=0", StringComparison.OrdinalIgnoreCase) == -1) { throw new Exception("Test failed"); }
                if (verbose) { Console.WriteLine(output); }
                Console.WriteLine("Done");
            }
        }
    }
    finally
    {
        // Always remove the imported jobs, all DFS artifacts, and the failure-injection control file.
        Exec.Shell("qizmt del reg_*job*_D3FA879A-5AC7-48c5-80BC-0168550B9A11.xml");
        Exec.Shell("qizmt del *_D3FA879A-5AC7-48c5-80BC-0168550B9A11_*.txt");
        System.IO.File.Delete(controlfile);
    }
}
// Regression test for GlobalCriticalSection (the cluster-wide mutual-exclusion lock).
// For each of <threadcount> copies it writes a job-chain XML (local preprocessing,
// remote sample-data creation, a 4-line map/reduce, and a final cleanup job) in which
// every phase increments a shared counter file under GlobalCriticalSection.GetLock().
// The counter path (dump.txt on a UNC share of the current directory) is passed to the
// jobs via Qizmt_ExecArgs[0]. All copies are imported with "importdirmt", launched
// concurrently via ThreadProc (each worker blocks on the static 'evt' gate until Set),
// and the test PASSES when the final counter equals threadcount * 10 — each job run
// performs 10 increments (1 local + 1 remote + 4 map lines + 4 reduced values), so any
// lost update means the global lock failed to serialize the read/increment/write cycle.
// Relies on static members: exe (Qizmt executable path), dumpfn (counter file path,
// assigned here and read by ThreadProc), evt (start-gate event).
// NOTE: backticks in the job XML are placeholders replaced by '"' via .Replace('`', '"').
public static void TestCriticalSection(string[] args) { const int threadcount = 2; string dir = Environment.CurrentDirectory + @"\TestCriticalSection\"; if (System.IO.Directory.Exists(dir)) { System.IO.Directory.Delete(dir, true); } System.IO.Directory.CreateDirectory(dir); dir = dir.Replace(":", "$"); dir = @"\\" + System.Net.Dns.GetHostName() + @"\" + dir; dumpfn = dir + "dump.txt"; Console.WriteLine("-"); Console.WriteLine("Testing critical section..."); for (int i = 0; i < threadcount; i++) { string mr = (@"<SourceCode> <Jobs> <Job Name=`regression_test_CriticalSection_Preprocessing` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt del regression_test_CriticalSection_Input" + i.ToString() + @".txt`); Shell(@`Qizmt del regression_test_CriticalSection_Output" + i.ToString() + @".txt`); using(GlobalCriticalSection.GetLock()) { Increment(); } } private void Increment() { System.IO.StreamReader r = new System.IO.StreamReader(Qizmt_ExecArgs[0]); int count = Int32.Parse(r.ReadToEnd()); r.Close(); count++; System.IO.FileStream fs = new System.IO.FileStream(Qizmt_ExecArgs[0], System.IO.FileMode.Open, System.IO.FileAccess.Write, System.IO.FileShare.None); byte[] buf = System.Text.Encoding.UTF8.GetBytes(count.ToString()); fs.Write(buf, 0, buf.Length); fs.Close(); } ]]> </Local> </Job> <Job Name=`regression_test_CriticalSection_CreateSampleData` Custodian=`` Email=``> <IOSettings> <JobType>remote</JobType> <DFS_IO> <DFSReader></DFSReader> <DFSWriter>dfs://regression_test_CriticalSection_Input" + i.ToString() + @".txt</DFSWriter> </DFS_IO> </IOSettings> <Remote> <![CDATA[ public virtual void Remote(RemoteInputStream dfsinput, RemoteOutputStream dfsoutput) { //Create sample data. 
dfsoutput.WriteLine(`1498`); dfsoutput.WriteLine(`1503`); dfsoutput.WriteLine(`1501`); dfsoutput.WriteLine(`1501`); using(GlobalCriticalSection.GetLock()) { Increment(); } } private void Increment() { System.IO.StreamReader r = new System.IO.StreamReader(Qizmt_ExecArgs[0]); int count = Int32.Parse(r.ReadToEnd()); r.Close(); count++; System.IO.FileStream fs = new System.IO.FileStream(Qizmt_ExecArgs[0], System.IO.FileMode.Open, System.IO.FileAccess.Write, System.IO.FileShare.None); byte[] buf = System.Text.Encoding.UTF8.GetBytes(count.ToString()); fs.Write(buf, 0, buf.Length); fs.Close(); } ]]> </Remote> </Job> <Job Name=`regression_test_CriticalSection` Custodian=`` Email=``> <IOSettings> <JobType>mapreduce</JobType> <KeyLength>4</KeyLength> <DFSInput>dfs://regression_test_CriticalSection_Input" + i.ToString() + @".txt</DFSInput> <DFSOutput>dfs://regression_test_CriticalSection_Output" + i.ToString() + @".txt</DFSOutput> <OutputMethod>grouped</OutputMethod> </IOSettings> <MapReduce> <Map> <![CDATA[ public virtual void Map(ByteSlice line, MapOutput output) { output.Add(line, line); using(GlobalCriticalSection.GetLock()) { Increment(); } } private void Increment() { System.IO.StreamReader r = new System.IO.StreamReader(Qizmt_ExecArgs[0]); int count = Int32.Parse(r.ReadToEnd()); r.Close(); count++; System.IO.FileStream fs = new System.IO.FileStream(Qizmt_ExecArgs[0], System.IO.FileMode.Open, System.IO.FileAccess.Write, System.IO.FileShare.None); byte[] buf = System.Text.Encoding.UTF8.GetBytes(count.ToString()); fs.Write(buf, 0, buf.Length); fs.Close(); } ]]> </Map> <Reduce> <![CDATA[ public override void Reduce(ByteSlice key, ByteSliceList values, ReduceOutput output) { for(int i = 0; i < values.Length; i++) { output.Add(key); using(GlobalCriticalSection.GetLock()) { Increment(); } } } private void Increment() { System.IO.StreamReader r = new System.IO.StreamReader(Qizmt_ExecArgs[0]); int count = Int32.Parse(r.ReadToEnd()); r.Close(); count++; System.IO.FileStream fs = 
new System.IO.FileStream(Qizmt_ExecArgs[0], System.IO.FileMode.Open, System.IO.FileAccess.Write, System.IO.FileShare.None); byte[] buf = System.Text.Encoding.UTF8.GetBytes(count.ToString()); fs.Write(buf, 0, buf.Length); fs.Close(); } ]]> </Reduce> </MapReduce> </Job> <Job Name=`regression_test_CriticalSection_Preprocessing` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt del regression_test_CriticalSection_Input" + i.ToString() + @".txt`); Shell(@`Qizmt del regression_test_CriticalSection_Output" + i.ToString() + @".txt`); } ]]> </Local> </Job> </Jobs> </SourceCode>").Replace('`', '"'); string fn = "regressionTest_criticalSection" + i.ToString() + ".xml"; System.IO.File.WriteAllText(dir + fn, mr); } Exec.Shell(exe + @" del regressionTest_criticalSection*.xml"); Exec.Shell(exe + @" importdirmt " + dir); System.IO.File.WriteAllText(dumpfn, "0"); System.Threading.Thread[] ths = new System.Threading.Thread[threadcount]; for (int i = 0; i < threadcount; i++) { System.Threading.Thread th = new System.Threading.Thread(new System.Threading.ParameterizedThreadStart(ThreadProc)); ths[i] = th; th.Start(i); } evt.Set(); for (int i = 0; i < threadcount; i++) { ths[i].Join(); } int result = Int32.Parse(System.IO.File.ReadAllText(dumpfn).Trim()); if (result == threadcount * 10) { Console.WriteLine("[PASSED] - " + string.Join(" ", args)); } else { Console.WriteLine("[FAILED] - " + string.Join(" ", args)); } Exec.Shell(exe + @" del regressionTest_criticalSection*.xml"); System.IO.Directory.Delete(dir, true); }
static bool IsClusterHealthy() { string output = Exec.Shell("qizmt health"); return(output.IndexOf("100%") != -1); }
// Writes three regression job files into a temporary UNC directory and imports them
// into the cluster with "qizmt importdir":
//   reg_job1: normal-mode pipeline — generates three random ~50M-line inputs on all
//             cores, combines the part files, and runs a grouped map/reduce writing
//             reg_..._Output_not_sp.txt (the reference output).
//   reg_job2: the identical map/reduce run in speculative computing mode (with a
//             deliberate Sleep every 12195th map iteration) writing
//             reg_..._Output_sp.txt.
//   reg_job3: a local checker that bulkgets the chunk lists of both outputs and
//             byte-compares corresponding chunk files on worker threads, logging
//             "error count=N" (callers grep for "error count=0"); it requires the
//             Qizmt install dir as Qizmt_ExecArgs[0] to resolve chunk UNC paths.
// Backticks inside the job XML are placeholders replaced by '"' via .Replace('`', '"').
// The temp directory is deleted after the import.
static void ImportJobs() { string dir = @"\\" + System.Net.Dns.GetHostName() + @"\" + Environment.CurrentDirectory.Replace(':', '$') + @"\" + Guid.NewGuid().ToString(); System.IO.Directory.CreateDirectory(dir); #region normalmode { string job = @"<SourceCode> <Jobs> <Job Name=`Preprocessing` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt del reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input*`); Shell(@`Qizmt del reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Output*`); } ]]> </Local> </Job> <Job Name=`CreateSampleData` Custodian=`` Email=`` Description=`Create sample data`> <IOSettings> <JobType>remote</JobType> <DFS_IO_Multi> <DFSReader></DFSReader> <DFSWriter>reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input1_####.txt</DFSWriter> <Mode>ALL CORES</Mode> </DFS_IO_Multi> </IOSettings> <Remote> <![CDATA[ public virtual void Remote(RemoteInputStream dfsinput, RemoteOutputStream dfsoutput) { int max = 50000000; Random rnd = new Random(System.DateTime.Now.Millisecond / 2 + System.Diagnostics.Process.GetCurrentProcess().Id / 2); for(int i = 0; i < max; i++) { if(i % Qizmt_ProcessCount == Qizmt_ProcessID) { int num = rnd.Next(Int32.MinValue, Int32.MaxValue); int num2 = rnd.Next(Int32.MinValue, Int32.MaxValue); dfsoutput.WriteLine(num.ToString() + `,apple,` + num2.ToString() + `,lemon`); } } } ]]> </Remote> </Job> <Job Name=`CreateSampleData` Custodian=`` Email=`` Description=`Create sample data`> <IOSettings> <JobType>remote</JobType> <DFS_IO_Multi> <DFSReader></DFSReader> <DFSWriter>reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input2_####.txt</DFSWriter> <Mode>ALL CORES</Mode> </DFS_IO_Multi> </IOSettings> <Remote> <![CDATA[ public virtual void Remote(RemoteInputStream dfsinput, RemoteOutputStream dfsoutput) { int max = 50000000; Random rnd = new Random(System.DateTime.Now.Millisecond / 2 + System.Diagnostics.Process.GetCurrentProcess().Id / 2); for(int i = 0; i < max; i++) { if(i % 
Qizmt_ProcessCount == Qizmt_ProcessID) { int num = rnd.Next(Int32.MinValue, Int32.MaxValue); int num2 = rnd.Next(Int32.MinValue, Int32.MaxValue); dfsoutput.WriteLine(num.ToString() + `,apple,` + num2.ToString() + `,lemon`); } } } ]]> </Remote> </Job> <Job Name=`CreateSampleData` Custodian=`` Email=`` Description=`Create sample data`> <IOSettings> <JobType>remote</JobType> <DFS_IO_Multi> <DFSReader></DFSReader> <DFSWriter>reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input3_####.txt</DFSWriter> <Mode>ALL CORES</Mode> </DFS_IO_Multi> </IOSettings> <Remote> <![CDATA[ public virtual void Remote(RemoteInputStream dfsinput, RemoteOutputStream dfsoutput) { int max = 50000000; Random rnd = new Random(System.DateTime.Now.Millisecond / 2 + System.Diagnostics.Process.GetCurrentProcess().Id / 2); for(int i = 0; i < max; i++) { if(i % Qizmt_ProcessCount == Qizmt_ProcessID) { int num = rnd.Next(Int32.MinValue, Int32.MaxValue); int num2 = rnd.Next(Int32.MinValue, Int32.MaxValue); dfsoutput.WriteLine(num.ToString() + `,apple,` + num2.ToString() + `,lemon`); } } } ]]> </Remote> </Job> <Job Name=`Preprocessing` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt combine reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input1_*.txt +reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input1.txt`); Shell(@`Qizmt combine reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input2_*.txt +reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input2.txt`); Shell(@`Qizmt combine reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input3_*.txt +reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input3.txt`); } ]]> </Local> </Job> <Job Name=`oo` Custodian=`` Email=``> <IOSettings> <JobType>mapreduce</JobType> <KeyLength>int</KeyLength> <DFSInput>dfs://reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input1.txt;dfs://reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input2.txt;reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input3.txt</DFSInput> 
<DFSOutput>dfs://reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Output_not_sp.txt</DFSOutput> <OutputMethod>grouped</OutputMethod> </IOSettings> <MapReduce> <Map> <![CDATA[ public virtual void Map(ByteSlice line, MapOutput output) { if(StaticGlobals.MapIteration % 12195 == 0) { //System.Threading.Thread.Sleep(120000); //System.Threading.Thread.Sleep(1000); } mstring sLine = mstring.Prepare(line); int num = sLine.NextItemToInt(','); mstring title = sLine.NextItemToString(','); int num2 = sLine.NextItemToInt(','); mstring title2 = sLine.NextItemToString(','); recordset rKey = recordset.Prepare(); rKey.PutInt(num); int num3 = -1; if(StaticGlobals.DSpace_InputFileName == `reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input1.txt`) { num3 = 1; } else if(StaticGlobals.DSpace_InputFileName == `reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input2.txt`) { num3 = 2; } else if(StaticGlobals.DSpace_InputFileName == `reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input3.txt`) { num3 = 3; } recordset rValue = recordset.Prepare(); rValue.PutInt(num2); rValue.PutString(title); rValue.PutString(title2); rValue.PutInt(num3); output.Add(rKey, rValue); } ]]> </Map> <Reduce> <![CDATA[ public override void Reduce(ByteSlice key, ByteSliceList values, ReduceOutput output) { recordset rKey = recordset.Prepare(key); int num = rKey.GetInt(); List<KeyValuePair<int, string>> list = new List<KeyValuePair<int, string>>(values.Length); for(int i = 0; i < values.Length; i++) { recordset rValue = recordset.Prepare(values.Items[i]); int num2 = rValue.GetInt(); mstring title = rValue.GetString(); mstring title2 = rValue.GetString(); int num3 = rValue.GetInt(); KeyValuePair<int, string> pair = new KeyValuePair<int, string>(num2, title.ToString() + `,` + title2.ToString() + `,` + num3.ToString()); list.Add(pair); } list.Sort(delegate(KeyValuePair<int, string> x, KeyValuePair<int, string> y) { int comp = x.Key.CompareTo(y.Key); if(comp != 0) { return comp; } return x.Value.CompareTo(y.Value); }); foreach(KeyValuePair<int, 
string> pair in list) { mstring sLine = mstring.Prepare(num); sLine = sLine.AppendM(',') .AppendM(pair.Key) .AppendM(',') .AppendM(pair.Value); output.Add(sLine); } } ]]> </Reduce> </MapReduce> </Job> </Jobs> </SourceCode>".Replace('`', '"'); System.IO.File.WriteAllText(dir + @"\reg_job1_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26.xml", job); } #endregion #region speculativeComputing { string job = @"<SourceCode> <Jobs> <Job Name=`Preprocessing` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt del reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Output_sp.txt`); } ]]> </Local> </Job> <Job Name=`mr` Custodian=`` Email=``> <IOSettings> <JobType>mapreduce</JobType> <KeyLength>int</KeyLength> <DFSInput>dfs://reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input1.txt;dfs://reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input2.txt;reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input3.txt</DFSInput> <DFSOutput>dfs://reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Output_sp.txt</DFSOutput> <OutputMethod>grouped</OutputMethod> </IOSettings> <Computing> <Mode>speculative</Mode> <MapInputOrder>shuffle</MapInputOrder> </Computing> <MapReduce> <Map> <![CDATA[ public virtual void Map(ByteSlice line, MapOutput output) { if(StaticGlobals.MapIteration % 12195 == 0) { System.Threading.Thread.Sleep(1000); } mstring sLine = mstring.Prepare(line); int num = sLine.NextItemToInt(','); mstring title = sLine.NextItemToString(','); int num2 = sLine.NextItemToInt(','); mstring title2 = sLine.NextItemToString(','); recordset rKey = recordset.Prepare(); rKey.PutInt(num); int num3 = -1; if(StaticGlobals.DSpace_InputFileName == `reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input1.txt`) { num3 = 1; } else if(StaticGlobals.DSpace_InputFileName == `reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input2.txt`) { num3 = 2; } else if(StaticGlobals.DSpace_InputFileName == `reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Input3.txt`) { num3 = 3; } recordset rValue = 
recordset.Prepare(); rValue.PutInt(num2); rValue.PutString(title); rValue.PutString(title2); rValue.PutInt(num3); output.Add(rKey, rValue); } ]]> </Map> <Reduce> <![CDATA[ public override void Reduce(ByteSlice key, ByteSliceList values, ReduceOutput output) { recordset rKey = recordset.Prepare(key); int num = rKey.GetInt(); List<KeyValuePair<int, string>> list = new List<KeyValuePair<int, string>>(values.Length); for(int i = 0; i < values.Length; i++) { recordset rValue = recordset.Prepare(values.Items[i]); int num2 = rValue.GetInt(); mstring title = rValue.GetString(); mstring title2 = rValue.GetString(); int num3 = rValue.GetInt(); KeyValuePair<int, string> pair = new KeyValuePair<int, string>(num2, title.ToString() + `,` + title2.ToString() + `,` + num3.ToString()); list.Add(pair); } list.Sort(delegate(KeyValuePair<int, string> x, KeyValuePair<int, string> y) { int comp = x.Key.CompareTo(y.Key); if(comp != 0) { return comp; } return x.Value.CompareTo(y.Value); }); foreach(KeyValuePair<int, string> pair in list) { mstring sLine = mstring.Prepare(num); sLine = sLine.AppendM(',') .AppendM(pair.Key) .AppendM(',') .AppendM(pair.Value); output.Add(sLine); } } ]]> </Reduce> </MapReduce> </Job> </Jobs> </SourceCode>".Replace('`', '"'); System.IO.File.WriteAllText(dir + @"\reg_job2_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26.xml", job); } #endregion #region check { string job = @"<SourceCode> <Jobs> <Job Name=`check` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { if(Qizmt_ExecArgs.Length == 0) { throw new Exception(`Qizmt dir required.`); } string qizmtdir = Qizmt_ExecArgs[0]; string f1 = IOUtils.GetTempDirectory() + @`\` + Guid.NewGuid().ToString(); string f2 = IOUtils.GetTempDirectory() + @`\` + Guid.NewGuid().ToString(); Shell(@`qizmt bulkget ` + f1 + ` reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Output_not_sp.txt`); Shell(@`qizmt bulkget ` + f2 + ` 
reg_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26_Output_sp.txt`); { string[] lines1 = System.IO.File.ReadAllLines(f1); string[] lines2 = System.IO.File.ReadAllLines(f2); if(lines1.Length != lines2.Length) { throw new Exception(`Parts count is different. lines1.len=` + lines1.Length.ToString() + `; lines2.len=` + lines2.Length.ToString()); } List<string> err = new List<string>(); List<System.Threading.Thread> thds = new List<System.Threading.Thread>(); for(int li = 0; li < lines1.Length; li++) { string[] parts1 = lines1[li].Split(' '); string[] parts2 = lines2[li].Split(' '); string chunk1 = @`\\` + parts1[0].Split(';')[0] + @`\` + qizmtdir + @`\` + parts1[1]; string chunk2 = @`\\` + parts2[0].Split(';')[0] + @`\` + qizmtdir + @`\` + parts2[1]; //DSpace_Log(`chunk1=` + chunk1 + `;chunk2=` + chunk2); TP tp = new TP(); tp.chunk1 = chunk1; tp.chunk2 = chunk2; tp.err = err; System.Threading.Thread th = new System.Threading.Thread(new System.Threading.ThreadStart(tp.ThreadProc)); th.Start(); thds.Add(th); } foreach(System.Threading.Thread th in thds) { th.Join(); } DSpace_Log(`error count=` + err.Count.ToString()); } System.IO.File.Delete(f1); System.IO.File.Delete(f2); } public class TP { public string chunk1; public string chunk2; public List<string> err; public void ThreadProc() { if(!CompareFiles(chunk1, chunk2)) { lock(err) { err.Add(chunk1); } } } private static bool CompareFiles(string f1, string f2) { System.IO.FileInfo info1 = new System.IO.FileInfo(f1); System.IO.FileInfo info2 = new System.IO.FileInfo(f2); if (info1.Length != info2.Length) { return false; } System.IO.FileStream fs1 = new System.IO.FileStream(f1, System.IO.FileMode.Open); System.IO.FileStream fs2 = new System.IO.FileStream(f2, System.IO.FileMode.Open); bool ok = true; for (int i = 0; i < info1.Length; i++) { int b1 = fs1.ReadByte(); int b2 = fs2.ReadByte(); if (b1 != b2) { ok = false; break; } } fs1.Close(); fs2.Close(); return ok; } } ]]> </Local> </Job> </Jobs> </SourceCode>".Replace('`', '"');; 
// Persist the checker job, then replace any previously-imported copies of the three
// jobs in the cluster and remove the temporary import directory.
System.IO.File.WriteAllText(dir + @"\reg_job3_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26.xml", job); } #endregion Exec.Shell("qizmt del reg_job*_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26.xml"); Exec.Shell("qizmt importdir \"" + dir + "\""); System.IO.Directory.Delete(dir, true); }
/// <summary>
/// Tests fault tolerance when a hard-drive failure is simulated on one host and a second
/// host is restarted after a speculative-computing map/reduce job (reg_job2) has started.
/// The scenario is run twice — once with InputOrder=next and once with InputOrder=shuffle —
/// and each run is verified by reg_job3, which compares the speculative output against the
/// normal-mode baseline produced by reg_job1 ("error count=0" means identical).
/// </summary>
/// <param name="args">Original command-line arguments; echoed in the PASSED banner.</param>
public static void TestHDFailureAfterMapStarts(string[] args)
{
    Console.WriteLine("====TestHDFailureAfterMapStarts====");
    string qizmtdir = null;
    string[] hosts = GetQizmtHosts(out qizmtdir);
    // hosts[1] gets the simulated HD failure and hosts[2] the restart, so at least
    // 4 machines are needed to leave healthy capacity for recovery.
    // (Messages fixed: the old text said "more than 4" / "greater than 2", which
    // contradicted the actual checks below.)
    if (hosts.Length < 4)
    {
        throw new Exception("There must be at least 4 machines in the Qizmt cluster to test.");
    }
    int replication = GetReplicationFactor();
    if (replication < 2)
    {
        throw new Exception("Replication factor must be at least 2.");
    }
    if (!IsClusterHealthy())
    {
        throw new Exception("Cluster must be 100% healthy to begin with.");
    }
    Console.WriteLine("Importing jobs...");
    ImportJobs();
    Console.WriteLine("Done");
    // Baseline: produce the reference output in normal (non-speculative) mode.
    Console.WriteLine("Running job in normal mode...");
    Exec.Shell("qizmt exec reg_job1_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26.xml");
    Console.WriteLine("Done");
    // Identical failure scenario under both map-input orders (previously two
    // copy-pasted blocks differing only in the InputOrder value).
    RunSpeculativeHDFailureScenario("next", hosts, qizmtdir);
    RunSpeculativeHDFailureScenario("shuffle", hosts, qizmtdir);
    Console.WriteLine("[PASSED] - " + string.Join(" ", args));
}

/// <summary>
/// Runs reg_job2 in speculative computing mode with the given map InputOrder while
/// simulating a hard-drive failure on hosts[1] and restarting hosts[2] one minute in,
/// then un-simulates the failure and runs reg_job3 to verify the output matches the
/// normal-mode baseline. Throws on any unrecovered failure or output mismatch.
/// </summary>
/// <param name="inputOrder">Map input order to inject into the job XML ("next" or "shuffle").</param>
/// <param name="hosts">Cluster host list; hosts[1] and hosts[2] are the failure targets.</param>
/// <param name="qizmtdir">Qizmt install dir, passed to the reg_job3 checker.</param>
private static void RunSpeculativeHDFailureScenario(string inputOrder, string[] hosts, string qizmtdir)
{
    List<string> errors = new List<string>();
    //Simulate hd failure
    string fhost = hosts[1];
    //Restart host
    string rhost = hosts[2];
    System.Threading.Thread thjob = new System.Threading.Thread(new System.Threading.ThreadStart(delegate()
    {
        Console.WriteLine("Running job in speculative computing mode with inputOrder = {0}...", inputOrder);
        string output = Exec.Shell("qizmt exec \"//Job[@Name='mr']/Computing/InputOrder=" + inputOrder + "\" reg_job2_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26.xml");
        Console.WriteLine("Done");
        // Both induced failures must have been detected and recovered from by the job.
        if (output.IndexOf("HWFailure:Recovered:" + fhost + ":", StringComparison.OrdinalIgnoreCase) == -1)
        {
            errors.Add("HWFailure at host " + fhost + " was not captured.");
        }
        if (output.IndexOf("HWFailure:Recovered:" + rhost + ":", StringComparison.OrdinalIgnoreCase) == -1)
        {
            errors.Add("HWFailure at host " + rhost + " was not captured.");
        }
    }));
    thjob.Start();
    //Let the job run for 1 min before simulating hd failures.
    System.Threading.Thread.Sleep(60 * 1000);
    SimulateHDFailure(fhost);
    RestartHost(rhost);
    thjob.Join(); // Join() also guarantees the worker's writes to 'errors' are visible here.
    if (errors.Count > 0)
    {
        string allerr = string.Join(";", errors.ToArray());
        throw new Exception("Error during job2: " + allerr);
    }
    UnsimulateHDFailure(fhost);
    Console.WriteLine("Checking results...");
    string checkoutput = Exec.Shell("qizmt exec reg_job3_10DF6995-B6C1-4c9a-9770-C46B6DF6DE26.xml \"" + qizmtdir + "\"");
    Console.WriteLine("Done");
    if (checkoutput.IndexOf("error count=0", StringComparison.OrdinalIgnoreCase) == -1)
    {
        throw new Exception("Test failed");
    }
}
/// <summary>
/// Tests "Qizmt clearlogs": runs a remote job that deliberately dereferences null on
/// every machine so slave-log.txt files are produced, verifies at least one such log
/// exists, then runs clearlogs and verifies the logs are gone on all hosts.
/// </summary>
/// <param name="args">args[0] is the command name; args[1] must be the UNC path to dfs.xml.</param>
public static void TestClearLogs(string[] args)
{
    if (args.Length < 2)
    {
        Console.Error.WriteLine("Error: Command clearlogs needs argument: <dfsXmlPath>");
        return;
    }
    string dfspath = args[1];
    if (!dfspath.StartsWith(@"\\"))
    {
        Console.Error.WriteLine("Error: Argument <dfsXmlPath> must be a network path");
        return;
    }
    // Derive the Qizmt install dir (admin-share form, e.g. "c$\Qizmt") from the dfs.xml path.
    string dspacedir = null;
    {
        int del = dfspath.IndexOf('$');
        if (del > 0)
        {
            string temp = dfspath.Substring(del - 1).Trim();
            del = temp.LastIndexOf(@"\");
            if (del > -1)
            {
                dspacedir = temp.Substring(0, del);
            }
        }
    }
    if (dspacedir == null)
    {
        Console.Error.WriteLine("Error: Cannot parse Qizmt dir.");
        return;
    }
    // Read the cluster host list from the SlaveList node of dfs.xml.
    string[] hosts = null;
    {
        try
        {
            System.Xml.XmlDocument dc = new System.Xml.XmlDocument();
            dc.Load(dfspath);
            System.Xml.XmlNode node = dc.SelectSingleNode("//SlaveList");
            if (node == null)
            {
                Console.Error.WriteLine("SlaveList node is not found in dfs.xml");
                return;
            }
            hosts = node.InnerText.Split(';');
        }
        catch (Exception e)
        {
            Console.Error.WriteLine("Error while loading dfs.xml: {0}", e.ToString());
            return;
        }
    }
    Console.WriteLine("-");
    Console.WriteLine("Testing clearlogs...");
    string dir = @"\\" + System.Net.Dns.GetHostName() + @"\" + Environment.CurrentDirectory.Replace(':', '$') + @"\RegressionTest\ClearLogsTest\";
    if (System.IO.Directory.Exists(dir))
    {
        System.IO.Directory.Delete(dir, true);
    }
    System.IO.Directory.CreateDirectory(dir);
    // The remote job writes one line and then dereferences null ("int x = s.Length;")
    // on purpose, so that every machine records an error in its slave-log.txt.
    System.IO.File.WriteAllText(dir + "regressionTest_clearlogs.xml", @" <SourceCode> <Jobs> <Job Name=`local` Custodian=`` Email=``> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Shell(@`Qizmt del regressionTest_clearlogs_Output*.txt`); } ]]> </Local> </Job> <Job Name=`remote` Custodian=`` Email=``> <IOSettings> <JobType>remote</JobType> <DFS_IO_Multi> <DFSReader></DFSReader> <DFSWriter>dfs://regressionTest_clearlogs_Output####.txt</DFSWriter> <Mode>ALL MACHINES</Mode> </DFS_IO_Multi> </IOSettings> <Remote> <![CDATA[ public virtual void Remote(RemoteInputStream dfsinput, 
RemoteOutputStream dfsoutput) { dfsoutput.WriteLine(`x`); string s = null; int x = s.Length; } ]]> </Remote> </Job> </Jobs> </SourceCode> ".Replace('`', '"'));
    string exe = Exec.GetQizmtExe();
    Exec.Shell(exe + " del regressionTest_clearlogs.xml", false);
    Exec.Shell(exe + " importdir " + dir, false);
    try
    {
        Exec.Shell(exe + " exec regressionTest_clearlogs.xml");
    }
    catch
    {
        // The job is expected to fail; only the slave logs it leaves behind matter.
    }
    //Make sure there exists slave-log.txt.
    bool found = false;
    for (int i = 0; i < hosts.Length; i++)
    {
        if (System.IO.File.Exists(@"\\" + hosts[i] + @"\" + dspacedir + @"\slave-log.txt"))
        {
            found = true;
            break;
        }
    }
    if (!found)
    {
        Console.Error.WriteLine("Slave-log.txt is not located.");
        return;
    }
    Exec.Shell(exe + " del regressionTest_clearlogs.xml", false);
    // BUGFIX: the job writes regressionTest_clearlogs_Output####.txt (see the DFSWriter
    // above); the old cleanup deleted "*.xml" and leaked the .txt outputs in DFS.
    Exec.Shell(exe + " del regressionTest_clearlogs_Output*.txt", false);
    System.IO.Directory.Delete(dir, true);
    string output = Exec.Shell(exe + " clearlogs");
    if (output.IndexOf("Done", StringComparison.OrdinalIgnoreCase) > -1)
    {
        // clearlogs reported success; confirm the slave logs are actually gone everywhere.
        found = false;
        for (int i = 0; i < hosts.Length; i++)
        {
            if (System.IO.File.Exists(@"\\" + hosts[i] + @"\" + dspacedir + @"\slave-log.txt"))
            {
                found = true;
                break;
            }
        }
        if (!found)
        {
            Console.WriteLine("[PASSED] - " + string.Join(" ", args));
            return;
        }
    }
    Console.WriteLine("[FAILED] - " + string.Join(" ", args));
}
/// <summary>
/// Tests "Qizmt killall -f proxy": imports a job whose first step sleeps forever, starts
/// it on a worker thread, confirms it is still running, kills it via killall, and then
/// verifies the exec call returned without ever reaching the second step (which would
/// have logged the marker GUID into the job output).
/// </summary>
/// <param name="args">Original command-line arguments; echoed in the PASSED banner.</param>
public static void TestKillallProxy(string[] args)
{
    // Two sequential local jobs: the first blocks forever; the second logs the marker
    // GUID. If killall works, the GUID never appears and exec returns early.
    string job = @"<SourceCode> <Jobs> <Job Name=`m_Preprocessing`> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { System.Threading.Thread.Sleep(System.Threading.Timeout.Infinite); } ]]> </Local> </Job> <Job Name=`m_Preprocessing`> <IOSettings> <JobType>local</JobType> </IOSettings> <Local> <![CDATA[ public virtual void Local() { Qizmt_Log(`B40735E5-8B8D-4638-8908-CD2AB024C3A7`); } ]]> </Local> </Job> </Jobs> </SourceCode>".Replace('`', '"');
    string dir = @"\\" + System.Net.Dns.GetHostName() + @"\" + Environment.CurrentDirectory.Replace(':', '$') + @"\RegressionTest\B40735E5-8B8D-4638-8908-CD2AB024C3A7\";
    if (System.IO.Directory.Exists(dir))
    {
        System.IO.Directory.Delete(dir, true);
    }
    System.IO.Directory.CreateDirectory(dir);
    System.IO.File.WriteAllText(dir + @"B40735E5-8B8D-4638-8908-CD2AB024C3A7.xml", job);
    string exe = Exec.GetQizmtExe();
    Exec.Shell(exe + @" del B40735E5-8B8D-4638-8908-CD2AB024C3A7.xml");
    Exec.Shell(exe + @" importdir " + dir);
    System.IO.Directory.Delete(dir, true);
    // NOTE(review): these flags are written by the worker thread and polled here after
    // fixed sleeps (no Join before the checks), so the test is inherently timing-sensitive.
    bool guidfound = false;
    bool jobdone = false;
    System.Threading.Thread th = new System.Threading.Thread(new System.Threading.ThreadStart(delegate()
    {
        try
        {
            Console.WriteLine("Running job...");
            string results = Exec.Shell(exe + @" exec B40735E5-8B8D-4638-8908-CD2AB024C3A7.xml");
            Console.WriteLine("Job output: {0}", results);
            // Ordinal search: the marker is a fixed GUID token, not linguistic text
            // (matches the explicit-StringComparison convention used in this file).
            if (results.IndexOf("B40735E5-8B8D-4638-8908-CD2AB024C3A7", StringComparison.Ordinal) > -1)
            {
                guidfound = true;
            }
        }
        catch
        {
            // exec may throw when the job is killed out from under it; that is the
            // expected path, so the failure is deliberately swallowed.
        }
        jobdone = true;
    }));
    th.Start();
    // Give the job time to start and reach the infinite sleep.
    System.Threading.Thread.Sleep(5000);
    if (guidfound)
    {
        throw new Exception("Job exited normally. Job error.");
    }
    if (jobdone)
    {
        throw new Exception("Job finished too early, cannot run killall to kill it.");
    }
    Console.WriteLine("Running killall proxy...");
    Exec.Shell(exe + " killall -f proxy");
    System.Threading.Thread.Sleep(2000);
    if (guidfound)
    {
        throw new Exception("Job exited normally. Job error.");
    }
    if (!jobdone)
    {
        throw new Exception("Job didn't return. Killall didn't kill the job.");
    }
    Console.WriteLine("[PASSED] - " + string.Join(" ", args));
    Exec.Shell(exe + @" del B40735E5-8B8D-4638-8908-CD2AB024C3A7.xml");
}