public byte[] OnTask(byte[] input)
{
    // TODO: get shared data file name here
    string sharedDataFile = "some filename";

    DateTime tS = DateTime.Now;

    // TODO - implement config class!
    HPCUtilities.Init(HPCEnvironment.KubernetesAWS);

    TaskData td = HPCUtilities.Deserialize<TaskData>(input);

    // Defer loading the shared job data until a task actually asks for it.
    Func<SharedJobData> getSJD = () =>
        HPCUtilities.Deserialize<SharedJobData>(File.ReadAllBytes(sharedDataFile));
    JobDataUtilities.SetSharedJobData(td.Job, getSJD);

    // Task-specific settings (not used directly here yet).
    ZOSTaskData taskSettings = HPCUtilities.Deserialize<ZOSTaskData>(td.Data[0].Data);

    byte[] resultData = JobDataUtilities.RunZOSJob(td, tS);
    return resultData;
}
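OnTask above is the per-task entry point on a worker: it rehydrates the task blob, wires up a lazy loader for the shared job data, and hands everything to RunZOSJob. The implementation of HPCUtilities.Serialize/Deserialize is not shown; a minimal sketch of the round-trip contract the code relies on, using System.Text.Json purely as an illustrative stand-in for the real wire format, might look like this:

using System.Text.Json;

// Sketch only: the real HPCUtilities wire format is not shown and may differ.
public static class SerializationSketch
{
    // DTO -> byte blob, suitable for shipping to a worker as a task payload.
    public static byte[] Serialize<T>(T value) =>
        JsonSerializer.SerializeToUtf8Bytes(value);

    // Byte blob -> DTO, the inverse of Serialize.
    public static T Deserialize<T>(byte[] blob) =>
        JsonSerializer.Deserialize<T>(blob);
}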
static void Main(string[] args)
{
    // arguments:
    //   job id
    //   shared data file
    //   job data file
    //   output file
    DateTime tS = DateTime.Now;

    // TODO - implement config class!
    HPCUtilities.Init(HPCEnvironment.KubernetesAWS);

    int numArgs = args.Length;
    if (numArgs < 4)
    {
        throw new ArgumentException("Invalid number of arguments");
    }

    // Arguments are read from the end so extra leading arguments are ignored.
    string jobId = args[numArgs - 4]; // parsed but not yet used
    string sharedDataFile = args[numArgs - 3];
    string taskDataFile = args[numArgs - 2];
    string outFile = args[numArgs - 1];

    // TODO - shared data isn't needed for prime factoring.
    // For other jobs, it needs to be pre-processed once per node.
    TaskData td = HPCUtilities.Deserialize<TaskData>(File.ReadAllBytes(taskDataFile));

    // Defer loading the shared job data until a task actually asks for it.
    Func<SharedJobData> getSJD = () =>
        HPCUtilities.Deserialize<SharedJobData>(File.ReadAllBytes(sharedDataFile));
    JobDataUtilities.SetSharedJobData(td.Job, getSJD);

    ZOSTaskData taskSettings = HPCUtilities.Deserialize<ZOSTaskData>(td.Data[0].Data);

    byte[] resultData = JobDataUtilities.RunZOSJob(td, tS);
    File.WriteAllBytes(outFile, resultData);
    //RunPrime1(td.Job, td.TaskNumber, taskSettings.NumberToFactor, outFile);
}
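Because the four positional arguments are read from the end of args, anything a container entrypoint prepends is ignored. A hypothetical driver for local debugging, launching the runner once per task with Process.Start (the executable name and all file paths are invented for illustration):

using System;
using System.Diagnostics;

// Hypothetical local driver: run a single task through the standalone runner.
var psi = new ProcessStartInfo
{
    FileName = "ZOSTaskRunner.exe",
    // positional arguments: job id, shared data file, task data file, output file
    Arguments = "job42 job42.sjd job42_task_007.td job42_task_007.result",
    UseShellExecute = false,
};
using (Process runner = Process.Start(psi))
{
    runner.WaitForExit();
    Console.WriteLine("runner exited with code " + runner.ExitCode);
}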
static void MainPrimes(string[] args)
{
    DateTime tS = DateTime.UtcNow;
    const int numJobs = 100;
    const int numCores = 1;

    // TODO - implement config class!
    HPCUtilities.Init(HPCEnvironment.KubernetesAWS);

    JobData jd;
    SharedJobData sjd;
    List<TaskData> tasks;
    JobDataUtilities.CreateJobDataPrimes(numJobs, numCores, out jd, out sjd, out tasks);

    byte[] sharedDataBlob = HPCUtilities.Serialize(sjd);
    List<byte[]> taskBlobs = new List<byte[]>();
    foreach (var task in tasks)
    {
        taskBlobs.Add(HPCUtilities.Serialize(task));
    }

    // TODO - send shared data blob to cluster
    // TODO - send task blobs to cluster
    // TODO - collect results
    // TODO - where should we output the data?
    //string outputFolder = @"c:\temp\";
    string dataDirectoryPath = null;

    // send input file and task blobs to cluster, collect results
    TaskSender taskSender = new TaskSender(Orchestrator.Docker);
    List<byte[]> results = taskSender.Send(taskBlobs);

    // DK - temp output results
    Console.WriteLine("processing complete");
    foreach (var result in results)
    {
        TaskResults taskResults = HPCUtilities.Deserialize<TaskResults>(result);
        DataEntry[] data = taskResults.Results;
        Console.Write("number: " + BitConverter.ToInt32(data[0].Data) + ", factors: ");
        for (int i = 1; i < data.Length - 1; i++)
        {
            Console.Write(BitConverter.ToInt32(data[i].Data) + " ");
        }
        Console.WriteLine();
    }

    // DK - following code not quite hooked up yet with k8s
    /*
    string[] resultFiles = new string[] { };
    List<ZOSResult> processedResults = new List<ZOSResult>();
    int numProcessed = 0;
    int numFail = 0;
    foreach (string resultFile in resultFiles)
    {
        var tr = HPCUtilities.Deserialize<TaskResults>(System.IO.File.ReadAllBytes(resultFile));
        ZOSResult result;
        JobDataUtilities.ProcessZOSResult(tr, out result);
        if (result != null)
        {
            JobDataUtilities.StoreZOSResult(jd.JobType, result, outputFolder, numProcessed);
        }
        else
        {
            ++numFail;
        }
        ++numProcessed;
    }

    int numSucceed;
    var stats = JobDataUtilities.GetZOSStats(
        jd.JobType,
        tS,
        outputFolder,
        out numSucceed,
        ref numFail);

    foreach (var stat in stats)
    {
        Console.WriteLine(stat.ToString());
    }
    */

    Console.WriteLine();
    Console.WriteLine("Press any key to exit");
    Console.ReadKey();
}
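The printing loop above assumes each TaskResults stores the factored number in Results[0] and its factors in the following entries, each encoded as a little-endian 4-byte integer. For context, a worker-side packing routine consistent with that layout could look like the sketch below; the DataEntry and TaskResults field shapes are assumptions, since their definitions are not shown:

// Hypothetical packing of a prime-factoring result, mirroring the unpacking
// loop in MainPrimes. DTO field shapes are assumed, not taken from the source.
static TaskResults PackPrimeResult(int number, IReadOnlyList<int> factors)
{
    var entries = new DataEntry[factors.Count + 1];
    entries[0] = new DataEntry { Data = BitConverter.GetBytes(number) };
    for (int i = 0; i < factors.Count; i++)
    {
        entries[i + 1] = new DataEntry { Data = BitConverter.GetBytes(factors[i]) };
    }
    return new TaskResults { Results = entries };
}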
static void Main(string[] args)
{
    DateTime tS = DateTime.UtcNow;
    const int numJobs = 100;
    const int numCores = 1;

    // TODO - implement config class!
    HPCUtilities.Init(HPCEnvironment.KubernetesAWS);

    //string fileFolder = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
    string fileFolder = @"C:\tmp\zoskub\input";
    string outputFolder = @"C:\tmp\zoskub\output";
    string zarFile = Path.Combine(fileFolder, "tol_test.zar");
    string topFile = Path.Combine(fileFolder, "tol_test.top");

    JobData jd;
    SharedJobData sjd;
    List<TaskData> tasks;
    JobDataUtilities.CreateJobDataMCTol(
        24,
        zarFile,
        topFile,
        4,
        250,
        out jd,
        out sjd,
        out tasks);
    //JobDataUtilities.CreateJobDataPrimes(
    //    numJobs,
    //    numCores,
    //    out jd,
    //    out sjd,
    //    out tasks);

    string sjdFile = Path.Combine(fileFolder, jd.JobId + ".sjd");
    byte[] sharedDataBlob = HPCUtilities.Serialize(sjd);
    List<byte[]> taskBlobs = new List<byte[]>();
    foreach (var task in tasks)
    {
        taskBlobs.Add(HPCUtilities.Serialize(task));
    }
    File.WriteAllBytes(sjdFile, sharedDataBlob);

    // send shared data blob and task blobs to cluster, collect results
    Console.WriteLine("JobId = " + jd.JobId);
    string dataDirectoryPath = null;
    TaskSender taskSender = new TaskSender(Orchestrator.Docker);
    taskSender.CopySharedJobData(sjdFile);
    List<byte[]> resultByteArrays = taskSender.Send(taskBlobs);

    List<ZOSResult> processedResults = new List<ZOSResult>();
    int numProcessed = 0;
    int numFail = 0;
    foreach (byte[] resultByteArray in resultByteArrays)
    {
        var tr = HPCUtilities.Deserialize<TaskResults>(resultByteArray);
        ZOSResult result;
        JobDataUtilities.ProcessZOSResult(tr, out result);
        if (result != null)
        {
            JobDataUtilities.StoreZOSResult(jd.JobType, result, outputFolder, numProcessed);
        }
        else
        {
            ++numFail;
        }
        ++numProcessed;
    }

    int numSucceed;
    var stats = JobDataUtilities.GetZOSStats(
        jd.JobType,
        tS,
        outputFolder,
        out numSucceed,
        ref numFail);

    foreach (var stat in stats)
    {
        Console.WriteLine(stat.ToString());
    }

    Console.WriteLine();
    Console.WriteLine("Press any key to exit");
    Console.ReadKey();
}
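All four entry points repeat the same "implement config class" TODO alongside hard-coded folders and the HPCEnvironment.KubernetesAWS constant. One possible shape for that class, sketched here with invented environment-variable names and assuming HPCEnvironment is an enum:

// Hypothetical config class per the TODOs; the environment-variable names
// and defaults are illustrative only, not part of the project.
public sealed class HPCConfig
{
    public HPCEnvironment Environment { get; init; } = HPCEnvironment.KubernetesAWS;
    public string InputFolder { get; init; } = @"C:\tmp\zoskub\input";
    public string OutputFolder { get; init; } = @"C:\tmp\zoskub\output";

    // Let environment variables override the defaults baked in above.
    public static HPCConfig FromEnvironment()
    {
        string env = System.Environment.GetEnvironmentVariable("HPC_ENVIRONMENT");
        return new HPCConfig
        {
            Environment = env != null
                ? Enum.Parse<HPCEnvironment>(env)
                : HPCEnvironment.KubernetesAWS,
            InputFolder = System.Environment.GetEnvironmentVariable("HPC_INPUT_FOLDER")
                ?? @"C:\tmp\zoskub\input",
            OutputFolder = System.Environment.GetEnvironmentVariable("HPC_OUTPUT_FOLDER")
                ?? @"C:\tmp\zoskub\output",
        };
    }
}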