public void work(LibPADIMapNoReduce.FileSplit fileSplits)
{
    handleFreezeWorker(); // For handling FREEZEW from PuppetMaster
    handleSlowMap();      // For handling SLOWW from PuppetMaster

    CURRENT_STATUS_WORKER = STATUS.WORKER_WORKING; // For STATUS command of PuppetMaster
    PERCENTAGE_FINISHED = 0;

    PADIMapNoReduce.Pair<long, long> byteInterval = fileSplits.pair;
    Console.WriteLine("Received job for bytes: " + byteInterval.First + " to " + byteInterval.Second);

    if (workerSetup)
    {
        long splitSize = byteInterval.Second - byteInterval.First;
        PADIMapNoReduce.IClient client =
            (PADIMapNoReduce.IClient)Activator.GetObject(typeof(PADIMapNoReduce.IClient), clientUrl);

        if (splitSize <= BATCH_REQUEST_SIZE) // Request the whole split at once
        {
            CURRENT_STATUS_WORKER = STATUS.WORKER_TRANSFERING_INPUT;
            List<byte> splitBytes = client.processBytes(byteInterval, filePath);
            CURRENT_STATUS_WORKER = STATUS.WORKER_WORKING;

            string[] splitLines = System.Text.Encoding.UTF8.GetString(splitBytes.ToArray())
                .Split(new string[] { Environment.NewLine }, System.StringSplitOptions.RemoveEmptyEntries);
            splitBytes.Clear();

            map(ref splitLines, fileSplits.splitId, true);
        }
        else // Request the split in batches
        {
            for (long i = byteInterval.First; i < byteInterval.Second; i += BATCH_REQUEST_SIZE)
            {
                handleFreezeWorker(); // For handling FREEZEW from PuppetMaster
                handleSlowMap();

                PADIMapNoReduce.Pair<long, long> miniByteInterval;
                if (i + BATCH_REQUEST_SIZE > byteInterval.Second)
                {
                    miniByteInterval = new PADIMapNoReduce.Pair<long, long>(i, byteInterval.Second);
                }
                else
                {
                    miniByteInterval = new PADIMapNoReduce.Pair<long, long>(i, i + BATCH_REQUEST_SIZE);
                }

                CURRENT_STATUS_WORKER = STATUS.WORKER_TRANSFERING_INPUT;
                List<byte> splitBytes = client.processBytes(miniByteInterval, filePath);
                CURRENT_STATUS_WORKER = STATUS.WORKER_WORKING;

                string[] splitLines = System.Text.Encoding.UTF8.GetString(splitBytes.ToArray())
                    .Split(new string[] { Environment.NewLine }, System.StringSplitOptions.RemoveEmptyEntries);
                splitBytes.Clear();

                if (!map(ref splitLines, fileSplits.splitId, false))
                    return;

                // Coarse progress estimate: the exact number of bytes already mapped cannot be
                // tracked precisely because of encoding differences.
                PERCENTAGE_FINISHED = (float)(i - byteInterval.First) / (float)(byteInterval.Second - byteInterval.First);
            }
            PERCENTAGE_FINISHED = 1;
        }
    }
    else
    {
        Console.WriteLine("Worker is not set");
    }

    PERCENTAGE_FINISHED = 1;                       // For STATUS command of PuppetMaster
    CURRENT_STATUS_WORKER = STATUS.WORKER_WAITING; // For STATUS command of PuppetMaster

    // If the JobTracker is down at the moment of the call, keep retrying until
    // jobTrackerUrl is updated to point at the new JobTracker.
    bool success = false;
    while (!success)
    {
        // Notify the JobTracker that this split is finished
        PADIMapNoReduce.IJobTracker jobTracker =
            (PADIMapNoReduce.IJobTracker)Activator.GetObject(typeof(PADIMapNoReduce.IJobTracker), jobTrackerUrl);
        try
        {
            jobTracker.notifySplitFinish(url, fileSplits);
            success = true;
        }
        catch (System.Net.Sockets.SocketException)
        {
            //Console.WriteLine("In WORK func - Couldn't contact the JobTracker! Waiting for the new one...");
        }
    }
}
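The batching branch above walks the split's byte interval in steps of BATCH_REQUEST_SIZE and clamps the last request to the end of the split. The following self-contained sketch illustrates that arithmetic in isolation; the constant's value and the tuple type are assumptions made for the example, not part of the project code.

// Illustration only: enumerate the sub-intervals a worker would request for one split.
using System;
using System.Collections.Generic;

class BatchingSketch
{
    const long BATCH_REQUEST_SIZE = 1024; // hypothetical value, for illustration

    // Yields (first, second) sub-intervals covering the byte range [first, second)
    static IEnumerable<Tuple<long, long>> Batches(long first, long second)
    {
        for (long i = first; i < second; i += BATCH_REQUEST_SIZE)
        {
            long end = Math.Min(i + BATCH_REQUEST_SIZE, second); // clamp the final batch
            yield return Tuple.Create(i, end);
        }
    }

    static void Main()
    {
        // A 2500-byte split produces batches [0,1024), [1024,2048), [2048,2500)
        foreach (var b in Batches(0, 2500))
            Console.WriteLine(b.Item1 + " to " + b.Item2);
    }
}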
public FileSplit(int i, PADIMapNoReduce.Pair<long, long> pair)
{
    this.splitId = i;
    this.pair = pair;
}
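A FileSplit simply ties a split index to the byte range it covers. As a usage example (illustrative values only), the first of four splits of a 4096-byte file could be built like this:

// Illustration only: split 0 covering bytes 0 through 1023.
var firstSplit = new LibPADIMapNoReduce.FileSplit(0, new PADIMapNoReduce.Pair<long, long>(0, 1023));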
public void registerJob(string inputFilePath, int nSplits, string outputResultPath, long nBytes,
                        string clientUrl, byte[] mapperCode, string mapperClassName)
{
    handleFreezeJobTracker(); // For handling FREEZEC from PuppetMaster
    //handleFreezeWorker();

    CURRENT_STATUS_JOBTRACKER = STATUS.JOBTRACKER_WORKING; // For STATUS command of PuppetMaster
    this.clientUrl = clientUrl;

    if (nSplits == 0)
    {
        CURRENT_STATUS_JOBTRACKER = STATUS.JOBTRACKER_WAITING;
        return;
    }

    // Partition the input file's byte range into nSplits splits and queue them
    long splitBytes = nBytes / nSplits;
    for (int i = 0; i < nSplits; i++)
    {
        PADIMapNoReduce.Pair<long, long> pair;
        if (i == nSplits - 1)
        {
            // The last split absorbs any remainder left by the integer division
            pair = new PADIMapNoReduce.Pair<long, long>(i * splitBytes, nBytes);
        }
        else
        {
            pair = new PADIMapNoReduce.Pair<long, long>(i * splitBytes, (i + 1) * splitBytes - 1);
        }
        System.Console.WriteLine("Added split: " + pair.First + " to " + pair.Second);
        jobQueue.Enqueue(new LibPADIMapNoReduce.FileSplit(i, pair));
    }

    // Distribute one split to each available worker
    for (int i = 0; i < workers.Count; i++)
    {
        if (jobQueue.IsEmpty)
        {
            break;
        }

        string workerUrl = workers[i];
        PADIMapNoReduce.IWorker worker =
            (PADIMapNoReduce.IWorker)Activator.GetObject(typeof(PADIMapNoReduce.IWorker), workerUrl);
        worker.setup(mapperCode, mapperClassName, clientUrl, inputFilePath);

        LibPADIMapNoReduce.FileSplit job = null;
        if (jobQueue.TryDequeue(out job))
        {
            if (onGoingWork.ContainsKey(workerUrl)) // Update the split currently assigned to this worker
            {
                onGoingWork[workerUrl] = job;
            }
            else // First assignment for this worker
            {
                onGoingWork.TryAdd(workerUrl, job);
            }
            worker.work(job);
        }
    }

    checkWorkerStatus(null);
}
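registerJob ships the client's mapper to each worker as raw assembly bytes plus a class name via worker.setup(...). The sketch below shows one plausible way a worker could turn those two arguments into a usable mapper instance through reflection; the IMapper interface and its Map signature are stand-in assumptions for illustration, not the project's actual contract.

// Sketch only, under the assumptions stated above.
using System;
using System.Collections.Generic;
using System.Reflection;

// Assumed mapper contract, for illustration only.
public interface IMapper
{
    IList<KeyValuePair<string, string>> Map(string line);
}

public static class MapperLoader
{
    // Loads the client-supplied assembly from its raw bytes and instantiates the named class.
    public static IMapper Load(byte[] mapperCode, string mapperClassName)
    {
        Assembly assembly = Assembly.Load(mapperCode); // in-memory load, no file on disk
        foreach (Type type in assembly.GetTypes())
        {
            if (type.Name == mapperClassName && typeof(IMapper).IsAssignableFrom(type))
            {
                // Requires the mapper class to expose a parameterless constructor
                return (IMapper)Activator.CreateInstance(type);
            }
        }
        throw new ArgumentException("Class " + mapperClassName + " not found in the supplied assembly");
    }
}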