/// <summary>
/// Terminate the result thread and apply any required exit procedures.
/// Save orders log files to disk.
/// </summary>
public override void Exit()
{
    base.Exit();
    lock (_sync)
    {
        if (_writer != null)
        {
            // Log the final statistics last so that they all appear together.
            foreach (var kvp in RuntimeStatistics.OrderBy(kvp => kvp.Key))
            {
                WriteLine($"{kvp.Key,-15}\t{kvp.Value}");
            }

            var end = DateTime.UtcNow;
            var delta = end - _testStartTime;
            WriteLine($"{end}: Completed regression test, took: {delta.TotalSeconds:0.0} seconds");
            _writer.DisposeSafely();
            _writer = null;
        }
        else
        {
            // No writer was ever created: flush anything buffered before initialization.
            string line;
            while (_preInitializeLines.TryDequeue(out line))
            {
                Console.WriteLine(line);
            }
        }
    }
}
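// A minimal sketch of the state the Exit() snippet above appears to rely on.
// The field names come from the snippet itself; their types are assumptions
// (TryDequeue suggests System.Collections.Concurrent.ConcurrentQueue<string>,
// and DisposeSafely is presumably an extension method in that codebase).
using System;
using System.Collections.Concurrent;
using System.IO;

public abstract class ResultHandlerSketch
{
    protected readonly object _sync = new object();
    protected TextWriter _writer;          // assumed: any writer with WriteLine/Dispose semantics
    protected DateTime _testStartTime;
    protected readonly ConcurrentQueue<string> _preInitializeLines = new ConcurrentQueue<string>();
}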
// Test that displays runtime statistics while the frontier and a worker simulator run.
public void test4()
{
    RuntimeStatistics.addToExtractedUrls(1);
    initQueuesTest2();
    RuntimeStatistics.addToFeedUrls(5000);
    DateTime startTime = DateTime.Now;
    RankFrointer rankFrontier = new RankFrointer(feedback, serverQueues);
    Thread frontierThread = new Thread(new ThreadStart(rankFrontier.sceduleTasks));
    frontierThread.Start();
    Thread workerThread = new Thread(new ThreadStart(workerSimulator3));
    workerThread.Start();

    // Feed the frontier in batches of 5000 URLs until 40000 have been queued.
    int iteration = 5000;
    while (true)
    {
        StatusDisplay.DisplayOnScreen(feedback, serverQueues);
        Thread.Sleep(1000);
        if (iteration > 40000)
        {
            break;
        }
        fillQueue(iteration, iteration + 5000);
        iteration += 5000;
        RuntimeStatistics.addToFeedUrls(5000);
    }

    // Keep refreshing the status display until the test is stopped manually.
    while (true)
    {
        StatusDisplay.DisplayOnScreen(feedback, serverQueues);
        Thread.Sleep(1000);
    }
}
/**
 * This method schedules the given tasks onto the server queues for processing.
 * If there are no tasks, the method waits until there is something to process.
 * NOTE: This method never returns.
 */
public override void sceduleTasks()
{
    // Tracks URLs that have already been scheduled so duplicates are skipped.
    Dictionary<String, String> dictionary = new Dictionary<String, String>();
    int serverTurn = 0;
    Url request = null;
    while (true)
    {
        try
        {
            // get a new request
            SyncAccessor.getSlot(2, 0);
            request = SyncAccessor.getFromQueue<Url>(_tasksQueue, _timer);

            // handle the request
            if (dictionary.ContainsKey(request.getUrl()))
            {
                // already seen; pick another one
                continue;
            }
            else
            {
                // first sighting; mark it as seen and continue
                dictionary.Add(request.getUrl(), null);
            }

            // place the new request into one of the server queues
            bool needToPutRequest = true;
            int retryCount = 0;
            while (needToPutRequest)
            {
                SyncAccessor.getSlot(2, 0);
                if (SyncAccessor.queueSize<Url>(_serversQueues[serverTurn]) < _limit)
                {
                    needToPutRequest = false;
                    SyncAccessor.getSlot(2, 0);
                    SyncAccessor.putInQueue<Url>(_serversQueues[serverTurn], request);
                }
                else
                {
                    // Queue is full: retry for a while, then move on to the next server.
                    retryCount++;
                    if (retryCount > MAX_RETRY_COUNTER)
                    {
                        serverTurn = (serverTurn + 1) % _serversQueues.Count;
                    }
                    else
                    {
                        Thread.Sleep(_timer * 3);
                    }
                }
            }
            serverTurn = (serverTurn + 1) % _serversQueues.Count;
        }
        catch (Exception e)
        {
            RuntimeStatistics.addToErrors(1);
        }
    }
}
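// The Dictionary<String, String> above is used purely for membership tests
// (its values are always null). A HashSet<string> states that intent directly.
// This is a hedged drop-in sketch, not how the original codebase does it:
using System.Collections.Generic;

public class DuplicateFilter
{
    private readonly HashSet<string> _seen = new HashSet<string>();

    // Returns true the first time a URL is offered, false on any repeat.
    public bool TryMarkNew(string url)
    {
        return _seen.Add(url); // Add() returns false when the URL is already present
    }
}
// In sceduleTasks the ContainsKey/Add pair would collapse to:
//     if (!filter.TryMarkNew(request.getUrl())) continue;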
/**
 * Invokes the worker to start working on the tasks.
 * NOTE: This method never returns.
 */
public void run()
{
    int requestNum = 0, timeoutCounter = 0;
    bool needToTerminate = false;
    TimeSpan totalProcessTime;
    Thread.Sleep(10000);
    while (needToTerminate == false)
    {
        DateTime startTime = DateTime.Now;
        try
        {
            SyncAccessor.getSlot(2, 1);
            Url task = SyncAccessor.getFromQueue<Url>(_tasks, _timer);
            //System.Console.WriteLine(" Start Working on : " + task.getUrl() + " ...");
            ResourceContent content = _fetchers.fetchResource(task.getUrl());
            if (!content.isValid())
            {
                // fetch failed; count the timeout and move on to the next task
                timeoutCounter++;
                //System.Console.WriteLine(" Fetch Failed Ignoring ... ");
                continue;
            }
            //System.Console.WriteLine(" Fetched Successfully ... ");
            // re-wrap the content so that it also carries the task's rank
            ResourceContent modifiedContent = new ResourceContent(content.getResourceUrl(), content.getResourceType(),
                                                                  content.getResourceContent(), content.getReturnCode(), task.getRank());
            DateTime startProcess = DateTime.Now;
            _processors.processResource(modifiedContent);
            DateTime endProcess = DateTime.Now;
            totalProcessTime = endProcess - startProcess;
            System.Console.WriteLine(" URL Processed Successfully ... ");
        }
        catch (Exception e)
        {
            //System.Console.WriteLine("[Exception Happened] " + e);
            RuntimeStatistics.addToErrors(1);
            continue;
        }
        DateTime endTime = DateTime.Now;
        TimeSpan totalRequestTime = endTime - startTime;
        if (LogDebuggerControl.getInstance().enableTiming)
        {
            // append the request timing to this thread's timing log file
            using (StreamWriter sw = new StreamWriter("_DEBUG_INFO_TIMING@" + System.Threading.Thread.CurrentThread.ManagedThreadId + ".txt", true))
            {
                sw.WriteLine(" TIMING FOR REQ - " + requestNum++ + " takes about " + totalRequestTime.TotalSeconds +
                             " s, Processed At " + totalProcessTime.TotalSeconds + " s");
            }
        }
    }
}
/**
 * This method schedules the given tasks onto the server queues for processing.
 * If there are no tasks, the method waits until there is something to process.
 * NOTE: This method never returns.
 */
public override void sceduleTasks()
{
    int serverTurn = 0;
    bool getNewRequest = true;
    Url request = null;
    Url request2 = null;
    while (true)
    {
        try
        {
            // Drain the feed queue into the ranking trie, up to MAX_INSERTS_IN_TIME per pass.
            int inserts = 0;
            SyncAccessor.getSlot(ThreadsDim + 1, 0);
            while (SyncAccessor.queueSize<Url>(_tasksQueue) != 0 && inserts < MAX_INSERTS_IN_TIME)
            {
                request2 = SyncAccessor.getFromQueue<Url>(_tasksQueue, _timer);
                _rankingTrie.add(request2);
                inserts++;
            }

            // Pop the highest-ranked URL, unless we are still retrying the previous one.
            if (getNewRequest)
            {
                RuntimeStatistics.setFrontierUrls(_rankingTrie.count());
                if (_rankingTrie.count() == 0)
                {
                    Thread.Sleep(_timer);
                    continue;
                }
                request = _rankingTrie.pop();
                getNewRequest = false;
            }

            // Hand the request to the current server queue if it has room;
            // otherwise keep it and try the next server on the following pass.
            SyncAccessor.getSlot(2, 0);
            if (SyncAccessor.queueSize<Url>(_serversQueues[serverTurn]) < _limit)
            {
                SyncAccessor.getSlot(2, 0);
                SyncAccessor.putInQueue<Url>(_serversQueues[serverTurn], request);
                getNewRequest = true;
            }
            else
            {
                getNewRequest = false;
            }
            serverTurn = (serverTurn + 1) % _serversQueues.Count;
        }
        catch (Exception e)
        {
            RuntimeStatistics.addToErrors(1);
        }
    }
}
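// The frontier above assumes a rank-ordered store with add/count/pop. The
// real type behind _rankingTrie is not shown in these snippets, so this is
// only a hypothetical sketch of the contract the code exercises:
public interface IRankingStore
{
    void add(Url url); // insert a URL, keyed by its rank
    int count();       // number of URLs currently waiting
    Url pop();         // remove and return the highest-ranked URL
}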
// Tests multiple worker threads consuming from the frontier.
public void test6()
{
    DateTime startTime = DateTime.Now;
    initQueuesTest3();
    RuntimeStatistics.addToExtractedUrls(1);
    RuntimeStatistics.addToFeedUrls(5000);
    RankFrointer rankFrontier = new RankFrointer(feedback, serverQueues);
    Thread frontierThread = new Thread(new ThreadStart(rankFrontier.sceduleTasks));
    frontierThread.Start();
    Thread workerThread = new Thread(new ThreadStart(workerSimulator4));
    workerThread.Start();
    Thread workerThread2 = new Thread(new ThreadStart(workerSimulator5));
    workerThread2.Start();

    // Feed the frontier in batches of 5000 URLs until one million have been queued.
    int iteration = 5000;
    while (true)
    {
        Thread.Sleep(10);
        fillQueue(iteration, iteration + 5000);
        iteration += 5000;
        RuntimeStatistics.addToFeedUrls(5000);
        StatusDisplay.DisplayOnScreen(feedback, serverQueues);
        if (iteration > 1000000)
        {
            break;
        }
    }

    // Poll the worker threads and report their states until both have stopped.
    while (true)
    {
        Thread.Sleep(1000);
        StatusDisplay.DisplayOnScreen(feedback, serverQueues);
        //workerThread.Interrupt();
        ThreadState state = workerThread.ThreadState;
        Console.WriteLine("Workerthread is : " + state.ToString());
        ThreadState state2 = workerThread2.ThreadState;
        if ((state == ThreadState.Stopped) && (state2 == ThreadState.Stopped))
        {
            // both workers have stopped; measure the total elapsed time
            //workerThread2.Join();
            DateTime endTime = DateTime.Now;
            TimeSpan totalTime = endTime - startTime;
        }
        Console.WriteLine("Workerthread2 is : " + state2.ToString());
    }
}
/**
 * This method schedules the given tasks onto the server queues for processing.
 * If there are no tasks, the method waits until there is something to process.
 * NOTE: This method runs until it is asked to stop via _shouldStop.
 */
public void sceduleTasks()
{
    // Tracks URLs that have already been scheduled so duplicates are skipped.
    Dictionary<String, String> dictionary = new Dictionary<String, String>();
    int serverTurn = 0, iterations = 0;
    bool getNewRequest = true, needToTerminate = false;
    Url request = null;
    while (needToTerminate == false)
    {
        try
        {
            // Fetch a new request unless the previous one is still waiting for a free queue.
            // The duplicate check runs only on freshly dequeued requests, so a
            // retried request is not mistaken for a duplicate of itself.
            if (getNewRequest)
            {
                request = SyncAccessor.getFromQueue<Url>(_tasksQueue, _timer);
                if (dictionary.ContainsKey(request.getUrl()))
                {
                    continue;
                }
                dictionary.Add(request.getUrl(), null);
            }
            getNewRequest = true;
            if (SyncAccessor.queueSize<Url>(_serversQueues[serverTurn]) <= _limit)
            {
                SyncAccessor.putInQueue<Url>(_serversQueues[serverTurn], request);
            }
            else
            {
                // Queue is full: keep this request and retry on the next server.
                getNewRequest = false;
            }
            serverTurn = (serverTurn + 1) % _serversQueues.Count;

            // Periodically check whether we have been asked to stop.
            iterations++;
            if (iterations >= _checkStatusLimit)
            {
                iterations = 0;
                if (_shouldStop)
                {
                    //System.Console.WriteLine("Frontier Thread received should stop");
                    needToTerminate = true;
                }
            }
        }
        catch (Exception e)
        {
            RuntimeStatistics.addToErrors(1);
        }
    }
}
/**
 * This method fetches the required resource from the network using one of the
 * supplied fetchers; null is returned if no fetcher can fetch the requested
 * content.
 */
public ResourceContent fetchResource(String url)
{
    if (url == null)
    {
        throw new ArgumentNullException("url");
    }
    foreach (String protocolId in resourceFetchers.Keys)
    {
        if (resourceFetchers[protocolId].canFetch(url))
        {
            RuntimeStatistics.addToFetchedUrls(1);
            return resourceFetchers[protocolId].fetch(url, timeOut, 0);
        }
    }
    return null;
}
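// A hedged usage sketch: callers must handle the null returned when no
// registered fetcher accepts the URL, as well as invalid content. The
// 'fetchers' instance and the example URL here are illustrative only.
ResourceContent content = fetchers.fetchResource("http://example.org/");
if (content == null || !content.isValid())
{
    RuntimeStatistics.addToErrors(1); // count the failure and skip this URL
}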
private void workerSimulator5()
{
    int errCount = 0, urlsNum = 1, emptyQueueCount = 0;
    bool queueFirstNotEmpty = false;
    while (true)
    {
        Thread.Sleep(10);
        if (SyncAccessor.queueSize<Url>(serverQueues[1]) > 0)
        {
            queueFirstNotEmpty = true;
            emptyQueueCount = 0;
            Url url = SyncAccessor.getFromQueue<Url>(serverQueues[1], 100);
            if (url != null)
            {
                RuntimeStatistics.addToCrawledUrls(1);
                // log the dequeued URL for offline inspection
                using (StreamWriter sw = new StreamWriter("FrontierDebugger2.txt", true))
                {
                    sw.WriteLine("******************************************************");
                    sw.WriteLine("URL DEQUEUED: ");
                    sw.WriteLine(url.getUrl());
                    sw.WriteLine("RANK OF URL");
                    sw.WriteLine(url.getRank());
                    sw.WriteLine("NumberOfUrls");
                    sw.WriteLine(urlsNum++);
                }
            }
            else
            {
                errCount++;
                Console.WriteLine("ERROR - " + errCount.ToString());
            }
        }
        else
        {
            // Only start counting empty polls once the queue has been non-empty;
            // after three consecutive empty polls, assume the run is over.
            Thread.Sleep(1000);
            if (emptyQueueCount > 2)
            {
                break;
            }
            if (queueFirstNotEmpty)
            {
                emptyQueueCount++;
            }
        }
    }
}
public void test5()
{
    initQueuesTest2();
    RuntimeStatistics.addToExtractedUrls(1);
    RuntimeStatistics.addToFeedUrls(5000);
    RankFrointer rankFrontier = new RankFrointer(feedback, serverQueues);
    Thread frontierThread = new Thread(new ThreadStart(rankFrontier.sceduleTasks));
    frontierThread.Start();
    Thread workerThread = new Thread(new ThreadStart(workerSimulator4));
    workerThread.Start();
    while (true)
    {
        Thread.Sleep(1000);
        StatusDisplay.DisplayOnScreen(feedback, serverQueues);
    }
}
public Cpu(IEmulationInstance emulator, ComponentParameters parameters)
{
    Debug.Assert(emulator != null);
    Debug.Assert(parameters != null);

    _caps = new CpuCapabilities();
    _stats = new RuntimeStatistics();
    _emulator = emulator;
    _params = parameters;
    _lastSyscall = -1;
    _syscalls = new BiosFunction[1024];

#if STATS
    _timer = new PerformanceTimer();
    _timeSinceLastIpsPrint = 0.0;
#endif

    _clock = new Clock();
    _memory = new Memory();
    // Order matters as the lookup is linear and stupid... should be changed somehow
    //_memory->DefineSegment( MemoryType::PhysicalMemory, "Main Memory", 0x08000000, 0x01FFFFFF );
    //_memory->DefineSegment( MemoryType::PhysicalMemory, "Hardware Vectors", 0x1FC00000, 0x000FFFFF );
    //_memory->DefineSegment( MemoryType::PhysicalMemory, "Scratchpad", 0x00010000, 0x00003FFF );
    //_memory->DefineSegment( MemoryType::PhysicalMemory, "Frame Buffer", 0x04000000, 0x001FFFFF );
    //_memory->DefineSegment( MemoryType::HardwareMapped, "Hardware IO 1", 0x1C000000, 0x03BFFFFF );
    //_memory->DefineSegment( MemoryType::HardwareMapped, "Hardware IO 2", 0x1FD00000, 0x002FFFFF );

    _core0 = new Core(this, 0, "Allegrex", CoreAttributes.HasCp2);
    _core1 = new Core(this, 1, "Media Engine", CoreAttributes.Default);

    _codeCache = new CodeCache();
    _context = new GenerationContext();
    _context.Cpu = this;
    _context.Core0 = _core0;
    _context.Memory = _memory;

    _executionMode = ExecutionMode.Run;
    _firstExecute = true;
}
private void workerSimulator3()
{
    int errCount = 0, emptyQueueCount = 0;
    bool queueFirstNotEmpty = false;
    while (true)
    {
        Thread.Sleep(10);
        if (SyncAccessor.queueSize<Url>(serverQueues[0]) > 0)
        {
            queueFirstNotEmpty = true;
            emptyQueueCount = 0;
            Url url = SyncAccessor.getFromQueue<Url>(serverQueues[0], 100);
            if (url != null)
            {
                RuntimeStatistics.addToCrawledUrls(1);
            }
            else
            {
                RuntimeStatistics.addToErrors(1);
                errCount++;
                Console.WriteLine("ERROR - " + errCount.ToString());
            }
        }
        else
        {
            // Only start counting empty polls once the queue has been non-empty;
            // after three consecutive empty polls, assume the run is over.
            Thread.Sleep(1000);
            if (emptyQueueCount > 2)
            {
                break;
            }
            if (queueFirstNotEmpty)
            {
                emptyQueueCount++;
            }
        }
    }
}
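// Both worker simulators above share the same termination idiom: poll the
// queue, but only begin counting empty polls after it has been non-empty at
// least once, so a slow producer at startup is not mistaken for completion.
// A distilled sketch of just that pattern; TryGetWork and Handle are
// hypothetical stand-ins for the SyncAccessor calls used above:
bool seenWork = false;
int emptyPolls = 0;
while (true)
{
    Url item;
    if (TryGetWork(out item))
    {
        seenWork = true;
        emptyPolls = 0;
        Handle(item);
    }
    else
    {
        Thread.Sleep(1000);
        if (emptyPolls > 2) break;    // three consecutive empty polls: done
        if (seenWork) emptyPolls++;   // only count once work has been seen
    }
}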
/**
 * Main method of the console application.
 */
public static void Main(String[] args)
{
    bool toContinue = ParseArguements(args), needToRestart = false;
    if (toContinue == false)
    {
        return;
    }
    Queue<int> keepAlive = new Queue<int>();
    String currentUser = "******", currentTask = "";
    LogDebuggerControl.getInstance().debugCategorization = false;
    LogDebuggerControl.getInstance().debugCategorizationInRanker = false;
    LogDebuggerControl.getInstance().debugRanker = false;
    while (true)
    {
        // select which task to invoke
        SelectTask(ref currentUser, ref currentTask);
        // update the WorkDetails class with the new task id
        WorkDetails.setTaskId(currentTask);
        // set all constants of the task
        SetAllConstants();
        // get the initialization data
        SetInitializer(currentTask);
        // initialize the queues
        InitQueues(currentTask);
        // start the worker and frontier threads
        InvokeThreads();
        // poll for user requests
        while (needToRestart == false)
        {
            Thread.Sleep(_refreshRate * 1000);
            StatusDisplay.DisplayOnScreen(_feedBackQueue, _serversQueues);
            if (_operationMode == operationMode_t.Auto)
            {
                // In automatic mode, restart unless the current task is still active.
                List<TaskStatus> tasks = StorageSystem.StorageSystem.getInstance().getWorkDetails(currentUser, QueryOption.ActiveTasks);
                needToRestart = true;
                foreach (TaskStatus task in tasks)
                {
                    if (task.getTaskID() == currentTask)
                    {
                        task.setTaskElapsedTime(task.getTaskElapsedTime() + _refreshRate);
                        StorageSystem.StorageSystem.getInstance().changeWorkDetails(task);
                        needToRestart = false;
                        continue;
                    }
                }
            }
        }
        // terminate all the threads and start over
        TerminateThreads();
        needToRestart = false;
        RuntimeStatistics.resetStatistics();
    }
    //RankerTest test = new RankerTest();
    //test.Test2();
}
/**
 * This method constructs a ResourceContent object from the page that it fetches.
 * If it does not succeed in fetching the page for some reason, it creates a
 * ResourceContent with a null content.
 */
public ResourceContent fetch(String url, int timeOut, int rankOfUrl)
{
    ResourceContent resource;
    // used to build the entire input
    StringBuilder sb = new StringBuilder();
    // used on each read operation
    byte[] buf = new byte[8192];
    try
    {
        DateTime startTime = DateTime.Now;
        if (!canFetch(url))
        {
            resource = new ResourceContent(url, ResourceType.HtmlResource, null, ERROR_CODE, rankOfUrl);
            return resource;
        }
        // prepare and execute the request for the page we will be asking for
        HttpWebRequest request = (HttpWebRequest)WebRequest.Create(url);
        request.Timeout = timeOut;
        using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
        using (Stream resStream = response.GetResponseStream())
        {
            string tempString = null;
            int count = 0;
            do
            {
                // fill the buffer with data
                count = resStream.Read(buf, 0, buf.Length);
                // make sure we read some data
                if (count != 0)
                {
                    // translate from bytes to ASCII text
                    tempString = Encoding.ASCII.GetString(buf, 0, count);
                    // continue building the string
                    sb.Append(tempString);
                }
            } while (count > 0); // any more data to read?
        }
        resource = new ResourceContent(url, ResourceType.HtmlResource, sb.ToString(), VALID_CODE, rankOfUrl);
        DateTime endTime = DateTime.Now;
        TimeSpan totalRequest = endTime - startTime;
        return resource;
    }
    catch (Exception e)
    {
        RuntimeStatistics.addToErrors(1);
        //System.Console.WriteLine("[Fetch Error] " + e);
        // 200 is the success return code and 400 is the failure return code
        resource = new ResourceContent(url, ResourceType.HtmlResource, null, ERROR_CODE, rankOfUrl);
        return resource;
    }
}
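// A hedged alternative to the manual read loop above: StreamReader handles
// buffering and decoding in one place. UTF-8 is an assumption on our part;
// the original decodes as ASCII, which silently drops non-ASCII bytes.
// 'resStream' and 'sb' refer to the locals in fetch() above.
using (StreamReader reader = new StreamReader(resStream, Encoding.UTF8))
{
    sb.Append(reader.ReadToEnd());
}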
public void Test2()
{
    List<String> urls = new List<string>();
    urls.Add("http://www.autonews.com/");
    urls.Add("http://www.geonius.com/www/");
    urls.Add("http://en.wikipedia.org/wiki/Main_Page");
    urls.Add("http://www.computerworld.com/");
    List<string> seeds = StorageSystem.StorageSystem.getInstance().getSeedList(taskId);
    foreach (string seed in seeds)
    {
        urls.Add(seed);
    }

    List<Category> _categories;
    Constraints _constraints;
    _categories = StorageSystem.StorageSystem.getInstance().getCategories(taskId);
    _constraints = StorageSystem.StorageSystem.getInstance().getRestrictions(taskId);
    StorageSystem.StorageSystem.getInstance().getSeedList(taskId);

    Filter filter = new Filter("http://", _constraints);
    Categorizer categorizer = new Categorizer(_categories);
    Ranker ranker = new Ranker(categorizer);
    Extractor extractor = new Extractor();
    HttpResourceFetcher httpfetcher = new HttpResourceFetcher();

    foreach (String url in urls)
    {
        DateTime startTime = DateTime.Now;
        ResourceContent resource = null;
        if (httpfetcher.canFetch(url))
        {
            resource = httpfetcher.fetch(url, 10000, 100);
        }
        DateTime fetchEndTime = DateTime.Now;
        if ((resource == null) || (resource.getResourceContent() == null))
        {
            continue;
        }
        /*** 0. fetching the link from the internet ***/
        TimeSpan fetchingTime = fetchEndTime - startTime;

        // extract all the links in the page
        List<LinkItem> listOfLinks = extractor.extractLinks(resource.getResourceUrl(), resource.getResourceContent());
        RuntimeStatistics.addToExtractedUrls(listOfLinks.Count);
        DateTime extEndTime = DateTime.Now;
        /*** 1. Extracting the links from the request ***/
        TimeSpan extRequest = extEndTime - fetchEndTime;

        // reset the dictionary in filter that tracks the urls from the same page
        filter.resetDictionary();
        int filteredUrlsCount = 0;
        foreach (LinkItem item in listOfLinks)
        {
            // filter the links and keep only those that can be crawled
            List<String> links = new List<String>();
            links.Add(item.getLink());
            List<String> filteredLinks = filter.filterLinks(links);
            if (filteredLinks.Count > 0)
            {
                filteredUrlsCount++;
                Url url1 = new Url(filteredLinks[0], hashUrl(filteredLinks[0]), ranker.rankUrl(resource, item),
                                   item.getDomainUrl(), hashUrl(item.getDomainUrl()));
                deployLinksToFrontier(url1);
                RuntimeStatistics.addToFeedUrls(1);
            }
        }
        DateTime catStartTime = DateTime.Now;
        /*** 2. Ranking and deployment to the frontier ***/
        TimeSpan rankTotalRequest = catStartTime - extEndTime;

        // ascribe the url to all the categories it belongs to
        List<Result> classifiedResults = categorizer.classifyContent(resource.getResourceContent(), resource.getResourceUrl());
        if (classifiedResults.Count != 0)
        {
            RuntimeStatistics.addToCrawledUrls(1);
        }
        DateTime catEndTime = DateTime.Now;
        /*** 3. Classification of the current request ***/
        TimeSpan catTotalRequest = catEndTime - catStartTime;

        foreach (Result classifiedResult in classifiedResults)
        {
            Result result = new Result("0", classifiedResult.getUrl(), classifiedResult.getCategoryID(),
                                       resource.getRankOfUrl(), classifiedResult.getTrustMeter());
            deployResourceToStorage(result);
        }
        DateTime endTime = DateTime.Now;
        /*** 4. deployment to the database (result) ***/
        TimeSpan deployRequest = endTime - catEndTime;
        /*** 5. Total processing time ***/
        TimeSpan totalRequest = endTime - startTime;
    }
}
/**
 * This method tries to process the given content, assuming that the given
 * content can be processed by this processor.
 */
public void process(ResourceContent resource)
{
    DateTime startTime = DateTime.Now;

    // extract all the links in the page
    List<LinkItem> listOfLinks = extractor.extractLinks(resource.getResourceUrl(), resource.getResourceContent());
    RuntimeStatistics.addToExtractedUrls(listOfLinks.Count);
    DateTime extEndTime = DateTime.Now;
    /*** 1. Extracting the links from the request ***/
    TimeSpan extRequest = extEndTime - startTime;

    // reset the dictionary in filter that tracks the urls from the same page
    filter.resetDictionary();
    int filteredUrlsCount = 0;
    foreach (LinkItem item in listOfLinks)
    {
        // filter the links and keep only those that can be crawled
        List<String> links = new List<String>();
        links.Add(item.getLink());
        List<String> filteredLinks = filter.filterLinks(links);
        if (filteredLinks.Count > 0)
        {
            filteredUrlsCount++;
            Url url = new Url(filteredLinks[0], hashUrl(filteredLinks[0]), ranker.rankUrl(resource, item),
                              item.getDomainUrl(), hashUrl(item.getDomainUrl()));
            deployLinksToFrontier(url);
            RuntimeStatistics.addToFeedUrls(1);
        }
    }
    DateTime catStartTime = DateTime.Now;
    /*** 2. Ranking and deployment to the frontier ***/
    TimeSpan rankTotalRequest = catStartTime - extEndTime;

    // ascribe the url to all the categories it belongs to
    List<Result> classifiedResults = categorizer.classifyContent(resource.getResourceContent(), resource.getResourceUrl());
    if (classifiedResults.Count != 0)
    {
        RuntimeStatistics.addToCrawledUrls(1);
    }
    DateTime catEndTime = DateTime.Now;
    /*** 3. Classification of the current request ***/
    TimeSpan catTotalRequest = catEndTime - catStartTime;

    // save all the results to storage
    foreach (Result classifiedResult in classifiedResults)
    {
        Result result = new Result("0", classifiedResult.getUrl(), classifiedResult.getCategoryID(),
                                   resource.getRankOfUrl(), classifiedResult.getTrustMeter());
        deployResourceToStorage(result);
    }
    DateTime endTime = DateTime.Now;
    /*** 4. deployment to the database (result) ***/
    TimeSpan deployRequest = endTime - catEndTime;
    /*** 5. Total processing time ***/
    TimeSpan totalRequest = endTime - startTime;

    // write the stage timings to this thread's timing log file
    if (LogDebuggerControl.getInstance().enableTiming)
    {
        using (StreamWriter sw = new StreamWriter("_DEBUG_INFO_PROCESSOR_TIMING@" + System.Threading.Thread.CurrentThread.ManagedThreadId + ".txt", true))
        {
            sw.WriteLine(" TIMING FOR REQ - [] ");
            sw.WriteLine(" - Extractor Time " + extRequest.TotalSeconds + " seconds ");
            sw.WriteLine(" - Ranker Time " + rankTotalRequest.TotalSeconds + " seconds ");
            sw.WriteLine(" - Categori. Time " + catTotalRequest.TotalSeconds + " seconds ");
            sw.WriteLine(" - Deploy Time " + deployRequest.TotalSeconds + " seconds ");
            sw.WriteLine(" - Total Timing " + totalRequest.TotalSeconds + " seconds ");
        }
    }
}
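// Several snippets above open a per-thread timing file inline (run() and
// process() both do). A small hedged helper that factors out that pattern;
// TimingLog is hypothetical and not part of the original codebase:
using System.IO;
using System.Threading;

static class TimingLog
{
    // Appends one line to a log file named after the prefix and the current thread.
    public static void Append(string prefix, string line)
    {
        string path = prefix + "@" + Thread.CurrentThread.ManagedThreadId + ".txt";
        using (StreamWriter sw = new StreamWriter(path, true))
        {
            sw.WriteLine(line);
        }
    }
}
// Usage sketch: TimingLog.Append("_DEBUG_INFO_TIMING", " TIMING FOR REQ - ...");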