/// <summary>
/// Records a critical archive error in-memory only (no archive-log DB write,
/// unlike updateDBForError): clears the per-document success flag, resolves
/// the numeric error code from the pending exception, and sets the error
/// message and one-char error flag matching the failure type.
/// </summary>
/// <param name="errorType">Which archive phase failed (G = source doc not
/// found, A = add to target failed, D = delete source failed); any other
/// value is treated as a generic failure and escalates to critical.</param>
public void updateForCriticalError(ArchiveJobError.ErrorType errorType)
{
    this.docMarkedForSuccess = false;
    // Translate the last captured exception into a numeric error code.
    errorCode = ArchJobErrorDesc.getInstance().getErrorCode(exception);
    GAD_Error_Occured = true;

    if (errorType == ArchiveJobError.ErrorType.G)
    {
        this.errorMessage = generateErrorMessage("Source Doc not found:");
        errorCodeChar = 'G';
    }
    else if (errorType == ArchiveJobError.ErrorType.A)
    {
        this.errorMessage = generateErrorMessage("Add doc to target failed:");
        errorCodeChar = 'A';
    }
    else if (errorType == ArchiveJobError.ErrorType.D)
    {
        this.errorMessage = generateErrorMessage("Delete source failed:");
        errorCodeChar = 'D';
    }
    else
    {
        // Unknown failure type: generic message, and escalate to critical.
        this.errorMessage = generateErrorMessage("Archive failed:");
        this.criticalError = true;
    }
}
/// <summary>
/// Persists an archive failure to the archive log via PwnArchLogDAO and
/// updates the in-memory error state (error flag, failure counter).
/// Escalates to a critical error when the log write itself fails, or when
/// the failure type is not one of the known phases (in which case nothing
/// is written to the log).
/// </summary>
/// <param name="errorType">Which archive phase failed (G = source doc not
/// found, A = add to target failed, D = delete source failed).</param>
/// <param name="pawnDocRegVO">The document registration record the failure
/// relates to; passed through to the archive-log DAO.</param>
public void updateDBForError(ArchiveJobError.ErrorType errorType, PawnDocRegVO pawnDocRegVO)
{
    // Translate the last captured exception into a numeric error code.
    int errorCode = ArchJobErrorDesc.getInstance().getErrorCode(exception);
    GAD_Error_Occured = true;
    _failCount++;

    // Map the failure type to its message prefix and one-char log flag.
    // The three known phases share identical logging logic below; the
    // original code duplicated the DAO call in each branch.
    string messagePrefix;
    char errorFlag;
    if (errorType == ArchiveJobError.ErrorType.G)
    {
        messagePrefix = "Source Doc not found:";
        errorFlag = 'G';
    }
    else if (errorType == ArchiveJobError.ErrorType.A)
    {
        messagePrefix = "Add doc to target failed:";
        errorFlag = 'A';
    }
    else if (errorType == ArchiveJobError.ErrorType.D)
    {
        messagePrefix = "Delete source failed:";
        errorFlag = 'D';
    }
    else
    {
        // Unknown failure type: build the message (call kept for parity with
        // the original flow) and escalate to critical without a DB write.
        generateErrorMessage("Archive failed:");
        this.criticalError = true;
        return;
    }

    string errorMsg = generateErrorMessage(messagePrefix);
    bool success = PwnArchLogDAO.getInstance().CreateArchLogWithError(pawnDocRegVO, errorCode, errorMsg, errorFlag, isRecovery);
    if (!success)
    {
        // Failing to record the failure is itself a critical condition.
        this.criticalError = true;
    }
}
/// <summary>
/// Background-worker entry point for the recovery-only archive run: loads the
/// set of documents that previously failed (GetDocumentToArchOnlyRecovery),
/// then repeatedly starts one ArchiveJob thread per document, throttled by
/// GetAllowedJob(), until the list is drained or the run is cancelled.
/// NOTE(review): this variant never refills totList (the refill calls are
/// commented out), so it processes exactly one batch of recovery documents.
/// </summary>
private void Execute()
{
    Thread jobThread = null;
    ArchiveJob archJob = null;
    ThreadBean tBean = null;
    initDBNameDictionary();
    //Init Error Messages
    log.Info(string.Format("Execution started with no of threads {0}", this.totalCount));
    if (!ArchJobErrorDesc.getInstance().getStatus())
    {
        // Error-message lookup failed to initialize; abort the whole run.
        log.Error("Problem in initializing Error Messages , stopping process");
        return;
    }
    var tempList = new List <vo.PawnDocRegVO>();   // documents dispatched this pass, pending removal from totList
    var totList = new List <vo.PawnDocRegVO>();    // documents still waiting for a job thread
    bool dbCreateError = false;
    bool docGetError = false;
    totList = GetDocumentToArchOnlyRecovery(out dbCreateError, out docGetError);
    if (totList == null || totList.Count() == 0)
    {
        log.Info("No data found to do retry");
        return;
    }
    while (true)
    {
        // Cooperative cancellation: wait for in-flight jobs, then exit.
        if (worker.CancellationPending)
        {
            log.Info("cancel pressed felt in : main workder :step1 ");
            WaitUntillAllJobsCompleted();
            return;
        }
        if (totList.Count() == 0)
        {
            //totList = GetDocumentToArch(out dbCreateError,out docGetError);
            //totList = GetDocumentToArchTemp(out dbCreateError, out docGetError);
            //totList = GetDocumentToArchOnlyRecovery(out dbCreateError, out docGetError);
            if (docGetError)
            {
                log.Error("Execution aborted due to error in getting doc from SP");
                return;
            }
            if (dbCreateError) //check for error's during db creation
            {
                log.Error("Execution aborted due to error in DB creation in couch or db");
                return;
            }
            // List drained and no refill performed: normal completion path.
            if (totList == null || totList.Count == 0)
            {
                WaitUntillAllJobsCompleted();
                log.Debug("All jobs completed , Exiting");
                log.Debug("******************************************Reutne");
                return;
            }
        }
        int allowedJob = GetAllowedJob();   // throttle: how many new jobs may start this pass
        /*if (this.completedList.Count()==119)
         * {
         *     log.Info("Reached 119");
         * }*/
        // Report progress to the UI as "total:done:remaining".
        worker.ReportProgress(computePercent(this.completedList.Count(), totalCount), string.Format("{0}:{1}:{2}", totalCount, completedList.Count(), (totalCount - completedList.Count()))); //compl count with duplicate
        /*worker.ReportProgress(computePercent(this.completedList.Count()+duplCount, totalCount),
         * string.Format("{0}:{1}:{2}", totalCount, completedList.Count()+this.duplCount,
         * (totalCount - (completedList.Count()+this.duplCount))));*/
        //compl count with duplicate
        int jobCount = 0;
        // Start one worker thread per document, up to the allowed quota.
        foreach (var pawnDocRegVo in totList)
        {
            if (jobCount < allowedJob)
            {
                archJob = new ArchiveJob(pawnDocRegVo, this.mainForm.getMainForm().getSourceCouch(), this.mainForm.getMainForm().getTargetCouch(), isRecovery);
                jobThread = new Thread(new ThreadStart(archJob.process));
                jobThread.Name = "T" + pawnDocRegVo.DocID.ToString();
                tBean = new ThreadBean();
                tBean.Job = archJob;
                tBean.DocumentID = pawnDocRegVo.DocID;
                tBean.ThreadObj = jobThread;
                // NOTE(review): runningList is presumably consumed by
                // WaitUntillAllJobsCompleted/GetAllowedJob — confirm.
                runningList.Add(tBean);
                tempList.Add(pawnDocRegVo);
                jobThread.Start();
                jobCount++;
            }
            else
            {
                break;
            }
        }
        //clear allocated job from totlist
        /*foreach (var threadBean in runningList)
         * {
         *     totList.Remove(threadBean.Job.getVO());
         * }*/
        // Remove the documents just dispatched. Done outside the foreach above
        // because a List cannot be modified while it is being enumerated.
        if (tempList.Count > 0)
        {
            foreach (var tempBeanVo in tempList)
            {
                totList.Remove(tempBeanVo);
            }
            tempList.Clear();
        }
        if (criticalErrorOccured)
        {
            log.Error("Process Cancelled################### ");
            this.worker.CancelAsync();
        }
        Thread.Sleep(50);   // brief pause before the next dispatch pass
    }
}
/// <summary>
/// Background-worker entry point for the ArchiveJob1 run: seeds the work list
/// from the main form, creates the target DBs, then loops — refilling the list
/// via GetDocumentToArchTemp and starting one ArchiveJob1 thread per document —
/// until the data source is exhausted or the run is cancelled. Thread creation
/// is throttled by GetAllowedJob() plus process-wide thread-count and private-
/// memory ceilings.
/// </summary>
private void Execute()
{
    ArchiveJob1 archJob = null;
    initDBNameDictionary();
    log.Info(string.Format("Execution started with no of threads {0}", this.noOfThreads));
    if (!ArchJobErrorDesc.getInstance().getStatus())
    {
        // Error-message lookup failed to initialize; abort the whole run.
        log.Error("Problem in initializing Error Messages , stopping process");
        return;
    }
    List <vo.PawnDocRegVO> tempList = null;   // documents dispatched this pass, pending removal from totList
    List <vo.PawnDocRegVO> totList = null;    // documents still waiting for a job thread
    //if (!isRecovery)
    // NOTE(review): assumes getData() never returns null — confirm, otherwise
    // the Count access below throws.
    totList = this.mainForm.getData();
    if (totList.Count > 0)
    {
        // Remember the last doc id of the seed batch (presumably the fetch
        // cursor for subsequent batches — confirm against fetch thread).
        lastRecord = totList[totList.Count - 1].DocID;
        if (!getDBNames(totList))
        {
            log.Error("DB creation failed , execution aborted");
            return;
        }
        // Kick off the background prefetch of the next batch.
        checkAndStartFetchThread();
    }
    while (true)
    {
        // Cooperative cancellation: wait for in-flight jobs, then exit.
        if (worker.CancellationPending)
        {
            //this.mainForm.setStatusLabel("Waiting for threads to end...");
            log.Info("cancel pressed felt in : main workder :step1 ");
            WaitUntillAllJobsCompleted();
            return;
        }
        if (totList == null || totList.Count() == 0)
        {
            log.Info("Going to get data............................");
            //totList = GetDocumentToArch(out dbCreateError,out docGetError);
            bool dbCreateError = false;
            bool docGetError = false;
            totList = GetDocumentToArchTemp(out dbCreateError, out docGetError);
            //start the next search
            checkAndStartFetchThread();
            if (docGetError)
            {
                log.Error("Execution aborted due to error in getting doc from SP");
                return;
            }
            if (dbCreateError) //check for error's during db creation
            {
                log.Error("Execution aborted due to error in DB creation in couch or db");
                return;
            }
            // No more data anywhere: normal completion path.
            if (totList == null || totList.Count == 0)
            {
                WaitUntillAllJobsCompleted();
                log.Debug("All jobs completed , Exiting");
                log.Debug("******************************************Reutne");
                return;
            }
        }
        int allowedJob = GetAllowedJob();   // throttle: how many new jobs may start this pass
        int jobCount = 0;
        tempList = new List <PawnDocRegVO>();
        Thread.Sleep(100);   // brief pause between dispatch passes
        log.Info("Memory usage :" + System.Diagnostics.Process.GetCurrentProcess().PrivateMemorySize64 / 1024 / 1024 + "MB");
        foreach (var pawnDocRegVo in totList)
        {
            // Re-sample process limits before each thread allocation.
            int currentThreadCount = System.Diagnostics.Process.GetCurrentProcess().Threads.Count;
            long maxMemory = System.Diagnostics.Process.GetCurrentProcess().PrivateMemorySize64 / 1024 / 1024;   // private bytes in MB
            if (maxMemory > maxAllowedMemory)
            {
                log.Info("Max memory reached , allocation aborted current:" + maxMemory + " ,allowed :" + maxAllowedMemory);
                break;
            }
            if (currentThreadCount >= noOfThreads)
            {
                log.Info("Max thread count reached , allocation aborted current:" + currentThreadCount + " ,allowed :" + noOfThreads);
                break;
            }
            if (jobCount < allowedJob)
            {
                archJob = new ArchiveJob1(pawnDocRegVo, isRecovery);
                // NOTE(review): jobThread is not declared in this method —
                // presumably a field on this class; confirm.
                jobThread = new Thread(new ThreadStart(archJob.process));
                jobThread.Name = "T" + pawnDocRegVo.DocID;
                archJob.JobThread = jobThread;
                runningList.Add(archJob);
                tempList.Add(pawnDocRegVo);
                jobThread.Start();
                jobCount++;
            }
            else
            {
                break;
            }
        }
        // Remove the documents just dispatched. Done outside the foreach above
        // because a List cannot be modified while it is being enumerated.
        if (tempList.Count > 0)
        {
            foreach (var tempBeanVo in tempList)
            {
                totList.Remove(tempBeanVo);
            }
            tempList.Clear();
        }
        tempList = null;
        if (criticalErrorOccured)
        {
            log.Error("Process Cancelled################### ");
            this.worker.CancelAsync();
        }
    }
}
/// <summary>
/// Background-worker entry point for the batched ArchiveJob2 run: repeatedly
/// pulls documents via GetDocumentToArchTemp, slices them into batches of up
/// to 20, and starts one ArchiveJob2 thread per batch, throttled by
/// GetAllowedJob() plus process-wide thread-count and private-memory ceilings.
/// Runs until the data source is exhausted or the run is cancelled.
/// </summary>
private void Execute()
{
    Thread jobThread = null;
    ArchiveJob2 archJob = null;
    initDBNameDictionary();
    //Init Error Messages
    log.Info(string.Format("Execution started with no of threads {0}", this.totalCount));
    if (!ArchJobErrorDesc.getInstance().getStatus())
    {
        // Error-message lookup failed to initialize; abort the whole run.
        log.Error("Problem in initializing Error Messages , stopping process");
        return;
    }
    var totList = new List <vo.PawnDocRegVO>();   // documents still waiting to be assigned to a job
    bool dbCreateError = false;
    bool docGetError = false;
    int currentThreadCount = 0;
    long maxMemory = 0;
    int threadNameCount = 1;   // monotonically increasing suffix for worker thread names
    int jobCount = 0;
    int endCount = 20;         // batch size per job thread, capped at 20
    while (true)
    {
        // Kick off the background prefetch of the next batch.
        checkAndStartFetchThread();
        // Cooperative cancellation: wait for in-flight jobs, then exit.
        if (worker.CancellationPending)
        {
            log.Info("cancel pressed felt in : main workder :step1 ");
            WaitUntillAllJobsCompleted();
            return;
        }
        if (totList.Count() == 0)
        {
            log.Info("Going to get data............................");
            //totList = GetDocumentToArch(out dbCreateError,out docGetError);
            totList = GetDocumentToArchTemp(out dbCreateError, out docGetError);
            if (docGetError)
            {
                log.Error("Execution aborted due to error in getting doc from SP");
                return;
            }
            if (dbCreateError) //check for error's during db creation
            {
                log.Error("Execution aborted due to error in DB creation in couch or db");
                return;
            }
            // No more data anywhere: normal completion path.
            if (totList == null || totList.Count == 0)
            {
                WaitUntillAllJobsCompleted();
                log.Debug("All jobs completed , Exiting");
                log.Debug("******************************************Reutne");
                return;
            }
        }
        int allowedJob = GetAllowedJob();   // throttle: how many new jobs may start this pass
        /* worker.ReportProgress(computePercent(completedCount, totalCount),
         * string.Format("{0}:{1}:{2}", totalCount, completedCount, (totalCount - completedCount)));*/
        //compl count with duplicate
        //Thread.Sleep(100);
        log.Info("Memory usage :" + System.Diagnostics.Process.GetCurrentProcess().PrivateMemorySize64 / 1024 / 1024 + "MB");
        jobCount = 0;
        endCount = 20;
        while (jobCount < allowedJob)
        {
            if (worker.CancellationPending)
            {
                log.Error("Process Cancelled due to cancel###### ");
                //this.worker.CancelAsync();
                break;
            }
            // Re-sample process limits before each thread allocation.
            currentThreadCount = System.Diagnostics.Process.GetCurrentProcess().Threads.Count;
            maxMemory = System.Diagnostics.Process.GetCurrentProcess().PrivateMemorySize64 / 1024 / 1024;   // private bytes in MB
            if (maxMemory > maxAllowedMemory)
            {
                log.Info("Max memory reached , allocation aborted current:" + maxMemory + " ,allowed :" + maxAllowedMemory);
                break;
            }
            if (currentThreadCount >= noOfThreads)
            {
                log.Info("Max thread count reached , allocation aborted current:" + currentThreadCount + " ,allowed :" + noOfThreads);
                break;
            }
            // Clamp the batch to whatever is left in the pending list.
            if (totList.Count < endCount)
            {
                endCount = totList.Count;
            }
            // Build a FRESH list for each job. The previous code reused one
            // shared sub-list across iterations (Clear() + re-Add) after
            // handing it to the worker thread, so the dispatcher could mutate
            // the batch while ArchiveJob2.process was still reading it.
            List <PawnDocRegVO> subList1 = totList.GetRange(0, endCount);
            archJob = new ArchiveJob2(subList1, isRecovery);
            jobThread = new Thread(new ThreadStart(archJob.process));
            jobThread.Name = "T" + threadNameCount;
            archJob.JobThread = jobThread;
            runningList.Add(archJob);
            jobThread.Start();
            threadNameCount++;
            // Drop the dispatched batch from the head of the pending list
            // (same head elements the original per-item Remove loop deleted).
            totList.RemoveRange(0, endCount);
            if (totList.Count == 0)
            {
                break;
            }
            jobCount++;
        }
        if (criticalErrorOccured)
        {
            log.Error("Process Cancelled################### ");
            this.worker.CancelAsync();
        }
    }
}