/// <summary>
/// Initializes a new <see cref="JobInfo"/> describing a job run to execute.
/// </summary>
/// <param name="jobId">Identifier of the job.</param>
/// <param name="jobRunId">Identifier of this run; stored as the pipeline id.</param>
/// <param name="jobTypeId">Type identifier of the job.</param>
/// <param name="workRequests">Work requests that make up the job.</param>
/// <param name="bootParameters">Serialized boot parameters for the job.</param>
/// <param name="jobParameters">Job parameter object; not stored by this constructor — kept for signature compatibility.</param>
public JobInfo(int jobId, int jobRunId, int jobTypeId, List<WorkRequest> workRequests,
    string bootParameters, BaseJobBEO jobParameters)
{
    JobId = jobId;
    JobTypeId = jobTypeId;
    PipelineId = jobRunId.ToString();
    WorkRequests = workRequests;
    BootParameters = bootParameters;
    Command = Command.Run;
}
/// <summary>
/// Gets the next job of the given type to run on the given server from the load queue.
/// </summary>
/// <param name="jobTypeId">Job Type Id.</param>
/// <param name="serverId">Job Server Id.</param>
/// <returns>Parameters of the next job to run, or null when the queue is empty or an error occurs.</returns>
internal BaseJobBEO GetNextJobFromQueue(int jobTypeId, Guid serverId)
{
    try
    {
        DataSet dsResult;
        lock (_db)
        {
            // Instantiate the stored procedure to obtain the jobs from load queue.
            var dbCommand = _db.GetStoredProcCommand(Constants.StoredProcedureGetNextJobFromLoadQueue);

            // Add input parameters.
            _db.AddInParameter(dbCommand, Constants.InputParameterJobTypeId, DbType.Int32, jobTypeId);
            _db.AddInParameter(dbCommand, Constants.InputParameterJobServerId, DbType.Guid, serverId);

            // Execute the stored procedure and obtain the result set into a dataset.
            dsResult = _db.ExecuteDataSet(dbCommand);
        }

        // No result rows means there is no job waiting in the queue.
        if (dsResult == null || dsResult.Tables.Count == 0 ||
            dsResult.Tables[Constants.First].Rows.Count == 0)
        {
            return null;
        }

        // Map the first queue row onto the job parameters.
        var drResult = dsResult.Tables[Constants.First].Rows[Constants.First];
        var jobParameters = new BaseJobBEO
        {
            JobId = Convert.ToInt32(drResult[Constants.TableLoadJobQueueColumnJobId]),
            JobRunId = Convert.ToInt32(drResult[Constants.TableLoadJobQueueColumnJobRunId]),
            BootParameters = !Convert.IsDBNull(drResult[Constants.TableLoadJobQueueColumnJobParameters])
                ? Convert.ToString(drResult[Constants.TableLoadJobQueueColumnJobParameters])
                : string.Empty,
            JobScheduleRunDuration = Convert.ToInt32(drResult[Constants.TableLoadJobQueueColumnJobDurationMinutes]),
            JobTypeId = jobTypeId,
            // Convert.IsDBNull used consistently (the original mixed it with
            // DBNull.Value.Equals in this same method; GetJobDetails uses Convert.IsDBNull).
            JobNotificationId = !Convert.IsDBNull(drResult[Constants.TableJobMasterNotificationId])
                ? Convert.ToInt64(drResult[Constants.TableJobMasterNotificationId])
                : 0
        };

        // Enrich with scheduling details from the job master table.
        return GetJobDetails(jobParameters);
    }
    catch (Exception ex)
    {
        ex.Trace();
        return null; // Calling method handles this gracefully.
    }
}
/// <summary>
/// Marks the dataset's cluster status as out of date for import-type jobs
/// (DCB/EDoc import and load file import), identified from the job's boot parameters.
/// Failures are logged and swallowed so job processing is not interrupted.
/// </summary>
/// <param name="job">Job whose boot parameters identify the affected dataset.</param>
internal void UpdateClusterStatus(BaseJobBEO job)
{
    try
    {
        switch (job.JobTypeId)
        {
            case 2:  // DCB import (per GetReconversionDocCollection's job-type comments)
            case 8:  // EDoc import
                // using ensures the reader is released even when Deserialize throws
                // (the original only closed it on the success path).
                using (var stream = new StringReader(job.BootParameters))
                {
                    var xmlStream = new XmlSerializer(typeof(ProfileBEO));
                    var profileBeo = xmlStream.Deserialize(stream) as ProfileBEO;
                    if (profileBeo != null)
                    {
                        var dataSetService = new DataSetService(EstablishSession(profileBeo.CreatedBy));
                        dataSetService.UpdateClusterStatus(
                            profileBeo.DatasetDetails.FolderID.ToString(),
                            ClusterStatus.OutOfDate.ToString());
                    }
                }
                break;

            case 14: // Load file import
                using (var stream = new StringReader(job.BootParameters))
                {
                    var xmlStream = new XmlSerializer(typeof(ImportBEO));
                    var importBeo = xmlStream.Deserialize(stream) as ImportBEO;
                    if (importBeo != null)
                    {
                        var dataSetService = new DataSetService(EstablishSession(importBeo.CreatedBy));
                        dataSetService.UpdateClusterStatus(
                            importBeo.DatasetId.ToString(),
                            ClusterStatus.OutOfDate.ToString());
                    }
                }
                break;
        }
    }
    catch (Exception ex)
    {
        ex.AddDbgMsg("Unable to update cluster status to out of date by Job Id: {0}", job.JobId);
        ex.Trace();
    }
}
/// <summary>
/// Populates scheduling details (creator, notification id, recurrence type) for
/// the given job from the job master table.
/// </summary>
/// <param name="jobDetails">BaseJobBEO to enrich; may be null.</param>
/// <returns>BaseJobBEO — the same instance, enriched when a job master row exists.</returns>
internal BaseJobBEO GetJobDetails(BaseJobBEO jobDetails)
{
    try
    {
        if (jobDetails != null)
        {
            DataSet resultSet;
            lock (_db)
            {
                // Look the job up in the job master table by its id.
                var command = _db.GetStoredProcCommand(Constants.StoredProcedureGetFromJobMaster);
                _db.AddInParameter(command, Constants.InputParameterJobId, DbType.Int32, jobDetails.JobId);
                resultSet = _db.ExecuteDataSet(command);
            }

            var hasRow = resultSet != null
                && resultSet.Tables.Count > 0
                && resultSet.Tables[Constants.First].Rows.Count > 0;
            if (hasRow)
            {
                // Copy the master-table columns onto the job parameters.
                var row = resultSet.Tables[Constants.First].Rows[Constants.First];
                jobDetails.JobScheduleCreatedBy = row[Constants.TableJobMasterColumnCreatedBy].ToString();
                jobDetails.JobNotificationId = Convert.IsDBNull(row[Constants.TableJobMasterColumnNotfnId])
                    ? 0
                    : Convert.ToInt64(row[Constants.TableJobMasterColumnNotfnId]);
                jobDetails.JobFrequency = row[Constants.TableJobMasterColumnRecurrenceType].ToString();
            }
        }
    }
    catch (Exception ex)
    {
        Tracer.Error("Unable to get the job details.");
        ex.Trace();
    }

    // Return the (possibly enriched) job parameters.
    return jobDetails;
}
/// <summary>
/// Creates a <see cref="JobInfo"/> for a run of the given job, defaulting the
/// command to <c>Command.Run</c>.
/// </summary>
/// <param name="jobId">Identifier of the job.</param>
/// <param name="jobRunId">Run identifier; its string form becomes the pipeline id.</param>
/// <param name="jobTypeId">Type identifier of the job.</param>
/// <param name="workRequests">Work requests comprising the job.</param>
/// <param name="bootParameters">Serialized boot parameters.</param>
/// <param name="jobParameters">Job parameter object; not stored here — retained for signature compatibility.</param>
public JobInfo(int jobId, int jobRunId, int jobTypeId, List<WorkRequest> workRequests,
    string bootParameters, BaseJobBEO jobParameters)
{
    Command = Command.Run;
    JobId = jobId;
    JobTypeId = jobTypeId;
    WorkRequests = workRequests;
    BootParameters = bootParameters;
    PipelineId = jobRunId.ToString();
}
/// <summary>
/// Builds the document collection for reconversion, dispatching on the type of
/// the original (base) job that produced the documents.
/// </summary>
/// <returns>Populated <see cref="ConversionDocCollection"/> for the reconversion run.</returns>
public ConversionDocCollection GetReconversionDocCollection()
{
    var docs = new ConversionDocCollection();

    // Collection id to be used in reconversion.
    string collectionId = "";

    // Populate job info.
    docs.JobConfig = BootObject;
    BaseJobBEO baseConfig = ReconversionDAO.GetJobConfigInfo(Convert.ToInt32(BootObject.OrginialJobId));
    docs.BaseJobTypeId = baseConfig.JobTypeId;

    // Different types of base job use different objects to hold job config info.
    if (baseConfig.JobTypeId == 9) // Base job is a production job.
    {
        docs.BaseJobConfig = GetBootObject<ProductionDetailsBEO>(baseConfig.BootParameters);

        // For production reconversion the collection id carried in the job
        // parameters is the native set collection id.
        collectionId = ((ProductionDetailsBEO)docs.BaseJobConfig).OriginalCollectionId;

        // Dataset associated with the document set.
        docs.DataSet = DataSetBO.GetDataSetDetailForCollectionId(collectionId);

        // Matter associated with the document set.
        long matterId = docs.DataSet.Matter.FolderID;

        // Get the list of production documents to be reprocessed.
        var helper = new ConversionReprocessStartupHelper();
        IEnumerable<ReconversionProductionDocumentBEO> pDocs = helper.GetProductionDocumentList(
            BootObject.FilePath,
            BootObject.JobSelectionMode,
            matterId,
            docs.BaseJobConfig as ProductionDetailsBEO,
            docs.DataSet.RedactableDocumentSetId,
            Convert.ToInt32(BootObject.OrginialJobId),
            BootObject.Filters);

        // Materialize once: the original walked the sequence twice
        // (Any() followed by Cast().ToList()), which re-enumerates a lazy source.
        List<ReconversionDocumentBEO> productionDocs = null;
        if (pDocs != null)
        {
            productionDocs = pDocs.Cast<ReconversionDocumentBEO>().ToList();
        }
        if (productionDocs != null && productionDocs.Count > 0)
        {
            docs.Documents = productionDocs;
        }
    }
    else
    {
        if (baseConfig.JobTypeId == 14) // Load file import.
        {
            docs.BaseJobConfig = GetBootObject<ImportBEO>(baseConfig.BootParameters);
            // For import reconversion the collection id is the native document set collection id.
            collectionId = ((ImportBEO)docs.BaseJobConfig).CollectionId;
        }
        else if (baseConfig.JobTypeId == 2 || baseConfig.JobTypeId == 8) // DCB import and EDoc import.
        {
            docs.BaseJobConfig = GetBootObject<ProfileBEO>(baseConfig.BootParameters);
            collectionId = ((ProfileBEO)docs.BaseJobConfig).DatasetDetails.CollectionId;
        }
        else if (baseConfig.JobTypeId == 35) // Law import.
        {
            docs.BaseJobConfig = GetBootObject<LawImportBEO>(baseConfig.BootParameters);
            collectionId = ((LawImportBEO)docs.BaseJobConfig).CollectionId;
        }
        // NOTE(review): any other job type falls through with collectionId == "",
        // so GetDataSetDetailForCollectionId is called with an empty id — confirm intended.

        // Dataset associated with the document set.
        docs.DataSet = DataSetBO.GetDataSetDetailForCollectionId(collectionId);

        // Assign heartbeat file path; create the directory when it does not exist.
        // (Plain concatenation preserved: the configured folder value is expected
        // to carry its own separators.)
        docs.HeartbeatFilePath = docs.DataSet.CompressedFileExtractionLocation
            + ApplicationConfigurationManager.GetValue("ReconversionHeartbeatFileFolder", "Imports")
            + PipelineId;
        if (!Directory.Exists(docs.HeartbeatFilePath))
        {
            Directory.CreateDirectory(docs.HeartbeatFilePath);
        }

        // Matter associated with the document set.
        long matterId = docs.DataSet.Matter.FolderID;
        docs.Documents = ConversionReprocessStartupHelper.GetImportDocumentList(
            BootObject.FilePath,
            BootObject.JobSelectionMode,
            matterId,
            docs.DataSet.FolderID,
            BootObject.OrginialJobId,
            BootObject.Filters);
    }

    return docs;
}