/// <summary>
/// Executes the stored procedure configured for <paramref name="processes"/> and records the
/// outcome on <paramref name="processTrigger"/> (Running -> Done/Fail).
/// </summary>
/// <param name="processes">Process definition; its PackageName is the stored-procedure name.</param>
/// <param name="job">Owning job (not used here; kept for signature parity with RunPackage).</param>
/// <param name="processTrigger">Trigger row whose Status/Remark track the execution.</param>
private void RunProcedure(Processes processes, ProcessJob job, ProcessTrigger processTrigger)
{
    var actionName = processes.PackageName;
    var parameters = new List<SqlParameter>();
    Log.WriteErrorLog(string.Format("Starting store procedure execution for {0}-{1}.", _serviceName, processes.Title));

    // Mark the trigger as Running before kicking off the procedure.
    processTrigger.Status = PackageStatus.Running;
    processTrigger.Remark = "Store procedure currently running...";
    _dataManager.UpdateProcessTrigger(_connectionString, processTrigger);

    // BUG FIX: the return value was previously discarded (`result` stayed string.Empty), so the
    // success branch below could never be taken. Capture the procedure's status message.
    // NOTE(review): assumes RunProcedureWithMessage returns the status message string — confirm.
    var result = SqlDataManager.RunProcedureWithMessage(_connectionString, actionName, parameters.ToArray());

    if (result != null && result.Contains("Success"))
    {
        Log.WriteErrorLog(string.Format("Store procedure execution for {0}-{1} successfull.", _serviceName, processes.Title));
        // Trigger -> Done, keeping the procedure's own message as the remark.
        processTrigger.Status = PackageStatus.Done;
        processTrigger.Remark = result;
        _dataManager.UpdateProcessTrigger(_connectionString, processTrigger);
    }
    else
    {
        Log.WriteErrorLog(string.Format("Store procedure execution for {0}-{1} failed.{2}", _serviceName, processes.Title, result));
        // Trigger -> Fail with a composed failure remark.
        processTrigger.Status = PackageStatus.Fail;
        processTrigger.Remark = string.Format("Package {0}-{1} failed. {2}", _serviceName, processes.Title, result);
        _dataManager.UpdateProcessTrigger(_connectionString, processTrigger);
    }
}
/// <summary>
/// Schedules a two-stage job chain: ProcessJob fills per-entity add/remove flags, then
/// FacingTargetJob applies them through a BeginSimulation command buffer.
/// </summary>
/// <param name="inputDeps">Incoming dependency handle from earlier systems.</param>
/// <returns>Handle of the last scheduled job.</returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    var length = m_Group.CalculateLength();
    // Scratch arrays shared between the two jobs below.
    // NOTE(review): allocated with Allocator.TempJob but never disposed here — presumably the
    // jobs carry [DeallocateOnJobCompletion] on these fields; confirm, otherwise this leaks
    // every frame. `@bool` escapes a project-defined type named "bool", not the keyword.
    var entityArray = new NativeArray<Entity>(length, Allocator.TempJob);
    var addArray = new NativeArray<@bool>(length, Allocator.TempJob);
    var removeArray = new NativeArray<@bool>(length, Allocator.TempJob);
    var commandBufferSystem = World.Active.GetExistingManager<BeginSimulationEntityCommandBufferSystem>();

    // Stage 1: read-only component lookups decide which entities gain/lose FacingTarget.
    inputDeps = new ProcessJob
    {
        EntityArray = entityArray,
        AddArray = addArray,
        RemoveArray = removeArray,
        TranslationFromEntity = GetComponentDataFromEntity<Translation>(true),
        FacingTargetFromEntity = GetComponentDataFromEntity<FacingTarget>(true)
    }.Schedule(this, inputDeps);

    // Stage 2: parallel-for (batch size 64) replays the decisions into the command buffer.
    inputDeps = new FacingTargetJob
    {
        CommandBuffer = commandBufferSystem.CreateCommandBuffer().ToConcurrent(),
        EntityArray = entityArray,
        AddArray = addArray,
        RemoveArray = removeArray
    }.Schedule(length, 64, inputDeps);

    // Let the barrier system wait on our jobs before playing back the buffer.
    commandBufferSystem.AddJobHandleForProducer(inputDeps);
    return (inputDeps);
}
public void Prefiltering_SingleArchetype_SingleChunk_Unfiltered()
{
    // Small enough entity count that everything fits in a single chunk.
    const int entityTotal = 10;

    var testArchetype = m_Manager.CreateArchetype(
        ComponentType.ReadWrite<EcsTestData>(),
        ComponentType.ReadWrite<EcsTestData2>());
    var query = m_Manager.CreateEntityQuery(
        ComponentType.ReadWrite<EcsTestData>(),
        ComponentType.ReadWrite<EcsTestData2>());

    var spawned = new NativeArray<Entity>(entityTotal, Allocator.TempJob);
    m_Manager.CreateEntity(testArchetype, spawned);

    // Measure pure scheduling cost (jobs are chained, completed once at the end).
    var chainedHandle = new JobHandle();
    Measure.Method(
            () => { chainedHandle = new ProcessJob().Schedule(query, chainedHandle); })
        .Definition("Scheduling")
        .Run();
    chainedHandle.Complete();

    // Measure schedule + execution cost per iteration.
    Measure.Method(
            () =>
            {
                var handle = new ProcessJob().Schedule(query);
                handle.Complete();
            })
        .Definition("ScheduleAndRun")
        .Run();

    spawned.Dispose();
}
/// <summary>
/// Entry point method: builds a ProcessJob from CLI arguments/environment and constructs the
/// processor that would handle it.
/// </summary>
/// <param name="bucket">The S3 bucket.</param>
/// <param name="key">The S3 key.</param>
/// <param name="dataset">The dataset name.</param>
/// <param name="author">The dataset author.</param>
/// <param name="awsProfile">The AWS credential profile name.</param>
public static void Main(
    string bucket,
    string key,
    string dataset,
    string author,
    string awsProfile = null)
{
    var loggerFactory = CreateLoggerFactory();
    (var creds, var region) = GetAWSConfig();
    var s3 = new AmazonS3Client(creds, region);

    // Destination location comes from the environment, not the command line.
    var job = new ProcessJob()
    {
        SourceBucketName = bucket,
        SourceKey = key,
        DestinationBucket = Environment.GetEnvironmentVariable("CHESSDB_DST_BUCKET"),
        DestinationPrefix = Environment.GetEnvironmentVariable("CHESSDB_DST_PREFIX"),
        DatasetId = new DatasetId()
        {
            Author = author,
            Name = dataset,
        },
        S3 = s3,
    };

    // NOTE(review): `processor` is constructed but `job` is never passed to it (no
    // ProcessAsync call visible) — looks like a missing submission step; confirm. Also
    // `awsProfile` is accepted but unused here.
    var processor = new JobProcessor(loggerFactory.CreateLogger<JobProcessor>());
}
/// <summary>
/// Runs the startup component for <paramref name="startNode"/> and seeds every parameter node
/// with its result, returning the nodes whose parent list contains the start node.
/// </summary>
/// <param name="paramList">All component nodes participating in the run.</param>
/// <param name="startNode">The tree's start node; its execution name resolves the component.</param>
/// <param name="extraProcessInfos">Extra context handed to the startup component.</param>
/// <param name="callback">Notification sink for the component.</param>
/// <param name="batch">Owning batch.</param>
/// <param name="group">Owning group.</param>
/// <param name="job">Owning job.</param>
/// <returns>The children of the start node (deferred LINQ query over paramList).</returns>
private static IEnumerable<ComponentNode> ExcuteComplexStartNode(List<ComponentNode> paramList, ComponentNode startNode, ICollection<ExtraProcessInfo> extraProcessInfos, INotificationCallback callback, ProcessBatch batch, ProcessGroup group, ProcessJob job)
{
    // Guid() is Guid.Empty: FirstOrDefault returns Empty when no parent id matches,
    // so "!= zero" means "startNode.Id appears in p.ParentIdList".
    Guid zero = new Guid();
    // NOTE: deferred query — it is (re-)evaluated by the caller when enumerated.
    IEnumerable<ComponentNode> childParams = from p in paramList where (p.ParentIdList.FirstOrDefault(pl => pl == startNode.Id) != zero) select p;
    IExecuteStartupComponent component = ProcessObjectLocator.LocateStartComponentProcess(startNode.CompopnentExcutionName);
    if (component != null)
    {
        // Run whatever client initialization the startup component performs.
        IResultForNextNode obj = component.ExecuteStartupComponent(batch, group, job, extraProcessInfos, paramList, startNode, callback);
        // The startup node usually opens the raw input file; propagate its result to
        // EVERY node (not just children) and reset per-node bookkeeping.
        foreach (var param in paramList)
        {
            param.StartupResult = obj;
            param.TreeExecutionTag = startNode.TreeExecutionTag;
            param.ParentComponentResults = new List<IResultForNextNode>();
            param.ParentComponentResults.Add(obj);
            param.ProcessedParentCount = 0;
        }
    }
    // If no component was located, children are returned unseeded.
    return (childParams);
}
/// <summary>
/// Schedules ProcessJob to enqueue add/remove TargetInRange decisions, then two follow-up
/// jobs that drain those queues into BeginSimulation command buffers.
/// </summary>
/// <param name="inputDeps">Incoming dependency handle.</param>
/// <returns>Combined handle of the two drain jobs.</returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    var commandBufferSystem = World.GetExistingManager<BeginSimulationEntityCommandBufferSystem>();

    // Producer: writes into both queues concurrently; component lookups are read-only.
    inputDeps = new ProcessJob
    {
        AddTargetInRangeQueue = m_AddTargetInRangeQueue.ToConcurrent(),
        RemoveTargetInRangeQueue = m_RemoveTargetInRangeQueue.ToConcurrent(),
        TranslationFromEntity = GetComponentDataFromEntity<Translation>(true),
        TargetInRangeFromEntity = GetComponentDataFromEntity<TargetInRange>(true),
        DeltaTime = UnityEngine.Time.deltaTime
    }.Schedule(this, inputDeps);

    // Consumers: each drains one queue; both depend on the producer, not on each other,
    // so they can run in parallel (separate command buffers).
    var addTargetInRangeDeps = new AddTargetInRangeJob
    {
        AddTargetInRangeQueue = m_AddTargetInRangeQueue,
        CommandBuffer = commandBufferSystem.CreateCommandBuffer()
    }.Schedule(inputDeps);
    var removeTargetInRangeDeps = new RemoveTargetInRangeJob
    {
        RemoveTargetInRangeQueue = m_RemoveTargetInRangeQueue,
        CommandBuffer = commandBufferSystem.CreateCommandBuffer()
    }.Schedule(inputDeps);

    inputDeps = JobHandle.CombineDependencies(addTargetInRangeDeps, removeTargetInRangeDeps);
    commandBufferSystem.AddJobHandleForProducer(inputDeps);
    return (inputDeps);
}
/// <summary>
/// Measures query prefiltering over 8 archetypes x 1000 entities with a shared-component
/// filter applied (only entities whose EcsTestSharedComp.value == 0 match).
/// </summary>
public void Prefiltering_MultipleArchetype_MultipleChunks_Filtered()
{
    var allTypes = new ComponentType[5];
    allTypes[0] = ComponentType.ReadWrite<EcsTestSharedComp>();
    allTypes[1] = ComponentType.ReadWrite<EcsTestData>();
    allTypes[2] = ComponentType.ReadWrite<EcsTestData2>();
    allTypes[3] = ComponentType.ReadWrite<EcsTestData3>();
    allTypes[4] = ComponentType.ReadWrite<EcsTestData4>();

    // Every archetype contains the shared comp + EcsTestData, plus differing extras.
    var allArchetypes = new EntityArchetype[8];
    allArchetypes[0] = m_Manager.CreateArchetype(allTypes[0], allTypes[1]);
    allArchetypes[1] = m_Manager.CreateArchetype(allTypes[0], allTypes[1], allTypes[2]);
    allArchetypes[2] = m_Manager.CreateArchetype(allTypes[0], allTypes[1], allTypes[3]);
    allArchetypes[3] = m_Manager.CreateArchetype(allTypes[0], allTypes[1], allTypes[4]);
    allArchetypes[4] = m_Manager.CreateArchetype(allTypes[0], allTypes[1], allTypes[2], allTypes[3]);
    allArchetypes[5] = m_Manager.CreateArchetype(allTypes[0], allTypes[1], allTypes[2], allTypes[4]);
    allArchetypes[6] = m_Manager.CreateArchetype(allTypes[0], allTypes[1], allTypes[3], allTypes[4]);
    allArchetypes[7] = m_Manager.CreateArchetype(allTypes);

    const int kEntityCountPerArchetype = 1000;
    for (int i = 0; i < 8; ++i)
    {
        var entities = new NativeArray<Entity>(kEntityCountPerArchetype, Allocator.TempJob);
        m_Manager.CreateEntity(allArchetypes[i], entities);
        for (int j = 0; j < kEntityCountPerArchetype; ++j)
        {
            // BUG FIX: previously indexed `entities[i]`, so only the first 8 entities of each
            // archetype (and the wrong ones) received the shared value; use the inner index j.
            m_Manager.SetSharedComponentData(entities[j], new EcsTestSharedComp { value = i % 10 });
        }
        entities.Dispose();
    }

    var dependsOn = new JobHandle();
    var group = m_Manager.CreateEntityQuery(
        ComponentType.ReadWrite<EcsTestData>(),
        ComponentType.ReadWrite<EcsTestSharedComp>());
    group.SetFilter(new EcsTestSharedComp { value = 0 });

    // Scheduling-only cost (chained handles, completed once afterwards).
    Measure.Method(
            () => { dependsOn = new ProcessJob().Schedule(group, dependsOn); })
        .Definition("Scheduling")
        .Run();
    dependsOn.Complete();

    // Schedule + execute cost per iteration.
    Measure.Method(
            () =>
            {
                var job = new ProcessJob().Schedule(group);
                job.Complete();
            })
        .Definition("ScheduleAndRun")
        .Run();
}
/// <summary>
/// Recursively executes <paramref name="thisNode"/>'s component once all of its parents have
/// completed, pushes the result to its children, then recurses into each child.
/// </summary>
/// <param name="paramList">All component nodes in the tree.</param>
/// <param name="thisNode">Node to execute now (or defer if parents are pending).</param>
/// <param name="callback">Notification sink for the component.</param>
/// <param name="batch">Owning batch.</param>
/// <param name="group">Owning group.</param>
/// <param name="job">Owning job.</param>
/// <returns>
/// The node itself when it is a Group-validation boundary; null when deferred or the component
/// type cannot be resolved; otherwise the result of the LAST child recursion.
/// </returns>
private static ComponentNode ExcuteComplexComponentNode(List<ComponentNode> paramList, ComponentNode thisNode, INotificationCallback callback, ProcessBatch batch, ProcessGroup group, ProcessJob job)
{
    ComponentNode nextNode = null;
    // Group-validation nodes act as a boundary: return them to the caller unexecuted.
    if (thisNode.CompNodeValidation == NodeValidationType.Group)
    {
        return (thisNode);
    }
    Debug.WriteLine(thisNode.ComponentName);
    // Count this visit; only execute once every parent has reported in.
    thisNode.ProcessedParentCount++;
    if (thisNode.ProcessedParentCount != thisNode.ParentIdList.Count)
    {
        // _excutableInWait.Add(thisNode);
        return (null);
    }
    // Guid() is Guid.Empty; "!= zero" means thisNode.Id appears in p.ParentIdList.
    Guid zero = new Guid();
    var childrenParams = (from p in paramList where (p.ParentIdList.FirstOrDefault(pl => pl == thisNode.Id) != zero) select p).ToList();
    // IEnumerable<ComponentNode> childrenParams = from p in paramList where p.ParentId == thisNode.Id select p;
    //IExcuteComponent component = ProcessObjectLocator.LocateComponentProcess(thisNode.CompopnentExcutionName);
    // Resolve the executable type by name; a fresh instance is created per node execution.
    Type tp = ProcessRunTimeLocator.GetExecutableType(thisNode.CompopnentExcutionName);
    if (tp == null)
    {
        return (null);
    }
    IExcuteComponent component = (IExcuteComponent)Activator.CreateInstance(tp);
    if (component != null)
    {
        IResultForNextNode ret = component.ExcuteThermoComponent(paramList, thisNode, callback, batch, group, job);
        if (ret != null)
        {
            // Tag the result with its producer and fan it out to every child.
            ret.ThisNodeId = thisNode.Id;
            foreach (var param in childrenParams)
            {
                param.ParentComponentResults.Add(ret);
            }
        }
    }
    // Reset this node so the tree can be executed again later.
    thisNode.ParentComponentResults.Clear();
    //_excutableInWait.Remove(thisNode);
    thisNode.ProcessedParentCount = 0;
    // Depth-first into children; note only the LAST child's return value survives.
    foreach (var childrenParam in childrenParams)
    {
        nextNode = ExcuteComplexComponentNode(paramList, childrenParam, callback, batch, group, job);
    }
    return (nextNode);
}
/// <summary>
/// Schedules ProcessJob as a parallel-for (batch 64) over the entities currently in m_Group,
/// with read-only Damaged lookup and writable Health lookup.
/// </summary>
/// <param name="inputDeps">Incoming dependency handle.</param>
/// <returns>Handle of the scheduled job.</returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    inputDeps = new ProcessJob
    {
        // NOTE(review): ToEntityArray(TempJob) is not disposed here — presumably the job field
        // carries [DeallocateOnJobCompletion]; confirm, otherwise this leaks each update.
        DamagedEntityArray = m_Group.ToEntityArray(Allocator.TempJob),
        DamagedFromEntity = GetComponentDataFromEntity<Damaged>(true),
        HealthFromEntity = GetComponentDataFromEntity<Health>()
    }.Schedule(m_Group.CalculateLength(), 64, inputDeps);
    return (inputDeps);
}
/// <summary>
/// Creates a report for <paramref name="processJob"/>; when <paramref name="threadSafe"/> is
/// true both the lazy initialization and the backing dictionaries are thread-safe.
/// </summary>
public DataCollectionReport(ProcessJob processJob, bool threadSafe)
{
    // Pick the dictionary factory once; the same factory backs all three lazy maps.
    Func<IDictionary<string, DataItem>> factory;
    if (threadSafe)
    {
        factory = ThreadSafeDictionaryFactory;
    }
    else
    {
        factory = DictionaryFactory;
    }

    _lotDatas = new Lazy<IDictionary<string, DataItem>>(factory, threadSafe);
    _waferDatas = new Lazy<IDictionary<string, DataItem>>(factory, threadSafe);
    _siteDatas = new Lazy<IDictionary<string, DataItem>>(factory, threadSafe);
    ProcessJob = processJob;
}
/// <summary>
/// Shell-executes <paramref name="fileName"/> and, when a valid job handle is configured,
/// attaches the resulting process to that job.
/// </summary>
/// <returns>The launched process, or null when launch failed.</returns>
public static Process ShellExec([NotNull] string fileName, string arguments, ShellSettings settings)
{
    if (settings == null)
    {
        settings = ShellSettings.Default;
    }

    SHELLEXECUTEINFO info = GetShellExecuteInfo(fileName, arguments, settings);
    Process process = InternalShellExec(info);

    // Only attach to the job when a process actually started and the handle is usable.
    if (process == null || settings.JobHandle.IsInvalidHandle())
    {
        return (process);
    }

    ProcessJob.AddProcess(settings.JobHandle, process);
    return (process);
}
public void Prefiltering_SingleArchetype_MultipleChunks_Filtered()
{
    // Large enough to span many chunks; shared values cycle 0..9 so the filter keeps 1/10th.
    const int entityTotal = 10000;

    var testArchetype = m_Manager.CreateArchetype(
        ComponentType.ReadWrite<EcsTestData>(),
        ComponentType.ReadWrite<EcsTestData2>(),
        ComponentType.ReadWrite<EcsTestSharedComp>());
    var query = m_Manager.CreateEntityQuery(
        ComponentType.ReadWrite<EcsTestData>(),
        ComponentType.ReadWrite<EcsTestData2>(),
        ComponentType.ReadWrite<EcsTestSharedComp>());

    var spawned = new NativeArray<Entity>(entityTotal, Allocator.TempJob);
    m_Manager.CreateEntity(testArchetype, spawned);
    for (int index = 0; index < entityTotal; ++index)
    {
        m_Manager.SetSharedComponentData(spawned[index], new EcsTestSharedComp { value = index % 10 });
    }

    var chainedHandle = new JobHandle();
    query.SetSharedComponentFilter(new EcsTestSharedComp { value = 0 });

    // Scheduling-only cost.
    Measure.Method(
            () => { chainedHandle = new ProcessJob().Schedule(query, chainedHandle); })
        .Definition("Scheduling")
        .Run();
    chainedHandle.Complete();

    // Schedule + execute cost.
    Measure.Method(
            () =>
            {
                var handle = new ProcessJob().Schedule(query);
                handle.Complete();
            })
        .Definition("ScheduleAndRun")
        .Run();

    spawned.Dispose();
}
/// <summary>
/// Copies the properties of <paramref name="job"/> onto the persisted entity with the same
/// ProcessJobId and saves the change.
/// </summary>
/// <param name="connectionString">Database connection string for the context.</param>
/// <param name="job">Detached job carrying the new property values.</param>
/// <exception cref="InvalidOperationException">No row exists for job.ProcessJobId.</exception>
public void UpdateProcessJob(string connectionString, ProcessJob job)
{
    using (var entityContext = new CoreContext(connectionString))
    {
        var existingEntity = (from e in entityContext.Set<ProcessJob>()
                              where e.ProcessJobId == job.ProcessJobId
                              select e).FirstOrDefault();

        // FIX: FirstOrDefault can return null; previously that null flowed into PropertyMap
        // and surfaced as a NullReferenceException. Fail with an explicit message instead.
        if (existingEntity == null)
        {
            throw new InvalidOperationException(
                string.Format("ProcessJob with id {0} was not found.", job.ProcessJobId));
        }

        SimpleMapper.PropertyMap(job, existingEntity);
        entityContext.SaveChanges();
    }
}
/// <summary>
/// Builds a single-element job list containing the default uploader job for the given work id.
/// </summary>
/// <param name="mId">Work id to bind the job to.</param>
/// <returns>List holding exactly one default job.</returns>
private static List<ProcessJob> CreateProcessJob(long mId)
{
    var defaultJob = new ProcessJob()
    {
        Name = "up loader default job",
        JobWorkId = mId
    };
    return (new List<ProcessJob> { defaultJob });
}
/// <summary>
/// Builds one ProcessJob per measurement, copying its name and using its id as the work id.
/// </summary>
/// <param name="measurements">Measurements to convert; empty input yields an empty list.</param>
/// <returns>One job per input measurement, in input order.</returns>
public static List<ProcessJob> CreateProcessJobs(List<IdNamePair> measurements)
{
    var jobs = new List<ProcessJob>();
    foreach (IdNamePair measurement in measurements)
    {
        jobs.Add(new ProcessJob()
        {
            Name = measurement.Name,
            JobWorkId = measurement.Id
        });
    }
    return (jobs);
}
/// <summary>
/// Resets the posted job to a fresh "Not started"/New state (new code, current user, stamped
/// dates), persists it through the extraction service, and returns the saved job.
/// </summary>
public HttpResponseMessage UpdateProcessJob(HttpRequestMessage request, [FromBody] ProcessJob processJobModel)
{
    return GetHttpResponse(
        request,
        () =>
        {
            // Server-side bookkeeping overrides whatever the client posted.
            processJobModel.Code = UniqueKeyGenerator.RNGCharacterMask(6, 8);
            processJobModel.UserName = User.Identity.Name;
            processJobModel.StartDate = DateTime.Now;
            processJobModel.EndDate = DateTime.Now;
            processJobModel.Remark = "Not started";
            processJobModel.Status = PackageStatus.New;

            ProcessJob updatedJob = _ExtractionProcessService.UpdateProcessJob(processJobModel);
            return request.CreateResponse<ProcessJob>(HttpStatusCode.OK, updatedJob);
        });
}
/// <summary>
/// Schedules ProcessJob to enqueue entities for removal, then RemoveJob to drain the queue
/// into a BeginSimulation command buffer.
/// </summary>
/// <param name="inputDeps">Incoming dependency handle.</param>
/// <returns>Handle of the drain job.</returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    var commandBufferSystem = World.GetExistingManager<BeginSimulationEntityCommandBufferSystem>();

    // Producer: concurrent writer into the remove queue.
    inputDeps = new ProcessJob
    {
        RemoveQueue = m_RemoveQueue.ToConcurrent()
    }.Schedule(this, inputDeps);

    // Consumer: single-threaded drain into the command buffer, after the producer.
    inputDeps = new RemoveJob
    {
        RemoveQueue = m_RemoveQueue,
        CommandBuffer = commandBufferSystem.CreateCommandBuffer()
    }.Schedule(inputDeps);

    commandBufferSystem.AddJobHandleForProducer(inputDeps);
    return (inputDeps);
}
/// <summary>
/// Processes the job: downloads the source object from S3 to a temp file, then streams and
/// flattens each PGN game it contains.
/// </summary>
/// <param name="job">The job to process (source location, dataset id, S3 client).</param>
/// <returns>An awaitable task.</returns>
public async Task ProcessAsync(ProcessJob job)
{
    var util = new TransferUtility(job.S3);
    string tmpPath = Path.GetTempFileName();
    try
    {
        await util.DownloadAsync(new TransferUtilityDownloadRequest()
        {
            BucketName = job.SourceBucketName,
            Key = job.SourceKey,
            FilePath = tmpPath,
        });

        using var stream = File.OpenRead(tmpPath);
        using var pgnGameStream = new PgnGameStream(stream);
        while (!pgnGameStream.EndOfStream)
        {
            var nextGame = pgnGameStream.ParseNextGame();
            // TODO(review): `game` and `rows` are computed but never written anywhere —
            // confirm whether an upload/persist step is missing here.
            var (game, rows) = Flattener.FlattenPgnGame(nextGame, job.DatasetId);
        }
    }
    finally
    {
        // FIX: GetTempFileName creates the file on disk; it previously leaked on every call
        // (and on any exception). The using declarations above release the stream before
        // this finally runs, so deletion is safe.
        if (File.Exists(tmpPath))
        {
            File.Delete(tmpPath);
        }
    }
}
/// <summary>
/// Starts <paramref name="execName"/> with the given arguments, optionally attaching it to a
/// job handle and beginning async output/error reads, then returns the running process.
/// </summary>
/// <param name="execName">Executable to start.</param>
/// <param name="arguments">Command-line arguments.</param>
/// <param name="settings">Run settings; supplies the job handle and OnStart callback.</param>
/// <returns>The started process, or null when Process.Start reports failure.</returns>
/// <exception cref="InvalidOperationException">Wraps any Win32Exception raised during start.</exception>
public static Process Run([NotNull] string execName, string arguments, [NotNull] RunSettings settings)
{
    // CreateForRun decides (via settings) whether the std streams are redirected.
    Process process = CreateForRun(execName, arguments, settings, out bool redirectOutput, out bool redirectError);
    bool result = false;
    try
    {
        result = process.Start();
        if (!result)
        {
            return (null);
        }
        // Attach the new process to the caller-supplied job handle when one is provided.
        if (!settings.JobHandle.IsInvalidHandle())
        {
            ProcessJob.AddProcess(settings.JobHandle, process);
        }
        if (redirectOutput)
        {
            process.BeginOutputReadLine();
        }
        if (redirectError)
        {
            process.BeginErrorReadLine();
        }
        settings.OnStart?.Invoke(execName, process.StartTime);
        return (process);
    }
    catch (Win32Exception e)
    {
        throw new InvalidOperationException(e.CollectMessages(), e);
    }
    finally
    {
        // Dispose the Process object only when startup failed; on success the caller owns it.
        if (!result)
        {
            ObjectHelper.Dispose(ref process);
        }
    }
}
/// <summary>
/// Checks whether this service's job was flagged for cancellation; if so, cancels all of its
/// not-yet-finished triggers, marks the job Stopped, and stops the service.
/// </summary>
private void CancelProcess()
{
    ProcessJob job = null;
    try
    {
        job = _dataManager.GetProcessJob(_connectionString, _serviceName);
        if (job != null)
        {
            if (job.Status == PackageStatus.Cancel)
            {
                var processTriggers = _dataManager.GetProcessTriggers(_connectionString, job.ProcessJobId);
                foreach (var processTrigger in processTriggers)
                {
                    // Only triggers that have not finished yet are cancelled.
                    if (processTrigger.Status == PackageStatus.New || processTrigger.Status == PackageStatus.Pending || processTrigger.Status == PackageStatus.Running)
                    {
                        // Trigger -> Cancel
                        processTrigger.Status = PackageStatus.Cancel;
                        processTrigger.Remark = "Package has been canceled...";
                        _dataManager.UpdateProcessTrigger(_connectionString, processTrigger);
                    }
                }
                // Job -> Stop (note: despite the original comment this is Stop, not Done).
                job.Status = PackageStatus.Stop;
                job.Remark = "Job has been canceled...";
                _dataManager.UpdateProcessJob(_connectionString, job);
                Log.WriteErrorLog("Job: " + _serviceName + " canceled");
                // Stop the hosting service once cancellation has been persisted.
                this.Stop();
            }
        }
    }
    catch (Exception ex)
    {
        Log.WriteErrorLog(ex);
    }
}
/// <summary>
/// Strips Target from dead entities, then schedules ProcessJob to enqueue targets to remove
/// and RemoveTargetJob to drain the queue into a BeginSimulation command buffer.
/// </summary>
/// <param name="inputDeps">Incoming dependency handle.</param>
/// <returns>Handle of the drain job.</returns>
protected override JobHandle OnUpdate(JobHandle inputDeps)
{
    EntityManager.RemoveComponent(m_DeadGroup, ComponentType.ReadWrite<Target>());
    var commandBufferSystem = World.GetExistingManager<BeginSimulationEntityCommandBufferSystem>();

    // FIX: ProcessJob was scheduled with Schedule(this), silently dropping the incoming
    // dependency chain; pass inputDeps so it runs after upstream jobs, matching every
    // sibling system in this file.
    inputDeps = new ProcessJob
    {
        RemoveQueue = m_RemoveQueue.ToConcurrent(),
        DeadFromEntity = GetComponentDataFromEntity<Dead>(true),
        TranslationFromEntity = GetComponentDataFromEntity<Translation>(true)
    }.Schedule(this, inputDeps);

    // Drain the queue after the producer completes.
    inputDeps = new RemoveTargetJob
    {
        RemoveQueue = m_RemoveQueue,
        CommandBuffer = commandBufferSystem.CreateCommandBuffer()
    }.Schedule(inputDeps);

    commandBufferSystem.AddJobHandleForProducer(inputDeps);
    return (inputDeps);
}
/// <summary>
/// Complex solver: executes the tree's start node, then each child subtree in turn, returning
/// the first non-null node produced by a child execution.
/// </summary>
internal static ComponentNode SolveComplexComponentTree(this ComponentSolver solver, List<ComponentNode> paramList, ComponentNode startNode, ICollection<ExtraProcessInfo> extraProcessInfos, INotificationCallback callback, ProcessBatch batch, ProcessGroup group, ProcessJob job)
{
    // The start node runs first and yields the children to execute.
    IEnumerable<ComponentNode> childNodes = ExcuteComplexStartNode(paramList, startNode, extraProcessInfos, callback, batch, group, job);

    ComponentNode firstResult = null;
    foreach (ComponentNode child in childNodes)
    {
        // Every child is executed for its side effects, even after a result is captured.
        ComponentNode executed = ExcuteComplexComponentNode(paramList, child, callback, batch, group, job);
        if (firstResult == null)
        {
            firstResult = executed;
        }
    }
    return (firstResult);
}
/// <summary>
/// Forwards the job update through the service channel and returns the updated job unchanged.
/// </summary>
public ProcessJob UpdateProcessJob(ProcessJob processJob)
{
    ProcessJob updated = Channel.UpdateProcessJob(processJob);
    return (updated);
}
/// <summary>
/// Loads and synchronously executes the SSIS package for <paramref name="processes"/>,
/// recording status, remark, and duration on <paramref name="processTrigger"/>.
/// </summary>
/// <param name="processes">Process definition supplying the package path/name and title.</param>
/// <param name="job">Owning job (not referenced here; kept for signature parity).</param>
/// <param name="processTrigger">Trigger row updated to Running then Done/Fail.</param>
private void RunPackage(Processes processes, ProcessJob job, ProcessTrigger processTrigger)
{
    var packagePath = processes.PackagePath + processes.PackageName + ".dtsx";
    dts.Package package = _app.LoadPackage(packagePath, null);
    Log.WriteErrorLog(string.Format("Starting package execution for {0}---{1}.", _serviceName, processes.Title));

    // Trigger -> Running before the (blocking) package execution.
    processTrigger.Status = PackageStatus.Running;
    processTrigger.Remark = "Package currently running...";
    _dataManager.UpdateProcessTrigger(_connectionString, processTrigger);

    var result = package.Execute();
    if (result == dts.DTSExecResult.Success)
    {
        // ExecutionDuration is reported in milliseconds; convert to whole seconds.
        int ssisDuration = package.ExecutionDuration / 1000;
        var statusMessage = string.Format("Package execution for {0}-{1} successfull.", _serviceName, processes.Title);
        try
        {
            // When this is the watched process and Count() reports outstanding work, note that
            // the next process cannot proceed; otherwise record a plain success.
            if (_currentProcess == Convert.ToInt32(processId) && Count() != 0)
            {
                processTrigger.Remark = string.Format("Package {0}-{1} successfully executed,however cannot proceed with the next process ", _serviceName, processes.Title) + "Duration- HH:MM:SS: (" + HHMMSS(ssisDuration) + ")";
                processTrigger.Status = PackageStatus.Done;
            }
            else
            {
                processTrigger.Remark = string.Format("Package {0}-{1} successfully executed: ", _serviceName, processes.Title) + "Duration- HH:MM:SS: (" + HHMMSS(ssisDuration) + ")";
                processTrigger.Status = PackageStatus.Done;
            }

            // The package reports per-operation detail via its "Message" variable, formatted
            // as "<operation>/<status>/<detail1>|<detail2>|...".
            var message = package.Variables["Message"].Value.ToString();
            if (!string.IsNullOrEmpty(message))
            {
                char firstLevelSeparator = '/';
                char secondLevelSeparator = '|';
                var firstLevels = message.Split(firstLevelSeparator);
                var secondLevels = firstLevels[2].Split(secondLevelSeparator);
                if (firstLevels[1] == "Failed")
                {
                    statusMessage = string.Format("Package execution for {0}-{1} for operation {2} Fail.", _serviceName, processes.Title, firstLevels[0]) + "Duration- HH:MM:SS: (" + HHMMSS(ssisDuration) + ")";
                    // BUG FIX: the literal two-character string "/n" was appended where the
                    // newline escape "\n" was intended, so remarks rendered with stray "/n".
                    processTrigger.Remark = string.Format("Package {0}-{1} for operation {2} execution failed: ", _serviceName, processes.Title, firstLevels[0]) + "Duration- HH:MM:SS: (" + HHMMSS(ssisDuration) + ")\n";
                    foreach (var s in secondLevels)
                    {
                        processTrigger.Remark += s + "\n";
                    }
                    processTrigger.Status = PackageStatus.Fail;
                    Log.WriteErrorLog(processTrigger.Remark);
                }
                else
                {
                    statusMessage = string.Format("Package execution for {0}-{1} for operation {2} successful.", _serviceName, processes.Title, firstLevels[0]) + "Duration- HH:MM:SS: (" + HHMMSS(ssisDuration) + ")";
                    processTrigger.Remark = string.Format("Package {0}-{1} for operation {2} execution successful: ", _serviceName, processes.Title, firstLevels[0]) + "Duration- HH:MM:SS: (" + HHMMSS(ssisDuration) + ")";
                    foreach (var s in secondLevels)
                    {
                        processTrigger.Remark += s;
                    }
                    processTrigger.Status = PackageStatus.Done;
                    Log.WriteErrorLog(processTrigger.Remark);
                }
            }
            else
            {
                Log.WriteErrorLog("Message variable is empty.");
            }
        }
        catch (Exception ex)
        {
            // Message parsing is best-effort; the trigger keeps its success state on error.
            Log.WriteErrorLog(ex.Message);
        }
        Log.WriteErrorLog(statusMessage);
        // Persist the final trigger state.
        _dataManager.UpdateProcessTrigger(_connectionString, processTrigger);
    }
    else
    {
        // Package-level failure: concatenate all SSIS error descriptions into the remark.
        string errorMessage = string.Empty;
        foreach (var error in package.Errors)
        {
            errorMessage += error.Description;
        }
        Log.WriteErrorLog(string.Format("Package execution for {0}-{1} failed.", _serviceName, processes.Title));
        processTrigger.Status = PackageStatus.Fail;
        processTrigger.Remark = string.Format("Package {0}-{1} failed.", _serviceName, processes.Title) + "\n" + errorMessage;
        _dataManager.UpdateProcessTrigger(_connectionString, processTrigger);
    }
}
/// <summary>
/// Main job loop: fetches this service's job, executes each New trigger (SSIS package, stored
/// procedure, or test mode), then marks the job Done. On any exception, remaining triggers are
/// cancelled and the job is marked Fail.
/// </summary>
private void RunProcess()
{
    ProcessJob job = null;
    // NOTE(review): testMode is never read — dead local.
    bool testMode = false;
    //get job
    try
    {
        //string processId = ConfigurationManager.AppSettings["ProcessId"];
        job = _dataManager.GetProcessJob(_connectionString, _serviceName);
        if (job != null)
        {
            Log.WriteErrorLog("Processing for job: " + job.Code);
            var processTriggers = _dataManager.GetProcessTriggers(_connectionString, job.ProcessJobId);
            foreach (var processTrigger in processTriggers)
            {
                // Only triggers still in the New state are executed.
                if (processTrigger.Status == PackageStatus.New)
                {
                    _currentTrigger = processTrigger.ProcessTriggerId;
                    _currentProcess = processTrigger.ProcessId;
                    var process = _dataManager.GetProcess(_connectionString, _currentProcess);
                    if (process != null)
                    {
                        if (process.PackageName != "TestMode")
                        {
                            // Dispatch on run type: SSIS package vs stored procedure.
                            if (process.RunType == PackageRunType.Package)
                            {
                                RunPackage(process, job, processTrigger);
                            }
                            else
                            {
                                RunProcedure(process, job, processTrigger);
                            }
                            //if (_currentProcess == Convert.ToInt32(processId))
                            //{
                            //    _redcount = Count();
                            //}
                            //&& Count() != 0
                            //if (_currentProcess == Convert.ToInt32(processId) && _redcount != 0)
                            // Bail out of the trigger loop early when the watched process
                            // still has outstanding work.
                            if (_currentProcess == Convert.ToInt32(processId) && Count() != 0)
                            //{
                            //    _redcount = Count();
                            //}
                            //if (_redcount != 0)
                            {
                                Log.WriteErrorLog(string.Format("Go to kill processs."));
                                goto KILLPROCESS;
                            }
                        }
                        else
                        {
                            //Test Mode Operation
                            Log.WriteErrorLog(string.Format("Starting test mode execution for {0}-{1}.", _serviceName, process.Title));
                            //Update tigger to Running
                            processTrigger.Status = PackageStatus.Running;
                            processTrigger.Remark = "Test Mode currently running...";
                            _dataManager.UpdateProcessTrigger(_connectionString, processTrigger);
                            // NOTE(review): `counter` is an int compared against the long
                            // literal 200000000000 (> int.MaxValue), so the condition is
                            // always false and this loop runs exactly once — presumably it
                            // was meant to be a busy-wait; confirm intent.
                            int counter = 0;
                            do
                            {
                                counter += 1;
                            } while (counter == 200000000000);
                            processTrigger.Remark = string.Format("Test Mode process {0}-{1} successfully executed: ", _serviceName, process.Title);
                            processTrigger.Status = PackageStatus.Done;
                            _dataManager.UpdateProcessTrigger(_connectionString, processTrigger);
                        }
                    }
                }
            }
            // Landing point for the early bail-out above; also reached by normal fall-through.
            KILLPROCESS:
            // NOTE(review): both branches below are identical except for the log suffix, and
            // _redcount is only assigned in commented-out code — likely vestigial.
            if (_redcount != 0)
            {
                //Update job to done
                job.Status = PackageStatus.Done;
                job.Remark = "Job processing completed...";
                _dataManager.UpdateProcessJob(_connectionString, job);
                Log.WriteErrorLog("Job: " + _serviceName + " stop");
            }
            else
            {
                //Update job to done
                job.Status = PackageStatus.Done;
                job.Remark = "Job processing completed...";
                _dataManager.UpdateProcessJob(_connectionString, job);
                Log.WriteErrorLog("Job: " + _serviceName + " done");
            }
        }
    }
    catch (Exception ex)
    {
        Log.WriteErrorLog(ex);
        if (job != null)
        {
            // Cancel every trigger that has not finished, then fail the job.
            var processTriggers = _dataManager.GetProcessTriggers(_connectionString, job.ProcessJobId);
            foreach (var processTrigger in processTriggers)
            {
                if (processTrigger.Status == PackageStatus.New || processTrigger.Status == PackageStatus.Pending || processTrigger.Status == PackageStatus.Running)
                {
                    //Update tigger to Cancel
                    processTrigger.Status = PackageStatus.Cancel;
                    processTrigger.Remark = "Package has been canceled..." + ex.Message;
                    _dataManager.UpdateProcessTrigger(_connectionString, processTrigger);
                }
            }
            //Update job to fail
            job.Status = PackageStatus.Fail;
            job.Remark = "Job processing fail...";
            _dataManager.UpdateProcessJob(_connectionString, job);
        }
    }
}
/// <summary>Persists the given job through the job repository.</summary>
public void UpdateJob(ProcessJob job) => _jobRepository.Update(job);
/// <summary>
/// Runs <paramref name="execName"/> with redirected stdout/stderr, captures both streams into
/// a <see cref="RunOutput"/>, and waits for exit — optionally racing against
/// <paramref name="awaitableHandle"/> for early abort.
/// </summary>
/// <param name="execName">Executable to start.</param>
/// <param name="arguments">Command-line arguments.</param>
/// <param name="settings">Run settings; redirection flags are forced on here.</param>
/// <param name="awaitableHandle">Optional abort handle; when signaled first, returns null.</param>
/// <returns>The captured output, or null when the process failed to start or was aborted.</returns>
/// <exception cref="InvalidOperationException">Wraps any Win32Exception raised during start.</exception>
public static RunOutput RunAndGetOutput([NotNull] string execName, string arguments, RunSettingsBase settings, WaitHandle awaitableHandle)
{
    settings ??= RunSettingsBase.Default;
    // Both streams must be redirected for the async readers below.
    settings.RedirectOutput = true;
    settings.RedirectError = true;
    RunOutput output = new RunOutput();

    using (Process process = CreateForRun(execName, arguments, settings))
    {
        bool processReallyExited = false;
        process.Exited += (sender, _) =>
        {
            Process p = (Process)sender;
            if (p.IsAssociated())
            {
                try
                {
                    output.ExitTime = p.ExitTime;
                    output.ExitCode = p.ExitCode;
                }
                catch
                {
                    // ignored: exit info is best-effort when the handle is already gone
                }
            }
            processReallyExited = true;
            settings.OnExit?.Invoke(execName, output.ExitTime, output.ExitCode);
        };

        try
        {
            bool result = process.Start();
            if (!result)
            {
                return (null);
            }
            if (!settings.JobHandle.IsInvalidHandle())
            {
                ProcessJob.AddProcess(settings.JobHandle, process);
            }
            output.StartTime = process.StartTime;
            settings.OnStart?.Invoke(execName, output.StartTime);

            // Async readers append to the per-stream buffers and the combined builder.
            AsyncStreamReader outputReader = new AsyncStreamReader(process, process.StandardOutput.BaseStream, data =>
            {
                if (data == null)
                {
                    return;
                }
                output.Output.Append(data);
                output.OutputBuilder.Append(data);
            }, process.StandardOutput.CurrentEncoding);
            outputReader.BeginRead();

            // FIX: the error reader previously used StandardOutput.CurrentEncoding; use the
            // error stream's own encoding so stderr text is decoded correctly.
            AsyncStreamReader errorReader = new AsyncStreamReader(process, process.StandardError.BaseStream, data =>
            {
                if (data == null)
                {
                    return;
                }
                output.Error.Append(data);
                output.OutputBuilder.Append(data);
            }, process.StandardError.CurrentEncoding);
            errorReader.BeginRead();

            // No abort handle: plain blocking wait.
            if (!awaitableHandle.IsAwaitable())
            {
                process.WaitForExit();
                return (output);
            }

            SafeWaitHandle waitHandle = null;
            ManualResetEvent processFinishedEvent = null;
            try
            {
                waitHandle = new SafeWaitHandle(process.Handle, false);
                if (!waitHandle.IsAwaitable())
                {
                    return (null);
                }
                processFinishedEvent = new ManualResetEvent(false) { SafeWaitHandle = waitHandle };
                if (!awaitableHandle.IsAwaitable())
                {
                    return (null);
                }
                // Race process completion against the abort handle.
                WaitHandle[] waitHandles = { processFinishedEvent, awaitableHandle };
                int ndx = waitHandles.WaitAny();
                if (ndx != 0)
                {
                    return (null);
                }
                // Process handle signaled first; give the Exited event a short grace period.
                if (!processReallyExited && process.IsAwaitable())
                {
                    if (!process.WaitForExit(TimeSpanHelper.HALF))
                    {
                        ndx = -1;
                    }
                }
                process.Die();
                return (ndx != 0 ? null : output);
            }
            finally
            {
                processFinishedEvent?.Close();
                ObjectHelper.Dispose(ref processFinishedEvent);
                waitHandle?.Close();
                ObjectHelper.Dispose(ref waitHandle);
            }
        }
        catch (Win32Exception e)
        {
            throw new InvalidOperationException(e.CollectMessages(), e);
        }
    }
}
/// <summary>
/// Creates a report for <paramref name="processJob"/> with thread safety disabled.
/// </summary>
public DataCollectionReport(ProcessJob processJob) : this(processJob, false)
{
}
/// <summary>
/// Shell-executes <paramref name="fileName"/> and blocks until the process exits, or until
/// <paramref name="awaitableHandle"/> (when awaitable) is signaled first.
/// </summary>
/// <param name="fileName">Executable/document to shell-execute; trimmed before use.</param>
/// <param name="arguments">Command-line arguments.</param>
/// <param name="settings">Shell settings; defaults applied when null.</param>
/// <param name="awaitableHandle">Optional abort handle racing against process completion.</param>
/// <returns>true when the process ran to completion; false on launch failure or abort.</returns>
/// <exception cref="ArgumentNullException">fileName is null, empty, or whitespace.</exception>
public static bool ShellExecAndWaitFor([NotNull] string fileName, string arguments, ShellSettings settings, WaitHandle awaitableHandle)
{
    // FIX: fileName.Trim() previously ran BEFORE validation, so a null argument raised
    // NullReferenceException instead of the intended ArgumentNullException.
    if (string.IsNullOrWhiteSpace(fileName))
    {
        throw new ArgumentNullException(nameof(fileName));
    }
    fileName = fileName.Trim();
    settings ??= ShellSettings.Default;
    SHELLEXECUTEINFO info = GetShellExecuteInfo(fileName, arguments, settings);
    using (Process process = InternalShellExec(info))
    {
        if (process == null)
        {
            return (false);
        }
        if (!settings.JobHandle.IsInvalidHandle())
        {
            ProcessJob.AddProcess(settings.JobHandle, process);
        }
        // No abort handle: plain blocking wait counts as success.
        if (!awaitableHandle.IsAwaitable())
        {
            process.WaitForExit();
            return (true);
        }
        bool processReallyExited = false;
        process.Exited += (_, _) => processReallyExited = true;
        SafeWaitHandle waitHandle = null;
        ManualResetEvent processFinishedEvent = null;
        try
        {
            waitHandle = new SafeWaitHandle(process.Handle, false);
            if (!waitHandle.IsAwaitable())
            {
                return (false);
            }
            processFinishedEvent = new ManualResetEvent(false) { SafeWaitHandle = waitHandle };
            if (!awaitableHandle.IsAwaitable())
            {
                return (false);
            }
            // Race process completion against the abort handle.
            WaitHandle[] waitHandles = { processFinishedEvent, awaitableHandle };
            int ndx = waitHandles.WaitAny();
            if (ndx != 0)
            {
                return (false);
            }
            // Process handle signaled first; give the Exited event a short grace period.
            if (!processReallyExited && process.IsAwaitable())
            {
                if (!process.WaitForExit(TimeSpanHelper.HALF))
                {
                    ndx = -1;
                }
            }
            process.Die();
            return (ndx == 0);
        }
        finally
        {
            processFinishedEvent?.Close();
            ObjectHelper.Dispose(ref processFinishedEvent);
            waitHandle?.Close();
            ObjectHelper.Dispose(ref waitHandle);
        }
    }
}
/// <summary>
/// Launches the configured executable inside a job object, waits for it and every job child
/// process to exit, then relaunches Media Center. Raises lifecycle events throughout.
/// </summary>
/// <param name="state">Thread-pool state argument (unused).</param>
private void Execute(object state)
{
    this.OnExecutionStarted();
    try
    {
        // Optionally minimize the Media Center render window before launching.
        if (this.execInfo.MinimizeMediaCenter)
        {
            IntPtr window = Advent.VmcExecute.NativeMethods.FindWindow("eHome Render Window", (string)null);
            if (window != IntPtr.Zero)
            {
                Advent.VmcExecute.NativeMethods.ShowWindow(window, Advent.VmcExecute.NativeMethods.WindowShowStyle.Minimize);
            }
            else
            {
                Trace.TraceWarning("Could not find ehome window.");
            }
        }
        // 32-bit OS only: wait for exclusive DirectX access when the target needs it.
        if (this.execInfo.RequiresDirectX && !ExecutionEngine.Is64BitOs())
        {
            ExecutionEngine.WaitForDirectXExclusive();
        }
        using (ProcessJob processJob = new ProcessJob())
        {
            this.ProcessJob = processJob;
            this.psi.WorkingDirectory = Path.GetDirectoryName(this.psi.FileName);
            this.psi.UseShellExecute = true;
            Trace.TraceInformation("Launching \"{0}\" {1}", (object)this.psi.FileName, (object)this.psi.Arguments);
            this.ExecutingProcess = Process.Start(this.psi);
            if (this.ExecutingProcess != null)
            {
                // Assigning to the job is best-effort; failure is logged but not fatal.
                try
                {
                    this.ProcessJob.AssignProcess(this.ExecutingProcess);
                }
                catch (Exception ex)
                {
                    Trace.TraceError(ex.ToString());
                }
                this.OnProcessStarted();
                // Hook the keyboard only for the lifetime of the main process.
                if (this.RequiresKeyboardHook)
                {
                    this.hook.KeyDown += new KeyEventHandler(this.HookKeyDown);
                }
                this.ExecutingProcess.WaitForExit();
                if (this.RequiresKeyboardHook)
                {
                    this.hook.KeyDown -= new KeyEventHandler(this.HookKeyDown);
                }
                // Keep sweeping the job until no member process is still running; the flag is
                // reset whenever a live process is found so the sweep restarts from scratch.
                bool flag = false;
                while (!flag)
                {
                    flag = true;
                    foreach (Process process in this.ProcessJob.Processes)
                    {
                        if (!process.HasExited)
                        {
                            Trace.TraceInformation(string.Format("Main process exited, waiting for process: {0}({1})", (object)process.ProcessName, (object)process.Id));
                            flag = false;
                            process.WaitForExit();
                        }
                    }
                }
                this.OnProcessExited();
                if (this.ExecutingProcess.ExitCode != 0)
                {
                    Trace.TraceWarning("Process exited with non-zero error code " + (object)this.ExecutingProcess.ExitCode + ".");
                }
            }
            else
            {
                Trace.TraceError("Could not start process, Process.Start returned null.");
            }
        }
    }
    catch (Exception ex)
    {
        this.OnExecutionError(ex);
    }
    // Media Center is relaunched regardless of how execution ended.
    Trace.TraceInformation("Launching Media Center...");
    ExecutionEngine.LaunchMediaCenter(false, false, false);
    this.OnExecutionFinished();
}