/// <summary>
/// Runs a scheduled job: loads its detailed definition, executes it, and
/// always records an end status plus an audit-log entry, even on failure.
/// </summary>
/// <param name="job">The job whose detailed definition will be loaded and started.</param>
private void StartJob(JobBase job)
{
    try
    {
        // Re-load the full job definition by id/type; the incoming instance may be a stub.
        JobBase detailJobInfo = LoadDetailJobInfo(job.JobID, job.JobType);
        detailJobInfo.Start();
    }
    catch (ThreadAbortException)
    {
        // Swallowed deliberately: aborting the scheduler thread is not a job failure.
    }
    catch (Exception ex)
    {
        WriteJobException(job, this.Params.Log, "执行", ex);
    }
    finally
    {
        // Always mark the job finished and write the audit log, even on failure.
        job.SetCurrentJobEndStatus();
        // BUGFIX: the original format string ended with an unbalanced ']' ("...时执行]").
        string logDetail = string.Format("定时任务[{0},{1}]在[{2}]时执行",
            job.Name, job.JobType.ToString(), DateTime.Now.ToString());
        UserOperationLog log = new UserOperationLog()
        {
            ResourceID = job.JobID,
            OperationDateTime = DateTime.Now,
            Subject = "定时任务执行",
            OperationName = job.Name,
            OperationDescription = logDetail
        };
        UserOperationLogAdapter.Instance.Update(log);
    }
}
/// <summary>
/// Step Functions worker: reports workflow progress, claims an activity task,
/// looks up the 'ExtractTechnicalMetadata' job profile and submits an AME job
/// whose completion callback carries the activity task token.
/// </summary>
public async Task<string> Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    // Best-effort progress notification; a failure here must not abort the step.
    try
    {
        var notification = new JobBase
        {
            Status = "RUNNING",
            Progress = 27
        };
        var endpoint = @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>();
        await resourceManager.SendNotificationAsync(notification, endpoint);
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // Claim the activity task; its token is needed for the job's callback URL.
    var stepFunction = new AmazonStepFunctionsClient();
    Logger.Debug($"Getting Activity Task with ARN {ACTIVITY_ARN}...");
    var activityTask = await stepFunction.GetActivityTaskAsync(new GetActivityTaskRequest
    {
        ActivityArn = ACTIVITY_ARN
    });

    var taskToken = activityTask.TaskToken;
    if (taskToken == null)
    {
        throw new Exception("Failed to obtain activity task");
    }
    Logger.Debug($"Activity Task token is {taskToken}");

    // The activity input supersedes the lambda input.
    @event = JToken.Parse(activityTask.Input);

    Logger.Debug($"Getting job profile 'ExtractTechnicalMetadata'...");
    var profiles = await resourceManager.GetAsync<JobProfile>(("name", "ExtractTechnicalMetadata"));
    var profileId = profiles?.FirstOrDefault()?.Id;
    if (profileId == null)
    {
        throw new Exception("JobProfile 'ExtractTechnicalMetadata' not found");
    }

    var ameJob = new AmeJob
    {
        JobProfile = profileId,
        JobInput = new JobParameterBag
        {
            ["inputFile"] = @event["data"]["repositoryFile"],
            ["outputLocation"] = new S3Locator
            {
                AwsS3Bucket = TEMP_BUCKET,
                AwsS3KeyPrefix = "AmeJobResults/"
            }
        },
        NotificationEndpoint = new NotificationEndpoint
        {
            HttpEndpoint = ACTIVITY_CALLBACK_URL + "?taskToken=" + Uri.EscapeDataString(taskToken)
        }
    };

    Logger.Debug($"Submitting AME job...");
    ameJob = await resourceManager.CreateAsync(ameJob);
    Logger.Debug($"Successfully created AME job {ameJob.Id}.");

    return ameJob.Id;
}
/// <summary>
/// Waits (optionally up to a timeout) until all supplied backup jobs leave the
/// in-progress state, polling every 30 seconds, then writes the final job
/// objects to the pipeline.
/// </summary>
public override void ExecuteCmdlet()
{
    ExecutionBlock(() =>
    {
        base.ExecuteCmdlet();

        ResourceIdentifier resourceIdentifier = new ResourceIdentifier(VaultId);
        string vaultName = resourceIdentifier.ResourceName;
        string resourceGroupName = resourceIdentifier.ResourceGroupName;

        List<string> jobsToWaitOn = new List<string>();

        // BUGFIX: jobs that completed early used to be dropped from the output,
        // because the result list was rebuilt from the (shrinking) wait list on
        // every polling pass. Completed jobs are now accumulated separately and
        // re-added to the result on each pass.
        List<JobBase> finishedJobs = new List<JobBase>();
        List<JobBase> finalJobs = new List<JobBase>();

        // Accept a single job, a typed list of jobs, or an object[] of jobs.
        object castedObj;
        if (GetCastedObjFromPSObj<JobBase>(Job, out castedObj))
        {
            JobBase justJob = castedObj as JobBase;
            jobsToWaitOn.Add(justJob.JobId);
        }
        else if (GetCastedObjFromPSObj<List<JobBase>>(Job, out castedObj))
        {
            List<JobBase> jobsList = castedObj as List<JobBase>;
            foreach (var job in jobsList)
            {
                jobsToWaitOn.Add(job.JobId);
            }
        }
        else if (Job.GetType() == typeof(object[]))
        {
            object[] castedJobsList = Job as object[];
            object castedJob;
            foreach (var job in castedJobsList)
            {
                if (GetCastedObjFromPSObj<JobBase>(job, out castedJob))
                {
                    jobsToWaitOn.Add((castedJob as JobBase).JobId);
                }
                else
                {
                    throw new Exception(string.Format(Resources.JobWaitJobInvalidInput, Job.GetType().FullName));
                }
            }
        }
        else
        {
            // not a valid object. throw exception.
            throw new Exception(string.Format(Resources.JobWaitJobInvalidInput, Job.GetType().FullName));
        }

        // now wait until timeout happens or all jobs complete execution
        DateTime waitBeginning = DateTime.UtcNow;
        while (true)
        {
            if (Timeout.HasValue)
            {
                if (DateTime.UtcNow.Subtract(waitBeginning) >= TimeSpan.FromSeconds(Timeout.Value))
                {
                    break;
                }
            }

            bool hasUnfinishedJob = false;
            finalJobs.Clear();
            finalJobs.AddRange(finishedJobs);
            for (int i = 0; i < jobsToWaitOn.Count; i++)
            {
                string jobId = jobsToWaitOn[i];
                var updatedJob = JobConversions.GetPSJob(
                    ServiceClientAdapter.GetJob(
                        jobId,
                        vaultName: vaultName,
                        resourceGroupName: resourceGroupName));
                if (IsJobInProgress(updatedJob))
                {
                    hasUnfinishedJob = true;
                }
                else
                {
                    // remove the finished job from the polling list, but keep
                    // it so it still appears in the final output
                    jobsToWaitOn.RemoveAt(i);
                    i--;
                    finishedJobs.Add(updatedJob);
                }
                finalJobs.Add(updatedJob);
            }

            if (!hasUnfinishedJob)
            {
                break;
            }

            // sleep for 30 seconds before checking again (skipped in playback/mock test runs)
            string testMode = Environment.GetEnvironmentVariable("AZURE_TEST_MODE");
            if (String.Compare(testMode, "Playback", StringComparison.OrdinalIgnoreCase) != 0 &&
                !TestMockSupport.RunningMocked)
            {
                Thread.Sleep(30000);
            }
        }

        WriteObject(finalJobs, enumerateCollection: true);
    });
}
/// <summary>
/// Validates the workflow input, resolves the referenced BMContent/BMEssence,
/// and returns the public website S3 locator of the media file that the AI
/// workflows will consume.
/// </summary>
public async Task<S3Locator> Handler(JToken @event, ILambdaContext context)
{
    if (@event == null)
    {
        throw new Exception("Missing workflow input");
    }

    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    // Best-effort progress notification; failures are logged and ignored.
    try
    {
        var notification = new JobBase
        {
            Status = "RUNNING",
            Progress = 0
        };
        await resourceManager.SendNotificationAsync(
            notification,
            @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // check the input and return mediaFileLocator which service as input for the AI workflows
    var input = @event["input"];
    if (input == null)
    {
        throw new Exception("Missing workflow input");
    }
    if (input["bmContent"] == null)
    {
        throw new Exception("Missing input.bmContent");
    }
    if (input["bmEssence"] == null)
    {
        throw new Exception("Missing input.bmEssence");
    }

    var bmContent = await resourceManager.ResolveAsync<BMContent>(input["bmContent"].Value<string>());
    var bmEssence = await resourceManager.ResolveAsync<BMEssence>(input["bmEssence"].Value<string>());

    Logger.Debug(bmContent.ToMcmaJson().ToString());
    Logger.Debug(bmEssence.ToMcmaJson().ToString());

    // find the media locator in the website bucket with public httpEndpoint
    var mediaFileLocator = bmEssence.Locations
        .OfType<S3Locator>()
        .FirstOrDefault(l => l.AwsS3Bucket == WEBSITE_BUCKET);
    if (mediaFileLocator == null)
    {
        throw new Exception("No suitable Locator found on bmEssence");
    }
    if (string.IsNullOrWhiteSpace(mediaFileLocator.HttpEndpoint))
    {
        throw new Exception("Media file Locator does not have an httpEndpoint");
    }

    return mediaFileLocator;
}
//private static object registerJobLocker = new object();
/// <summary>
/// Registers a job with the global job list.
/// NOTE(review): s_AllJobs is mutated without locking (the locker above is
/// commented out) — presumably all registrations happen during single-threaded
/// startup; confirm before calling concurrently.
/// </summary>
/// <param name="job">The job instance to register.</param>
public static void RegisterJob(JobBase job)
{
    s_AllJobs.Add(job);
}
/// <summary>
/// Checks whether a job has reached its next scheduled execution time,
/// based on its schedule type and the time it last ran.
/// </summary>
/// <param name="job">The job to check.</param>
/// <returns>True when the job is due to run now; otherwise false.</returns>
private static bool IsExecuteTime(JobBase job)
{
    DateTime executeTime;
    switch (job.TimeType)
    {
        case TimeType.Interval:
            // Due once the configured number of seconds has elapsed since the last run.
            if (job.LastExecuteTime.AddSeconds(job.IntervalSeconds) <= DateTimeUtil.Now)
                return true;
            break;
        case TimeType.Hour:
            // Due at the configured minute/second, one hour after the last run's slot.
            executeTime = new DateTime(job.LastExecuteTime.Year, job.LastExecuteTime.Month, job.LastExecuteTime.Day, job.LastExecuteTime.Hour, job.ExecuteTime.Minutes, job.ExecuteTime.Seconds);
            if (executeTime.AddHours(1) <= DateTimeUtil.Now)
                return true;
            break;
        case TimeType.Day:
            // Due at the configured time of day, one day after the last run's slot.
            executeTime = new DateTime(job.LastExecuteTime.Year, job.LastExecuteTime.Month, job.LastExecuteTime.Day, job.ExecuteTime.Hours, job.ExecuteTime.Minutes, job.ExecuteTime.Seconds);
            if (executeTime.AddDays(1) <= DateTimeUtil.Now)
                return true;
            break;
        case TimeType.Week:
            //job.LastExecuteTime.DayOfWeek
            //executeTime = new DateTime(job.LastExecuteTime.Year, job.LastExecuteTime.Month, job.LastExecuteTime.Day, job.ExecuteTime.Hours, job.ExecuteTime.Minutes, job.ExecuteTime.Seconds);
            // Due on the configured weekday at the configured time, unless it already ran today.
            if (DateTimeUtil.Now.DayOfWeek == job.DayOfWeek)
            {
                if (job.LastExecuteTime.Year == DateTimeUtil.Now.Year && job.LastExecuteTime.Month == DateTimeUtil.Now.Month && job.LastExecuteTime.Day == DateTimeUtil.Now.Day)
                {
                    // Already executed today — not due again.
                }
                else
                {
                    executeTime = new DateTime(DateTimeUtil.Now.Year, DateTimeUtil.Now.Month, DateTimeUtil.Now.Day, job.ExecuteTime.Hours, job.ExecuteTime.Minutes, job.ExecuteTime.Seconds);
                    if (executeTime <= DateTimeUtil.Now)
                        return true;
                }
            }
            break;
        case TimeType.Month:
            // Due on the configured day-of-month/time, one month after the last run's slot.
            // NOTE(review): an ExecuteTime.Days beyond the month's length would make the
            // DateTime constructor throw — confirm inputs are validated upstream.
            executeTime = new DateTime(job.LastExecuteTime.Year, job.LastExecuteTime.Month, job.ExecuteTime.Days, job.ExecuteTime.Hours, job.ExecuteTime.Minutes, job.ExecuteTime.Seconds);
            if (executeTime.AddMonths(1) <= DateTimeUtil.Now)
                return true;
            break;
        case TimeType.Year:
            // Due on the configured month/day/time, one year after the last run's slot.
            executeTime = new DateTime(job.LastExecuteTime.Year, job.Month, job.ExecuteTime.Days, job.ExecuteTime.Hours, job.ExecuteTime.Minutes, job.ExecuteTime.Seconds);
            if (executeTime.AddYears(1) <= DateTimeUtil.Now)
                return true;
            break;
    }
    return false;
}
/// <summary>
/// Prepares the working directory for a WebJob run: either runs in place
/// (the default for Node script hosts) or copies the job binaries to a fresh
/// temp directory, skipping the copy when a hash shows nothing changed.
/// </summary>
/// <param name="job">The job whose binaries are being cached.</param>
/// <param name="logger">Logger for warnings/errors during caching.</param>
private void CacheJobBinaries(JobBase job, IJobLogger logger)
{
    // Node scripts default to running in place (no copy step needed).
    bool isInPlaceDefault = job.ScriptHost.GetType() == typeof(NodeScriptHost);
    if (JobSettings.GetIsInPlace(isInPlaceDefault))
    {
        _inPlaceWorkingDirectory = JobBinariesPath;
        SafeKillAllRunningJobInstances(logger);
        UpdateAppConfigs(WorkingDirectory);
        return;
    }

    _inPlaceWorkingDirectory = null;

    if (WorkingDirectory != null)
    {
        try
        {
            // Skip the copy when the binaries have not changed since the last cache.
            int currentHash = CalculateHashForJob(JobBinariesPath);
            int lastHash = CalculateHashForJob(WorkingDirectory);
            if (lastHash == currentHash)
            {
                return;
            }
        }
        catch (Exception ex)
        {
            // Log error and ignore it as it's not critical to cache job binaries
            logger.LogWarning("Failed to calculate hash for WebJob, continue to copy WebJob binaries (this will not affect WebJob run)\n" + ex);
            _analytics.UnexpectedException(ex);
        }
    }

    SafeKillAllRunningJobInstances(logger);

    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        FileSystemHelpers.DeleteDirectorySafe(JobTempPath, ignoreErrors: true);
    }
    // Deletion above is best-effort; warn if the directory survived.
    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        logger.LogWarning("Failed to delete temporary directory");
    }

    try
    {
        OperationManager.Attempt(() =>
        {
            // Copy to a uniquely-named temp instance directory and point the job at it.
            var tempJobInstancePath = Path.Combine(JobTempPath, Path.GetRandomFileName());
            FileSystemHelpers.CopyDirectoryRecursive(JobBinariesPath, tempJobInstancePath);
            UpdateAppConfigs(tempJobInstancePath);
            _workingDirectory = tempJobInstancePath;
        });
    }
    catch (Exception ex)
    {
        //Status = "Worker is not running due to an error";
        //TraceError("Failed to copy bin directory: " + ex);
        logger.LogError("Failed to copy job files: " + ex);
        _analytics.UnexpectedException(ex);

        // job disabled
        _workingDirectory = null;
    }
}
/// <summary>
/// Writes a formatted job error entry (operation, job name/id and the root
/// exception) to the given service log.
/// </summary>
/// <param name="job">The job that failed.</param>
/// <param name="log">Target service log.</param>
/// <param name="op">Name of the operation that failed (e.g. "执行").</param>
/// <param name="ex">The exception; its real/innermost exception is logged.</param>
private static void WriteJobException(JobBase job, ServiceLog log, string op, Exception ex)
{
    log.Write($"{op}作业\"{job.Name}\"({job.JobID})错误: {ex.GetRealException()}");
}
/// <summary>
/// Decides whether a transcode is needed for the essence referenced by the
/// workflow input. Returns "none" when the video already matches the allowed
/// codec/format within the bitrate budget, otherwise "short" or "long"
/// depending on the clip duration relative to THRESHOLD_SECONDS.
/// </summary>
public async Task<JToken> Handler(JToken @event, ILambdaContext context)
{
    Logger.Debug(@event.ToMcmaJson().ToString());

    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    // Best-effort progress notification; failures are logged and ignored.
    try
    {
        var jobData = new JobBase
        {
            Status = "RUNNING",
            Progress = 45
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    var bme = await resourceManager.ResolveAsync<BMEssence>(@event["data"]["bmEssence"].ToString());

    // Walk the EBUCore technical metadata. BUGFIX: every step below now uses
    // null propagation — missing intermediate nodes used to throw
    // NullReferenceException (e.g. a missing containerFormat, duration or
    // bitRate node); they now simply leave the corresponding property null.
    var technicalMetadata = bme.Get<object>("technicalMetadata").ToMcmaJson();

    var ebuCoreMain = technicalMetadata["ebucore:ebuCoreMain"];
    var coreMetadata = ebuCoreMain?["ebucore:coreMetadata"]?.FirstOrDefault();
    var containerFormat = coreMetadata?["ebucore:format"]?.FirstOrDefault()?["ebucore:containerFormat"]?.FirstOrDefault();
    var duration = coreMetadata?["ebucore:format"]?.FirstOrDefault()?["ebucore:duration"]?.FirstOrDefault();

    var video = new
    {
        Codec = containerFormat?["ebucore:codec"]?.FirstOrDefault()?["ebucore:codecIdentifier"]?.FirstOrDefault()?["dc:identifier"]?.FirstOrDefault()?["#value"],
        BitRate = coreMetadata?["ebucore:format"]?.FirstOrDefault()?["ebucore:videoFormat"]?.FirstOrDefault()?["ebucore:bitRate"]?.FirstOrDefault()?["#value"],
        Format = coreMetadata?["ebucore:format"]?.FirstOrDefault()?["ebucore:videoFormat"]?.FirstOrDefault()?["@videoFormatName"],
        NormalPlayTime = duration?["ebucore:normalPlayTime"]?.FirstOrDefault()?["#value"]
    };

    var codec = video.Codec?.ToString();
    var format = video.Format?.ToString();
    // double.TryParse(null) safely returns false — no transcode exemption
    // without a parseable bitrate.
    var parsedBitRate = double.TryParse(video.BitRate?.ToString(), out var bitRate);
    var mbyte = parsedBitRate ? (bitRate / 8) / (1024 * 1024) : default(double?);

    // Already web-friendly: no transcode required.
    if ((codec == VIDEO_CODEC || codec == VIDEO_CODEC_ISOM) && format == VIDEO_FORMAT && mbyte.HasValue && mbyte.Value <= VIDEO_BITRATE_MB)
    {
        return("none");
    }

    // Parse the ISO-8601-style duration ("PT12.3S" or "...H...M...S" pieces).
    var normalPlayTime = video.NormalPlayTime?.ToString() ?? string.Empty;
    double totalSeconds;
    var ptSeconds = Regex.Match(normalPlayTime, "PT([0-9\\.]+)S");
    if (ptSeconds.Success)
    {
        totalSeconds = double.Parse(ptSeconds.Groups[1].Captures[0].Value);
    }
    else
    {
        var hour = Regex.Match(normalPlayTime, "(\\d*)H");
        var min = Regex.Match(normalPlayTime, "(\\d*)M");
        var sec = Regex.Match(normalPlayTime, "(\\d*)S");
        if (!sec.Success)
        {
            throw new Exception($"Invalid play time in technical metadata: {normalPlayTime ?? "[null]"}");
        }
        totalSeconds = CalcSeconds(
            hour.Success ? int.Parse(hour.Groups[1].Captures[0].Value) : 0,
            min.Success ? int.Parse(min.Groups[1].Captures[0].Value) : 0,
            double.Parse(sec.Groups[1].Captures[0].Value));
    }

    Logger.Debug("[Total Seconds]: " + totalSeconds);
    return(totalSeconds <= THRESHOLD_SECONDS ? "short" : "long");
}
/// <summary>
/// Step Functions worker: reports progress, claims the activity task, and
/// submits a TransformJob (proxy creation) whose completion callback carries
/// the activity task token.
/// </summary>
public async Task<string> Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    // Best-effort progress notification; a failure here must not abort the step.
    try
    {
        var notification = new JobBase
        {
            Status = "RUNNING",
            Progress = 54
        };
        var endpoint = @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>();
        await resourceManager.SendNotificationAsync(notification, endpoint);
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // Claim the activity task; its token is needed for the job's callback URL.
    var stepFunction = new AmazonStepFunctionsClient();
    var activityTask = await stepFunction.GetActivityTaskAsync(new GetActivityTaskRequest
    {
        ActivityArn = ACTIVITY_ARN
    });
    var taskToken = activityTask.TaskToken;
    if (taskToken == null)
    {
        throw new Exception("Failed to obtain activity task");
    }

    // The activity input supersedes the lambda input.
    @event = JToken.Parse(activityTask.Input);

    var profiles = await resourceManager.GetAsync<JobProfile>(("name", JOB_PROFILE_NAME));
    var profileId = profiles?.FirstOrDefault()?.Id;
    if (profileId == null)
    {
        throw new Exception($"JobProfile '{JOB_PROFILE_NAME}' not found");
    }

    var createProxyJob = new TransformJob
    {
        JobProfile = profileId,
        JobInput = new JobParameterBag
        {
            ["inputFile"] = @event["data"]["repositoryFile"],
            ["outputLocation"] = new S3Locator
            {
                AwsS3Bucket = REPOSITORY_BUCKET,
                AwsS3KeyPrefix = "TransformJobResults/"
            }
        },
        NotificationEndpoint = new NotificationEndpoint
        {
            HttpEndpoint = ACTIVITY_CALLBACK_URL + "?taskToken=" + Uri.EscapeDataString(taskToken)
        }
    };

    createProxyJob = await resourceManager.CreateAsync(createProxyJob);
    return createProxyJob.Id;
}
/// <summary>
/// Caches the sibling body and job components for later use.
/// NOTE(review): assumes both components exist on this GameObject —
/// GetComponent returns null otherwise; confirm prefab setup.
/// </summary>
protected void Init()
{
    m_body = GetComponent<BodyBase>();
    m_job = GetComponent<JobBase>();
}
/// <summary>
/// Enemy AI action selection. Priority order: 1) a living party Leader below
/// 50% HP, 2) the enemy leader's current player target, 3) a random living
/// Attacker/Magician, 4) any living party member. Skill 0 is used in every
/// branch, and only when it is not on recast.
/// NOTE(review): 'eb' defaults to null but is dereferenced immediately —
/// callers must always pass a valid EnemyBase.
/// </summary>
/// <param name="eb">The acting enemy.</param>
override public void Excute(EnemyBase eb = null)
{
    if (eb.CanTakeAction())
    {
        // 1) Rescue-priority: target a living Leader under 50% HP.
        foreach (var target in eb.e_pr.partyList)
        {
            JobBase jb = target.GetComponent<JobBase>();
            if ((jb._hp / jb._maxHP * 100 < 50 && jb._type == JobType.Leader) && jb.battelStatus != BattelStatus.DEAD)
            {
                if (!eb.skillList[0].isRecast)
                {
                    eb.StartCoroutine(eb.SkillUse(target, eb.skillList[0]));
                    return;
                }
            }
        }
        // 2) Follow the enemy leader's current player target if it is alive.
        EnemyBase leader_eb = eb.e_pr.enemyList[0].GetComponent<EnemyBase>();
        if (leader_eb._target != null)
        {
            JobBase jb = leader_eb._target.GetComponent<JobBase>();
            if (leader_eb._target.layer == LayerMask.NameToLayer("Player") && jb.battelStatus != BattelStatus.DEAD)
            {
                if (!eb.skillList[0].isRecast)
                {
                    eb.StartCoroutine(eb.SkillUse(leader_eb._target, eb.skillList[0]));
                    return;
                }
            }
        }
        // 3) Pick a random living Attacker or Magician.
        List<GameObject> targetCount = new List<GameObject>();
        foreach (var target in eb.e_pr.partyList)
        {
            JobBase jb = target.GetComponent<JobBase>();
            if ((jb._type == JobType.Attacker || jb._type == JobType.Magician) && jb.battelStatus != BattelStatus.DEAD)
            {
                targetCount.Add(target);
            }
        }
        if (targetCount.Count > 0)
        {
            int ran = Random.Range(0, targetCount.Count);
            if (!eb.skillList[0].isRecast)
            {
                eb.StartCoroutine(eb.SkillUse(targetCount[ran], eb.skillList[0]));
                return;
            }
        }
        else
        {
            // 4) Fall back to any living party member.
            foreach (var target in eb.e_pr.partyList)
            {
                JobBase jb = target.GetComponent<JobBase>();
                if (jb.battelStatus != BattelStatus.DEAD)
                {
                    targetCount.Add(target);
                }
            }
            if (targetCount.Count > 0)
            {
                int ran = Random.Range(0, targetCount.Count);
                if (!eb.skillList[0].isRecast)
                {
                    // NOTE(review): this recast check duplicates the one just above.
                    if (!eb.skillList[0].isRecast)
                    {
                        eb.StartCoroutine(eb.SkillUse(targetCount[ran], eb.skillList[0]));
                        return;
                    }
                }
            }
        }
    }
    else
    {
        Debug.Log("TODO: 状態異常のモードに移動");
        return;
    }
}
/// <summary>
/// Enemy AI action selection (support variant). Priority order: 1) use skill 3
/// on a living ally below 50% HP, 2) buff allies — skill 2 on Magicians,
/// skill 0 on Attackers, 3) otherwise use skill 3 on the party Leader, or on
/// any living party member below 30% HP.
/// NOTE(review): 'eb' defaults to null but is dereferenced immediately; and in
/// branch 3 'obj' can remain null when no qualifying target exists — confirm
/// SkillUse tolerates a null target.
/// </summary>
/// <param name="eb">The acting enemy.</param>
override public void Excute(EnemyBase eb = null)
{
    if (eb.CanTakeAction())
    {
        // 1) Skill 3 on the first living ally below 50% HP.
        if (!eb.skillList[3].isRecast)
        {
            foreach (var target in eb.e_pr.enemyList)
            {
                EnemyBase t_eb = target.GetComponent<EnemyBase>();
                if (t_eb._hp / t_eb._maxHP * 100 < 50 && t_eb.battelStatus != BattelStatus.DEAD)
                {
                    eb.StartCoroutine(eb.SkillUse(target, eb.skillList[3]));
                    return;
                }
                // NOTE(review): unreachable — the broader HP check above already
                // returns for any ally below 50%, Leader included.
                if (t_eb._type == JobType.Leader && t_eb._hp / t_eb._maxHP * 100 < 50 && t_eb.battelStatus != BattelStatus.DEAD)
                {
                    eb.StartCoroutine(eb.SkillUse(target, eb.skillList[3]));
                    return;
                }
            }
        }
        // 2) Buff: skill 2 for Magicians, skill 0 for Attackers (first match wins).
        foreach (var target in eb.e_pr.enemyList)
        {
            EnemyBase t_eb = target.GetComponent<EnemyBase>();
            if (t_eb._type == JobType.Magician && t_eb.battelStatus != BattelStatus.DEAD)
            {
                if (!eb.skillList[2].isRecast)
                {
                    eb.StartCoroutine(eb.SkillUse(target, eb.skillList[2]));
                    return;
                }
            }
            if (t_eb._type == JobType.Attacker && t_eb.battelStatus != BattelStatus.DEAD)
            {
                if (!eb.skillList[0].isRecast)
                {
                    eb.StartCoroutine(eb.SkillUse(target, eb.skillList[0]));
                    return;
                }
            }
        }
        // 3) Fallback: skill 3 on the party Leader, overridden by any living
        // member below 30% HP.
        if (!eb.skillList[3].isRecast)
        {
            GameObject obj = null;
            foreach (var target in eb.e_pr.partyList)
            {
                JobBase jb = target.GetComponent<JobBase>();
                if (jb._type == JobType.Leader && jb.battelStatus != BattelStatus.DEAD)
                {
                    if (obj == null)
                    {
                        obj = target;
                    }
                }
                if (jb._hp / jb._maxHP * 100 < 30 && jb.battelStatus != BattelStatus.DEAD)
                {
                    obj = target;
                    break;
                }
            }
            eb.StartCoroutine(eb.SkillUse(obj, eb.skillList[3]));
            return;
        }
    }
    else
    {
        Debug.Log("TODO: 状態異常のモードに移動");
    }
}
/// <summary>
/// Processes a single job through its lifecycle state machine: refreshes the
/// job record, repairs plans whose workers went offline, then advances the job
/// according to its current status (plan making, task execution, result sync,
/// result merge), including recovery of steps interrupted by a Swift restart.
/// </summary>
/// <param name="job">Job.</param>
/// <param name="cancellationToken">Cancellation token.</param>
private void ProcessJob(JobBase job, CancellationToken cancellationToken = default)
{
    cancellationToken.ThrowIfCancellationRequested();

    // Re-fetch the latest job record: the job supports concurrent async
    // processing, so the snapshot we were handed may already be stale.
    job = _cluster.GetJobRecord(job.Name, job.Id);

    // Jobs that failed plan making / task merging / task execution are NOT
    // retried here on purpose — investigate the cause first.

    // Replace offline workers in the task plan for any in-flight status.
    if (job.Status == EnumJobRecordStatus.PlanMaked
        || job.Status == EnumJobRecordStatus.TaskExecuting
        || job.Status == EnumJobRecordStatus.Canceling
        || job.Status == EnumJobRecordStatus.TaskCompleted)
    {
        job.ReplaceTaskPlanOfflineWorker(cancellationToken);
    }

    // Pending jobs always need a production plan.
    if (job.Status == EnumJobRecordStatus.Pending)
    {
        job.CreateProductionPlan(cancellationToken);
    }

    // Plan-making state may be left over from a Swift shutdown; check the
    // owning process and resume monitoring if needed.
    if (job.Status == EnumJobRecordStatus.PlanMaking)
    {
        LogWriter.Write("作业为计划制定中状态,将检查对应的进程:" + job.BusinessId, Log.LogLevel.Info);
        if (CheckPlanMakingJob(job, cancellationToken))
        {
            LogWriter.Write("开始监控运行作业计划制定");
            job.MointorRunJobSplit(cancellationToken);
        }
    }

    // Executing/completed jobs: refresh task run status (may promote the job
    // to "all tasks completed" or "all results synced").
    if (job.Status == EnumJobRecordStatus.PlanMaked
        || job.Status == EnumJobRecordStatus.TaskExecuting
        || job.Status == EnumJobRecordStatus.TaskCompleted
        || job.Status == EnumJobRecordStatus.Canceling)
    {
        job.CheckTaskRunStatus(cancellationToken);
    }

    // Executing/completed jobs: pull task results back.
    if (job.Status == EnumJobRecordStatus.TaskExecuting || job.Status == EnumJobRecordStatus.TaskCompleted)
    {
        job.SyncTaskResult(cancellationToken);
    }

    // All task results synced: merge them into the final job result.
    if (job.Status == EnumJobRecordStatus.TaskSynced)
    {
        job.MergeTaskResult(cancellationToken);
    }

    // Merging state may be left over from a Swift shutdown; check the owning
    // process and resume monitoring if needed.
    if (job.Status == EnumJobRecordStatus.TaskMerging)
    {
        LogWriter.Write("作业为任务结果合并中状态,将检查对应的进程:" + job.BusinessId, Log.LogLevel.Info);
        if (CheckTaskMergingJob(job, cancellationToken))
        {
            LogWriter.Write("开始监控运行任务结果合并");
            job.MointorRunCollectTaskResult(cancellationToken);
        }
    }
}
/// <summary>
/// Creates event args carrying the job that was queued.
/// </summary>
/// <param name="job">The queued job.</param>
public QueueTaskEventArgs(JobBase job)
{
    this.Job = job;
}
/// <summary>
/// Persists a job inside a single transaction. In Edit mode the job's existing
/// type-specific rows are deleted first, then the job is written through the
/// adapter matching its concrete type.
/// </summary>
/// <param name="newJob">The job to persist.</param>
private void DoUpdate(JobBase newJob)
{
    using (TransactionScope ts = TransactionScopeFactory.Create())
    {
        if (this.Mode == PageMode.Edit)
        {
            // Remove any previous type-specific rows before re-inserting.
            StartWorkflowJobAdapter.Instance.Delete(new string[] { newJob.JobID });
            InvokeWebServiceJobAdapter.Instance.Delete(p => p.AppendItem("JOB_ID", newJob.JobID));
        }

        // Dispatch to the adapter for the job's concrete type.
        StartWorkflowJob workflowJob = newJob as StartWorkflowJob;
        InvokeWebServiceJob webServiceJob = newJob as InvokeWebServiceJob;
        if (workflowJob != null)
        {
            StartWorkflowJobAdapter.Instance.Update(workflowJob);
        }
        else if (webServiceJob != null)
        {
            InvokeWebServiceJobAdapter.Instance.Update(webServiceJob);
        }
        else
        {
            JobBaseAdapter.Instance.Update(newJob);
        }

        ts.Complete();
    }
}
/// <summary>
/// Coroutine that applies status conditions ("status magic") to a target — or,
/// for multi-target skills, to every player-layer object gathered by a spawned
/// range collider. Plays the effect prefab at a_time, applies the statuses at
/// e_time, then starts the skill's recast timer.
/// </summary>
/// <param name="target">Primary target.</param>
/// <param name="sc">Skill script (range, target count, effect time, recast).</param>
/// <param name="a_time">Delay (seconds) before the visual effect is spawned.</param>
/// <param name="effect">Resources path of the effect prefab.</param>
/// <param name="e_time">Delay (seconds) before the statuses are applied.</param>
/// <param name="status_array">The condition statuses to apply.</param>
/// <returns>Unity coroutine enumerator.</returns>
public IEnumerator StatusMagic(GameObject target, SkillScript sc, float a_time, string effect, float e_time, ConditionStatus[] status_array)
{
    // Multi-target skills spawn a range object whose sphere collider gathers targets.
    if (sc.s_targetNum == TargetNum.MUTIPLE)
    {
        CreateRange();
        GameObject.FindGameObjectWithTag("Range").GetComponent<SphereCollider>().radius = sc.s_range;
    }
    float timer = 0;
    bool useMagic = false;
    while (true)
    {
        timer += Time.deltaTime;
        // Spawn the visual effect exactly once, when a_time is reached.
        if (timer >= a_time && !useMagic)
        {
            GameObject effectObj = Instantiate(Resources.Load(effect), target.transform.position, Quaternion.identity) as GameObject;
            effectObj.transform.parent = this.transform;
            useMagic = true;
        }
        if (timer >= e_time)
        {
            // A destroyed target/range object mid-cast raises
            // MissingReferenceException — simply end the coroutine then.
            try
            {
                if (sc.s_targetNum == TargetNum.MUTIPLE)
                {
                    foreach (var r_target in GameObject.FindGameObjectWithTag("Range").GetComponent<RangeDetect>().targets)
                    {
                        // TODO: distinguish Player / Enemy properly; only
                        // player-layer objects receive the status for now.
                        if (r_target.layer != LayerMask.NameToLayer("Player"))
                        {
                            continue;
                        }
                        JobBase jb = r_target.GetComponent<JobBase>();
                        foreach (var status in status_array)
                        {
                            jb.Set_c_Status(status);
                            jb.StartCoroutine(jb.StatusCounter(status, sc.s_effectTime));
                        }
                    }
                }
                else
                {
                    if (target.layer == LayerMask.NameToLayer("Player"))
                    {
                        JobBase jb = target.GetComponent<JobBase>();
                        foreach (var status in status_array)
                        {
                            jb.Set_c_Status(status);
                            jb.StartCoroutine(jb.StatusCounter(status, sc.s_effectTime));
                        }
                    }
                }
                if (GameObject.FindGameObjectWithTag("Range"))
                {
                    DeleteRange();
                }
                StartCoroutine(SkillRecast(sc.gameObject, sc.s_recast));
                yield break;
            }
            catch (MissingReferenceException)
            {
                yield break;
            }
        }
        yield return(new WaitForEndOfFrame());
    }
}
/// <summary>
/// Copies the proxy essence (the transform job output, or the original essence
/// when no transform ran) into the public website bucket and returns a locator
/// with a public HTTP endpoint.
/// </summary>
public async Task<S3Locator> Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();

    // Best-effort progress notification; failures are logged and ignored.
    try
    {
        var jobData = new JobBase
        {
            Status = "RUNNING",
            Progress = 72
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }

    // Determine the source file: the transform output, or the original essence
    // when no transform was needed.
    var transformJobId = GetTransformJobId(@event);

    S3Locator outputFile;
    if (transformJobId == null)
    {
        Logger.Debug("Transform job ID is null. Transform was not done. Using original essence as proxy.");
        var bme = await resourceManager.ResolveAsync<BMEssence>(@event["data"]["bmEssence"]?.ToString());
        outputFile = (S3Locator)bme.Locations[0];
    }
    else
    {
        Logger.Debug($"Getting proxy location from transform job {transformJobId}.");
        var transformJob = await resourceManager.ResolveAsync<TransformJob>(transformJobId);
        outputFile = transformJob.JobOutput.Get<S3Locator>(nameof(outputFile));
    }

    // Destination key keeps the source file extension, if any.
    var s3Bucket = WEBSITE_BUCKET;
    var s3Key = "media/" + Guid.NewGuid();
    var idxLastDot = outputFile.AwsS3Key.LastIndexOf(".");
    if (idxLastDot > 0)
    {
        s3Key += outputFile.AwsS3Key.Substring(idxLastDot);
    }

    var s3 = new AmazonS3Client();
    var data = await s3.GetBucketLocationAsync(s3Bucket);
    try
    {
        var copyParams = new CopyObjectRequest
        {
            SourceBucket = outputFile.AwsS3Bucket,
            SourceKey = outputFile.AwsS3Key,
            DestinationBucket = s3Bucket,
            DestinationKey = s3Key
        };
        // GetBucketLocation returns an empty string for us-east-1.
        var regionEndpoint = RegionEndpoint.GetBySystemName(!string.IsNullOrWhiteSpace(data.Location) ? (string)data.Location : "us-east-1");
        var destS3 = new AmazonS3Client(regionEndpoint);
        await destS3.CopyObjectAsync(copyParams);
    }
    catch (Exception error)
    {
        // BUGFIX: the original message claimed we were "unable to read input
        // file" and only named the destination — the failing operation is the
        // S3 copy; name both source and destination.
        throw new Exception("Unable to copy file from bucket '" + outputFile.AwsS3Bucket + "' with key '" + outputFile.AwsS3Key +
                            "' to bucket '" + s3Bucket + "' with key '" + s3Key + "' due to error: " + error);
    }

    // Legacy-style public URL ("s3-<region>" subdomain; plain "s3" for us-east-1).
    var s3SubDomain = !string.IsNullOrWhiteSpace(data.Location) ? $"s3-{data.Location}" : "s3";
    var httpEndpoint = "https://" + s3SubDomain + ".amazonaws.com/" + s3Bucket + "/" + s3Key;

    return new S3Locator
    {
        AwsS3Bucket = s3Bucket,
        AwsS3Key = s3Key,
        HttpEndpoint = httpEndpoint
    };
}
/// <summary>
/// Called when leaving this tutorial state: advances the tutorial step counter.
/// </summary>
/// <param name="jb">The state owner (unused here).</param>
override public void Exit(JobBase jb)
{
    TutorialRoot.Instance.counter++;
}
/// <summary>
/// Kills the abandoned collect task result process.
/// Finds the OS process by id, kills it only if it actually matches the given
/// job's CollectTaskResult process, and finally removes the on-disk process
/// file unless the kill attempt failed.
/// </summary>
/// <param name="processId">Process identifier.</param>
/// <param name="jobName">Job name.</param>
/// <param name="jobId">Job identifier.</param>
public static void KillAbandonedCollectTaskResultProcess(int processId, string jobName, string jobId)
{
    var businessId = JobBase.FormatBusinessId(jobName, jobId);

    // The OS process may already be gone; treat lookup failure as informational.
    Process osProcess = null;
    try
    {
        osProcess = Process.GetProcessById(processId);
    }
    catch (Exception ex)
    {
        LogWriter.Write(string.Format("根据进程Id查找进程异常,进程可能已经关闭了:{0},{1}", businessId, processId), ex, LogLevel.Info);
    }

    // Only delete the process file when we did not fail to kill a matching process.
    bool canDeleteProcessFile = true;
    if (osProcess != null && SwiftProcess.CheckJobAndProcessMatch(osProcess, jobName, jobId, "CollectTaskResult"))
    {
        try
        {
            osProcess.Kill();
            osProcess.WaitForExit();
            LogWriter.Write(string.Format("已关闭废弃的任务合并进程:{0},{1}", businessId, processId), LogLevel.Info);
        }
        catch (Exception ex)
        {
            canDeleteProcessFile = false;
            LogWriter.Write(string.Format("关闭废弃的任务合并进程异常:{0},{1}", businessId, processId), ex, LogLevel.Error);
        }
    }

    if (canDeleteProcessFile)
    {
        var processPath = SwiftConfiguration.GetSwiftProcessPath("CollectTaskResult", JobBase.FormatBusinessId(jobName, jobId));
        try
        {
            File.Delete(processPath);
            LogWriter.Write(string.Format("进程文件已删除:{0}", processPath), LogLevel.Info);
        }
        catch (Exception ex)
        {
            LogWriter.Write(string.Format("删除废弃的任务合并进程文件异常:{0},{1}", businessId, processId), ex, LogLevel.Error);
        }
    }
}
/// <summary>
/// Called when leaving this state: logs the transition for debugging.
/// </summary>
/// <param name="jb">The state owner (unused here).</param>
override public void Exit(JobBase jb)
{
    Debug.Log("Exit");
}
/// <summary>
/// Executes a job at most once at a time: a lock-guarded re-entrancy flag
/// prevents concurrent runs, the job action is invoked, and the last execution
/// time is recorded (persisted for all schedule types except short intervals).
/// </summary>
/// <param name="job">The job to execute.</param>
private static void ExecuteJob(JobBase job)
{
    // Re-entrancy guard: only one thread may run a given job at a time.
    lock (job.Locker)
    {
        if (job.isExecuteing)
            return;
        job.isExecuteing = true;
    }
    try
    {
        job.Action();
        switch (job.TimeType)
        {
            // Interval jobs: always track the last run in memory, but only
            // persist it for long intervals (> 10 minutes) to limit writes.
            case TimeType.Interval:
                job.LastExecuteTime = DateTimeUtil.Now;
                if (job.IntervalSeconds > 10 * 60)
                {
                    RecordExecuteTime(job.Type, job.LastExecuteTime);
                }
                break;
            default:
                job.LastExecuteTime = DateTimeUtil.Now;
                RecordExecuteTime(job.Type, job.LastExecuteTime);
                break;
        }
    }
    catch (Exception ex)
    {
        // Best-effort error logging; a failing logger must not crash the scheduler.
        try
        {
            LogHelper.CreateErrorLog(ex);
        }
        catch
        {
        }
    }
    finally
    {
        // BUGFIX: reset inside finally so an abnormal unwind (e.g. thread
        // abort) cannot leave the job permanently flagged as running.
        job.isExecuteing = false;
    }
}
/// <summary>
/// Called when entering this state: logs the transition for debugging.
/// </summary>
/// <param name="jb">The state owner (unused here).</param>
override public void Enter(JobBase jb)
{
    Debug.Log("Enter");
}
/// <summary>
/// Step Functions worker for the "detect-celebrities-azure" parallel branch:
/// reports branch progress, claims the activity task, looks up the configured
/// job profile, and submits an AIJob whose completion callback carries the
/// activity task token. Returns the created job's id.
/// </summary>
public async Task<JToken> Handler(JToken @event, ILambdaContext context)
{
    if (@event == null)
    {
        throw new Exception("Missing workflow input");
    }
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();
    // Best-effort progress notification; failures are logged and ignored.
    try
    {
        // NOTE(review): the nested initializer assumes JobBase pre-initializes
        // ParallelProgress (otherwise this throws NullReferenceException) — confirm.
        var jobData = new JobBase
        {
            Status = "RUNNING",
            ParallelProgress = { ["detect-celebrities-azure"] = 20 }
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject<NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }
    // Claim the activity task; its token is needed for the job's callback URL.
    var stepFunction = new AmazonStepFunctionsClient();
    var data = await stepFunction.GetActivityTaskAsync(new GetActivityTaskRequest
    {
        ActivityArn = ACTIVITY_ARN
    });
    var taskToken = data.TaskToken;
    if (taskToken == null)
    {
        throw new Exception("Failed to obtain activity task");
    }
    // The activity input supersedes the lambda input.
    @event = JToken.Parse(data.Input);
    var jobProfiles = await resourceManager.GetAsync<JobProfile>(("name", JOB_PROFILE_NAME));
    var jobProfileId = jobProfiles?.FirstOrDefault()?.Id;
    if (jobProfileId == null)
    {
        throw new Exception($"JobProfile '{JOB_PROFILE_NAME}' not found");
    }
    var job = new AIJob
    {
        JobProfile = jobProfileId,
        JobInput = new JobParameterBag
        {
            ["inputFile"] = @event["data"]["mediaFileLocator"],
            ["outputLocation"] = new S3Locator
            {
                AwsS3Bucket = TEMP_BUCKET,
                AwsS3Key = JOB_RESULTS_PREFIX
            }
        },
        NotificationEndpoint = new NotificationEndpoint
        {
            HttpEndpoint = ACTIVITY_CALLBACK_URL + "?taskToken=" + Uri.EscapeDataString(taskToken)
        }
    };
    job = await resourceManager.CreateAsync(job);
    return(job.Id);
}
/// <summary>
/// Caches the sibling brain and job components and records the spawn position
/// as this entity's home position.
/// </summary>
protected void Init()
{
    m_brain = GetComponent<BrainBase>();
    m_job = GetComponent<JobBase>();
    m_homePos = base.transform.position;
}
/// <summary>
/// Prepares the working directory for a WebJob run: either runs in place (the
/// default for Node script hosts) or copies the job binaries to a fresh temp
/// directory, skipping the copy when a file-map diff shows nothing changed.
/// </summary>
/// <param name="job">The job whose binaries are being cached.</param>
/// <param name="logger">Logger for warnings/errors during caching.</param>
private void CacheJobBinaries(JobBase job, IJobLogger logger)
{
    // Node scripts default to running in place (no copy step needed).
    bool isInPlaceDefault = job.ScriptHost.GetType() == typeof(NodeScriptHost);
    if (JobSettings.GetIsInPlace(isInPlaceDefault))
    {
        _inPlaceWorkingDirectory = JobBinariesPath;
        SafeKillAllRunningJobInstances(logger);
        UpdateAppConfigs(WorkingDirectory, _analytics);
        return;
    }

    _inPlaceWorkingDirectory = null;

    // Snapshot the source directory so changes (including deletions) can be detected.
    Dictionary<string, FileInfoBase> sourceDirectoryFileMap = GetJobDirectoryFileMap(JobBinariesPath);
    if (WorkingDirectory != null)
    {
        try
        {
            var workingDirectoryFileMap = GetJobDirectoryFileMap(WorkingDirectory);
            if (!JobDirectoryHasChanged(sourceDirectoryFileMap, workingDirectoryFileMap, _cachedSourceDirectoryFileMap, logger))
            {
                // no changes detected, so skip the cache/copy step below
                return;
            }
        }
        catch (Exception ex)
        {
            // Log error and ignore it, since this diff optimization isn't critical.
            // We'll just do a full copy in this case.
            logger.LogWarning("Failed to diff WebJob directories for changes. Continuing to copy WebJob binaries (this will not affect the WebJob run)\n" + ex);
            _analytics.UnexpectedException(ex);
        }
    }

    SafeKillAllRunningJobInstances(logger);

    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        FileSystemHelpers.DeleteDirectorySafe(JobTempPath, ignoreErrors: true);
    }
    // Deletion above is best-effort; warn if the directory survived.
    if (FileSystemHelpers.DirectoryExists(JobTempPath))
    {
        logger.LogWarning("Failed to delete temporary directory");
    }

    try
    {
        OperationManager.Attempt(() =>
        {
            // Copy to a uniquely-named temp instance directory and point the job at it.
            var tempJobInstancePath = Path.Combine(JobTempPath, Path.GetRandomFileName());
            FileSystemHelpers.CopyDirectoryRecursive(JobBinariesPath, tempJobInstancePath);
            UpdateAppConfigs(tempJobInstancePath, _analytics);
            _workingDirectory = tempJobInstancePath;

            // cache the file map snapshot for next time (to aid in detecting
            // file deletions)
            _cachedSourceDirectoryFileMap = sourceDirectoryFileMap;
        });
    }
    catch (Exception ex)
    {
        //Status = "Worker is not running due to an error";
        //TraceError("Failed to copy bin directory: " + ex);
        logger.LogError("Failed to copy job files: " + ex);
        _analytics.UnexpectedException(ex);

        // job disabled
        _workingDirectory = null;
    }
}
/// <summary>
/// Attempts to update the status of a single task via a Consul check-and-set (CAS)
/// loop: read the job record, validate the transition, write it back, retry on
/// CAS conflict.
/// </summary>
/// <returns><c>true</c> if the status was updated; <c>false</c> otherwise (see <paramref name="errCode"/>).</returns>
/// <param name="task">The task whose status is being changed.</param>
/// <param name="status">The new status to apply.</param>
/// <param name="errCode">Error code: 0 = no error; 1 = the transition to this status is not allowed; 2 = the status is already set; 3 = the job or task does not exist.</param>
/// <param name="latestJob">The latest job record as written back, or null on failure.</param>
/// <param name="cancellationToken">Cancellation token.</param>
public bool TryUpdateTaskStatus(JobTask task, EnumTaskStatus status, out int errCode, out JobBase latestJob, CancellationToken cancellationToken = default(CancellationToken))
{
    errCode = 0;
    latestJob = null;
    cancellationToken.ThrowIfCancellationRequested();
    var jobRecordKey = GetJobRecordFullKey(task.Job.Cluster.Name, task.Job.Name, task.Job.Id);
    KVPair jobRecordKV;
    int updateIndex = 0;
    do
    {
        updateIndex++;
        Log.LogWriter.Write(string.Format("UpdateTaskStatus Execute Times: {0},{1}", jobRecordKey + ":" + task.Id, updateIndex), Log.LogLevel.Debug);
        // Back off briefly before every retry after a failed CAS write.
        if (updateIndex > 1)
        {
            Thread.Sleep(200);
        }
        jobRecordKV = ConsulKV.Get(jobRecordKey, cancellationToken);
        if (jobRecordKV == null)
        {
            errCode = 3;
            Log.LogWriter.Write(string.Format("the job missing: {0}", jobRecordKey), Log.LogLevel.Error);
            return(false);
        }
        var jobRecordJson = Encoding.UTF8.GetString(jobRecordKV.Value);
        Log.LogWriter.Write("UpdateTaskStatus Get Value[" + jobRecordKV.ModifyIndex + "]" + jobRecordJson, Log.LogLevel.Trace);
        var jobRecord = JobBase.Deserialize(jobRecordJson, task.Job.Cluster);
        jobRecord.ModifyIndex = jobRecordKV.ModifyIndex;
        // Locate the task in the job's task plan. Synced/SyncFailed are statuses set by
        // the Manager (search the whole plan); any other status is set by the Worker that
        // owns the task (search only the current member's slice of the plan).
        var consulTask = jobRecord.TaskPlan.Where(d => d.Key == task.Job.Cluster.CurrentMember.Id).SelectMany(d => d.Value.Where(t => t.Id == task.Id)).FirstOrDefault();
        if (status == EnumTaskStatus.Synced || status == EnumTaskStatus.SyncFailed)
        {
            consulTask = jobRecord.TaskPlan.SelectMany(d => d.Value.Where(t => t.Id == task.Id)).FirstOrDefault();
        }
        if (consulTask == null)
        {
            errCode = 3;
            Log.LogWriter.Write(string.Format("the job task missing: {0}", jobRecordKey), Log.LogLevel.Error);
            return(false);
        }
        // A Canceling task may only transition to Canceled or CancelFailed.
        if (consulTask.Status == EnumTaskStatus.Canceling && status != EnumTaskStatus.Canceled && status != EnumTaskStatus.CancelFailed)
        {
            LogWriter.Write(string.Format("{0} can not change to {1}", jobRecord.Status, status));
            errCode = 1;
            return(false);
        }
        // A finished (Completed or Synced) task may not go back to Executing.
        if ((consulTask.Status == EnumTaskStatus.Completed || consulTask.Status == EnumTaskStatus.Synced) && status == EnumTaskStatus.Executing)
        {
            LogWriter.Write(string.Format("{0} can not change to {1}", jobRecord.Status, status));
            errCode = 1;
            return(false);
        }
        // Stamp start/finish times on the corresponding transitions.
        if (status == EnumTaskStatus.Executing)
        {
            consulTask.StartTime = DateTime.Now;
        }
        if (status == EnumTaskStatus.Completed)
        {
            consulTask.FinishedTime = DateTime.Now;
        }
        consulTask.Status = status;
        jobRecord.ModifyIndex = jobRecordKV.ModifyIndex;
        latestJob = jobRecord;
        jobRecordJson = JsonConvert.SerializeObject(jobRecord);
        Log.LogWriter.Write("UpdateTaskStatus CAS Value[" + jobRecordKV.ModifyIndex + "]" + jobRecordJson, Log.LogLevel.Trace);
        jobRecordKV.Value = Encoding.UTF8.GetBytes(jobRecordJson);
    } while (!ConsulKV.CAS(jobRecordKV, cancellationToken));
    return(true);
}
/// <summary>
/// Runs a single instance of a WebJob: builds the script-host command, populates the
/// well-known WebJobs environment variables, executes the process, and reports the
/// resulting status ("Running" -> "Success"/"Stopped"/error).
/// </summary>
/// <param name="job">The job to run.</param>
/// <param name="logger">Logger that receives stdout/stderr and status messages.</param>
/// <param name="runId">Identifier for this run, exposed to the child process via WEBJOBS_RUN_ID.</param>
protected void RunJobInstance(JobBase job, IJobLogger logger, string runId)
{
    string scriptFileName = Path.GetFileName(job.ScriptFilePath);
    string scriptFileFullPath = Path.Combine(WorkingDirectory, job.RunCommand);
    string workingDirectoryForScript = Path.GetDirectoryName(scriptFileFullPath);
    logger.LogInformation("Run script '{0}' with script host - '{1}'".FormatCurrentCulture(scriptFileName, job.ScriptHost.GetType().Name));
    // jobStartedReporter reports the run start; its Error property is set below on failure.
    using (var jobStartedReporter = new JobStartedReporter(_analytics, job, Settings.GetWebSiteSku(), JobDataPath))
    {
        try
        {
            var exe = _externalCommandFactory.BuildCommandExecutable(job.ScriptHost.HostPath, workingDirectoryForScript, IdleTimeout, NullLogger.Instance);
            _shutdownNotificationFilePath = RefreshShutdownNotificationFilePath(job.Name, job.JobType);
            // Set environment variable to be able to identify all processes spawned for this job
            exe.EnvironmentVariables[GetJobEnvironmentKey()] = "true";
            exe.EnvironmentVariables[WellKnownEnvironmentVariables.WebJobsRootPath] = WorkingDirectory;
            exe.EnvironmentVariables[WellKnownEnvironmentVariables.WebJobsName] = job.Name;
            exe.EnvironmentVariables[WellKnownEnvironmentVariables.WebJobsType] = job.JobType;
            exe.EnvironmentVariables[WellKnownEnvironmentVariables.WebJobsDataPath] = JobDataPath;
            exe.EnvironmentVariables[WellKnownEnvironmentVariables.WebJobsRunId] = runId;
            exe.EnvironmentVariables[WellKnownEnvironmentVariables.WebJobsCommandArguments] = job.CommandArguments;
            if (_shutdownNotificationFilePath != null)
            {
                exe.EnvironmentVariables[WellKnownEnvironmentVariables.WebJobsShutdownNotificationFile] = _shutdownNotificationFilePath;
            }
            UpdateStatus(logger, "Running");
            // Run the script through its host; stdout/stderr stream into the job logger.
            int exitCode = exe.ExecuteReturnExitCode(
                TraceFactory.GetTracer(),
                logger.LogStandardOutput,
                logger.LogStandardError,
                job.ScriptHost.ArgumentsFormat,
                scriptFileName,
                job.CommandArguments != null ? " " + job.CommandArguments : String.Empty);
            if (exitCode != 0)
            {
                string errorMessage = "Job failed due to exit code " + exitCode;
                logger.LogError(errorMessage);
                jobStartedReporter.Error = errorMessage;
            }
            else
            {
                UpdateStatus(logger, "Success");
            }
        }
        catch (ThreadAbortException)
        {
            // We kill the process when refreshing the job
            logger.LogInformation("WebJob process was aborted");
            UpdateStatus(logger, "Stopped");
        }
        catch (Exception ex)
        {
            logger.LogError(ex.ToString());
            jobStartedReporter.Error = ex.Message;
        }
    }
}
/// <summary>
/// Attempts to update the status of a job record via a Consul check-and-set (CAS)
/// loop, enforcing the allowed status transitions and stamping the relevant
/// timestamps for each transition.
/// </summary>
/// <returns><c>true</c> if the status was updated; <c>false</c> otherwise (see <paramref name="errCode"/>).</returns>
/// <param name="job">The job whose record is being updated.</param>
/// <param name="status">The new status to apply.</param>
/// <param name="errCode">Error code: 0 = no error; 1 = the transition to this status is not allowed; 2 = the status is already set; 3 = the job does not exist.</param>
/// <param name="latestJob">The latest job record after the update.</param>
/// <param name="cancellationToken">Cancellation token.</param>
public bool TryUpdateJobStatus(JobBase job, EnumJobRecordStatus status, out int errCode, out JobBase latestJob, CancellationToken cancellationToken = default(CancellationToken))
{
    errCode = 0;
    latestJob = null;
    cancellationToken.ThrowIfCancellationRequested();
    var jobRecordKey = GetJobRecordFullKey(job.Cluster.Name, job.Name, job.Id);
    KVPair jobRecordKV;
    int updateTimes = 0;
    do
    {
        updateTimes++;
        Log.LogWriter.Write(string.Format("UpdateJobStatus Execute Times:{0},{1}", jobRecordKey, updateTimes), Log.LogLevel.Debug);
        // Back off briefly before every retry after a failed CAS write.
        if (updateTimes > 1)
        {
            Thread.Sleep(200);
        }
        jobRecordKV = ConsulKV.Get(jobRecordKey, cancellationToken);
        if (jobRecordKV == null)
        {
            Log.LogWriter.Write(string.Format("the job missing: {0}", jobRecordKey), Log.LogLevel.Error);
            errCode = 3;
            return(false);
        }
        var jobRecordJson = Encoding.UTF8.GetString(jobRecordKV.Value);
        Log.LogWriter.Write("UpdateJobStatus Get Value[" + jobRecordKV.ModifyIndex + "]" + jobRecordJson, Log.LogLevel.Trace);
        JobWrapper jobRecord = JsonConvert.DeserializeObject <JobWrapper>(jobRecordJson);
        // Status unchanged: nothing to do.
        if (jobRecord.Status == status)
        {
            LogWriter.Write(string.Format("the job status is already in {0}", status));
            errCode = 2;
            return(false);
        }
        // A Canceling job may only transition to Canceled or CancelFailed.
        if (jobRecord.Status == EnumJobRecordStatus.Canceling && status != EnumJobRecordStatus.Canceled && status != EnumJobRecordStatus.CancelFailed)
        {
            LogWriter.Write(string.Format("{0} can not change to {1}", jobRecord.Status, status));
            errCode = 1;
            return(false);
        }
        // Once the plan is made the job cannot go back to plan-making.
        if (jobRecord.Status == EnumJobRecordStatus.PlanMaked && status == EnumJobRecordStatus.PlanMaking)
        {
            LogWriter.Write(string.Format("{0} can not change to {1}", jobRecord.Status, status));
            errCode = 1;
            return(false);
        }
        // Once task results are merged the job cannot go back to merging.
        if (jobRecord.Status == EnumJobRecordStatus.TaskMerged && status == EnumJobRecordStatus.TaskMerging)
        {
            LogWriter.Write(string.Format("{0} can not change to {1}", jobRecord.Status, status));
            errCode = 1;
            return(false);
        }
        // Completed / failed / synced / merged jobs cannot go back to executing.
        if ((jobRecord.Status == EnumJobRecordStatus.TaskCompleted || jobRecord.Status == EnumJobRecordStatus.TaskExecutingFailed || jobRecord.Status == EnumJobRecordStatus.TaskSynced || jobRecord.Status == EnumJobRecordStatus.TaskMerged) && status == EnumJobRecordStatus.TaskExecuting)
        {
            LogWriter.Write(string.Format("{0} can not change to {1}", jobRecord.Status, status));
            errCode = 1;
            return(false);
        }
        // Pending: the task plan has not been made yet, so clear the plan and reset timestamps.
        if (status == EnumJobRecordStatus.Pending)
        {
            jobRecord.TaskPlan = null;
            jobRecord.StartTime = DateTime.MinValue;
            jobRecord.JobSplitStartTime = DateTime.MinValue;
            jobRecord.CollectTaskResultStartTime = DateTime.MinValue;
        }
        // PlanMaking: stamp the job start / split start times.
        if (status == EnumJobRecordStatus.PlanMaking)
        {
            jobRecord.StartTime = DateTime.Now;
            jobRecord.JobSplitStartTime = DateTime.Now;
        }
        // PlanMaked: attach the computed task plan and stamp the split end time.
        if (status == EnumJobRecordStatus.PlanMaked)
        {
            jobRecord.TaskPlan = job.TaskPlan;
            jobRecord.JobSplitEndTime = DateTime.Now;
        }
        // TaskMerging: stamp the result-collection start time.
        if (status == EnumJobRecordStatus.TaskMerging)
        {
            jobRecord.CollectTaskResultStartTime = DateTime.Now;
        }
        // TaskMerged: stamp the finish and result-collection end times.
        if (status == EnumJobRecordStatus.TaskMerged)
        {
            jobRecord.FinishedTime = DateTime.Now;
            jobRecord.CollectTaskResultEndTime = DateTime.Now;
        }
        jobRecord.ModifyIndex = jobRecordKV.ModifyIndex;
        jobRecord.Status = status;
        latestJob = jobRecord;
        jobRecordJson = JsonConvert.SerializeObject(jobRecord);
        Log.LogWriter.Write("UpdateJobStatus CAS Value[" + jobRecordKV.ModifyIndex + "]" + jobRecordJson, Log.LogLevel.Trace);
        jobRecordKV.Value = Encoding.UTF8.GetBytes(jobRecordJson);
    } while (!ConsulKV.CAS(jobRecordKV, cancellationToken));
    return(true);
}
/// <summary>
/// Workflow step: retrieves the finished Azure celebrity-detection AI job result
/// from S3, stores it as azureAiMetadata on the BMContent, and sends progress
/// notifications (80% on entry, 100% on completion).
/// </summary>
/// <param name="event">Workflow input carrying data.azureCelebritiesJobId, input.bmContent and notificationEndpoint.</param>
/// <param name="context">Lambda execution context (unused).</param>
/// <exception cref="Exception">Thrown when the input, job id, output file, or S3 object cannot be obtained.</exception>
public async Task Handler(JToken @event, ILambdaContext context)
{
    if (@event == null)
    {
        throw new Exception("Missing workflow input");
    }
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();
    // Best-effort progress notification; failures are logged but never fatal.
    try
    {
        var jobData = new JobBase
        {
            Status = "RUNNING",
            ParallelProgress = { ["detect-celebrities-azure"] = 80 }
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject <NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }
    // get ai job id (first non null entry in array)
    var jobId = @event["data"]["azureCelebritiesJobId"]?.FirstOrDefault(id => id != null)?.Value <string>();
    if (jobId == null)
    {
        throw new Exception("Failed to obtain azureCelebritiesJobId");
    }
    // FIX: the id was previously passed as an unused argument to a format string
    // with no placeholder, so it was never actually logged.
    Logger.Debug("[azureCelebritiesJobId]: {0}", jobId);
    // get result of ai job
    var job = await resourceManager.ResolveAsync <AIJob>(jobId);
    S3Locator outputFile;
    if (!job.JobOutput.TryGet <S3Locator>(nameof(outputFile), false, out outputFile))
    {
        throw new Exception($"AI job '{jobId}' does not specify an output file.");
    }
    // Fetch the raw JSON result the AI service wrote to S3.
    var s3Bucket = outputFile.AwsS3Bucket;
    var s3Key = outputFile.AwsS3Key;
    GetObjectResponse s3Object;
    try
    {
        var s3Client = new AmazonS3Client();
        s3Object = await s3Client.GetObjectAsync(new GetObjectRequest
        {
            BucketName = s3Bucket,
            Key = s3Key,
        });
    }
    catch (Exception error)
    {
        // FIX: message previously read "Unable to data file ..." (missing verb).
        throw new Exception("Unable to get data file in bucket '" + s3Bucket + "' with key '" + s3Key + "'", error);
    }
    var azureResult = await s3Object.ResponseStream.ReadJsonFromStreamAsync();
    Logger.Debug("AzureResult: {0}", azureResult.ToString(Formatting.Indented));
    var bmContent = await resourceManager.ResolveAsync <BMContent>(@event["input"]["bmContent"].Value <string>());
    // set response on the AzureAiMetadata object on the BMContent
    bmContent["azureAiMetadata"] = azureResult.ToMcmaObject <McmaExpandoObject>();
    await resourceManager.UpdateAsync(bmContent);
    // Report completion of this parallel branch (best-effort).
    try
    {
        var jobData = new JobBase
        {
            Status = "RUNNING",
            ParallelProgress = { ["detect-celebrities-azure"] = 100 }
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject <NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }
}
/// <summary>
/// State callback invoked while the given job is executing. The base implementation
/// is a no-op; derived states override this to add behavior.
/// Note: the name keeps the historical "Excute" spelling for caller compatibility.
/// </summary>
/// <param name="jb">The job being processed.</param>
public virtual void Excute(JobBase jb)
{
}
/// <summary>
/// Populates the edit controls and the schedule detail grid from the given job.
/// </summary>
/// <param name="job">Job whose fields are pushed into the UI.</param>
private void SetJobBaseControlState(JobBase job)
{
    // Scalar fields into their input controls.
    txtName.Value = job.Name;
    txtDesc.Value = job.Description;
    txtCategory.Value = job.Category;
    ddlType.SelectedValue = ((int)job.JobType).ToString();
    ddlEnabled.Items.FindByValue(job.Enabled.ToString().ToLower()).Selected = true;

    // Project each schedule into a lightweight row for the detail grid.
    var scheduleRows = job.Schedules.Select(schedule => new SimpleSchedule()
    {
        ID = schedule.ID,
        Name = schedule.Name,
        Description = schedule.Description,
        Enabled = schedule.Enabled,
        StartTime = schedule.StartTime,
        EndTime = schedule.EndTime
    }).ToList();

    detailGrid.InitialData = scheduleRows;
}
/// <summary>
/// State callback invoked when leaving the state for the given job. The base
/// implementation is a no-op; derived states override this to add behavior.
/// </summary>
/// <param name="jb">The job being processed.</param>
public virtual void Exit(JobBase jb)
{
}
/// <summary>
/// Copies the edit-control values into the given job and reloads its schedule
/// collection from the rows currently bound to the detail grid.
/// </summary>
/// <param name="newJob">The job object to populate.</param>
/// <exception cref="InvalidOperationException">Thrown when a schedule referenced by the grid no longer exists.</exception>
private void SetJobBaseInfo(JobBase newJob)
{
    newJob.JobID = this.JobID;
    newJob.Name = txtName.Value.Trim();
    newJob.Enabled = bool.Parse(this.ddlEnabled.Value);
    newJob.Description = txtDesc.Value;
    newJob.Creator = DeluxeIdentity.CurrentUser;
    newJob.Category = txtCategory.Value;
    JobScheduleCollection schedules = new JobScheduleCollection();
    foreach (SimpleSchedule item in detailGrid.InitialData)
    {
        // FIX: previously indexed [0] unconditionally, throwing an unhelpful
        // out-of-range exception when the schedule had been deleted elsewhere.
        var loaded = JobScheduleAdapter.Instance.Load(p => p.AppendItem("SCHEDULE_ID", item.ID));
        if (loaded.Count == 0)
        {
            throw new InvalidOperationException(string.Format("Schedule '{0}' ({1}) no longer exists.", item.Name, item.ID));
        }
        schedules.Add(loaded[0]);
    }
    newJob.Schedules = schedules;
}
/// <summary>
/// State callback invoked when entering the state for the given job. The base
/// implementation is a no-op; derived states override this to add behavior.
/// </summary>
/// <param name="jb">The job being processed.</param>
public virtual void Enter(JobBase jb)
{
}
/// <summary>
/// Workflow step: registers the output of the transcode (transform) job as a new
/// BMEssence on the BMContent. If no transform job ran, returns the existing
/// essence id unchanged. Sends a 63% progress notification on entry (best-effort).
/// </summary>
/// <param name="event">Workflow input carrying data.bmContent, data.bmEssence and notificationEndpoint.</param>
/// <param name="context">Lambda execution context (unused).</param>
/// <returns>The id of the registered (or pre-existing) BMEssence.</returns>
public async Task <JToken> Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();
    // Best-effort progress notification; failures are logged but never fatal.
    try
    {
        var jobData = new JobBase
        {
            Status = "RUNNING",
            Progress = 63
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject <NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }
    // get transform job id
    var transformJobId = GetTransformJobId(@event);
    // in case we did not do a transcode, just return the existing essence ID
    if (transformJobId == null)
    {
        return(@event["data"]["bmEssence"].Value <string>());
    }
    var transformJob = await resourceManager.ResolveAsync <TransformJob>(transformJobId);
    S3Locator outputFile;
    if (!transformJob.JobOutput.TryGet <S3Locator>(nameof(outputFile), false, out outputFile))
    {
        // FIX: message previously said "AmeJob output" (copy-paste from the AME handler).
        throw new Exception("Unable to get outputFile from TransformJob output.");
    }
    var s3Bucket = outputFile.AwsS3Bucket;
    var s3Key = outputFile.AwsS3Key;
    var bmc = await resourceManager.ResolveAsync <BMContent>(@event["data"]["bmContent"]?.ToString());
    // Register a new essence pointing at the transcoded file and attach it to the content.
    var locator = new S3Locator
    {
        AwsS3Bucket = s3Bucket,
        AwsS3Key = s3Key
    };
    var bme = CreateBmEssence(bmc, locator);
    bme = await resourceManager.CreateAsync(bme);
    if (bme?.Id == null)
    {
        throw new Exception("Failed to register BMEssence.");
    }
    bmc.BmEssences.Add(bme.Id);
    bmc = await resourceManager.UpdateAsync(bmc);
    return(bme.Id);
}
/// <summary>
/// Workflow step: reads the MediaInfo result produced by the AME job from S3,
/// creates a BMEssence for the repository file with that technical metadata, and
/// attaches it to the BMContent. Sends a 36% progress notification on entry.
/// </summary>
/// <param name="event">Workflow input carrying data.bmContent, data.repositoryFile and notificationEndpoint.</param>
/// <param name="context">Lambda execution context (unused).</param>
/// <returns>The id of the newly registered BMEssence.</returns>
public async Task <JToken> Handler(JToken @event, ILambdaContext context)
{
    var resourceManager = AwsEnvironment.GetAwsV4ResourceManager();
    // Best-effort progress notification; failures are logged but never fatal.
    try
    {
        var jobData = new JobBase
        {
            Status = "RUNNING",
            Progress = 36
        };
        await resourceManager.SendNotificationAsync(jobData, @event["notificationEndpoint"].ToMcmaObject <NotificationEndpoint>());
    }
    catch (Exception error)
    {
        Logger.Error("Failed to send notification: {0}", error);
    }
    var ameJobId = GetAmeJobId(@event);
    if (ameJobId == null)
    {
        throw new Exception("Failed to obtain AmeJob ID");
    }
    Logger.Debug("[AmeJobID]: " + ameJobId);
    var ameJob = await resourceManager.ResolveAsync <AmeJob>(ameJobId);
    S3Locator outputFile;
    if (!ameJob.JobOutput.TryGet <S3Locator>(nameof(outputFile), false, out outputFile))
    {
        throw new Exception("Unable to get outputFile from AmeJob output.");
    }
    // Download the MediaInfo JSON the AME service wrote to S3.
    var s3Bucket = outputFile.AwsS3Bucket;
    var s3Key = outputFile.AwsS3Key;
    GetObjectResponse s3Object;
    try
    {
        var s3 = await outputFile.GetClientAsync();
        s3Object = await s3.GetObjectAsync(new GetObjectRequest
        {
            BucketName = s3Bucket,
            Key = s3Key
        });
    }
    catch (Exception error)
    {
        // FIX: message was missing the closing quote after the key, and the original
        // exception was flattened into the message; pass it as the inner exception
        // instead (consistent with the other handlers) so the stack trace survives.
        throw new Exception("Unable to get media info file in bucket '" + s3Bucket + "' with key '" + s3Key + "'", error);
    }
    var mediaInfo = JToken.Parse(await new StreamReader(s3Object.ResponseStream).ReadToEndAsync());
    var bmc = await resourceManager.ResolveAsync <BMContent>(@event["data"]["bmContent"].ToString());
    Logger.Debug("[BMContent]: " + bmc.ToMcmaJson());
    Logger.Debug("[@event]:" + @event.ToMcmaJson().ToString());
    Logger.Debug("[mediaInfo]:" + mediaInfo.ToMcmaJson().ToString());
    var bme = CreateBmEssence(bmc, @event["data"]["repositoryFile"].ToMcmaObject <S3Locator>(), mediaInfo);
    Logger.Debug("Serializing essence...");
    Logger.Debug("[bme]:" + bme.ToMcmaJson().ToString());
    Logger.Debug("Creating essence...");
    bme = await resourceManager.CreateAsync(bme);
    // FIX: use a null-conditional access so a null CreateAsync result reports the
    // registration failure instead of throwing NullReferenceException.
    if (bme?.Id == null)
    {
        throw new Exception("Failed to register BMEssence");
    }
    Logger.Debug("[BMEssence ID]: " + bme.Id);
    bmc.BmEssences.Add(bme.Id);
    bmc = await resourceManager.UpdateAsync <BMContent>(bmc);
    return(bme.Id);
}