public void LogErr(IJobExecutionContext context, string err)
{
    LogErrModel model = new LogErrModel();

    // Record the cron expression of the firing trigger, if there is one.
    if (context.Trigger is ICronTrigger cTrigger)
    {
        model.CronExpression = cTrigger.CronExpressionString ?? "-";
    }
    else
    {
        model.CronExpression = "-";
    }

    model.ErrDateTime = DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss");
    model.JobName = context.JobDetail.Key.Name;
    model.JobGroupName = context.JobDetail.Key.Group;
    model.JobType = context.JobDetail.JobType.FullName;
    model.JobData = JobUtils.GetJobDataString(context.JobDetail.JobDataMap);
    model.ErrMessage = err;

    // The error queue is shared, so guard the enqueue with the lock.
    lock (LockHelper)
    {
        _queueErrInfo.Enqueue(model);
    }
}
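LogErr only enqueues; a matching consumer is implied but not shown. A minimal, hypothetical drain-loop sketch (FlushErrors and SaveErrLogs are illustrative names, not part of the original code), dequeuing under the same LockHelper lock used above:

// Hypothetical consumer sketch: drains the queue under the same lock the producer uses.
// FlushErrors and SaveErrLogs are assumed names for illustration only.
private void FlushErrors()
{
    List<LogErrModel> batch = new List<LogErrModel>();
    lock (LockHelper)
    {
        while (_queueErrInfo.Count > 0)
        {
            batch.Add(_queueErrInfo.Dequeue());
        }
    }
    if (batch.Count > 0)
    {
        SaveErrLogs(batch); // e.g. persist the batch to the error log store
    }
}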
private LightJobData getJobData(Vector3Int chunkId)
{
    NativeArray<bool> directionsValid = new NativeArray<bool>(DirectionExtensions.numDirections, Allocator.Persistent);
    for (int i = 0; i < DirectionExtensions.numDirections; i++)
    {
        var offset = DirectionExtensions.Vectors[i];
        var neighId = chunkId + offset;
        //Chunk only valid (for propagation etc) if it's fully generated and not outside the world limits
        directionsValid[i] = chunkManager.IsChunkFullyGenerated(neighId) &&
            !chunkManager.WorldLimits.ChunkOutsideVerticalLimits(neighId);
    }

    var chunkData = chunkManager.GetReadOnlyChunkData(chunkId);

    LightJobData jobData = new LightJobData(
        chunkId.ToNative(),
        chunkManager.ChunkToWorldPosition(chunkId).ToInt().ToNative(),
        chunkDimensions.ToNative(),
        chunkData.ToNative(Allocator.Persistent),
        chunkData.LightToNative(Allocator.Persistent),
        JobUtils.CacheNeighbourData(chunkId, chunkManager),
        directionsValid,
        voxelTypeToEmissionMap,
        voxelTypeToAbsorptionMap,
        directionVectors
    );
    return jobData;
}
public static List<JobViewModel> GetJobList()
{
    List<JobViewModel> list = new List<JobViewModel>();
    try
    {
        string path = GetXmlFilePath();
        var data = XmlUtil.DeserializeXml<QuartzXmlConfiguration20>(path);
        if (data == null)
        {
            return list;
        }
        if (data.schedule == null || data.schedule.Length == 0)
        {
            return list;
        }

        foreach (var sch in data.schedule)
        {
            if (sch.job == null || sch.job.Length == 0)
            {
                continue;
            }
            foreach (var job in sch.job)
            {
                JobViewModel model = new JobViewModel();
                model.JobName = job.name;
                model.JobGroupName = job.group;
                model.JobType = job.jobtype;
                model.JobData = JobUtils.GetJobDataString(job.jobdatamap);

                if (sch.trigger != null)
                {
                    var tr = sch.trigger.SingleOrDefault(p => p.Item.jobname == job.name && p.Item.jobgroup == job.group);
                    if (tr != null)
                    {
                        var cd = tr.Item as cronTriggerType;
                        if (cd != null)
                        {
                            model.CronExpression = cd.cronexpression;
                        }
                        var state = JobUtils.GetTriggerState(tr.Item.name, tr.Item.group);
                        model.JobState = JobUtils.GetTriggerStateValue(state);
                    }
                }
                list.Add(model);
            }
        }
    }
    catch (Exception ex)
    {
        log.Error(ex);
    }
    return list;
}
private FaceDescriptor maskData(int3 position, Direction direction, int3 lightPosition)
{
    // Flatten the 3D voxel position into an index into the chunk's voxel array.
    var flatIndex = MultiIndexToFlat(position.x, position.y, position.z, data.dimensions.x, dxdy);
    var typeId = data.voxels[flatIndex];
    var lv = JobUtils.GetLightValue(lightPosition, data.lights, data.dimensions, data.neighbourData);

    // Rotated voxels carry their rotation in a separate map keyed by flat index.
    if (rotatedVoxelsMap.TryGetValue(flatIndex, out var rotation))
    {
        return makeFaceDescriptor(typeId, direction, lv, rotation);
    }
    return makeFaceDescriptor(typeId, direction, lv);
}
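The call above passes x, y, z, the x dimension, and a precomputed dxdy, which is consistent with a row-major style flattening. The sketch below is an assumption about that convention only; it is not the project's actual MultiIndexToFlat helper.

// Assumed flattening convention, inferred from the call site above
// (dxdy is presumably dimensions.x * dimensions.y). Illustrative only.
private static int MultiIndexToFlatSketch(int x, int y, int z, int dimX, int dxdy)
{
    return x + y * dimX + z * dxdy;
}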
public JsonResult DeleteJob(string jobName, string groupName)
{
    var model = JobUtils.GetJobParamBase(jobName, groupName);
    bool bl = _job.DeleteJob(model);
    if (bl)
    {
        return BaseJson(MsgCode.Success, "Delete succeeded");
    }
    else
    {
        return BaseJson(MsgCode.Failed, "Delete failed");
    }
}
static void Main(string[] args)
{
    {
        Console.WriteLine(JobUtils.GetJobDetail("d17aa752-0683-4afd-83b3-32c4a55abb87"));
        return;
    }
    // NOTE: the early return above makes the block below unreachable;
    // remove it to exercise the DataHandler job listing.
    {
        DLWorkspaceUtils.DataHandler dataHandler = new DLWorkspaceUtils.DataHandler();
        List<Job> jobs = dataHandler.GetJobList();
        foreach (var job in jobs)
        {
            Console.WriteLine(job.ToString());
        }
        dataHandler.Close();
    }
}
private FaceDescriptor GetFaceInNeighbour(int3 position, Direction neighbourDirection, Direction faceDirection, int primaryAxis, int3 lightPosition)
{
    var localIndexOfAdjacentVoxelInNeighbour = data.neighbourData.IndicesInNeighbour(primaryAxis, position);
    var neighbourChunkData = data.neighbourData.GetVoxels(neighbourDirection);
    var neighbourDimensions = data.neighbourData.IndicesInNeighbour(primaryAxis, data.dimensions);

    var flattenedIndex = MultiIndexToFlat(localIndexOfAdjacentVoxelInNeighbour.x, localIndexOfAdjacentVoxelInNeighbour.y, neighbourDimensions);
    var id = neighbourChunkData[flattenedIndex];
    var lv = JobUtils.GetLightValue(lightPosition, data.lights, data.dimensions, data.neighbourData);

    //NOTE currently the rotation data is not fetched for neighbours, so this can't be incorporated.
    return makeFaceDescriptor(id, faceDirection, lv);
}
private Dictionary<string, string> GetParaDict(string json)
{
    Dictionary<string, string> dict = new Dictionary<string, string>();
    if (string.IsNullOrEmpty(json))
    {
        return dict;
    }

    var para = JobUtils.ToObject<List<KeyValueModel>>(json);
    if (para != null)
    {
        foreach (var item in para)
        {
            if (!dict.ContainsKey(item.key))
            {
                dict.Add(item.key, item.value);
            }
        }
    }
    return dict;
}
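A minimal usage sketch: duplicate keys keep their first value. The payload shape assumes KeyValueModel exposes key and value as serialized below; the exact JSON casing depends on how JobUtils.ToObject deserializes.

// Hypothetical call; the payload is an assumption about KeyValueModel's serialized shape.
string json = "[{\"key\":\"url\",\"value\":\"http://example.com\"},{\"key\":\"url\",\"value\":\"ignored\"}]";
var dict = GetParaDict(json);
// dict["url"] == "http://example.com"; the duplicate "url" entry is skipped.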
public static async Task<HttpResponseData> Run(
    [HttpTrigger(AuthorizationLevel.Function, "get", "post")] HttpRequestData req,
    FunctionContext executionContext)
{
    var log = executionContext.GetLogger("SubmitEncodingJob");
    log.LogInformation("C# HTTP trigger function processed a request.");

    // Get request body data.
    string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
    var data = (RequestBodyModel)JsonConvert.DeserializeObject(requestBody, typeof(RequestBodyModel));

    // Return bad request if neither an input asset name nor an input url is passed in
    if (data.InputAssetName == null && data.InputUrl == null)
    {
        return HttpRequest.ResponseBadRequest(req, "Please pass inputAssetName or inputUrl in the request body");
    }

    // Return bad request if the transform name is not passed in
    if (data.TransformName == null)
    {
        return HttpRequest.ResponseBadRequest(req, "Please pass transformName in the request body");
    }

    ConfigWrapper config = ConfigUtils.GetConfig();

    IAzureMediaServicesClient client;
    try
    {
        client = await Authentication.CreateMediaServicesClientAsync(config);
        log.LogInformation("AMS Client created.");
    }
    catch (Exception e)
    {
        if (e.Source.Contains("ActiveDirectory"))
        {
            log.LogError("TIP: Make sure that you have filled out the appsettings.json file before running this sample.");
        }
        log.LogError($"{e.Message}");
        return HttpRequest.ResponseBadRequest(req, e.Message);
    }

    // Set the polling interval for long running operations to 2 seconds.
    // The default value is 30 seconds for the .NET client SDK
    client.LongRunningOperationRetryTimeout = 2;

    // Creating a unique suffix so that we don't have name collisions if you run the sample
    // multiple times without cleaning up.
    string uniqueness = Guid.NewGuid().ToString().Substring(0, 13);
    string jobName = $"job-{uniqueness}";
    string outputAssetName = $"output-{uniqueness}";

    Transform transform;
    try
    {
        // Ensure that you have the encoding Transform. This is really a one time setup operation.
        transform = await TransformUtils.CreateEncodingTransform(client, log, config.ResourceGroup, config.AccountName, data.TransformName, data.BuiltInPreset);
        log.LogInformation("Transform retrieved.");
    }
    catch (ErrorResponseException ex)
    {
        return HttpRequest.ResponseBadRequest(req, LogUtils.LogError(log, ex, "Error when creating the transform."));
    }

    Asset outputAsset;
    try
    {
        // Output from the job must be written to an Asset, so let's create one
        outputAsset = await AssetUtils.CreateAssetAsync(client, log, config.ResourceGroup, config.AccountName, outputAssetName, data.OutputAssetStorageAccount);
        log.LogInformation($"Output asset '{outputAssetName}' created.");
    }
    catch (ErrorResponseException ex)
    {
        return HttpRequest.ResponseBadRequest(req, LogUtils.LogError(log, ex, "Error when creating the output asset."));
    }

    // Job input preparation : asset or url
    JobInput jobInput;
    if (data.InputUrl != null)
    {
        jobInput = new JobInputHttp(files: new[] { data.InputUrl });
        log.LogInformation("Input is a Url.");
    }
    else
    {
        jobInput = new JobInputAsset(assetName: data.InputAssetName);
        log.LogInformation($"Input is asset '{data.InputAssetName}'.");
    }

    Job job;
    try
    {
        // Job submission to Azure Media Services
        job = await JobUtils.SubmitJobAsync(
            client,
            log,
            config.ResourceGroup,
            config.AccountName,
            data.TransformName,
            jobName,
            jobInput,
            outputAssetName
        );
        log.LogInformation($"Job '{jobName}' submitted.");
    }
    catch (ErrorResponseException ex)
    {
        return HttpRequest.ResponseBadRequest(req, LogUtils.LogError(log, ex, "Error when submitting the job."));
    }

    AnswerBodyModel dataOk = new()
    {
        OutputAssetName = outputAsset.Name,
        JobName = job.Name
    };
    return HttpRequest.ResponseOk(req, dataOk, HttpStatusCode.Accepted);
}
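A hedged sketch of the request body this function expects, inferred from the RequestBodyModel members read above; the exact JSON property casing depends on that model's serialization attributes.

// Hypothetical request payload, built from the fields the function reads
// (InputAssetName or InputUrl, TransformName, BuiltInPreset, OutputAssetStorageAccount).
var exampleRequestBody = JsonConvert.SerializeObject(new
{
    inputUrl = "https://example.com/source.mp4",   // or supply inputAssetName instead
    transformName = "myEncodingTransform",
    builtInPreset = "AdaptiveStreaming",           // assumed built-in preset name
    outputAssetStorageAccount = (string)null       // optional storage account override
});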
public static JobDataModel GetJobDataByKey(JobKey jobKey)
{
    JobDataModel jobData = new JobDataModel();
    jobData.JobType = JobUtils.GetJobType(jobKey.Group);
    jobData.JobName = jobKey.Name;
    jobData.JobGroupName = jobKey.Group;
    try
    {
        string path = GetXmlFilePath();
        var data = XmlUtil.DeserializeXml<QuartzXmlConfiguration20>(path);
        if (data == null)
        {
            return null;
        }
        if (data.schedule == null || data.schedule.Length == 0)
        {
            return null;
        }

        foreach (var sch in data.schedule)
        {
            if (sch.job == null || sch.job.Length == 0)
            {
                continue;
            }
            var item = sch.job.SingleOrDefault(p => p.name == jobKey.Name && p.group == jobKey.Group);
            if (item != null)
            {
                if (jobData.JobType == JobType.Http)
                {
                    jobData.CallbackUrl = GetDataMapValue(item.jobdatamap, JobConfig.CallbackUrl);
                    string paramString = GetDataMapValue(item.jobdatamap, JobConfig.CallbackParams);
                    jobData.CallbackParams = JobUtils.GetDictFromString(paramString);
                }
                else
                {
                    if (item.jobtype != null)
                    {
                        var arr = item.jobtype.Split(",", StringSplitOptions.RemoveEmptyEntries);
                        if (arr.Length == 2)
                        {
                            jobData.TypeFullName = arr[0];
                            jobData.AssemblyDllName = arr[1];
                        }
                    }
                }

                if (sch.trigger != null)
                {
                    var tr = sch.trigger.SingleOrDefault(p => p.Item.jobname == jobKey.Name && p.Item.jobgroup == jobKey.Group);
                    if (tr != null)
                    {
                        var cd = tr.Item as cronTriggerType;
                        if (cd != null)
                        {
                            jobData.CronExpression = cd.cronexpression;
                        }
                    }
                }
                return jobData;
            }
        }
    }
    catch (Exception ex)
    {
        log.Error(ex);
    }
    return null;
}
public string Get(string op)
{
    string ret = "test";
    if (op == "SubmitJob")
    {
        DLWorkspaceUtils.Job job = new Job();
        job.jobParams = new JobParams();
        job.jobName = HttpContext.Request.Query["jobName"];
        job.jobType = HttpContext.Request.Query["jobType"];
        job.userName = HttpContext.Request.Query["userName"];
        job.jobParams.gpu = Int32.Parse(HttpContext.Request.Query["resourcegpu"]);
        job.jobParams.workPath = HttpContext.Request.Query["workPath"];
        job.jobParams.dataPath = HttpContext.Request.Query["dataPath"];
        job.jobParams.dockerImage = HttpContext.Request.Query["image"];
        job.jobParams.cmd = HttpContext.Request.Query["cmd"];
        job.jobParams.interactivePort = HttpContext.Request.Query["interactivePort"];
        job.jobParams.jobtrainingtype = HttpContext.Request.Query["jobtrainingtype"];
        job.jobParams.runningasroot = HttpContext.Request.Query["runningasroot"];
        job.jobParams.userName = HttpContext.Request.Query["userName"];
        job.jobParams.userId = HttpContext.Request.Query["userId"];
        job.jobParams.containerUserId = HttpContext.Request.Query["containerUserId"];
        if (job.jobParams.containerUserId == null)
        {
            if (job.jobParams.runningasroot != null && job.jobParams.runningasroot == "1")
            {
                job.jobParams.containerUserId = "0";
            }
            else
            {
                job.jobParams.containerUserId = job.jobParams.userId;
            }
        }
        ret = DLWorkspaceUtils.JobUtils.SubmitJob(job.ToString());
    }
    else if (op == "submitPhilly")
    {
        DLWorkspaceUtils.Job job = new Job();
        job.jobParams = new JobParams();
        //CMD += "configFile=$USERNAME%2FloopTest.lua&"
        //CMD += "minGPUs=1&"
        //CMD += "name=cust-test!~!~!1&"
        //CMD += "isdebug=false&"
        //CMD += "iscrossrack=false&"
        //CMD += "inputDir=%2Fhdfs%2F$VC%2F$USERNAME%2FData&"
        //CMD += "userName=$USERNAME"
        job.jobParams.workPath = HttpContext.Request.Query["workPath"];
        job.jobParams.dataPath = HttpContext.Request.Query["dataPath"];
        job.jobName = HttpContext.Request.Query["JobName"];
        job.jobParams.gpu = Int32.Parse(HttpContext.Request.Query["MinGPUs"]);
        job.jobParams.dockerImage = HttpContext.Request.Query["CustomDockerName"];
        string toolType = HttpContext.Request.Query["toolType"];
        if (!job.jobParams.dockerImage.Contains("/") && toolType != null && toolType == "cust")
        {
            job.jobParams.dockerImage = "master:5000/rr1-prod/infrastructure:" + job.jobParams.dockerImage;
        }
        job.jobParams.cmd = HttpContext.Request.Query["cmd"];
        job.jobType = "training";
        job.jobParams.userName = HttpContext.Request.Query["UserName"];
        job.jobParams.userId = "-1";
        job.jobParams.runningasroot = "1";
        job.jobParams.containerUserId = "0";
        ret = DLWorkspaceUtils.JobUtils.SubmitJob(job.ToString());
    }
    else if (op == "submit")
    {
        Dictionary<string, string> retdict = new Dictionary<string, string>();
        retdict.Add("jobId", Guid.NewGuid().ToString());
        ret = JsonConvert.SerializeObject(retdict);
    }
    else if (op == "list")
    {
        Dictionary<string, string> retdict = new Dictionary<string, string>();
        retdict.Add("phillyversion", "116");
        ret = JsonConvert.SerializeObject(retdict);
    }
    else if (op == "status")
    {
        string jobId = HttpContext.Request.Query["jobId"];
        jobId = jobId.Replace("application_", "");
        Job job = JobUtils.GetJobDetail(jobId);
        Dictionary<string, string> retdict = new Dictionary<string, string>();
        string dir = @"\\storage.cly.philly.selfhost.corp.microsoft.com\" + job.jobParams.dataPath.Replace("/", "\\") + "\\" + jobId + "\\";
        retdict.Add("dir", dir);
        retdict.Add("scratch", job.jobParams.workPath);
        retdict.Add("evalErr", "0");
        retdict.Add("finishDateTime", job.jobTime.ToString());
        retdict.Add("gpus", job.jobParams.gpu.ToString());
        retdict.Add("name", job.jobName);
        retdict.Add("appId", "application_" + job.jobId);
        retdict.Add("retries", "0");
        retdict.Add("preempt", "0");
        retdict.Add("progress", "0");
        retdict.Add("queueDateTime", job.jobTime.ToString());
        retdict.Add("startDateTime", job.jobTime.ToString());

        string status = job.jobStatus;
        // philly status: Pass, Queued, Running, Failed, Killed
        if (status == "finished")
        {
            status = "Pass";
        }
        else if (status == "failed" || status == "error")
        {
            status = "Failed";
        }
        else if (status == "queued" || status == "scheduling")
        {
            status = "Queued";
        }
        else if (status == "running" || status == "killing")
        {
            status = "Running";
        }
        else if (status == "Killed")
        {
            status = "Killed";
        }
        retdict.Add("status", status);
        retdict.Add("userName", job.userName.Replace("@microsoft.com", ""));
        retdict.Add("queue", "default");
        retdict.Add("vc", job.jobParams.vcId);
        ret = JsonConvert.SerializeObject(retdict);
        Console.WriteLine(ret);
    }
    else if (op == "abort")
    {
        Dictionary<string, string> retdict = new Dictionary<string, string>();
        string jobId = HttpContext.Request.Query["jobId"];
        jobId = jobId.Replace("application_", "");
        retdict.Add("jobKilled", jobId);
        ret = JsonConvert.SerializeObject(retdict);
        DLWorkspaceUtils.JobUtils.KillJob(jobId);
        Console.WriteLine(jobId);
    }
    return ret;
}
public static async Task<HttpResponseData> Run(
    [HttpTrigger(AuthorizationLevel.Function, "get", "post")] HttpRequestData req,
    FunctionContext executionContext)
{
    var log = executionContext.GetLogger("SubmitSubclipJob");
    log.LogInformation("C# HTTP trigger function processed a request.");

    string triggerStart = DateTime.UtcNow.ToString("yyMMddHHmmss");

    // Get request body data.
    string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
    var data = (RequestBodyModel)JsonConvert.DeserializeObject(requestBody, typeof(RequestBodyModel));

    // Return bad request if the live event name or live output name is not passed in
    if (data.LiveEventName == null || data.LiveOutputName == null)
    {
        return HttpRequest.ResponseBadRequest(req, "Please pass liveEventName and liveOutputName in the request body");
    }
    data.IntervalSec ??= 60;

    ConfigWrapper config = ConfigUtils.GetConfig();

    IAzureMediaServicesClient client;
    try
    {
        client = await Authentication.CreateMediaServicesClientAsync(config);
    }
    catch (Exception e)
    {
        if (e.Source.Contains("ActiveDirectory"))
        {
            log.LogError("TIP: Make sure that you have filled out the appsettings.json file before running this sample.");
        }
        log.LogError($"{e.Message}");
        return HttpRequest.ResponseBadRequest(req, e.Message);
    }

    // Set the polling interval for long running operations to 2 seconds.
    // The default value is 30 seconds for the .NET client SDK
    client.LongRunningOperationRetryTimeout = 2;

    try
    {
        // Ensure that you have the customized encoding Transform. This is really a one time setup operation.
        Transform transform = await TransformUtils.GetOrCreateSubclipTransform(client, log, config.ResourceGroup, config.AccountName, SubclipTransformName);
    }
    catch (ErrorResponseException ex)
    {
        return HttpRequest.ResponseBadRequest(req, LogUtils.LogError(log, ex, "Error when getting or creating the transform."));
    }

    var liveOutput = await client.LiveOutputs.GetAsync(config.ResourceGroup, config.AccountName, data.LiveEventName, data.LiveOutputName);

    // Let's analyze the client manifest and adjust times for the subclip job
    var doc = await LiveManifest.TryToGetClientManifestContentAsABlobAsync(client, config.ResourceGroup, config.AccountName, liveOutput.AssetName);
    var assetmanifestdata = LiveManifest.GetManifestTimingData(doc);

    if (assetmanifestdata.Error)
    {
        return HttpRequest.ResponseBadRequest(req, "Data cannot be read from live output / asset manifest.");
    }

    log.LogInformation("Timestamps : " + string.Join(",", assetmanifestdata.TimestampList.Select(n => n.ToString()).ToArray()));

    var livetime = TimeSpan.FromSeconds(assetmanifestdata.TimestampEndLastChunk / (double)assetmanifestdata.TimeScale);
    log.LogInformation($"Livetime : {livetime}");

    var starttime = LiveManifest.ReturnTimeSpanOnGOP(assetmanifestdata, livetime.Subtract(TimeSpan.FromSeconds((int)data.IntervalSec)));
    log.LogInformation($"Value starttime : {starttime}");

    if (data.LastSubclipEndTime != null)
    {
        var lastEndTime = (TimeSpan)data.LastSubclipEndTime;
        log.LogInformation($"Value lastEndTime : {lastEndTime}");

        var delta = (livetime - lastEndTime - TimeSpan.FromSeconds((int)data.IntervalSec)).Duration();
        log.LogInformation($"Delta: {delta}");

        if (delta < TimeSpan.FromSeconds(3 * (int)data.IntervalSec)) // less than 3 times the normal duration (3*60s)
        {
            starttime = lastEndTime;
            log.LogInformation($"Value new starttime : {starttime}");
        }
    }

    var duration = livetime - starttime;
    log.LogInformation($"Value duration: {duration}");
    if (duration == new TimeSpan(0)) // Duration is zero, this may happen sometimes!
    {
        return HttpRequest.ResponseBadRequest(req, "Stopping. Duration of subclip is zero.");
    }

    Asset outputAsset;
    try
    {
        // Output from the Job must be written to an Asset, so let's create one
        outputAsset = await AssetUtils.CreateAssetAsync(client, log, config.ResourceGroup, config.AccountName, liveOutput.Name + "-subclip-" + triggerStart, data.OutputAssetStorageAccount);
    }
    catch (ErrorResponseException ex)
    {
        return HttpRequest.ResponseBadRequest(req, LogUtils.LogError(log, ex, "Error when creating the output asset."));
    }

    JobInput jobInput = new JobInputAsset(
        assetName: liveOutput.AssetName,
        start: new AbsoluteClipTime(starttime.Subtract(TimeSpan.FromMilliseconds(100))),
        end: new AbsoluteClipTime(livetime.Add(TimeSpan.FromMilliseconds(100)))
    );

    Job job;
    try
    {
        job = await JobUtils.SubmitJobAsync(
            client,
            log,
            config.ResourceGroup,
            config.AccountName,
            SubclipTransformName,
            $"Subclip-{liveOutput.Name}-{triggerStart}",
            jobInput,
            outputAsset.Name
        );
    }
    catch (ErrorResponseException ex)
    {
        return HttpRequest.ResponseBadRequest(req, LogUtils.LogError(log, ex, "Error when submitting the job."));
    }

    AnswerBodyModel dataOk = new()
    {
        SubclipAssetName = outputAsset.Name,
        SubclipJobName = job.Name,
        SubclipTransformName = SubclipTransformName,
        SubclipEndTime = starttime + duration
    };
    return HttpRequest.ResponseOk(req, dataOk, HttpStatusCode.Accepted);
}
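As with the encoding function above, a hedged sketch of the expected request body, inferred from the RequestBodyModel members this function reads; exact JSON property casing depends on the model's serialization attributes.

// Hypothetical request payload (liveEventName and liveOutputName are required;
// intervalSec defaults to 60, lastSubclipEndTime and outputAssetStorageAccount are optional).
var exampleSubclipBody = JsonConvert.SerializeObject(new
{
    liveEventName = "myLiveEvent",
    liveOutputName = "myLiveOutput",
    intervalSec = 60,
    lastSubclipEndTime = (TimeSpan?)null,
    outputAssetStorageAccount = (string)null
});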
public AbstractPipelineJob<MeshDescriptor> CreateMeshJob(Vector3Int chunkID)
{
    Profiler.BeginSample("CreateMeshJob");

    var chunkDimensions = chunkManager.ChunkDimensions;
    var chunkData = chunkManager.GetReadOnlyChunkData(chunkID);

    //Copy chunk data to native array
    Profiler.BeginSample("VoxelsToNative");
    NativeArray<VoxelTypeID> voxels = chunkData.ToNative();
    Profiler.EndSample();

    NeighbourData neighbourData = new NeighbourData();
    //Cache neighbour data
    neighbourData = JobUtils.CacheNeighbourData(chunkID, chunkManager);

    var meshingJob = createMeshingJob(new MeshJobData(
        chunkDimensions.ToNative(),
        chunkManager.IncludeLighting,
        voxels,
        chunkData.NativeRotations(),
        chunkData.LightToNative(),
        neighbourData,
        voxelTypeManager.nativeMeshDatabase,
        voxelTypeManager.nativeVoxelTypeDatabase,
        Allocator.Persistent
    ));

    var indexingWrapper = new JobWrapper<SortIndicesByMaterialJob>();
    //AsDeferredJobArray takes the length etc at the time of execution, rather than now.
    indexingWrapper.job.allTriangleIndices = meshingJob.data.allTriangleIndices.AsDeferredJobArray();
    indexingWrapper.job.materialRuns = meshingJob.data.materialRuns.AsDeferredJobArray();
    indexingWrapper.job.packedRuns = new NativeList<MaterialRun>(Allocator.Persistent);
    indexingWrapper.job.packedIndices = new NativeList<int>(Allocator.Persistent);
    indexingWrapper.job.collisionMeshMaterialRunLength = meshingJob.data.collisionSubmesh.collisionMeshMaterialRunLength;

    Func<MeshDescriptor> cleanup = () =>
    {
        Profiler.BeginSample("MeshJobCleanup");
        Mesh mesh = new Mesh();

        if (meshingJob.data.vertices.Length >= ushort.MaxValue)
        {
            //Cope with bigger meshes
            mesh.indexFormat = UnityEngine.Rendering.IndexFormat.UInt32;
        }

        mesh.vertices = meshingJob.data.vertices.ToArray();
        if (meshingJob.data.includeLighting)
        {
            mesh.colors = meshingJob.data.vertexColours.ToArray();
        }
        mesh.SetUVs(0, meshingJob.data.uvs.ToArray());
        mesh.normals = meshingJob.data.normals.ToArray();

        mesh.subMeshCount = indexingWrapper.job.packedRuns.Length;

        MeshDescriptor meshDescriptor = new MeshDescriptor();
        meshDescriptor.materialsBySubmesh = new Material[mesh.subMeshCount];
        for (int i = 0; i < indexingWrapper.job.packedRuns.Length; i++)
        {
            var run = indexingWrapper.job.packedRuns[i];
            var slice = new NativeSlice<int>(indexingWrapper.job.packedIndices, run.range.start, run.range.Length);
            mesh.SetTriangles(slice.ToArray(), i);
            meshDescriptor.materialsBySubmesh[i] = voxelTypeManager.GetMaterial(run.materialID);
        }

        meshDescriptor.mesh = mesh;
        meshDescriptor.collidableLengthVertices = meshingJob.data.collisionSubmesh.collisionMeshLengthVertices[0];
        meshDescriptor.collidableLengthIndices = meshingJob.data.collisionSubmesh.collisionMeshLengthTriangleIndices[0];

        //Disposal
        meshingJob.Dispose();

        //Dispose of packed containers
        indexingWrapper.job.packedIndices.Dispose();
        indexingWrapper.job.packedRuns.Dispose();

        Profiler.EndSample();
        return meshDescriptor;
    };

    Profiler.EndSample();

    //Single threaded version
    if (!Parrallel)
    {
        return new BasicFunctionJob<MeshDescriptor>(() =>
        {
            meshingJob.Run();
            indexingWrapper.Run();
            return cleanup();
        });
    }

    var meshingHandle = meshingJob.Schedule();
    var handle = indexingWrapper.Schedule(meshingHandle);

    return new PipelineUnityJob<MeshDescriptor>(handle, cleanup);
}