/// <summary>
/// Creates a TaskData object containing the Task and the IJob-Object as byte[]
/// </summary>
/// <param name="withoutChildOptimizers">
/// if true the Child-Optimizers will not be serialized (if the task contains an Experiment);
/// not relevant for this base implementation
/// </param>
public virtual TaskData GetAsTaskData(bool withoutChildOptimizers, out List<IPluginDescription> plugins) {
  if (ItemTask == null) {
    plugins = new List<IPluginDescription>();
    return null;
  }

  // Serialize the task payload; the serializer reports every type it touched
  // so the plugin dependencies can be derived afterwards.
  IEnumerable<Type> typesUsed;
  byte[] serializedTask = PersistenceUtil.Serialize(ItemTask, out typesUsed);

  TaskData result = new TaskData() {
    TaskId = task.Id,
    Data = serializedTask
  };
  plugins = PluginUtil.GetPluginsForTask(typesUsed, ItemTask);
  return result;
}
/// <summary>
/// Creates a TaskData object containing the Task and the IJob-Object as byte[]
/// </summary>
/// <param name="withoutChildOptimizers">
/// not used by this override — an engine task carries no child optimizers
/// </param>
public override TaskData GetAsTaskData(bool withoutChildOptimizers, out List<IPluginDescription> plugins) {
  if (ItemTask == null) {
    plugins = new List<IPluginDescription>();
    return null;
  }

  IEnumerable<Type> usedTypes;

  // clone operation and remove unnecessary scopes; don't do this earlier to avoid memory problems
  ((IAtomicOperation)ItemTask.InitialOperation).Scope.Parent = parentScopeClone;
  ItemTask.InitialOperation = (IOperation)ItemTask.InitialOperation.Clone();
  ((IAtomicOperation)ItemTask.InitialOperation).Scope.ClearParentScopes();

  TaskData jobData = new TaskData() {
    // FIX: populate TaskId like the sibling GetAsTaskData implementations do;
    // previously it was left at the default Guid.
    TaskId = task.Id,
    Data = PersistenceUtil.Serialize(ItemTask, out usedTypes)
  };
  plugins = PluginUtil.GetPluginsForTask(usedTypes, ItemTask);
  return jobData;
}
/// <summary>
/// Creates a TaskData object containing the Task and the IJob-Object as byte[]
/// </summary>
/// <param name="withoutChildOptimizers">
/// if true the Child-Optimizers will not be serialized (if the task contains an Experiment)
/// </param>
public override TaskData GetAsTaskData(bool withoutChildOptimizers, out List<IPluginDescription> plugins) {
  if (ItemTask == null) {
    plugins = new List<IPluginDescription>();
    return null;
  }

  IEnumerable<Type> typesUsed;
  byte[] serializedTask;

  if (withoutChildOptimizers && ItemTask.Item is Optimization.Experiment) {
    // use a cloned task, so that the childHiveJob don't get confused
    OptimizerTask strippedTask = (OptimizerTask)ItemTask.Clone();
    strippedTask.OptimizerAsExperiment.Optimizers.Clear();
    serializedTask = PersistenceUtil.Serialize(strippedTask, out typesUsed);
  } else if (withoutChildOptimizers && ItemTask.Item is Optimization.BatchRun) {
    // same idea for batch runs: strip the nested optimizer from a clone
    OptimizerTask strippedTask = (OptimizerTask)ItemTask.Clone();
    strippedTask.OptimizerAsBatchRun.Optimizer = null;
    serializedTask = PersistenceUtil.Serialize(strippedTask, out typesUsed);
  } else {
    if (ItemTask.Item is IAlgorithm) {
      // avoid storing the algorithm in runs to reduce size
      ((IAlgorithm)ItemTask.Item).StoreAlgorithmInEachRun = false;
    }
    serializedTask = PersistenceUtil.Serialize(ItemTask, out typesUsed);
  }

  TaskData result = new TaskData() {
    TaskId = task.Id,
    Data = serializedTask
  };
  plugins = PluginUtil.GetPluginsForTask(typesUsed, ItemTask);
  return result;
}
/// <summary>
/// Uploads the given task and all its child-jobs while setting the proper parentJobId values for the childs
/// </summary>
/// <param name="parentHiveTask">shall be null if its the root task</param>
private void UploadTaskWithChildren(IProgress progress, HiveTask hiveTask, HiveTask parentHiveTask, int[] taskCount, int totalJobCount, Guid configPluginId, Guid jobId, ILog log, CancellationToken cancellationToken) {
  // Throttle the number of concurrent uploads; the slot is released either
  // explicitly before waiting on the children, or in the finally block on error.
  taskUploadSemaphore.WaitOne();
  bool semaphoreReleased = false;
  try {
    cancellationToken.ThrowIfCancellationRequested();
    // taskCount[0] is a counter shared by all parallel/recursive upload calls,
    // guarded by jobCountLocker; it drives the progress reporting below.
    lock (jobCountLocker) {
      taskCount[0]++;
    }
    TaskData taskData;
    List<IPluginDescription> plugins;
    if (hiveTask.ItemTask.ComputeInParallel) {
      // parent task: serialize without child optimizers — the children are
      // uploaded individually by the recursive calls at the end of this method
      hiveTask.Task.IsParentTask = true;
      hiveTask.Task.FinishWhenChildJobsFinished = true;
      taskData = hiveTask.GetAsTaskData(true, out plugins);
    } else {
      hiveTask.Task.IsParentTask = false;
      hiveTask.Task.FinishWhenChildJobsFinished = false;
      taskData = hiveTask.GetAsTaskData(false, out plugins);
    }
    cancellationToken.ThrowIfCancellationRequested();

    // Resolve/upload plugin dependencies; pluginLocker serializes access to the
    // shared onlinePlugins / alreadyUploadedPlugins caches across upload threads.
    TryAndRepeat(() => {
      if (!cancellationToken.IsCancellationRequested) {
        lock (pluginLocker) {
          HiveServiceLocator.Instance.CallHiveService((s) => hiveTask.Task.PluginsNeededIds = PluginUtil.GetPluginDependencies(s, this.onlinePlugins, this.alreadyUploadedPlugins, plugins));
        }
      }
    }, Settings.Default.MaxRepeatServiceCalls, "Failed to upload plugins");
    cancellationToken.ThrowIfCancellationRequested();
    hiveTask.Task.PluginsNeededIds.Add(configPluginId);
    hiveTask.Task.JobId = jobId;

    log.LogMessage(string.Format("Uploading task ({0} kb, {1} objects)", taskData.Data.Count() / 1024, hiveTask.ItemTask.GetObjectGraphObjects().Count()));
    // Add the task on the server (as child of the parent, if any); the server
    // assigns the id, which is stored back so the children can reference it.
    TryAndRepeat(() => {
      if (!cancellationToken.IsCancellationRequested) {
        if (parentHiveTask != null) {
          hiveTask.Task.Id = HiveServiceLocator.Instance.CallHiveService((s) => s.AddChildTask(parentHiveTask.Task.Id, hiveTask.Task, taskData));
        } else {
          hiveTask.Task.Id = HiveServiceLocator.Instance.CallHiveService((s) => s.AddTask(hiveTask.Task, taskData));
        }
      }
    }, Settings.Default.MaxRepeatServiceCalls, "Failed to add task", log);
    cancellationToken.ThrowIfCancellationRequested();

    lock (jobCountLocker) {
      progress.ProgressValue = (double)taskCount[0] / totalJobCount;
      progress.Message = string.Format("Uploaded task ({0} of {1})", taskCount[0], totalJobCount);
    }

    // Upload all children in parallel; each recursive call acquires its own
    // semaphore slot. Faulted child uploads are logged via the continuation.
    var tasks = new List<TS.Task>();
    foreach (HiveTask child in hiveTask.ChildHiveTasks) {
      var task = TS.Task.Factory.StartNew((tuple) => {
        var arguments = (Tuple<HiveTask, HiveTask>)tuple;
        UploadTaskWithChildren(progress, arguments.Item1, arguments.Item2, taskCount, totalJobCount, configPluginId, jobId, log, cancellationToken);
      }, new Tuple<HiveTask, HiveTask>(child, hiveTask));
      task.ContinueWith((x) => log.LogException(x.Exception), TaskContinuationOptions.OnlyOnFaulted);
      tasks.Add(task);
    }
    taskUploadSemaphore.Release();
    semaphoreReleased = true; // the semaphore has to be release before waitall! (otherwise the child uploads could deadlock waiting for a slot)
    TS.Task.WaitAll(tasks.ToArray());
  } finally {
    if (!semaphoreReleased) {
      taskUploadSemaphore.Release();
    }
  }
}