/// <summary>
/// Copy constructor used by the cloning infrastructure. Registers the clone
/// and duplicates the serialized payload so this instance owns an
/// independent byte array.
/// </summary>
protected TaskData(TaskData original, Cloner cloner) {
  cloner.RegisterClonedObject(original, this);
  // Deep-copy the payload; a null payload stays null.
  if (original.Data != null) {
    this.Data = (byte[])original.Data.Clone();
  }
  this.LastUpdate = original.LastUpdate;
}
/// <summary>
/// Reconstructs a HiveTask from server-side task metadata and its serialized payload.
/// </summary>
/// <param name="task">server-side task metadata</param>
/// <param name="taskData">container holding the serialized ItemTask bytes</param>
/// <param name="autoCreateChildHiveTasks">
/// intended to control child-task synchronization while ItemTask is assigned
/// (the ItemTask setter may react to the flag) — but see NOTE below
/// </param>
public HiveTask(Task task, TaskData taskData, bool autoCreateChildHiveTasks) {
  // Apply the caller's sync preference before assigning ItemTask, so the
  // property setter sees it.
  this.syncTasksWithOptimizers = autoCreateChildHiveTasks;
  this.Task = task;
  try {
    this.ItemTask = PersistenceUtil.Deserialize<ItemTask>(taskData.Data);
  } catch {
    // Best effort: a corrupt or incompatible payload leaves ItemTask null
    // instead of failing construction. NOTE(review): the swallowed exception
    // is not logged anywhere — consider surfacing it.
    this.ItemTask = null;
  }
  this.childHiveTasks = new ItemList<HiveTask>();
  // NOTE(review): this unconditionally re-enables syncing, discarding the
  // autoCreateChildHiveTasks value for the rest of the object's lifetime.
  // Confirm the clobber is intentional (i.e. the parameter only matters
  // during the ItemTask assignment above).
  this.syncTasksWithOptimizers = true;
  RegisterChildHiveTasksEvents();
}
/// <summary>
/// Fetches the serialized payload for <paramref name="task"/> from the Hive
/// service, throttled by the download semaphore. Returns null when an abort
/// has been requested; the TaskData component is null if every retry failed.
/// </summary>
protected Tuple<Task, TaskData> DownloadTaskData(Task task) {
  downloadSemaphore.WaitOne();
  TaskData data = null;
  try {
    if (abort) return null;
    // Retry the service call a configurable number of times before giving up.
    HiveClient.TryAndRepeat(() => {
      data = HiveServiceLocator.Instance.CallHiveService(s => s.GetTaskData(task.Id));
    }, Settings.Default.MaxRepeatServiceCalls, "Failed to download task data.");
  } finally {
    // Release the download slot on every path, including the abort return.
    downloadSemaphore.Release();
  }
  return Tuple.Create(task, data);
}
/// <summary>
/// Creates a TaskData object containing the Task and the IJob-Object as byte[]
/// </summary>
/// <param name="withoutChildOptimizers">
/// if true the Child-Optimizers will not be serialized (if the task contains an Experiment);
/// not used by this base implementation
/// </param>
/// <param name="plugins">
/// receives the plugins required to deserialize the task; an empty list when
/// there is no ItemTask
/// </param>
public virtual TaskData GetAsTaskData(bool withoutChildOptimizers, out List<IPluginDescription> plugins) {
  if (ItemTask == null) {
    plugins = new List<IPluginDescription>();
    return null;
  }

  IEnumerable<Type> usedTypes;
  byte[] payload = PersistenceUtil.Serialize(ItemTask, out usedTypes);
  TaskData result = new TaskData() { TaskId = task.Id, Data = payload };
  plugins = PluginUtil.GetPluginsForTask(usedTypes, ItemTask);
  return result;
}
/// <summary>
/// Serializes this engine task into a TaskData object. The initial operation
/// is re-parented, cloned and detached from its parent scopes first so the
/// serialized payload does not drag the whole scope tree along.
/// </summary>
/// <param name="withoutChildOptimizers">not used by this override</param>
/// <param name="plugins">
/// receives the plugins needed to deserialize the task; an empty list when
/// there is no ItemTask
/// </param>
public override TaskData GetAsTaskData(bool withoutChildOptimizers, out List<IPluginDescription> plugins) {
  if (ItemTask == null) {
    plugins = new List<IPluginDescription>();
    return(null);
  }
  // NOTE(review): unlike the sibling overrides, jobData.TaskId is never set
  // here — confirm the caller assigns it before upload.
  TaskData jobData = new TaskData();
  IEnumerable<Type> usedTypes;
  // clone operation and remove unnecessary scopes; don't do this earlier to avoid memory problems
  ((IAtomicOperation)ItemTask.InitialOperation).Scope.Parent = parentScopeClone;
  // NOTE(review): this mutates ItemTask in place (InitialOperation is replaced
  // by a detached clone), so callers observe the modified task afterwards —
  // confirm that is intended. The statement order below is significant.
  ItemTask.InitialOperation = (IOperation)ItemTask.InitialOperation.Clone();
  ((IAtomicOperation)ItemTask.InitialOperation).Scope.ClearParentScopes();
  jobData.Data = PersistenceUtil.Serialize(ItemTask, out usedTypes);
  plugins = PluginUtil.GetPluginsForTask(usedTypes, ItemTask);
  return(jobData);
}
/// <summary>
/// Creates a TaskData object containing the Task and the IJob-Object as byte[]
/// </summary>
/// <param name="withoutChildOptimizers">
/// if true the Child-Optimizers will not be serialized (if the task contains an Experiment)
/// </param>
/// <param name="plugins">
/// receives the plugins required to deserialize the task; an empty list when
/// there is no ItemTask
/// </param>
public override TaskData GetAsTaskData(bool withoutChildOptimizers, out List<IPluginDescription> plugins) {
  if (ItemTask == null) {
    plugins = new List<IPluginDescription>();
    return null;
  }

  IEnumerable<Type> usedTypes;
  byte[] payload;

  if (withoutChildOptimizers && ItemTask.Item is Optimization.Experiment) {
    // use a cloned task, so that the childHiveJob don't get confused
    OptimizerTask strippedTask = (OptimizerTask)ItemTask.Clone();
    strippedTask.OptimizerAsExperiment.Optimizers.Clear();
    payload = PersistenceUtil.Serialize(strippedTask, out usedTypes);
  } else if (withoutChildOptimizers && ItemTask.Item is Optimization.BatchRun) {
    // same idea for batch runs: drop the nested optimizer from the clone
    OptimizerTask strippedTask = (OptimizerTask)ItemTask.Clone();
    strippedTask.OptimizerAsBatchRun.Optimizer = null;
    payload = PersistenceUtil.Serialize(strippedTask, out usedTypes);
  } else {
    if (ItemTask.Item is IAlgorithm) {
      // avoid storing the algorithm in runs to reduce size
      ((IAlgorithm)ItemTask.Item).StoreAlgorithmInEachRun = false;
    }
    payload = PersistenceUtil.Serialize(ItemTask, out usedTypes);
  }

  TaskData jobData = new TaskData() { TaskId = task.Id, Data = payload };
  plugins = PluginUtil.GetPluginsForTask(usedTypes, ItemTask);
  return jobData;
}