/// <summary>
/// Resolves the task's input parameters and files through the package base and
/// fills <c>Incarnation</c> with the resolved inputs and the expected output file
/// names. Idempotent: the work is performed at most once per task instance.
/// </summary>
/// <returns>
/// <c>true</c> if inputs were processed by this call; <c>false</c> if they had
/// already been processed earlier.
/// </returns>
/// <exception cref="InvalidOperationException">
/// Thrown when an urgent task lacks the 'MinTime'/'MaxTime' execution parameters.
/// </exception>
public bool ProcessInputs() // todo : should be private. Fix after PackageBase fix
{
    lock (_taskLock)
    {
        bool processedNow = false;
        if (!_inputsProcessed)
        {
            // Urgent tasks must carry an execution-time window for the scheduler.
            if (this.Priority == TaskPriority.Urgent &&
                (!this.ExecParams.ContainsKey("MinTime") || !this.ExecParams.ContainsKey("MaxTime")))
            {
                throw new InvalidOperationException("Urgent tasks should have 'MinTime' and 'MaxTime' params");
            }

            var inputFilesTime = TimeSpan.Zero;
            Time.AddToOverheads(TaskTimeOverheads.PackageBase, () =>
            {
                Log.Debug("Processing inputs for task " + TaskId.ToString());
                try
                {
                    this.Params = PackageBaseProxy.UpdateInputs(this.PackageEngineState, this.Params);
                    // NOTE(review): this call passes InputFiles but its result overwrites
                    // Params — confirm the intended overload/assignment target.
                    this.Params = PackageBaseProxy.UpdateInputs(this.PackageEngineState, this.InputFiles);

                    this.Incarnation = PackageBaseProxy.ProcessInputFiles(this.PackageEngineState, out inputFilesTime);

                    bool expectGroups;
                    var expectedOutFiles = PackageBaseProxy.ListExpectedOutputs(this.PackageEngineState, out expectGroups);
                    this.Incarnation.ExpectedOutputFileNames = expectedOutFiles.ToArray();
                    this.Incarnation.CanExpectMoreFiles = expectGroups;

                    Log.Debug(String.Format("Expected outputs for task {2} are {0} {1}",
                        String.Join(", ", expectedOutFiles.Select(name => "'" + name + "'")),
                        expectGroups ? "with groups" : "",
                        TaskId
                    ));
                }
                catch (Exception e)
                {
                    // Remember the failure so it can be reported later; only a task that
                    // is already ReadyToExecute propagates the error immediately.
                    _inputsProcessingError = e.Message;
                    Log.Error(String.Format("Error while processing inputs for task {0}: {1}", this.TaskId, e));
                    if (this.State == TaskState.ReadyToExecute)
                    {
                        throw;
                    }
                }
            });
            Time.OverheadsSpecial["pb/inputFiles"] = inputFilesTime;

            _inputsProcessed = true;
            processedNow = true;
        }
        return processedNow;
    }
}
/// <summary>
/// Lazily initializes the task description, the compiled package definition and
/// the engine context. Safe to call repeatedly: members that are already set are
/// left untouched; only the storage path base is always overwritten.
/// </summary>
/// <param name="taskDescription">Source description; copied on first initialization.</param>
/// <param name="storagePathBase">Base path for task storage; assigned unconditionally.</param>
public void Init(TaskDescription taskDescription, string storagePathBase)
{
    StoragePathBase = storagePathBase;

    if (_taskDescription == null)
    {
        _taskDescription = new TaskDescription(taskDescription);
    }

    if (CompiledDef == null)
    {
        CompiledDef = PackageBaseProxy.GetCompiledDef(taskDescription.Package);
    }

    if (EngineContext == null)
    {
        EngineContext = new PackageEngine(CompiledDef).Ctx;
    }
}
/// <summary>
/// Marks the task as completed and, when a results folder is supplied, collects
/// the produced output files and parameters through the package base.
/// </summary>
/// <param name="ftpFolder">
/// FTP folder containing the task's results; pass null/empty to skip output collection.
/// </param>
internal void Complete(string ftpFolder)
{
    lock (_taskLock)
    {
        if (!String.IsNullOrEmpty(ftpFolder))
        {
            var outputFilesTime = TimeSpan.Zero;
            Time.AddToOverheads(TaskTimeOverheads.OutputFilesCopy, () => // todo : PB time here
            {
                IEnumerable<TaskFileDescription> outFiles = null;
                var outParams = PackageBaseProxy.ProcessOutputs(this.PackageEngineState, ftpFolder, out outFiles, out outputFilesTime);

                this.OutputFiles = outFiles.ToArray();
                this.OutputParams = outParams;
            });
            // Track how much of the wall time went into output-file handling.
            Time.OverheadsSpecial["pb/outputFiles"] = outputFilesTime;
        }

        State = TaskState.Completed;
        Log.Info(String.Format("Task {0} completed", this.TaskId));
        _lastEvent = Eventing.EventType.TaskCompleted;
    }
}
/// <summary>
/// Writes a CSV record comparing the model-estimated calculation time against the
/// real one for this task, and feeds the sample back into the package-base history.
/// All failures are logged and swallowed: statistics writing must never break the
/// task's normal flow.
/// </summary>
private void WriteModelCoefs()
{
    lock (_csvFilesLock)
    {
        try
        {
            // Only write stats when a model-based time estimation actually exists.
            // (The original code checked Estimation.ByModel != null twice; the
            // duplicate has been removed.)
            if (this.CurrentSchedule != null &&
                this.CurrentSchedule.Estimation != null &&
                this.CurrentSchedule.Estimation.ByModel != null &&
                this.CurrentSchedule.Estimation.ByModel.CalculationTime != null &&
                this.CurrentSchedule.Estimation.ByModel.CalculationTime.IsSet)
            {
                try
                {
                    // todo : remove history hack
                    PackageBaseProxy.AddHistorySample(new HistorySample(
                        this.Package,
                        this.CurrentSchedule.ResourceName,
                        this.CurrentSchedule.Nodes.ToArray(),
                        new Dictionary<string, string>(this.Params),
                        new Dictionary<string, double>(this.CurrentSchedule.Estimation.ModelCoeffs),
                        this.Time.Duration[TaskTimeMetric.Calculation],
                        new Easis.PackageBase.Engine.PackageEngine(
                            (Easis.PackageBase.Engine.CompiledModeDef)this.PackageEngineState.CompiledDef,
                            (Easis.PackageBase.Engine.PackageEngineContext)this.PackageEngineState.EngineContext.Clone())
                    ));
                }
                catch (Exception e)
                {
                    // History is best-effort: a failure here must not prevent the CSV write.
                    Log.Error("Could not add history sample: " + e.ToString());
                }

                double estimated = this.CurrentSchedule.Estimation.ByModel.CalculationTime.Value;
                double real = this.Time.Duration[TaskTimeMetric.Calculation].TotalSeconds;

                // Invariant culture keeps the CSV machine-readable regardless of host locale.
                var invariantCulture = System.Globalization.CultureInfo.InvariantCulture;

                // Each row is { header, value }; headers and values are split into
                // two files below.
                var table = new List<string[]>()
                {
                    new[] { "TaskId", this.TaskId.ToString() },
                    new[] { "Package", this.Package },
                    new[] { "State", this.State.ToString() },
                    new[] { "Resource.Node", this.CurrentSchedule.ResourceName + "."
                        + String.Join("+", this.CurrentSchedule.Nodes.Select(n => n.NodeName)) },
                    new[] { "DateTimeStarted", this.Time.WhenStarted[TaskTimeMetric.Calculation].ToString() },
                    new[] { "DateTimeEnded", this.Time.WhenFinished[TaskTimeMetric.Calculation].ToString() },
                    new[] { "EstimatedTime", estimated.ToString(invariantCulture) },
                    new[] { "RealTime", real.ToString(invariantCulture) },
                    new[] { "DiffAbs", (estimated - real).ToString(invariantCulture) },
                    new[] { "DiffRel", ((estimated - real) / real).ToString(invariantCulture) },
                    new[] { "Coeffs", "[" + String.Join(", ",
                        this.CurrentSchedule.Estimation.ModelCoeffs
                            .Select(pair => String.Format(invariantCulture, "{{\"{0}\": \"{1}\"}}", pair.Key, pair.Value))
                    ) + "]" },
                    new[] { "Params", "[" + String.Join(", ",
                        this.Params.Select(pair => String.Format("{{\"{0}\": \"{1}\"}}", pair.Key, pair.Value))
                    ) + "]" },
                };

                // Header file is rewritten on every call; the data file is append-only.
                string headerFileName = CONST.Path.ModelCoefHeadersFile;
                string headerContents = String.Join(";", table.Select(row => row.First())) + Environment.NewLine;
                File.WriteAllText(headerFileName, headerContents);

                string csvFileName = CONST.Path.ModelCoefCsvFile;
                string csvLine = String.Join(";", table.Select(row => row.Last())) + Environment.NewLine;
                File.AppendAllText(csvFileName, csvLine);
            }
        }
        catch (Exception e)
        {
            Log.Error("Could not write model params to file: " + e.ToString());
        }
    }
}
/// <summary>
/// Launches the task on the resource chosen by <paramref name="schedule"/>:
/// uploads input files to the incarnation FTP folder (for package-base-supported
/// packages), normalizes the command line, then submits the run to the controller
/// farm. On any error the task is failed via <c>Fail</c> instead of throwing.
/// </summary>
/// <param name="schedule">Chosen schedule (resource name, nodes, estimation).</param>
/// <param name="resources">Known resources; the one matching the schedule is used.</param>
public void Run(TaskSchedule schedule, IEnumerable<Resource> resources)
{
    lock (_taskLock)
    {
        try
        {
            var execStarted = DateTime.Now;
            CurrentSchedule = schedule;
            Params[CONST.Params.Method] = Method;

            var resource = resources.First(r => r.ResourceName == schedule.ResourceName);
            string incarnatedFtpFolder = GetFtpFolder(schedule.Nodes.First(), resource, CopyPhase.In);

            // Package-base-supported packages get their incarnated inputs copied
            // from storage onto the resource's FTP; others previously went through
            // the (now disabled) adapter pipeline below.
            if (PackageBaseProxy.GetSupportedPackageNames()
                .Any(name => String.Equals(name, Package, StringComparison.InvariantCultureIgnoreCase))
            )
            {
                //ProcessInputs();

                Time.AddToOverheads(TaskTimeOverheads.InputFilesCopy, () =>
                {
                    Log.Debug("Uploading incarnated inputs");
                    foreach (var file in Incarnation.FilesToCopy)
                    {
                        Log.Debug(file.FileName + ": started");
                        // Ensure the remote directory exists (FTP paths use forward slashes).
                        IOProxy.Ftp.MakePath(incarnatedFtpFolder + Path.GetDirectoryName(file.FileName).Replace("\\", "/"));
                        Log.Debug(file.FileName + ": path been made");
                        IOProxy.Storage.Download(file.StorageId, incarnatedFtpFolder + file.FileName);
                        Log.Debug(file.FileName + ": downloaded");
                    }
                    Log.Debug("Uploading incarnated inputs done");
                });
            }
            else
            {
                //ApplyAdapters(Broker.Adapters.Where(a => a.Type == AdapterType.Machine), incarnatedFtpFolder);
                //ApplyAdapters(Broker.Adapters.Where(a => a.Type == AdapterType.Package), incarnatedFtpFolder);
                //ApplyAdapters(Broker.Adapters.Where(a => a.Type == AdapterType.Mixed), incarnatedFtpFolder);
            }

            Incarnation.PackageName = Package;
            Incarnation.UserCert = UserCert;

            if (String.IsNullOrWhiteSpace(Incarnation.CommandLine))
            {
                throw new Exception("Impossible to run task with empty command line");
            }

            // If the command line starts with the package name and has no "{0}"
            // placeholder yet, replace the leading package name with the placeholder
            // so the executable path can be substituted later.
            if (!Incarnation.CommandLine.Contains("{0}") &&
                Incarnation.CommandLine.StartsWith(Package, StringComparison.InvariantCultureIgnoreCase))
            {
                Incarnation.CommandLine = "{0}" + Incarnation.CommandLine.Substring(Package.Length);
            }

            Log.Stats("T_adapters", this.WfId, this.TaskId, DateTime.Now - execStarted);

            Time.AddToOverheads(TaskTimeOverheads.Provider, () =>
            {
                //var provider = Broker.ProviderByName(resource.ProviderName);
                var controller = Discovery.GetControllerFarm(resource);
                try
                {
                    //Incarnation.ProvidedTaskId = provider.Run(this.TaskId, this.Incarnation, resource, schedule.Nodes);

                    // Flatten the incarnation + schedule into the farm-service run context.
                    var runContext = new ServiceProxies.ControllerFarmService.TaskRunContext()
                    {
                        TaskId = this.TaskId,
                        //Incarnation = this.Incarnation,
                        UserCert = this.UserCert,
                        PackageName = this.Incarnation.PackageName,
                        CommandLine = this.Incarnation.CommandLine,
                        InputFiles = this.Incarnation.FilesToCopy.Select(f => new ServiceProxies.ControllerFarmService.FileContext()
                        {
                            FileName = f.FileName,
                            StorageId = f.StorageId,
                        }).ToArray(),
                        ExpectedOutputFileNames = this.Incarnation.ExpectedOutputFileNames.ToArray(),
                        NodesConfig = schedule.Nodes.Select(n => new ServiceProxies.ControllerFarmService.NodeRunConfig()
                        {
                            ResourceName = n.ResourceName,
                            NodeName = n.NodeName,
                            Cores = n.Cores
                        }).ToArray()
                    };

                    Log.Debug("Running task on controller: " + TaskId.ToString());
                    controller.Run(runContext);
                    Log.Debug("Run done: " + TaskId.ToString());

                    // Service-proxy pattern: Close() on success, Abort() on failure.
                    controller.Close();
                }
                catch (Exception e)
                {
                    controller.Abort();
                    Log.Error("Exception on Task.Run for task " + this.TaskId + ": " + e.ToString());
                    throw;
                }
            });

            State = TaskState.Started;
            Time.Started(TaskTimeMetric.Calculation);
            Log.Stats("T_clust_start", this.WfId, this.TaskId, DateTime.Now);
            _lastEvent = Eventing.EventType.TaskStarted;
        }
        catch (Exception e)
        {
            Log.Error(String.Format("Error on executing task {0}: {1}\n{2}",
                TaskId, e.Message, e.StackTrace
            ));
            Fail(reason: e.Message);
        }
    }
}