/// <summary>
/// Creates set of spots
/// </summary>
/// <param name="spots">entities to be persisted</param>
/// <returns>List of created entities</returns>
private IEnumerable<Spot> CreateSpots(List<CreateSpot> spots)
{
    bool isScheduleDataUploadStarted;

    // Snapshot the "first schedule data upload" state under a machine-wide lock
    // so concurrent uploads cannot both observe empty repositories.
    using (MachineLock.Create("xggameplan.checkisscheduledatauploadstarted", TimeSpan.FromSeconds(30)))
    {
        isScheduleDataUploadStarted = _repository.CountAll == 0 && _breakRepository.CountAll == 0;
    }

    var persistedSpots = new List<Spot>();

    foreach (var createRequest in spots)
    {
        var entity = _mapper.Map<Spot>(createRequest);
        entity.Uid = Guid.NewGuid();
        _repository.Add(entity);
        persistedSpots.Add(entity);
    }

    // Generate notification for schedule data upload started
    if (isScheduleDataUploadStarted)
    {
        _auditEventRepository.Insert(AuditEventFactory.CreateAuditEventForScheduleDataUploadStarted(0, 0, null));
    }

    return persistedSpots;
}
/// <summary>
/// Updates the TaskInstance status
/// </summary>
/// <param name="id">Identifier of the task instance to update</param>
/// <param name="repositoryFactory">Factory used to open a repository scope</param>
/// <param name="status">New status; left unchanged when null</param>
/// <param name="timeCompleted">New completion time; left unchanged when null</param>
/// <param name="timeLastActive">New last-active time; left unchanged when null</param>
public static void UpdateTaskInstanceStatus(Guid id, IRepositoryFactory repositoryFactory, TaskInstanceStatues? status = null, DateTime? timeCompleted = null, DateTime? timeLastActive = null)
{
    // Serialise updates per task instance across the machine so concurrent
    // writers cannot interleave partial status changes.
    using (MachineLock.Create($"xggameplan.TaskExecutor.UpdateTaskInstanceStatus.{id}", TimeSpan.FromSeconds(60)))
    using (var scope = repositoryFactory.BeginRepositoryScope())
    {
        var repository = scope.CreateRepository<ITaskInstanceRepository>();
        var instance = repository.Get(id);

        // Only overwrite the fields the caller actually supplied.
        if (status.HasValue)
        {
            instance.Status = status.Value;
        }

        if (timeCompleted.HasValue)
        {
            instance.TimeCompleted = timeCompleted.Value;
        }

        if (timeLastActive.HasValue)
        {
            instance.TimeLastActive = timeLastActive.Value;
        }

        repository.Update(instance);
        repository.SaveChanges();
    }
}
/// <summary>
/// Creates the posted breaks and attaches them to their (sales area, day) schedules.
/// </summary>
/// <param name="breaks">Breaks to create; must be non-empty and valid.</param>
public IHttpActionResult Post([FromBody] List<CreateBreak> breaks)
{
    if (breaks is null || !breaks.Any() || !ModelState.IsValid)
    {
        return BadRequest(ModelState);
    }

    // Determine if they've just started uploading schedule data
    bool isScheduleDataUploadStarted;
    using (MachineLock.Create("xggameplan.checkisscheduledatauploadstarted", TimeSpan.FromSeconds(30)))
    {
        isScheduleDataUploadStarted = _breakRepository.CountAll == 0 && _spotRepository.CountAll == 0;
    }

    ValidateBreaks(breaks);
    if (!ModelState.IsValid)
    {
        return BadRequest(ModelState);
    }

    // zero out BroadcastDate's times
    foreach (var incomingBreak in breaks)
    {
        if (incomingBreak.BroadcastDate.HasValue)
        {
            incomingBreak.BroadcastDate = incomingBreak.BroadcastDate.Value.Date;
        }
    }

    // group by date and channels
    breaks
        .GroupBy(b => new { b.ScheduledDate.Date, b.SalesArea })
        .ForEach(group =>
        {
            // One writer per (sales area, day) schedule at a time.
            using (MachineLock.Create(string.Format("xggameplan.scheduleday.{0}.{1}", group.Key.SalesArea, group.Key.Date), new TimeSpan(0, 10, 0)))
            {
                var schedule = _scheduleRepository.GetOrCreateSchedule(group.Key.SalesArea, group.Key.Date);

                var mappedBreaks = _mapper.Map<List<Break>>(group.ToList());
                LoadBreakProperties(ref mappedBreaks);

                _breakRepository.Add(mappedBreaks);
                schedule.Breaks = mappedBreaks.ToList();
                _scheduleRepository.Add(schedule);
                _scheduleRepository.SaveChanges();
            }
        });

    // Generate notification for schedule data upload started
    if (isScheduleDataUploadStarted)
    {
        _auditEventRepository.Insert(AuditEventFactory.CreateAuditEventForScheduleDataUploadStarted(0, 0, null));
    }

    return Ok();
}
/// <summary>
/// Runs the Smooth process for one sales area over the given date/time period.
/// Any exception raised during smoothing is captured (not rethrown) and passed
/// to the OnSmoothComplete subscribers along with whatever output was produced.
/// </summary>
/// <param name="runId">Identifier of the overall run.</param>
/// <param name="firstScenarioId">Identifier of the first scenario of the run.</param>
/// <param name="salesArea">Sales area being smoothed; its Name keys the machine lock.</param>
/// <param name="processorDateTime">Timestamp passed through to the worker.</param>
/// <param name="smoothPeriod">Date/time range to smooth.</param>
/// <param name="threadSafeCollections">Immutable shared data for the worker.</param>
/// <param name="raiseInfo">Callback for informational messages.</param>
/// <param name="raiseWarning">Callback for warning messages.</param>
/// <param name="raiseException">Callback for exception reporting.</param>
public void SmoothSalesAreaForDateTimePeriod(
    Guid runId,
    Guid firstScenarioId,
    SalesArea salesArea,
    DateTime processorDateTime,
    DateTimeRange smoothPeriod,
    ImmutableSmoothData threadSafeCollections,
    Action<string> raiseInfo,
    Action<string> raiseWarning,
    Action<string, Exception> raiseException)
{
    SmoothOutput smoothOutput = null;
    Exception caughtException = null;

    try
    {
        // Only one Smooth per sales area machine-wide; generous 1-hour timeout.
        using (MachineLock.Create($"SmoothEngine.Smooth.{salesArea.Name}", new TimeSpan(1, 0, 0)))
        {
            var worker = new SmoothWorkerForSalesAreaDuringDateTimePeriod(
                _repositoryFactory,
                _smoothLogFileFolder,
                threadSafeCollections,
                _clashExposureCountService,
                raiseInfo,
                raiseWarning,
                raiseException);

            // Define handler for worker notification of day complete
            worker.OnSmoothBatchComplete += (sender, currentFromDateTime, currentToDateTime, recommendations, smoothFailures) =>
            {
                // Notify parent
                OnSmoothBatchComplete?.Invoke(this, salesArea, currentFromDateTime, currentToDateTime, recommendations, smoothFailures);
            };

            smoothOutput = worker.ActuallyStartSmoothing(
                runId,
                firstScenarioId,
                processorDateTime,
                smoothPeriod,
                salesArea);
        }
    }
    catch (Exception ex)
    {
        // Swallow deliberately: the failure is reported via OnSmoothComplete below.
        caughtException = ex;
        Debug.WriteLine(ex.ToString());
    }
    finally
    {
        // Always notify completion — smoothOutput may be null if smoothing failed,
        // caughtException is null on success.
        OnSmoothComplete?.Invoke(this, salesArea, caughtException, smoothOutput);
    }
}
/// <summary>
/// Re-attaches the given medium to every machine recorded in the reattach stack,
/// consuming the stack as it goes.
/// </summary>
/// <param name="vdi">Medium to attach.</param>
/// <param name="att">Reattach records previously captured by Detach.</param>
public static void Reattach(IMedium vdi, Stack<MediamReattachInfo> att)
{
    while (att.Count > 0)
    {
        var info = att.Pop();

        // Machine may no longer exist; skip it rather than fail.
        var machine = Client.VBox.FindMachine(info.MachineId);
        if (machine == null)
        {
            continue;
        }

        using (var machineLock = new MachineLock(machine))
        {
            machineLock.Raw.AttachDevice(info.ControllerName, info.ContollerPort, info.DeviceSlot, info.DeviceType, vdi);
        }
    }
}
/// <summary>
/// Executes the identified system task, rejecting unknown task IDs.
/// </summary>
/// <param name="id">Identifier of the system task to execute.</param>
public IHttpActionResult PostExecuteTask([FromUri] string id)
{
    if (!_systemTasksManager.TaskExists(id))
    {
        return NotFound();
    }

    // Prevent simultaneous execution of task
    using (MachineLock.Create(string.Format("xggameplan.systemtasks.{0}.execute", id), TimeSpan.FromSeconds(60)))
    {
        return Ok(_systemTasksManager.ExecuteTask(id));
    }
}
/// <summary>
/// Detaches the given medium from every machine that currently lists it,
/// recording each detachment so <c>Reattach</c> can restore it later.
/// </summary>
/// <param name="vdi">Medium to detach.</param>
/// <returns>Stack of reattach records, most recently detached machine on top.</returns>
/// <exception cref="ArgumentException">
/// A machine references the medium but exposes no matching medium attachment.
/// </exception>
public static Stack<MediamReattachInfo> Detach(IMedium vdi)
{
    var attInfo = new Stack<MediamReattachInfo>();

    foreach (var machine in Client.VBox.Machines.Cast<IMachine>().Where(m => vdi.MachineIds.Cast<string>().Any(id => string.Equals(m.Id, id, StringComparison.Ordinal))))
    {
        var medAtt = machine.MediumAttachments.Cast<IMediumAttachment>().FirstOrDefault(ma => ma.Medium?.Id == vdi.Id);
        if (medAtt == null)
        {
            // Fix: the previous single-argument ArgumentException(nameof(vdi)) used
            // "vdi" as the *message* and never set ParamName (CA2208). Supply a real
            // message and the parameter name via the two-argument overload.
            throw new ArgumentException($"Medium '{vdi.Id}' is referenced by machine '{machine.Id}' but has no medium attachment.", nameof(vdi));
        }

        attInfo.Push(new MediamReattachInfo(machine, vdi, medAtt));

        using (var ml = new MachineLock(machine))
        {
            ml.Raw.DetachDevice(medAtt.Controller, medAtt.Port, medAtt.Device);
        }
    }

    return attInfo;
}
/// <summary>
/// Creates the posted programmes and attaches them to their (sales area, day) schedules.
/// </summary>
/// <param name="programs">Programmes to create; must be non-empty and valid.</param>
public IHttpActionResult Post([FromBody] List<CreateProgramme> programs)
{
    if (programs == null || !programs.Any() || !ModelState.IsValid)
    {
        return BadRequest(ModelState);
    }

    ValidatePrograms(programs);

    var programmeList = _mapper.Map<IEnumerable<Programme>>(programs)
        .OrderBy(p => p.StartDateTime)
        .SequentiallyCount<Programme>(new ProgrammePrgtNoSequenceCounter())
        .ToList();

    _programmeRepository.Add(programmeList);

    // group by date and channels
    programmeList
        .GroupBy(p => new { p.StartDateTime.Date, p.SalesArea })
        .ForEach(group =>
        {
            // One writer per (sales area, day) schedule at a time.
            using (MachineLock.Create(string.Format("xggameplan.scheduleday.{0}.{1}", group.Key.SalesArea, group.Key.Date), new TimeSpan(0, 10, 0)))
            using (var scope = _repositoryFactory.BeginRepositoryScope())
            {
                var scheduleRepository = scope.CreateRepository<IScheduleRepository>();
                var schedule = scheduleRepository.GetOrCreateSchedule(group.Key.SalesArea, group.Key.Date);

                if (schedule.Programmes is null)
                {
                    schedule.Programmes = new List<Programme>();
                }

                schedule.Programmes.AddRange(group);
                scheduleRepository.Add(schedule);
                scheduleRepository.SaveChanges();
            }
        });

    return Ok();
}
/// <summary>
/// Starts a single scenario of a run, either via the auto-distributed path
/// (upload inputs and create an AutoBook request) or by acquiring a free
/// AutoBook instance directly. Exceptions are logged, not rethrown, so other
/// scenarios can still be started; a locked-but-unused AutoBook is released
/// in the finally block.
/// </summary>
/// <param name="run">The run the scenario belongs to.</param>
/// <param name="scenario">Scenario to start.</param>
/// <param name="autoBookInstanceConfigurationsForRun">Candidate AutoBook configurations, tried in order.</param>
/// <param name="autoBookRequiredStorageGB">Storage requirement used when selecting an AutoBook.</param>
/// <param name="runInstances">Collector for successfully started run instances.</param>
/// <param name="newScenarioStatuses">Pending status updates; this scenario's entry is removed once handled here.</param>
/// <param name="scenarioSyncStatuses">Per-scenario sync flags; set false so status isn't updated again at the end.</param>
/// <param name="autoDistributed">True to use the auto-distributed request path instead of direct AutoBook allocation.</param>
public void Execute(
    Run run,
    RunScenario scenario,
    IReadOnlyCollection<AutoBookInstanceConfiguration> autoBookInstanceConfigurationsForRun,
    double autoBookRequiredStorageGB,
    ConcurrentBag<RunInstance> runInstances,
    ConcurrentDictionary<Guid, ScenarioStatuses> newScenarioStatuses,
    ConcurrentDictionary<Guid, bool> scenarioSyncStatuses,
    bool autoDistributed)
{
    AutoBookDomainObject autoBook = null;
    IAutoBook autoBookInterface = null;
    bool runStarted = false;
    RaiseInfo($"Begin Execute for ScenarioID: { scenario.Id}");
    try
    {
        AutoBookInstanceConfiguration runAutoBookInstanceConfiguration = null;
        if (autoDistributed)
        {
            RaiseInfo($"AutoDistributed - RunScenarioTask Execute Starting ScenarioID ={ scenario.Id}, RunID ={ run.Id}");
            //create instance for scenario
            RunInstance runInstance = _runInstanceCreator.Create(run.Id, scenario.Id);
            RaiseInfo($"AutoDistributed - about to enter: {nameof(runInstance.UploadInputFilesAndCreateAutoBookRequest)}");
            runInstance.UploadInputFilesAndCreateAutoBookRequest(autoBookInstanceConfigurationsForRun, autoBookRequiredStorageGB);
            RaiseInfo($"AutoDistributed - returned from: {nameof(runInstance.UploadInputFilesAndCreateAutoBookRequest)}");
            // Flag run as started
            runStarted = true;
            runInstances.Add(runInstance);
            _ = newScenarioStatuses.TryRemove(scenario.Id, out _);
            // Don't update scenario status at the end
            scenarioSyncStatuses[scenario.Id] = false;
            RaiseInfo($"AutoDistributed - RunScenarioTask Execute Started ScenarioID ={ scenario.Id}, RunID ={ run.Id}");
        }
        else
        {
            try
            {
                // Machine-wide lock: AutoBook selection must be exclusive so two
                // scenarios can't grab the same idle instance.
                using (MachineLock.Create("xggameplan.AWSAutoBooks.GetFreeAutoBook", new TimeSpan(0, 10, 0)))
                {
                    // Try configurations in order; first adequate idle AutoBook wins.
                    foreach (var autoBookInstanceConfiguration in autoBookInstanceConfigurationsForRun)
                    {
                        autoBook = _autoBooks.GetFirstAdequateIdleAutoBook(autoBookInstanceConfiguration, autoBookRequiredStorageGB, true);
                        if (autoBook != null) // Got free AutoBook
                        {
                            RaiseInfo($"Got Free AutoBook: {autoBook.Id} ConfigurationId: {autoBook.InstanceConfigurationId}");
                            runAutoBookInstanceConfiguration = autoBookInstanceConfiguration;
                            break;
                        }
                    }
                }
            }
            catch (MachineLockTimeoutException)
            {
                // Lock contention: treated the same as "no free AutoBook" below.
                RaiseInfo($"MachineLockTimeoutException in xggameplan.AWSAutoBooks.GetFreeAutoBook");
            }
            // Get autobook interface
            autoBookInterface = (autoBook == null) ? null : _autoBooks.GetInterface(autoBook);
            // Get free AutoBook instance, will be locked so that it can't be used elsewhere
            if (autoBook != null) // Free AutoBook - start run
            {
                RaiseInfo($"Free Autobook - Starting ScenarioID ={ scenario.Id}, AutoBookID ={autoBook?.Id}, RunID ={ run.Id}, Instance Configuration = { runAutoBookInstanceConfiguration.Description }");
                // Start run, exception will cause cleanup below
                RunInstance runInstance = _runInstanceCreator.Create(run.Id, scenario.Id);
                runInstance.UploadInputFilesStartAutoBookRun(autoBookInterface, autoBook);
                // Flag run as started
                runStarted = true;
                runInstances.Add(runInstance);
                _ = newScenarioStatuses.TryRemove(scenario.Id, out _);
                // Don't update scenario status at the end
                scenarioSyncStatuses[scenario.Id] = false;
                RaiseInfo($"Started ScenarioID ={ scenario.Id}, AutoBookID ={ autoBook?.Id}, RunID ={ run.Id}, Instance Configuration = { runAutoBookInstanceConfiguration?.Description }");
            }
            else // No free AutoBook, awaiting for provisioning
            {
                _auditEventRepository.Insert(AuditEventFactory.CreateAuditEventForWarningMessage(0, 0, $"No free AutoBook, awaiting for provisioning, waiting for existing AutoBooks to be Idle (RunID={run.Id}, ScenarioID={scenario.Id})"));
                // Update scenario so that it can be retried later when an AutoBook becomes idle
                RunManager.UpdateScenarioStatuses(_repositoryFactory, _auditEventRepository, run.Id, new List<Guid> { scenario.Id }, new List<ScenarioStatuses> { ScenarioStatuses.Scheduled }, new List<DateTime?> { null });
                _ = newScenarioStatuses.TryRemove(scenario.Id, out _);
                // Don't update scenario status at the end
                scenarioSyncStatuses[scenario.Id] = false;
            }
        }
    }
    catch (System.Exception exception)
    {
        // Log exception but don't throw it. We want to try and start other scenarios
        _auditEventRepository.Insert(AuditEventFactory.CreateAuditEventForException(0, 0, $"Error starting scenario (RunID={run.Id}, ScenarioID={scenario.Id}, AutoBookID={(autoBook == null ? "Unknown" : autoBook.Id)})", exception));
    }
    finally
    {
        // If we locked a free AutoBook instance but didn't start the scenario then reset to free, unlocks it.
        if (!runStarted && autoBook != null)
        {
            autoBookInterface.ResetFree();
        }
    }
}
/// <summary>
/// Generates and uploads the run-level and scenario-level input zip archives
/// to cloud storage, emitting paired start/finish pipeline audit events for
/// each phase (generate, zip, upload). A machine lock keyed on the run ID
/// ensures only one instance generates/uploads the shared run archive.
/// On failure the matching "finished" event is written with the exception
/// details and the exception is rethrown.
/// </summary>
/// <param name="run">Run whose input data is being prepared.</param>
/// <param name="scenarioId">Scenario whose input data is being prepared.</param>
private void UploadRunData(Run run, Guid scenarioId)
{
    string runFilePath = null;
    string scenarioFilePath = null;
    bool uploadRunData = false;
    bool uploadScenarioData = false;
    string runFile = $"{run.Id}.zip";
    string runFileNameWithPath = $@"input/{runFile}";
    string scenarioFile = $"{scenarioId}.zip";
    string scenarioFileNameWithPath = $@"input/{scenarioFile}";
    // Tracks whether STARTED_GENERATING_INPUT_FILES was logged, so the matching
    // "finished" event is only written when a start event exists.
    bool loggedStarted = false;
    try
    {
        // Ensure that only one instance attempts to upload <RunId>.zip. We can't just check FileExists because it will take time for the file to
        // be uploaded and appear.
        using (MachineLock.Create($"xggameplan.AWSInputHandler.UploadRunData.Run Id: {run.Id}", new TimeSpan(2, 0, 0)))
        {
            if (!_cloudStorage.FileExists(new S3FileComment() { BucketName = _awsSettings.S3Bucket, FileNameWithPath = runFileNameWithPath }))
            {
                uploadRunData = true;
                if (!loggedStarted)
                {
                    _auditEventRepository.Insert(AuditEventFactory.CreateAuditEventForGameplanPipelineStart(0, 0, PipelineEventIDs.STARTED_GENERATING_INPUT_FILES, run.Id, scenarioId, null, null));
                    _pipelineAuditEventRepository.Add(PipelineEventHelper.CreatePipelineAuditEvent(AuditEventTypes.GamePlanRun, PipelineEventIDs.STARTED_GENERATING_INPUT_FILES, run.Id, scenarioId, null));
                    loggedStarted = true;
                }
                // Generate run-level input data locally; uploaded in the later phase.
                runFilePath = _optimiserInputFiles.PopulateRunData(run);
            }
        }
        // Upload scenario data
        if (!_cloudStorage.FileExists(new S3FileComment() { BucketName = _awsSettings.S3Bucket, FileNameWithPath = scenarioFileNameWithPath }))
        {
            uploadScenarioData = true;
            if (!loggedStarted)
            {
                _auditEventRepository.Insert(AuditEventFactory.CreateAuditEventForGameplanPipelineStart(0, 0, PipelineEventIDs.STARTED_GENERATING_INPUT_FILES, run.Id, scenarioId, null, null));
                _pipelineAuditEventRepository.Add(PipelineEventHelper.CreatePipelineAuditEvent(AuditEventTypes.GamePlanRun, PipelineEventIDs.STARTED_GENERATING_INPUT_FILES, run.Id, scenarioId, null));
                loggedStarted = true;
            }
            // Generate scenario-level input data locally; uploaded in the later phase.
            scenarioFilePath = _optimiserInputFiles.PopulateScenarioData(run, scenarioId);
        }
        if (loggedStarted)
        {
            _auditEventRepository.Insert(AuditEventFactory.CreateAuditEventForGameplanPipelineEnd(0, 0, PipelineEventIDs.FINISHED_GENERATING_INPUT_FILES, run.Id, scenarioId, null, null, null, null));
            _pipelineAuditEventRepository.Add(PipelineEventHelper.CreatePipelineAuditEvent(AuditEventTypes.GamePlanRun, PipelineEventIDs.FINISHED_GENERATING_INPUT_FILES, run.Id, scenarioId, null));
        }
    }
    catch (System.Exception exception)
    {
        if (loggedStarted)
        {
            // Close the generate phase with the failure details before rethrowing.
            _auditEventRepository.Insert(AuditEventFactory.CreateAuditEventForGameplanPipelineEnd(0, 0, PipelineEventIDs.FINISHED_GENERATING_INPUT_FILES, run.Id, scenarioId, null, null, exception.Message, exception));
            _pipelineAuditEventRepository.Add(PipelineEventHelper.CreatePipelineAuditEvent(AuditEventTypes.GamePlanRun, PipelineEventIDs.FINISHED_GENERATING_INPUT_FILES, run.Id, scenarioId, exception.Message));
        }
        throw;
    }
    finally
    {
        _pipelineAuditEventRepository.SaveChanges();
    }
    // Because we do zipping as we create each input file...
    _auditEventRepository.Insert(AuditEventFactory.CreateAuditEventForGameplanPipelineStart(0, 0, PipelineEventIDs.STARTED_ZIPPING_INPUT_FILES, run.Id, scenarioId, null, null));
    _pipelineAuditEventRepository.Add(PipelineEventHelper.CreatePipelineAuditEvent(AuditEventTypes.GamePlanRun, PipelineEventIDs.STARTED_ZIPPING_INPUT_FILES, run.Id, scenarioId, null));
    _auditEventRepository.Insert(AuditEventFactory.CreateAuditEventForGameplanPipelineEnd(0, 0, PipelineEventIDs.FINISHED_ZIPPING_INPUT_FILES, run.Id, scenarioId, null, null, null, null));
    _pipelineAuditEventRepository.Add(PipelineEventHelper.CreatePipelineAuditEvent(AuditEventTypes.GamePlanRun, PipelineEventIDs.FINISHED_ZIPPING_INPUT_FILES, run.Id, scenarioId, null));
    _pipelineAuditEventRepository.SaveChanges();
    try
    {
        // Same lock key as the generate phase: serialises upload of the shared run zip.
        using (MachineLock.Create($"xggameplan.AWSInputHandler.UploadRunData.Run Id: {run.Id}", new TimeSpan(2, 0, 0)))
        {
            _auditEventRepository.Insert(AuditEventFactory.CreateAuditEventForGameplanPipelineStart(0, 0, PipelineEventIDs.STARTED_UPLOADING_INPUT_ZIP_ARCHIVE_TO_CLOUD_STORAGE, run.Id, scenarioId, null, null));
            _pipelineAuditEventRepository.Add(PipelineEventHelper.CreatePipelineAuditEvent(AuditEventTypes.GamePlanRun, PipelineEventIDs.STARTED_UPLOADING_INPUT_ZIP_ARCHIVE_TO_CLOUD_STORAGE, run.Id, scenarioId, null));
            //Upload run data
            // Re-check FileExists: another instance may have uploaded the run zip
            // while we were outside the lock.
            if (uploadRunData && !String.IsNullOrWhiteSpace(runFilePath) && File.Exists(runFilePath) && !_cloudStorage.FileExists(new S3FileComment() { BucketName = _awsSettings.S3Bucket, FileNameWithPath = runFileNameWithPath }))
            {
                _cloudStorage.Upload(new S3UploadComment() { BucketName = _awsSettings.S3Bucket, DestinationFilePath = runFileNameWithPath, SourceFilePath = runFilePath });
                uploadRunData = false;
                // Remove the local archive once uploaded.
                if (File.Exists(runFilePath))
                {
                    File.Delete(runFilePath);
                }
            }
            // Upload scenario data
            if (uploadScenarioData && !String.IsNullOrWhiteSpace(scenarioFilePath) && File.Exists(scenarioFilePath))
            {
                _cloudStorage.Upload(new S3UploadComment() { BucketName = _awsSettings.S3Bucket, DestinationFilePath = scenarioFileNameWithPath, SourceFilePath = scenarioFilePath });
                // Remove the local archive once uploaded.
                if (File.Exists(scenarioFilePath))
                {
                    File.Delete(scenarioFilePath);
                }
            }
            _auditEventRepository.Insert(AuditEventFactory.CreateAuditEventForGameplanPipelineEnd(0, 0, PipelineEventIDs.FINISHED_UPLOADING_INPUT_ZIP_ARCHIVE_TO_CLOUD_STORAGE, run.Id, scenarioId, null, null, null, null));
            _pipelineAuditEventRepository.Add(PipelineEventHelper.CreatePipelineAuditEvent(AuditEventTypes.GamePlanRun, PipelineEventIDs.FINISHED_UPLOADING_INPUT_ZIP_ARCHIVE_TO_CLOUD_STORAGE, run.Id, scenarioId, null));
        }
    }
    catch (System.Exception exception)
    {
        // Close the upload phase with the failure details before rethrowing.
        _auditEventRepository.Insert(AuditEventFactory.CreateAuditEventForGameplanPipelineEnd(0, 0, PipelineEventIDs.FINISHED_UPLOADING_INPUT_ZIP_ARCHIVE_TO_CLOUD_STORAGE, run.Id, scenarioId, null, null, exception.Message, exception));
        _pipelineAuditEventRepository.Add(PipelineEventHelper.CreatePipelineAuditEvent(AuditEventTypes.GamePlanRun, PipelineEventIDs.FINISHED_UPLOADING_INPUT_ZIP_ARCHIVE_TO_CLOUD_STORAGE, run.Id, scenarioId, exception.Message));
        throw;
    }
    finally
    {
        _pipelineAuditEventRepository.SaveChanges();
    }
}