/// <summary>
/// Inserts a move request to move one or more series in a study.
/// </summary>
/// <param name="context">The persistence context used for database connection.</param>
/// <param name="partition">The <see cref="ServerPartition"/> where the study resides</param>
/// <param name="studyInstanceUid">The Study Instance Uid of the study</param>
/// <param name="deviceKey">The Key of the device to move the series to.</param>
/// <param name="seriesInstanceUids">The Series Instance Uids of the series to be moved.</param>
/// <param name="externalRequest">Optional <see cref="ExternalRequestQueue"/> entry that triggered this move</param>
/// <returns>The MoveSeries <see cref="WorkQueue"/> entries inserted into the system, one per storage location.</returns>
/// <exception cref="InvalidStudyStateOperationException"></exception>
public static IList<WorkQueue> MoveSeries(IUpdateContext context, ServerPartition partition, string studyInstanceUid, ServerEntityKey deviceKey, List<string> seriesInstanceUids, ExternalRequestQueue externalRequest=null)
{
    // A study may be stored in multiple locations; insert one move request per location.
    IList<StudyStorageLocation> storageLocations = StudyStorageLocation.FindStorageLocations(partition.Key, studyInstanceUid);
    IList<WorkQueue> entries = new List<WorkQueue>();
    foreach (StudyStorageLocation location in storageLocations)
    {
        try
        {
            // insert a move series request
            WorkQueue request = InsertMoveSeriesRequest(context, location, seriesInstanceUids, deviceKey, externalRequest);
            // NOTE(review): asserts the WebMoveStudy queue type even though this is a series-level
            // move — presumably series moves are queued under WebMoveStudy; confirm.
            Debug.Assert(request.WorkQueueTypeEnum.Equals(WorkQueueTypeEnum.WebMoveStudy));
            entries.Add(request);
        }
        catch (Exception ex)
        {
            // A failure for one location is logged and the loop continues with the next location.
            Platform.Log(LogLevel.Error, ex, "Errors occurred when trying to insert move request");
            // NOTE(review): this unlocks the study although no code path in this method locked it
            // (compare DeleteSeries, which calls ServerHelper.LockStudy first) — confirm intent.
            if (!ServerHelper.UnlockStudy(location.Key))
                throw new ApplicationException("Unable to unlock the study");
        }
    }
    return entries;
}
/// <summary>
/// Per-frame update for the player entity: click-to-teleport, horizontal
/// movement against solids, gravity/ground settling, speed clamping and jumping.
/// </summary>
/// <param name="gameContext">The current game context.</param>
/// <param name="updateContext">The current update context.</param>
public override void Update(IGameContext gameContext, IUpdateContext updateContext)
{
    base.Update(gameContext, updateContext);

    var mouseState = Mouse.GetState();
    var keyboardState = Keyboard.GetState();

    // A left click teleports the entity to the cursor and cancels all velocity.
    if (mouseState.LeftPressed(this))
    {
        X = mouseState.X;
        Y = mouseState.Y;
        XSpeed = 0;
        YSpeed = 0;
        m_JumpHandle.Play();
    }

    // Horizontal movement, collision-checked against solid entities only.
    if (keyboardState.IsKeyDown(Keys.Left))
    {
        m_Platforming.ApplyMovement(this, -4, 0, gameContext.World.Entities.Cast<IBoundingBox>(), x => x is Solid);
    }

    if (keyboardState.IsKeyDown(Keys.Right))
    {
        m_Platforming.ApplyMovement(this, 4, 0, gameContext.World.Entities.Cast<IBoundingBox>(), x => x is Solid);
    }

    if (!OnGround(gameContext))
    {
        // Airborne: accelerate downwards.
        m_Platforming.ApplyGravity(this, 0, 0.5f);
    }
    else if (YSpeed > 0)
    {
        // Just landed while falling: stop the fall and settle onto the ground.
        YSpeed = 0;
        m_Platforming.ApplyActionUntil(this, a => a.Y += 1, a => OnGround(gameContext), 12);
    }

    m_Platforming.ClampSpeed(this, null, 12);

    // Jump only when standing on the ground.
    if (keyboardState.IsKeyPressed(Keys.Up) && OnGround(gameContext))
    {
        YSpeed = -6;
    }
}
/// <summary>
/// Resizes the game window to 800x600, timing the call under the
/// "resize_window" profiler entry.
/// </summary>
/// <param name="gameContext">The current game context.</param>
/// <param name="updateContext">The current update context.</param>
public void Update(IGameContext gameContext, IUpdateContext updateContext)
{
    // Equivalent to a using-statement: always dispose the measurement handle.
    var measurement = m_Profiler.Measure("resize_window");
    try
    {
        gameContext.ResizeWindow(800, 600);
    }
    finally
    {
        measurement.Dispose();
    }
}
/// <summary>
/// Internally called by <see cref="SensorEngineHook"/> during the update
/// step; forwards the update tick to every registered sensor.
/// </summary>
/// <param name="gameContext">The current game context.</param>
/// <param name="updateContext">The current update context.</param>
public void Update(IGameContext gameContext, IUpdateContext updateContext)
{
    foreach (var registeredSensor in _sensors)
    {
        registeredSensor.Update(gameContext, updateContext);
    }
}
/// <summary>
/// Inserts (or locates) the StudyProcess work queue entry for the SOP
/// instance referenced by the current message.
/// </summary>
/// <param name="theProcessor">The command processor calling us.</param>
/// <param name="updateContext">The persistent store connection to use for the update.</param>
/// <exception cref="ApplicationException">Thrown when no work queue entry could be inserted or found.</exception>
protected override void OnExecute(CommandProcessor theProcessor, IUpdateContext updateContext)
{
    var broker = updateContext.GetBroker<IInsertWorkQueue>();

    var parameters = new InsertWorkQueueParameters
    {
        WorkQueueTypeEnum = WorkQueueTypeEnum.StudyProcess,
        StudyStorageKey = _storageLocation.GetKey(),
        ServerPartitionKey = _storageLocation.ServerPartitionKey,
        SeriesInstanceUid = _message.DataSet[DicomTags.SeriesInstanceUid].GetString(0, String.Empty),
        SopInstanceUid = _message.DataSet[DicomTags.SopInstanceUid].GetString(0, String.Empty),
        ScheduledTime = Platform.Time,
        WorkQueueGroupID = _uidGroupId
    };

    // Duplicate SOPs carry extra bookkeeping for the processor.
    if (_duplicate)
    {
        parameters.Duplicate = _duplicate;
        parameters.Extension = _extension;
        parameters.UidGroupID = _uidGroupId;
    }

    _insertedWorkQueue = broker.FindOne(parameters);

    if (_insertedWorkQueue == null)
        throw new ApplicationException("UpdateWorkQueueCommand failed");
}
/// <summary>
/// Inserts delete request(s) to delete one or more series in a study.
/// </summary>
/// <param name="context">The persistence context used for database connection.</param>
/// <param name="partition">The <see cref="ServerPartition"/> where the study resides</param>
/// <param name="studyInstanceUid">The Study Instance Uid of the study</param>
/// <param name="seriesInstanceUids">The Series Instance Uids of the series to be deleted.</param>
/// <param name="reason">The reason for deleting the series.</param>
/// <returns>A list of DeleteSeries <see cref="WorkQueue"/> entries inserted into the system, one per storage location.</returns>
/// <exception cref="InvalidStudyStateOperationException"></exception>
public static IList<WorkQueue> DeleteSeries(IUpdateContext context, ServerPartition partition, string studyInstanceUid, List<string> seriesInstanceUids, string reason)
{
    // A study may be stored in multiple locations; insert one delete request per location.
    IList<StudyStorageLocation> storageLocations = StudyStorageLocation.FindStorageLocations(partition.Key, studyInstanceUid);
    IList<WorkQueue> entries = new List<WorkQueue>();
    foreach (StudyStorageLocation location in storageLocations)
    {
        try
        {
            string failureReason;
            // Lock the study before queueing, so no other operation can modify it meanwhile.
            if (ServerHelper.LockStudy(location.Key, QueueStudyStateEnum.WebDeleteScheduled, out failureReason))
            {
                // insert a delete series request
                WorkQueue request = InsertDeleteSeriesRequest(context, location, seriesInstanceUids, reason);
                // NOTE(review): asserts the WebDeleteStudy queue type even though this is a
                // series-level delete — presumably series deletes share that queue type; confirm.
                Debug.Assert(request.WorkQueueTypeEnum.Equals(WorkQueueTypeEnum.WebDeleteStudy));
                entries.Add(request);
            }
            else
            {
                throw new ApplicationException(String.Format("Unable to lock storage location {0} for deletion : {1}", location.Key, failureReason));
            }
        }
        catch(Exception ex)
        {
            // NOTE(review): failures (including the lock failure thrown above) are logged,
            // the lock is released, and the loop continues with the next location — the
            // caller is not informed that some locations failed. Confirm this best-effort
            // behavior is intended (contrast DeleteStudy, which rethrows).
            Platform.Log(LogLevel.Error, ex, "Errors occurred when trying to insert delete request");
            if (!ServerHelper.UnlockStudy(location.Key))
                throw new ApplicationException("Unable to unlock the study");
        }
    }
    return entries;
}
/// <summary>
/// Imports users from CSV format.
/// </summary>
/// <param name="rows">
/// Each string in the list must contain the following CSV fields:
/// 0 - UserName
/// 1 - StaffType
/// 2 - Id
/// 3 - FamilyName
/// 4 - GivenName
/// 5 - MiddleName
/// 6 - Prefix
/// 7 - Suffix
/// 8 - Degree
/// </param>
/// <param name="context">The update context in which new users are created.</param>
public override void Import(List<string> rows, IUpdateContext context)
{
    _context = context;

    List<User> importedUsers = new List<User>();
    foreach (string row in rows)
    {
        string[] fields = ParseCsv(row, _numFields);

        // Only fields 0, 3 and 4 are consumed here; the remaining fields are
        // parsed for validation but not used by this importer.
        string userName = fields[0];
        string staffFamilyName = fields[3];
        string staffGivenName = fields[4];

        // Skip rows whose user already exists (in the store or earlier in this batch).
        User user = GetUser(userName, importedUsers);
        if (user == null)
        {
            UserInfo userInfo = new UserInfo(userName, string.Format("{0} {1}", staffFamilyName, staffGivenName), null, null, null);
            user = User.CreateNewUser(userInfo, _settings.DefaultTemporaryPassword);

            _context.Lock(user, DirtyState.New);
            importedUsers.Add(user);
        }
    }
}
/// <summary>
/// Imports the specified set of authority tokens.
/// </summary>
/// <param name="tokenDefs">The token definitions to import.</param>
/// <param name="addToGroups">Names of authority groups to which each imported token is added; may be null or empty.</param>
/// <param name="context">The update context used for broker access.</param>
/// <returns>The collection of tokens loaded from the store (which <c>ProcessToken</c> operates against).</returns>
public IList<AuthorityToken> Import(IEnumerable<AuthorityTokenDefinition> tokenDefs, IList<string> addToGroups, IUpdateContext context)
{
    // first load all the existing tokens into memory
    // there should not be that many tokens ( < 500), so this should not be a problem
    var broker = context.GetBroker<IAuthorityTokenBroker>();
    var existingTokens = broker.FindAll();

    // if there are groups to add to, load the groups
    var groups = addToGroups != null && addToGroups.Count > 0
        ? LoadGroups(addToGroups, context)
        : new List<AuthorityGroup>();

    // Order the input such that the renames (definitions with former identities)
    // are processed FIRST; otherwise there is a corner case where a newly imported
    // token is immediately renamed.
    // BUG FIX: the previous OrderBy(t => t.FormerIdentities.Length > 0) sorted the
    // renames LAST (false orders before true), defeating the stated intent.
    tokenDefs = tokenDefs.OrderByDescending(t => t.FormerIdentities.Length > 0);

    foreach (var tokenDef in tokenDefs)
    {
        var token = ProcessToken(tokenDef, existingTokens, context);

        // add to groups
        CollectionUtils.ForEach(groups, g => g.AuthorityTokens.Add(token));
    }

    return existingTokens;
}
/// <summary>
/// Inserts a request to delete an entire study.
/// </summary>
/// <param name="context">The persistence context used for database connection.</param>
/// <param name="partition">The <see cref="ServerPartition"/> where the study resides</param>
/// <param name="studyInstanceUid">The Study Instance Uid of the study</param>
/// <param name="reason">The reason for deleting the study.</param>
/// <returns>The DeleteStudy <see cref="WorkQueue"/> entry inserted into the system.</returns>
/// <exception cref="InvalidStudyStateOperationException"></exception>
/// <exception cref="ApplicationException">Thrown when the study cannot be locked for deletion or the insert fails.</exception>
public static WorkQueue DeleteStudy(IUpdateContext context, ServerPartition partition, string studyInstanceUid, string reason)
{
    StudyStorageLocation location = FindStudyStorageLocation(context, partition, studyInstanceUid);
    string failureReason;
    try
    {
        // Acquire the deletion lock before queueing the request.
        if (LockStudyForDeletion(location.Key, out failureReason))
        {
            WorkQueue deleteRequest = InsertDeleteStudyRequest(context, location, reason);
            if (deleteRequest == null)
                throw new ApplicationException(
                    String.Format("Unable to insert a Delete Study request for study {0}", location.StudyInstanceUid));

            return deleteRequest;
        }
    }
    catch (Exception ex)
    {
        Platform.Log(LogLevel.Error, ex, "Errors occurred when trying to insert study delete request");
        // Best-effort release of the deletion lock; the original failure is rethrown either way.
        if (!ReleaseDeletionLock(location.Key))
            Platform.Log(LogLevel.Error, "Unable to unlock the study: " + location.StudyInstanceUid);

        throw;
    }

    // Reached only when the lock could not be acquired; surface the lock's failure reason.
    throw new ApplicationException(
        String.Format("Unable to lock storage location {0} for deletion : {1}", location.Key, failureReason));
}
/// <summary>
/// Execute the insert of the StudyStorage record.
/// </summary>
/// <param name="theProcessor">The command processor calling us</param>
/// <param name="updateContext">The persistent store connection to use for the update.</param>
protected override void OnExecute(CommandProcessor theProcessor, IUpdateContext updateContext)
{
    var locInsert = updateContext.GetBroker<IInsertStudyStorage>();
    var insertParms = new InsertStudyStorageParameters
    {
        ServerPartitionKey = _serverPartitionKey,
        StudyInstanceUid = _studyInstanceUid,
        Folder = _folder,
        FilesystemKey = _filesystemKey,
        QueueStudyStateEnum = QueueStudyStateEnum.Idle,
        // The transfer syntax UID is the same in every branch below, so set it once.
        TransferSyntaxUid = _transfersyntax.UidString
    };

    // Map the transfer syntax compression category onto the study status.
    if (_transfersyntax.LosslessCompressed)
        insertParms.StudyStatusEnum = StudyStatusEnum.OnlineLossless;
    else if (_transfersyntax.LossyCompressed)
        insertParms.StudyStatusEnum = StudyStatusEnum.OnlineLossy;
    else
        insertParms.StudyStatusEnum = StudyStatusEnum.Online;

    // Find one so we don't uselessly process all the results.
    _location = locInsert.FindOne(insertParms);
}
/// <summary>
/// Inserts the ArchiveStudyStorage record and marks the corresponding
/// ArchiveQueue entry as completed.
/// </summary>
/// <param name="theProcessor">The processor executing the command.</param>
/// <param name="updateContext">Database update context.</param>
/// <exception cref="ApplicationException">Thrown when the ArchiveQueue update fails.</exception>
protected override void OnExecute(CommandProcessor theProcessor, IUpdateContext updateContext)
{
    // Record the archive result.
    var archiveColumns = new ArchiveStudyStorageUpdateColumns
    {
        ArchiveTime = Platform.Time,
        PartitionArchiveKey = _partitionArchiveKey,
        StudyStorageKey = _studyStorageKey,
        ArchiveXml = _archiveXml,
        ServerTransferSyntaxKey = _serverTransferSyntaxKey
    };

    var archiveBroker = updateContext.GetBroker<IArchiveStudyStorageEntityBroker>();
    ArchiveStudyStorage storage = archiveBroker.Insert(archiveColumns);

    // Mark the queue entry completed.
    var queueParms = new UpdateArchiveQueueParameters
    {
        ArchiveQueueKey = _archiveQueueKey,
        ArchiveQueueStatusEnum = ArchiveQueueStatusEnum.Completed,
        ScheduledTime = Platform.Time,
        StudyStorageKey = _studyStorageKey
    };

    var queueBroker = updateContext.GetBroker<IUpdateArchiveQueue>();
    if (!queueBroker.Execute(queueParms))
        throw new ApplicationException("InsertArchiveStudyStorageCommand failed");
}
/// <summary>
/// Inserts a new <see cref="DataAccessGroup"/> row populated from <paramref name="entity"/>.
/// </summary>
static public DataAccessGroup Insert(IUpdateContext update, DataAccessGroup entity)
{
    var columns = new DataAccessGroupUpdateColumns
    {
        AuthorityGroupOID = entity.AuthorityGroupOID,
        Deleted = entity.Deleted
    };
    return update.GetBroker<IDataAccessGroupEntityBroker>().Insert(columns);
}
/// <summary>
/// Inserts a new <see cref="StudyDataAccess"/> row populated from <paramref name="entity"/>.
/// </summary>
static public StudyDataAccess Insert(IUpdateContext update, StudyDataAccess entity)
{
    var columns = new StudyDataAccessUpdateColumns
    {
        StudyStorageKey = entity.StudyStorageKey,
        DataAccessGroupKey = entity.DataAccessGroupKey
    };
    return update.GetBroker<IStudyDataAccessEntityBroker>().Insert(columns);
}
/// <summary>
/// Initializes a new instance of a <see cref="PhysicsEvent"/>. This constructor
/// is intended to be used internally within the engine.
/// </summary>
/// <param name="gameContext">The current game context, or null if running on a server.</param>
/// <param name="serverContext">The current server context, or null if running on a client.</param>
/// <param name="updateContext">The current update context.</param>
protected PhysicsEvent(
    IGameContext gameContext,
    IServerContext serverContext,
    IUpdateContext updateContext)
{
    // Exactly one of gameContext/serverContext is expected to be non-null,
    // depending on whether this event is raised client- or server-side.
    GameContext = gameContext;
    ServerContext = serverContext;
    UpdateContext = updateContext;
}
/// <summary>
/// Inserts a new <see cref="ServerPartitionDataAccess"/> row populated from <paramref name="entity"/>.
/// </summary>
static public ServerPartitionDataAccess Insert(IUpdateContext update, ServerPartitionDataAccess entity)
{
    var columns = new ServerPartitionDataAccessUpdateColumns
    {
        ServerPartitionKey = entity.ServerPartitionKey,
        DataAccessGroupKey = entity.DataAccessGroupKey
    };
    return update.GetBroker<IServerPartitionDataAccessEntityBroker>().Insert(columns);
}
/// <summary>
/// Inserts a new <see cref="RequestAttributes"/> row populated from <paramref name="entity"/>.
/// </summary>
static public RequestAttributes Insert(IUpdateContext update, RequestAttributes entity)
{
    var columns = new RequestAttributesUpdateColumns
    {
        SeriesKey = entity.SeriesKey,
        RequestedProcedureId = entity.RequestedProcedureId,
        ScheduledProcedureStepId = entity.ScheduledProcedureStepId
    };
    return update.GetBroker<IRequestAttributesEntityBroker>().Insert(columns);
}
/// <summary>
/// Inserts a new <see cref="DevicePreferredTransferSyntax"/> row populated from <paramref name="entity"/>.
/// </summary>
static public DevicePreferredTransferSyntax Insert(IUpdateContext update, DevicePreferredTransferSyntax entity)
{
    var columns = new DevicePreferredTransferSyntaxUpdateColumns
    {
        DeviceKey = entity.DeviceKey,
        ServerSopClassKey = entity.ServerSopClassKey,
        ServerTransferSyntaxKey = entity.ServerTransferSyntaxKey
    };
    return update.GetBroker<IDevicePreferredTransferSyntaxEntityBroker>().Insert(columns);
}
/// <summary>
/// Inserts a new <see cref="PartitionTransferSyntax"/> row populated from <paramref name="entity"/>.
/// </summary>
static public PartitionTransferSyntax Insert(IUpdateContext update, PartitionTransferSyntax entity)
{
    var columns = new PartitionTransferSyntaxUpdateColumns
    {
        ServerPartitionKey = entity.ServerPartitionKey,
        ServerTransferSyntaxKey = entity.ServerTransferSyntaxKey,
        Enabled = entity.Enabled
    };
    return update.GetBroker<IPartitionTransferSyntaxEntityBroker>().Insert(columns);
}
/// <summary>
/// Inserts a new <see cref="ServerTransferSyntax"/> row populated from <paramref name="entity"/>.
/// </summary>
static public ServerTransferSyntax Insert(IUpdateContext update, ServerTransferSyntax entity)
{
    var columns = new ServerTransferSyntaxUpdateColumns
    {
        Uid = entity.Uid,
        Description = entity.Description,
        Lossless = entity.Lossless
    };
    return update.GetBroker<IServerTransferSyntaxEntityBroker>().Insert(columns);
}
/// <summary>
/// Inserts a new <see cref="CannedText"/> row populated from <paramref name="entity"/>.
/// </summary>
static public CannedText Insert(IUpdateContext update, CannedText entity)
{
    var columns = new CannedTextUpdateColumns
    {
        Label = entity.Label,
        Category = entity.Category,
        Text = entity.Text
    };
    return update.GetBroker<ICannedTextEntityBroker>().Insert(columns);
}
/// <summary>
/// Inserts a new <see cref="ServerSopClass"/> row populated from <paramref name="entity"/>.
/// </summary>
static public ServerSopClass Insert(IUpdateContext update, ServerSopClass entity)
{
    var columns = new ServerSopClassUpdateColumns
    {
        SopClassUid = entity.SopClassUid,
        Description = entity.Description,
        NonImage = entity.NonImage
    };
    return update.GetBroker<IServerSopClassEntityBroker>().Insert(columns);
}
/// <summary>
/// Inserts a new <see cref="PartitionSopClass"/> row populated from <paramref name="entity"/>.
/// </summary>
static public PartitionSopClass Insert(IUpdateContext update, PartitionSopClass entity)
{
    var columns = new PartitionSopClassUpdateColumns
    {
        ServerPartitionKey = entity.ServerPartitionKey,
        ServerSopClassKey = entity.ServerSopClassKey,
        Enabled = entity.Enabled
    };
    return update.GetBroker<IPartitionSopClassEntityBroker>().Insert(columns);
}
/// <summary>
/// Do the insertion of the AutoRoute work queue entry.
/// </summary>
/// <remarks>
/// Looks up the destination device by AE title on the current partition. The request is
/// ignored (with a warning log and alert) when the device is unknown or has auto-route
/// support disabled.
/// </remarks>
/// <param name="theProcessor">The command processor calling us.</param>
/// <param name="updateContext">The persistent store connection to use for the update.</param>
/// <exception cref="ApplicationException">Thrown when the work queue insert fails.</exception>
protected override void OnExecute(CommandProcessor theProcessor, IUpdateContext updateContext)
{
    var deviceSelectCriteria = new DeviceSelectCriteria();
    deviceSelectCriteria.AeTitle.EqualTo(_deviceAe);
    deviceSelectCriteria.ServerPartitionKey.EqualTo(_context.ServerPartitionKey);

    var selectDevice = updateContext.GetBroker<IDeviceEntityBroker>();

    Device dev = selectDevice.FindOne(deviceSelectCriteria);
    if (dev == null)
    {
        Platform.Log(LogLevel.Warn,
                     "Device '{0}' on partition {1} not in database for autoroute request! Ignoring request.",
                     _deviceAe, _context.ServerPartition.AeTitle);

        ServerPlatform.Alert(
            AlertCategory.Application, AlertLevel.Warning,
            SR.AlertComponentAutorouteRule, AlertTypeCodes.UnableToProcess,
            null, TimeSpan.FromMinutes(5),
            SR.AlertAutoRouteUnknownDestination, _deviceAe, _context.ServerPartition.AeTitle);
        return;
    }

    if (!dev.AllowAutoRoute)
    {
        Platform.Log(LogLevel.Warn,
                     "Auto-route attempted to device {0} on partition {1} with autoroute support disabled. Ignoring request.",
                     dev.AeTitle, _context.ServerPartition.AeTitle);

        ServerPlatform.Alert(AlertCategory.Application, AlertLevel.Warning,
                             SR.AlertComponentAutorouteRule, AlertTypeCodes.UnableToProcess,
                             null, TimeSpan.FromMinutes(5),
                             SR.AlertAutoRouteDestinationAEDisabled, dev.AeTitle, _context.ServerPartition.AeTitle);
        return;
    }

    // Default the schedule to slightly in the future so entries can batch together.
    // (?? replaces the previous HasValue ternary; behavior is identical.)
    var parms = new InsertWorkQueueParameters
    {
        WorkQueueTypeEnum = WorkQueueTypeEnum.AutoRoute,
        ScheduledTime = _scheduledTime ?? Platform.Time.AddSeconds(10),
        StudyStorageKey = _context.StudyLocationKey,
        ServerPartitionKey = _context.ServerPartitionKey,
        DeviceKey = dev.GetKey(),
        SeriesInstanceUid = _context.Message.DataSet[DicomTags.SeriesInstanceUid].GetString(0, string.Empty),
        SopInstanceUid = _context.Message.DataSet[DicomTags.SopInstanceUid].GetString(0, string.Empty)
    };

    var broker = updateContext.GetBroker<IInsertWorkQueue>();
    if (broker.FindOne(parms) == null)
    {
        throw new ApplicationException("InsertAutoRouteCommand failed");
    }
}
/// <summary>
/// Inserts a new <see cref="FilesystemStudyStorage"/> row populated from <paramref name="entity"/>.
/// </summary>
static public FilesystemStudyStorage Insert(IUpdateContext update, FilesystemStudyStorage entity)
{
    var columns = new FilesystemStudyStorageUpdateColumns
    {
        StudyStorageKey = entity.StudyStorageKey,
        FilesystemKey = entity.FilesystemKey,
        ServerTransferSyntaxKey = entity.ServerTransferSyntaxKey,
        StudyFolder = entity.StudyFolder
    };
    return update.GetBroker<IFilesystemStudyStorageEntityBroker>().Insert(columns);
}
/// <summary>
/// Inserts a new <see cref="DatabaseVersion"/> row populated from <paramref name="entity"/>.
/// </summary>
static public DatabaseVersion Insert(IUpdateContext update, DatabaseVersion entity)
{
    var columns = new DatabaseVersionUpdateColumns
    {
        Major = entity.Major,
        Minor = entity.Minor,
        Build = entity.Build,
        Revision = entity.Revision
    };
    return update.GetBroker<IDatabaseVersionEntityBroker>().Insert(columns);
}
/// <summary>
/// Forwards the update tick to the console input handler, unless the console
/// is in a state that does not accept input.
/// </summary>
/// <param name="gameContext">The current game context.</param>
/// <param name="updateContext">The current update context.</param>
public void Update(IGameContext gameContext, IUpdateContext updateContext)
{
    switch (State)
    {
        case ConsoleState.Closed:
        case ConsoleState.OpenNoInput:
        case ConsoleState.FullOpenNoInput:
            // No input processing in these states.
            return;
    }

    _consoleInput.Update(gameContext, updateContext, Log);
}
/// <summary>
/// Schedules this study for archiving by inserting an ArchiveQueue entry.
/// </summary>
/// <param name="context">The update context used to obtain the insert broker.</param>
/// <exception cref="ApplicationException">Thrown when the insert reports failure.</exception>
public void Archive(IUpdateContext context)
{
    var parms = new InsertArchiveQueueParameters
    {
        ServerPartitionKey = ServerPartitionKey,
        StudyStorageKey = Key
    };

    var broker = context.GetBroker<IInsertArchiveQueue>();
    if (!broker.Execute(parms))
        throw new ApplicationException("Unable to schedule study archive");
}
/// <summary>
/// Creates an updater for the given application, preparing a fresh
/// per-instance temporary directory for downloaded update files.
/// </summary>
/// <param name="version">The currently installed application version.</param>
/// <param name="binDir">The application's binary directory.</param>
/// <param name="feedUrl">The URL of the update feed.</param>
/// <param name="publicKey">The public key used to verify update signatures.</param>
/// <param name="appTitle">The application title, used for display purposes.</param>
public Updater(Version version, string binDir, string feedUrl, string publicKey, string appTitle)
{
    // Unique temp directory per updater instance so concurrent runs don't collide.
    var tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());

    this.context = new UpdateContext
    {
        ApplicationDirectory = new DirectoryInfo(binDir),
        TempDirectory = Directory.CreateDirectory(tempPath),
        ApplicationVersion = version,
        ApplicationTitle = appTitle,
    };

    this.feedUrl = new Uri(feedUrl);
    this.publicKey = publicKey;
}
/// <summary>
/// Updates the study status on the StudyStorage row and the transfer syntax
/// reference on the FilesystemStudyStorage row.
/// </summary>
/// <param name="theProcessor">The command processor calling us.</param>
/// <param name="updateContext">The persistent store connection to use for the update.</param>
protected override void OnExecute(CommandProcessor theProcessor, IUpdateContext updateContext)
{
    // Update StudyStatusEnum in the StudyStorage table.
    var storageBroker = updateContext.GetBroker<IStudyStorageEntityBroker>();
    var storageColumns = new StudyStorageUpdateColumns { StudyStatusEnum = _newStatus };
    storageBroker.Update(_location.Key, storageColumns);

    // Update ServerTransferSyntaxGUID in FilesystemStudyStorage.
    var filesystemBroker = updateContext.GetBroker<IFilesystemStudyStorageEntityBroker>();
    var filesystemColumns = new FilesystemStudyStorageUpdateColumns { ServerTransferSyntaxKey = _newSyntax.Key };
    filesystemBroker.Update(_location.FilesystemStudyStorageKey, filesystemColumns);
}
/// <summary>
/// Per-frame update: keeps the physics controller constraint bound to the
/// current physics world and the entity's primary rigid body, and pushes the
/// controller inputs (velocity, jump, stiffness) into the constraint.
/// </summary>
/// <param name="entity">The entity this component belongs to.</param>
/// <param name="gameContext">The current game context.</param>
/// <param name="updateContext">The current update context.</param>
public void Update(ComponentizedEntity entity, IGameContext gameContext, IUpdateContext updateContext)
{
    if (!Enabled)
    {
        return;
    }

    // The physics engine swapped its internal world: remove our constraint
    // from the old world before rebinding to the new one.
    if (_jitterWorld != _physicsEngine.GetInternalPhysicsWorld())
    {
        // TODO: Deregister rigid bodies from old world.
        if (_jitterWorld != null && _physicsControllerConstraint != null)
        {
            _jitterWorld.RemoveConstraint(_physicsControllerConstraint);
            _physicsControllerConstraint = null;
        }

        _jitterWorld = _physicsEngine.GetInternalPhysicsWorld();
    }

    // The tracked rigid body changed: discard the constraint so it is
    // rebuilt against the new body below.
    // NOTE(review): _rigidBody is compared here but never assigned in this
    // method — confirm it is kept up to date elsewhere.
    if (_physicalComponent.RigidBodies.Length > 0 && _physicalComponent.RigidBodies[0] != _rigidBody)
    {
        if (_physicsControllerConstraint != null)
        {
            _jitterWorld.RemoveConstraint(_physicsControllerConstraint);
            _physicsControllerConstraint = null;
        }
    }

    if (_physicalComponent.RigidBodies.Length > 0)
    {
        // Lazily (re)create the constraint against the primary rigid body.
        if (_physicsControllerConstraint == null)
        {
            _physicsControllerConstraint = new PhysicsControllerConstraint(
                _jitterWorld,
                _physicalComponent.RigidBodies[0]);
            _jitterWorld.AddConstraint(_physicsControllerConstraint);
        }

        // Push the current controller inputs into the constraint each frame.
        _physicsControllerConstraint.TargetVelocity = TargetVelocity.ToJitterVector();
        _physicsControllerConstraint.TryJump = TryJump;
        _physicsControllerConstraint.JumpVelocity = JumpVelocity;
        _physicsControllerConstraint.Stiffness = Stiffness;

        if (TargetVelocity.LengthSquared() > 0f)
        {
            // Wake up the rigid body.
            _physicalComponent.RigidBodies[0].IsActive = true;
        }
    }
}
/// <summary>
/// Applies this command's amount to the account balance.
/// </summary>
/// <param name="context">The update context carrying the account state.</param>
/// <returns><see cref="UnitType.Value"/> (the command produces no result).</returns>
public UnitType Execute(IUpdateContext<AccountState> context)
{
    context.State.Balance += Amount;
    return UnitType.Value;
}
/// <summary>
/// Performs the per-frame update for the given entity. Implementations
/// provide the entity-specific behavior.
/// </summary>
/// <param name="entity">The entity being updated.</param>
/// <param name="gameContext">The current game context.</param>
/// <param name="updateContext">The current update context.</param>
public abstract void Update(IEntity entity, IGameContext gameContext, IUpdateContext updateContext);
/// <summary>
/// Executes the command: initializes internal state, then applies the
/// database updates.
/// </summary>
/// <param name="theProcessor">The command processor calling us.</param>
/// <param name="updateContext">The persistent store connection to use for the update.</param>
protected override void OnExecute(CommandProcessor theProcessor, IUpdateContext updateContext)
{
    Initialize();
    UpdateDatabase();
}
/// <summary>
/// Writes a StudyDeleteRecord audit row after a WebDeleteStudy work queue
/// item completes. Does nothing when the extension is disabled, the work
/// item is not a WebDeleteStudy, or the Study record is missing.
/// </summary>
public void OnStudyDeleted()
{
    if (!Enabled)
    {
        return;
    }

    if (_context.WorkQueueItem.WorkQueueTypeEnum == WorkQueueTypeEnum.WebDeleteStudy)
    {
        Study study = _context.Study;
        if (study == null)
        {
            // Without a Study record there is nothing meaningful to audit.
            Platform.Log(LogLevel.Info, "Not logging Study Delete information due to missing Study record for study: {0} on partition {1}",
                         _context.StorageLocation.StudyInstanceUid,
                         _context.ServerPartition.AeTitle);
            return;
        }

        StudyStorageLocation storage = _context.StorageLocation;

        using (IUpdateContext updateContext = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
        {
            // Setup the parameters
            IStudyDeleteRecordEntityBroker broker = updateContext.GetBroker<IStudyDeleteRecordEntityBroker>();

            StudyDeleteRecordUpdateColumns parms = new StudyDeleteRecordUpdateColumns();
            parms.Timestamp = Platform.Time;
            // Prefer the reason recorded on the work item; fall back to the queue type description.
            WebDeleteStudyLevelQueueData extendedInfo = XmlUtils.Deserialize<WebDeleteStudyLevelQueueData>(_context.WorkQueueItem.Data);
            parms.Reason = extendedInfo != null ? extendedInfo.Reason : _context.WorkQueueItem.WorkQueueTypeEnum.LongDescription;
            parms.ServerPartitionAE = _context.ServerPartition.AeTitle;
            parms.FilesystemKey = storage.FilesystemKey;

            // Copy the study demographics into the audit record.
            parms.AccessionNumber = study.AccessionNumber;
            parms.PatientId = study.PatientId;
            parms.PatientsName = study.PatientsName;
            parms.StudyInstanceUid = study.StudyInstanceUid;
            parms.StudyDate = study.StudyDate;
            parms.StudyDescription = study.StudyDescription;
            parms.StudyTime = study.StudyTime;

            parms.BackupPath = BackupZipFileRelativePath;

            // Preserve any archive information so the study could be located later.
            if (_archives != null && _archives.Count > 0)
            {
                parms.ArchiveInfo = XmlUtils.SerializeAsXmlDoc(_archives);
            }

            StudyDeleteExtendedInfo extInfo = new StudyDeleteExtendedInfo();
            extInfo.ServerInstanceId = ServerPlatform.ServerInstanceId;
            extInfo.UserId = _context.UserId;
            extInfo.UserName = _context.UserName;
            parms.ExtendedInfo = XmlUtils.SerializeAsString(extInfo);

            StudyDeleteRecord deleteRecord = broker.Insert(parms);
            if (deleteRecord == null)
            {
                // Insert failed: log and leave the update context uncommitted.
                Platform.Log(LogLevel.Error, "Unexpected error when trying to create study delete record: {0} on partition {1}",
                             study.StudyInstanceUid, _context.ServerPartition.Description);
            }
            else
            {
                updateContext.Commit();
            }
        }
    }
}
/// <summary>
/// Updates the study status and the stored transfer syntax in the database
/// when a received file's transfer syntax differs (in compression category)
/// from what is currently recorded for the study's storage location.
/// </summary>
/// <param name="theProcessor">The command processor calling us.</param>
/// <param name="updateContext">The persistent store connection to use for the update.</param>
protected override void OnExecute(CommandProcessor theProcessor, IUpdateContext updateContext)
{
    // Compare the file's transfer syntax with the one recorded in the database.
    TransferSyntax fileSyntax = _file.TransferSyntax;
    TransferSyntax dbSyntax = TransferSyntax.GetTransferSyntax(_location.TransferSyntaxUid);

    // Check if the syntaxes match the location (same compression category).
    if ((!fileSyntax.Encapsulated && !dbSyntax.Encapsulated)
        || (fileSyntax.LosslessCompressed && dbSyntax.LosslessCompressed)
        || (fileSyntax.LossyCompressed && dbSyntax.LossyCompressed))
    {
        // no changes necessary, just return;
        return;
    }

    // Select the Server Transfer Syntax row matching the file's syntax.
    var syntaxCriteria = new ServerTransferSyntaxSelectCriteria();
    var syntaxBroker = updateContext.GetBroker<IServerTransferSyntaxEntityBroker>();
    syntaxCriteria.Uid.EqualTo(fileSyntax.UidString);

    ServerTransferSyntax serverSyntax = syntaxBroker.FindOne(syntaxCriteria);
    if (serverSyntax == null)
    {
        Platform.Log(LogLevel.Error, "Unable to load ServerTransferSyntax for {0}. Unable to update study status.", fileSyntax.Name);
        return;
    }

    // Get the FilesystemStudyStorage update broker ready
    var filesystemStudyStorageEntityBroker = updateContext.GetBroker<IFilesystemStudyStorageEntityBroker>();
    var filesystemStorageUpdate = new FilesystemStudyStorageUpdateColumns();
    var filesystemStorageCritiera = new FilesystemStudyStorageSelectCriteria();

    filesystemStorageUpdate.ServerTransferSyntaxKey = serverSyntax.Key;
    filesystemStorageCritiera.StudyStorageKey.EqualTo(_location.Key);

    // Get the StudyStorage update broker ready
    var studyStorageBroker = updateContext.GetBroker<IStudyStorageEntityBroker>();
    var studyStorageUpdate = new StudyStorageUpdateColumns();

    // Derive the new study status from the file's compression category;
    // statusEnum keeps the value so the in-memory location can be updated below.
    StudyStatusEnum statusEnum = _location.StudyStatusEnum;
    if (fileSyntax.LossyCompressed)
        studyStorageUpdate.StudyStatusEnum = statusEnum = StudyStatusEnum.OnlineLossy;
    else if (fileSyntax.LosslessCompressed)
        studyStorageUpdate.StudyStatusEnum = statusEnum = StudyStatusEnum.OnlineLossless;

    studyStorageUpdate.LastAccessedTime = Platform.Time;

    if (!filesystemStudyStorageEntityBroker.Update(filesystemStorageCritiera, filesystemStorageUpdate))
    {
        Platform.Log(LogLevel.Error, "Unable to update FilesystemQueue row: Study {0}, Server Entity {1}",
                     _location.StudyInstanceUid, _location.ServerPartitionKey);
    }
    else if (!studyStorageBroker.Update(_location.GetKey(), studyStorageUpdate))
    {
        Platform.Log(LogLevel.Error, "Unable to update StudyStorage row: Study {0}, Server Entity {1}",
                     _location.StudyInstanceUid, _location.ServerPartitionKey);
    }
    else
    {
        // Update the location, so the next time we come in here, we don't try and update the database
        // for another sop in the study.
        _location.StudyStatusEnum = statusEnum;
        _location.TransferSyntaxUid = fileSyntax.UidString;
        _location.ServerTransferSyntaxKey = serverSyntax.Key;
    }
}
/// <summary>
/// Verifies that the rebuilt study xml is consistent with the set of series and
/// SOP instances actually found on the filesystem during reprocessing, pruning
/// entries that no longer exist, and then updates the counts on the Study record.
/// </summary>
/// <param name="studyXml">The study xml being committed.</param>
/// <param name="processedSeriesMap">Map of series instance uid to the SOP instance uids found on disk.</param>
private void EnsureConsistentObjectCount(StudyXml studyXml, IDictionary<string, List<string>> processedSeriesMap)
{
    Platform.CheckForNullReference(studyXml, "studyXml");

    // We have to ensure that the counts in studyXml and what we have processed are consistent.
    // Files or folder may be reprocessed but then become missing when the entry is resumed.
    // We have to remove them from the studyXml before committing it.
    Platform.Log(LogLevel.Info, "Verifying study xml against the filesystems");
    int filesProcessed = 0;
    foreach (string seriesUid in processedSeriesMap.Keys)
    {
        filesProcessed += processedSeriesMap[seriesUid].Count;
    }

    // Used to keep track of the series to be removed.
    // We can't remove the item from the study xml while we are
    // iterating through it.
    var seriesToRemove = new List<string>();
    foreach (SeriesXml seriesXml in studyXml)
    {
        if (!processedSeriesMap.ContainsKey(seriesXml.SeriesInstanceUid))
        {
            seriesToRemove.Add(seriesXml.SeriesInstanceUid);
        }
        else
        {
            // Check all instances in the series.
            List<string> foundInstances = processedSeriesMap[seriesXml.SeriesInstanceUid];
            var instanceToRemove = new List<string>();
            foreach (InstanceXml instanceXml in seriesXml)
            {
                if (!foundInstances.Contains(instanceXml.SopInstanceUid))
                {
                    // the sop no longer exists in the filesystem
                    instanceToRemove.Add(instanceXml.SopInstanceUid);
                }
            }

            foreach (string instanceUid in instanceToRemove)
            {
                // Assigning null removes the instance from the series xml.
                seriesXml[instanceUid] = null;
                Platform.Log(LogLevel.Info, "Removed SOP {0} in the study xml: it no longer exists.", instanceUid);
            }
        }
    }

    foreach (string seriesUid in seriesToRemove)
    {
        studyXml[seriesUid] = null;
        Platform.Log(LogLevel.Info, "Removed Series {0} in the study xml: it no longer exists.", seriesUid);
    }

    // BUG FIX: this failure message previously formatted NumberOfStudyRelatedInstances
    // (the instance count) although the check compares series counts.
    Platform.CheckTrue(studyXml.NumberOfStudyRelatedSeries == processedSeriesMap.Count,
                       String.Format("Number of series in the xml do not match number of series reprocessed: {0} vs {1}",
                                     studyXml.NumberOfStudyRelatedSeries, processedSeriesMap.Count));

    Platform.CheckTrue(studyXml.NumberOfStudyRelatedInstances == filesProcessed,
                       String.Format("Number of instances in the xml do not match number of reprocessed: {0} vs {1}",
                                     studyXml.NumberOfStudyRelatedInstances, filesProcessed));

    Platform.Log(LogLevel.Info, "Study xml has been verified.");

    if (StorageLocation.Study != null)
    {
        // update the instance count in the db
        using (IUpdateContext updateContext = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
        {
            var broker = updateContext.GetBroker<IStudyEntityBroker>();
            var columns = new StudyUpdateColumns
            {
                NumberOfStudyRelatedInstances = studyXml.NumberOfStudyRelatedInstances,
                NumberOfStudyRelatedSeries = studyXml.NumberOfStudyRelatedSeries
            };
            broker.Update(StorageLocation.Study.GetKey(), columns);
            updateContext.Commit();
        }
    }
    else
    {
        // alert orphaned StudyStorage entry
        RaiseAlert(WorkQueueItem, AlertLevel.Critical,
                   String.Format("Study {0} has been reprocessed but Study record was NOT created. Images reprocessed: {1}. Path={2}",
                                 StorageLocation.StudyInstanceUid, filesProcessed,
                                 StorageLocation.GetStudyPath()));
    }
}
public void Update(IServerContext serverContext, IUpdateContext updateContext) { _physicsEngine.Update(serverContext, updateContext); _physicsWorldControl.SyncPendingChanges(); }
protected override void OnExecute(CommandProcessor theProcessor, IUpdateContext updateContext) { _location = FindOrCreateStudyStorageLocation(); }
public void Update(IGameContext gameContext, IUpdateContext updateContext) { }
/// <summary>
/// Builds a sticker-app pack archive (".stikerpacks" zip) from the sticker set of the
/// sticker message being replied to, then sends the archive back to the chat.
/// </summary>
/// <param name="context">The incoming update context.</param>
/// <param name="next">The next delegate in the pipeline (not invoked here).</param>
/// <param name="args">Command arguments (unused).</param>
public override async Task HandleAsync(IUpdateContext context, UpdateDelegate next, string[] args)
{
    await _telegramService.AddUpdateContext(context);

    var client = _telegramService.Client;
    var message = _telegramService.Message;
    var reducedChatId = _telegramService.ReducedChatId;

    // Feature is gated to beta chats.
    if (!await _telegramService.IsBeta())
    {
        return;
    }

    if (message.ReplyToMessage == null)
    {
        await _telegramService.SendTextMessageAsync("Balas pesan Stiker untuk membangun StikerPack");
        return;
    }

    var repMsg = message.ReplyToMessage;
    var repMsgId = repMsg.MessageId;

    if (repMsg.Type != MessageType.Sticker)
    {
        await _telegramService.SendTextMessageAsync("Balas pesan Stiker untuk membangun StikerPack.");
        return;
    }

    await _telegramService.SendTextMessageAsync("Sedang mengumpulkan StikerSet..");

    var sticker = repMsg.Sticker;
    Log.Debug("Sticker: {0}", sticker.ToJson(true));

    var setName = sticker.SetName;
    var stickerPack = await _telegramService.Client.GetStickerSetAsync(setName);
    Log.Debug("StikerPack: {0}", stickerPack.ToJson(true));

    var listStickers = stickerPack.Stickers;
    var sendEdit = $"Sedang mengunduh {listStickers.Length} Stiker" +
                   $"\nNama: {stickerPack.Name}" +
                   $"\nJudul: {stickerPack.Title}";
    await _telegramService.EditMessageTextAsync(sendEdit);

    var cachePath = Path.Combine("Storage", "Caches", reducedChatId + "_stikerpack-" + repMsgId).SanitizeSlash();
    var packsPath = Path.Combine(cachePath, "stiker-packs").EnsureDirectory(true);

    foreach (var listSticker in listStickers)
    {
        var fileId = listSticker.FileId;
        var filePath = Path.Combine(packsPath, fileId + ".webp").SanitizeSlash();
        Log.Debug("Downloading Sticker: {0} to {1}", fileId, filePath);

        // FIX: dispose via 'using' so the stream is closed even when the download throws;
        // the previous manual Close()/Dispose() pair leaked the handle on failure.
        using (var fileStream = new FileStream(filePath, FileMode.OpenOrCreate))
        {
            await client.GetInfoAndDownloadFileAsync(fileId, fileStream);
        }
    }

    await _telegramService.EditMessageTextAsync("Sedang membangun StikerPack..");
    var listStikerItem = new List<StickerItem>();
    foreach (var listSticker in listStickers)
    {
        var filePath = listSticker.FileId + ".webp";
        listStikerItem.Add(new StickerItem()
        {
            Emojis = new List<string>() { listSticker.Emoji },
            ImageFile = filePath
        });
    }

    var listStikerPacks = new List<StickerPack>
    {
        new StickerPack()
        {
            Identifier = "stiker-packs",
            Name = sticker.SetName,
            Publisher = "ZiZi StikerPack Kit",
            TrayImageFile = listStikerItem.First().ImageFile,
            ImageDataVersion = 1,
            AvoidCache = false,
            PublisherEmail = "*****@*****.**",
            PublisherWebsite = new Uri("https://github.com/WinTenDev/WinTenBot.NET"),
            PrivacyPolicyWebsite = "",
            LicenseAgreementWebsite = "https://github.com/WinTenDev/WinTenBot.NET/blob/master/LICENSE",
            Stickers = listStikerItem
        }
    };

    var stikerPacksJson = new StickerAppItem()
    {
        AndroidPlayStoreLink = new Uri("https://play.google.com/store/apps/details?id=com.kanelai.stickerapp"),
        IosAppStoreLink = "",
        StickerPacks = listStikerPacks
    }.ToJson(true);

    var contents = Path.Combine(cachePath, "contents.json");
    await _telegramService.EditMessageTextAsync("Sedang menulis Metadata..");
    await File.WriteAllTextAsync(contents, stikerPacksJson);

    await _telegramService.EditMessageTextAsync("Sedang Membuat paket StikerPacks..");
    var packName = Path.Combine(cachePath, $"zizi-stikerpacks-{sticker.SetName}-{repMsgId}.stikerpacks");

    // Exclude previously built pack archives from the new archive.
    // FIX: removed the unused 'zipFileName' and 'zipPack' locals.
    var files = Directory.GetFiles(cachePath, "*.*", SearchOption.AllDirectories)
        .Where(x => !x.Contains(".stikerpacks"));
    files.CreateZip(packName);

    await _telegramService.SendMediaAsync(packName, MediaType.LocalDocument);
}
public static bool ExampleButton(IUpdateContext obj) { return(obj.Update.Message.Text.StartsWith("Emoji :)")); // just example :) all keyboardButtons should start with emoji }
/// <summary> /// Updates the entity. /// </summary> /// <param name="gameContext">The current game context.</param> /// <param name="updateContext">The current update context.</param> public void Update(IGameContext gameContext, IUpdateContext updateContext) { _update.Invoke(gameContext, updateContext); }
public Task HandleAsync(IUpdateContext context, UpdateDelegate next, CancellationToken cancellationToken) => _predicate(context) ? _branch(context, cancellationToken) : next(context, cancellationToken);
public void ReceivePredictedNetworkIDFromClient(IServerContext serverContext, IUpdateContext updateContext, MxClient client, int predictedIdentifier) { throw new InvalidOperationException( "Entity groups can not receive predicted network IDs. This indicates an error in the code."); }
/// <summary> /// Import authority groups. /// </summary> /// <remarks> /// Creates any authority groups that do not already exist. /// This method performs an additive import. It will never remove an existing authority group or /// remove authority tokens from an existing group. /// </remarks> /// <param name="groupDefs"></param> /// <param name="context"></param> public IList <AuthorityGroup> Import(IEnumerable <AuthorityGroupDefinition> groupDefs, IUpdateContext context) { // first load all the existing tokens into memory // there should not be that many tokens ( < 500), so this should not be a problem IAuthorityTokenBroker tokenBroker = context.GetBroker <IAuthorityTokenBroker>(); IList <AuthorityToken> existingTokens = tokenBroker.FindAll(); // load existing groups IAuthorityGroupBroker groupBroker = context.GetBroker <IAuthorityGroupBroker>(); IList <AuthorityGroup> existingGroups = groupBroker.FindAll(); foreach (AuthorityGroupDefinition groupDef in groupDefs) { AuthorityGroup group = CollectionUtils.SelectFirst(existingGroups, g => g.Name == groupDef.Name); // if group does not exist, create it if (group == null) { group = new AuthorityGroup { Name = groupDef.Name, Description = groupDef.Description, DataGroup = groupDef.DataGroup }; context.Lock(group, DirtyState.New); existingGroups.Add(group); } // process all token nodes contained in group foreach (string tokenName in groupDef.Tokens) { AuthorityToken token = CollectionUtils.SelectFirst(existingTokens, t => t.Name == tokenName); // ignore non-existent tokens if (token == null) { continue; } // add the token to the group group.AuthorityTokens.Add(token); } } return(existingGroups); }
/// <summary>
/// The processing thread.
/// </summary>
/// <remarks>
/// This method queries the database for ServiceLock entries to work on, and then uses
/// a thread pool to process the entries. It loops until <c>_stop</c> is set, waiting on
/// <c>_terminationEvent</c>/<c>_threadStop</c> between polls.
/// </remarks>
public void Run()
{
    // Start the thread pool
    if (!_threadPool.Active)
    {
        _threadPool.Start();
    }

    // Reset any queue items related to this service that have the Lock bit set
    // (e.g. left over from a previous crash).
    try
    {
        ResetLocked();
    }
    catch (Exception e)
    {
        Platform.Log(LogLevel.Fatal, e,
                     "Unable to reset ServiceLock items on startup. There may be ServiceLock items orphaned in the queue.");
    }

    Platform.Log(LogLevel.Info, "ServiceLock Processor is running");

    while (true)
    {
        try
        {
            if (_threadPool.CanQueueItem)
            {
                Model.ServiceLock queueListItem;

                // Claim the next pending ServiceLock item for this processor.
                using (IUpdateContext updateContext = _store.OpenUpdateContext(UpdateContextSyncMode.Flush))
                {
                    IQueryServiceLock select = updateContext.GetBroker<IQueryServiceLock>();
                    ServiceLockQueryParameters parms = new ServiceLockQueryParameters();
                    parms.ProcessorId = ServerPlatform.ProcessorId;
                    queueListItem = select.FindOne(parms);
                    updateContext.Commit();
                }

                if (queueListItem == null)
                {
                    // Nothing to do: sleep up to 30s, or until shutdown / a worker finishing wakes us.
                    WaitHandle.WaitAny(new WaitHandle[] { _terminationEvent, _threadStop }, TimeSpan.FromSeconds(30), false);
                    _threadStop.Reset();
                }
                else
                {
                    if (!_extensions.ContainsKey(queueListItem.ServiceLockTypeEnum))
                    {
                        Platform.Log(LogLevel.Error,
                                     "No extensions loaded for ServiceLockTypeEnum item type: {0}. Failing item.",
                                     queueListItem.ServiceLockTypeEnum);

                        // Just fail the ServiceLock item, not much else we can do.
                        ResetServiceLock(queueListItem);
                        continue;
                    }

                    IServiceLockProcessorFactory factory = _extensions[queueListItem.ServiceLockTypeEnum];

                    IServiceLockItemProcessor processor;
                    try
                    {
                        processor = factory.GetItemProcessor();
                    }
                    catch (Exception e)
                    {
                        Platform.Log(LogLevel.Error, e, "Unexpected exception creating ServiceLock processor.");
                        ResetServiceLock(queueListItem);
                        continue;
                    }

                    // Hand the item to the pool; the delegate below runs on a pool thread.
                    _threadPool.Enqueue(processor, queueListItem,
                                        delegate(IServiceLockItemProcessor queueProcessor, Model.ServiceLock queueItem)
                    {
                        try
                        {
                            queueProcessor.Process(queueItem);
                        }
                        catch (Exception e)
                        {
                            Platform.Log(LogLevel.Error, e,
                                         "Unexpected exception when processing ServiceLock item of type {0}. Failing Queue item. (GUID: {1})",
                                         queueItem.ServiceLockTypeEnum, queueItem.GetKey());
                            ServerPlatform.Alert(AlertCategory.Application, AlertLevel.Error, "ServiceLockProcessor", AlertTypeCodes.UnableToProcess,
                                                 null, TimeSpan.Zero,
                                                 "Exception thrown when processing {0} ServiceLock item : {1}",
                                                 queueItem.ServiceLockTypeEnum.Description, e.Message);
                            ResetServiceLock(queueItem);
                        }

                        // Cleanup the processor
                        queueProcessor.Dispose();

                        // Signal the thread to come out of sleep mode
                        _threadStop.Set();
                    });
                }
            }
            else
            {
                // Wait for only 5 seconds when the thread pool is all in use.
                WaitHandle.WaitAny(new WaitHandle[] { _terminationEvent, _threadStop }, TimeSpan.FromSeconds(5), false);
                _threadStop.Reset();
            }
        }
        catch (Exception ex)
        {
            // Defensive catch: keep the processor thread alive on unexpected errors and retry.
            Platform.Log(LogLevel.Error, ex, "Exception has occurred : {0}. Retry later.", ex.Message);
            WaitHandle.WaitAny(new WaitHandle[] { _terminationEvent, _threadStop }, TimeSpan.FromSeconds(5), false);
            _threadStop.Reset();
        }

        if (_stop)
        {
            return;
        }
    }
}
/// <summary> /// The update. /// </summary> /// <param name="entity"> /// The entity. /// </param> public abstract void Update(IServerEntity entity, IServerContext serverContext, IUpdateContext updateContext);
/// <summary>
/// Process StudyCompress Candidates retrieved from the <see cref="Model.FilesystemQueue"/> table
/// </summary>
/// <param name="candidateList">The list of candidate studies for compression.</param>
/// <param name="type">The type of compress candidate (lossy or lossless)</param>
private void ProcessCompressCandidates(IEnumerable<FilesystemQueue> candidateList, FilesystemQueueTypeEnum type)
{
    using (ServerExecutionContext context = new ServerExecutionContext())
    {
        // Stagger the schedule so inserted items don't all fire at once.
        DateTime scheduledTime = Platform.Time.AddSeconds(10);

        foreach (FilesystemQueue queueItem in candidateList)
        {
            // Check for Shutdown/Cancel
            if (CancelPending)
            {
                break;
            }

            // First, get the StudyStorage locations for the study, and calculate the disk usage.
            StudyStorageLocation location;
            if (!FilesystemMonitor.Instance.GetWritableStudyStorageLocation(queueItem.StudyStorageKey, out location))
            {
                continue;
            }

            StudyXml studyXml;
            try
            {
                studyXml = LoadStudyXml(location);
            }
            catch (Exception e)
            {
                Platform.Log(LogLevel.Error, e, "Skipping compress candidate, unexpected exception loading StudyXml file for {0}", location.GetStudyPath());
                continue;
            }

            using (IUpdateContext update = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
            {
                // Lock the study into the CompressScheduled state before queuing the work item.
                ILockStudy lockstudy = update.GetBroker<ILockStudy>();
                LockStudyParameters lockParms = new LockStudyParameters();
                lockParms.StudyStorageKey = location.Key;
                lockParms.QueueStudyStateEnum = QueueStudyStateEnum.CompressScheduled;
                if (!lockstudy.Execute(lockParms) || !lockParms.Successful)
                {
                    // FIX: message no longer claims "Lossless" — this path handles lossy candidates too.
                    Platform.Log(LogLevel.Warn, "Unable to lock study for inserting compress request. Reason:{0}. Skipping study ({1})",
                                 lockParms.FailureReason, location.StudyInstanceUid);
                    continue;
                }

                scheduledTime = scheduledTime.AddSeconds(3);

                IInsertWorkQueueFromFilesystemQueue workQueueInsert = update.GetBroker<IInsertWorkQueueFromFilesystemQueue>();

                // FIX: removed duplicate assignments — FilesystemQueueTypeEnum was first set to
                // LosslessCompress and then overwritten with 'type', and WorkQueueTypeEnum was
                // set twice; only the final values take effect. Also dropped the redundant
                // 'expirationTime' alias for scheduledTime.
                InsertWorkQueueFromFilesystemQueueParameters insertParms = new InsertWorkQueueFromFilesystemQueueParameters();
                insertParms.WorkQueueTypeEnum = WorkQueueTypeEnum.CompressStudy;
                insertParms.FilesystemQueueTypeEnum = type;
                insertParms.StudyStorageKey = location.GetKey();
                insertParms.ServerPartitionKey = location.ServerPartitionKey;
                insertParms.ScheduledTime = scheduledTime;
                insertParms.DeleteFilesystemQueue = true;
                insertParms.Data = queueItem.QueueXml;

                try
                {
                    WorkQueue entry = workQueueInsert.FindOne(insertParms);
                    InsertWorkQueueUidFromStudyXml(studyXml, update, entry.GetKey());
                    update.Commit();
                    _studiesInserted++;
                }
                catch (Exception e)
                {
                    Platform.Log(LogLevel.Error, e, "Skipping compress record, unexpected problem inserting 'CompressStudy' record into WorkQueue for Study {0}", location.StudyInstanceUid);
                    // throw; -- would cause abort of inserts, go ahead and try everything
                }
            }
        }
    }
}
/// <summary> /// Import authority groups from extensions of <see cref="DefineAuthorityGroupsExtensionPoint"/>. /// </summary> /// <remarks> /// Creates any authority groups that do not already exist. /// This method performs an additive import. It will never remove an existing authority group or /// remove authority tokens from an existing group. /// </remarks> /// <param name="context"></param> public IList <AuthorityGroup> ImportFromPlugins(IUpdateContext context) { AuthorityGroupDefinition[] groupDefs = AuthorityGroupSetup.GetDefaultAuthorityGroups(); return(Import(groupDefs, context)); }
/// <summary>
/// Migrates the study to new tier
/// </summary>
/// <param name="storage">The current storage location of the study.</param>
/// <param name="newFilesystem">The target filesystem (new tier) to move the study to.</param>
private void DoMigrateStudy(StudyStorageLocation storage, ServerFilesystemInfo newFilesystem)
{
    Platform.CheckForNullReference(storage, "storage");
    Platform.CheckForNullReference(newFilesystem, "newFilesystem");

    TierMigrationStatistics stat = new TierMigrationStatistics { StudyInstanceUid = storage.StudyInstanceUid };
    stat.ProcessSpeed.Start();
    StudyXml studyXml = storage.LoadStudyXml();
    stat.StudySize = (ulong)studyXml.GetStudySize();

    Platform.Log(LogLevel.Info, "About to migrate study {0} from {1} to {2}",
                 storage.StudyInstanceUid, storage.FilesystemTierEnum, newFilesystem.Filesystem.Description);

    string newPath = Path.Combine(newFilesystem.Filesystem.FilesystemPath, storage.PartitionFolder);
    DateTime startTime = Platform.Time;
    DateTime lastLog = Platform.Time;
    int fileCounter = 0;
    ulong bytesCopied = 0;
    long instanceCountInXml = studyXml.NumberOfStudyRelatedInstances;

    using (ServerCommandProcessor processor = new ServerCommandProcessor("Migrate Study"))
    {
        TierMigrationContext context = new TierMigrationContext
        {
            OriginalStudyLocation = storage,
            Destination = newFilesystem
        };

        // Build the destination path piece by piece, creating each level via commands
        // so the work participates in the processor's rollback handling.
        string origFolder = context.OriginalStudyLocation.GetStudyPath();
        processor.AddCommand(new CreateDirectoryCommand(newPath));

        newPath = Path.Combine(newPath, context.OriginalStudyLocation.StudyFolder);
        processor.AddCommand(new CreateDirectoryCommand(newPath));

        newPath = Path.Combine(newPath, context.OriginalStudyLocation.StudyInstanceUid);
        // don't create this directory so that it won't be backed up by MoveDirectoryCommand

        CopyDirectoryCommand copyDirCommand = new CopyDirectoryCommand(origFolder, newPath,
            delegate(string path)
            {
                // Progress callback, invoked per copied file.
                // Update the progress. This is useful if the migration takes long time to complete.
                FileInfo file = new FileInfo(path);
                bytesCopied += (ulong)file.Length;
                fileCounter++;
                // Only log/update progress when a DICOM file was copied.
                if (file.Extension != null && file.Extension.Equals(ServerPlatform.DicomFileExtension, StringComparison.InvariantCultureIgnoreCase))
                {
                    TimeSpan elapsed = Platform.Time - lastLog;
                    TimeSpan totalElapsed = Platform.Time - startTime;
                    double speedInMBPerSecond = 0;
                    if (totalElapsed.TotalSeconds > 0)
                    {
                        speedInMBPerSecond = (bytesCopied / 1024f / 1024f) / totalElapsed.TotalSeconds;
                    }

                    // Throttle progress updates to the configured interval.
                    if (elapsed > TimeSpan.FromSeconds(WorkQueueSettings.Instance.TierMigrationProgressUpdateInSeconds))
                    {
                        #region Log Progress

                        StringBuilder stats = new StringBuilder();
                        if (instanceCountInXml != 0)
                        {
                            float pct = (float)fileCounter / instanceCountInXml;
                            stats.AppendFormat("{0} files moved [{1:0.0}MB] since {2} ({3:0}% completed). Speed={4:0.00}MB/s",
                                               fileCounter, bytesCopied / 1024f / 1024f, startTime, pct * 100, speedInMBPerSecond);
                        }
                        else
                        {
                            stats.AppendFormat("{0} files moved [{1:0.0}MB] since {2}. Speed={3:0.00}MB/s",
                                               fileCounter, bytesCopied / 1024f / 1024f, startTime, speedInMBPerSecond);
                        }

                        Platform.Log(LogLevel.Info, "Tier migration for study {0}: {1}", storage.StudyInstanceUid, stats.ToString());
                        try
                        {
                            // Surface the progress on the WorkQueue entry (FailureDescription is used as a status field here).
                            using (IUpdateContext ctx = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
                            {
                                IWorkQueueEntityBroker broker = ctx.GetBroker<IWorkQueueEntityBroker>();
                                WorkQueueUpdateColumns parameters = new WorkQueueUpdateColumns
                                {
                                    FailureDescription = stats.ToString()
                                };
                                broker.Update(WorkQueueItem.GetKey(), parameters);
                                ctx.Commit();
                            }
                        }
                        catch
                        {
                            // Can't log the progress so far... just ignore it.
                        }
                        finally
                        {
                            // NOTE(review): uses DateTime.Now while the rest of the method uses
                            // Platform.Time — confirm this is intentional.
                            lastLog = DateTime.Now;
                        }
                        #endregion
                    }
                }
            });
        processor.AddCommand(copyDirCommand);

        DeleteDirectoryCommand delDirCommand = new DeleteDirectoryCommand(origFolder, false)
        {
            RequiresRollback = false
        };
        processor.AddCommand(delDirCommand);

        TierMigrateDatabaseUpdateCommand updateDbCommand = new TierMigrateDatabaseUpdateCommand(context);
        processor.AddCommand(updateDbCommand);

        Platform.Log(LogLevel.Info, "Start migrating study {0}.. expecting {1} to be moved",
                     storage.StudyInstanceUid, ByteCountFormatter.Format(stat.StudySize));
        if (!processor.Execute())
        {
            if (processor.FailureException != null)
            {
                throw processor.FailureException;
            }
            throw new ApplicationException(processor.FailureReason);
        }

        stat.DBUpdate = updateDbCommand.Statistics;
        stat.CopyFiles = copyDirCommand.CopySpeed;
        stat.DeleteDirTime = delDirCommand.Statistics;
    }

    stat.ProcessSpeed.SetData(bytesCopied);
    stat.ProcessSpeed.End();

    Platform.Log(LogLevel.Info,
                 "Successfully migrated study {0} from {1} to {2} in {3} [ {4} files, {5} @ {6}, DB Update={7}, Remove Dir={8}]",
                 storage.StudyInstanceUid, storage.FilesystemTierEnum,
                 newFilesystem.Filesystem.FilesystemTierEnum,
                 TimeSpanFormatter.Format(stat.ProcessSpeed.ElapsedTime), fileCounter,
                 ByteCountFormatter.Format(bytesCopied), stat.CopyFiles.FormattedValue,
                 stat.DBUpdate.FormattedValue, stat.DeleteDirTime.FormattedValue);

    // If the original folder is still present, it must be cleaned up manually; alert the operator.
    string originalPath = storage.GetStudyPath();
    if (Directory.Exists(storage.GetStudyPath()))
    {
        Platform.Log(LogLevel.Info, "Original study folder could not be deleted. It must be cleaned up manually: {0}", originalPath);
        ServerPlatform.Alert(AlertCategory.Application, AlertLevel.Warning, WorkQueueItem.WorkQueueTypeEnum.ToString(), 1000, GetWorkQueueContextData(WorkQueueItem), TimeSpan.Zero,
                             "Study has been migrated to a new tier. Original study folder must be cleaned up manually: {0}", originalPath);
    }

    UpdateAverageStatistics(stat);
}
public void ReceiveNetworkIDFromServer(IGameContext gameContext, IUpdateContext updateContext, int identifier, int initialFrameTick) { throw new InvalidOperationException( "Entity groups can not receive network IDs. This indicates an error in the code."); }
public void Update(IServerContext serverContext, IUpdateContext updateContext) { }
/// <summary>
/// Checks for a storage location for the study in the database, and creates a new location
/// in the database if it doesn't exist.
/// </summary>
/// <param name="studyInstanceUid">The Study Instance UID of the study.</param>
/// <param name="studyDate">The study date, used when resolving the storage folder.</param>
/// <param name="syntax">The transfer syntax of the study; determines the initial study status.</param>
/// <param name="updateContext">The update context to create the study on.</param>
/// <param name="partition">The partition where the study is being sent to.</param>
/// <param name="created">Set to true only when a new StudyStorage record was inserted.</param>
/// <returns>A <see cref="StudyStorageLocation"/> instance.</returns>
/// <exception cref="NoWritableFilesystemException">Thrown when no writable filesystem can be selected for a new study.</exception>
public StudyStorageLocation GetOrCreateWritableStudyStorageLocation(string studyInstanceUid, string studyDate, TransferSyntax syntax, IUpdateContext updateContext, ServerPartition partition, out bool created)
{
    created = false;

    StudyStorageLocation location;
    try
    {
        // Fast path: the study already has a writable location.
        GetWritableStudyStorageLocation(partition.Key, studyInstanceUid, StudyRestore.True, StudyCache.True, out location);
        return location;
    }
    catch (StudyNotFoundException)
    {
        // Expected for a brand-new study; fall through and create the location.
    }

    FilesystemSelector selector = new FilesystemSelector(Instance);
    ServerFilesystemInfo filesystem = selector.SelectFilesystem();
    if (filesystem == null)
    {
        throw new NoWritableFilesystemException();
    }

    IInsertStudyStorage locInsert = updateContext.GetBroker<IInsertStudyStorage>();
    InsertStudyStorageParameters insertParms = new InsertStudyStorageParameters
    {
        ServerPartitionKey = partition.GetKey(),
        StudyInstanceUid = studyInstanceUid,
        Folder = ResolveStorageFolder(partition.Key, studyInstanceUid, studyDate, updateContext, false /* set to false for optimization because we are sure it's not in the system */),
        FilesystemKey = filesystem.Filesystem.GetKey(),
        QueueStudyStateEnum = QueueStudyStateEnum.Idle
    };

    // The initial study status reflects how the images are compressed.
    if (syntax.LosslessCompressed)
    {
        insertParms.TransferSyntaxUid = syntax.UidString;
        insertParms.StudyStatusEnum = StudyStatusEnum.OnlineLossless;
    }
    else if (syntax.LossyCompressed)
    {
        insertParms.TransferSyntaxUid = syntax.UidString;
        insertParms.StudyStatusEnum = StudyStatusEnum.OnlineLossy;
    }
    else
    {
        insertParms.TransferSyntaxUid = TransferSyntax.ExplicitVrLittleEndianUid;
        insertParms.StudyStatusEnum = StudyStatusEnum.Online;
    }

    location = locInsert.FindOne(insertParms);
    created = true;

    return location;
}
public void Update(IGameContext gameContext, IUpdateContext updateContext) { _physicsEngine.Update(gameContext, updateContext); _physicsWorldControl.SyncPendingChanges(); }
public override async Task HandleAsync(IUpdateContext context, UpdateDelegate next, string[] args, CancellationToken cancellationToken) { _telegramService = new TelegramService(context); _wordFilterService = new WordFilterService(context.Update.Message); var msg = context.Update.Message; var cleanedMsg = msg.Text.GetTextWithoutCmd(); var partedMsg = cleanedMsg.Split(" "); var paramOption = partedMsg.ValueOfIndex(1) ?? ""; var word = partedMsg.ValueOfIndex(0); var isGlobalBlock = false; var isSudoer = _telegramService.IsSudoer(); var isAdmin = await _telegramService.IsAdminGroup() .ConfigureAwait(false); if (!isSudoer && !isAdmin) { return; } if (word.IsValidUrl()) { word = word.ParseUrl().Path; } var where = new Dictionary <string, object>() { { "word", word } }; if (paramOption.IsContains("-")) { if (paramOption.IsContains("g") && isSudoer) // Global { isGlobalBlock = true; await _telegramService.AppendTextAsync("Kata ini akan di blokir Global!") .ConfigureAwait(false); } if (paramOption.IsContains("d")) { } if (paramOption.IsContains("c")) { } } if (!paramOption.IsContains("g")) { @where.Add("chat_id", msg.Chat.Id); } if (!isSudoer) { await _telegramService.AppendTextAsync("Hanya Sudoer yang dapat memblokir Kata mode Group-wide!") .ConfigureAwait(false); } if (word.IsNotNullOrEmpty()) { await _telegramService.AppendTextAsync("Sedang menambahkan kata") .ConfigureAwait(false); var isExist = await _wordFilterService.IsExistAsync(@where) .ConfigureAwait(false); if (!isExist) { var save = await _wordFilterService.SaveWordAsync(word, isGlobalBlock) .ConfigureAwait(false); await _telegramService.AppendTextAsync("Kata berhasil di tambahkan") .ConfigureAwait(false); } else { await _telegramService.AppendTextAsync("Kata sudah di tambahkan") .ConfigureAwait(false); } } else { await _telegramService.SendTextAsync("Apa kata yg mau di blok?") .ConfigureAwait(false); } await _telegramService.DeleteAsync(delay : 3000) .ConfigureAwait(false); }
/// <summary>
/// Per-frame editor update: keeps the external game-host process alive, (re)starting it
/// when there is no process, it has exited, the game binary changed on disk, or an
/// explicit debug/restart request is pending.
/// </summary>
/// <param name="gameContext">The current game context.</param>
/// <param name="updateContext">The current update context.</param>
public void Update(IGameContext gameContext, IUpdateContext updateContext)
{
    // A reconnect happened recently; push the shared textures to the new host client.
    if (_requiresDelaySync && _gameHostClient != null)
    {
        SendTexturesToGameHost();
        _requiresDelaySync = false;
    }

    // Nothing to host until a project with a built game binary is loaded.
    if (_projectManager.Project == null ||
        _projectManager.Project.DefaultGameBinPath == null)
    {
        return;
    }

    if (!_projectManager.Project.DefaultGameBinPath.Exists)
    {
        return;
    }

    if (_process == null ||
        _process.HasExited ||
        // TODO: Use file watcher...
        (_executingFile != null && _executingFile.LastWriteTimeUtc != new FileInfo(_executingFile.FullName).LastWriteTimeUtc) ||
        _shouldDebug ||
        _shouldRestart)
    {
        // The host executable lives next to the editor assembly.
        var extHostPath = Path.Combine(new FileInfo(Assembly.GetExecutingAssembly().Location).DirectoryName, "Protogame.Editor.GameHost.exe");
        var processStartInfo = new ProcessStartInfo
        {
            FileName = extHostPath,
            Arguments = (_shouldDebug ? "--debug " : "") + "--track " + Process.GetCurrentProcess().Id + " --editor-url " + _grpcServer.GetServerUrl() + " --assembly-path \"" + _projectManager.Project.DefaultGameBinPath.FullName + "\"",
            WorkingDirectory = _projectManager.Project.DefaultGameBinPath.DirectoryName,
            UseShellExecute = false,
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            CreateNoWindow = true
        };

        // Update last write time.
        _baseDirectory = _projectManager.Project.DefaultGameBinPath.DirectoryName;
        _executingFile = new FileInfo(_projectManager.Project.DefaultGameBinPath.FullName);
        _shouldDebug = false;
        _shouldRestart = false;

        // Kill the previous host (if any) before starting a new one.
        if (_process != null)
        {
            try
            {
                _process.EnableRaisingEvents = false;
                _process.Kill();
            }
            catch { }
            _consoleHandle.LogDebug("Game host process was killed for reload: {0}", _projectManager.Project.DefaultGameBinPath.FullName);
            _process = null;
            _channel = null;
            _gameHostClient = null;
            _loadedGameState = null;

            // The process may have exited mid-draw, which could keep a texture locked. Destroy
            // the textures and recreate them to ensure they're not locked.
            _sharedRendererHost.DestroyTextures();
        }

        _process = Process.Start(processStartInfo);
        _process.Exited += (sender, e) =>
        {
            _consoleHandle.LogWarning("Game host process has unexpectedly quit: {0}", _projectManager.Project.DefaultGameBinPath.FullName);
            _process = null;
            _channel = null;
            _gameHostClient = null;
            _loadedGameState = null;

            // The process may have exited mid-draw, which could keep a texture locked. Destroy
            // the textures and recreate them to ensure they're not locked.
            _sharedRendererHost.DestroyTextures();
        };
        _process.OutputDataReceived += (sender, e) =>
        {
            if (e.Data == null)
            {
                return;
            }

            // Once a channel exists, stdout is treated purely as log output from the host.
            if (_channel != null)
            {
                _consoleHandle.LogDebug(e.Data);
                return;
            }

            // The first stdout line is used as the host's gRPC URL to connect to.
            var editorGrpcServer = _grpcServer.GetServerUrl();
            _consoleHandle.LogDebug("Editor gRPC server is {0}", editorGrpcServer);
            var url = e.Data?.Trim();
            _consoleHandle.LogDebug("Creating gRPC channel on {0}...", url);
            _channel = new Channel(url, ChannelCredentials.Insecure);
            _gameHostClient = new GameHostServerClient(_channel);
            _requiresDelaySync = true;

            // Resume playback if the restart was requested while the game was playing.
            if (_runAfterRestart)
            {
                _gameHostClient?.SetPlaybackMode(new SetPlaybackModeRequest { Playing = true });
                _runAfterRestart = false;
            }
        };
        _process.ErrorDataReceived += (sender, e) =>
        {
            if (e.Data != null)
            {
                _consoleHandle.LogError(e.Data);
            }
        };
        _process.EnableRaisingEvents = true;
        _process.BeginErrorReadLine();
        _process.BeginOutputReadLine();
    }
}
public void Update(IServerContext serverContext, IUpdateContext updateContext) { _coroutineScheduler.Update(serverContext, updateContext); }
public static bool NewUpdate(IUpdateContext context) => context.Update != null;
/// <summary>
/// Inserts work queue entry to process the duplicates.
/// </summary>
/// <param name="entryKey"><see cref="ServerEntityKey"/> of the <see cref="StudyIntegrityQueue"/> entry that has <see cref="StudyIntegrityReasonEnum"/> equal to <see cref="StudyIntegrityReasonEnum.Duplicate"/> </param>
/// <param name="action">The action to take on the duplicate SOPs.</param>
/// <exception cref="ApplicationException">Thrown when the study cannot be locked into the ReconcileScheduled state.</exception>
public void Process(ServerEntityKey entryKey, ProcessDuplicateAction action)
{
    DuplicateSopReceivedQueue entry = DuplicateSopReceivedQueue.Load(HttpContext.Current.GetSharedPersistentContext(), entryKey);
    Platform.CheckTrue(entry.StudyIntegrityReasonEnum == StudyIntegrityReasonEnum.Duplicate, "Invalid type of entry");

    IList<StudyIntegrityQueueUid> uids = LoadDuplicateSopUid(entry);

    // All inserts/deletes below happen in one update context and commit atomically.
    using (IUpdateContext context = PersistentStoreRegistry.GetDefaultStore().OpenUpdateContext(UpdateContextSyncMode.Flush))
    {
        ProcessDuplicateQueueEntryQueueData data = new ProcessDuplicateQueueEntryQueueData
        {
            Action = action,
            DuplicateSopFolder = entry.GetFolderPath(context),
            UserName = ServerHelper.CurrentUserName,
        };

        // Move the study into the ReconcileScheduled state before queuing the work item.
        LockStudyParameters lockParms = new LockStudyParameters
        {
            QueueStudyStateEnum = QueueStudyStateEnum.ReconcileScheduled,
            StudyStorageKey = entry.StudyStorageKey
        };
        ILockStudy lockBbroker = context.GetBroker<ILockStudy>();
        lockBbroker.Execute(lockParms);
        if (!lockParms.Successful)
        {
            throw new ApplicationException(lockParms.FailureReason);
        }

        // Insert the ProcessDuplicateSop work queue entry.
        IWorkQueueProcessDuplicateSopBroker broker = context.GetBroker<IWorkQueueProcessDuplicateSopBroker>();
        WorkQueueProcessDuplicateSopUpdateColumns columns = new WorkQueueProcessDuplicateSopUpdateColumns
        {
            Data = XmlUtils.SerializeAsXmlDoc(data),
            GroupID = entry.GroupID,
            ScheduledTime = Platform.Time,
            ExpirationTime = Platform.Time.Add(TimeSpan.FromMinutes(15)),
            ServerPartitionKey = entry.ServerPartitionKey,
            WorkQueuePriorityEnum = WorkQueuePriorityEnum.Medium,
            StudyStorageKey = entry.StudyStorageKey,
            WorkQueueStatusEnum = WorkQueueStatusEnum.Pending
        };

        WorkQueueProcessDuplicateSop processDuplicateWorkQueueEntry = broker.Insert(columns);

        // Move each duplicate UID from the integrity queue onto the new work queue entry.
        IWorkQueueUidEntityBroker workQueueUidBroker = context.GetBroker<IWorkQueueUidEntityBroker>();
        IStudyIntegrityQueueUidEntityBroker duplicateUidBroke = context.GetBroker<IStudyIntegrityQueueUidEntityBroker>();
        foreach (StudyIntegrityQueueUid uid in uids)
        {
            WorkQueueUidUpdateColumns uidColumns = new WorkQueueUidUpdateColumns
            {
                Duplicate = true,
                Extension = ServerPlatform.DuplicateFileExtension,
                SeriesInstanceUid = uid.SeriesInstanceUid,
                SopInstanceUid = uid.SopInstanceUid,
                RelativePath = uid.RelativePath,
                WorkQueueKey = processDuplicateWorkQueueEntry.GetKey()
            };

            workQueueUidBroker.Insert(uidColumns);
            duplicateUidBroke.Delete(uid.GetKey());
        }

        // Remove the processed duplicate entry and commit everything.
        IDuplicateSopEntryEntityBroker duplicateEntryBroker = context.GetBroker<IDuplicateSopEntryEntityBroker>();
        duplicateEntryBroker.Delete(entry.GetKey());

        context.Commit();
    }
}
public void Update(IGameContext gameContext, IUpdateContext updateContext) { _renderAutoCache.Update(gameContext, updateContext); }