/// <summary>
/// Removes the most recently added annotation, if any, and raises the
/// AnnotationDeleted event for the removed item. No-op when the list is empty.
/// </summary>
public void DeleteLastAnnotation()
{
    Annotation toRemove = Annotations.LastOrDefault();
    if (toRemove == null)
    {
        return; // nothing to delete
    }

    Annotations.Remove(toRemove);
    // Null-conditional invoke: avoids the race between the null check and the
    // call that the original `if (handler != null) handler(...)` pattern had.
    this.AnnotationDeleted?.Invoke(toRemove);
}
/// <summary>
/// Finds window corresponding to specified ViewModel.
/// </summary>
/// <param name="a_viewModel">ViewModel whose owning window is requested.</param>
/// <returns>The Window that hosts the view bound to the given ViewModel.</returns>
/// <exception cref="ArgumentException">No registered view references the ViewModel.</exception>
/// <exception cref="InvalidOperationException">The view is not hosted in any Window.</exception>
public Window FindOwnerWindow(IViewModel a_viewModel)
{
    FrameworkElement view = m_views.LastOrDefault(a_v => ReferenceEquals(a_v.DataContext, a_viewModel));
    if (view == null)
    {
        throw new ArgumentException("Viewmodel is not referenced by any registered View.");
    }

    // The view may itself be a Window; otherwise walk up the visual tree.
    Window owner = view as Window ?? Window.GetWindow(view);
    if (owner == null)
    {
        throw new InvalidOperationException(@"View is not contained within a Window.");
    }

    return owner;
}
/// <summary>
/// Looks up the OrderHeaderID of an order header whose Description equals
/// <paramref name="description"/>. Returns null for blank input or no match.
/// </summary>
private Guid? FindOrderHeaderIDByDescription(string description)
{
    if (string.IsNullOrWhiteSpace(description))
    {
        return null;
    }

    try
    {
        NebimV3.DataConnector.SqlSelectStatement query = new NebimV3.DataConnector.SqlSelectStatement();
        query.TableNames.Add("trOrderHeader", false);
        query.Parameters.Add(new NebimV3.DataConnector.PropertyCondition("trOrderHeader", "Description"));
        query.Parameters.Add(new NebimV3.DataConnector.PropertyCondition("trOrderHeader", "OrderHeaderID"));
        query.Filter = new NebimV3.DataConnector.GroupCondition();
        // description is guaranteed non-null/non-blank by the guard above, so the
        // filter is always applied (the original re-checked `description != null` here).
        query.Filter.AddCondition(
            new NebimV3.DataConnector.BinaryCondition(
                new NebimV3.DataConnector.PropertyCondition("trOrderHeader", "Description"),
                new NebimV3.DataConnector.ValueCondition(description)
                ));

        HashSet<Guid> results = new HashSet<Guid>();
        using (System.Data.IDataReader reader = NebimV3.DataConnector.SqlStatmentExecuter.ExecuteSelect(query))
        {
            while (reader.Read())
            {
                results.Add((Guid)(reader["OrderHeaderID"]));
            }
        }

        if (results.Count == 0)
        {
            return null;
        }

        // NOTE(review): HashSet has no defined order, so LastOrDefault() returns an
        // arbitrary match when several headers share the same description — confirm
        // whether duplicates should instead be treated as an error.
        return results.LastOrDefault();
    }
    catch (Exception ex)
    {
        // Wrap Nebim framework exceptions with their user-readable message; rethrow others untouched.
        NebimV3.Library.V3Exception v3Ex = ex as NebimV3.Library.V3Exception;
        if (v3Ex != null)
        {
            throw new Exception(NebimV3.ApplicationCommon.ExceptionHandlerBase.Default.GetExceptionMessage(v3Ex), ex);
        }
        throw;
    }
}
/// <summary>
/// Draws the marker counter and handles the two marker hotkeys: ReplayHidehud
/// deletes the newest marker, PhoneCameraExpression drops a new marker blip at
/// the aimed world position <paramref name="cam"/>.
/// </summary>
private void HandleMarkers(Vector3 cam)
{
    RenderText(0.125f, config.TextY - 0.1f, $"Markers: {_markers.Count}", 0.3f);

    if (Game.IsControlJustPressed(0, Control.ReplayHidehud))
    {
        if (_markers.Count > 0)
        {
            // Delete most recent
            Blip mark = _markers.LastOrDefault();
            TriggerServerEvent("helicam:removeMarker", mark.Position);
            _markers.Remove(mark);
            mark.Delete();
        }
        // If the deletion above emptied the list, tell the server to clear everything
        // for this vehicle as well.
        if (_markers.Count == 0)
        {
            TriggerServerEvent("helicam:removeAllMarkers", Game.PlayerPed.CurrentVehicle.NetworkId);
        }
    }

    if (Game.IsControlJustPressed(0, Control.PhoneCameraExpression))
    {
        // Hard cap of 10 markers (counts 0..9 allowed before this rejects).
        if (_markers.Count > 9)
        {
            SendNuiMessage(JsonConvert.SerializeObject(new { type = "alert", message = "You have reached your marker limit!" }));
            return;
        }
        // A zero vector means the camera raycast hit nothing to place a marker on.
        if (cam.IsZero)
        {
            SendNuiMessage(JsonConvert.SerializeObject(new { type = "alert", message = "You are not aiming at anything!" }));
            return;
        }

        string name = $"Marker #{_markers.Count} - {DateTime.Now.ToString("H:mm")}";
        cam.Z += 0.01f; // lift slightly off the surface
        Blip mark = World.CreateBlip(cam);
        mark.Sprite = (BlipSprite)123;
        mark.Name = name;
        mark.Color = (BlipColor)27;
        mark.Rotation = 0;
        SetBlipDisplay(mark.Handle, 2);
        _markers.Add(mark);
        // Notify the server so other clients can mirror the new marker.
        TriggerServerEvent("helicam:createMarker", Game.PlayerPed.CurrentVehicle.NetworkId, mark.Position, name);
    }
}
/// <summary>
/// Finds the ProductHierarchyID whose level codes (ProductHierarchyLevelCode01,
/// 02, ...) match <paramref name="hierarchLevelCodes"/> in order.
/// Returns null when no record matches.
/// </summary>
private int? FindProductHierarchy(int[] hierarchLevelCodes)
{
    int? productHierarchyId = null;
    try
    {
        NebimV3.DataConnector.SqlSelectStatement query = new NebimV3.DataConnector.SqlSelectStatement();
        query.TableNames.Add("dfProductHierarchy", false);
        query.Parameters.Add(new NebimV3.DataConnector.PropertyCondition("dfProductHierarchy", "ProductHierarchyID"));
        // BUGFIX / consistency: the sibling Find* queries assign a fresh GroupCondition
        // before calling Filter.AddCondition; this method never did, so AddCondition
        // below dereferenced a null Filter (unless the statement pre-creates one —
        // the sibling code strongly suggests it does not).
        query.Filter = new NebimV3.DataConnector.GroupCondition();
        for (int i = 0; i < hierarchLevelCodes.Length; i++)
        {
            // Column names are 1-based: ProductHierarchyLevelCode01, 02, ...
            query.Filter.AddCondition(
                new NebimV3.DataConnector.BinaryCondition(
                    new NebimV3.DataConnector.PropertyCondition("dfProductHierarchy", "ProductHierarchyLevelCode0" + (i + 1).ToString()),
                    new NebimV3.DataConnector.ValueCondition(hierarchLevelCodes[i])
                    ));
        }

        HashSet<int> results = new HashSet<int>();
        using (System.Data.IDataReader reader = NebimV3.DataConnector.SqlStatmentExecuter.ExecuteSelect(query))
        {
            while (reader.Read())
            {
                results.Add((int)(reader["ProductHierarchyID"]));
            }
        }

        if (results.Count == 0)
        {
            return null;
        }

        // NOTE(review): HashSet order is unspecified; LastOrDefault() picks an
        // arbitrary record when multiple hierarchies match.
        productHierarchyId = results.LastOrDefault();
    }
    catch (Exception ex)
    {
        // Wrap Nebim framework exceptions with their user-readable message; rethrow others untouched.
        NebimV3.Library.V3Exception v3Ex = ex as NebimV3.Library.V3Exception;
        if (v3Ex != null)
        {
            throw new Exception(NebimV3.ApplicationCommon.ExceptionHandlerBase.Default.GetExceptionMessage(v3Ex), ex);
        }
        throw;
    }
    return productHierarchyId;
}
/// <summary>
/// Builds a value holder from the current Name/Value/Data inputs and appends it
/// to the list, refreshing the bound Values array. Rejects duplicates.
/// </summary>
private void Add()
{
    IValueHolder value = factory.Create(Name, Value, Data?.Select(c => unchecked((byte)c)).ToArray());

    // BUGFIX: the original warned about a duplicate but then added the element
    // anyway; it also computed two hash codes into unused locals (debug leftovers).
    if (valueHolders.Contains(value))
    {
        MessageBox.Show("Element already exists");
        return;
    }

    valueHolders.Add(value);
    Values = valueHolders.ToArray();
}
/// <summary>
/// Builds the final test result: the fitness rating matching the shuttle
/// completed just before the test ended, plus a set of neighbouring ratings
/// (same and adjacent speed levels) used for validating the result.
/// </summary>
private async Task<TestResult> GetTestResult()
{
    _logger.LogInformation("Test result is requested.");
    // Candidate ratings for the speed level the runner reached.
    var allPossibleResults = new HashSet<FitnessRating>(ratingsBySpeedLevelForValidatingResults[_currentSpeedLevel]);
    var currentShuttleNo = _test.TestInfo.ShuttleNo;
    var previousShuttleNo = currentShuttleNo - 1; // putting up -1 here to get the last one
    var result = allPossibleResults.FirstOrDefault(x => x.ShuttleNo == previousShuttleNo);
    if (result == null)
    {
        // No rating for the previous shuttle at this level; fall back to the
        // closest lower speed level (last key strictly below the current one).
        var previousSpeedLevel = ratingsBySpeedLevelForValidatingResults.Keys.TakeWhile(x => x < _currentSpeedLevel).LastOrDefault();
        if (previousShuttleNo < 1 && ratingsBySpeedLevelForValidatingResults.Keys.Contains(previousSpeedLevel))
        {
            // Test ended on the first shuttle of a level: take the last rating
            // of the level below instead.
            allPossibleResults = new HashSet<FitnessRating>(ratingsBySpeedLevelForValidatingResults[previousSpeedLevel]);
            result = allPossibleResults.LastOrDefault();
        }
        else
        {
            result = allPossibleResults.FirstOrDefault();
        }
    }
    // Widen the validation set with the ratings of the adjacent speed levels
    // (one below and one above the current level, when they exist).
    var keyToIndexKeyMap = ratingsBySpeedLevelForValidatingResults.Keys.ToList();
    var currentKeyIndex = keyToIndexKeyMap.IndexOf(_currentSpeedLevel);
    if (currentKeyIndex >= 1)
    {
        ratingsBySpeedLevelForValidatingResults[keyToIndexKeyMap[currentKeyIndex - 1]].ForEach(possibleResult => allPossibleResults.Add(possibleResult));
    }
    if (currentKeyIndex < keyToIndexKeyMap.Count - 1)
    {
        ratingsBySpeedLevelForValidatingResults[keyToIndexKeyMap[currentKeyIndex + 1]].ForEach(possibleResult => allPossibleResults.Add(possibleResult));
    }
    if (result == null)
    {
        result = allPossibleResults.FirstOrDefault();
    }
    // The chosen result must not also appear among the alternative options.
    allPossibleResults.Remove(result);
    _logger.LogInformation("Test result is generated.");
    return(await Task.FromResult(new TestResult(result, allPossibleResults.ToList())));
}
/// <summary>
/// Collects the entry points of yield curve <paramref name="ycId"/> that are valid
/// on <paramref name="date"/>, caches them (sorted by duration) in
/// ycEntryPointsByDateDic, and raises <paramref name="maxDay"/> to cover the
/// longest entry-point duration found.
/// </summary>
public List<EntryPoint> GetEntryPoints(long ycId, DateTime date, ref long maxDay)
{
    Dictionary<long, EntryPointHistory> eph = this[ycId];
    KeyValuePair<long, DateTime> epk = new KeyValuePair<long, DateTime>(ycId, date);
    // BUGFIX: the pair was previously default-constructed, which leaves both
    // lists null and makes epv.Key.Add below throw NullReferenceException.
    KeyValuePair<List<EntryPoint>, List<DiscountPoint>> epv =
        new KeyValuePair<List<EntryPoint>, List<DiscountPoint>>(new List<EntryPoint>(), new List<DiscountPoint>());

    foreach (var ep in eph)
    {
        EntryPointHistory ych = ep.Value;
        // Skip entries whose validity window does not cover the requested date.
        if ((ych.ValidDateBegin > date) || (ych.ValidDateEnd < date))
        {
            continue;
        }
        // skip if maturity date is not in future
        if (ych.Instrument is Bond bond && bond.MaturityDate <= date)
        {
            continue;
        }

        HashSet<HistoricValue> vph = ych.epValueHistory;
        // Last historic value dated at or before the requested date; may be null
        // when no history exists yet — TODO(review): confirm callers handle that.
        HistoricValue vp = vph.LastOrDefault(i => i.Date <= date);
        ych.epValue = vp; // last historic matching the date
        epv.Key.Add(ych);
        maxDay = Math.Max(maxDay, ych.Duration + 2);
    }

    ycEntryPointsByDateDic[epk] = epv;
    epv.Key.Sort(new EntryPointCompare()); // sort by durations
    return epv.Key;
}
/// <summary>
/// Builds the set of candidate multisampling modes (0x–16x) and returns the one
/// whose sample count best matches the user's antialiasing quality setting
/// (highest mode not exceeding the target sample count).
/// </summary>
private GraphicsMode GetDefaultGraphicsMode()
{
    int[] aaLevels = new int[] { 0, 2, 4, 6, 8, 16 };
    HashSet<GraphicsMode> availGraphicsModes = new HashSet<GraphicsMode>(new GraphicsModeComparer());
    foreach (int samplecount in aaLevels)
    {
        // HashSet.Add is already a no-op for duplicates, so the former
        // Contains pre-check was redundant (double lookup).
        availGraphicsModes.Add(new GraphicsMode(32, 24, 0, samplecount, new OpenTK.Graphics.ColorFormat(0), 2, false));
    }
    // Highest available AA level as a power-of-two exponent (16 samples -> level 4).
    int highestAALevel = MathF.RoundToInt(MathF.Log(MathF.Max(availGraphicsModes.Max(m => m.Samples), 1.0f), 2.0f));
    int targetAALevel = highestAALevel;
    if (DualityApp.AppData.MultisampleBackBuffer)
    {
        switch (DualityApp.UserData.AntialiasingQuality)
        {
            case AAQuality.High: targetAALevel = highestAALevel; break;
            case AAQuality.Medium: targetAALevel = highestAALevel / 2; break;
            case AAQuality.Low: targetAALevel = highestAALevel / 4; break;
            case AAQuality.Off: targetAALevel = 0; break;
        }
    }
    else
    {
        targetAALevel = 0;
    }
    int targetSampleCount = MathF.RoundToInt(MathF.Pow(2.0f, targetAALevel));
    // Best mode not exceeding the target; fall back to the last candidate.
    return availGraphicsModes.LastOrDefault(m => m.Samples <= targetSampleCount) ?? availGraphicsModes.Last();
}
/// <summary>
/// Determines which files still need to be pushed to the destination file system
/// and enqueues synchronization work for them. Returns true when there is nothing
/// left to do for the destination; returns false after bumping the destination's
/// last-synced etag so the caller knows to repeat the operation.
/// </summary>
private async Task<bool> EnqueueMissingUpdatesAsync(ISynchronizationServerClient destinationSyncClient, SourceSynchronizationInformation synchronizationInfo, IList<FileHeader> needSyncingAgain)
{
    LogFilesInfo("There were {0} file(s) that needed synchronization because the previous one went wrong: {1}", needSyncingAgain);
    // Files with an etag greater than the destination's last-seen source etag.
    var filesToSynchronization = new HashSet<FileHeader>(GetFilesToSynchronization(synchronizationInfo.LastSourceFileEtag, NumberOfFilesToCheckForSynchronization),
                                                        FileHeaderNameEqualityComparer.Instance);
    LogFilesInfo("There were {0} file(s) that needed synchronization because of greater ETag value: {1}", filesToSynchronization);
    // Merge in the files whose previous synchronization attempt failed.
    foreach (FileHeader needSyncing in needSyncingAgain)
    {
        filesToSynchronization.Add(needSyncing);
    }
    var filteredFilesToSynchronization = filesToSynchronization.Where(
        x => synchronizationStrategy.Filter(x, synchronizationInfo.DestinationServerId, filesToSynchronization)).ToList();
    if (filesToSynchronization.Count > 0)
        LogFilesInfo("There were {0} file(s) that needed synchronization after filtering: {1}", filteredFilesToSynchronization);
    if (filteredFilesToSynchronization.Count == 0)
    {
        // Everything was filtered out; advance the destination's etag so the
        // same range is not re-examined on the next run.
        var lastFileBeforeFiltering = filesToSynchronization.LastOrDefault();
        if (lastFileBeforeFiltering == null)
            return true; // there are no more files that need
        if (lastFileBeforeFiltering.Etag == synchronizationInfo.LastSourceFileEtag)
            return true; // already updated etag on destination side
        await destinationSyncClient.IncrementLastETagAsync(storage.Id, FileSystemUrl, lastFileBeforeFiltering.Etag).ConfigureAwait(false);
        return false; // all docs has been filtered out, update etag on destination side and retry
    }
    var destinationUrl = destinationSyncClient.BaseUrl;
    bool enqueued = true;
    foreach (var fileHeader in filteredFilesToSynchronization)
    {
        context.CancellationToken.ThrowIfCancellationRequested();
        var file = fileHeader.FullPath;
        var localMetadata = GetLocalMetadata(file);
        RavenJObject destinationMetadata;
        try
        {
            destinationMetadata = await destinationSyncClient.GetMetadataForAsync(file).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            // Can't decide what work this file needs without its remote metadata; skip it.
            Log.WarnException(
                string.Format("Could not retrieve a metadata of a file '{0}' from {1} in order to determine needed synchronization type", file, destinationUrl), ex);
            continue;
        }
        NoSyncReason reason;
        var work = synchronizationStrategy.DetermineWork(file, localMetadata, destinationMetadata, FileSystemUrl, out reason);
        if (work == null)
        {
            Log.Debug("File '{0}' were not synchronized to {1}. {2}", file, destinationUrl, reason.GetDescription());
            if (reason == NoSyncReason.ContainedInDestinationHistory)
            {
                // Destination already has this version; move its etag forward and
                // drop our local syncing marker for the file.
                var etag = localMetadata.Value<Guid>(Constants.MetadataEtagField);
                await destinationSyncClient.IncrementLastETagAsync(storage.Id, FileSystemUrl, etag).ConfigureAwait(false);
                RemoveSyncingConfiguration(file, destinationUrl);
                enqueued = false;
            }
            continue;
        }
        if (synchronizationQueue.EnqueueSynchronization(destinationUrl, work))
        {
            publisher.Publish(new SynchronizationUpdateNotification
            {
                FileName = work.FileName,
                DestinationFileSystemUrl = destinationUrl,
                SourceServerId = storage.Id,
                SourceFileSystemUrl = FileSystemUrl,
                Type = work.SynchronizationType,
                Action = SynchronizationAction.Enqueue,
                Direction = SynchronizationDirection.Outgoing
            });
        }
        enqueued = true;
    }
    return enqueued;
}
/// <summary>
/// Resolves a projectile hitting a physics fixture: applies damage to the struck
/// limb/item/structure/wall, plays sounds and status effects, transfers impulse,
/// and decides whether the projectile deactivates or sticks to the target.
/// Returns false when the collision should be ignored.
/// NOTE(review): this SOURCE chunk appears to end mid-method — the final return
/// path and the method's closing brace are not visible here; the visible tokens
/// are preserved as-is.
/// </summary>
private bool HandleProjectileCollision(Fixture target, Vector2 collisionNormal, Vector2 velocity)
{
    // Drop the reference to a user that has been removed from the game.
    if (User != null && User.Removed) { User = null; }
    if (IgnoredBodies.Contains(target.Body)) { return(false); }
    //ignore character colliders (the projectile only hits limbs)
    if (target.CollisionCategories == Physics.CollisionCharacter && target.Body.UserData is Character)
    {
        return(false);
    }
    AttackResult attackResult = new AttackResult();
    Character character = null;
    if (target.Body.UserData is Submarine submarine)
    {
        // Entering a submarine: reparent the item into sub-local coordinates.
        item.Move(-submarine.Position);
        item.Submarine = submarine;
        item.body.Submarine = submarine;
        return(!Hitscan);
    }
    else if (target.Body.UserData is Limb limb)
    {
        //severed limbs don't deactivate the projectile (but may still slow it down enough to make it inactive)
        if (limb.IsSevered) { return(true); }
        if (limb.character == null || limb.character.Removed) { return(false); }
        limb.character.LastDamageSource = item;
        if (Attack != null) { attackResult = Attack.DoDamageToLimb(User, limb, item.WorldPosition, 1.0f); }
        if (limb.character != null) { character = limb.character; }
    }
    else if (target.Body.UserData is Item targetItem)
    {
        if (targetItem.Removed) { return(false); }
        if (Attack != null && targetItem.Prefab.DamagedByProjectiles && targetItem.Condition > 0)
        {
            attackResult = Attack.DoDamage(User, targetItem, item.WorldPosition, 1.0f);
        }
    }
    else if (target.Body.UserData is IDamageable damageable)
    {
        if (Attack != null) { attackResult = Attack.DoDamage(User, damageable, item.WorldPosition, 1.0f); }
    }
    else if (target.Body.UserData is VoronoiCell voronoiCell && voronoiCell.IsDestructible && Attack != null && Math.Abs(Attack.StructureDamage) > 0.0f)
    {
        // Level wall cells are damageable only through their DestructibleLevelWall wrapper.
        if (Level.Loaded?.ExtraWalls.Find(w => w.Body == target.Body) is DestructibleLevelWall destructibleWall)
        {
            attackResult = Attack.DoDamage(User, destructibleWall, item.WorldPosition, 1.0f);
        }
    }
    if (character != null) { character.LastDamageSource = item; }
    // A low degree of success can downgrade the triggered action to OnFailure.
    ActionType actionType = ActionType.OnUse;
    if (_user != null &&
        Rand.Range(0.0f, 0.5f) > DegreeOfSuccess(_user))
    {
        actionType = ActionType.OnFailure;
    }
#if CLIENT
    PlaySound(actionType, user: _user);
    PlaySound(ActionType.OnImpact, user: _user);
#endif
    // Status effects are only applied authoritatively (single player or server).
    if (GameMain.NetworkMember == null || GameMain.NetworkMember.IsServer)
    {
        if (target.Body.UserData is Limb targetLimb)
        {
            ApplyStatusEffects(actionType, 1.0f, character, targetLimb, user: _user);
            ApplyStatusEffects(ActionType.OnImpact, 1.0f, character, targetLimb, user: _user);
            var attack = targetLimb.attack;
            if (attack != null)
            {
                // Apply the status effects defined in the limb's attack that was hit
                foreach (var effect in attack.StatusEffects)
                {
                    if (effect.type == ActionType.OnImpact)
                    {
                        //effect.Apply(effect.type, 1.0f, targetLimb.character, targetLimb.character, targetLimb.WorldPosition);
                        if (effect.HasTargetType(StatusEffect.TargetType.This))
                        {
                            effect.Apply(effect.type, 1.0f, targetLimb.character, targetLimb.character, targetLimb.WorldPosition);
                        }
                        if (effect.HasTargetType(StatusEffect.TargetType.NearbyItems) ||
                            effect.HasTargetType(StatusEffect.TargetType.NearbyCharacters))
                        {
                            var targets = new List<ISerializableEntity>();
                            effect.GetNearbyTargets(targetLimb.WorldPosition, targets);
                            effect.Apply(ActionType.OnActive, 1.0f, targetLimb.character, targets);
                        }
                    }
                }
            }
#if SERVER
            if (GameMain.NetworkMember.IsServer)
            {
                // Replicate both the (possibly failed) use effect and the impact effect to clients.
                GameMain.Server?.CreateEntityEvent(item, new object[] { NetEntityEvent.Type.ApplyStatusEffect, actionType, this, targetLimb.character.ID, targetLimb, (ushort)0, item.WorldPosition });
                GameMain.Server?.CreateEntityEvent(item, new object[] { NetEntityEvent.Type.ApplyStatusEffect, ActionType.OnImpact, this, targetLimb.character.ID, targetLimb, (ushort)0, item.WorldPosition });
            }
#endif
        }
        else
        {
            ApplyStatusEffects(actionType, 1.0f, useTarget: target.Body.UserData as Entity, user: _user);
            ApplyStatusEffects(ActionType.OnImpact, 1.0f, useTarget: target.Body.UserData as Entity, user: _user);
#if SERVER
            if (GameMain.NetworkMember.IsServer)
            {
                GameMain.Server?.CreateEntityEvent(item,
                    new object[] { NetEntityEvent.Type.ApplyStatusEffect, actionType, this, (ushort)0, null, (target.Body.UserData as Entity)?.ID ?? 0, item.WorldPosition });
                GameMain.Server?.CreateEntityEvent(item,
                    new object[] { NetEntityEvent.Type.ApplyStatusEffect, ActionType.OnImpact, this, (ushort)0, null, (target.Body.UserData as Entity)?.ID ?? 0, item.WorldPosition });
            }
#endif
        }
    }
    // Transfer the projectile's momentum into the hit body, clamped to a safe velocity.
    target.Body.ApplyLinearImpulse(velocity * item.body.Mass);
    target.Body.LinearVelocity = target.Body.LinearVelocity.ClampLength(NetConfig.MaxPhysicsBodyVelocity * 0.5f);
    // Deactivate once the hit budget is spent or a level wall cell was struck.
    if (hits.Count() >= MaxTargetsToHit || hits.LastOrDefault()?.UserData is VoronoiCell)
    {
        Deactivate();
    }
    if (attackResult.AppliedDamageModifiers != null &&
        attackResult.AppliedDamageModifiers.Any(dm => dm.DeflectProjectiles))
    {
        // Deflected: bleed off almost all velocity.
        item.body.LinearVelocity *= 0.1f;
    }
    else if (Vector2.Dot(velocity, collisionNormal) < 0.0f && hits.Count() >= MaxTargetsToHit &&
             target.Body.Mass > item.body.Mass * 0.5f &&
             (DoesStick ||
              (StickToCharacters && target.Body.UserData is Limb) ||
              (StickToStructures && target.Body.UserData is Structure) ||
              (StickToItems && target.Body.UserData is Item)))
    {
        // Stick into the target along the projectile's current facing.
        Vector2 dir = new Vector2(
            (float)Math.Cos(item.body.Rotation),
            (float)Math.Sin(item.body.Rotation));
        if (GameMain.NetworkMember == null || GameMain.NetworkMember.IsServer)
        {
            // Structures belonging to another submarine stick via that sub's physics body.
            if (target.Body.UserData is Structure structure && structure.Submarine != item.Submarine && structure.Submarine != null)
            {
                StickToTarget(structure.Submarine.PhysicsBody.FarseerBody, dir);
            }
            else
            {
                StickToTarget(target.Body, dir);
            }
        }
#if SERVER
        if (GameMain.NetworkMember != null && GameMain.NetworkMember.IsServer)
        {
            item.CreateServerEvent(this);
        }
#endif
        item.body.LinearVelocity *= 0.5f;
        return(Hitscan);
    }
/// <summary>
/// Returns the most recently registered publisher that can handle
/// <typeparamref name="TOutput"/> data, or null when none is registered.
/// </summary>
public IDataPublisher<TOutput> GetPublisher<TOutput>()
{
    return Publishers.OfType<IDataPublisher<TOutput>>().LastOrDefault();
}
/// <summary>
/// Determines which files still need to be pushed to the destination file system
/// and enqueues synchronization work for them. Returns true when there is nothing
/// left to do for the destination; returns false after bumping the destination's
/// last-synced etag so the caller knows to repeat the operation.
/// </summary>
private async Task<bool> EnqueueMissingUpdatesAsync(ISynchronizationServerClient destinationSyncClient, SourceSynchronizationInformation synchronizationInfo, IList<FileHeader> needSyncingAgain)
{
    LogFilesInfo("There were {0} file(s) that needed synchronization because the previous one went wrong: {1}", needSyncingAgain);
    // Files with an etag greater than the destination's last-seen source etag.
    var filesToSynchronization = new HashSet<FileHeader>(GetFilesToSynchronization(synchronizationInfo.LastSourceFileEtag, NumberOfFilesToCheckForSynchronization),
                                                         FileHeaderNameEqualityComparer.Instance);
    LogFilesInfo("There were {0} file(s) that needed synchronization because of greater ETag value: {1}", filesToSynchronization);
    // Merge in the files whose previous synchronization attempt failed.
    foreach (FileHeader needSyncing in needSyncingAgain)
    {
        filesToSynchronization.Add(needSyncing);
    }
    var filteredFilesToSynchronization = filesToSynchronization.Where(
        x => synchronizationStrategy.Filter(x, synchronizationInfo.DestinationServerId, filesToSynchronization)).ToList();
    if (filesToSynchronization.Count > 0)
    {
        LogFilesInfo("There were {0} file(s) that needed synchronization after filtering: {1}", filteredFilesToSynchronization);
    }
    if (filteredFilesToSynchronization.Count == 0)
    {
        // Everything was filtered out; advance the destination's etag so the
        // same range is not re-examined on the next run.
        var lastFileBeforeFiltering = filesToSynchronization.LastOrDefault();
        if (lastFileBeforeFiltering == null)
        {
            return(true); // there are no more files that need
        }
        if (lastFileBeforeFiltering.Etag == synchronizationInfo.LastSourceFileEtag)
        {
            return(true); // already updated etag on destination side
        }
        await destinationSyncClient.IncrementLastETagAsync(storage.Id, FileSystemUrl, lastFileBeforeFiltering.Etag).ConfigureAwait(false);
        return(false); // all docs has been filtered out, update etag on destination side and retry
    }
    var destinationUrl = destinationSyncClient.BaseUrl;
    bool enqueued = true;
    foreach (var fileHeader in filteredFilesToSynchronization)
    {
        context.CancellationToken.ThrowIfCancellationRequested();
        var file = fileHeader.FullPath;
        var localMetadata = GetLocalMetadata(file);
        RavenJObject destinationMetadata;
        try
        {
            destinationMetadata = await destinationSyncClient.GetMetadataForAsync(file).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            // Can't decide what work this file needs without its remote metadata; skip it.
            Log.WarnException(
                string.Format("Could not retrieve a metadata of a file '{0}' from {1} in order to determine needed synchronization type", file, destinationUrl), ex);
            continue;
        }
        NoSyncReason reason;
        var work = synchronizationStrategy.DetermineWork(file, localMetadata, destinationMetadata, FileSystemUrl, out reason);
        if (work == null)
        {
            if (Log.IsDebugEnabled)
            {
                Log.Debug("File '{0}' were not synchronized to {1}. {2}", file, destinationUrl, reason.GetDescription());
            }
            if (reason == NoSyncReason.ContainedInDestinationHistory)
            {
                // Destination already has this version; move its etag forward and
                // drop our local syncing marker for the file.
                var etag = localMetadata.Value<Guid>(Constants.MetadataEtagField);
                await destinationSyncClient.IncrementLastETagAsync(storage.Id, FileSystemUrl, etag).ConfigureAwait(false);
                RemoveSyncingConfiguration(file, destinationUrl);
                enqueued = false;
            }
            continue;
        }
        if (synchronizationQueue.EnqueueSynchronization(destinationUrl, work))
        {
            publisher.Publish(new SynchronizationUpdateNotification
            {
                FileName = work.FileName,
                DestinationFileSystemUrl = destinationUrl,
                SourceServerId = storage.Id,
                SourceFileSystemUrl = FileSystemUrl,
                Type = work.SynchronizationType,
                Action = SynchronizationAction.Enqueue,
                Direction = SynchronizationDirection.Outgoing
            });
        }
        enqueued = true;
    }
    return(enqueued);
}
/// <summary>
/// Resolves the CurrAccCode of a retail customer whose account description
/// matches <paramref name="description"/>; returns null when nothing matches.
/// </summary>
private string FindCustomerCodeByDescription(string description)
{
    try
    {
        var query = new NebimV3.DataConnector.SqlSelectStatement();
        query.TableNames.Add("cdCurrAcc", false);
        query.TableNames.Add("cdCurrAccDesc", false);
        query.Parameters.Add(new NebimV3.DataConnector.PropertyCondition("cdCurrAcc", "CurrAccCode"));
        query.Filter = new NebimV3.DataConnector.GroupCondition();

        // Join cdCurrAcc to cdCurrAccDesc on the account code.
        query.Filter.AddCondition(
            new NebimV3.DataConnector.BinaryCondition(
                new NebimV3.DataConnector.PropertyCondition("cdCurrAcc", "CurrAccCode"),
                new NebimV3.DataConnector.PropertyCondition("cdCurrAccDesc", "CurrAccCode")
                ));

        // Only constrain by description when one was supplied.
        if (description != null)
        {
            query.Filter.AddCondition(
                new NebimV3.DataConnector.BinaryCondition(
                    new NebimV3.DataConnector.PropertyCondition("cdCurrAccDesc", "CurrAccDescription"),
                    new NebimV3.DataConnector.ValueCondition(description)
                    ));
        }

        // Restrict the search to retail customers.
        query.Filter.AddCondition(
            new NebimV3.DataConnector.BinaryCondition(
                new NebimV3.DataConnector.PropertyCondition("cdCurrAcc", "CurrAccTypeCode"),
                new NebimV3.DataConnector.ValueCondition(NebimV3.ApplicationCommon.CurrAccTypes.RetailCustomer)
                ));

        var matches = new HashSet<string>();
        using (System.Data.IDataReader reader = NebimV3.DataConnector.SqlStatmentExecuter.ExecuteSelect(query))
        {
            while (reader.Read())
            {
                matches.Add((string)(reader["CurrAccCode"]));
            }
        }

        if (matches.Count == 0)
        {
            return null;
        }
        return matches.LastOrDefault();
    }
    catch (Exception ex)
    {
        // Wrap Nebim framework exceptions with their user-readable message; rethrow others untouched.
        var v3Ex = ex as NebimV3.Library.V3Exception;
        if (v3Ex != null)
        {
            throw new Exception(NebimV3.ApplicationCommon.ExceptionHandlerBase.Default.GetExceptionMessage(v3Ex), ex);
        }
        throw;
    }
}
/// <summary>
/// Initializes this DualityApp. Should be called before performing any operations withing Duality.
/// </summary>
/// <param name="env">The <see cref="ExecutionEnvironment"/> hosting Duality.</param>
/// <param name="context">The <see cref="ExecutionContext"/> in which Duality runs.</param>
/// <param name="args">
/// Command line arguments to run this DualityApp with.
/// Usually these are just the ones from the host application, passed on.
/// </param>
public static void Init(ExecutionEnvironment env = ExecutionEnvironment.Unknown, ExecutionContext context = ExecutionContext.Unknown, string[] args = null)
{
    // Initialization is one-shot; subsequent calls are no-ops.
    if (initialized)
    {
        return;
    }

    // Set main thread
    mainThread = Thread.CurrentThread;

    // Process command line options
    if (args != null)
    {
        // "logfile" is expected to be followed by the path argument.
        int logArgIndex = args.IndexOfFirst("logfile");
        if (logArgIndex != -1 && logArgIndex + 1 < args.Length)
        {
            logArgIndex++;
        }
        else
        {
            logArgIndex = -1;
        }
        // Enter debug mode
        if (args.Contains(CmdArgDebug))
        {
            System.Diagnostics.Debugger.Launch();
        }
        // Run from editor
        if (args.Contains(CmdArgEditor))
        {
            runFromEditor = true;
        }
        // Set logfile path
        if (logArgIndex != -1)
        {
            logfilePath = args[logArgIndex];
            // Default to a .txt extension when none was given.
            if (string.IsNullOrWhiteSpace(Path.GetExtension(logfilePath)))
            {
                logfilePath += ".txt";
            }
        }
    }

    environment = env;
    execContext = context;

    // Initialize Logfile — non-fatal if the file cannot be opened (e.g. locked by another instance).
    try
    {
        logfile = new StreamWriter(logfilePath);
        logfile.AutoFlush = true;
        TextWriterLogOutput logfileOutput = new TextWriterLogOutput(logfile);
        Log.Game.AddOutput(logfileOutput);
        Log.Core.AddOutput(logfileOutput);
        Log.Editor.AddOutput(logfileOutput);
    }
    catch (Exception e)
    {
        Log.Core.WriteWarning("Text Logfile unavailable: {0}", Log.Exception(e));
    }

    // Assure Duality is properly terminated in any case and register additional AppDomain events
    AppDomain.CurrentDomain.ProcessExit += CurrentDomain_ProcessExit;
    AppDomain.CurrentDomain.UnhandledException += CurrentDomain_UnhandledException;
    AppDomain.CurrentDomain.AssemblyResolve += CurrentDomain_AssemblyResolve;
    AppDomain.CurrentDomain.AssemblyLoad += CurrentDomain_AssemblyLoad;

    sound = new SoundDevice();
    LoadPlugins();
    LoadAppData();
    LoadUserData();
    LoadMetaData();

    // Determine available and default graphics modes
    int[] aaLevels = new int[] { 0, 2, 4, 6, 8, 16 };
    foreach (int samplecount in aaLevels)
    {
        GraphicsMode mode = new GraphicsMode(32, 24, 0, samplecount, new OpenTK.Graphics.ColorFormat(0), 2, false);
        if (!availModes.Contains(mode))
        {
            availModes.Add(mode);
        }
    }
    // Highest available AA level as a power-of-two exponent (16 samples -> level 4).
    int highestAALevel = MathF.RoundToInt(MathF.Log(MathF.Max(availModes.Max(m => m.Samples), 1.0f), 2.0f));
    int targetAALevel = highestAALevel;
    if (appData.MultisampleBackBuffer)
    {
        switch (userData.AntialiasingQuality)
        {
            case AAQuality.High: targetAALevel = highestAALevel; break;
            case AAQuality.Medium: targetAALevel = highestAALevel / 2; break;
            case AAQuality.Low: targetAALevel = highestAALevel / 4; break;
            case AAQuality.Off: targetAALevel = 0; break;
        }
    }
    else
    {
        targetAALevel = 0;
    }
    int targetSampleCount = MathF.RoundToInt(MathF.Pow(2.0f, targetAALevel));
    // Best mode not exceeding the target sample count; fall back to the last candidate.
    defaultMode = availModes.LastOrDefault(m => m.Samples <= targetSampleCount) ?? availModes.Last();

    // Initial changed event
    OnAppDataChanged();
    OnUserDataChanged();

    Formatter.InitDefaultMethod();

    Log.Core.Write("DualityApp initialized");
    Log.Core.Write("Debug Mode: {0}", System.Diagnostics.Debugger.IsAttached);
    Log.Core.Write("Command line arguments: {0}", args != null ? args.ToString(", ") : "null");

    initialized = true;
    InitPlugins();
}
/// <summary>
/// Determines which files still need to be pushed to the destination file system
/// and enqueues synchronization work for them. Tracks the highest etag among
/// filtered-out documents so that, when nothing was enqueued, the destination's
/// last-synced etag can be advanced in one step. Returns true when there is
/// nothing left to do; false when the caller should repeat the operation.
/// </summary>
private async Task<bool> EnqueueMissingUpdatesAsync(ISynchronizationServerClient destinationSyncClient, SourceSynchronizationInformation synchronizationInfo, IList<FileHeader> needSyncingAgain)
{
    LogFilesInfo("There were {0} file(s) that needed synchronization because the previous one went wrong: {1}", needSyncingAgain);
    // Files with an etag greater than the destination's last-seen source etag.
    var filesToSynchronization = new HashSet<FileHeader>(GetFilesToSynchronization(synchronizationInfo.LastSourceFileEtag, NumberOfFilesToCheckForSynchronization),
                                                         FileHeaderNameEqualityComparer.Instance);
    LogFilesInfo("There were {0} file(s) that needed synchronization because of greater ETag value: {1}", filesToSynchronization);
    // Merge in the files whose previous synchronization attempt failed.
    foreach (FileHeader needSyncing in needSyncingAgain)
    {
        filesToSynchronization.Add(needSyncing);
    }
    var filteredFilesToSynchronization = filesToSynchronization.Where(
        x => synchronizationStrategy.Filter(x, synchronizationInfo.DestinationServerId, filesToSynchronization)).ToList();
    if (filesToSynchronization.Count > 0)
    {
        LogFilesInfo("There were {0} file(s) that needed synchronization after filtering: {1}", filteredFilesToSynchronization);
    }
    if (filteredFilesToSynchronization.Count == 0)
    {
        // Everything was filtered out; advance the destination's etag so the
        // same range is not re-examined on the next run.
        var lastFileBeforeFiltering = filesToSynchronization.LastOrDefault();
        if (lastFileBeforeFiltering == null)
        {
            return(true); // there are no more files that need
        }
        if (lastFileBeforeFiltering.Etag == synchronizationInfo.LastSourceFileEtag)
        {
            return(true); // already updated etag on destination side
        }
        await destinationSyncClient.IncrementLastETagAsync(storage.Id, FileSystemUrl, lastFileBeforeFiltering.Etag).ConfigureAwait(false);
        return(false); // all docs has been filtered out, update etag on destination side and retry
    }
    var destinationUrl = destinationSyncClient.BaseUrl;
    bool enqueued = false;
    // Highest local etag among documents we decided NOT to synchronize.
    var maxEtagOfFilteredDoc = Etag.Empty;
    foreach (var fileHeader in filteredFilesToSynchronization)
    {
        context.CancellationToken.ThrowIfCancellationRequested();
        var file = fileHeader.FullPath;
        var localMetadata = GetLocalMetadata(file);
        RavenJObject destinationMetadata;
        try
        {
            destinationMetadata = await destinationSyncClient.GetMetadataForAsync(file).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            // Can't decide what work this file needs without its remote metadata; skip it.
            Log.WarnException(
                string.Format("Could not retrieve a metadata of a file '{0}' from {1} in order to determine needed synchronization type", file, destinationUrl), ex);
            continue;
        }
        NoSyncReason reason;
        var work = synchronizationStrategy.DetermineWork(file, localMetadata, destinationMetadata, FileSystemUrl, out reason);
        if (work == null)
        {
            Log.Debug("File '{0}' was not synchronized to {1}. {2}", file, destinationUrl, reason.GetDescription());
            switch (reason)
            {
                case NoSyncReason.ContainedInDestinationHistory:
                case NoSyncReason.DestinationFileConflicted:
                case NoSyncReason.NoNeedToDeleteNonExistigFile:
                    var localEtag = Etag.Parse(localMetadata.Value<string>(Constants.MetadataEtagField));
                    if (reason == NoSyncReason.ContainedInDestinationHistory)
                    {
                        // Destination already has this version; drop our syncing marker.
                        RemoveSyncingConfiguration(file, destinationUrl);
                    }
                    else if (reason == NoSyncReason.DestinationFileConflicted)
                    {
                        // Remember the conflicted file so a later retry can pick it up.
                        if (needSyncingAgain.Contains(fileHeader, FileHeaderNameEqualityComparer.Instance) == false)
                        {
                            CreateSyncingConfiguration(fileHeader.Name, fileHeader.Etag, destinationUrl, SynchronizationType.Unknown);
                        }
                    }
                    else if (reason == NoSyncReason.NoNeedToDeleteNonExistigFile)
                    {
                        // after the upgrade to newer build there can be still an existing syncing configuration for it
                        RemoveSyncingConfiguration(file, destinationUrl);
                    }
                    if (EtagUtil.IsGreaterThan(localEtag, maxEtagOfFilteredDoc))
                    {
                        maxEtagOfFilteredDoc = localEtag;
                    }
                    break;
            }
            continue;
        }
        if (synchronizationQueue.EnqueueSynchronization(destinationUrl, work))
        {
            publisher.Publish(new SynchronizationUpdateNotification
            {
                FileName = work.FileName,
                DestinationFileSystemUrl = destinationUrl,
                SourceServerId = storage.Id,
                SourceFileSystemUrl = FileSystemUrl,
                Type = work.SynchronizationType,
                Action = SynchronizationAction.Enqueue,
                Direction = SynchronizationDirection.Outgoing
            });
        }
        enqueued = true;
    }
    if (enqueued == false &&
        EtagUtil.IsGreaterThan(maxEtagOfFilteredDoc, synchronizationInfo.LastSourceFileEtag))
    {
        await destinationSyncClient.IncrementLastETagAsync(storage.Id, FileSystemUrl, maxEtagOfFilteredDoc).ConfigureAwait(false);
        return(false); // we bumped the last synced etag on a destination server, let it know it need to repeat the operation
    }
    return(true);
}
/// <summary>
/// Builds the set of candidate multisampling modes (0x–16x) and returns the one
/// whose sample count best matches the user's antialiasing quality setting
/// (highest mode not exceeding the target sample count).
/// </summary>
private GraphicsMode GetDefaultGraphicsMode()
{
    int[] aaLevels = new int[] { 0, 2, 4, 6, 8, 16 };
    HashSet<GraphicsMode> availGraphicsModes = new HashSet<GraphicsMode>(new GraphicsModeComparer());
    foreach (int samplecount in aaLevels)
    {
        // HashSet.Add is already a no-op for duplicates, so the former
        // Contains pre-check was redundant (double lookup).
        availGraphicsModes.Add(new GraphicsMode(32, 24, 0, samplecount, new OpenTK.Graphics.ColorFormat(0), 2, false));
    }
    // Highest available AA level as a power-of-two exponent (16 samples -> level 4).
    int highestAALevel = MathF.RoundToInt(MathF.Log(MathF.Max(availGraphicsModes.Max(m => m.Samples), 1.0f), 2.0f));
    int targetAALevel = highestAALevel;
    if (DualityApp.AppData.MultisampleBackBuffer)
    {
        switch (DualityApp.UserData.AntialiasingQuality)
        {
            case AAQuality.High: targetAALevel = highestAALevel; break;
            case AAQuality.Medium: targetAALevel = highestAALevel / 2; break;
            case AAQuality.Low: targetAALevel = highestAALevel / 4; break;
            case AAQuality.Off: targetAALevel = 0; break;
        }
    }
    else
    {
        targetAALevel = 0;
    }
    int targetSampleCount = MathF.RoundToInt(MathF.Pow(2.0f, targetAALevel));
    // Best mode not exceeding the target; fall back to the last candidate.
    return availGraphicsModes.LastOrDefault(m => m.Samples <= targetSampleCount) ?? availGraphicsModes.Last();
}
/// <summary>
/// Returns the most recently reported object that implements
/// StructureElement, or null when none has been reported yet.
/// </summary>
private StructureElement getLastInsertedStructureElement()
{
    var lastMatch = _allReportedObjects.LastOrDefault(x => x.IsAnImplementationOf(typeof(StructureElement)));
    return (StructureElement)lastMatch;
}