// Hash is derived from the configuration/platform pair.
public override int GetHashCode()
{
    return HashHelpers.Hash(Configuration, Platform);
}
// Combines the hash codes of both items; a null item contributes zero.
public override int GetHashCode()
{
    int firstHash = Item1 == null ? 0 : Item1.GetHashCode();
    int secondHash = Item2 == null ? 0 : Item2.GetHashCode();
    return HashHelpers.CombineHashCodes(firstHash, secondHash);
}
/// <summary>
/// Reads framed protocol messages from <paramref name="reader"/> until one complete,
/// valid message arrives and returns it. Returns null when the pipe completes with no
/// leftover bytes; throws <see cref="InvalidDataException"/> on malformed or truncated
/// data. Frames with a wrong magic or checksum are logged and skipped, not fatal.
/// </summary>
public static async ValueTask <Message?> ReceiveMessage(PipeReader reader, EndPoint address, uint magic, ILogger log, CancellationToken token = default)
{
    while (true)
    {
        var readResult = await reader.ReadAsync(token).ConfigureAwait(false);
        var buffer = readResult.Buffer;
        log.LogDebug("read {length} bytes from pipe {address} {IsCompleted} {IsCanceled} ", readResult.Buffer.Length, address, readResult.IsCompleted, readResult.IsCanceled);
        // Track consumed/examined positions so AdvanceTo (in finally) always reports
        // progress back to the PipeReader, even when we return or throw early.
        SequencePosition consumed = buffer.Start;
        SequencePosition examined = buffer.End;
        try
        {
            if (readResult.IsCanceled)
            {
                throw new OperationCanceledException();
            }
            var messageRead = TryReadMessage(ref buffer, out var message, out var advance);
            if (advance)
            {
                // TryReadMessage narrowed the buffer to exactly one frame, so the whole
                // slice is consumed whether the frame was valid or skipped.
                consumed = buffer.End;
                examined = consumed;
            }
            if (messageRead)
            {
                Debug.Assert(message != null);
                return (message);
            }
            if (readResult.IsCompleted)
            {
                // Writer finished; leftover bytes mean a truncated message.
                if (buffer.Length > 0)
                {
                    throw new InvalidDataException("Incomplete message.");
                }
                return (null);
            }
        }
        finally
        {
            reader.AdvanceTo(consumed, examined);
        }
    }

    // Attempts to parse one message from _buffer. Returns true with _message set on
    // success. _advance=true means _buffer was sliced down to one full frame that must
    // be consumed even when false is returned (bad magic / bad checksum -> skip frame).
    bool TryReadMessage(ref ReadOnlySequence <byte> _buffer, out Message?_message, out bool _advance)
    {
        _message = null;
        _advance = false;
        if (_buffer.Length < MessageHeader.Size)
        {
            log.LogDebug("Haven't received enough data to read the message header {bufferLength} {address}", _buffer.Length, address);
            return (false);
        }
        if (!MessageHeader.TryRead(_buffer, out var header))
        {
            throw new InvalidDataException("MessageHeader could not be parsed");
        }
        log.LogDebug("Received {command} message header {magic} {length} {checksum} {address}", header.Command, header.Magic, header.Length, header.Checksum, address);
        var messageLength = MessageHeader.Size + header.Length;
        if (_buffer.Length < messageLength)
        {
            log.LogDebug("Haven't received enough data to read the message payload {bufferNeeded} {bufferLength} {address}", messageLength, _buffer.Length, address);
            return (false);
        }
        // Narrow to exactly one frame; from here on the frame is consumed either way.
        _buffer = _buffer.Slice(0, messageLength);
        _advance = true;
        if (header.Magic != magic)
        {
            // ignore messages sent with the wrong magic value
            log.LogWarning("Ignoring message with incorrect magic {expected} {actual} {address}", magic, header.Magic, address);
            return (false);
        }
        // Checksum = first four bytes (little-endian) of the payload's hash.
        Span <byte> hashBuffer = stackalloc byte[UInt256.Size];
        HashHelpers.TryHash256(_buffer.Slice(MessageHeader.Size), hashBuffer);
        var checksum = BinaryPrimitives.ReadUInt32LittleEndian(hashBuffer.Slice(0, sizeof(uint)));
        if (header.Checksum != checksum)
        {
            // ignore messages sent with invalid checksum
            log.LogWarning("Ignoring message with incorrect checksum {expected} {actual} {address}", checksum, header.Checksum, address);
            return (false);
        }
        if (Message.TryRead(_buffer, header, out _message))
        {
            log.LogDebug("Receive {message} {address}", _message.GetType().Name, address);
            return (true);
        }
        else
        {
            throw new InvalidDataException($"'{header.Command}' Message could not be parsed");
        }
    }
}
// Replays each recorded XML action against the backend in order, skipping (or throwing
// on, depending on _throwActionReplayed) actions whose hash already exists in the replay
// log, and records every applied action. Calls PostReplay() after all actions finish.
private void ReplayActionsFromXml(IEnumerable <XElement> actionElements, string currentDbVersion, string backendUrl, int updateId)
{
    foreach (var xmlAction in actionElements)
    {
        XmlDbUpdateRecordedAction action;
        var xmlActionString = xmlAction.ToNormalizedString(SaveOptions.DisableFormatting, true);
        try
        {
            action = XmlDbUpdateSerializerHelpers.DeserializeAction(xmlAction);
        }
        catch (Exception ex)
        {
            // Attach the offending xml to the exception data for diagnostics.
            var throwEx = new XmlDbUpdateReplayActionException("Error while deserializing xml action string.", ex);
            throwEx.Data.Add(LoggerData.XmlDbUpdateExceptionXmlActionStringData, xmlActionString.ToJsonLog());
            throw throwEx;
        }
        // The MD5 of the normalized xml is the identity used for replay deduplication.
        var logEntry = new XmlDbUpdateActionsLogModel
        {
            UserId = _userId,
            Applied = DateTime.Now,
            ParentId = action.ParentId,
            UpdateId = updateId,
            SourceXml = xmlActionString,
            Hash = HashHelpers.CalculateMd5Hash(xmlActionString)
        };
        if (_dbLogService.IsActionAlreadyReplayed(logEntry.Hash))
        {
            Logger.Warn()
                .Message("XmlDbUpdateAction conflict: Current action already applied and exist at database")
                .Property("logEntry", logEntry)
                .Write();
            if (_throwActionReplayed)
            {
                var throwEx = new XmlDbUpdateReplayActionException(
                    $"Current action (code: {action.Code}, ids: [{String.Join(",", action.Ids)}], parentId: {action.ParentId}) already applied and exists in database. "
                    );
                throw throwEx;
            }
            continue;
        }
        // Log a trimmed copy of the xml (descendants removed) to keep log lines short.
        var xmlActionStringLog = xmlAction.RemoveDescendants().ToString(SaveOptions.DisableFormatting);
        Logger.Debug()
            .Message("-> Begin replaying action [{hash}]: -> {xml}", logEntry.Hash, xmlActionStringLog)
            .Write();
        var replayedAction = ReplayAction(action, backendUrl);
        Logger.Debug()
            .Message("End replaying action [{hash}]: -> {xml}", logEntry.Hash, xmlActionStringLog)
            .Write();
        // Persist the result of the replay (ids may have been remapped by the backend).
        logEntry.Ids = string.Join(",", replayedAction.Ids);
        logEntry.ResultXml = XmlDbUpdateSerializerHelpers.SerializeAction(replayedAction, currentDbVersion, backendUrl, true).ToNormalizedString(SaveOptions.DisableFormatting, true);
        _dbLogService.InsertActionLogEntry(logEntry);
    }
    PostReplay();
}
// Identifies a parameter by its position within its declaring member.
public int GetHashCode(ParameterInfo obj)
{
    var positionHash = obj.Position.GetHashCode();
    var memberHash = obj.Member.GetHashCode();
    return HashHelpers.Combine(positionHash, memberHash);
}
/// <summary>
/// Gets a hash for the file pattern match.
/// </summary>
/// <returns>A hash code combining the path and stem hashes.</returns>
public override int GetHashCode()
{
    return HashHelpers.Combine(GetHashCode(Path), GetHashCode(Stem));
}
// Builds the settings page: loads the persisted config into the bound properties,
// registers per-property validators, and wires reactive subscriptions that save
// changes back to the config file / UiConfig.
public SettingsViewModel() : base("Settings")
{
    Global = Locator.Current.GetService <Global>();

    // Per-property validation hookups.
    this.ValidateProperty(x => x.SomePrivacyLevel, ValidateSomePrivacyLevel);
    this.ValidateProperty(x => x.FinePrivacyLevel, ValidateFinePrivacyLevel);
    this.ValidateProperty(x => x.StrongPrivacyLevel, ValidateStrongPrivacyLevel);
    this.ValidateProperty(x => x.DustThreshold, ValidateDustThreshold);
    this.ValidateProperty(x => x.TorSocks5EndPoint, ValidateTorSocks5EndPoint);
    this.ValidateProperty(x => x.BitcoinP2pEndPoint, ValidateBitcoinP2pEndPoint);

    Autocopy = Global.UiConfig.Autocopy;
    CustomFee = Global.UiConfig.IsCustomFee;
    CustomChangeAddress = Global.UiConfig.IsCustomChangeAddress;

    // Re-read the config file from disk so the page reflects persisted values.
    var config = new Config(Global.Config.FilePath);
    config.LoadOrCreateDefaultFile();

    Network = config.Network;
    // NOTE(review): -1 is passed as the (default) port when rendering endpoints —
    // presumably this omits the port from the string; confirm with the extension used.
    TorSocks5EndPoint = config.TorSocks5EndPoint.ToString(-1);
    UseTor = config.UseTor;
    TerminateTorOnExit = config.TerminateTorOnExit;
    StartLocalBitcoinCoreOnStartup = config.StartLocalBitcoinCoreOnStartup;
    StopLocalBitcoinCoreOnShutdown = config.StopLocalBitcoinCoreOnShutdown;
    SomePrivacyLevel = config.PrivacyLevelSome.ToString();
    FinePrivacyLevel = config.PrivacyLevelFine.ToString();
    StrongPrivacyLevel = config.PrivacyLevelStrong.ToString();
    DustThreshold = config.DustThreshold.ToString();
    BitcoinP2pEndPoint = config.GetP2PEndpoint().ToString(defaultPort: -1);
    LocalBitcoinCoreDataDir = config.LocalBitcoinCoreDataDir;
    IsModified = !Global.Config.AreDeepEqual(config);

    // Persist whenever one of these core settings changes.
    this.WhenAnyValue(
        x => x.Network,
        x => x.UseTor,
        x => x.TerminateTorOnExit,
        x => x.StartLocalBitcoinCoreOnStartup,
        x => x.StopLocalBitcoinCoreOnShutdown)
        .ObserveOn(RxApp.TaskpoolScheduler)
        .Subscribe(_ => Save());

    this.WhenAnyValue(x => x.Autocopy)
        .ObserveOn(RxApp.TaskpoolScheduler)
        .Subscribe(x => Global.UiConfig.Autocopy = x);

    this.WhenAnyValue(x => x.CustomFee)
        .ObserveOn(RxApp.TaskpoolScheduler)
        .Subscribe(x => Global.UiConfig.IsCustomFee = x);

    this.WhenAnyValue(x => x.CustomChangeAddress)
        .ObserveOn(RxApp.TaskpoolScheduler)
        .Subscribe(x => Global.UiConfig.IsCustomChangeAddress = x);

    OpenConfigFileCommand = ReactiveCommand.CreateFromTask(OpenConfigFileAsync);

    // Sets the lock-screen PIN when none is set; clears it when the entered PIN matches.
    SetClearPinCommand = ReactiveCommand.Create(() =>
    {
        var pinBoxText = PinBoxText;
        if (string.IsNullOrEmpty(pinBoxText))
        {
            NotificationHelpers.Error("Please provide a PIN.");
            return;
        }
        var trimmedPinBoxText = pinBoxText?.Trim();
        // PIN must be digits only.
        if (string.IsNullOrEmpty(trimmedPinBoxText) || trimmedPinBoxText.Any(x => !char.IsDigit(x)))
        {
            NotificationHelpers.Error("Invalid PIN.");
            return;
        }
        if (trimmedPinBoxText.Length > 10)
        {
            NotificationHelpers.Error("PIN is too long.");
            return;
        }
        // Only a SHA-256 hash of the PIN is stored/compared, never the PIN itself.
        var uiConfigPinHash = Global.UiConfig.LockScreenPinHash;
        var enteredPinHash = HashHelpers.GenerateSha256Hash(trimmedPinBoxText);
        if (IsPinSet)
        {
            if (uiConfigPinHash != enteredPinHash)
            {
                NotificationHelpers.Error("PIN is incorrect.");
                PinBoxText = "";
                return;
            }
            Global.UiConfig.LockScreenPinHash = "";
            NotificationHelpers.Success("PIN was cleared.");
        }
        else
        {
            Global.UiConfig.LockScreenPinHash = enteredPinHash;
            NotificationHelpers.Success("PIN was changed.");
        }
        PinBoxText = "";
    });

    TextBoxLostFocusCommand = ReactiveCommand.Create(Save);

    // Funnel all command exceptions into the logger.
    Observable
        .Merge(OpenConfigFileCommand.ThrownExceptions)
        .Merge(SetClearPinCommand.ThrownExceptions)
        .Merge(TextBoxLostFocusCommand.ThrownExceptions)
        .ObserveOn(RxApp.TaskpoolScheduler)
        .Subscribe(ex => Logger.LogError(ex));

    // Fall back to sat/byte when the stored value is not a defined enum member.
    SelectedFeeDisplayFormat = Enum.IsDefined(typeof(FeeDisplayFormat), Global.UiConfig.FeeDisplayFormat)
        ? (FeeDisplayFormat)Global.UiConfig.FeeDisplayFormat
        : FeeDisplayFormat.SatoshiPerByte;

    this.WhenAnyValue(x => x.SelectedFeeDisplayFormat)
        .ObserveOn(RxApp.MainThreadScheduler)
        .Subscribe(x => Global.UiConfig.FeeDisplayFormat = (int)x);
}
// Structural hash over priority, slice identity, time and slice hash.
public override int GetHashCode() =>
    HashHelpers.GetStructuralHashCode(Priority, sliceId, time, sliceHash);
// Folds the latest run into the base class hash.
public override int GetHashCode() =>
    HashHelpers.CalculateCompositeHash(LatestRun, base.GetHashCode());
// Composite hash over identity, status and duration.
public override int GetHashCode() =>
    HashHelpers.CalculateCompositeHash(Id, Status, Duration);
// Inserts or updates key -> value in the hash table. When add is true an existing
// key is an error; otherwise the existing entry's value is overwritten in place.
private void Insert(TKey key, TValue value, bool add)
{
    if (key == null)
    {
        ThrowHelper("ExceptionArgument.key");
    }
    // Lazily allocate the table on first insert.
    if (buckets == null)
    {
        Initialize(0);
    }
    // Mask off the sign bit so the bucket modulo below is never negative.
    int hashCode = comparer.GetHashCode(key) & 0x7FFFFFFF;
    int targetBucket = hashCode % buckets.Length;
#if FEATURE_RANDOMIZED_STRING_HASHING
    int collisionCount = 0;
#endif
    // Walk the bucket's chain looking for an existing entry with this key.
    for (int i = buckets[targetBucket]; i >= 0; i = entries[i].next)
    {
        if (entries[i].hashCode == hashCode && comparer.Equals(entries[i].key, key))
        {
            if (add)
            {
                ThrowHelper("Argument_AddingDuplicate");
            }
            entries[i].value = value;
            version++;
            return;
        }
#if FEATURE_RANDOMIZED_STRING_HASHING
        collisionCount++;
#endif
    }
    int index;
    if (freeCount > 0)
    {
        // Reuse a slot from the free list.
        index = freeList;
        freeList = entries[index].next;
        freeCount--;
    }
    else
    {
        if (count == entries.Length)
        {
            Resize();
            // The table grew, so the target bucket must be recomputed.
            targetBucket = hashCode % buckets.Length;
        }
        index = count;
        count++;
    }
    // Link the new entry at the head of its bucket chain.
    entries[index].hashCode = hashCode;
    entries[index].next = buckets[targetBucket];
    entries[index].key = key;
    entries[index].value = value;
    buckets[targetBucket] = index;
    version++;
#if FEATURE_RANDOMIZED_STRING_HASHING
    // Too many collisions with a well-known comparer: switch to a randomized
    // comparer and rehash everything to defend against hash-flooding attacks.
    if (collisionCount > HashHelpers.HashCollisionThreshold && HashHelpers.IsWellKnownEqualityComparer(comparer))
    {
        comparer = (IEqualityComparer <TKey>)HashHelpers.GetRandomizedEqualityComparer(comparer);
        Resize(entries.Length, true);
    }
#endif
}
// Identity is the (Name, Version) pair.
public override int GetHashCode()
{
    int nameHash = Name.GetHashCode();
    int versionHash = Version.GetHashCode();
    return HashHelpers.Combine(nameHash, versionHash);
}
// Rebuilds the bucket/entry arrays. If any key has expired (its DependentHandle's
// primary was collected), the table is rebuilt at the SAME size — scrubbing dead
// entries reclaims space without growing; otherwise the table grows to the next prime.
private void Resize()
{
    // Start by assuming we won't resize.
    int newSize = _buckets.Length;

    // If any expired keys exist, we won't resize.
    bool hasExpiredEntries = false;
    int entriesIndex;
    for (entriesIndex = 0; entriesIndex < _entries.Length; entriesIndex++)
    {
        // Allocated handle whose primary (key) is gone => entry has expired.
        if (_entries[entriesIndex].depHnd.IsAllocated && _entries[entriesIndex].depHnd.GetPrimary() == null)
        {
            hasExpiredEntries = true;
            break;
        }
    }
    if (!hasExpiredEntries)
    {
        newSize = HashHelpers.GetPrime(_buckets.Length == 0 ? _initialCapacity + 1 : _buckets.Length * 2);
    }

    // Reallocate both buckets and entries and rebuild the bucket and freelists from scratch.
    // This serves both to scrub entries with expired keys and to put the new entries in the proper bucket.
    int newFreeList = -1;
    int[] newBuckets = new int[newSize];
    // -1 marks an empty bucket.
    for (int bucketIndex = 0; bucketIndex < newSize; bucketIndex++)
    {
        newBuckets[bucketIndex] = -1;
    }
    Entry[] newEntries = new Entry[newSize];

    // Migrate existing entries to the new table.
    for (entriesIndex = 0; entriesIndex < _entries.Length; entriesIndex++)
    {
        DependentHandle depHnd = _entries[entriesIndex].depHnd;
        if (depHnd.IsAllocated && depHnd.GetPrimary() != null)
        {
            // Entry is used and has not expired. Link it into the appropriate bucket list.
            // NOTE(review): assumes stored hashCode is non-negative — confirm at insert site.
            int bucket = _entries[entriesIndex].hashCode % newSize;
            newEntries[entriesIndex].depHnd = depHnd;
            newEntries[entriesIndex].hashCode = _entries[entriesIndex].hashCode;
            newEntries[entriesIndex].next = newBuckets[bucket];
            newBuckets[bucket] = entriesIndex;
        }
        else
        {
            // Entry has either expired or was on the freelist to begin with. Either way
            // insert it on the new freelist.
            _entries[entriesIndex].depHnd.Free();
            newEntries[entriesIndex].depHnd = new DependentHandle();
            newEntries[entriesIndex].next = newFreeList;
            newFreeList = entriesIndex;
        }
    }

    // Add remaining entries to freelist.
    while (entriesIndex != newEntries.Length)
    {
        newEntries[entriesIndex].depHnd = new DependentHandle();
        newEntries[entriesIndex].next = newFreeList;
        newFreeList = entriesIndex;
        entriesIndex++;
    }

    // Publish the rebuilt table.
    _buckets = newBuckets;
    _entries = newEntries;
    _freeList = newFreeList;
}
/// <inheritdoc />
public int GetHashCode(Vector4 obj)
{
    int vectorHash = obj.GetHashCode();
    int precisionHash = this.Precision.GetHashCode();
    return HashHelpers.Combine(vectorHash, precisionHash);
}
// Grows storage to the next prime past the current element count.
private void Resize() => Resize(HashHelpers.ExpandPrime(elemCount), false);
/// <summary>
/// Returns a hash code value for this object.
/// </summary>
public override int GetHashCode()
{
    // Hashing only the first/last positions plus the count keeps this cheap;
    // hash every element of positions instead if collisions become a problem.
    int positionsHash = positions.Count == 0
        ? 0
        : HashHelpers.CombineHashCodes(positions.First().GetHashCode(), positions.Last().GetHashCode(), positions.Count) ^ 0x4AC65113;
    return Number.FloatToIntBits(Boost) ^ slop ^ TermArraysHashCode() ^ positionsHash;
}
// Grows storage to the next prime past the current count.
private void Resize()
{
    Resize(HashHelpers.ExpandPrime(_count), false);
}
// Grows storage to the next prime past the current count, keeping existing hash codes.
private void Resize() => Resize(HashHelpers.ExpandPrime(count), forceNewHashCodes: false);
// Composite hash over name, run command, job type and error.
public override int GetHashCode() =>
    HashHelpers.CalculateCompositeHash(Name, RunCommand, JobType, Error);
// Captures the key/value pair and precomputes the key's hash exactly once.
public HashedKeyValue(TKey key, TValue value)
{
    HashCode = HashHelpers.GetHashCode(key);
    Key = key;
    Value = value;
}
/// <inheritdoc/>
public override int GetHashCode()
{
    return HashHelpers.Combine(this.First.GetHashCode(), this.Second.GetHashCode());
}
// Adds key/value to the open-addressed table with per-bucket linked chains stored in
// _valuesInfo. Returns false (no insert) when an equal key is already present.
// Buckets store valueIndex + 1 so 0 can mean "empty"; when collisions exceed the
// bucket count, the bucket array grows and every chain is rebuilt.
bool AddValue(T key, ref W value)
{
    //get the hash and bucket index
    int hash = key.GetHashCode() & int.MaxValue; // clear the sign bit for the modulo
    int bucketIndex = hash % _buckets.Length;

    //buckets value -1 means it's empty
    var valueIndex = GetBucketIndex(_buckets[bucketIndex]);
    if (valueIndex == -1)
    {
        //create the infonode at the last position and fill it with the relevant information
        _valuesInfo[_freeValueCellIndex] = new Node(ref key, hash);
    }
    else
    {
        int currentValueIndex = valueIndex;
        do
        {
            //must check if the key already exists in the dictionary
            //for some reason this is way faster they use Comparer<T>.default, should investigate
            if (_valuesInfo[currentValueIndex].hashcode == hash && _valuesInfo[currentValueIndex].key.CompareTo(key) == 0)
            {
                // duplicate key: leave the table untouched
                return (false);
            }
            currentValueIndex = _valuesInfo[currentValueIndex].previous;
        }while (currentValueIndex != -1);

        //oops collision!
        _collisions++;
        //create a new one that points to the existing one
        //new one prev = the first in the bucket
        _valuesInfo[_freeValueCellIndex] = new Node(ref key, hash, valueIndex);
        //the first in the bucket next = new one
        _valuesInfo[valueIndex].next = (int)_freeValueCellIndex;
    }

    //item with this bucketIndex will point to the last value created
    // (+1 because 0 is reserved for "empty bucket")
    _buckets[bucketIndex] = _freeValueCellIndex + 1;
    _values[_freeValueCellIndex] = value;

    // Grow both parallel arrays once the last free cell is used.
    if (++_freeValueCellIndex == _values.Length)
    {
        Array.Resize(ref _values, HashHelpers.ExpandPrime((int)_freeValueCellIndex));
        Array.Resize(ref _valuesInfo, HashHelpers.ExpandPrime((int)_freeValueCellIndex));
    }

    //too many collisions?
    if (_collisions > _buckets.Length)
    {
        //we need more space and less collisions
        _buckets = new int[HashHelpers.ExpandPrime(_collisions)];
        _collisions = 0;

        //we need to scan all the values inserted so far
        //to recompute the collision indices
        for (int i = 0; i < _freeValueCellIndex; i++)
        {
            //get the original hash code and find the new bucketIndex
            bucketIndex = (_valuesInfo[i].hashcode) % _buckets.Length;
            //bucketsIndex can be -1 or a next value. If it's -1 means no collisions.
            //If there is a collision, link this node into the existing chain and
            //point the bucket at the current node, rebuilding the linked list.
            valueIndex = GetBucketIndex(_buckets[bucketIndex]);
            if (valueIndex != -1)
            {
                _collisions++;
                _valuesInfo[i].previous = valueIndex;
                _valuesInfo[valueIndex].next = i;
            }
            else
            {
                _valuesInfo[i].next = -1;
                _valuesInfo[i].previous = -1;
            }

            //buckets at bucketIndex will remember the value/valueInfo
            //index for that bucketIndex.
            _buckets[bucketIndex] = i + 1;
        }
    }

    _count++;
    return (true);
}
// Identity is the (Width, Height) pair.
public override int GetHashCode()
{
    return HashHelpers.Combine(Width.GetHashCode(), Height.GetHashCode());
}
// Preallocates node/value storage of the requested size and a prime-sized bucket array.
public FasterDictionary(int size)
{
    _buckets = new int[HashHelpers.GetPrime(size)];
    _values = new W[size];
    _valuesInfo = new Node[size];
}
// Identity is the (X, Y) pair.
public override int GetHashCode()
{
    return HashHelpers.Combine(this.X.GetHashCode(), this.Y.GetHashCode());
}
// Reads the whole file into memory and returns its SHA-256 digest.
public static byte[] GetHashFile(string filePath)
{
    var contents = File.ReadAllBytes(filePath);
    return HashHelpers.GenerateSha256Hash(contents);
}
// Grows storage to the next prime past the current count.
void Resize() => Resize(HashHelpers.ExpandPrime(count));
// Folds X, Y, Width and Height into one hash, left to right.
public override int GetHashCode()
{
    int hash = HashHelpers.Combine(X, Y);
    hash = HashHelpers.Combine(hash, Width);
    return HashHelpers.Combine(hash, Height);
}
/// <summary>
/// <para>
/// Returns a hash code.
/// </para>
/// </summary>
public override int GetHashCode()
{
    return HashHelpers.Combine(X, Y);
}
// Stress-tests FasterDictionary<int, Test>: fill/read-back, partial removals, Trim
// while holes exist, re-insertion, full removal (and double-removal must fail),
// then re-use after Clear with sparse keys. Throws on any mismatch.
public void TestFasterDictionary()
{
    FasterDictionary <int, Test> test = new FasterDictionary <int, Test>();

    uint dictionarysize = 10000;

    // Build a deterministic pseudo-random-looking payload array.
    // NOTE(review): the multiply/add intentionally overflows int; values only need
    // to be deterministic, not meaningful.
    int[] numbers = new int[dictionarysize];
    for (int i = 1; i < dictionarysize; i++)
    {
        numbers[i] = numbers[i - 1] + i * HashHelpers.ExpandPrime((int)dictionarysize);
    }

    // Fill and verify full read-back.
    for (int i = 0; i < dictionarysize; i++)
    {
        test[i] = new Test(numbers[i]);
    }
    for (int i = 0; i < dictionarysize; i++)
    {
        if (test[i].i != numbers[i])
        {
            throw new Exception();
        }
    }

    // Remove the even keys; each removal must report success.
    for (int i = 0; i < dictionarysize; i += 2)
    {
        if (test.Remove(i) == false)
        {
            throw new Exception();
        }
    }

    // Trim with holes present, then restore every key.
    test.Trim();
    for (int i = 0; i < dictionarysize; i++)
    {
        test[i] = new Test(numbers[i]);
    }

    // Verify odd keys, then all keys.
    for (int i = 1; i < dictionarysize - 1; i += 2)
    {
        if (test[i].i != numbers[i])
        {
            throw new Exception();
        }
    }
    for (int i = 0; i < dictionarysize; i++)
    {
        if (test[i].i != numbers[i])
        {
            throw new Exception();
        }
    }

    // Remove every third key from the top down, trim, re-add, verify all.
    for (int i = (int)(dictionarysize - 1); i >= 0; i -= 3)
    {
        if (test.Remove(i) == false)
        {
            throw new Exception();
        }
    }
    test.Trim();
    for (int i = (int)(dictionarysize - 1); i >= 0; i -= 3)
    {
        test[i] = new Test(numbers[i]);
    }
    for (int i = 0; i < dictionarysize; i++)
    {
        if (test[i].i != numbers[i])
        {
            throw new Exception();
        }
    }

    // Remove everything; the first pass must succeed, a second pass must fail.
    for (int i = 0; i < dictionarysize; i++)
    {
        if (test.Remove(i) == false)
        {
            throw new Exception();
        }
    }
    for (int i = 0; i < dictionarysize; i++)
    {
        if (test.Remove(i) == true)
        {
            throw new Exception();
        }
    }

    // Re-use after Trim + Clear, this time with the payload values as sparse keys.
    test.Trim();
    test.Clear();
    for (int i = 0; i < dictionarysize; i++)
    {
        test[numbers[i]] = new Test(i);
    }
    for (int i = 0; i < dictionarysize; i++)
    {
        Test JapaneseCalendar = test[numbers[i]];
        if (JapaneseCalendar.i != i)
        {
            throw new Exception("read back test failed");
        }
    }
}