/// <summary>
/// Verifies that executing and uploading a storage transaction advances the global
/// transaction id and moves <paramref name="amount"/> units of the product from
/// <paramref name="storage1"/> to <paramref name="storage2"/>.
/// </summary>
public void CreateStorageTransactionTest(int prodID, int amount, int storage1, int storage2)
{
    // Snapshot the next transaction id so we can check it advances after upload.
    int oldId = StorageTransaction.GetNextID();
    Product testProd = SC.ProductDictionary[prodID];

    // TryAdd is already a no-op when the key exists, so the ContainsKey pre-check
    // in the original was a redundant double lookup.
    testProd.StorageWithAmount.TryAdd(storage1, 0);
    testProd.StorageWithAmount.TryAdd(storage2, 0);

    int sourceCountBefore = testProd.StorageWithAmount[storage1];
    int destinationCountBefore = testProd.StorageWithAmount[storage2];

    StorageTransaction testTrans = new StorageTransaction(testProd, amount, storage1, storage2, SC.StorageRoomDictionary);
    testTrans.Execute();
    testTrans.UploadToDatabase();

    int newId = StorageTransaction.GetNextID();

    // Id must have advanced and the amount must have moved source -> destination.
    bool idCompare = newId > oldId;
    bool storageCompare1 = testProd.StorageWithAmount[storage1] == sourceCountBefore - amount;
    bool storageCompare2 = testProd.StorageWithAmount[storage2] == destinationCountBefore + amount;
    Assert.IsTrue(idCompare && storageCompare1 && storageCompare2);
}
/// <summary>
/// Runs <paramref name="func"/> against a write accessor under the global lock.
/// If the mutator's resulting transaction differs from the current one, the new
/// transaction is written to disk, flushed, installed as current, and the commit
/// notifications are raised; otherwise nothing is persisted.
/// </summary>
public void Write(Action<StorageActionsAccessor> func)
{
    lock (locker)
    {
        using (var reader = OpenReader())
        {
            var mutator = new StorageMutator(writer, reader, transaction);
            var viewer = new StorageReader(reader, transaction);
            var accessor = new StorageActionsAccessor(mutator, viewer, reader, writer);

            func(accessor);
            mutator.Flush();

            var storageTransaction = mutator.CreateTransaction();
            if (transaction.Equals(storageTransaction))
            {
                // Nothing changed — skip the disk write entirely.
                return;
            }

            WriteTransaction(storageTransaction);
            writer.Flush(true);
            transaction = storageTransaction;

            accessor.RaiseCommitEvent();
            // Null-conditional invocation replaces the explicit null check.
            onCommit?.Invoke();
        }
    }
}
/// <summary>
/// Moves the entered amount of the current product from the source room to the room
/// selected in the destination combo box, persisting the transaction. Marks the
/// amount text box red when the input is not a valid integer.
/// </summary>
private void button_MoveProduct_Click(object sender, RoutedEventArgs e)
{
    Product product = _storageController.ProductDictionary[productID];

    // Resolve the destination room id ONCE — the original re-ran the identical
    // LINQ query three times (resolution, Contains check, and TryAdd key).
    int destinationRoom = _storageController.StorageRoomDictionary
        .Where(x => x.Value.Name == comboBox_Destination.Text)
        .Select(x => x.Key)
        .First();

    if (!product.StorageWithAmount.ContainsKey(destinationRoom))
    {
        product.StorageWithAmount.TryAdd(destinationRoom, 0);
        product.UpdateInDatabase();
    }

    // Gate on TryParse's result instead of only "text is not empty": the original
    // silently executed a 0-amount transaction for non-numeric input.
    if (Int32.TryParse(textBox_ProductAmount.Text, out int parsevalue))
    {
        StorageTransaction storageTransaction = new StorageTransaction(product, parsevalue, sourceRoom, destinationRoom, _storageController.StorageRoomDictionary);
        storageTransaction.Execute();
        storageTransaction.UploadToDatabase();
        this.Close();
    }
    else
    {
        textBox_ProductAmount.BorderBrush = Brushes.Red;
    }
}
/// <summary>
/// Serializes <paramref name="tx"/> as BSON at the writer's current position, then
/// appends that start position (8 bytes) followed by the transaction signature, so
/// the latest transaction can be located by scanning backwards from the file's end.
/// </summary>
private void WriteTransaction(StorageTransaction tx)
{
    var position = writer.Position;
    new JsonSerializer().Serialize(new BsonWriter(writer), tx);
    binaryWriter.Write(position); // this has to be exactly 8 bytes long, so we can read it from the end
    binaryWriter.Write(TransactionSignature);
}
/// <summary>
/// Looks up <paramref name="id"/> in the backing store; returns null when absent.
/// (A LevelDB implementation would ordinarily read through a Snapshot tied to the transaction.)
/// </summary>
public Object Get(string id, StorageTransaction transaction)
{
    Object found;
    return this.backing.TryGetValue(id, out found) ? found : null;
}
/// <summary>
/// Commits the transaction against the backing store under a store-wide lock,
/// then prunes transactions that are no longer needed.
/// </summary>
private void CommitTransaction(StorageTransaction transaction)
{
    // This would be LevelDB.BatchWrite; a database-wide lock is used instead, mileage may vary.
    lock (this.backing)
    {
        transaction.Commit(this);
    }
    this.TryPruneObsoleteTransactions();
}
/// <summary>
/// Starts a new transaction with a freshly allocated id, capturing the currently
/// active transactions and registering the new one in activeTransactions.
/// </summary>
private StorageTransaction CreateTransaction()
{
    // Use the value RETURNED by Interlocked.Increment. The original discarded it and
    // re-read this.lastTransactionId afterwards — a separate, non-atomic read, so two
    // racing threads could observe the same id and collide in activeTransactions.
    var id = Interlocked.Increment(ref this.lastTransactionId);
    var currentTransactions = this.activeTransactions.ToArray();
    var transaction = new StorageTransaction(id, currentTransactions.Select(x => x.Value).ToArray());
    this.activeTransactions.TryAdd(id, transaction);
    return transaction;
}
/// <summary>
/// Returns the stored value for <paramref name="id"/>, or null when the key is absent.
/// </summary>
public Object Get(string id, StorageTransaction transaction)
{
    // This would ordinarily be using a Snapshot for the transaction in LevelDB
    Object outValue;
    if (this.backing.TryGetValue(id, out outValue))
    {
        return (outValue);
    }
    return (null);
}
/// <summary>
/// Records a delete of <paramref name="id"/> on the transaction and claims the key in
/// keysToTransactionId; a second transaction touching the same key fails fast.
/// </summary>
public void Delete(string id, StorageTransaction transaction)
{
    this.keysToTransactionId.AddOrUpdate(
        id,
        (key) =>
        {
            transaction.AddOperation(storage => storage.Delete(id));
            return transaction.Id;
        },
        (key, oldValue) =>
        {
            // NOTE: This doesn't handle the transaction doing multiple operations on the same key.
            // InvalidOperationException replaces the original bare Exception — more specific,
            // and still caught by any caller that was catching Exception.
            throw new InvalidOperationException("Concurrency conflict: key '" + id + "' is already claimed by transaction " + oldValue);
        });
}
/// <summary>
/// Records a put of <paramref name="obj"/> under <paramref name="id"/> on the transaction
/// and claims the key in keysToTransactionId; a second transaction touching the same key
/// fails fast.
/// </summary>
public void Put(string id, Object obj, StorageTransaction transaction)
{
    this.keysToTransactionId.AddOrUpdate(
        id,
        (key) =>
        {
            transaction.AddOperation(storage => storage.Put(id, obj));
            return transaction.Id;
        },
        (key, oldValue) =>
        {
            // NOTE: This doesn't handle the transaction doing multiple operations on the same key.
            // InvalidOperationException replaces the original bare Exception — more specific,
            // and still caught by any caller that was catching Exception.
            throw new InvalidOperationException("Concurrency conflict: key '" + id + "' is already claimed by transaction " + oldValue);
        });
}
/// <summary>
/// Extracts the underlying <see cref="CloudQueueMessage"/> from a storage transaction,
/// throwing when the transaction is null or wraps a different message type.
/// </summary>
private static CloudQueueMessage CreateCloudQueueMessage(StorageTransaction<T> transaction)
{
    if (transaction == null)
    {
        throw new ArgumentNullException(nameof(transaction));
    }

    if (transaction.Transaction is CloudQueueMessage cloudQueueMessage)
    {
        return cloudQueueMessage;
    }

    throw new ArgumentException("Invalid transaction type", nameof(transaction));
}
/// <summary>
/// Value equality for transactions: equal when every recorded position and count
/// field matches. Reference-equal instances short-circuit to true; null to false.
/// </summary>
public bool Equals(StorageTransaction other)
{
    if (ReferenceEquals(null, other))
    {
        return (false);
    }
    if (ReferenceEquals(this, other))
    {
        return (true);
    }
    // Exhaustive field-by-field comparison of the persisted positions and counts.
    return (other.DocumentsInTransactionPosition == DocumentsInTransactionPosition &&
        other.DocumentsByIdPosition == DocumentsByIdPosition &&
        other.DocumentsByEtagPosition == DocumentsByEtagPosition &&
        other.DocumentsPosition == DocumentsPosition &&
        other.AttachmentPosition == AttachmentPosition &&
        other.TasksPosition == TasksPosition &&
        other.TasksCount == TasksCount &&
        other.DocumentsCount == DocumentsCount &&
        other.AttachmentsCount == AttachmentsCount &&
        other.TransactionPosition == TransactionPosition &&
        other.IdentityPosition == IdentityPosition &&
        other.IndexesPosition == IndexesPosition &&
        other.QueuesPosition == QueuesPosition &&
        other.TasksByIndexPosition == TasksByIndexPosition &&
        other.MappedResultsByReduceKeyPosition == MappedResultsByReduceKeyPosition &&
        other.MappedResultsByDocumentIdPosition == MappedResultsByDocumentIdPosition);
}
/// <summary>
/// Validates the file header and loads the most recently committed transaction.
/// Layout (as read here): 16-byte header signature GUID, 7-bit-encoded version,
/// 16-byte file id, then data; each committed transaction is followed by its 8-byte
/// start offset and a 16-byte transaction signature GUID, found by scanning backwards.
/// </summary>
private void TryReadingExistingFile()
{
    using (var reader = OpenReader())
    using (var binaryReader = new BinaryReaderWith7BitEncoding(reader))
    {
        var headerSignature = binaryReader.ReadBytes(16);
        if (headerSignature.Length != 16 || new Guid(headerSignature) != HeaderSignatureGuid)
        {
            throw new Exceptions.InvalidFileFormatException("File signature is invalid, probably not a valid Raven storage file, or a corrupted one");
        }
        var version = binaryReader.Read7BitEncodedInt();
        if (version != Version)
        {
            throw new Exceptions.InvalidFileFormatException("File signature is valid, but the version information is " + version + ", while " + Version + " was expected");
        }
        var fileId = binaryReader.ReadBytes(16);
        if (fileId.Length != 16)
        {
            throw new Exceptions.InvalidFileFormatException("File signature is valid, but the file ID has been truncated");
        }
        Id = new Guid(fileId);

        // Scan backwards one byte at a time for the last transaction signature.
        var pos = reader.Length;
        while (pos > 16)
        {
            reader.Position = pos - 16;
            if (new Guid(binaryReader.ReadBytes(16)) == TransactionSignatureGuid)
            {
                reader.Position = pos - 24; // move to the position of the transaction itself
                var txPos = binaryReader.ReadInt64();
                reader.Position = txPos;
                transaction = new JsonSerializer().Deserialize<StorageTransaction>(new BsonReader(reader));
                return;
            }
            pos -= 1;
        }
        throw new Exceptions.InvalidFileFormatException("Could not find a valid transaction in the file");
    }
}
/// <summary>
/// Creates a transaction with the given id, recording the currently active transactions
/// and incrementing each one's reference count.
/// </summary>
public StorageTransaction(int id, StorageTransaction[] activeTransactions)
{
    this.id = id;
    this.trackedTransactions = activeTransactions;

    foreach (var tracked in this.trackedTransactions)
    {
        tracked.IncreaseRefCount();
    }
}
/// <summary>
/// Commits the transaction by deleting its underlying cloud queue message.
/// </summary>
public Task CommitAsync(StorageTransaction<T> transaction, CancellationToken cancellationToken)
    => _queue.DeleteMessageAsync(CreateCloudQueueMessage(transaction), cancellationToken);
/// <summary>
/// Rolls back by updating the message's visibility with a zero delay, making the
/// underlying cloud queue message visible again immediately.
/// </summary>
public Task RollbackAsync(StorageTransaction<T> transaction, CancellationToken cancellationToken)
    => _queue.UpdateMessageAsync(CreateCloudQueueMessage(transaction), TimeSpan.Zero, MessageUpdateFields.Visibility, cancellationToken);
/// <summary>
/// Binds this accessor to a storage instance and the transaction it operates under.
/// </summary>
public StorageAccessor(Storage storage, StorageTransaction transaction)
{
    this.storage = storage;
    this.transaction = transaction;
}
private void CommitTransaction(StorageTransaction transaction) { // This would be LevelDB.BatchWrite, I just use a database wide lock, mileage may vary lock(this.backing) { transaction.Commit(this); } this.TryPruneObsoleteTransactions(); }
private StorageTransaction CreateTransaction() { Interlocked.Increment(ref this.lastTransactionId); var currentTransactions = this.activeTransactions.ToArray(); var transaction = new StorageTransaction(this.lastTransactionId, currentTransactions.Select(x=> x.Value).ToArray()); this.activeTransactions.TryAdd(this.lastTransactionId, transaction); return transaction; }
/// <summary>
/// Drains the second-stage load queues (storage status, storage transactions, order
/// transactions) until SecondPoolDone() reports completion, wiring each dequeued row
/// into the in-memory dictionaries and bumping the corresponding progress counter.
/// NOTE(review): appears designed to run concurrently with producers — counters are
/// incremented via Interlocked and dictionaries via TryAdd; confirm against callers.
/// </summary>
private void HandleSecondPool()
{
    Row Data;
    if (FirstPoolDone())
    {
        Debug.WriteLine("First Queue is [DONE] going to second queue");
    }
    while (!SecondPoolDone())
    {
        // Storage status rows: Values[0] = product id, Values[1] = room id, Values[2] = amount.
        // Disabled products are tracked in a separate dictionary.
        while (_storageStatusQueue.TryDequeue(out Data))
        {
            int id = Convert.ToInt32(Data.Values[0]);
            if (ProductDictionary.ContainsKey(id))
            {
                ProductDictionary[id].StorageWithAmount.TryAdd(Convert.ToInt32(Data.Values[1]), Convert.ToInt32(Data.Values[2]));
            }
            else
            {
                DisabledProducts[id].StorageWithAmount.TryAdd(Convert.ToInt32(Data.Values[1]), Convert.ToInt32(Data.Values[2]));
            }
            Interlocked.Increment(ref _doneStorageStatusCount);
        }
        // Storage transaction rows: Values[1] = product id, Values[4] = source room id,
        // Values[5] = destination room id. Rows referencing unknown rooms are skipped
        // (but still counted as processed).
        while (_storageTransactionsQueue.TryDequeue(out Data))
        {
            int ProductID = Convert.ToInt32(Data.Values[1]);
            int SourceID = Convert.ToInt32(Data.Values[4]);
            int DestinationID = Convert.ToInt32(Data.Values[5]);
            if (StorageRoomDictionary.ContainsKey(SourceID) && StorageRoomDictionary.ContainsKey(DestinationID))
            {
                StorageTransaction StorageTrans = new StorageTransaction(Data, true);
                StorageRoom Source = StorageRoomDictionary[SourceID];
                StorageRoom Destination = StorageRoomDictionary[DestinationID];
                BaseProduct prod;
                if (ProductDictionary.ContainsKey(ProductID))
                {
                    prod = ProductDictionary[ProductID];
                }
                else
                {
                    prod = DisabledProducts[ProductID];
                }
                StorageTrans.SetInformation(Source, Destination, prod);
                StorageTransactionDictionary.TryAdd(StorageTrans.ID, StorageTrans);
            }
            Interlocked.Increment(ref _doneStorageTransactionCount);
        }
        // Order transaction rows: Values[1] = product id; always registered.
        while (_orderTransactionsQueue.TryDequeue(out Data))
        {
            OrderTransaction OrderTrans = new OrderTransaction(Data, true);
            int ProductID = Convert.ToInt32(Data.Values[1]);
            if (ProductDictionary.ContainsKey(ProductID))
            {
                OrderTrans.SetInformation(ProductDictionary[ProductID]);
            }
            else
            {
                OrderTrans.SetInformation(DisabledProducts[ProductID]);
            }
            OrderTransactionDictionary.TryAdd(OrderTrans.ID, OrderTrans);
            Interlocked.Increment(ref _doneOrderTransactionCount);
        }
    }
}
/// <summary>
/// Appends the BSON-serialized transaction, then its start offset and the transaction
/// signature marker, so readers can locate it by scanning from the end of the file.
/// </summary>
private void WriteTransaction(StorageTransaction tx)
{
    var startPosition = writer.Position;

    var serializer = new JsonSerializer();
    serializer.Serialize(new BsonWriter(writer), tx);

    // The offset must occupy exactly 8 bytes so it can be read back from the file's end.
    binaryWriter.Write(startPosition);
    binaryWriter.Write(TransactionSignature);
}
/// <summary>
/// Value equality for transactions: null is never equal, the same reference is always
/// equal, otherwise every recorded position and count field must match.
/// </summary>
public bool Equals(StorageTransaction other)
{
    if (other is null)
    {
        return false;
    }
    if (ReferenceEquals(this, other))
    {
        return true;
    }

    // Exhaustive comparison of all persisted positions and counts.
    return other.DocumentsInTransactionPosition == DocumentsInTransactionPosition
        && other.DocumentsByIdPosition == DocumentsByIdPosition
        && other.DocumentsByEtagPosition == DocumentsByEtagPosition
        && other.DocumentsPosition == DocumentsPosition
        && other.AttachmentPosition == AttachmentPosition
        && other.TasksPosition == TasksPosition
        && other.TasksCount == TasksCount
        && other.DocumentsCount == DocumentsCount
        && other.AttachmentsCount == AttachmentsCount
        && other.TransactionPosition == TransactionPosition
        && other.IdentityPosition == IdentityPosition
        && other.IndexesPosition == IndexesPosition
        && other.QueuesPosition == QueuesPosition
        && other.TasksByIndexPosition == TasksByIndexPosition
        && other.MappedResultsByReduceKeyPosition == MappedResultsByReduceKeyPosition
        && other.MappedResultsByDocumentIdPosition == MappedResultsByDocumentIdPosition;
}
/// <summary>
/// Executes <paramref name="func"/> against a write accessor under the global lock.
/// When the mutator yields a transaction that differs from the current one, it is
/// persisted, flushed, installed as current, and commit notifications are raised.
/// </summary>
public void Write(Action<StorageActionsAccessor> func)
{
    lock (locker)
    {
        using (var readStream = OpenReader())
        {
            var mutator = new StorageMutator(writer, readStream, transaction);
            var storageReader = new StorageReader(readStream, transaction);
            var actions = new StorageActionsAccessor(mutator, storageReader, readStream, writer);

            func(actions);
            mutator.Flush();

            var committed = mutator.CreateTransaction();
            if (transaction.Equals(committed) == false)
            {
                WriteTransaction(committed);
                writer.Flush(true);
                transaction = committed;

                actions.RaiseCommitEvent();
                if (onCommit != null)
                {
                    onCommit();
                }
            }
        }
    }
}
/// <summary>
/// Rolls back the given transaction, then prunes transactions that are no longer needed.
/// </summary>
private void RollbackTransaction(StorageTransaction transaction)
{
    transaction.Rollback();
    this.TryPruneObsoleteTransactions();
}
/// <summary>
/// Validates the file's header signature, version, and file id, then locates and
/// deserializes the most recently written transaction by scanning backwards from
/// the end of the file for the transaction signature GUID.
/// </summary>
private void TryReadingExistingFile()
{
    using (var stream = OpenReader())
    using (var binaryReader = new BinaryReaderWith7BitEncoding(stream))
    {
        var headerBytes = binaryReader.ReadBytes(16);
        if (headerBytes.Length != 16 || new Guid(headerBytes) != HeaderSignatureGuid)
        {
            throw new Exceptions.InvalidFileFormatException("File signature is invalid, probably not a valid Raven storage file, or a corrupted one");
        }

        var fileVersion = binaryReader.Read7BitEncodedInt();
        if (fileVersion != Version)
        {
            throw new Exceptions.InvalidFileFormatException("File signature is valid, but the version information is " + fileVersion + ", while " + Version + " was expected");
        }

        var idBytes = binaryReader.ReadBytes(16);
        if (idBytes.Length != 16)
        {
            throw new Exceptions.InvalidFileFormatException("File signature is valid, but the file ID has been truncated");
        }
        Id = new Guid(idBytes);

        // Walk backwards one byte at a time looking for the 16-byte transaction signature.
        var scanPosition = stream.Length;
        while (scanPosition > 16)
        {
            stream.Position = scanPosition - 16;
            if (new Guid(binaryReader.ReadBytes(16)) == TransactionSignatureGuid)
            {
                // The 8 bytes immediately before the signature hold the transaction's offset.
                stream.Position = scanPosition - 24;
                var transactionOffset = binaryReader.ReadInt64();
                stream.Position = transactionOffset;
                transaction = new JsonSerializer().Deserialize<StorageTransaction>(new BsonReader(stream));
                return;
            }
            scanPosition -= 1;
        }

        throw new Exceptions.InvalidFileFormatException("Could not find a valid transaction in the file");
    }
}