private async Task AuditLog(int currentUserId,
    int objectId,
    object newObject,
    object priorObject,
    bool priorObjectAlreadySerialized = false)
{
    if (_auditSet == null)
    {
        // audit logging is not enabled
        return;
    }

    var audit = new AuditLog
    {
        EntityType = newObject.GetType().ToString(),
        EntityId = objectId,
        UpdatedBy = currentUserId,
        // use the injected clock for testability, matching RemoveAsync below
        UpdatedAt = _dateTimeProvider.Now,
        CurrentValue = _entitySerializer.Serialize(newObject)
    };

    if (priorObject != null)
    {
        // the prior value may already be a serialized string (e.g. loaded from storage)
        audit.PreviousValue = priorObjectAlreadySerialized
            ? priorObject.ToString()
            : _entitySerializer.Serialize(priorObject);
    }

    await AuditSet.AddAsync(audit);
}
public virtual async Task RemoveAsync(int userId, int id)
{
    var entity = await DbSet.FindAsync(id);
    if (entity == null)
    {
        throw new GraException($"{nameof(DomainEntity)} id {id} could not be found.");
    }

    DbSet.Remove(entity);

    if (AuditSet != null)
    {
        // record the pre-delete state; CurrentValue is null because the entity is gone
        var audit = new AuditLog
        {
            EntityType = entity.GetType().ToString(),
            EntityId = entity.Id,
            UpdatedBy = userId,
            UpdatedAt = _dateTimeProvider.Now,
            CurrentValue = null,
            PreviousValue = _entitySerializer.Serialize(entity)
        };
        await AuditSet.AddAsync(audit);
    }
}
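Both repository methods above depend on an injected _entitySerializer whose interface is not shown in these excerpts. A minimal sketch of what it plausibly looks like, assuming a Newtonsoft.Json-backed implementation (the interface name IEntitySerializer is taken from the snippets; the JsonEntitySerializer class and its settings are illustrative):

// Minimal sketch, not the actual interface from the project above.
public interface IEntitySerializer
{
    string Serialize(object entity);
}

// One plausible implementation using Newtonsoft.Json (illustrative only).
public class JsonEntitySerializer : IEntitySerializer
{
    private static readonly Newtonsoft.Json.JsonSerializerSettings _settings =
        new Newtonsoft.Json.JsonSerializerSettings
        {
            // audited EF entities often have navigation cycles
            ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Ignore
        };

    public string Serialize(object entity)
    {
        return Newtonsoft.Json.JsonConvert.SerializeObject(entity, _settings);
    }
}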
/// <inheritdoc />
public Task Save(Cigar cigar)
{
    if (cigar == null)
    {
        throw new ArgumentNullException(nameof(cigar));
    }

    string data = _serializer.Serialize(cigar);
    var parameters = new { cigar.Id, Data = data };

    return _client.ExecuteInManagedConnectionAsync(connection =>
        connection.ExecuteAsync(
            "[Catalog].[Cigars_Save]",
            parameters,
            commandType: CommandType.StoredProcedure));
}
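ExecuteInManagedConnectionAsync is a project-specific helper that opens, uses, and disposes a connection around a Dapper call (connection.ExecuteAsync here is Dapper's extension method). A minimal sketch of such a helper, assuming SQL Server; the ManagedConnectionClient class name is hypothetical:

using System;
using System.Data.SqlClient;
using System.Threading.Tasks;

public class ManagedConnectionClient
{
    private readonly string _connectionString;

    public ManagedConnectionClient(string connectionString)
    {
        _connectionString = connectionString
            ?? throw new ArgumentNullException(nameof(connectionString));
    }

    // Opens a connection, runs the caller's delegate, and disposes the
    // connection even if the delegate throws.
    public async Task ExecuteInManagedConnectionAsync(
        Func<SqlConnection, Task> action)
    {
        using (var connection = new SqlConnection(_connectionString))
        {
            await connection.OpenAsync();
            await action(connection);
        }
    }
}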
public string Serialize(IEntitySerializer serializer)
{
    return serializer.Serialize(this);
}
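This is a double-dispatch style hook: the entity hands itself to whatever serializer the caller supplies, so the serialization strategy can vary without the entity knowing about concrete formats. A hypothetical call site (the Order and XmlEntitySerializer names are illustrative, not from the source project):

// Hypothetical usage; 'Order' and 'XmlEntitySerializer' are illustrative names.
var order = new Order { Id = 42 };
IEntitySerializer serializer = new XmlEntitySerializer();
string payload = order.Serialize(serializer);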
public async Task<IActionResult> GetCategories()
{
    var categories = await _categoryFetcher.GetCategoriesAsync();
    return new ObjectResult(_serializer.Serialize(categories));
}
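Note that wrapping an already-serialized JSON string in ObjectResult makes MVC's output formatter serialize it a second time, so clients receive a quoted, escaped JSON string rather than the raw document. If raw JSON is the intent, a ContentResult avoids the second pass; a sketch of that alternative, reusing the same (assumed) _categoryFetcher and _serializer fields:

// Sketch: return the pre-serialized JSON as-is instead of re-serializing it.
public async Task<IActionResult> GetCategories()
{
    var categories = await _categoryFetcher.GetCategoriesAsync();
    return new ContentResult
    {
        Content = _serializer.Serialize(categories),
        ContentType = "application/json",
        StatusCode = 200
    };
}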
protected virtual string SerializeEntity<T>(T entity) where T : class
{
    return Serializer.Serialize(entity);
}
protected JObject Serialize(object obj)
{
    var jObject = _serializer.Serialize(obj);

    // compact against an empty context using default JSON-LD options
    return JsonLdProcessor.Compact(jObject, new JObject(), new JsonLdOptions());
}
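Here _serializer.Serialize returns a JObject rather than a string, and JsonLdProcessor.Compact comes from the json-ld.net library. Compacting against an empty context replaces any term mappings with absolute IRIs, which normalizes documents that may have arrived with different @context values. An illustrative example, assuming the json-ld.net package:

// Illustrative only: compacting a small JSON-LD document with json-ld.net.
using JsonLD.Core;
using Newtonsoft.Json.Linq;

var doc = JObject.Parse(@"{
  ""@context"": { ""name"": ""http://schema.org/name"" },
  ""name"": ""Example""
}");

// With an empty context, term mappings are replaced by absolute IRIs.
JObject compacted = JsonLdProcessor.Compact(doc, new JObject(), new JsonLdOptions());
// compacted: { "http://schema.org/name": "Example" }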
public T Create(T entity)
{
    if (entity == null)
    {
        // the parameter name goes first; the message is the second argument
        throw new ArgumentNullException(nameof(entity), "Entity argument can't be null.");
    }

    // make sure the entity name matches the document store type
    string requiredEntityName = entity.GetType().Name;
    if (_entityName != requiredEntityName)
    {
        throw new ArgumentException("Entity type is not valid for this data store.");
    }

    // make sure the entity has a key field
    if (!InternalReflectionHelper.PropertyExists(entity, "Id"))
    {
        throw new InvalidOperationException(
            "Entity must have an Id property of type short, integer or long. " +
            "This is used as the primary key for the entity being stored.");
    }

    // load the document key from the entity as it's needed for adding to the index
    var documentKey = InternalReflectionHelper.GetPropertyValueInt64(entity, "Id");

    // get the data store header so we can generate keys and store record counts
    var header = GetDataHeader();

    // duplicates only need to be checked when the caller supplied the document key
    bool checkForDuplicate = documentKey != 0;

    // get the next document key from the data file header record
    documentKey = header.GenerateNextRecord(documentKey);

    // update the entity value so that the caller's entity gets the saved document key
    InternalReflectionHelper.SetPropertyValue(entity, "Id", documentKey);

    // serialize the document into binary JSON for storage in the data file
    byte[] binaryJson = _serializer.Serialize<T>(entity);

    using (var deletedDataIndexFile = new BlobIndexFile(
        _deletedDataIndexFilePath, DiskBufferSizes.Larger, DiskBufferSizes.Default))
    {
        // check whether a deleted slot is large enough to hold the new document
        var dataIndex = deletedDataIndexFile.GetBlobIndexWithEnoughSpace(binaryJson.Length);
        if (dataIndex != null)
        {
            // reuse the deleted slot: assign this document key to the old index
            dataIndex.ChangeDocumentKey(documentKey);
            dataIndex.UpdateRecordLength(binaryJson.Length);
        }
        else
        {
            // no reusable slot: create a new index at the end of the data file
            dataIndex = new BlobIndex(
                documentKey, this.FileSize, binaryJson.Length, this.PaddingFactor);

            // grow the recorded size of the data file
            this.FileSize = this.FileSize + dataIndex.RecordLength + dataIndex.PaddingLength;
        }

        using (var dataIndexFile = new BlobIndexFile(
            _dataIndexFilePath, DiskBufferSizes.Larger, DiskBufferSizes.Default))
        {
            // add the index entry; AddIndexCheckForDuplicate throws a
            // ConcurrencyException on a duplicate key, so no data is saved
            if (checkForDuplicate)
            {
                dataIndexFile.AddIndexCheckForDuplicate(dataIndex);
            }
            else
            {
                dataIndexFile.AddIndex(dataIndex);
            }
        }

        // remove the index from the deleted index file if it exists there
        deletedDataIndexFile.RemoveIndex(dataIndex.DocumentKey);

        using (var fileStreamWriter = new FileStream(
            _dataFilePath, FileMode.OpenOrCreate, FileAccess.Write, FileShare.Read,
            (int)DiskBufferSizes.Default, FileOptions.SequentialScan))
        using (var binaryWriter = new BinaryWriter(fileStreamWriter))
        {
            // position the writer at the slot assigned by the index
            binaryWriter.BaseStream.Position = dataIndex.Pointer;

            // write the record followed by its padding
            binaryWriter.Write(binaryJson);
            if (dataIndex.PaddingLength > 0)
            {
                binaryWriter.Write(new byte[dataIndex.PaddingLength]);
            }

            binaryWriter.Flush();
        }

        // persist the updated header record
        UpdateDataHeader(header);
    }

    return entity;
}
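A hypothetical call into this document store, showing the key-generation contract implied by the method: an Id of 0 asks the store to generate the next key, while a non-zero Id is treated as caller-specified and checked for duplicates. The Widget type and the store's constructor are illustrative, not from the source project:

// Illustrative usage; type and constructor arguments are hypothetical.
var store = new DocumentStore<Widget>(@"C:\data\widgets");

var widget = store.Create(new Widget { Id = 0, Name = "gizmo" });
Console.WriteLine(widget.Id); // the generated document key, written back by Create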