private FileObjectCollection PopulateCollection<T>(FileObject fileObject, IList<T> records) where T : IRecord
{
    FileObjectCollection collection = new FileObjectCollection() { fileObject };
    TotalFilesFormatted++;
    TotalRecords += records.Count;

    if (Config.IsKustoConfigured())
    {
        // kusto native format is csv.
        // kusto json ingest is 2 to 3 times slower and does *not* use standard json format:
        // it expects one json document per line with no separating comma,
        // so csv plus compression gives the best ingest performance.
        collection = SerializeCsv(fileObject, records);

        if (Config.KustoCompressed)
        {
            collection.ForEach(x => x.Stream.Compress());
        }
    }

    if (Config.IsLogAnalyticsConfigured())
    {
        // log analytics is kusto based but only accepts uncompressed json format ingest
        collection = SerializeJson(fileObject, records);
    }

    collection.ForEach(x => SaveToCache(x));
    records.Clear();
    return collection;
}
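// Hedged sketch only: Stream.Compress() above is assumed to gzip the buffered bytes.
// The helper below shows the general idea with System.IO.Compression (GzipCompress is
// an illustrative name, not the actual stream implementation used by this class).
private static byte[] GzipCompress(byte[] uncompressedBytes)
{
    using (System.IO.MemoryStream compressedStream = new System.IO.MemoryStream())
    {
        using (System.IO.Compression.GZipStream gzipStream =
            new System.IO.Compression.GZipStream(compressedStream, System.IO.Compression.CompressionLevel.Optimal))
        {
            // write the uncompressed payload through the gzip stream into memory
            gzipStream.Write(uncompressedBytes, 0, uncompressedBytes.Length);
        }

        // ToArray() remains valid on a MemoryStream even after it has been closed
        return compressedStream.ToArray();
    }
}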
private FileObjectCollection SerializeJson<T>(FileObject fileObject, IList<T> records)
{
    Log.Debug("enter");
    FileObjectCollection collection = new FileObjectCollection() { fileObject };
    int counter = 0;

    string sourceFile = fileObject.FileUri.ToLower().Replace(JsonExtension, "");
    fileObject.FileUri = $"{sourceFile}{JsonExtension}";

    List<byte> jsonSerializedBytes = new List<byte>();
    byte[] leftBracket = Encoding.UTF8.GetBytes("[");
    byte[] rightBracket = Encoding.UTF8.GetBytes("]");
    byte[] comma = Encoding.UTF8.GetBytes(",");

    jsonSerializedBytes.AddRange(leftBracket);

    foreach (T record in records)
    {
        byte[] recordBytes = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(record));

        if (jsonSerializedBytes.Count > leftBracket.Length
            && jsonSerializedBytes.Count + recordBytes.Length + rightBracket.Length > MaxJsonTransmitBytes)
        {
            // close the current json array, flush it to the current file object,
            // and start a new file object for the remaining records.
            // trim the trailing comma so the flushed payload stays valid json.
            jsonSerializedBytes.RemoveRange(jsonSerializedBytes.Count - comma.Length, comma.Length);
            jsonSerializedBytes.AddRange(rightBracket);
            fileObject.Stream.Set(jsonSerializedBytes.ToArray());
            fileObject.Length = fileObject.Stream.Get().Length;
            Log.Debug($"json serialized size: {jsonSerializedBytes.Count} file: {fileObject.FileUri}");

            jsonSerializedBytes.Clear();
            fileObject = new FileObject($"{sourceFile}.{counter}{JsonExtension}", fileObject.BaseUri);
            jsonSerializedBytes.AddRange(leftBracket);
            collection.Add(fileObject);
        }

        jsonSerializedBytes.AddRange(recordBytes);
        jsonSerializedBytes.AddRange(comma);
        counter++;
    }

    if (records.Count > 0)
    {
        // remove the trailing comma after the last record
        jsonSerializedBytes.RemoveRange(jsonSerializedBytes.Count - comma.Length, comma.Length);
    }

    jsonSerializedBytes.AddRange(rightBracket);
    fileObject.Stream.Set(jsonSerializedBytes.ToArray());
    fileObject.Length = fileObject.Stream.Get().Length;
    Log.Debug($"json serialized size: {jsonSerializedBytes.Count} file: {fileObject.FileUri}");
    return collection;
}
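// Hedged sketch of the size-capped json splitting above, kept free of class state for
// readability. SplitIntoJsonArrays and maxBytes are illustrative names (assumptions),
// not members of this class; each returned buffer is a complete json array.
private static List<byte[]> SplitIntoJsonArrays<T>(IEnumerable<T> records, int maxBytes)
{
    List<byte[]> chunks = new List<byte[]>();
    List<byte> buffer = new List<byte>(Encoding.UTF8.GetBytes("["));

    foreach (T record in records)
    {
        byte[] recordBytes = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(record));

        // flush when appending this record, a separator, and "]" would exceed the cap
        if (buffer.Count > 1 && buffer.Count + recordBytes.Length + 2 > maxBytes)
        {
            buffer.AddRange(Encoding.UTF8.GetBytes("]"));
            chunks.Add(buffer.ToArray());
            buffer = new List<byte>(Encoding.UTF8.GetBytes("["));
        }

        // comma-separate records after the first one in each chunk
        if (buffer.Count > 1)
        {
            buffer.AddRange(Encoding.UTF8.GetBytes(","));
        }

        buffer.AddRange(recordBytes);
    }

    buffer.AddRange(Encoding.UTF8.GetBytes("]"));
    chunks.Add(buffer.ToArray());
    return chunks;
}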
private FileObjectCollection SerializeCsv<T>(FileObject fileObject, IList<T> records)
{
    Log.Debug("enter");
    FileObjectCollection collection = new FileObjectCollection() { fileObject };
    int counter = 0;

    string sourceFile = fileObject.FileUri.ToLower().Replace(CsvExtension, "");
    fileObject.FileUri = $"{sourceFile}{CsvExtension}";

    List<byte> csvSerializedBytes = new List<byte>();

    foreach (T record in records)
    {
        // each record's ToString() is expected to produce a csv row
        byte[] recordBytes = Encoding.UTF8.GetBytes(record.ToString());

        if (csvSerializedBytes.Count + recordBytes.Length > MaxCsvTransmitBytes)
        {
            // flush the current buffer to the current file object and
            // start a new file object for the remaining records
            fileObject.Stream.Set(csvSerializedBytes.ToArray());
            fileObject.Length = fileObject.Stream.Get().Length;
            Log.Debug($"csv serialized size: {csvSerializedBytes.Count} file: {fileObject.FileUri}");

            csvSerializedBytes.Clear();
            fileObject = new FileObject($"{sourceFile}.{counter}{CsvExtension}", fileObject.BaseUri);
            collection.Add(fileObject);
        }

        csvSerializedBytes.AddRange(recordBytes);
        counter++;
    }

    fileObject.Stream.Set(csvSerializedBytes.ToArray());
    fileObject.Length = fileObject.Stream.Get().Length;
    Log.Debug($"csv serialized size: {csvSerializedBytes.Count} file: {fileObject.FileUri}");
    return collection;
}
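// Hedged sketch of the equivalent csv splitting: no brackets or separators are needed
// because each record's ToString() is assumed to already be a newline-terminated csv row.
// SplitCsvRows and maxBytes are illustrative names (assumptions), not members of this class.
private static List<byte[]> SplitCsvRows(IEnumerable<string> csvRows, int maxBytes)
{
    List<byte[]> chunks = new List<byte[]>();
    List<byte> buffer = new List<byte>();

    foreach (string row in csvRows)
    {
        byte[] rowBytes = Encoding.UTF8.GetBytes(row);

        // flush before the buffer would exceed the transmit cap
        if (buffer.Count > 0 && buffer.Count + rowBytes.Length > maxBytes)
        {
            chunks.Add(buffer.ToArray());
            buffer.Clear();
        }

        buffer.AddRange(rowBytes);
    }

    if (buffer.Count > 0)
    {
        chunks.Add(buffer.ToArray());
    }

    return chunks;
}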
public void ImportJson(FileObjectCollection fileObjectCollection)
{
    fileObjectCollection.ForEach(x => ImportJson(x));
}
public void IngestMultipleFiles(FileObjectCollection fileObjectCollection)
{
    fileObjectCollection.ForEach(x => IngestSingleFile(x));
}