/// <summary>
/// Rotates the pending batch into <c>_currentBatch</c> and uploads it to the first
/// configured sink: the Kusto ingest client, else the output file (as a comma-joined
/// JSON array fragment), else the console (tab-separated values). Serialized under
/// <c>uploadLock</c> so only one upload runs at a time.
/// </summary>
/// <param name="lastBatch">True when this is the final batch; suppresses the trailing
/// comma after the serialized rows so the surrounding JSON array can be closed.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when the previous batch is still marked in flight (<c>_currentBatch</c> non-null).
/// </exception>
public void UploadBatch(bool lastBatch)
{
    lock (uploadLock)
    {
        if (_currentBatch != null)
        {
            throw new InvalidOperationException("Upload must not be called before the batch currently being uploaded is complete");
        }

        // Rotate: take ownership of the accumulated rows and start a fresh batch
        // so producers can keep appending while this one is uploaded.
        _currentBatch = _nextBatch;
        _nextBatch = new List<IDictionary<string, object>>();

        try
        {
            if (_currentBatch.Count > 0)
            {
                if (_ingestClient != null)
                {
                    var data = new DictionaryDataReader(_currentBatch);
                    _ingestClient.IngestFromDataReader(data, _ingestionProperties);
                    Console.Write("{0} ", _currentBatch.Count);
                }
                else if (outputFile != null)
                {
                    // Strip the surrounding '[' / ']' so consecutive batches concatenate
                    // into a single JSON array; serialize once instead of once per branch.
                    string serialized = JsonConvert.SerializeObject(_currentBatch, Formatting.Indented).Trim('[', ']');
                    string content = lastBatch ? serialized : serialized + ",";
                    try
                    {
                        outputFile.Write(content);
                    }
                    catch (Exception writeEx)
                    {
                        // Best-effort write (matching the original intent), but surface
                        // the failure instead of swallowing it silently.
                        Console.WriteLine(writeEx);
                    }
                    Console.Write("{0} ", _currentBatch.Count);
                }
                else
                {
                    // No sink configured: dump rows to the console, tab-separated.
                    foreach (var item in _currentBatch)
                    {
                        Console.WriteLine(string.Join("\t", item.Values));
                    }
                }
            }

            _currentBatch = null;
            _lastUploadTime = DateTime.UtcNow;
        }
        catch (Exception e)
        {
            // NOTE(review): on failure _currentBatch stays non-null, so every later
            // UploadBatch call will throw until the process restarts — confirm this
            // "wedge on error" behavior is intended before changing it.
            Console.WriteLine(e);
        }
    }
}
/// <summary>
/// Rotates the pending batch into <c>_currentBatch</c> and uploads it to the first
/// configured sink: the Kusto ingest client, else a uniquely named JSON blob in the
/// Azure blob container. Serialized under <c>uploadLock</c> so only one upload runs
/// at a time.
/// </summary>
/// <param name="lastBatch">Unused in this variant; kept for interface compatibility
/// with the file-output overload of the uploader.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when the previous batch is still marked in flight (<c>_currentBatch</c> non-null).
/// </exception>
public void UploadBatch(bool lastBatch)
{
    lock (uploadLock)
    {
        if (_currentBatch != null)
        {
            throw new InvalidOperationException("Upload must not be called before the batch currently being uploaded is complete");
        }

        // Rotate: take ownership of the accumulated rows and start a fresh batch
        // so producers can keep appending while this one is uploaded.
        _currentBatch = _nextBatch;
        _nextBatch = new List<IDictionary<string, object>>();

        try
        {
            if (_currentBatch.Count > 0)
            {
                if (_ingestClient != null)
                {
                    var data = new DictionaryDataReader(_currentBatch);
                    _ingestClient.IngestFromDataReader(data, _ingestionProperties);
                    Console.Write("{0} ", _currentBatch.Count);
                }
                else if (blobContainerClient != null)
                {
                    // Create a blob with a unique name and upload the batch.
                    string blobName = $"{Guid.NewGuid()}_1_{Guid.NewGuid():N}.json";
                    var blobClient = blobContainerClient.GetBlobClient(blobName);
                    // GetAwaiter().GetResult() instead of Wait(): propagates the original
                    // exception rather than wrapping it in an AggregateException, so the
                    // catch below logs the real failure. (Still sync-over-async — the
                    // method holds a lock, so it cannot be made async without an
                    // interface change.)
                    UploadToContainerAsync(blobClient, _currentBatch).GetAwaiter().GetResult();
                }
            }

            _currentBatch = null;
            _lastUploadTime = DateTime.UtcNow;
        }
        catch (Exception e)
        {
            // NOTE(review): on failure _currentBatch stays non-null, so every later
            // UploadBatch call will throw until the process restarts — confirm this
            // "wedge on error" behavior is intended before changing it.
            Console.WriteLine(e);
        }
    }
}