private static async Task<Func<MemoryStream, bool, int>> GetDataChunkUploader(this IFileMedia fileMedia, IFileConfiguration fileConfig, Interfaces.ILog log)
{
    var getContext = await fileMedia.GetWaveContextFunc(fileConfig.Name, log);
    var xmdJson = fileConfig.GetMetadataBuilder();
    var chunkNo = 0;
    // Returns a delegate that uploads one CSV chunk per call and, when isFinalizing is true,
    // finalizes the dataset upload after the chunk is accepted.
    return (stream, isFinalizing) =>
    {
        var context = getContext();
        if (string.IsNullOrWhiteSpace(context.SetId))
        {
            // Lazily create the dataset on the first chunk.
            context.InitiateDatasetUpload(xmdJson(), fileMedia.Operation, log).Wait();
            if (string.IsNullOrWhiteSpace(context.SetId))
            {
                throw new ImporterException(
                    Localization.GetLocalizationString("Could not get job id from wave, cannot upload chunks"));
            }
        }
        stream.Flush();
        var encCsvChunk = Convert.ToBase64String(stream.ToArray());
        var payload = $"{{\"InsightsExternalDataId\":\"{context.SetId}\",\"PartNumber\":{++chunkNo},\"DataFile\":\"{encCsvChunk}\"}}";
        var tryCount = 0;
        while (true)
        {
            try
            {
                using (var client = new HttpClient())
                {
                    var content = new StringContent(payload, Encoding.ASCII);
                    content.Headers.ContentType = new MediaTypeHeaderValue("application/json");
                    var url = $"{context.EntryPoint}/services/data/v41.0/sobjects/InsightsExternalDataPart";
                    client.AddAuthorization(context);
                    log.Debug($"Uploading chunk #{chunkNo}");
                    var response = client.PostAsync(url, content).Result;
                    if (response.IsSuccessStatusCode)
                    {
                        log.Debug($"Uploaded chunk #{chunkNo}");
                        if (isFinalizing)
                        {
                            context.FinalizeDatasetUpload(log).Wait();
                        }
                        return chunkNo;
                    }
                    // Non-success status: fall through to the retry handling below.
                }
            }
            catch (Exception ex)
            {
                log.Error(Localization.GetLocalizationString("Error while uploading chunk #{0} - {1}", chunkNo, ex.Message));
                log.Debug(ex.ToString());
            }
            if (++tryCount > 4)
            {
                throw new ImporterUploadException(Localization.GetLocalizationString("Failed to upload dataset."));
            }
            log.Debug(Localization.GetLocalizationString("Retrying to upload chunk#{0}", chunkNo));
        }
    };
}
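// Hypothetical usage sketch (not part of the original pipeline): shows how the chunk uploader
// returned above might be driven by a caller. The method name, the "chunks" sequence, and the
// way the last chunk is detected are assumptions for illustration; the real caller wires the
// uploader up from the CSV-writing side of the exporter.
private static async Task UploadChunksExample(IFileMedia fileMedia, IFileConfiguration fileConfig,
                                              IEnumerable<MemoryStream> chunks, Interfaces.ILog log)
{
    var uploadChunk = await fileMedia.GetDataChunkUploader(fileConfig, log);
    using (var enumerator = chunks.GetEnumerator())
    {
        var hasCurrent = enumerator.MoveNext();
        while (hasCurrent)
        {
            var buffer = enumerator.Current;
            hasCurrent = enumerator.MoveNext();
            // Pass true on the last chunk so the dataset upload is finalized.
            uploadChunk(buffer, !hasCurrent);
        }
    }
}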
public static Func<IDataRow> ParseData(this Func<ISourceRow> getLineFunc, IFileConfiguration fileConfig, Interfaces.ILog logger)
{
    if (fileConfig == null)
    {
        var msg = Localization.GetLocalizationString("Could not get Source Configuration...");
        logger?.Fatal(msg);
        throw new ArgumentException(msg);
    }
    logger?.Info(string.Format(Localization.GetLocalizationString("Parsing data from {0}"), fileConfig.Name));
    var parsers = fileConfig.GetRowParsers();
    var currentRecord = 0L;
    var parsedRecords = 0L;
    return () =>
    {
        var line = getLineFunc();
        if (line != null)
        {
            currentRecord++;
            try
            {
                var row = parsers(line, currentRecord, currentRecord);
                if (row != null)
                {
                    parsedRecords++;
                    return row;
                }
            }
            catch (Exception)
            {
                logger?.Error(Localization.GetLocalizationString("Failed to parse line: \"{0}\"",
                    string.Join(",", line.Fields.Select(x => x.Source))));
                throw; // rethrow without resetting the stack trace
            }
            // Parser returned null: surface the raw line as an error row instead of silently dropping it.
            return new DataRow(
                new Dictionary<string, IValue>
                {
                    {
                        "raw",
                        new ValueWrapper<string>(
                            string.Join(",", line.Fields.Select(x => x.Source)),
                            Localization.GetLocalizationString("Parse error"),
                            true,
                            string.Join(",", line.Fields.Select(x => x.Source)))
                    }
                },
                Localization.GetLocalizationString("Could not parse line."),
                currentRecord,
                line.LineNumber,
                line.Context.SourcePath,
                line.Context.FileConfiguration.Name);
        }
        // End of input: log the summary once and signal completion with null.
        logger?.Info(string.Format(Localization.GetLocalizationString("Parsed {0}/{1} records from {2}"),
            parsedRecords, currentRecord, fileConfig.Name));
        return null;
    };
}
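// Hypothetical usage sketch: ParseData composes with a line source to form a pull-based pipeline;
// each call yields the next parsed IDataRow and null signals end of input. The method name and the
// "getSourceRow" delegate are assumptions for illustration.
private static long CountParsedRowsExample(Func<ISourceRow> getSourceRow, IFileConfiguration fileConfig, Interfaces.ILog log)
{
    var getRow = getSourceRow.ParseData(fileConfig, log);
    var count = 0L;
    for (var row = getRow(); row != null; row = getRow())
    {
        count++;
    }
    return count;
}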
private static Stream GetLocalStream(this IFileMedia media, Interfaces.ILog log)
{
    // For anything other than an append, start from an empty file.
    if (File.Exists(media.Path) && media.Operation != DataOperation.Append)
    {
        try
        {
            File.Delete(media.Path);
        }
        catch (Exception ex)
        {
            log.Error(ex.ToString());
            return null;
        }
    }
    return File.Open(media.Path, FileMode.OpenOrCreate, FileAccess.Write, FileShare.Read);
}
public static Func<IDataRow, long> GetCsvWriter(this StreamWriter stream, IFile fileConfig, Interfaces.ILog log)
{
    var nullValue = string.IsNullOrWhiteSpace(fileConfig.NullValue) ? "" : fileConfig.NullValue;
    var delimiter = string.IsNullOrWhiteSpace(fileConfig.Delimiter) ? DEFAULT_DELIMITER[0] : fileConfig.Delimiter[0];
    var filters = fileConfig.Rows.Select(x => x.PrepareTargetFilter()).ToList();
    var rowCount = 0L;
    var getQualifiedString = fileConfig.GetQualifiedStrFunc(log);
    var isHeadersWritten = false;
    // Returns a delegate that writes one IDataRow per call and reports the running row count.
    return row =>
    {
        // Write the header row(s) once, before the first data row.
        if (!isHeadersWritten && fileConfig.HasHeaders)
        {
            for (var r = 0; r < fileConfig.Rows.Count; r++)
            {
                for (var c = 0; c < fileConfig.Rows[r].Columns.Count; c++)
                {
                    if (c > 0)
                    {
                        stream.Write(delimiter);
                    }
                    stream.Write(fileConfig.Rows[r].Columns[c].Alias ?? fileConfig.Rows[r].Columns[c].Name);
                }
                stream.WriteLine();
            }
            isHeadersWritten = true;
        }
        // Rows that failed upstream are logged and skipped rather than written.
        if (!string.IsNullOrWhiteSpace(row.Error))
        {
            log.Error(Localization.GetLocalizationString("Error in file {0}, line {1}", row.SourcePath, row.RawLineNumber)
                      + " - " + (row.Error ?? "") + "\r\n\t"
                      + string.Join(",", row.Columns.Values.Select(x => x.Source)));
            return rowCount;
        }
        for (var r = 0; r < fileConfig.Rows.Count; r++)
        {
            if (!filters[r](row))
            {
                continue;
            }
            var columns = fileConfig.Rows[r].Columns;
            var rowString = new StringBuilder();
            for (var c = 0; c < columns.Count; c++)
            {
                var column = columns[c];
                var value = row[column.Alias ?? column.Name];
                // Missing (null) or explicitly null values fall back to the configured null token.
                var valueString = (value != null && !value.IsNull)
                    ? getQualifiedString(value.ToString(column.Format), column.Type)
                    : nullValue;
                rowString.Append(valueString);
                if (columns.Count - 1 > c)
                {
                    rowString.Append(delimiter);
                }
            }
            stream.WriteLine(rowString.ToString());
            rowCount++;
        }
        return rowCount;
    };
}
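// Hypothetical usage sketch: GetLocalStream and GetCsvWriter are typically combined with a row
// source such as the delegate produced by ParseData above. The method name and the "getRow"
// parameter are assumptions for illustration; the IFile passed to GetCsvWriter is assumed to be
// the file-level part of the configuration.
private static long WriteCsvExample(IFileMedia media, IFile fileConfig, Func<IDataRow> getRow, Interfaces.ILog log)
{
    var stream = media.GetLocalStream(log);
    if (stream == null)
    {
        return 0;
    }
    using (var writer = new StreamWriter(stream))
    {
        var writeRow = writer.GetCsvWriter(fileConfig, log);
        var written = 0L;
        for (var row = getRow(); row != null; row = getRow())
        {
            written = writeRow(row);
        }
        writer.Flush();
        return written;
    }
}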