private async Task <BulkImportStatus> ValidateImportStream(StreamReader sr)
        {
            var status = new BulkImportStatus();

            if (_csvService.IsEmpty(sr))
            {
                status.ValidationError = "No headers - File is empty so cannot be processed";
            }

            if (_csvService.HasData(sr))
            {
                status.ValidationError = "Missing data - there is no data to process";
            }

            IList <string> csvFields = CsvFields(sr);

            var fieldValidation = await _bulkImportService.ValidateFields(csvFields);

            if (fieldValidation.IsValid == false)
            {
                status.HeaderErrors = fieldValidation.Errors;
            }

            if (status.ValidationError != null || status.HeaderErrors.Any())
            {
                status.ImportFileIsValid = false;
            }
            return(status);
        }
        /// <summary>
        /// Handles a person CSV import: validates the stream, converts it to a
        /// contact list and queues the bulk import into Marketo.
        /// </summary>
        /// <param name="personCsv">Stream containing the uploaded person CSV.</param>
        /// <returns>The import status, including validation errors when the file is rejected.</returns>
        public async Task<BulkImportStatus> Handle(Stream personCsv)
        {
            _logger.LogInformation("about to handle person import");

            using (var sr = new StreamReader(personCsv))
            {
                // FIX: the original created a BulkImportStatus that was immediately
                // overwritten here — the dead initialization has been removed.
                var fileStatus = await ValidateImportStream(sr);

                if (!fileStatus.ImportFileIsValid)
                {
                    return fileStatus;
                }

                IList<dynamic> contacts;
                try
                {
                    contacts = await _csvService.ConvertToList(sr);
                }
                catch (Exception e)
                {
                    // A malformed CSV should produce a readable status for the
                    // caller rather than an unhandled exception.
                    _logger.LogError(e, "Unable to process csv file");

                    return new BulkImportStatus
                    {
                        ImportFileIsValid = false,
                        ValidationError   = "Unable to parse CSV file, the format of the file is invalid"
                    };
                }

                return await _bulkImportService.ImportPeople(contacts);
            }
        }
// Example #3
        /// <summary>
        /// Builds a human-readable text report for a Marketo bulk person import,
        /// covering validation failures, queued jobs and (when available) per-job status.
        /// </summary>
        /// <param name="importStatus">The import status to report on.</param>
        /// <returns>The formatted report text.</returns>
        public string CreateImportReport(BulkImportStatus importStatus)
        {
            // Hoisted: the same 80-char separator was repeated as a literal four times.
            const string Separator = "################################################################################";

            var sb = new StringBuilder();

            sb.Append(Separator).AppendLine();
            sb.Append("#################### Marketo bulk person import report #########################").AppendLine();
            sb.Append(Separator).AppendLine().AppendLine();
            sb.Append($"Import time: {importStatus.StartTime}").AppendLine();
            sb.Append($"Import duration: {importStatus.Duration}ms").AppendLine().AppendLine();

            if (importStatus.Status == ImportStatus.ValidationFailed)
            {
                ReportValidationErrors(importStatus, sb);
                return sb.ToString();
            }

            sb.Append(
                $"{importStatus.BulkImportJobs.Count} jobs have been queued for import into marketo. Please see the status of each import job below:")
            .AppendLine().AppendLine();

            // FIX: importStatus is already dereferenced unconditionally above, so the
            // original's "importStatus?." here was redundant (and misleading); only
            // BulkImportJobStatus itself may legitimately be null.
            if (importStatus.BulkImportJobStatus?.Count > 0)
            {
                ReportStatus(importStatus, sb);
            }
            else
            {
                ReportJobs(importStatus, sb);
            }

            sb.Append(Separator);
            return sb.ToString();
        }
// Example #4
        /// <summary>
        /// Imports a list of custom objects into Marketo in size-limited chunks,
        /// throttling to at most 10 concurrently queued jobs by polling job status
        /// and waiting for one to finish before queueing more.
        /// </summary>
        /// <typeparam name="T">Type of the objects being imported.</typeparam>
        /// <param name="data">Objects to import.</param>
        /// <param name="objectName">Name of the Marketo custom object.</param>
        /// <returns>The accumulated status containing every queued bulk import job.</returns>
        public async Task <BulkImportStatus> ImportCustomObject <T>(IList <T> data, string objectName)
        {
            var fileStatus = new BulkImportStatus();

            // Chunk size is decided by the chunking service from the serialized byte count.
            var contactsChunks = _chunkingService.GetChunks(_csvService.GetByteCount(data), data).ToList();

            _logger.LogInformation($"ImportCustomObject: total of {data.Count} {objectName} to import, in {contactsChunks.Count} chunks");

            var        runningJobsCount = 0;     // jobs queued but not yet observed complete/failed
            var        index            = 1;     // 1-based chunk counter for logging
            List <int> completedJobs    = new List <int>();

            foreach (var contactsList in contactsChunks)
            {
                var importResult =
                    await ImportChunkedObject(contactsList, objectName);

                fileStatus.BulkImportJobs.Add(importResult);

                _logger.LogInformation($"ImportCustomObject chunk {index} of {contactsChunks.Count()} for object: {objectName} has been queued. \n Job details: {importResult} ");

                runningJobsCount++;
                index++;

                // Throttle: once more than 9 jobs are outstanding, block until at
                // least one of them reaches a terminal state.
                if (runningJobsCount > 9)
                {
                    bool getJobStatus     = true;
                    // Snapshot of batch ids not yet seen as complete/failed.
                    var  notCompletedJobs = fileStatus.BulkImportJobs.Where(x => !completedJobs.Any(y => y == x.batchId))
                                            .Select(x => x.batchId).ToList();
                    while (getJobStatus)
                    {
                        foreach (var job in notCompletedJobs)
                        {
                            var val = await _marketoBulkImportClient.GetStatus(job);

                            // NOTE(review): FirstOrDefault() will be null if the status
                            // response has no results, making .status throw an NRE —
                            // confirm the client always returns at least one result.
                            var status = val.Result.FirstOrDefault().status;
                            _logger.LogInformation($"BatchStatus batchId : {job} - status :{status}");

                            if (status == "Complete" || status == "Failed")
                            {
                                // One job finished: release the throttle, record the
                                // job and resume queueing. Only one completion is
                                // consumed per throttle pass (break + loop exit).
                                getJobStatus = false;
                                runningJobsCount--;
                                completedJobs.Add(job);
                                break;
                            }
                            else
                            {
                                // Back off before polling the next outstanding job.
                                await Task.Delay(1000);
                            }
                        }
                    }
                }
            }

            return(fileStatus);
        }
// Example #5
        /// <summary>
        /// Appends a separator-framed details section for every bulk import job
        /// status to the report builder.
        /// </summary>
        /// <param name="importStatus">Status holding the per-job status list.</param>
        /// <param name="sb">Report builder to append to.</param>
        private static void ReportStatus(BulkImportStatus importStatus, StringBuilder sb)
        {
            var total = importStatus.BulkImportJobStatus.Count;

            for (var jobIndex = 0; jobIndex < total; jobIndex++)
            {
                var jobStatus = importStatus.BulkImportJobStatus[jobIndex];

                sb.Append("################################################################################").AppendLine();
                sb.Append($"Bulk import job {jobIndex + 1} of {total} details:").AppendLine();
                sb.Append(jobStatus.ToString());
                sb.Append("################################################################################").AppendLine();
            }
        }
// Example #6
        /// <summary>
        /// Activity function that renders an import report for the given jobs and
        /// writes it to blob storage via a runtime-bound TextWriter.
        /// </summary>
        /// <param name="myJobs">The import status to report on; also supplies the blob container/name.</param>
        /// <param name="binder">Runtime binder used to create the output blob writer.</param>
        /// <param name="log">Function logger (unused).</param>
        public async Task Run([ActivityTrigger] BulkImportStatus myJobs, Binder binder, ILogger log)
        {
            var reportText = _reportService.CreateImportReport(myJobs);

            var blobPath   = $"{myJobs.Container}/Report/{myJobs.Name}.report.txt";
            var attributes = new Attribute[]
            {
                new BlobAttribute(blobPath, FileAccess.Write),
                new StorageAccountAttribute("Storage")
            };

            using (var writer = await binder.BindAsync <TextWriter>(attributes))
            {
                await writer.WriteAsync(reportText);
            }
        }
// Example #7
 /// <summary>
 /// Appends CSV validation failure details to the report: either the file-level
 /// validation error, or the list of headers Marketo rejected.
 /// </summary>
 /// <param name="importStatus">Status carrying the validation results.</param>
 /// <param name="sb">Report builder to append to.</param>
 private void ReportValidationErrors(BulkImportStatus importStatus, StringBuilder sb)
 {
     sb.Append("################################################################################").AppendLine();

     if (importStatus.ValidationError == null)
     {
         // Header validation failed: list every header that was rejected.
         sb.Append("Some headers provided in the CSV file are not valid in Marketo,").AppendLine().AppendLine();
         sb.Append("Headers failing validation:").AppendLine();
         foreach (var headerError in importStatus.HeaderErrors)
         {
             sb.Append(headerError).AppendLine();
         }
     }
     else
     {
         // File-level failure (empty file, missing data, unparseable CSV).
         sb.Append("The provided csv file is not a valid csv, please check the format of the file and try import again").AppendLine();
         sb.Append($"Error description: {importStatus.ValidationError}").AppendLine();
     }
 }
        /// <summary>
        /// Handles a campaign-member CSV import: validates the stream, converts it
        /// to a contact list, splits it into chunks and queues each chunk for import
        /// into the given Marketo campaign.
        /// </summary>
        /// <param name="personCsv">Stream containing the uploaded CSV.</param>
        /// <param name="campaignId">Identifier of the Marketo campaign the members join.</param>
        /// <returns>The import status including every queued bulk import job.</returns>
        public async Task<BulkImportStatus> Handle(Stream personCsv, string campaignId)
        {
            _logger.LogInformation("about to handle campaign members import");

            using (var sr = new StreamReader(personCsv))
            {
                // FIX: the original created a BulkImportStatus that was immediately
                // overwritten here — the dead initialization has been removed.
                var fileStatus = await ValidateImportStream(sr);

                if (!fileStatus.ImportFileIsValid)
                {
                    return fileStatus;
                }

                IList<dynamic> contacts;
                try
                {
                    contacts = await _csvService.ConvertToList(sr);
                }
                catch (Exception e)
                {
                    // Consistent with the person import handler: a malformed CSV
                    // yields a readable status instead of an unhandled exception.
                    _logger.LogError(e, "Unable to process csv file");

                    return new BulkImportStatus
                    {
                        ImportFileIsValid = false,
                        ValidationError   = "Unable to parse CSV file, the format of the file is invalid"
                    };
                }

                var contactsChunks = _chunkingService.GetChunks(sr.BaseStream.Length, contacts).ToList();

                var index = 1;

                foreach (var contactsList in contactsChunks)
                {
                    var importResult = await _bulkImportService.ImportToCampaign(contactsList, campaignId);

                    fileStatus.BulkImportJobs.Add(importResult);

                    // Count property, not the LINQ Count() extension, on a List<T>.
                    _logger.LogInformation($"Bulk import chunk {index} of {contactsChunks.Count} to campaign ID {campaignId} has been queued. \n Job details: {importResult} ");

                    index++;
                }

                return fileStatus;
            }
        }
// Example #9
        /// <summary>
        /// Imports a list of custom objects into Marketo, splitting the data into
        /// size-limited chunks and queueing one bulk import job per chunk.
        /// </summary>
        /// <typeparam name="T">Type of the objects being imported.</typeparam>
        /// <param name="data">Objects to import.</param>
        /// <param name="objectName">Name of the Marketo custom object.</param>
        /// <returns>The import status containing every queued job.</returns>
        public async Task<BulkImportStatus> ImportCustomObject<T>(IList<T> data, string objectName)
        {
            var fileStatus = new BulkImportStatus();

            // Chunk size is decided by the chunking service from the serialized byte count.
            var contactsChunks = _chunkingService.GetChunks(_csvService.GetByteCount(data), data).ToList();

            _logger.LogInformation($"ImportCustomObject: total of {data.Count} {objectName} to import, in {contactsChunks.Count} chunks");

            var index = 1;

            foreach (var contactsList in contactsChunks)
            {
                var importResult = await ImportChunkedObject(contactsList, objectName);

                fileStatus.BulkImportJobs.Add(importResult);

                // FIX: use the Count property rather than the LINQ Count() extension
                // on a materialized List<T> (same value, no enumerator allocation).
                _logger.LogInformation($"ImportCustomObject chunk {index} of {contactsChunks.Count} for object: {objectName} has been queued. \n Job details: {importResult} ");

                index++;
            }

            return fileStatus;
        }
// Example #10
        /// <summary>
        /// Imports a list of leads (people) into Marketo, splitting the data into
        /// size-limited chunks and queueing one bulk import job per chunk.
        /// </summary>
        /// <typeparam name="T">Type of the lead records being imported.</typeparam>
        /// <param name="leads">Leads to import.</param>
        /// <returns>The import status containing every queued job.</returns>
        public async Task<BulkImportStatus> ImportPeople<T>(IList<T> leads)
        {
            var fileStatus = new BulkImportStatus();

            // Chunk size is decided by the chunking service from the serialized byte count.
            var contactsChunks = _chunkingService.GetChunks(_csvService.GetByteCount(leads), leads).ToList();

            _logger.LogInformation($"ImportPeople: total of {leads.Count} leads to import, in {contactsChunks.Count} chunks");

            var index = 1;

            foreach (var contactsList in contactsChunks)
            {
                var importResult = await ImportChunkedPeople(contactsList);

                fileStatus.BulkImportJobs.Add(importResult);

                // FIX: use the Count property rather than the LINQ Count() extension
                // on a materialized List<T> (same value, no enumerator allocation).
                _logger.LogInformation($"ImportPeople chunk {index} of {contactsChunks.Count} has been queued. \n Job details: {importResult} ");

                index++;
            }

            return fileStatus;
        }