/// <summary>
/// Receives an uploaded XML file in the request body, saves it to disk and
/// registers an arrival message for the product. Returns "success" or "error";
/// never throws (controller boundary — all failures are logged instead).
/// </summary>
public string Upload()
{
    try
    {
        // NOTE(review): product id is hard-coded — looks like a stub; confirm before shipping.
        int productId = 2;
        var product = _productRepository.GetProduct(productId);
        string fileSavingPath = XMLDownloader.GetFileName(productId);

        // Stream the request body straight to disk. Stream.CopyTo (available since
        // .NET 4, which this file already targets) replaces the hand-rolled
        // read/write loop and reuses the same configured chunk size.
        // Assumes SchedulerConfig.MaxBytesCount > 0 (the old code allocated a
        // buffer of that size, so the same assumption held before).
        using (System.IO.FileStream fs = System.IO.File.Create(fileSavingPath))
        {
            Request.InputStream.CopyTo(fs, SchedulerConfig.MaxBytesCount);
        }

        _arrivalMessagesService.CreateArrivalMessage(product.Id, "", fileSavingPath, "");

        var message = "success";
        log.Info(message);
        return (message);
    }
    catch (Exception e)
    {
        // Log message + stack trace; callers only see a generic "error" marker.
        var message = String.Format("{0}; {1}", e.Message, e.StackTrace);
        log.Error(message);
        return ("error");
    }
}
/// <summary>
/// Processing messages for one product: parses all xml-messages in parallel,
/// merges the resulting events into the per-product store, then validates and
/// saves them.
/// </summary>
/// <param name="checkingProductId">Product being processed.</param>
/// <param name="xmlMessages">Raw xml payloads to parse.</param>
/// <param name="productSettings">Validation settings for this product.</param>
/// <param name="messagesCount">Number of messages in <paramref name="xmlMessages"/>.</param>
/// <param name="enableInsertingArrMessages">
/// True: bulk-insert one arrival message per xml; false: insert a single
/// aggregated (common) arrival message instead.
/// </param>
public void ProcessArrivalMessagesForProduct(int checkingProductId, List<string> xmlMessages, IList<ValidationSetting> productSettings, int messagesCount, bool enableInsertingArrMessages)
{
    var startedAt = DateTime.UtcNow;
    _log.Info("Count of messages: " + xmlMessages.Count);
    try
    {
        var allArrivalMessages = new ConcurrentBag<ArrivalMessage>();
        var allEvents = new ConcurrentBag<Event>();
        var processTasks = new List<Task>();

        // First: we parse all xml-messages and build sometimes xml-tree (as update),
        // parallelized into 'counter' tasks, each handling one slice of the list.
        var counter = 10;
        // FIX: ceiling division. The previous "messagesCount / counter" (integer
        // division) silently dropped up to counter-1 trailing messages, and when
        // messagesCount < counter the step was 0 and NO messages were processed.
        var step = (messagesCount + counter - 1) / counter;
        for (var i = 0; i < counter; i++)
        {
            var index = i; // per-iteration copy so the closure captures a stable value
            var processingMessages = xmlMessages.Skip(index * step).Take(step).ToList();
            var task = Task.Factory.StartNew(() =>
            {
                foreach (var xmlMessage in processingMessages)
                {
                    var message = _arrivalMessageService.CreateArrivalMessage(checkingProductId, "", "", "");
                    // Parsing and checking: sometimes xml contains only keep-alive messages
                    if (_downloadService.ProcessEventMessageAndParse(message, xmlMessage, enableInsertingArrMessages, ref allEvents))
                    {
                        message.IsProcessed = true;
                        allArrivalMessages.Add(message);
                    }
                }
            });
            processTasks.Add(task);
        }
        Task.WaitAll(processTasks.ToArray());

        // Now we get all events
        var events = allEvents.ToList();
        var eventsCount = events.Count;
        CreateSuccessSchedulerHistory(startedAt, checkingProductId, SchedulerTypes.ParseXmlFile, "Count: " + eventsCount + " events");
        _log.Info("Processing: " + eventsCount + " events; unique: " + events.DistinctBy(x => x.EventId).Count());

        if (eventsCount > 0)
        {
            var startedOn = DateTime.UtcNow;
            // Then we also parallelize all remaining processing (inserting arrival messages etc.)
            var waitingTasks = new List<Task>();

            // 1st task (push-products, but not product #5): bulk-insert the packet of messages
            if (enableInsertingArrMessages)
            {
                var taskUpdateArrivalMessages = Task.Factory.StartNew(() =>
                {
                    _arrivalMessageService.BulkInsert(allArrivalMessages.ToList(), "");
                });
                waitingTasks.Add(taskUpdateArrivalMessages);
            }

            // 2nd task: merge newly parsed events into the stored ones
            var taskMergeEvents = Task.Factory.StartNew(() =>
            {
                var startMergeEvents = DateTime.UtcNow;
                // We get all old events
                var oldEvents = _merger.GetEventsByEventIds(events.Select(x => x.EventId).ToList(), checkingProductId);
                // And we add them all as repeated, because there are a lot of
                // not-unique events. It is such optimization.
                sportsEventsStores[checkingProductId.ToString()].AddRepeated(oldEvents, events);
                // NOTE(review): the two counts below are STRING-concatenated
                // ("5" + 3 -> "53"), not summed — confirm whether a separator
                // or an arithmetic sum was intended before changing it.
                CreateSuccessSchedulerHistory(startMergeEvents, checkingProductId, SchedulerTypes.Merger, "Merged: " + oldEvents.Count + events.Count);
            });

            // If it isn't product #5 we create one aggregated (common) arrival message
            if (!enableInsertingArrMessages)
            {
                var taskInseringCommonArrMessage = Task.Factory.StartNew(() =>
                {
                    var commonArrivalMessage = new ArrivalMessage
                    {
                        ProductId = checkingProductId,
                        IsProcessed = true,
                        Url = "Count of processed arrival messages: " + messagesCount,
                    };
                    var bets = allArrivalMessages.SelectMany(x => x.Bets ?? new List<string>()).ToList();
                    var messagesEvents = allArrivalMessages.SelectMany(x => x.Events ?? new List<string>()).ToList();
                    var leagues = allArrivalMessages.SelectMany(x => x.Leagues ?? new List<string>()).ToList();
                    var markets = allArrivalMessages.SelectMany(x => x.Markets ?? new List<string>()).ToList();
                    var sports = allArrivalMessages.SelectMany(x => x.Sports ?? new List<string>()).ToList();
                    var locations = allArrivalMessages.SelectMany(x => x.Locations ?? new List<string>()).ToList();
                    var providers = allArrivalMessages.SelectMany(x => x.Providers ?? new List<string>()).ToList();
                    var statuses = allArrivalMessages.SelectMany(x => x.Statuses ?? new List<string>()).ToList();
                    commonArrivalMessage.SetFields(true, messagesEvents, sports, leagues, locations, statuses, markets, providers, bets);
                    _arrivalMessageRepository.Insert(commonArrivalMessage);
                });
                waitingTasks.Add(taskInseringCommonArrMessage);
            }

            _log.Info("merged");

            // We can't validate messages until merging completes
            // (needed for NumberOfChanges and DistinctNumberOfChanges).
            taskMergeEvents.Wait();

            // Then we get all events for processing
            var eventsForProcessing = sportsEventsStores[checkingProductId.ToString()].GetEvents();
            // And... call saving dropdowns, validation, saving events!
            StartProcessingTasks(startedOn, eventsForProcessing, checkingProductId, productSettings);
            // Clearing from memory.
            sportsEventsStores[checkingProductId.ToString()].Clear();

            Task.WaitAll(waitingTasks.ToArray());
            _log.Info("end processing-creating-merging-validating");
        }
    }
    catch (Exception e)
    {
        var errorMessage = GetExceptionInfo(e);
        _log.Error(errorMessage);
        CreateFailedSchedulerHistory(startedAt, checkingProductId, SchedulerTypes.ProcessingAllArrivalMessages, errorMessage);
    }
}
/// <summary>
/// Creates a new arrival message in the database (step 2 of the pipeline).
/// Delegates entirely to the arrival-message service; the xml payload itself is
/// not held here because it can be very big (out-of-memory risk).
/// </summary>
/// <param name="productId">Product the message belongs to.</param>
/// <param name="pathToFile">Path to the saved xml file.</param>
/// <param name="gateway">Gateway identifier.</param>
/// <param name="sportId">Sport identifier.</param>
/// <returns>The newly created <see cref="ArrivalMessage"/> record.</returns>
public ArrivalMessage CreateArrivalMessage(int productId, string pathToFile, string gateway, string sportId)
{
    var createdMessage = _arrivalMessageService.CreateArrivalMessage(productId, pathToFile, gateway, sportId);
    return createdMessage;
}