/// <summary>
/// Builds a fresh <see cref="ArrivalMessage"/> with empty collections, audit
/// fields stamped "Admin" at the current UTC time, and the supplied routing
/// values (gateway is stored in <c>Url</c>, sportId becomes the group id).
/// </summary>
/// <param name="productId">Product the message belongs to.</param>
/// <param name="pathToFile">Path to the backing xml file.</param>
/// <param name="gateway">Gateway address; persisted in <c>Url</c>.</param>
/// <param name="sportId">Sport identifier used as the message group id.</param>
public ArrivalMessage CreateArrivalMessage(int productId, string pathToFile, string gateway, string sportId)
{
    var arrival = new ArrivalMessage
    {
        ProductId = productId,
        GroupId = sportId,
        Url = gateway,
        PathToXmlFile = pathToFile,
        Events = new List<string>(),
        Leagues = new List<string>(),
        Providers = new List<string>(),
        Locations = new List<string>(),
        Markets = new List<string>(),
        Statuses = new List<string>(),
        Sports = new List<string>(),
        Bets = new List<string>(),
        CreatedBy = "Admin",
        UpdatedBy = "Admin",
        CreatedOn = DateTime.UtcNow,
        UpdatedOn = DateTime.UtcNow
    };
    return arrival;
}
/// <summary>
/// Sends an arrival notification for the given schedule, flagging the
/// schedule as sent before dispatching.
/// </summary>
/// <param name="schedule">Schedule whose ship has arrived.</param>
public async Task SendArrival(Schedule schedule)
{
    var arrival = new ArrivalMessage
    {
        Name = schedule.Ship.Name
    };
    // Flag is set before the send, mirroring the original call order.
    schedule.ArrivalSent = true;
    await SendMessage(schedule, arrival);
}
/// <summary>
/// Marks the given arrival message as processed under the shared lock and
/// reports whether every message in the same group has now been processed.
/// </summary>
/// <param name="message">Message to mark as processed.</param>
/// <returns>True when no unprocessed messages remain for the message's group.</returns>
public bool setAsProcessedWithLock(ArrivalMessage message)
{
    lock (locker)
    {
        messagesStore.SetAsProcessed(message.Id);
        // The group is complete only when the store holds no unprocessed
        // messages for this GroupId (i.e. this sport).
        var groupComplete = !messagesStore.IsContainsNotProcessedForGroup(message.GroupId);
        return groupComplete;
    }
}
/// <summary>
/// Persists a new arrival message row and returns its database-generated id.
/// Serialized through <c>thisLock</c> so concurrent inserts don't interleave.
/// </summary>
/// <param name="arrivalMessage">Domain message to persist.</param>
/// <returns>Id of the inserted row.</returns>
public int Insert(ArrivalMessage arrivalMessage)
{
    lock (thisLock)
    {
        using (var context = new gb_dvsstagingEntities())
        {
            var entity = NewRepositoryArrivalMessage(arrivalMessage, "");
            context.dvs_arrivalmessage.Add(entity);
            context.SaveChanges();
            // Id is populated by EF after SaveChanges.
            return entity.Id;
        }
    }
}
/// <summary>
/// Copies the domain message onto its existing database row and saves.
/// Serialized through <c>thisLock</c> like <c>Insert</c>.
/// </summary>
/// <param name="arrivalMessage">Message carrying the new field values.</param>
public void Update(ArrivalMessage arrivalMessage)
{
    lock (thisLock)
    {
        using (var context = new gb_dvsstagingEntities())
        {
            // Composite primary key: (Id, ProductId).
            var entity = context.dvs_arrivalmessage.Find(arrivalMessage.Id, arrivalMessage.ProductId);
            CopyFromTo(ref arrivalMessage, ref entity);
            context.Entry(entity).State = EntityState.Modified;
            context.SaveChanges();
        }
    }
}
/// <summary>
/// Maps a domain <see cref="ArrivalMessage"/> onto a new
/// <c>dvs_arrivalmessage</c> entity, flattening each name list into a
/// comma-separated column. The entity is always created active.
/// NOTE(review): <paramref name="groupId"/> is unused — the entity's GroupId
/// is taken from the message itself; confirm before removing the parameter.
/// </summary>
/// <param name="arrivalMessage">Source domain message; its lists must be non-null.</param>
/// <param name="groupId">Currently ignored.</param>
/// <returns>A new, unsaved repository entity.</returns>
public dvs_arrivalmessage NewRepositoryArrivalMessage(ArrivalMessage arrivalMessage, string groupId)
{
    var entity = new dvs_arrivalmessage();
    entity.Events = string.Join(",", arrivalMessage.Events);
    entity.Leagues = string.Join(",", arrivalMessage.Leagues);
    entity.Locations = string.Join(",", arrivalMessage.Locations);
    entity.Markets = string.Join(",", arrivalMessage.Markets);
    entity.Providers = string.Join(",", arrivalMessage.Providers);
    entity.Sports = string.Join(",", arrivalMessage.Sports);
    entity.Statuses = string.Join(",", arrivalMessage.Statuses);
    entity.Bets = string.Join(",", arrivalMessage.Bets);
    entity.EventsCount = arrivalMessage.EventsCount;
    entity.SportsCount = arrivalMessage.SportsCount;
    entity.LocationsCount = arrivalMessage.LocationsCount;
    entity.LeaguesCount = arrivalMessage.LeaguesCount;
    entity.BetsCount = arrivalMessage.BetsCount;
    entity.StatusesCount = arrivalMessage.StatusesCount;
    entity.MarketsCount = arrivalMessage.MarketsCount;
    entity.ProvidersCount = arrivalMessage.ProvidersCount;
    entity.ProductId = arrivalMessage.ProductId;
    entity.CreatedOn = arrivalMessage.CreatedOn;
    // Default audit user when the source didn't set one.
    entity.CreatedBy = arrivalMessage.CreatedBy ?? "Admin";
    entity.UpdatedBy = arrivalMessage.UpdatedBy;
    entity.UpdatedOn = arrivalMessage.UpdatedOn;
    entity.Url = arrivalMessage.Url;
    entity.PathToXmlFile = arrivalMessage.PathToXmlFile;
    entity.GroupId = arrivalMessage.GroupId;
    entity.IsProcessed = arrivalMessage.IsProcessed;
    entity.IsActive = true;
    return entity;
}
/// <summary>
/// Parses and processes all arrival messages for one product: the xml messages
/// are parsed in parallel chunks, the resulting events are merged into the
/// product's event store, arrival messages are bulk-inserted (or a single
/// common summary message is inserted), and then validation/saving runs.
/// </summary>
/// <param name="checkingProductId">Product being processed.</param>
/// <param name="xmlMessages">Raw xml message payloads.</param>
/// <param name="productSettings">Validation settings applied to the merged events.</param>
/// <param name="messagesCount">Number of messages to process (expected to equal xmlMessages.Count).</param>
/// <param name="enableInsertingArrMessages">When true, each parsed message is bulk-inserted;
/// otherwise one common summary arrival message is created instead.</param>
public void ProcessArrivalMessagesForProduct(int checkingProductId, List<string> xmlMessages, IList<ValidationSetting> productSettings, int messagesCount, bool enableInsertingArrMessages)
{
    var startedAt = DateTime.UtcNow;
    _log.Info("Count of messages: " + xmlMessages.Count);
    try
    {
        var allArrivalMessages = new ConcurrentBag<ArrivalMessage>();
        var allEvents = new ConcurrentBag<Event>();
        var processTasks = new List<Task>();
        var counter = 10;
        // BUG FIX: the old "messagesCount / counter" floor division dropped the
        // trailing messagesCount % 10 messages, and processed NOTHING at all
        // when messagesCount < 10 (step == 0). Ceiling division guarantees the
        // ten chunks cover every message; Skip/Take past the end just yields
        // empty chunks.
        var step = (messagesCount + counter - 1) / counter;
        // First: parse all xml messages (and occasionally rebuild the xml tree)
        // in parallel, one task per chunk.
        for (var i = 0; i < counter; i++)
        {
            var index = i; // capture a stable copy for the closure
            var processingMessages = xmlMessages.Skip(index * step).Take(step).ToList();
            var task = Task.Factory.StartNew(() =>
            {
                foreach (var xmlMessage in processingMessages)
                {
                    var message = _arrivalMessageService.CreateArrivalMessage(checkingProductId, "", "", "");
                    // Returns false for keep-alive payloads that contain no events.
                    if (_downloadService.ProcessEventMessageAndParse(message, xmlMessage, enableInsertingArrMessages, ref allEvents))
                    {
                        message.IsProcessed = true;
                        allArrivalMessages.Add(message);
                    }
                }
            });
            processTasks.Add(task);
        }
        Task.WaitAll(processTasks.ToArray());

        // All parsed events are now available.
        var events = allEvents.ToList();
        var eventsCount = events.Count;
        CreateSuccessSchedulerHistory(startedAt, checkingProductId, SchedulerTypes.ParseXmlFile, "Count: " + eventsCount + " events");
        _log.Info("Processing: " + eventsCount + " events; unique: " + events.DistinctBy(x => x.EventId).Count());
        if (eventsCount > 0)
        {
            var startedOn = DateTime.UtcNow;
            // Post-parse work (inserting arrival messages etc.) also runs in parallel.
            var waitingTasks = new List<Task>();

            // Task 1: bulk-insert the parsed arrival messages (push products only).
            if (enableInsertingArrMessages)
            {
                var taskUpdateArrivalMessages = Task.Factory.StartNew(() =>
                {
                    _arrivalMessageService.BulkInsert(allArrivalMessages.ToList(), "");
                });
                waitingTasks.Add(taskUpdateArrivalMessages);
            }

            // Task 2: merge new events with previously stored ones.
            var taskMergeEvents = Task.Factory.StartNew(() =>
            {
                var startMergeEvents = DateTime.UtcNow;
                var oldEvents = _merger.GetEventsByEventIds(events.Select(x => x.EventId).ToList(), checkingProductId);
                // AddRepeated is used because push streams contain many non-unique events.
                sportsEventsStores[checkingProductId.ToString()].AddRepeated(oldEvents, events);
                // NOTE(review): the two counts below concatenate as strings
                // ("Merged: " + 5 + 12 => "Merged: 512"); kept as-is to preserve
                // the historical log format — confirm intent.
                CreateSuccessSchedulerHistory(startMergeEvents, checkingProductId, SchedulerTypes.Merger, "Merged: " + oldEvents.Count + events.Count);
            });

            // Task 3: when per-message inserts are disabled, insert one common
            // summary arrival message covering the whole batch.
            if (!enableInsertingArrMessages)
            {
                var taskInseringCommonArrMessage = Task.Factory.StartNew(() =>
                {
                    var commonArrivalMessage = new ArrivalMessage
                    {
                        ProductId = checkingProductId,
                        IsProcessed = true,
                        Url = "Count of processed arrival messages: " + messagesCount,
                    };
                    var bets = allArrivalMessages.SelectMany(x => x.Bets ?? new List<string>()).ToList();
                    var messagesEvents = allArrivalMessages.SelectMany(x => x.Events ?? new List<string>()).ToList();
                    var leagues = allArrivalMessages.SelectMany(x => x.Leagues ?? new List<string>()).ToList();
                    var markets = allArrivalMessages.SelectMany(x => x.Markets ?? new List<string>()).ToList();
                    var sports = allArrivalMessages.SelectMany(x => x.Sports ?? new List<string>()).ToList();
                    var locations = allArrivalMessages.SelectMany(x => x.Locations ?? new List<string>()).ToList();
                    var providers = allArrivalMessages.SelectMany(x => x.Providers ?? new List<string>()).ToList();
                    var statuses = allArrivalMessages.SelectMany(x => x.Statuses ?? new List<string>()).ToList();
                    commonArrivalMessage.SetFields(true, messagesEvents, sports, leagues, locations, statuses, markets, providers, bets);
                    _arrivalMessageRepository.Insert(commonArrivalMessage);
                });
                waitingTasks.Add(taskInseringCommonArrMessage);
            }

            _log.Info("merged");
            // Validation needs the fully merged set (NumberOfChanges /
            // DistinctNumberOfChanges), so wait for the merge first.
            taskMergeEvents.Wait();
            var eventsForProcessing = sportsEventsStores[checkingProductId.ToString()].GetEvents();
            // Dropdown saving, validation and event saving.
            StartProcessingTasks(startedOn, eventsForProcessing, checkingProductId, productSettings);
            // Release the store's memory for this product.
            sportsEventsStores[checkingProductId.ToString()].Clear();
            Task.WaitAll(waitingTasks.ToArray());
            _log.Info("end processing-creating-merging-validating");
        }
    }
    catch (Exception e)
    {
        var errorMessage = GetExceptionInfo(e);
        _log.Error(errorMessage);
        CreateFailedSchedulerHistory(startedAt, checkingProductId, SchedulerTypes.ProcessingAllArrivalMessages, errorMessage);
    }
}
/// <summary>
/// One message - one processing: parses a single pull arrival message (one
/// big xml file), merges its events into the per-sport event store under the
/// sport's lock, and — if this was the last unprocessed message for the
/// sport — triggers validation and saving of the accumulated events.
/// </summary>
/// <param name="message">Arrival message; GroupId selects the sport lock/store.</param>
/// <param name="validationSettings">Settings forwarded to event processing.</param>
protected void ProcessPullMessage(ArrivalMessage message, IList <ValidationSetting> validationSettings)
{
    DateTime startedOn = DateTime.UtcNow;
    try
    {
        var startDate = DateTime.UtcNow;
        // We get all parsed events from the big xml file backing this message.
        var messageEvents = _downloadService.ProcessPullArrivalMessage(message);
        var messageText = "Count: " + messageEvents.Count + " events";
        CreateSuccessSchedulerHistory(startDate, message.ProductId, SchedulerTypes.ParseXmlFile, messageText);
        if (messageEvents.Count > 0)
        {
            // Per-message validation is currently disabled; validation happens
            // later in StartProcessingTasks over the merged set.
            //var startValidateEvents = DateTime.UtcNow;
            //XmlValidator _validator = new XmlValidator();
            //_validator.ValidateEventsForProduct(messageEvents, validationSettings);
            //CreateSuccessSchedulerHistory(startValidateEvents, message.ProductId, SchedulerTypes.EventsValidator, messageEvents.Count + " events");

            // One lock per kind of sport: messages of the same sport are
            // merged and processed serially.
            lock (sportsLockers[message.GroupId])
            {
                var startMergeEvents = DateTime.UtcNow;
                // Distinct EventIds of this message (for fetching old events).
                var messageEventsIds = messageEvents.Select(x => x.EventId)
                                       .Distinct();
                // EventIds already accumulated in the sport's store.
                var savedEventsIds = sportsEventsStores[message.GroupId].GetEvents()
                                     .Select(x => x.EventId)
                                     .Distinct();
                // EventIds seen in this message but not yet in the store.
                var notSavedEventsIds = messageEventsIds.Except(savedEventsIds)
                                        .ToList();
                // AddNotRepeated: one big xml file contains only unique events,
                // so no duplicate handling is needed here.
                sportsEventsStores[message.GroupId].AddNotRepeated(_merger.GetEventsByEventIds(notSavedEventsIds, message.ProductId), messageEvents);
                CreateSuccessSchedulerHistory(startMergeEvents, message.ProductId, SchedulerTypes.Merger, messageText);
                // True only when this was the last unprocessed xml file for
                // the sport — only then is the accumulated set processed.
                bool canProcess = setAsProcessedWithLock(message);
                if (canProcess)
                {
                    var processedEvents = sportsEventsStores[message.GroupId].GetEvents();
                    if (processedEvents.Count > 0)
                    {
                        // Validation and saving of the merged events.
                        StartProcessingTasks(startDate, processedEvents, message.ProductId, validationSettings);
                        // Clear the store and reclaim memory for this sport.
                        sportsEventsStores[message.GroupId].Clear();
                        System.GC.Collect();
                    }
                }
            }
        }
        else
        {
            // Empty message still counts toward group completion.
            setAsProcessedWithLock(message);
        }
        CreateSuccessSchedulerHistory(startedOn, message.ProductId, SchedulerTypes.ProcessingArrivalMessage, "");
    }
    catch (Exception e)
    {
        var errorMessage = GetExceptionInfo(e);
        _log.Error(errorMessage);
        CreateFailedSchedulerHistory(startedOn, message.ProductId, SchedulerTypes.ProcessingArrivalMessage, errorMessage);
    }
}
/// <summary>
/// Builds per-day chart data for a product: loads processed, active arrival
/// messages matching the filter, buckets them by calendar day between the
/// filter's start and end dates, and returns distinct counts per day keyed by
/// the day's DateTime string.
/// </summary>
/// <param name="productId">Product whose messages are charted.</param>
/// <param name="filter">Optional filter; null members are treated as "no filter".</param>
/// <returns>Day-string → ArrivalMessage holding distinct counts for that day.</returns>
public Dictionary<string, ArrivalMessage> GetChartDataForProduct(int productId, FilterObject filter)
{
    if (filter == null)
    {
        filter = new FilterObject();
    }
    using (var dvs_model = new gb_dvsstagingEntities())
    {
        // Normalize missing filter members so the query can dereference them safely.
        if (filter.Event == null) { filter.Event = new FilterValue(); }
        if (filter.Country == null) { filter.Country = new FilterValue(); }
        if (filter.EventStatus == null) { filter.EventStatus = new FilterValue(); }
        if (filter.League == null) { filter.League = new FilterValue(); }
        if (filter.Market == null) { filter.Market = new FilterValue(); }
        if (filter.Provider == null) { filter.Provider = new FilterValue(); }
        if (filter.Sport == null) { filter.Sport = new FilterValue(); }
        dvs_model.Database.CommandTimeout = CONNECTION_TIMEOUT;
        // Each clause: either the filter is unset, or the comma-separated
        // column contains the value.
        // BUG FIX (1): the Market/Provider conditions were nested inside the
        // League HasValue check, so both were skipped whenever no league
        // filter was set. They are now independent top-level clauses.
        // BUG FIX (2): the provider filter previously matched against
        // result.Markets; it now matches result.Providers.
        var q = dvs_model.dvs_arrivalmessage.AsNoTracking()
                .Where(result => result.ProductId == productId && result.IsProcessed && result.IsActive &&
                       (filter.StartDate == null || result.CreatedOn >= filter.StartDate) &&
                       (filter.EndDate == null || result.CreatedOn <= filter.EndDate) &&
                       (!filter.Sport.ExternalId.HasValue || result.Sports.Contains(",") && result.Sports.Contains("," + filter.Sport.ExternalId) || result.Sports.Contains(filter.Sport.ExternalId.ToString())) &&
                       (!filter.Event.ExternalId.HasValue || result.Events.Contains(",") && result.Events.Contains("," + filter.Event.ExternalId) || result.Events.Contains(filter.Event.ExternalId.ToString())) &&
                       (!filter.Country.ExternalId.HasValue || result.Locations.Contains(",") && result.Locations.Contains("," + filter.Country.ExternalId) || result.Locations.Contains(filter.Country.ExternalId.ToString())) &&
                       (string.IsNullOrEmpty(filter.EventStatus.Name) || result.Statuses.Contains(",") && result.Statuses.Contains("," + filter.EventStatus.Name) || result.Statuses.Contains(filter.EventStatus.Name)) &&
                       (!filter.League.ExternalId.HasValue || result.Leagues.Contains(",") && result.Leagues.Contains("," + filter.League.ExternalId) || result.Leagues.Contains(filter.League.ExternalId.ToString())) &&
                       (string.IsNullOrEmpty(filter.Market.Name) || result.Markets.Contains(",") && result.Markets.Contains("," + filter.Market.Name) || result.Markets.Contains(filter.Market.Name)) &&
                       (string.IsNullOrEmpty(filter.Provider.Name) || result.Providers.Contains(",") && result.Providers.Contains("," + filter.Provider.Name) || result.Providers.Contains(filter.Provider.Name)))
                .OrderBy(message => message.CreatedOn)
                .ToList();
        // Materialize into domain objects, splitting the comma-joined columns.
        var arrivalMessages = q
                              .Select(result => new ArrivalMessage
        {
            Id = result.Id,
            Url = result.Url,
            EventsCount = result.EventsCount,
            LeaguesCount = result.LeaguesCount,
            LocationsCount = result.LocationsCount,
            MarketsCount = result.MarketsCount,
            ProvidersCount = result.ProvidersCount,
            SportsCount = result.SportsCount,
            StatusesCount = result.StatusesCount,
            BetsCount = result.BetsCount,
            CreatedOn = result.CreatedOn,
            Events = result.Events?.Split(',').ToList(),
            Bets = result.Bets?.Split(',').ToList(),
            Leagues = result.Leagues?.Split(',').ToList(),
            Locations = result.Locations?.Split(',').ToList(),
            Markets = result.Markets?.Split(',').ToList(),
            Providers = result.Providers?.Split(',').ToList(),
            Sports = result.Sports?.Split(',').ToList(),
            Statuses = result.Statuses?.Split(',').ToList()
        }).ToList();
        // Day window: [startDate 00:00:00 .. endDate 23:59:59], advanced one
        // day at a time; nextDate is the end of the current day bucket.
        // BUG FIX (3): removed the old "endDate.AddDays(1);" — DateTime is
        // immutable, so the discarded result made it a no-op.
        var startDate = new DateTime(filter.StartDate.Year, filter.StartDate.Month, filter.StartDate.Day, 0, 0, 0);
        var endDate = new DateTime(filter.EndDate.Year, filter.EndDate.Month, filter.EndDate.Day, 23, 59, 59);
        var nextDate = new DateTime(filter.StartDate.Year, filter.StartDate.Month, filter.StartDate.Day, 23, 59, 59);
        var dataAccumulator = new Dictionary<DateTime, ArrivalMessage>();
        // Two-pointer sweep: messages are ordered by CreatedOn, days advance
        // monotonically; i only moves when a message is consumed or skipped.
        for (var i = 0; i < arrivalMessages.Count && startDate <= endDate;)
        {
            var model = arrivalMessages[i];
            var createdOn = model.CreatedOn;
            var inRange = createdOn >= startDate && nextDate >= createdOn;
            // Ensure the current day has an (initially empty) bucket.
            if (!dataAccumulator.ContainsKey(startDate))
            {
                dataAccumulator.Add(startDate, new ArrivalMessage()
                {
                    Sports = new List<string>(),
                    Locations = new List<string>(),
                    Leagues = new List<string>(),
                    Events = new List<string>(),
                    Providers = new List<string>(),
                    Markets = new List<string>(),
                    Bets = new List<string>()
                });
            }
            if (!inRange && startDate <= createdOn)
            {
                // Message lies beyond the current day: advance to the next day.
                startDate = startDate.AddDays(1);
                nextDate = new DateTime(startDate.Year, startDate.Month, startDate.Day, 23, 59, 59);
            }
            else if (!inRange && createdOn <= startDate)
            {
                // Message precedes the window: skip it.
                i++;
            }
            else
            {
                // In range: fold the message's lists into the day's bucket.
                // (The unreachable "else" re-creating the bucket was removed —
                // the key is always added above.)
                var bucket = dataAccumulator[startDate];
                bucket.Sports.AddRange(model.Sports ?? new List<string>());
                bucket.Locations.AddRange(model.Locations ?? new List<string>());
                bucket.Leagues.AddRange(model.Leagues ?? new List<string>());
                bucket.Events.AddRange(model.Events ?? new List<string>());
                bucket.Markets.AddRange(model.Markets ?? new List<string>());
                bucket.Providers.AddRange(model.Providers ?? new List<string>());
                bucket.Bets.AddRange(model.Bets ?? new List<string>());
                i++;
            }
        }
        // Project the per-day buckets into distinct counts keyed by day string.
        var newDataAccumulator = new Dictionary<string, ArrivalMessage>();
        foreach (var keyValue in dataAccumulator)
        {
            var day = keyValue.Value;
            newDataAccumulator.Add(keyValue.Key.ToString(), new ArrivalMessage()
            {
                SportsCount = day.Sports.Distinct().Count(),
                LocationsCount = day.Locations.Distinct().Count(),
                LeaguesCount = day.Leagues.Distinct().Count(),
                EventsCount = day.Events.Distinct().Count(),
                MarketsCount = day.Markets.Distinct().Count(),
                ProvidersCount = day.Providers.Distinct().Count(),
                BetsCount = day.Bets.Distinct().Count(),
            });
        }
        return newDataAccumulator;
    }
}
/// <summary>
/// Copies a domain <see cref="ArrivalMessage"/> onto an existing
/// <c>dvs_arrivalmessage</c> entity, joining each list into its
/// comma-separated column. Null lists on the source are replaced with empty
/// lists first — this deliberately mutates the caller's object, matching the
/// <c>ref</c> contract. ProductId / audit columns are intentionally left
/// untouched on the target.
/// </summary>
/// <param name="from">Source domain message (lists are null-filled in place).</param>
/// <param name="to">Target entity to update.</param>
public void CopyFromTo(ref ArrivalMessage from, ref dvs_arrivalmessage to)
{
    // Null-fill so string.Join never sees a null collection.
    from.Events = from.Events ?? new List<string>();
    from.Leagues = from.Leagues ?? new List<string>();
    from.Locations = from.Locations ?? new List<string>();
    from.Markets = from.Markets ?? new List<string>();
    from.Providers = from.Providers ?? new List<string>();
    from.Sports = from.Sports ?? new List<string>();
    from.Statuses = from.Statuses ?? new List<string>();
    from.Bets = from.Bets ?? new List<string>();

    to.Events = string.Join(",", from.Events);
    to.Leagues = string.Join(",", from.Leagues);
    to.Locations = string.Join(",", from.Locations);
    to.Markets = string.Join(",", from.Markets);
    to.Providers = string.Join(",", from.Providers);
    to.Sports = string.Join(",", from.Sports);
    to.Statuses = string.Join(",", from.Statuses);
    to.Bets = string.Join(",", from.Bets);

    to.EventsCount = from.EventsCount;
    to.SportsCount = from.SportsCount;
    to.LocationsCount = from.LocationsCount;
    to.LeaguesCount = from.LeaguesCount;
    to.BetsCount = from.BetsCount;
    to.StatusesCount = from.StatusesCount;
    to.MarketsCount = from.MarketsCount;
    to.ProvidersCount = from.ProvidersCount;
    to.IsProcessed = from.IsProcessed;
    to.PathToXmlFile = from.PathToXmlFile;
}
/// <summary>
/// Streams through one xml push message, parses every &lt;Event&gt; element
/// into the shared event bag, and — on a ~3% random sample, once per batch —
/// rebuilds the product's xml tree from the Header and first Event.
/// Returns true only when the message contained at least one event
/// (keep-alive payloads return false).
/// NOTE(review): isProductTreeUpdating appears to be an instance-level flag
/// shared across parallel callers — confirm its thread-safety with the caller
/// in ProcessArrivalMessagesForProduct.
/// </summary>
/// <param name="message">Arrival message whose fields are populated from the parsed names.</param>
/// <param name="xmlMessage">Raw xml payload; empty/null is treated as no events.</param>
/// <param name="enableInsertingArrMessages">Forwarded to the parser and (negated) to SetFields.</param>
/// <param name="events">Shared bag receiving every parsed event.</param>
/// <returns>True when at least one &lt;Event&gt; element was found.</returns>
public bool ProcessEventMessageAndParse(ArrivalMessage message, string xmlMessage, bool enableInsertingArrMessages, ref ConcurrentBag <Event> @events)
{
    isProductTreeUpdating = false;
    bool isContainEvents = false;
    // Step 1: refresh the product tree — only on a ~3% random sample to keep
    // the common path cheap.
    TreeNode newTree = null;
    var isRandom = r.Next(0, 100) < 3;
    if (isRandom)
    {
        newTree = new TreeNode { text = "xml" };
    }
    if (!string.IsNullOrEmpty(xmlMessage))
    {
        // Name accumulators filled by the parser for each event.
        var eventNames = new List <string>();
        var sportNames = new List <string>();
        var leagueNames = new List <string>();
        var locationNames = new List <string>();
        var statusNames = new List <string>();
        var marketNames = new List <string>();
        var providerNames = new List <string>();
        var betNames = new List <string>();
        using (XmlReader myTextReader = XmlReader.Create(new StringReader(xmlMessage)))
        {
            var headerXml = "";
            var xmlDoc = new XmlDocument();
            var xmlText = "";
            while (myTextReader.EOF == false)
            {
                if (myTextReader.NodeType == XmlNodeType.Element)
                {
                    if (myTextReader.LocalName == "Event")
                    {
                        isContainEvents = true;
                        // ReadOuterXml consumes the element and advances the reader.
                        xmlText = myTextReader.ReadOuterXml();
                        var newEvent = _parser.LoadEvent(enableInsertingArrMessages, message.ProductId, headerXml, xmlText, eventNames, sportNames, leagueNames, locationNames, statusNames, marketNames, providerNames, betNames);
                        // Only the first event of a sampled batch feeds the tree.
                        if (isRandom && !isProductTreeUpdating)
                        {
                            xmlDoc.LoadXml(newEvent.XmlTextExample.XPathSelectElement("/xml/Event").ToString());
                            _parser.ParseToTreeNode(xmlDoc.DocumentElement, newTree);
                        }
                        @events.Add(newEvent);
                    }
                    else if (isRandom && myTextReader.LocalName == "Header")
                    {
                        headerXml = myTextReader.ReadOuterXml();
                        if (!isProductTreeUpdating && !string.IsNullOrEmpty(headerXml))
                        {
                            xmlDoc.LoadXml(headerXml);
                            _parser.ParseToTreeNode(xmlDoc.DocumentElement, newTree);
                        }
                    }
                    else
                    {
                        myTextReader.Read();
                    }
                }
                else
                {
                    myTextReader.Read();
                }
            }
        };
        if (isContainEvents)
        {
            // For pull products (!enableInsertingArrMessages) SetFields also
            // receives the aggregated name lists for the common message.
            message.SetFields(!enableInsertingArrMessages, eventNames, sportNames, leagueNames, locationNames, statusNames, marketNames, providerNames, betNames);
        }
    }
    if (isContainEvents && !isProductTreeUpdating && isRandom)
    {
        ProcessProductTree(message.ProductId, newTree);
    }
    // Mark the tree as refreshed so later messages in this batch skip it.
    isProductTreeUpdating = true;
    return(isContainEvents);
}
/// <summary>
/// Streams the xml file referenced by the message's PathToXmlFile, parses
/// every &lt;Event&gt; element into a list, rebuilds the product tree from the
/// Header plus the first Event, and populates the message's fields from the
/// accumulated names. Unlike the push-message variant, the tree refresh
/// always runs (no random sampling).
/// </summary>
/// <param name="message">Arrival message; PathToXmlFile locates the file, fields are filled on success.</param>
/// <returns>All events parsed from the file (empty when the file has none).</returns>
public List <Event> ProcessPullArrivalMessage(ArrivalMessage message)
{
    isProductTreeUpdating = false;
    bool isContainEvents = false;
    TreeNode newTree = null;
    newTree = new TreeNode { text = "xml" };
    var messageEvents = new List <Event>();
    // Name accumulators filled by the parser for each event.
    var eventNames = new List <string>();
    var sportNames = new List <string>();
    var leagueNames = new List <string>();
    var locationNames = new List <string>();
    var statusNames = new List <string>();
    var marketNames = new List <string>();
    var providerNames = new List <string>();
    var betNames = new List <string>();
    var filePath = message.PathToXmlFile;
    // Decompression is currently disabled; the file is read as plain xml.
    // var fileData = DeCompressFile(filePath);
    using (XmlTextReader myTextReader = new XmlTextReader(filePath))
    {
        myTextReader.WhitespaceHandling = WhitespaceHandling.None;
        var headerXml = "";
        var xmlDoc = new XmlDocument();
        var xmlText = "";
        while (myTextReader.EOF == false)
        {
            if (myTextReader.NodeType == XmlNodeType.Element)
            {
                if (myTextReader.LocalName == "Event")
                {
                    isContainEvents = true;
                    // ReadOuterXml consumes the element and advances the reader.
                    xmlText = myTextReader.ReadOuterXml();
                    var newEvent = _parser.LoadEvent(true, message.ProductId, headerXml, xmlText, eventNames, sportNames, leagueNames, locationNames, statusNames, marketNames, providerNames, betNames);
                    // Only the first event feeds the tree rebuild.
                    if (!isProductTreeUpdating)
                    {
                        xmlDoc.LoadXml(newEvent.XmlTextExample.XPathSelectElement("/xml/Event").ToString());
                        _parser.ParseToTreeNode(xmlDoc.DocumentElement, newTree);
                    }
                    messageEvents.Add(newEvent);
                }
                else if (myTextReader.LocalName == "Header")
                {
                    headerXml = myTextReader.ReadOuterXml();
                    if (!isProductTreeUpdating && !string.IsNullOrEmpty(headerXml))
                    {
                        xmlDoc.LoadXml(headerXml);
                        _parser.ParseToTreeNode(xmlDoc.DocumentElement, newTree);
                    }
                }
                else
                {
                    myTextReader.Read();
                }
            }
            else
            {
                myTextReader.Read();
            }
        }
    }
    if (isContainEvents && !isProductTreeUpdating)
    {
        ProcessProductTree(message.ProductId, newTree);
    }
    if (isContainEvents)
    {
        message.SetFields(true, eventNames, sportNames, leagueNames, locationNames, statusNames, marketNames, providerNames, betNames);
    }
    // Mark the tree as refreshed for the remainder of this run.
    isProductTreeUpdating = true;
    return(messageEvents);
}