/// <summary>
/// Rebuilds the search index by re-adding every customer's recent transactions and re-uploading all stored transaction totals.
/// </summary>
public static async Task RefillSearchIndex(TextWriter log)
{
    try
    {
        log.WriteLine($"{DateTime.Now} :: Function is invoked");

        var custRepo = new CustomerRepository();
        var sRepo = new SearchRepository();
        var tRepo = new TransactionRepository();

        var customers = custRepo.GetAllCustomers();
        log.WriteLine($"Total customers: {customers.Count()}");

        DateTime dtLimit = DateTime.UtcNow.Date.AddDays(-Constants.DaysToKeepTransactions);
        DateTime dtLimitWoDay = dtLimit.AddDays(-dtLimit.Day + 1); //roll back to the first day of the month
        DateTime dtEnd = DateTime.UtcNow.Date;
        DateTime dtEndWoDay = dtEnd.AddDays(-dtEnd.Day + 1);

        foreach (var customer in customers)
        {
            log.WriteLine($"{DateTime.Now} :: Processing customer {customer.InternalID} {customer.Name}");

            //walk month by month from the retention limit up to the current month
            for (DateTime d = dtLimitWoDay; d <= dtEndWoDay; d = d.AddMonths(1))
            {
                string strDate = d.ToString(Constants.DateStringFormat);
                int dateVal = int.Parse(strDate);

                var res = await tRepo.GetTransactionsForCustomer(customer.InternalID.Value, dateVal);
                var matchingEnts = res.Where(r => r.DateTime >= dtLimit);
                sRepo.AddToIndex(matchingEnts.ToArray());
            }
        }

        int i = 1;
        TableContinuationToken continuationToken = null;
        do
        {
            var items = tRepo.GetTransactionTotalsItemBatch(ref continuationToken);
            var searchItems = items.Select(item => new TotalsSearchItem(item)).ToArray();
            sRepo.AddOrUpdateTotalsItem(searchItems);
            log.WriteLine($"{DateTime.Now} :: Added totals item batch {i++} ({searchItems.Length} items)");

            //throttle between batches
            if (continuationToken != null)
                await Task.Delay(500);
        } while (continuationToken != null);

        log.WriteLine($"{DateTime.Now} :: DONE");
    }
    catch (Exception ex)
    {
        var tags = new List<string> { "RefillSearchIndex" };
        new RaygunWebApiClient(ConfigurationManager.AppSettings[Constants.SettingKey_RaygunKey])
            .SendInBackground(ex, tags, null);
        throw;
    }
}
public IHttpActionResult GetTransactionCountsForTopCustomers(int mvanumber, int startMonth, int startYear, int endMonth, int endYear, string productID)
{
    string strStartDateVal = startYear.ToString("D4") + startMonth.ToString("D2");
    int startDateVal = int.Parse(strStartDateVal);
    string strEndDateVal = endYear.ToString("D4") + endMonth.ToString("D2");
    int endDateVal = int.Parse(strEndDateVal);

    var counts = new SearchRepository().GetTransactionCountsForTopCustomers(mvanumber, productID, startDateVal, endDateVal);
    return Ok(counts);
}
public IHttpActionResult GetPerMonthPerProductTransactionCountsForCustomer(int mvanumber, int startMonth, int startYear, int endMonth, int endYear, string customerInternalID = null)
{
    string strStartDateVal = startYear.ToString("D4") + startMonth.ToString("D2");
    int startDateVal = int.Parse(strStartDateVal);
    string strEndDateVal = endYear.ToString("D4") + endMonth.ToString("D2");
    int endDateVal = int.Parse(strEndDateVal);

    var counts = new SearchRepository().GetTransactionCountsPerMonthPerProduct(mvanumber, customerInternalID, startDateVal, endDateVal);
    return Ok(counts);
}
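// The two actions above compose the same yyyyMM integer (e.g. startYear 2023, startMonth 4 -> 202304)
// by concatenating zero-padded strings. A small helper like the sketch below could remove the
// duplication; ToDateVal is a hypothetical name, not part of the original code.
private static int ToDateVal(int year, int month)
{
    //"D4"/"D2" padding matches the composition used by the actions above
    return int.Parse(year.ToString("D4") + month.ToString("D2"));
}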
/// <summary>
/// Aggregates a customer's transactions for one month into per-product totals and writes them to the
/// search index and table storage. Totals that no longer have matching transactions are deleted.
/// </summary>
private static async Task ProcessTransactions(SearchRepository sRepo, TransactionRepository tRepo, Customer customer, string productID, int? dateVal)
{
    //any transaction newer than 'dtLimit' should not be processed because it is kept as an individual search item
    DateTime dtLimit = DateTime.UtcNow.Date.AddDays(-Constants.DaysToKeepTransactions);

    int curDateVal;
    if (dateVal.HasValue)
    {
        curDateVal = dateVal.Value;
    }
    else
    {
        curDateVal = int.Parse(dtLimit.AddDays(-1).ToString(Constants.DateStringFormat));
    }

    var existingItems = sRepo.GetTotalsItemsForCustomer(customer.InternalID.Value, productID, curDateVal);
    HashSet<string> existingItemIDs = new HashSet<string>(existingItems.Select(e => e.DocUniqueID));

    Dictionary<string, int> productTotals = new Dictionary<string, int>();

    var res = await tRepo.GetTransactionsForCustomer(customer.InternalID.Value, curDateVal);
    if (!string.IsNullOrWhiteSpace(productID))
    {
        res = res.Where(r => r.ProductID == productID);
    }
    res = res.Where(r => r.DateTime < dtLimit);

    //sum transaction amounts per product
    foreach (var transaction in res)
    {
        if (productTotals.ContainsKey(transaction.ProductID))
        {
            productTotals[transaction.ProductID] += transaction.Amount.Value;
        }
        else
        {
            productTotals.Add(transaction.ProductID, transaction.Amount.Value);
        }
    }

    var searchItems = new List<TotalsSearchItem>();
    foreach (var entry in productTotals)
    {
        var newItem = new TotalsSearchItem
        {
            DocUniqueID = TotalsSearchItem.CreateUID(curDateVal, entry.Key, customer.InternalID.Value),
            ClientInternalID = customer.InternalID.Value.ToString(),
            CustomerNumber = customer.CustomerNumber,
            Date = curDateVal,
            MvaNumber = customer.MvaNumber,
            ProductID = entry.Key,
            Amount = entry.Value
        };
        searchItems.Add(newItem);
        existingItemIDs.Remove(newItem.DocUniqueID);
    }

    sRepo.AddOrUpdateTotalsItem(searchItems.ToArray());
    tRepo.AddOrUpdateTransactionTotalsItem(searchItems.Select(item => new TotalsItemTableEntity(item)).ToArray());

    //in case a transaction was deleted, remove all totals items from the search index that are no longer valid
    sRepo.DeleteTotalsItemWithID(existingItemIDs.ToArray());
    tRepo.DeleteTransactionTotalsItemWithID(existingItemIDs.ToArray());
}
/// <summary>
/// Recalculates per-customer transaction totals, removes expired individual transactions from the
/// search index and then processes any queued re-index requests.
/// </summary>
public static async Task UpdateTotals(TextWriter log)
{
    try
    {
        log.WriteLine($"{DateTime.Now} :: Function is invoked");

        List<Task> tasks = new List<Task>();
        var custRepo = new CustomerRepository();
        var sRepo = new SearchRepository();
        var tRepo = new TransactionRepository();

        var customers = custRepo.GetAllCustomers();
        log.WriteLine($"Total customers: {customers.Count()}");

        foreach (var customer in customers)
        {
            log.WriteLine($"{DateTime.Now} :: Processing customer {customer.InternalID} {customer.Name}");
            tasks.Add(ProcessTransactions(sRepo, tRepo, customer, null, null));
        }

        if (tasks.Any())
            await Task.WhenAll(tasks);
        log.WriteLine($"{DateTime.Now} :: All customers processed");

        sRepo.DeleteOldTransactions();
        tasks = new List<Task>();
        log.WriteLine($"{DateTime.Now} :: Old transactions processed");

        TableContinuationToken continuationToken = null;
        do
        {
            var items = tRepo.GetReIndexableItemBatch(ref continuationToken);
            log.WriteLine($"{DateTime.Now} :: Current re-index batch items: {items.Count()}");

            foreach (ReIndexTableEntity item in items)
            {
                log.WriteLine($"{DateTime.Now} :: Processing re-index item {item.RowKey}");
                int reIndexDateVal = int.Parse(item.Date);

                if (item.CustomerInternalID.HasValue)
                {
                    var reIndexCustomer = custRepo.GetCustomerByInternalID(item.CustomerInternalID.Value);
                    if (reIndexCustomer == null)
                    {
                        throw new Exception($"Error while processing re-index item, customer with ID {item.CustomerInternalID.Value} not found.");
                    }
                    tasks.Add(ProcessTransactions(sRepo, tRepo, reIndexCustomer, item.ProductID, reIndexDateVal));
                }
                else
                {
                    //no customer ID specified, process all of them
                    foreach (var customer in customers)
                    {
                        tasks.Add(ProcessTransactions(sRepo, tRepo, customer, item.ProductID, reIndexDateVal));
                    }
                }
            }

            tRepo.DeleteReIndexItemBatch(items.ToArray());
            //TODO: should ignore errors that are caused by an ETag mismatch.
            //These can happen when an existing request was overwritten during the processing above.
            //The correct thing to do would be to ignore it and to process the item again next time.
        } while (continuationToken != null);

        if (tasks.Any())
            await Task.WhenAll(tasks);
        log.WriteLine($"{DateTime.Now} :: DONE");
    }
    catch (Exception ex)
    {
        var tags = new List<string> { "UpdateTotals" };
        new RaygunWebApiClient(ConfigurationManager.AppSettings[Constants.SettingKey_RaygunKey])
            .SendInBackground(ex, tags, null);
        throw;
    }
}
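// A minimal sketch (not part of the original code) of how UpdateTotals could be invoked from a
// scheduled Azure WebJobs function; the cron schedule and the TimerTrigger wiring are assumptions,
// shown only to illustrate where the TextWriter log parameter comes from.
public static Task UpdateTotalsJob([TimerTrigger("0 0 3 * * *")] TimerInfo timerInfo, TextWriter log)
{
    //runs once per day at 03:00 (hypothetical schedule) and delegates to the function above
    return UpdateTotals(log);
}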
public IHttpActionResult GetProductIDs(int mvanumber)
{
    List<string> values = new SearchRepository().GetUniqueProductIDs(mvanumber);
    return Ok(values);
}
/// <summary>
/// Inserts or updates (using a merge operation) a batch of transaction table entities.
/// Also generates IDs for items that do not have one and sets Date to DateTime.Now if it is null.
/// </summary>
/// <param name="batch">Batch of Transaction items to add/update</param>
/// <exception cref="System.Exception">Thrown when any of the items is not valid or when adding to either table storage or the search index fails.</exception>
public void AddOrUpdateTransactionBatch(IEnumerable<Transaction> batch)
{
    //materialize the batch once so that validation and entity creation operate on the same instances
    var transactions = batch.ToArray();
    var tableEnts = new List<TransactionTableEntity>();

    EnsureTransactionHasCustomerInternalID(transactions);

    foreach (Transaction t in transactions)
    {
        //also ensure that all entities are valid before any of them are committed to table storage or the search index
        t.EnsureEntityValidForStorage(true);

        var ent = new TransactionTableEntity(t.ID.Value, t.Date.Value, t.InternalRef.Value)
        {
            MvaNumber = t.MvaNumber,
            Amount = t.Amount,
            Description = t.Description,
            CustomerNumber = t.CustomerNumber,
            AccountID = t.AccountID,
            ProductID = t.ProductID,
            NotInvoiceable = t.NotInvoiceable
        };
        tableEnts.Add(ent);
    }

    //Group entities by partition key because table storage only allows batch operations within a single partition,
    //and split each group into chunks of 100 (the maximum batch size).
    var entsGroupedByPartition = tableEnts.GroupBy(t => t.PartitionKey);
    foreach (var entGroup in entsGroupedByPartition)
    {
        for (int i = 0; i < entGroup.Count(); i += 100)
        {
            var tableBatch = new TableBatchOperation();
            foreach (var ent in entGroup.Skip(i).Take(100))
            {
                tableBatch.Add(TableOperation.InsertOrMerge(ent));
            }
            TransactionTable.ExecuteBatch(tableBatch);
        }
    }

    //only transactions newer than the retention limit are kept as individual search items
    var dtReIndexReq = DateTime.UtcNow.Date.AddDays(-Constants.DaysToKeepTransactions);
    var entsToAddToIndex = tableEnts.Where(t => t.DateTime >= dtReIndexReq);

    var sRepo = new SearchRepository();
    sRepo.AddToIndex(entsToAddToIndex.ToArray());

    AddReIndexRequestIfNeeded(tableEnts.ToArray());
}
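// A minimal usage sketch, assuming AddOrUpdateTransactionBatch lives on TransactionRepository (the
// class that owns TransactionTable). The Transaction property values and types below are illustrative
// assumptions; validation in EnsureEntityValidForStorage may require further fields (e.g. InternalRef).
var tRepo = new TransactionRepository();
var transaction = new Transaction
{
    CustomerNumber = "10001",      //illustrative value
    ProductID = "PROD-01",         //illustrative value
    Amount = 250,                  //summed into per-product totals elsewhere, so assumed int?
    Date = DateTime.UtcNow,        //nullable; the method falls back to DateTime.Now when null
    Description = "Sample transaction"
    //ID is left unset: the method generates IDs for items that do not have one
};
tRepo.AddOrUpdateTransactionBatch(new[] { transaction });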