protected override void Append(LoggingEvent[] loggingEvents) { try { TableBatchOperation batchOperation = new TableBatchOperation(); base.Append(loggingEvents); foreach (var e in loggingEvents) { var logitem = new LogItem { Exception = e.GetExceptionString(), Level = e.Level.Name, LoggerName = e.LoggerName, Message = e.RenderedMessage, RoleInstance = RoleInfo }; batchOperation.Insert(logitem); } _log4NetTable.ExecuteBatch(batchOperation); } catch (Exception ex) { Trace.TraceError("AzureTableAppender Append " + ex.Message); } }
private TableBatchOperation ComposeBatch(IEnumerable<LogEvent> events) { var batch = new TableBatchOperation(); foreach (LogEvent e in events) { var row = new ElasticTableEntity { PartitionKey = e.EventTime.ToString("yy-MM-dd"), RowKey = e.EventTime.ToString("HH-mm-ss-fff") }; row.Add("source", e.SourceName); row.Add("severity", e.Severity); row.Add("message", e.FormattedMessage); row.Add("error", e.ErrorException == null ? string.Empty : e.ErrorException.ToString()); if (e.Properties != null) { foreach (var p in e.Properties) { if (p.Key == LogEvent.ErrorPropertyName) continue; row.Add(p.Key, p.Value); } } batch.Insert(row); } return batch.Count > 0 ? batch : null; }
async Task<CommandResult> DoInsert(CloudTable table, long n, Func<long, EntityNk[]> entityFactory) { var batchOperation = new TableBatchOperation(); foreach (var e in entityFactory(n)) { batchOperation.Insert(e); } var cresult = new CommandResult { Start = DateTime.UtcNow.Ticks }; var cbt = 0L; var context = GetOperationContext((t) => cbt = t); try { var results = await table.ExecuteBatchAsync(batchOperation, operationContext: context); cresult.Elapsed = cbt; } catch (Exception ex) { cresult.Elapsed = -1; Console.Error.WriteLine("Error DoInsert {0} {1}", n, ex.ToString()); } return cresult; }
public async Task Add(IList<ITableEntity> notes)
{
    // Return Task instead of void so callers can await the write and observe exceptions.
    var batchOperation = new TableBatchOperation();
    foreach (var note in notes)
    {
        batchOperation.Insert(note);
    }
    await _table.ExecuteBatchAsync(batchOperation);
}
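A single TableBatchOperation accepts at most 100 operations and all entities must share one partition key, which the Add method above does not enforce. A minimal sketch, assuming the same _table field and System.Linq, that groups by partition key and flushes in chunks of 100:

public async Task AddChunkedAsync(IList<ITableEntity> notes)
{
    // Batches are limited to 100 operations on a single partition key,
    // so group by PartitionKey and flush every 100 inserts.
    foreach (var group in notes.GroupBy(n => n.PartitionKey))
    {
        var batch = new TableBatchOperation();
        foreach (var note in group)
        {
            batch.Insert(note);
            if (batch.Count == 100)
            {
                await _table.ExecuteBatchAsync(batch);
                batch = new TableBatchOperation();
            }
        }
        if (batch.Count > 0)
        {
            await _table.ExecuteBatchAsync(batch);
        }
    }
}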
private void InsertBets() { if (this.Bets.Count == 0) { return; } var tableBets = this.CreateTableClient("Bet"); tableBets.CreateIfNotExists(); var batchOperation = new TableBatchOperation(); foreach (var kvp in this.Bets) { var bet = new BetEntity(); bet.TicketNumber = kvp.Key; bet.BetNumber = kvp.Value; bet.RaffleId = this.RaffleId; batchOperation.Insert(bet); } tableBets.ExecuteBatch(batchOperation); }
public void TableBatchAddNullShouldThrow() { TableBatchOperation batch = new TableBatchOperation(); try { batch.Add(null); Assert.Fail(); } catch (ArgumentNullException) { // no op } catch (Exception) { Assert.Fail(); } try { batch.Insert(0, null); Assert.Fail(); } catch (ArgumentNullException) { // no op } catch (Exception) { Assert.Fail(); } }
public void TableBatchAllSupportedOperationsSync() { TableBatchOperation batch = new TableBatchOperation(); string pk = Guid.NewGuid().ToString(); // insert batch.Insert(GenerateRandomEnitity(pk)); // delete { DynamicReplicatedTableEntity entity = GenerateRandomEnitity(pk); this.repTable.Execute(TableOperation.Insert(entity)); batch.Delete(entity); } // replace { DynamicReplicatedTableEntity entity = GenerateRandomEnitity(pk); this.repTable.Execute(TableOperation.Insert(entity)); batch.Replace(entity); } // insert or replace { DynamicReplicatedTableEntity entity = GenerateRandomEnitity(pk); this.repTable.Execute(TableOperation.Insert(entity)); batch.InsertOrReplace(entity); } // merge { DynamicReplicatedTableEntity entity = GenerateRandomEnitity(pk); this.repTable.Execute(TableOperation.Insert(entity)); batch.Merge(entity); } // insert or merge { DynamicReplicatedTableEntity entity = GenerateRandomEnitity(pk); this.repTable.Execute(TableOperation.Insert(entity)); batch.InsertOrMerge(entity); } IList <TableResult> results = this.repTable.ExecuteBatch(batch); Assert.AreEqual(results.Count, 6); IEnumerator <TableResult> enumerator = results.GetEnumerator(); for (int i = 0; i < results.Count; i++) { enumerator.MoveNext(); Assert.AreEqual((int)HttpStatusCode.NoContent, enumerator.Current.HttpStatusCode, "HttpStatusCode mismatch i={0}", i); } }
public ActionResult AddEntities() { CloudStorageAccount storageAccount = CloudStorageAccount.Parse( CloudConfigurationManager.GetSetting("StorageConnectionString")); CloudTableClient tableClient = storageAccount.CreateCloudTableClient(); CloudTable table = tableClient.GetTableReference("TestTable"); CustomerEntity customer1 = new CustomerEntity("Smith", "Jeff"); customer1.Email = "*****@*****.**"; CustomerEntity customer2 = new CustomerEntity("Smith", "Ben"); customer2.Email = "*****@*****.**"; TableBatchOperation batchOperation = new TableBatchOperation(); batchOperation.Insert(customer1); batchOperation.Insert(customer2); IList <TableResult> results = table.ExecuteBatch(batchOperation); return(View(results)); }
/// <summary> /// batch insert entities /// </summary> /// <param name="entities"></param> public void Insert(IEnumerable <ActionRequestLogEntry> entities) { var batchOperation = new TableBatchOperation(); foreach (var entity in entities) { batchOperation.Insert(entity); } GetTableReference().ExecuteBatch(batchOperation); }
public async Task AddAsync(IEnumerable <T> objs) { TableBatchOperation batchAdd = new TableBatchOperation(); foreach (var obj in objs) { batchAdd.Insert(obj); } await table.ExecuteBatchAsync(batchAdd).ConfigureAwait(false); }
public void SaveToStorageInBatch(List<Log> list) { var batchOperation = new TableBatchOperation(); foreach (var log in list) { var entity = GetLogTableEntity(log); batchOperation.Insert(entity); } var result = _table.ExecuteBatch(batchOperation); }
static void Main(string[] args) { Console.WriteLine("Starting the batch"); var storageAccount = CloudStorageAccount.Parse(ConfigurationManager.AppSettings["StorageConnectionString"]); var tableClient = storageAccount.CreateCloudTableClient(); var table = tableClient.GetTableReference("EventRegistrations"); Console.WriteLine("Create table storage EventRegistrations if not exists"); table.CreateIfNotExists(); List <Registration> registrationList = new List <Registration>(); string dbConnectionString = ConfigurationManager.ConnectionStrings["EventsContextConnectionString"].ConnectionString; bool isAzureDb = Regex.IsMatch(dbConnectionString, PATTERN_AZURE_CONNECTION, RegexOptions.Compiled); if (!isAzureDb) { Console.WriteLine("Deleting sql database"); using (EventsContext context = new EventsContext()) { context.Database.Delete(); } } using (EventsContext context = new EventsContext()) { Console.WriteLine("Removing data from database"); context.Events.RemoveRange(context.Events); context.SaveChanges(); Console.WriteLine("Adding data to database"); for (int i = 0; i < 55; i++) { var result = CreateEvent(context); registrationList.AddRange(result); } } registrationList = registrationList.OrderBy(r => rand.Next()).ToList(); Console.WriteLine("adding data to table storage"); foreach (var registrationGroup in registrationList.GroupBy(r => r.PartitionKey)) { TableBatchOperation operation = new TableBatchOperation(); foreach (var registration in registrationGroup) { operation.Insert(registration); } table.ExecuteBatch(operation); } }
static void Main(string[] args)
{
    var connectionString = CloudConfigurationManager.GetSetting("StorageConnectionString");
    var storageAccount = CloudStorageAccount.Parse(connectionString);
    var tableClient = storageAccount.CreateCloudTableClient();
    var table = tableClient.GetTableReference("employee1");
    table.CreateIfNotExists();

    var employeeEntity1 = new EmployeeEntity("Kasagoni", "Person1") { Email = "*****@*****.**", PhoneNumber = "123-456-7890" };
    var employeeEntity2 = new EmployeeEntity("Kasagoni", "Person2") { Email = "*****@*****.**", PhoneNumber = "123-456-7890" };
    var employeeEntity3 = new EmployeeEntity("Kasagoni", "Person3") { Email = "*****@*****.**", PhoneNumber = "123-456-7890" };

    var batchOperation = new TableBatchOperation();
    batchOperation.Insert(employeeEntity1);
    batchOperation.Insert(employeeEntity2);
    batchOperation.Insert(employeeEntity3);
    table.ExecuteBatch(batchOperation);

    Console.WriteLine("Table Storage Operations Complete, Press Any Key to Exit!");
    Console.ReadKey();
}
private async Task <IList <TableResult> > AddQuestionsToPlayer(IEnumerable <QuestionEntity> questions) { var table = _tableClient.GetTableReference(TableNames.Player); TableBatchOperation insertBatch = new TableBatchOperation(); foreach (var question in questions) { insertBatch.Insert(question); } return(await table.ExecuteBatchAsync(insertBatch)); }
public async Task StoreGame(GameEntity game)
{
    var tableClient = GetClient();
    CloudTable table = tableClient.GetTableReference("games");
    await table.CreateIfNotExistsAsync();

    TableBatchOperation batchOperation = new TableBatchOperation();
    batchOperation.Insert(game);
    // Use the async overload so the method stays fully asynchronous instead of blocking on ExecuteBatch.
    await table.ExecuteBatchAsync(batchOperation);
}
// All must have the same partition key public async Task WriteBatchAsync(T[] entities, TableInsertMode mode = TableInsertMode.Insert) { if (entities.Length == 0) { return; // nothing to write. } string partitionKey = entities[0].PartitionKey; const int BatchSize = 99; TableBatchOperation batchOperation = new TableBatchOperation(); foreach (var entity in entities) { if (entity.PartitionKey != partitionKey) { throw new InvalidOperationException("All entities in a batch must have same partition key"); } ValidateRowKey(entity.RowKey); switch (mode) { case TableInsertMode.Insert: batchOperation.Insert(entity); break; case TableInsertMode.InsertOrMerge: batchOperation.InsertOrMerge(entity); break; case TableInsertMode.InsertOrReplace: batchOperation.InsertOrReplace(entity); break; default: throw new InvalidOperationException("Unsupported insert mode: " + mode.ToString()); } if (batchOperation.Count == BatchSize) { // Flush await _table.ExecuteBatchAsync(batchOperation); batchOperation = new TableBatchOperation(); } } if (batchOperation.Count > 0) { await _table.ExecuteBatchAsync(batchOperation); } }
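A hypothetical call site for WriteBatchAsync above; MyEntity (a TableEntity subclass) and writer are illustrative names, not part of the original code:

// Hedged usage sketch: 250 rows on one partition key are flushed internally in chunks of 99.
var rows = Enumerable.Range(0, 250)
    .Select(i => new MyEntity { PartitionKey = "device-01", RowKey = i.ToString("D4") })
    .ToArray();
await writer.WriteBatchAsync(rows, TableInsertMode.InsertOrReplace);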
public static async Task <IList <TableResult> > InsertNewPerson(string AppID, string ContextID, string PersonID, string UserID) { TableBatchOperation BatchOperation = new TableBatchOperation(); MappingEntity UserMapping = new MappingEntity(AppID + "-" + ContextID, UserID); UserMapping.Value = PersonID; MappingEntity PersonGroupPersonMapping = new MappingEntity(AppID + "-" + ContextID, PersonID); PersonGroupPersonMapping.Value = UserID; BatchOperation.Insert(UserMapping); BatchOperation.Insert(PersonGroupPersonMapping); CloudStorageAccount StorageAccount = GetCloudStorageAccount(); CloudTableClient TableClient = StorageAccount.CreateCloudTableClient(); CloudTable MappingTable = TableClient.GetTableReference("mappingTable");//table name MappingTable.CreateIfNotExists(); return(await MappingTable.ExecuteBatchAsync(BatchOperation)); }
public async Task TruncateLog(long lsn) { var continuationToken = default(TableContinuationToken); var query = new TableQuery <CommitRow>().Where(TableQuery.CombineFilters( TableQuery.GenerateFilterCondition(PartitionKey, QueryComparisons.Equal, commitRecordPartitionKey), TableOperators.And, TableQuery.CombineFilters( TableQuery.GenerateFilterCondition(RowKey, QueryComparisons.LessThanOrEqual, CommitRow.MakeRowKey(lsn)), TableOperators.And, TableQuery.GenerateFilterCondition(RowKey, QueryComparisons.GreaterThanOrEqual, CommitRow.MinRowKey)))); var batchOperation = new TableBatchOperation(); do { var queryResult = await table.ExecuteQuerySegmentedAsync(query, continuationToken).ConfigureAwait(false); continuationToken = queryResult.ContinuationToken; if (queryResult.Results.Count > 0) { foreach (var row in queryResult) { var transactions = DeserializeCommitRecords(row.Transactions); if (transactions.Count > 0 && transactions[transactions.Count - 1].LSN <= lsn) { batchOperation.Delete(row); if (this.archiveLogOptions.ArchiveLog) { var archiveRow = new ArchivalRow(this.clusterOptions, row.Transactions, transactions.Select(tx => tx.TransactionId).Min(), transactions.Select(tx => tx.LSN).Min()); batchOperation.Insert(archiveRow); } if (batchOperation.Count == BatchOperationLimit) { await table.ExecuteBatchAsync(batchOperation).ConfigureAwait(false); batchOperation = new TableBatchOperation(); } } else { break; } } } } while (continuationToken != default(TableContinuationToken)); if (batchOperation.Count > 0) { await table.ExecuteBatchAsync(batchOperation).ConfigureAwait(false); } }
private void batchInsert() { TableBatchOperation batchOperation = new TableBatchOperation(); for (int i = 0; i < 100; i++) { Console.Out.WriteLine(_counter); batchOperation.Insert(new CustomerEntity("Mike", "Jordan" + (_counter++), "*****@*****.**", "12345")); } _table.ExecuteBatch(batchOperation); }
/// <summary> /// Creates new entities in the table using batching /// </summary> /// <param name="entities">The entities to store in the table</param> public void CreateEntities(IEnumerable <T> entities) { Validate.Null(entities, "entities"); var batchOperation = new TableBatchOperation(); foreach (var entity in entities) { batchOperation.Insert(entity); } cloudTable.ExecuteBatch(batchOperation); }
public async Task SaveNewEventsForAggregate(Guid aggregateId, List <Event> newEvents) { // Create the batch operation. CloudTable table = _tableClient.GetTableReference(_tableName); TableBatchOperation batchOperation = new TableBatchOperation(); foreach (var evt in newEvents) { batchOperation.Insert(new TableEntityAdapter <Event>(evt, evt.AggregateId.ToString(), evt.Id)); } await table.ExecuteBatchAsync(batchOperation); }
public static async Task <IActionResult> Up( [HttpTrigger(AuthorizationLevel.Anonymous, "post", Route = "reset")] HttpRequest req, [Table("reminders", Connection = "AzureWebJobsStorage")] CloudTable remindersTable, ILogger log) { // batch operations - https://stackoverflow.com/a/53293614/1366033 var query = new TableQuery <ReminderTableEntity>(); var result = await remindersTable.ExecuteQuerySegmentedAsync(query, null); // Create the batch operation. TableBatchOperation batchDeleteOperation = new TableBatchOperation(); foreach (var row in result) { batchDeleteOperation.Delete(row); } // Execute the batch operation. await remindersTable.ExecuteBatchAsync(batchDeleteOperation); // Create the batch operation. TableBatchOperation batchInsertOperation = new TableBatchOperation(); ReminderModel[] defaultRecords = new [] { new ReminderModel("8", "Wake Up"), new ReminderModel("9", "Wake Up Now!"), new ReminderModel("10", "Food, Teeth, Wallet, Check"), new ReminderModel("11", "Survey Says..... go to school"), new ReminderModel("12", "Now What!?!?!"), new ReminderModel("13", "Shoot Mah Goot!!"), new ReminderModel("14", "Sometimes you play your cards, sometimes you don't"), new ReminderModel("15", "Frida Says 'Hi'"), new ReminderModel("16", "Enjoy a small break"), new ReminderModel("17", "Do your homework"), new ReminderModel("18", "Do your homework"), new ReminderModel("19", "Call gram/the fam and say hi"), new ReminderModel("20", "Did you eat dinner yet?"), new ReminderModel("21", "Screen's Off bud"), new ReminderModel("22", "Seriously, go to bed"), }; foreach (var rec in defaultRecords) { batchInsertOperation.Insert(rec.ToTableEntity()); } // Execute the batch operation. await remindersTable.ExecuteBatchAsync(batchInsertOperation); log.LogInformation($"Resetting entire table and adding {defaultRecords?.Count() ?? 0} record(s)"); return(new OkResult()); }
public virtual async Task AddAsync(List <TEntity> entities) { TableBatchOperation batchOperation = new TableBatchOperation(); // Create a customer entity and add it to the table. foreach (var entity in entities) { batchOperation.Insert(entity); } await this._tableClient.ExecuteBatchAsync(batchOperation); }
public async Task InsertBatch(List <TableEntity> entities) { TableBatchOperation batchOperation = new TableBatchOperation(); foreach (TableEntity entity in entities) { batchOperation.Insert(entity); } // Execute the insert operation. await _table.ExecuteBatchAsync(batchOperation); }
public static void InsertBatchIntoTable(IEnumerable <Person> people) { TableBatchOperation batchOperation = new TableBatchOperation(); CloudTable cloudTable = AzureUtils.GetCloudTable(Parameters.TableName); foreach (var person in people) { batchOperation.Insert(person); } cloudTable.ExecuteBatchAsync(batchOperation).Wait(); }
public virtual async Task <IList <TableResult> > InsertAsync(T[] objs) { Debug.Assert(objs != null && objs.Length != 0); var bop = new TableBatchOperation(); foreach (var obj in objs) { bop.Insert(obj); } return(await this.ExecuteBatchAsync(bop)); }
/// <summary> /// Creates new entities in the table using batching /// </summary> /// <param name="entities">The entities to store in the table</param> public async Task CreateEntitiesAsync(IEnumerable <T> entities) { Validate.Null(entities, "entities"); var batchOperation = new TableBatchOperation(); foreach (var entity in entities) { batchOperation.Insert(entity); } await cloudTable.ExecuteBatchAsync(batchOperation); }
public async Task Save(SerializedClefLog log) { if (log == null) throw new ArgumentNullException(nameof(log)); var batch = new TableBatchOperation(); var entity = m_tableEntityConverter.ConvertToDynamicEntity(log); entity.PartitionKey = m_keyGenerator.GeneratePartitionKey(log.LastEventTime); entity.RowKey = m_keyGenerator.GenerateRowKey(log.FirstEventTime); batch.Insert(entity); var table = await m_tableFactory.Create(log.LastEventTime); await table.ExecuteBatchAsync(batch); }
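Since the Save method above only ever writes a single entity per call, the batch is optional; a minimal alternative sketch, reusing the entity, m_keyGenerator and m_tableFactory members from the snippet:

// Hedged alternative: a one-entity write can use a plain TableOperation instead of a batch.
var table = await m_tableFactory.Create(log.LastEventTime);
await table.ExecuteAsync(TableOperation.Insert(entity));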
static void Main(string[] args)
{
    Console.WriteLine("Getting access settings...");
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(
        CloudConfigurationManager.GetSetting("StorageConnectionString"));

    Console.WriteLine("Getting the table reference and creating it in storage (if it does not already exist)...");
    CloudTableClient tableClient = storageAccount.CreateCloudTableClient();
    CloudTable table = tableClient.GetTableReference("Cotacoes");
    table.CreateIfNotExists();

    Console.WriteLine("Inserting Euro (EUR) quotes...");
    Carga.IncluirCotacao(table, "EUR", "2017-03-20 16:59", 3.3016);
    Carga.IncluirCotacao(table, "EUR", "2017-03-21 16:59", 3.3417);
    Console.WriteLine("Listing Euro (EUR) quotes...");
    Carga.ListarCotacoes<CotacaoEntity>(table, "EUR");

    Console.WriteLine("Inserting Pound Sterling (LIB) quotes...");
    Carga.IncluirCotacao(table, "LIB", "2017-03-20 16:59", 3.7979);
    Carga.IncluirCotacao(table, "LIB", "2017-03-21 16:59", 3.8573);
    Console.WriteLine("Listing Pound Sterling (LIB) quotes...");
    Carga.ListarCotacoes<CotacaoEntity>(table, "LIB");

    Console.WriteLine("Inserting Dollar (USD) quotes...");
    TableBatchOperation batch = new TableBatchOperation();
    batch.Insert(new CotacaoDolarEntity("USD", "2017-03-20 16:59", 3.0717, 3.2300));
    batch.Insert(new CotacaoDolarEntity("USD", "2017-03-21 16:59", 3.0900, 3.2600));
    table.ExecuteBatch(batch);
    Console.WriteLine("Listing Dollar (USD) quotes...");
    Carga.ListarCotacoes<CotacaoDolarEntity>(table, "USD");

    Console.WriteLine("Done!");
    Console.ReadKey();
}
public ActionResult About() { var storageAccount = CloudStorageAccount.Parse( CloudConfigurationManager.GetSetting("StorageConnectionString")); var client = storageAccount.CreateCloudTableClient(); var stebraTable = client.GetTableReference("StebraList"); if (!stebraTable.Exists()) { stebraTable.Create(); } TableOperation retrieveOperation = TableOperation.Retrieve<StebraEntity>("WeAreStebra", "ListItem3"); // Execute the operation. TableResult retrievedResult = stebraTable.Execute(retrieveOperation); StebraEntity result = (StebraEntity)retrievedResult.Result; if (result != null) { result.ImageUrl = "http://cdn.buzznet.com/assets/users16/crizz/default/funny-pictures-thriller-kitten-impresses--large-msg-121404159787.jpg"; //var awesomeStebra = new StebraEntity("WeAreStebra", "ListItem2"); //awesomeStebra.ImageUrl = "http://rubmint.com/wp-content/plugins/wp-o-matic/cache/6cb1b_funny-pictures-colur-blind-kitteh-finded-yew-a-pumikin.jpg"; //var coolStebra = new StebraEntity("WeAreStebra", "ListItem2"); //coolStebra.ImageUrl = "http://rubmint.com/wp-content/plugins/wp-o-matic/cache/6cb1b_funny-pictures-colur-blind-kitteh-finded-yew-a-pumikin.jpg"; //var batchOperation = new TableBatchOperation(); TableOperation batchOperation = TableOperation.InsertOrReplace(result); //batchOperation.Insert(awesomeStebra); //batchOperation.Insert(coolStebra); //stebraTable.ExecuteBatch(batchOperation); stebraTable.Execute(batchOperation); } else { var awesomeStebra = new StebraEntity("WeAreStebra", "ListItem3"); awesomeStebra.ImageUrl = "http://rubmint.com/wp-content/plugins/wp-o-matic/cache/6cb1b_funny-pictures-colur-blind-kitteh-finded-yew-a-pumikin.jpg"; var batchOperation = new TableBatchOperation(); batchOperation.Insert(awesomeStebra); stebraTable.ExecuteBatch(batchOperation); } return View(); }
private async Task SaveAndPublish(string stateType, Guid transaction, Guid streamId, long startVersion, IReadOnlyList <object> events, TracingProperties tracingProperties) { if (events.Count == 0) { return; } await SaveQueueTicket().ConfigureAwait(continueOnCapturedContext: false); await SaveEvents().ConfigureAwait(continueOnCapturedContext: false); await PublishPendingEvents().ConfigureAwait(continueOnCapturedContext: false); Task SaveQueueTicket() { var queueTicket = new QueueTicket(stateType, streamId, startVersion, events.Count, transaction); return(_table.ExecuteAsync(TableOperation.Insert(queueTicket))); } Task SaveEvents() { var batch = new TableBatchOperation(); for (int i = 0; i < events.Count; i++) { object source = events[i]; var streamEvent = new StreamEvent( stateType, streamId, version: startVersion + i, raisedTimeUtc: DateTime.UtcNow, eventType: _typeResolver.ResolveTypeName(source.GetType()), payload: _jsonProcessor.ToJson(source), messageId: $"{Guid.NewGuid()}", tracingProperties.OperationId, tracingProperties.Contributor, tracingProperties.ParentId, transaction); batch.Insert(streamEvent); } return(_table.ExecuteBatchAsync(batch)); } Task PublishPendingEvents() => _publisher.PublishEvents(stateType, streamId); }
async public Task <bool> InsertEntitiesAsync <T>(string tableName, T[] entities) where T : ITableEntity { if (entities == null || entities.Length == 0) { return(true); } CloudStorageAccount storageAccount = CloudStorageAccount.Parse(_connectionString); // Create the table client. CloudTableClient tableClient = storageAccount.CreateCloudTableClient(); // Create the CloudTable object that represents the "people" table. CloudTable table = tableClient.GetTableReference(tableName); var partitionKeys = entities.Select(e => e.PartitionKey).Distinct(); if (partitionKeys != null) { foreach (string partitionKey in partitionKeys) { if (String.IsNullOrWhiteSpace(partitionKey)) { continue; } // Create the batch operation. TableBatchOperation batchOperation = new TableBatchOperation(); int counter = 0; foreach (var entity in entities.Where(e => e?.PartitionKey == partitionKey)) { batchOperation.Insert(entity); if (++counter == 100) { var result = await table.ExecuteBatchAsync(batchOperation); batchOperation = new TableBatchOperation(); counter = 0; } } if (counter > 0) { // Execute the batch operation. var result = await table.ExecuteBatchAsync(batchOperation); } } } return(true); }
public async Task InsertManyAsync(T[] items) { CloudTable table = await GetTableAsync(); var operation = new TableBatchOperation(); foreach (var item in items) { //item.PrepareForSave(); operation.Insert(item); } await table.ExecuteBatchAsync(operation); }
private async Task WriteBatch(DynamicTableEntity[] rows) { foreach (var group in rows.GroupBy(x => x.PartitionKey)) { TableBatchOperation batchOperation = new TableBatchOperation(); foreach (var row in group) { batchOperation.Insert(row, false); } await table.ExecuteBatchAsync(batchOperation); } }
static void Main(string[] args)
{
    Customers customer1 = new Customers("Ivan", "Melnyk") { Email = "*****@*****.**" };
    Customers customer2 = new Customers("Petro", "Melnyk") { Email = "*****@*****.**" };

    // storage => client => table
    // Parse expects the connection string itself, not the name of the setting.
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(
        CloudConfigurationManager.GetSetting("StorageConnectionString"));
    CloudTableClient client = storageAccount.CreateCloudTableClient();
    CloudTable table = client.GetTableReference("tmpLesson2");
    table.CreateIfNotExists();

    TableBatchOperation batch = new TableBatchOperation();
    batch.Insert(customer1);
    batch.Insert(customer2);
    table.ExecuteBatch(batch);

    // Select
    // queryBuilder
    TableQuery<Customers> query = new TableQuery<Customers>().Where(
        TableQuery.GenerateFilterCondition("PartitionKey", QueryComparisons.Equal, "Melnyk"));
    foreach (Customers customer in table.ExecuteQuery(query))
    {
        System.Console.WriteLine(customer.PartitionKey);
        System.Console.WriteLine(customer.RowKey);
        System.Console.WriteLine(customer.Email);
        System.Console.WriteLine(customer.Timestamp);
        System.Console.WriteLine("================");
    }
}
public async Task SaveAll(IEnumerable <ProjectTaskEntity> tasks) { CloudTable table = await _cloudTableHelper.GetCloudTableByName(_configuration["TasksTableName"]); TableBatchOperation batchOperation = new TableBatchOperation(); foreach (ProjectTaskEntity task in tasks) { batchOperation.Insert(task); } await table.ExecuteBatchAsync(batchOperation); }
public async Task Insert(List <T> items, string correlationToken) { var tableBatchOperation = new TableBatchOperation(); var table = await GetTableAsync(); foreach (var item in items) { tableBatchOperation.Insert(item, false); } await table.ExecuteBatchAsync(tableBatchOperation); }
public static async Task Insert(this CloudTable table, IEnumerable <ITableEntity> recs) { for (int j = 0; j < recs.Count(); j += 100) { var batch = new TableBatchOperation(); var m = recs.Skip(j).Take(100).ToList(); foreach (ITableEntity r in m) { batch.Insert(r); } await table.ExecuteBatchAsync(batch); } }
private static async Task BackupPostCollection(DocumentCollection dc, DocumentClient client) { Trace.TraceInformation("Collection '{0}' start. Time: '{1}'", dc.Id, DateTime.Now.ToString(CultureInfo.CurrentCulture)); try { var ds = from d in client.CreateDocumentQuery<PostMessage>(dc.DocumentsLink) where d.Type == "Post" select d; TableBatchOperation batchOperation = new TableBatchOperation(); List<dynamic> docList = new List<dynamic>(); foreach (var d in ds) { TablePost c = ModelService.TablePostData(d); batchOperation.Insert(c); docList.Add(d); if (batchOperation.Count == 100) { var operation = batchOperation; var res = await _retryPolicy.ExecuteAsync( () => _table.ExecuteBatchAsync(operation)); batchOperation = new TableBatchOperation(); if (res.Count == operation.Count) { await _iDbService.DocumentDb.BatchDelete(dc, docList); docList = new List<dynamic>(); Trace.TraceInformation("inserted"); } } } if (batchOperation.Count > 0) { var operation = batchOperation; var res = await _retryPolicy.ExecuteAsync( () => _table.ExecuteBatchAsync(operation)); if (res.Count == operation.Count) { await _iDbService.DocumentDb.BatchDelete(dc, docList); Trace.TraceInformation("inserted"); } } } catch (Exception e) { Trace.TraceError("Error in BackupCollection " + e.Message); } }
public void AddUser(int dddEventId, int eventbriteOrderNo, string firstName, string lastName, string userToken, string clientToken) { var mapping = new EventBriteUserTokenMapping { PartitionKey = dddEventId.ToString(), RowKey = eventbriteOrderNo.ToString(), UserToken = userToken, ClientToken = clientToken }; var user = new User { PartitionKey = dddEventId.ToString(), RowKey = userToken, FirstName = firstName, LastName = lastName }; TableBatchOperation batch = new TableBatchOperation(); batch.Insert(mapping); batch.Insert(user); usersTable.ExecuteBatch(batch); }
protected override void SendBuffer(LoggingEvent[] events) { var grouped = events.GroupBy(evt => evt.LoggerName); foreach (var group in grouped) { foreach (var batch in group.Batch(100)) { var batchOperation = new TableBatchOperation(); foreach (var azureLoggingEvent in batch.Select(GetLogEntity)) { batchOperation.Insert(azureLoggingEvent); } _table.ExecuteBatch(batchOperation); } } }
public async Task<IEnumerable<string>> Post() { var watch = new Stopwatch(); var result = new List<string>(); var table = _client.GetTableReference(TableName); await table.CreateIfNotExistsAsync(); for (int i = 1; i < 11; i++) { watch.Restart(); var batch = new TableBatchOperation(); batch.Insert(new Customer { RowKey = "1000" + i, PartitionKey = "None", Forename = "Bernd", Surname = "Mayer" }); await table.ExecuteBatchAsync(batch); watch.Stop(); result.Add($"Azure Table Save {i} try: {watch.ElapsedMilliseconds}"); } return result; }
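The loop above issues one single-entity batch per iteration. As a rough timing comparison, a hedged sketch that collects ten rows (same "None" partition key, different RowKeys to avoid clashing with the rows above) into one batch; it assumes the same watch, table and result variables:

// Hedged comparison sketch: one batch for ten rows instead of ten single-row batches.
watch.Restart();
var combined = new TableBatchOperation();
for (int i = 1; i < 11; i++)
{
    combined.Insert(new Customer
    {
        RowKey = "2000" + i,
        PartitionKey = "None",
        Forename = "Bernd",
        Surname = "Mayer"
    });
}
await table.ExecuteBatchAsync(combined);
watch.Stop();
result.Add($"Azure Table Save batched: {watch.ElapsedMilliseconds}");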
public void Save(MessageLogEntity entity) { TableBatchOperation batchOperation = new TableBatchOperation(); this.retryPolicy.ExecuteAction(() => { var context = this.tableClient.GetTableReference(this.tableName); batchOperation.Insert(entity); try { context.ExecuteBatch(batchOperation); } catch { } }); }
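The empty catch in the Save method above hides failures from both the caller and the retry policy. A hedged sketch of the same call that at least surfaces the status code of a failed batch, using the context and batchOperation variables from the snippet:

// Hedged sketch: surface batch failures instead of swallowing them.
try
{
    context.ExecuteBatch(batchOperation);
}
catch (StorageException ex)
{
    // RequestInformation carries the HTTP status (e.g. 409 Conflict for an existing entity).
    Trace.TraceError("Batch insert failed with status {0}: {1}",
        ex.RequestInformation.HttpStatusCode, ex.Message);
}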
//save news to azure table from inparameter listItems public static void SaveNews(ListItemCollection listItems) { var batchOperation = new TableBatchOperation(); //make only one call to Azure Table, use Batch. foreach (ListItem item in listItems) { //Convert ListItems to Table-entries(Entity) var entity = new StebraEntity( "News", //string Stebratype item["Title"].ToString(), //string newsEntry "Descriptive text", //string NewsDescription item["Article"].ToString(), //string NewsArticle item["Datum"].ToString(), //string NewsDate item["Body"].ToString() //string NewsBody ); batchOperation.Insert(entity); //Batch this } NewsTable.ExecuteBatch(batchOperation); //Execute Batch }
private static async Task BackupPostCollection(DocumentCollection dc)
{
    var ds = from d in _client.CreateDocumentQuery<PostMessage>(dc.DocumentsLink)
             where d.Type == "Post"
             select d;

    TableBatchOperation batchOperation = new TableBatchOperation();
    foreach (var d in ds)
    {
        TablePost c = new TablePost(d.Type, d.id)
        {
            district = d.Path.District,
            school = d.Path.School,
            classes = d.Path.Classes,
            timestamp = d.Info.timestamp,
            user = d.Info.user,
            uid = d.Info.uid,
            message = d.Info.message
        };

        // Insert every document, then flush once the batch reaches the 100-operation limit,
        // so no document is skipped at the chunk boundary.
        batchOperation.Insert(c);
        await DeletePost(dc, d.id);
        if (batchOperation.Count == 100)
        {
            await _table.ExecuteBatchAsync(batchOperation);
            batchOperation = new TableBatchOperation();
        }
    }
    if (batchOperation.Count > 0)
    {
        await _table.ExecuteBatchAsync(batchOperation);
    }
}
protected override void SendBuffer(LoggingEvent[] events) { //Batched ops require single partition key, group //by loggername to obey requirment. var grouped = events.GroupBy(evt => evt.LoggerName); foreach (var group in grouped) { var batchOperation = new TableBatchOperation(); foreach (var azureLoggingEvent in group.Select(@event => new AzureLoggingEventEntity(@event))) { batchOperation.Insert(azureLoggingEvent); } _table.ExecuteBatch(batchOperation); } }
public static void MyClassInitialize(TestContext testContext) { tableClient = GenerateCloudTableClient(); currentTable = tableClient.GetTableReference(GenerateRandomTableName()); currentTable.CreateIfNotExists(); for (int i = 0; i < 15; i++) { TableBatchOperation batch = new TableBatchOperation(); for (int j = 0; j < 100; j++) { BaseEntity ent = GenerateRandomEntity("tables_batch_" + i.ToString()); ent.RowKey = string.Format("{0:0000}", j); batch.Insert(ent); } currentTable.ExecuteBatch(batch); } }
public void savetable(string partitionKey, string rowKey)
{
    var connectionString = ConfigurationManager.AppSettings["StorageConnectionString"];
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(connectionString);
    CloudTableClient tableClient = storageAccount.CreateCloudTableClient();
    CloudTable table = tableClient.GetTableReference("Login");

    // Create the entity to store.
    TableEntity table1 = new TableEntity();
    table1.PartitionKey = "*****@*****.**";
    table1.RowKey = "prateekTest12345";

    // Add the entity to the batch operation and execute it.
    TableBatchOperation batchOperation = new TableBatchOperation();
    batchOperation.Insert(table1);
    table.ExecuteBatch(batchOperation);
}
static void Main(string[] args) { var storageAccount = CloudStorageAccount.DevelopmentStorageAccount; var tableClient = storageAccount.CreateCloudTableClient(); var table = tableClient.GetTableReference("EventRegistrations"); table.DeleteIfExists(); table.CreateIfNotExists(); List<Registration> registrationList = new List<Registration>(); using (EventsContext context = new EventsContext()) { context.Database.Delete(); } using (EventsContext context = new EventsContext()) { context.Events.RemoveRange(context.Events); context.SaveChanges(); for (int i = 0; i < 55; i++) { var result = CreateEvent(context); registrationList.AddRange(result); } } registrationList = registrationList.OrderBy(r => rand.Next()).ToList(); foreach (var registrationGroup in registrationList.GroupBy(r => r.PartitionKey)) { TableBatchOperation operation = new TableBatchOperation(); foreach (var registration in registrationGroup) { operation.Insert(registration); } table.ExecuteBatch(operation); } }
public static void ProcessPlayerGame(Int64 playerId, PlayerGameData gameData, Guid gameId, Int64 gameSeconds) { // Create the batch operation. TableBatchOperation batchOperation = new TableBatchOperation(); //TODO: Get Player Row PlayerEntity player = PlayerEntity.GetPlayerEntity(playerId); batchOperation.InsertOrReplace(player); //Update Player Entity with Game Data player.TotalDeaths += gameData.Deaths; player.TotalKills += gameData.Kills; player.TotalPoints += gameData.Points; player.TotalWins += gameData.Win ? 1 : 0; player.TotalGames += 1; player.TotalSecondsPlayed += gameSeconds; //Create PlayerGame Row PlayerGameEntity playerGame = new PlayerGameEntity(playerId, gameId) { Points = gameData.Points, Win = gameData.Win, Kills = gameData.Kills, Deaths = gameData.Deaths, GameDuration = gameSeconds }; batchOperation.Insert(playerGame); try { StorageManager.Instance.PlayersTable.ExecuteBatch(batchOperation); } catch (Exception ex) { //TODO: handle exception, check if its because an entity already existed. //This means we've already handled this data. } }
private async Task DoTableBatchOperationsWithEmptyKeysAsync(TablePayloadFormat format) { tableClient.DefaultRequestOptions.PayloadFormat = format; // Insert Entity DynamicTableEntity ent = new DynamicTableEntity() { PartitionKey = "", RowKey = "" }; ent.Properties.Add("foo2", new EntityProperty("bar2")); ent.Properties.Add("foo", new EntityProperty("bar")); TableBatchOperation batch = new TableBatchOperation(); batch.Insert(ent); await currentTable.ExecuteBatchAsync(batch); // Retrieve Entity TableBatchOperation retrieveBatch = new TableBatchOperation(); retrieveBatch.Retrieve(ent.PartitionKey, ent.RowKey); TableResult result = (await currentTable.ExecuteBatchAsync(retrieveBatch)).First(); DynamicTableEntity retrievedEntity = result.Result as DynamicTableEntity; Assert.IsNotNull(retrievedEntity); Assert.AreEqual(ent.PartitionKey, retrievedEntity.PartitionKey); Assert.AreEqual(ent.RowKey, retrievedEntity.RowKey); Assert.AreEqual(ent.Properties.Count, retrievedEntity.Properties.Count); Assert.AreEqual(ent.Properties["foo"].StringValue, retrievedEntity.Properties["foo"].StringValue); Assert.AreEqual(ent.Properties["foo"], retrievedEntity.Properties["foo"]); Assert.AreEqual(ent.Properties["foo2"].StringValue, retrievedEntity.Properties["foo2"].StringValue); Assert.AreEqual(ent.Properties["foo2"], retrievedEntity.Properties["foo2"]); // InsertOrMerge DynamicTableEntity insertOrMergeEntity = new DynamicTableEntity(ent.PartitionKey, ent.RowKey); insertOrMergeEntity.Properties.Add("foo3", new EntityProperty("value")); batch = new TableBatchOperation(); batch.InsertOrMerge(insertOrMergeEntity); await currentTable.ExecuteBatchAsync(batch); result = (await currentTable.ExecuteBatchAsync(retrieveBatch)).First(); retrievedEntity = result.Result as DynamicTableEntity; Assert.IsNotNull(retrievedEntity); Assert.AreEqual(insertOrMergeEntity.Properties["foo3"], retrievedEntity.Properties["foo3"]); // InsertOrReplace DynamicTableEntity insertOrReplaceEntity = new DynamicTableEntity(ent.PartitionKey, ent.RowKey); insertOrReplaceEntity.Properties.Add("prop2", new EntityProperty("otherValue")); batch = new TableBatchOperation(); batch.InsertOrReplace(insertOrReplaceEntity); await currentTable.ExecuteBatchAsync(batch); result = (await currentTable.ExecuteBatchAsync(retrieveBatch)).First(); retrievedEntity = result.Result as DynamicTableEntity; Assert.IsNotNull(retrievedEntity); Assert.AreEqual(1, retrievedEntity.Properties.Count); Assert.AreEqual(insertOrReplaceEntity.Properties["prop2"], retrievedEntity.Properties["prop2"]); // Merge DynamicTableEntity mergeEntity = new DynamicTableEntity(retrievedEntity.PartitionKey, retrievedEntity.RowKey) { ETag = retrievedEntity.ETag }; mergeEntity.Properties.Add("mergeProp", new EntityProperty("merged")); batch = new TableBatchOperation(); batch.Merge(mergeEntity); await currentTable.ExecuteBatchAsync(batch); // Retrieve Entity & Verify Contents result = (await currentTable.ExecuteBatchAsync(retrieveBatch)).First(); retrievedEntity = result.Result as DynamicTableEntity; Assert.IsNotNull(retrievedEntity); Assert.AreEqual(mergeEntity.Properties["mergeProp"], retrievedEntity.Properties["mergeProp"]); // Replace DynamicTableEntity replaceEntity = new DynamicTableEntity(ent.PartitionKey, ent.RowKey) { ETag = retrievedEntity.ETag }; replaceEntity.Properties.Add("replaceProp", new EntityProperty("replace")); batch = new TableBatchOperation(); batch.Replace(replaceEntity); await currentTable.ExecuteBatchAsync(batch); // Retrieve Entity & Verify Contents result = (await 
currentTable.ExecuteBatchAsync(retrieveBatch)).First(); retrievedEntity = result.Result as DynamicTableEntity; Assert.IsNotNull(retrievedEntity); Assert.AreEqual(replaceEntity.Properties.Count, retrievedEntity.Properties.Count); Assert.AreEqual(replaceEntity.Properties["replaceProp"], retrievedEntity.Properties["replaceProp"]); // Delete Entity batch = new TableBatchOperation(); batch.Delete(retrievedEntity); await currentTable.ExecuteBatchAsync(batch); // Retrieve Entity result = (await currentTable.ExecuteBatchAsync(retrieveBatch)).First(); Assert.IsNull(result.Result); }
private void DoTableGenericQueryWithSpecificOnSupportedTypes(TablePayloadFormat format) { CloudTableClient client = GenerateCloudTableClient(); CloudTable table = client.GetTableReference(GenerateRandomTableName()); table.Create(); client.DefaultRequestOptions.PayloadFormat = format; try { // Setup TableBatchOperation batch = new TableBatchOperation(); string pk = Guid.NewGuid().ToString(); ComplexEntity middleRef = null; for (int m = 0; m < 100; m++) { ComplexEntity complexEntity = new ComplexEntity(pk, string.Format("{0:0000}", m)); complexEntity.String = string.Format("{0:0000}", m); complexEntity.Binary = new byte[] { 0x01, 0x02, (byte)m }; complexEntity.BinaryPrimitive = new byte[] { 0x01, 0x02, (byte)m }; complexEntity.Bool = m % 2 == 0 ? true : false; complexEntity.BoolPrimitive = m % 2 == 0 ? true : false; complexEntity.Double = m + ((double)m / 100); complexEntity.DoublePrimitive = m + ((double)m / 100); complexEntity.Int32 = m; complexEntity.IntegerPrimitive = m; complexEntity.Int64 = (long)int.MaxValue + m; complexEntity.LongPrimitive = (long)int.MaxValue + m; complexEntity.Guid = Guid.NewGuid(); batch.Insert(complexEntity); if (m == 50) { middleRef = complexEntity; } // Add delay to make times unique Thread.Sleep(100); } table.ExecuteBatch(batch); // 1. Filter on String ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterCondition("String", QueryComparisons.GreaterThanOrEqual, "0050"), 50); // 2. Filter on Guid ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForGuid("Guid", QueryComparisons.Equal, middleRef.Guid), 1); // 3. Filter on Long ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForLong("Int64", QueryComparisons.GreaterThanOrEqual, middleRef.LongPrimitive), 50); ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForLong("LongPrimitive", QueryComparisons.GreaterThanOrEqual, middleRef.LongPrimitive), 50); // 4. Filter on Double ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForDouble("Double", QueryComparisons.GreaterThanOrEqual, middleRef.Double), 50); ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForDouble("DoublePrimitive", QueryComparisons.GreaterThanOrEqual, middleRef.DoublePrimitive), 50); // 5. Filter on Integer ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForInt("Int32", QueryComparisons.GreaterThanOrEqual, middleRef.Int32), 50); ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForInt("IntegerPrimitive", QueryComparisons.GreaterThanOrEqual, middleRef.IntegerPrimitive), 50); // 6. Filter on Date ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForDate("DateTimeOffset", QueryComparisons.GreaterThanOrEqual, middleRef.DateTimeOffset), 50); // 7. Filter on Boolean ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForBool("Bool", QueryComparisons.Equal, middleRef.Bool), 50); ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForBool("BoolPrimitive", QueryComparisons.Equal, middleRef.BoolPrimitive), 50); // 8. Filter on Binary ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForBinary("Binary", QueryComparisons.Equal, middleRef.Binary), 1); ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForBinary("BinaryPrimitive", QueryComparisons.Equal, middleRef.BinaryPrimitive), 1); // 9. 
Filter on Binary GTE ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForBinary("Binary", QueryComparisons.GreaterThanOrEqual, middleRef.Binary), 50); ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForBinary("BinaryPrimitive", QueryComparisons.GreaterThanOrEqual, middleRef.BinaryPrimitive), 50); // 10. Complex Filter on Binary GTE ExecuteQueryAndAssertResults(table, TableQuery.CombineFilters( TableQuery.GenerateFilterCondition("PartitionKey", QueryComparisons.Equal, middleRef.PartitionKey), TableOperators.And, TableQuery.GenerateFilterConditionForBinary("Binary", QueryComparisons.GreaterThanOrEqual, middleRef.Binary)), 50); ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForBinary("BinaryPrimitive", QueryComparisons.GreaterThanOrEqual, middleRef.BinaryPrimitive), 50); } finally { table.DeleteIfExists(); } }
private async Task DoTableRegionalQueryOnSupportedTypesAsync(TablePayloadFormat format) { CultureInfo currentCulture = Thread.CurrentThread.CurrentCulture; Thread.CurrentThread.CurrentCulture = new CultureInfo("tr-TR"); CloudTableClient client = GenerateCloudTableClient(); client.DefaultRequestOptions.PayloadFormat = format; CloudTable table = client.GetTableReference(GenerateRandomTableName()); await table.CreateAsync(); try { // Setup TableBatchOperation batch = new TableBatchOperation(); string pk = Guid.NewGuid().ToString(); DynamicTableEntity middleRef = null; for (int m = 0; m < 100; m++) { ComplexEntity complexEntity = new ComplexEntity(); complexEntity.String = string.Format("{0:0000}", m); complexEntity.Binary = new byte[] { 0x01, 0x02, (byte)m }; complexEntity.BinaryPrimitive = new byte[] { 0x01, 0x02, (byte)m }; complexEntity.Bool = m % 2 == 0 ? true : false; complexEntity.BoolPrimitive = m % 2 == 0 ? true : false; complexEntity.Double = m + ((double)m / 100); complexEntity.DoublePrimitive = m + ((double)m / 100); complexEntity.Int32 = m; complexEntity.IntegerPrimitive = m; complexEntity.Int64 = (long)int.MaxValue + m; complexEntity.LongPrimitive = (long)int.MaxValue + m; complexEntity.Guid = Guid.NewGuid(); DynamicTableEntity dynEnt = new DynamicTableEntity(pk, string.Format("{0:0000}", m)); dynEnt.Properties = complexEntity.WriteEntity(null); batch.Insert(dynEnt); if (m == 50) { middleRef = dynEnt; } // Add delay to make times unique await Task.Delay(100); } await table.ExecuteBatchAsync(batch); // 1. Filter on String ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterCondition("String", QueryComparisons.GreaterThanOrEqual, "0050"), 50); // 2. Filter on Guid ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForGuid("Guid", QueryComparisons.Equal, middleRef.Properties["Guid"].GuidValue.Value), 1); // 3. Filter on Long ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForLong("Int64", QueryComparisons.GreaterThanOrEqual, middleRef.Properties["LongPrimitive"].Int64Value.Value), 50); ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForLong("LongPrimitive", QueryComparisons.GreaterThanOrEqual, middleRef.Properties["LongPrimitive"].Int64Value.Value), 50); // 4. Filter on Double ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForDouble("Double", QueryComparisons.GreaterThanOrEqual, middleRef.Properties["Double"].DoubleValue.Value), 50); ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForDouble("DoublePrimitive", QueryComparisons.GreaterThanOrEqual, middleRef.Properties["DoublePrimitive"].DoubleValue.Value), 50); // 5. Filter on Integer ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForInt("Int32", QueryComparisons.GreaterThanOrEqual, middleRef.Properties["Int32"].Int32Value.Value), 50); ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForInt("IntegerPrimitive", QueryComparisons.GreaterThanOrEqual, middleRef.Properties["IntegerPrimitive"].Int32Value.Value), 50); // 6. Filter on Date ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForDate("DateTimeOffset", QueryComparisons.GreaterThanOrEqual, middleRef.Properties["DateTimeOffset"].DateTimeOffsetValue.Value), 50); // 7. 
Filter on Boolean ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForBool("Bool", QueryComparisons.Equal, middleRef.Properties["Bool"].BooleanValue.Value), 50); ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForBool("BoolPrimitive", QueryComparisons.Equal, middleRef.Properties["BoolPrimitive"].BooleanValue.Value), 50); // 8. Filter on Binary ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForBinary("Binary", QueryComparisons.Equal, middleRef.Properties["Binary"].BinaryValue), 1); ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForBinary("BinaryPrimitive", QueryComparisons.Equal, middleRef.Properties["BinaryPrimitive"].BinaryValue), 1); // 9. Filter on Binary GTE ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForBinary("Binary", QueryComparisons.GreaterThanOrEqual, middleRef.Properties["Binary"].BinaryValue), 50); ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForBinary("BinaryPrimitive", QueryComparisons.GreaterThanOrEqual, middleRef.Properties["BinaryPrimitive"].BinaryValue), 50); // 10. Complex Filter on Binary GTE ExecuteQueryAndAssertResults(table, TableQuery.CombineFilters( TableQuery.GenerateFilterCondition("PartitionKey", QueryComparisons.Equal, middleRef.PartitionKey), TableOperators.And, TableQuery.GenerateFilterConditionForBinary("Binary", QueryComparisons.GreaterThanOrEqual, middleRef.Properties["Binary"].BinaryValue)), 50); ExecuteQueryAndAssertResults(table, TableQuery.GenerateFilterConditionForBinary("BinaryPrimitive", QueryComparisons.GreaterThanOrEqual, middleRef.Properties["BinaryPrimitive"].BinaryValue), 50); } finally { Thread.CurrentThread.CurrentCulture = currentCulture; table.DeleteIfExistsAsync().Wait(); } }
public static void MyClassInitialize(TestContext testContext) { CloudTableClient tableClient = GenerateCloudTableClient(); currentTable = tableClient.GetTableReference(GenerateRandomTableName()); currentTable.CreateIfNotExists(); // Bulk Query Entities for (int i = 0; i < 15; i++) { TableBatchOperation batch = new TableBatchOperation(); for (int j = 0; j < 100; j++) { var ent = GenerateRandomEnitity("tables_batch_" + i.ToString()); ent.RowKey = string.Format("{0:0000}", j); batch.Insert(ent); } currentTable.ExecuteBatch(batch); } complexEntityTable = tableClient.GetTableReference(GenerateRandomTableName()); complexEntityTable.Create(); // Setup TableBatchOperation complexBatch = new TableBatchOperation(); string pk = Guid.NewGuid().ToString(); for (int m = 0; m < 100; m++) { ComplexEntity complexEntity = new ComplexEntity(pk, string.Format("{0:0000}", m)); complexEntity.String = string.Format("{0:0000}", m); complexEntity.Binary = new byte[] { 0x01, 0x02, (byte)m }; complexEntity.BinaryPrimitive = new byte[] { 0x01, 0x02, (byte)m }; complexEntity.Bool = m % 2 == 0 ? true : false; complexEntity.BoolPrimitive = m % 2 == 0 ? true : false; complexEntity.Double = m + ((double)m / 100); complexEntity.DoublePrimitive = m + ((double)m / 100); complexEntity.Int32 = m; complexEntity.Int32N = m; complexEntity.IntegerPrimitive = m; complexEntity.IntegerPrimitiveN = m; complexEntity.Int64 = (long)int.MaxValue + m; complexEntity.LongPrimitive = (long)int.MaxValue + m; complexEntity.LongPrimitiveN = (long)int.MaxValue + m; complexEntity.Guid = Guid.NewGuid(); complexBatch.Insert(complexEntity); if (m == 50) { middleRef = complexEntity; } // Add delay to make times unique Thread.Sleep(100); } complexEntityTable.ExecuteBatch(complexBatch); }
private async Task Backup(dynamic room) { try { if (room.Value.messages.Value != "") { var origin = new List<dynamic>(); foreach (var message in room.Value.messages) { origin.Add(message); } var sorted = origin.OrderBy(o => o.Value.timestamp).ToList(); if (sorted.Count <= RecordRemained) return; TableBatchOperation batchOperation = new TableBatchOperation(); for (int i = 0; i < sorted.Count - RecordRemained; i++) { var message = sorted[i]; TableChat c = _iDbService.TableChatData(room, message); batchOperation.Insert(c); if (batchOperation.Count == 100) { var operation = batchOperation; var res = await _retryPolicy.ExecuteAsync( () => _table.ExecuteBatchAsync(operation)); await DeleteFirebase(res); batchOperation = new TableBatchOperation(); } } if (batchOperation.Count > 0) { var operation = batchOperation; var res = await _retryPolicy.ExecuteAsync( () => _table.ExecuteBatchAsync(operation)); await DeleteFirebase(res); } } } catch (Exception e) { /* //retry is retryable if (retry > 0 && (e.RequestInformation.HttpStatusCode == 500 || e.RequestInformation.HttpStatusCode == 503)) { retry--; Thread.Sleep(1000); Backup(room, table, retry).Wait(); } else {*/ Trace.TraceError("Error in room : " + room.Name + e.Message); //} } }
private static void InsertBatchOfEntities(string tableName) { try { //Create Table client CloudTableClient tableClient = storageAcc.CreateCloudTableClient(); //Create Table CloudTable table = tableClient.GetTableReference(tableName); table.CreateIfNotExists(); //Create a new Entity Employee employee1 = new Employee("1", "spiderman"); employee1.LastName = "Parker"; employee1.FirstName = "Peter"; //Create a new Entity Employee employee2 = new Employee("1", "superman"); employee2.LastName = "Kent"; employee2.FirstName = "Clark"; //Create a new Entity Employee employee3 = new Employee("1", "tableman"); employee3.LastName = "Man"; employee3.FirstName = "Table"; //Use a TableOperation to insert entity TableBatchOperation batchInsert = new TableBatchOperation(); batchInsert.Insert(employee1); batchInsert.Insert(employee2); batchInsert.Insert(employee3); //Go ahead... Insert now... table.ExecuteBatch(batchInsert); Write(string.Format("Employees inserted in {0}", tableName)); } catch (Exception ex) { Write(ex.Message); } }
public void TableQueryWithRetryAPM() { CloudTableClient tableClient = GenerateCloudTableClient(); TableBatchOperation batch = new TableBatchOperation(); for (int m = 0; m < 1500; m++) { // Insert Entity DynamicTableEntity insertEntity = new DynamicTableEntity("insert test", m.ToString()); insertEntity.Properties.Add("prop" + m.ToString(), new EntityProperty(new byte[1 * 1024])); batch.Insert(insertEntity); if ((m + 1) % 100 == 0) { currentTable.ExecuteBatch(batch); batch = new TableBatchOperation(); } } TableQuery query = new TableQuery().Where(TableQuery.GenerateFilterCondition("PartitionKey", QueryComparisons.Equal, "insert test")); TestHelper.ExecuteAPMMethodWithRetry( 2, // 1 failure, one success new[] { //Insert upstream network delay to prevent upload to server @ 1000ms / kb PerformanceBehaviors.InsertDownstreamNetworkDelay(10000, AzureStorageSelectors.TableTraffic().IfHostNameContains(tableClient.Credentials.AccountName).Alternating(true)), // After 100 ms return throttle message DelayedActionBehaviors.ExecuteAfter(Actions.ThrottleTableRequest, 100, AzureStorageSelectors.TableTraffic().IfHostNameContains(tableClient.Credentials.AccountName).Alternating(true)) }, (options, opContext, callback, state) => currentTable.BeginExecuteQuerySegmented(query, null, (TableRequestOptions)options, opContext, callback, state), (res) => currentTable.EndExecuteQuerySegmented(res)); }
public void TableQueryWithRetrySync() { CloudTableClient tableClient = GenerateCloudTableClient(); TableBatchOperation batch = new TableBatchOperation(); for (int m = 0; m < 1500; m++) { // Insert Entity DynamicTableEntity insertEntity = new DynamicTableEntity("insert test", m.ToString()); batch.Insert(insertEntity); if ((m + 1) % 100 == 0) { currentTable.ExecuteBatch(batch); batch = new TableBatchOperation(); } } TableQuery query = new TableQuery().Where(TableQuery.GenerateFilterCondition("PartitionKey", QueryComparisons.Equal, "insert test")); TestHelper.ExecuteMethodWithRetry( 4, // 2 segments, 2 failures new[] { //Insert upstream network delay to prevent upload to server @ 1000ms / kb PerformanceBehaviors.InsertDownstreamNetworkDelay(10000, AzureStorageSelectors.TableTraffic().IfHostNameContains(tableClient.Credentials.AccountName).Alternating(true), new BehaviorOptions(4)), // After 100 ms return throttle message DelayedActionBehaviors.ExecuteAfter(Actions.ThrottleTableRequest, 100, AzureStorageSelectors.TableTraffic().IfHostNameContains(tableClient.Credentials.AccountName).Alternating(true), new BehaviorOptions(4)) }, (options, opContext) => currentTable.ExecuteQuery(query, (TableRequestOptions)options, opContext).ToList()); }
private async Task DoTableBatchAllSupportedOperationsAsync(TablePayloadFormat format) { tableClient.DefaultRequestOptions.PayloadFormat = format; TableBatchOperation batch = new TableBatchOperation(); string pk = Guid.NewGuid().ToString(); // insert batch.Insert(GenerateRandomEntity(pk)); // delete { DynamicTableEntity entity = GenerateRandomEntity(pk); await currentTable.ExecuteAsync(TableOperation.Insert(entity)); batch.Delete(entity); } // replace { DynamicTableEntity entity = GenerateRandomEntity(pk); await currentTable.ExecuteAsync(TableOperation.Insert(entity)); batch.Replace(entity); } // insert or replace { DynamicTableEntity entity = GenerateRandomEntity(pk); await currentTable.ExecuteAsync(TableOperation.Insert(entity)); batch.InsertOrReplace(entity); } // merge { DynamicTableEntity entity = GenerateRandomEntity(pk); await currentTable.ExecuteAsync(TableOperation.Insert(entity)); batch.Merge(entity); } // insert or merge { DynamicTableEntity entity = GenerateRandomEntity(pk); await currentTable.ExecuteAsync(TableOperation.Insert(entity)); batch.InsertOrMerge(entity); } IList<TableResult> results = await currentTable.ExecuteBatchAsync(batch); Assert.AreEqual(results.Count, 6); IEnumerator<TableResult> enumerator = results.GetEnumerator(); enumerator.MoveNext(); Assert.AreEqual(enumerator.Current.HttpStatusCode, (int)HttpStatusCode.Created); enumerator.MoveNext(); Assert.AreEqual(enumerator.Current.HttpStatusCode, (int)HttpStatusCode.NoContent); enumerator.MoveNext(); Assert.AreEqual(enumerator.Current.HttpStatusCode, (int)HttpStatusCode.NoContent); enumerator.MoveNext(); Assert.AreEqual(enumerator.Current.HttpStatusCode, (int)HttpStatusCode.NoContent); enumerator.MoveNext(); Assert.AreEqual(enumerator.Current.HttpStatusCode, (int)HttpStatusCode.NoContent); enumerator.MoveNext(); Assert.AreEqual(enumerator.Current.HttpStatusCode, (int)HttpStatusCode.NoContent); }