/// <summary>
/// Upserts the given line entity: inserts it, or replaces any existing
/// entity that shares the same partition/row key.
/// </summary>
/// <param name="linija">The entity to insert or replace.</param>
public void AddOrReplaceLinija(Linija linija)
{
    var upsert = TableOperation.InsertOrReplace(linija);
    table.Execute(upsert);
}
//bookstore
/// <summary>
/// Upserts the given bookstore entity into the bookstore table
/// (insert, or replace when the key already exists).
/// </summary>
/// <param name="Bookstore">The entity to insert or replace.</param>
public void AddOrReplaceBookstore(Bookstore Bookstore)
{
    _tableBookstore.Execute(TableOperation.InsertOrReplace(Bookstore));
}
/// <summary>
/// Inserts or replaces an image record in the account's table within the given
/// storage partition. When <paramref name="isListing"/> is true, a copy is also
/// written to the "listing" variation of the table.
/// </summary>
/// <param name="accountId">Account whose table-name prefix is used.</param>
/// <param name="storagePartition">Storage partition that holds the account's tables.</param>
/// <param name="imageGroupTypeNameKey">Selects the table suffix (product/category/account images).</param>
/// <param name="objectId">Entity PartitionKey.</param>
/// <param name="isListing">When true, mirror the record into the listing table.</param>
/// <returns>Response with isSuccess, and ErrorMessage on failure.</returns>
internal static DataAccessResponseType StoreImageRecord(string accountId, string storagePartition, string imageGroupTypeNameKey, string objectId, string imageGroupName, string imageGroupNameKey, string imageFormatName, string imageFormatNameKey, string title, string description, string url, string filename, string filepath, string containerName, string blobPath, int height, int width, bool isListing = false)
{
    var response = new DataAccessResponseType();

    var imageRecordEntity = new ImageRecordTableEntity
    {
        ObjectId = objectId, //<-- PartitionKey
        ImageKey = imageGroupNameKey + "-" + imageFormatNameKey, //<-- RowKey
        ImageGroup = imageGroupName,
        ImageGroupKey = imageGroupNameKey,
        ImageFormat = imageFormatName,
        ImageFormatKey = imageFormatNameKey,
        Title = title,
        Description = description,
        Url = url,
        FileName = filename,
        FilePath = filepath,
        BlobPath = blobPath,
        ContainerName = containerName,
        Height = height,
        Width = width
    };

    // Resolve the partition-specific storage account and smooth over transient
    // failures with a linear retry policy (4 attempts, 1 second apart).
    CloudTableClient cloudTableClient = Settings.Azure.Storage.GetStoragePartitionAccount(storagePartition).CreateCloudTableClient();
    IRetryPolicy linearRetryPolicy = new LinearRetry(TimeSpan.FromSeconds(1), 4);
    cloudTableClient.DefaultRequestOptions.RetryPolicy = linearRetryPolicy;

    TableOperation operation = TableOperation.InsertOrReplace(imageRecordEntity as TableEntity);

    //<-- accxxxxxproductimages / accxxxxxcategoryimages / accxxxxxaccountimages
    string tableName = Sahara.Core.Common.Methods.SchemaNames.AccountIdToTableStorageName(accountId) + Internal.ImageRecordTableStorage.ImageRecordTableName(imageGroupTypeNameKey);
    string listingTablename = Sahara.Core.Common.Methods.SchemaNames.AccountIdToTableStorageName(accountId) + Internal.ImageRecordTableStorage.ImageRecordListingTableName(imageGroupTypeNameKey);

    // Azure storage table names are limited to 63 characters.
    if (tableName.Length > 63 || listingTablename.Length > 63)
    {
        return (new DataAccessResponseType
        {
            isSuccess = false,
            ErrorMessage = "Storage table names cannot be longer than 63 characters!"
        });
    }

    try
    {
        CloudTable cloudTable = cloudTableClient.GetTableReference(tableName);
        cloudTable.CreateIfNotExists();
        cloudTable.Execute(operation);

        if (isListing)
        {
            // If this is a listing, we also add a copy to the listing variation of the table.
            CloudTable listingTable = cloudTableClient.GetTableReference(listingTablename);
            listingTable.CreateIfNotExists();
            listingTable.Execute(operation);
        }

        response.isSuccess = true;
    }
    catch (Exception ex)
    {
        // BUGFIX: the failure reason was previously swallowed by an empty catch,
        // leaving callers with isSuccess=false and no diagnostic information.
        response.isSuccess = false;
        response.ErrorMessage = ex.Message;
    }

    return (response);
}
/// <summary>
/// Persists a log message to the configured sink. ERROR-level messages always
/// go straight to the CloudBreadErrorLog SQL table; other levels are routed by
/// CloudBreadLoggerSetting: "SQL" (admin log table), "ATS" (Azure Table
/// Storage), "AQS" (Azure Queue Storage), "DocDB" (not implemented), anything
/// else discards the message.
/// </summary>
/// <param name="message">The log payload to persist.</param>
/// <returns>Always true; persistence failures are rethrown to the caller.</returns>
public static bool RunLog(CBLoggers message)
{
    if (globalVal.CloudBreadLoggerSetting != "")
    {
        if (string.IsNullOrEmpty(message.memberID))
        {
            message.memberID = "";
        }

        // ERROR messages are stored directly in DB - CloudBreadErrorLog.
        if (message.Level.ToUpper() == "ERROR")
        {
            try
            {
                InsertLogRow("dbo.CloudBreadErrorLog", message);
            }
            catch (Exception)
            {
                // DB logging failed - rethrow to the caller.
                throw;
            }
        }
        else
        {
            // Store the user log according to the configured setting.
            try
            {
                switch (globalVal.CloudBreadLoggerSetting)
                {
                    case "SQL": // store in DB
                        InsertLogRow("dbo.CloudBreadAdminLog", message);
                        break;

                    case "ATS": // store independently in Azure Table Storage
                    {
                        CloudStorageAccount storageAccountQ = CloudStorageAccount.Parse(globalVal.StorageConnectionString);
                        CloudTableClient tableClient = storageAccountQ.CreateCloudTableClient();
                        CloudTable table = tableClient.GetTableReference("CloudBreadAdminLog");
                        CBATSMessageEntity entity = BuildLogEntity(message); // memberID is the partition key
                        table.Execute(TableOperation.Insert(entity));
                        break;
                    }

                    case "AQS": // store in Azure Queue Storage
                    {
                        CloudStorageAccount storageAccount = CloudStorageAccount.Parse(globalVal.StorageConnectionString);
                        CloudQueueClient queueClient = storageAccount.CreateCloudQueueClient();
                        CloudQueue queue = queueClient.GetQueueReference("messagestoadminlog"); // must be lowercase
                        CBATSMessageEntity entity = BuildLogEntity(message);
                        CloudQueueMessage Qmessage = new CloudQueueMessage(JsonConvert.SerializeObject(entity));
                        queue.AddMessage(Qmessage);
                        break;
                    }

                    case "DocDB": // store in DocDB (not implemented)
                        break;

                    default: // do not store
                        break;
                }
            }
            catch (Exception)
            {
                // Persistence failed - rethrow so the caller can retry.
                throw;
            }
        }
    }
    return (true);
}

/// <summary>
/// Inserts one log row into the given SQL table using parameterized SQL.
/// BUGFIX: the original built the INSERT with string.Format, which is
/// vulnerable to SQL injection through any of the message fields. The table
/// name is an internal constant, never user input.
/// </summary>
private static void InsertLogRow(string tableName, CBLoggers message)
{
    string strQuery = "insert into " + tableName +
        "(memberid, jobID, [Thread], [Level], [Logger], [Message], [Exception]) " +
        "values(@memberID, @jobID, @thread, @level, @logger, @message, @exception)";

    using (SqlConnection connection = new SqlConnection(globalVal.DBConnectionString))
    using (SqlCommand command = new SqlCommand(strQuery, connection))
    {
        command.Parameters.AddWithValue("@memberID", (object)message.memberID ?? DBNull.Value);
        command.Parameters.AddWithValue("@jobID", (object)message.jobID ?? DBNull.Value);
        command.Parameters.AddWithValue("@thread", (object)message.Thread ?? DBNull.Value);
        command.Parameters.AddWithValue("@level", (object)message.Level ?? DBNull.Value);
        command.Parameters.AddWithValue("@logger", (object)message.Logger ?? DBNull.Value);
        command.Parameters.AddWithValue("@message", (object)message.Message ?? DBNull.Value);
        command.Parameters.AddWithValue("@exception", (object)message.Exception ?? DBNull.Value);

        connection.Open();
        command.ExecuteNonQuery();
    }
}

/// <summary>
/// Builds the ATS/AQS entity for a log message (memberID as partition key,
/// a fresh GUID as row key).
/// </summary>
private static CBATSMessageEntity BuildLogEntity(CBLoggers message)
{
    CBATSMessageEntity entity = new CBATSMessageEntity(message.memberID, Guid.NewGuid().ToString());
    entity.jobID = message.jobID;
    entity.Date = DateTimeOffset.UtcNow.ToString();
    entity.Thread = message.Thread;
    entity.Level = message.Level;
    entity.Logger = message.Logger;
    entity.Message = message.Message;
    entity.Exception = message.Exception;
    return entity;
}
/// <summary>Inserts a single log entity into the backing cloud table.</summary>
/// <param name="entity">The log record to insert.</param>
public void Add(LogEntity entity)
{
    _cloudTable.Execute(TableOperation.Insert(entity));
}
/// <summary>
/// Update the destination point value in word file by source point value.
/// Groups the source point's destination points by catalog (one document per
/// catalog), updates each document's bookmarks in parallel, then records the
/// aggregated publish status in the bound Azure table.
/// </summary>
/// <param name="message">Queue message identifying the publish batch, source point and history.</param>
/// <param name="tableBinding">Azure table receiving the PublishStatusEntity result row.</param>
/// <param name="log">WebJob log writer.</param>
public void ProcessQueueMessage(
    [QueueTrigger(Constant.PUBLISH_QUEUE_NAME)] PublishedMessage message,
    [Table(Constant.PUBLISH_TABLE_NAME)] CloudTable tableBinding,
    TextWriter log)
{
    try
    {
        var retValue = new PublishStatusEntity(message.PublishBatchId.ToString(), message.SourcePointId.ToString(), message.PublishHistoryId.ToString());
        // NOTE(review): .Result blocks synchronously on the async service call;
        // fine in a WebJob, but a deadlock risk on a sync-context host - confirm.
        var publishHistory = _sourceService.GetPublishHistoryByIdAsync(message.PublishHistoryId).Result;
        if (publishHistory != null)
        {
            var destinationPoints = _destinationService.GetDestinationPointBySourcePointAsync(publishHistory.SourcePointId);
            // Group by catalog so each document is updated exactly once.
            var groupedDestinationPoints = destinationPoints.Result.GroupBy(o => o.CatalogId);
            var publishValue = publishHistory.Value;
            IDictionary <string, Task <DocumentUpdateResult> > tasks = new Dictionary <string, Task <DocumentUpdateResult> >();
            foreach (var sameCatalogDestinationPoints in groupedDestinationPoints)
            {
                try
                {
                    var documentId = sameCatalogDestinationPoints.First().Catalog.DocumentId;
                    var points = sameCatalogDestinationPoints.Select(o => o);
                    // Start the bookmark update for this document; awaited below via WaitAll.
                    tasks.Add(documentId, _documentService.UpdateBookmarkValueAsync(documentId, points, publishValue));
                }
                catch (Exception ex)
                {
                    // Failing to start one document's update does not abort the others.
                    log.Write($"Publish the source point to file '{message.SourcePointId}' failed due to {ex.ToString()}");
                }
            }
            Task.WaitAll(tasks.Values.ToArray());
            // NOTE(review): if any task faulted, WaitAll above throws into the outer
            // catch, so reading o.Value.Result here should be safe - confirm.
            var errorItems = tasks.Where(o => o.Value.Result.IsSuccess == false || o.Value.IsFaulted);
            retValue.Comments = String.Join("\n\n", tasks.Select(o => $"{o.Key}:\t{String.Join("\n", o.Value.Result.Message)}"));
            if (errorItems.Count() > 0)
            {
                retValue.Status = PublishStatus.Error;
                retValue.ErrorSummary = $"Update files: {String.Join(";", errorItems.Select(o => o.Key))} failed";
                retValue.ErrorDetail = String.Join("\n", errorItems.SelectMany(o => o.Value.Result.Message));
                log.Write($"Update the documents {retValue.ErrorSummary} failed due to {retValue.ErrorSummary} ");
            }
            else
            {
                retValue.Status = PublishStatus.Completed;
                log.Write($"Update the documents successfully.");
            }
        }
        else
        {
            retValue.Status = PublishStatus.Error;
            retValue.ErrorSummary = "The publish history cannot be found.";
            log.Write($"The publish history related to the source point: '{message.SourcePointId}' cannot be found.");
        }
        // Upsert the status row regardless of outcome.
        tableBinding.Execute(TableOperation.InsertOrReplace(retValue));
        log.Write("Publish is finished.");
    }
    catch (Exception ex)
    {
        log.Write($"Publish the source point: '{message.SourcePointId}' failed due to {ex.ToString()}");
    }
}
//******************************
//*                            *
//*  InsertUpdateEntity_Click  *
//*                            *
//******************************
// Insert or update the entity in cloud storage.
//
// Builds an ElasticTableEntity from the dialog's key/field controls, converting
// each field's text to the type chosen in its combo box, then executes either
// an Insert (when IsAddNew) or an unconditional Replace against the table.
private void InsertUpdateEntity_Click(object sender, RoutedEventArgs e)
{
    String action = "update entity";
    if (IsAddNew)
    {
        action = "insert entity";
    }

    // Construct entity keys from the dialog.
    ElasticTableEntity entity = new ElasticTableEntity();
    entity.RowKey = RowKey.Text;
    entity.PartitionKey = PartitionKey.Text;

    int fieldId;
    String fieldName, fieldType, fieldValue;

    // Convert each (name, type, value) row into a typed entity property.
    foreach (KeyValuePair <int, TextBox> field in fieldNames)
    {
        fieldId = field.Key;
        TextBox nameTextBox = field.Value;
        ComboBox typeComboBox = fieldTypes[fieldId];
        TextBox valueTextBox = fieldValues[fieldId];

        fieldName = nameTextBox.Text;
        if (String.IsNullOrEmpty(fieldName))
        {
            // BUGFIX: message previously read "propert name".
            MessageBox.Show("Cannot " + action + ": '" + fieldName + "' is not a valid property name", "Invalid Property Name");
            return;
        }

        ComboBoxItem item = typeComboBox.SelectedItem as ComboBoxItem;
        fieldType = item.Content as String;
        fieldValue = valueTextBox.Text;

        switch (fieldType)
        {
            case "Guid":
            {
                Guid guidValue;
                if (Guid.TryParse(fieldValue, out guidValue))
                {
                    entity[fieldName] = guidValue;
                }
                else
                {
                    // BUGFIX: use the action-aware prefix like the other branches
                    // (previously hard-coded "Cannot update entity").
                    MessageBox.Show("Cannot " + action + ": " + fieldName + " does not contain a valid GUID value: " + fieldValue, "Invalid Value");
                    this.Cursor = Cursors.Arrow;
                    return;
                }
            }
            break;

            case "String":
                entity[fieldName] = fieldValue;
                break;

            case "Binary":
            {
                try
                {
                    // Parse space-separated hex byte values, e.g. "0A FF 10".
                    string hexValues = fieldValue;
                    string[] hexValuesSplit = hexValues.Split(' ');
                    byte[] bytes = new byte[hexValuesSplit.Length];
                    int offset = 0;
                    foreach (String hex in hexValuesSplit)
                    {
                        bytes[offset++] = (byte)Convert.ToInt32(hex, 16);
                    }
                    entity[fieldName] = bytes;
                }
                catch (Exception) // exception detail is not shown to the user (was an unused 'ex')
                {
                    MessageBox.Show("Cannot " + action + ": " + fieldName + " does not contain a valid hexadecimal bytes representation: " + fieldValue, "Invalid Value");
                    this.Cursor = Cursors.Arrow;
                    return;
                }
            }
            break;

            case "Boolean":
            {
                bool boolValue = false;
                // Normalize common truthy/falsy spellings before parsing.
                switch (fieldValue.ToLower())
                {
                    case "1":
                    case "true":
                    case "yes":
                    case "on":
                        fieldValue = "True";
                        break;
                    case "0":
                    case "false":
                    case "no":
                    case "off":
                        fieldValue = "False";
                        break;
                }
                if (Boolean.TryParse(fieldValue, out boolValue))
                {
                    entity[fieldName] = boolValue;
                }
                else
                {
                    MessageBox.Show("Cannot " + action + ": " + fieldName + " does not contain a valid boolean value: " + fieldValue, "Invalid Value");
                    this.Cursor = Cursors.Arrow;
                    return;
                }
            }
            break;

            case "DateTime":
            {
                DateTime dateValue;
                if (DateTime.TryParse(fieldValue, out dateValue))
                {
                    entity[fieldName] = dateValue;
                }
                else
                {
                    // BUGFIX: action-aware prefix (previously "Cannot update entity").
                    MessageBox.Show("Cannot " + action + ": " + fieldName + " does not contain a valid DateTime value: " + fieldValue, "Invalid Value");
                    this.Cursor = Cursors.Arrow;
                    return;
                }
            }
            break;

            case "Double":
            {
                double doubleValue = 0;
                if (Double.TryParse(fieldValue, out doubleValue))
                {
                    entity[fieldName] = doubleValue;
                }
                else
                {
                    MessageBox.Show("Cannot " + action + ": " + fieldName + " does not contain a valid double-precision value: " + fieldValue, "Invalid Value");
                    this.Cursor = Cursors.Arrow;
                    return;
                }
            }
            break;

            case "Int32":
            {
                int intValue = 0;
                if (Int32.TryParse(fieldValue, out intValue))
                {
                    entity[fieldName] = intValue;
                }
                else
                {
                    MessageBox.Show("Cannot " + action + ": " + fieldName + " does not contain a valid Int32 value: " + fieldValue, "Invalid Value");
                    this.Cursor = Cursors.Arrow;
                    return;
                }
            }
            break;

            case "Int64":
            {
                Int64 intValue = 0;
                if (Int64.TryParse(fieldValue, out intValue))
                {
                    entity[fieldName] = intValue;
                }
                else
                {
                    MessageBox.Show("Cannot " + action + ": " + fieldName + " does not contain a valid Int64 value: " + fieldValue, "Invalid Value");
                    this.Cursor = Cursors.Arrow;
                    return;
                }
            }
            break;

            case "Null":
                // Type "Null" means, do not add to entity.
                break;

            default:
                MessageBox.Show("Cannot " + action + ": unknown type '" + fieldType + "'");
                this.Cursor = Cursors.Arrow;
                return;
        }
    } // next field

    try
    {
        if (IsAddNew)
        {
            // Insert entity and keep dialog open.
            this.Cursor = Cursors.Wait;
            Table.Execute(TableOperation.Insert(entity));
            RecordsAdded++;
            Message.Text = "Records Added: " + RecordsAdded.ToString();
            CmdClose.Content = new TextBlock() { Text = "Close" };
            this.Cursor = Cursors.Arrow;
            RowKey.Focus();
        }
        else
        {
            // Update entity and close dialog. ETag "*" forces an unconditional replace.
            this.Cursor = Cursors.Wait;
            entity.ETag = "*";
            Table.Execute(TableOperation.Replace(entity));
            RecordsUpdated++;
            // BUGFIX: message previously read "Records Updaed".
            Message.Text = "Records Updated: " + RecordsUpdated.ToString();
            CmdClose.Content = new TextBlock() { Text = "Close" };
            this.Cursor = Cursors.Arrow;
            DialogResult = true;
        }
    }
    catch (Exception ex)
    {
        this.Cursor = Cursors.Arrow;
        if (IsAddNew)
        {
            Message.Text = "Error inserting record: " + ex.Message;
        }
        else
        {
            Message.Text = "Error updating record: " + ex.Message;
        }
        RowKey.Focus();
    }
}
// Persists a single car entity into the given table via an Insert operation.
private static void InsertCarData(CloudTable table, CarEntity carEntity)
{
    table.Execute(TableOperation.Insert(carEntity));
}
/// <summary>
/// Emit the provided log event to the sink.
/// </summary>
/// <param name="logEvent">The log event to write.</param>
public void Emit(LogEvent logEvent)
{
    // todo: Use batch insert operation via timer like the Mongo and Couch sinks?
    var entity = new LogEventEntity(logEvent, _formatProvider);
    _table.Execute(TableOperation.Insert(entity));
}
/// <summary>
/// Queue-triggered batch worker: parses one fixed-format CSV line describing a
/// VM request, calls the vmchooser API for the most optimal VM size, then for
/// the matching disk configuration, and stores the combined result in the
/// "vmchooserbatch" table (partition = csv file, row = vm name).
/// </summary>
/// <param name="csvQueueItem">One comma-separated line with 16 fixed columns.</param>
/// <param name="log">Function trace writer.</param>
public static void Run([QueueTrigger("vmchooserbatch", Connection = "vmchooser-sa-queue-batch")] string csvQueueItem, TraceWriter log)
{
    string separator = ",";
    string[] values = System.Text.RegularExpressions.Regex.Split(csvQueueItem, separator);

    // Hard format, column based... :x
    string vmname = values[0];
    string region = values[1];
    string cores = values[2];
    string memory = values[3];
    string ssd = values[4];
    string nic = values[5];
    string data = values[6];
    string iops = values[7];
    string throughput = values[8];
    string temp = values[9];
    string peakcpu = values[10];
    string peakmem = values[11];
    string currency = values[12];
    string contract = values[13];
    string burst = values[14];
    string csvfile = values[15];

    // Fix for "dynamic" pricing mapping later on with the results
    currency = currency.ToUpper();

    // Retrieve most optimal vm size.
    string vmchooser_api_authorizationkey = System.Environment.GetEnvironmentVariable("vmchooser-api-authorizationkey");
    string vmchooser_api_url_getvmsize = System.Environment.GetEnvironmentVariable("vmchooser-api-url-getvmsize");
    // BUGFIX: the suffix previously contained "(R)ion=" and the currency-sign
    // glyph before "cy=" - mangled encodings of "&region=" and "&currency=" -
    // so both parameters were silently dropped from the API call.
    string querysuffix = "?burstable=" + burst + "&maxresults=1" + "&region=" + region + "&cores=" + cores + "&memory=" + memory + "&iops=" + iops + "&data=" + data + "&temp=" + temp + "&throughput=" + throughput + "&nics=" + nic + "&ssd=" + ssd + "&avgcpupeak=" + peakcpu + "&avgmempeak=" + peakmem + "&currency=" + currency + "&contract=" + contract;
    string apicall = vmchooser_api_url_getvmsize + querysuffix;

    HttpWebRequest request = WebRequest.Create(apicall) as HttpWebRequest;
    request.Headers["Ocp-Apim-Subscription-Key"] = vmchooser_api_authorizationkey;
    request.Method = "POST";
    request.ContentLength = 0;

    try
    {
        using (WebResponse response = request.GetResponse())
        {
            using (Stream stream = response.GetResponseStream())
            {
                StreamReader reader = new StreamReader(stream, System.Text.Encoding.UTF8);
                String stringResponse = reader.ReadToEnd();
                // The API returns a JSON object whose "1" property holds the best match
                // (Name, Region, Contract, prices per hour/200h/month, ACU, SSD, cores, ...).
                JObject joResponse = JObject.Parse(stringResponse);
                dynamic vmInfo = (JObject)joResponse["1"];
                if (vmInfo == null)
                {
                    log.Info("No results received"); // BUGFIX: was "recevied"
                    return;
                }
                else
                {
                    string vmInfoName = vmInfo.Name;
                    string vmInfoRegion = vmInfo.Region;
                    string vmInfoContract = vmInfo.Contract;
                    string vmInfoPricehour = vmInfo["Price (" + currency + "/Hour)"];
                    string vmInfoPrice200h = vmInfo["Price (" + currency + "/200h)"];
                    string vmInfoPricemonth = vmInfo["Price (" + currency + "/Month)"];
                    string vmInfoAcu = vmInfo.ACU;
                    string vmInfoSsd = vmInfo.SSD;
                    string vmInfoCores = vmInfo.Cores;
                    string vmInfoPcores = vmInfo.pCores;
                    string vmInfoMemory = vmInfo["Memory (GB)"];
                    string vmInfoNics = vmInfo.NICs;
                    string vmInfoBandwidth = vmInfo["Bandwidth (Mbps)"];
                    string vmInfoDisks = vmInfo["Max Disks"];
                    string vmInfoIops = vmInfo["Max IOPS"];
                    string vmInfoThroughput = vmInfo["Max Throughput (MB/s)"];

                    // Retrieve most optimal disk config.
                    string vmchooser_api_url_getdisksize = System.Environment.GetEnvironmentVariable("vmchooser-api-url-getdisksize");
                    decimal fixeddata;
                    Decimal.TryParse(data, out fixeddata); // leaves 0 when unparseable
                    fixeddata = fixeddata * 1024;          // Convert TB to GB
                    // BUGFIX: same "&currency=" mangling as above.
                    string diskquerysuffix = "?region=" + region + "&iops=" + iops + "&data=" + fixeddata.ToString() + "&throughput=" + throughput + "&currency=" + currency + "&ssd=" + ssd + "&maxdisks=" + vmInfoDisks;
                    string diskapicall = vmchooser_api_url_getdisksize + diskquerysuffix;

                    HttpWebRequest diskrequest = WebRequest.Create(diskapicall) as HttpWebRequest;
                    diskrequest.Headers["Ocp-Apim-Subscription-Key"] = vmchooser_api_authorizationkey;
                    diskrequest.Method = "POST";
                    diskrequest.ContentLength = 0;

                    try
                    {
                        using (WebResponse diskresponse = diskrequest.GetResponse())
                        {
                            using (Stream diskstream = diskresponse.GetResponseStream())
                            {
                                StreamReader diskreader = new StreamReader(diskstream, System.Text.Encoding.UTF8);
                                String stringDiskResponse = diskreader.ReadToEnd();
                                log.Info("Disk : " + stringDiskResponse);
                                // Response keys per the API (note "Througput" is spelled
                                // that way by the service itself).
                                JObject joDiskResponse = JObject.Parse(stringDiskResponse);
                                dynamic diskInfo = (JObject)joDiskResponse;
                                string diskInfoTshirtsize = diskInfo["Disk T-Shirt Size"];
                                string diskInfoType = diskInfo["Disk Type"];
                                string diskInfoCapacitySingle = diskInfo["Capacity (GB) - per disk"];
                                string diskInfoIopsSingle = diskInfo["IOPS (IO/s) - per disk"];
                                string diskInfoThoughputSingle = diskInfo["Througput (MB/s) - per disk"];
                                string diskInfoDiskcount = diskInfo["Number of Disks"];
                                string diskInfoCapacityTotal = diskInfo["Capacity (GB) - for all disks"];
                                string diskInfoIopsTotal = diskInfo["IOPS (IO/s) - for all disks"];
                                // BUGFIX: key previously missed the trailing "s"
                                // ("- for all disk"), which never matched the response.
                                string diskInfoThroughputTotal = diskInfo["Througput (MB/s) - for all disks"];
                                string diskInfoDescription = diskInfo["Description"];
                                string diskInfoPrice = diskInfo["Price / Month - for all disks"];
                                string diskInfoCurrency = diskInfo["Currency"];

                                // Write Results to Table Storage.
                                string vmchooser_sa_table_batch = System.Environment.GetEnvironmentVariable("vmchooser-sa-table-batch");
                                CloudStorageAccount storageAccount = CloudStorageAccount.Parse(vmchooser_sa_table_batch);
                                CloudTableClient tableClient = storageAccount.CreateCloudTableClient();
                                CloudTable table = tableClient.GetTableReference("vmchooserbatch");
                                table.CreateIfNotExists();

                                VmchooserEntity vmresult = new VmchooserEntity(csvfile, vmname);
                                vmresult.InputName = vmname;
                                vmresult.InputRegion = region;
                                vmresult.InputCores = cores;
                                vmresult.InputMemory = memory;
                                vmresult.InputSSD = ssd;
                                vmresult.InputNICS = nic;
                                vmresult.InputData = data;
                                vmresult.InputIOPS = iops;
                                vmresult.InputThroughput = throughput;
                                vmresult.InputTemp = temp;
                                vmresult.InputAvgCPU = peakcpu;
                                vmresult.InputAvgMEM = peakmem;
                                vmresult.InputContract = contract;
                                vmresult.InputCurrency = currency;
                                vmresult.InputBurstable = burst;
                                vmresult.DiskType = diskInfoType;
                                vmresult.DiskConfig = diskInfoDescription;
                                vmresult.DiskConfigPrice = diskInfoPrice;
                                vmresult.Name = vmInfoName;
                                vmresult.ACU = vmInfoAcu;
                                vmresult.SSD = vmInfoSsd;
                                vmresult.Cores = vmInfoCores;
                                vmresult.PCores = vmInfoPcores;
                                vmresult.MemoryGB = vmInfoMemory;
                                vmresult.NICS = vmInfoNics;
                                vmresult.BandwidthMbps = vmInfoBandwidth;
                                vmresult.MaxDisks = vmInfoDisks;
                                vmresult.MaxIOPS = vmInfoIops;
                                vmresult.MaxThroughputMBs = vmInfoThroughput;
                                vmresult.PriceHour = vmInfoPricehour;
                                vmresult.Price200h = vmInfoPrice200h;
                                vmresult.PriceMonth = vmInfoPricemonth;

                                TableOperation insertOperation = TableOperation.Insert(vmresult);
                                table.Execute(insertOperation);
                            }
                        }
                    }
                    catch (WebException e)
                    {
                        using (WebResponse diskresponse = e.Response)
                        {
                            // Error whilst getting the Disk Sizing Info.
                            HttpWebResponse httpResponse = (HttpWebResponse)diskresponse;
                            log.Error("Error code: " + httpResponse.StatusCode);
                            return;
                        }
                    }
                }
            }
        }
    }
    catch (WebException e)
    {
        using (WebResponse response = e.Response)
        {
            // Error whilst getting the VM Size Info.
            HttpWebResponse httpResponse = (HttpWebResponse)response;
            log.Error("Error code: " + httpResponse.StatusCode);
            return;
        }
    }
}
// Builds the pending table operation, runs it, and wraps the outcome
// in a StreamOpenResult.
public StreamOpenResult Execute()
{
    var operation = Prepare();
    var tableResult = table.Execute(operation);
    return (Result(tableResult));
}
/// <summary>
/// Demo: creates a table, upserts two customer entities (InsertOrReplace, then
/// InsertOrMerge), reads both back, and prints the request charge of each
/// operation (RequestCharge is only populated against Azure Cosmos DB).
/// </summary>
static void Main(string[] args)
{
    Console.WriteLine("start");
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(storageconnectionstring);
    Console.WriteLine("Got valid storage account");

    CloudTableClient tableClient = storageAccount.CreateCloudTableClient(new TableClientConfiguration());
    Console.WriteLine("Created a table client");

    var tableName = "demotable22082127";
    CloudTable table = tableClient.GetTableReference(tableName);
    if (table.CreateIfNotExists())
    {
        Console.WriteLine($"Created a table {tableName}"); // BUGFIX: missing space before name
    }
    else
    {
        Console.WriteLine($"Already exists. {tableName}");
    }

    CustomerEntity customer = new CustomerEntity("Harp", "Walter")
    {
        Email = "*****@*****.**",
        PhoneNumber = "0000"
    };
    ExtendedCustomerEntity extendedCustomer = new ExtendedCustomerEntity("Kim", "Jinpyi")
    {
        Email = "*****@*****.**",
        PhoneNumber = "0000",
        PostCode = "3133"
    };
    Console.WriteLine($"Created customer entity");

    // Upsert the base customer.
    TableOperation tbOp = TableOperation.InsertOrReplace(customer);
    TableResult result = table.Execute(tbOp);
    Console.WriteLine($"Inserted customer entity");
    if (result.RequestCharge.HasValue)
    {
        // BUGFIX: this message previously claimed "InsertOrMerge" for an InsertOrReplace.
        Console.WriteLine("Request Charge of InsertOrReplace Operation: " + result.RequestCharge);
    }

    Console.WriteLine("Update an existing Entity using the InsertOrMerge Upsert Operation.");
    tbOp = TableOperation.InsertOrMerge(extendedCustomer);
    result = table.Execute(tbOp);
    // BUGFIX: previously logged "Inserted customer entity" again for the merge.
    Console.WriteLine($"Upserted extended customer entity");
    if (result.RequestCharge.HasValue)
    {
        Console.WriteLine("Request Charge of InsertOrMerge Operation: " + result.RequestCharge);
    }

    // Read the base customer back.
    tbOp = TableOperation.Retrieve <CustomerEntity>("Harp", "Walter");
    result = table.Execute(tbOp);
    CustomerEntity retreivecustomer = result.Result as CustomerEntity;
    if (retreivecustomer != null)
    {
        Console.WriteLine("\t{0}\t{1}\t{2}\t{3}", retreivecustomer.PartitionKey, retreivecustomer.RowKey, retreivecustomer.Email, retreivecustomer.PhoneNumber);
    }
    // Get the request units consumed by the current operation.
    // RequestCharge of a TableResult is only applied to Azure Cosmos DB.
    if (result.RequestCharge.HasValue)
    {
        Console.WriteLine("Request Charge of Retrieve Operation: " + result.RequestCharge);
    }

    // Read the extended customer back.
    tbOp = TableOperation.Retrieve <ExtendedCustomerEntity>("Kim", "Jinpyi");
    result = table.Execute(tbOp);
    var ret = result.Result as ExtendedCustomerEntity;
    if (ret != null)
    {
        Console.WriteLine("\t{0}\t{1}\t{2}\t{3}\t{4}", ret.PartitionKey, ret.RowKey, ret.Email, ret.PhoneNumber, ret.PostCode);
    }
    if (result.RequestCharge.HasValue)
    {
        Console.WriteLine("Request Charge of Retrieve Operation: " + result.RequestCharge);
    }

    Console.ReadKey();
}
/// <summary>
/// Emit the provided log event to the sink.
/// </summary>
/// <param name="logEvent">The log event to write.</param>
public void Emit(LogEvent logEvent)
{
    var entity = new LogEventEntity(logEvent, _formatProvider, logEvent.Timestamp.Ticks);
    _table.Execute(TableOperation.Insert(entity));
}
/// <summary>
/// Processes the queue message. Looks up the blog post named by the message's
/// row key, then notifies verified subscribers in pages of 50, recording per
/// subscriber which post they were last mailed about (or a "Faulted" marker).
/// </summary>
/// <param name="message">The message (the new post's row key).</param>
/// <param name="blogTable">The blog table.</param>
/// <param name="subscriberTable">The subscriber table.</param>
public static void ProcessNewPostQueueMessage(
    [QueueTrigger(NewPostQueue)] string message,
    [Table(BlogTable)] CloudTable blogTable,
    [Table(SubscriberTable)] CloudTable subscriberTable)
{
    try
    {
        Console.Out.WriteLine("New post message captured {0}", message);
        var sendgridUserName = ConfigurationManager.AppSettings[ApplicationConstants.SendgridUserName];
        var sendgridPassword = ConfigurationManager.AppSettings[ApplicationConstants.SendgridPassword];
        mailSystem = new SendgridMailClient(sendgridUserName, sendgridPassword);
        // The queue message is the blog post's RowKey under the fixed blog partition.
        var operation = TableOperation.Retrieve(ApplicationConstants.BlogKey, message);
        var result = blogTable.Execute(operation, TableRequestOptions).Result as DynamicTableEntity;
        if (null == result)
        {
            Console.Error.WriteLine("Could not find record corresponding to RK {0}", message);
            return;
        }
        var title = result.Properties["Title"].StringValue;
        var postedDate = result.Properties["PostedDate"].DateTime;
        var bodySnippet = Routines.GeneratePreview(result.Properties["AutoIndexedElement_0_Body"].StringValue);
        var formattedUri = result.Properties["FormattedUri"].StringValue;
        //// Run paged queries to get subscribers.
        Console.Out.WriteLine("Going to get list of subscribers");
        // Select verified subscribers who have not already been mailed this post
        // and did not fault earlier today (50 at a time).
        var query = (from record in subscriberTable.CreateQuery <DynamicTableEntity>()
                     where record.PartitionKey == ApplicationConstants.SubscriberListKey &&
                           record.Properties["LastEmailIdentifier"].StringValue != result.RowKey &&
                           record.Properties["LastEmailIdentifier"].StringValue != string.Format("Faulted {0}", DateTime.UtcNow.Date) &&
                           record.Properties["IsVerified"].BooleanValue == true
                     select record).Take(50).AsTableQuery();
        TableContinuationToken token = null;
        do
        {
            var segment = subscriberTable.ExecuteQuerySegmented(query, token, TableRequestOptions);
            var batchUpdate = new TableBatchOperation();
            if (null == segment || !segment.Any())
            {
                Console.Out.WriteLine("No users found. Aborting current call.");
                return;
            }
            //// Compose Tuple of records of users.
            var userDetails = segment.Select(
                record => new Tuple <string, string, string>(
                    record.Properties["FirstName"].StringValue,
                    record.RowKey,
                    record.Properties["VerificationString"].StringValue)).ToList();
            Console.Out.WriteLine("Going to send mails to users");
            if (SendNewPostMailsToUsers(userDetails, title, postedDate, formattedUri, bodySnippet))
            {
                // Success: mark each subscriber as having been mailed this post.
                foreach (var record in segment)
                {
                    record.Properties["LastEmailIdentifier"].StringValue = result.RowKey;
                    batchUpdate.Add(TableOperation.InsertOrReplace(record));
                }
            }
            else
            {
                // Failure: mark the page as faulted for today so it is skipped on re-runs.
                foreach (var record in segment)
                {
                    record.Properties["LastEmailIdentifier"].StringValue = string.Format(
                        "Faulted {0}", DateTime.UtcNow.Date);
                    batchUpdate.Add(TableOperation.InsertOrReplace(record));
                }
            }
            subscriberTable.ExecuteBatch(batchUpdate, TableRequestOptions);
            token = segment.ContinuationToken;
        }while (token != null);
        Console.Out.WriteLine("Mails sent");
    }
    catch (Exception exception)
    {
        Console.Error.WriteLine("Error at Time:{0} Message:{1}", DateTime.UtcNow, exception);
        throw;
    }
}
/// <summary>
/// Upserts the given entity (insert-or-replace semantics, so it also
/// works when the entity does not yet exist).
/// </summary>
/// <param name="entity">The entity to write.</param>
public void Update(T entity)
{
    _table.Execute(TableOperation.InsertOrReplace(entity));
}
/// <summary>Wraps the device status in a table entity and inserts it.</summary>
/// <param name="status">The status snapshot to persist.</param>
public void Insert(DeviceStatus status)
{
    var entity = new DeviceStatusEntity(status);
    var insertOperation = TableOperation.Insert(entity);
    _table.Execute(insertOperation);
}
// Adds a new user entity to the table via an Insert operation.
// NOTE(review): 'operation' is not declared here, so it is presumably a
// class-level field; stashing a transient TableOperation in shared state is
// not thread-safe and serves no purpose in this method - confirm nothing
// else reads it, then make it a local.
public void AddUser(User newUser) { operation = TableOperation.Insert(newUser); _table.Execute(operation); }
// Inserts a new customer record; the operation fails if the entity already exists.
static void CreateCustomer(CloudTable table, CustomerUS customer)
{
    table.Execute(TableOperation.Insert(customer));
}
/// <summary>
/// Imports data of DataTable to table storage. Each sheet row becomes one
/// entity under a fresh timestamped partition key; a "Latest" pointer row is
/// written last so readers can find the newest partition.
/// </summary>
/// <param name="dtSheetInfo">Sheet data to upload; one DataTable row per entity.</param>
/// <param name="strSheetName">Excel sheet name, used to build the partition key.</param>
private void ImportDataToTable(System.Data.DataTable dtSheetInfo, string strSheetName)
{
    var client = storageAccount.CreateCloudTableClient();
    string strTableName = txt_TableName.Text;
    if (!string.IsNullOrEmpty(strTableName))
    {
        // Pad the response so the browser starts rendering progress output immediately.
        Response.Write(new string(' ', 1024));
        Response.Write(String.Format("<div>Uploading {0} rows for sheet {1}", dtSheetInfo.Rows.Count, strSheetName.Replace("$", "")));
        Response.Flush();
        CloudTable table = client.GetTableReference(strTableName);
        table.CreateIfNotExists();
        // Create a new partition key for this data instead of overwriting old data.
        var partitionKey = strSheetName + DateTime.UtcNow.ToString("o");
        var batch = new TableBatchOperation();
        for (int j = 0; j < dtSheetInfo.Rows.Count; j++)
        {
            // RowKey is zero-padded for lexical ordering; the +2 offset presumably
            // maps back to the original Excel row number (header row) - TODO confirm.
            ExcelTableEntity entity = new ExcelTableEntity(partitionKey, (j + 2).ToString("D5"));
            var hasContent = false;
            for (int i = 0; i < dtSheetInfo.Columns.Count; i++)
            {
                string strCloName = dtSheetInfo.Columns[i].ColumnName;
                if (!(dtSheetInfo.Rows[j][i] is DBNull) && (dtSheetInfo.Rows[j][i] != null))
                {
                    hasContent = true;
                    string strValue = dtSheetInfo.Rows[j][i].ToString().Trim();
                    // Only add the value as a dynamic property when it is not already
                    // handled as a declared property of the entity.
                    if (!CheckPropertyExist(strCloName, strValue, entity))
                    {
                        EntityProperty property = entity.ConvertToEntityProperty(strCloName, dtSheetInfo.Rows[j][i]);
                        if (!entity.properties.ContainsKey(strCloName))
                        {
                            entity.properties.Add(strCloName, property);
                        }
                        else
                        {
                            entity.properties[strCloName] = property;
                        }
                    }
                }
            }
            if (hasContent)
            {
                batch.Add(TableOperation.InsertOrReplace(entity));
            }
            // Azure table batches are limited to 100 operations; flush when full.
            if (batch.Count >= 100)
            {
                table.ExecuteBatch(batch);
                Response.Write(".");
                Response.Flush();
                batch.Clear();
            }
        }
        // Flush the final partial batch.
        if (batch.Count > 0)
        {
            table.ExecuteBatch(batch);
            Response.Write(".");
            Response.Flush();
        }
        // Pointer row: sheet name -> newest partition key.
        var pointer = new ExcelTableEntity(strSheetName.Replace("$", ""), "Latest");
        pointer.properties.Add("ID", new EntityProperty(partitionKey));
        table.Execute(TableOperation.InsertOrReplace(pointer));
        Response.Write(String.Format("\n PartitionKey: <code>{0}</code></div><hr/>", partitionKey));
        Response.Flush();
    }
}
// Replaces an existing customer record (the entity must carry a valid ETag).
static void UpdateCustomer(CloudTable table, CustomerUS customer)
{
    TableOperation replaceOperation = TableOperation.Replace(customer);
    table.Execute(replaceOperation);
}
//---------------------------USER ACTIONS--------------------------------------
//ADICIONAR PRODUTO
/// <summary>
/// Registers a user unless the e-mail (used as RowKey) is already taken.
/// Returns a status message (in Portuguese) describing the outcome.
/// </summary>
/// <param name="user">The user to register.</param>
public string AdicionarUser(User user)
{
    try
    {
        // Look for an existing row keyed by this e-mail address.
        var emailFilter = TableQuery.GenerateFilterCondition("RowKey", QueryComparisons.Equal, user.Email);
        var existing = table.ExecuteQuery(new TableQuery <ModeloTableUser>().Where(emailFilter)).ToList();

        // Already registered?
        if (existing.Count > 0)
        {
            return ("Já existe esse email registado.");
        }

        user.Autenticado = true;
        user.QuantLogins += 1;
        table.Execute(TableOperation.Insert(UserToModelTableUser(user)));
    }
    catch (Exception ex)
    {
        // Conflict error (e.g. a concurrent insert of the same key).
        Console.WriteLine(ex.Message);
        return ("Já existe esse email registado.");
    }
    return ("Mensagem do helper User adicionado");
}
/// <summary>
/// Removes the supplied customer entity from the given table.
/// </summary>
/// <param name="table">Table holding the customer rows.</param>
/// <param name="customer">Entity to delete (identified by its partition/row keys).</param>
static void DeleteCustomer(CloudTable table, CustomerUS customer)
{
    var removal = TableOperation.Delete(customer);
    table.Execute(removal);
}
/// <summary>
/// News dashboard action: shows the last 24 hours of news items, optionally
/// filtered by provider and/or category. Also builds per-provider and
/// per-category sentiment breakdowns into the ViewBag, plus YTD/today
/// aggregates looked up from the "newsItemsMeta" table.
/// NOTE(review): meta rows are addressed by string.GetHashCode(), which is not
/// stable across processes/runtimes — confirm the writer uses the same scheme,
/// otherwise the YTD/today lookups can miss.
/// NOTE(review): uses DateTime.Now (local time) for the window — confirm
/// DatePublished is stored in local time too.
/// </summary>
/// <param name="providerSearch">Optional provider name to filter by.</param>
/// <param name="categorySearch">Optional category name to filter by.</param>
/// <returns>The view, bound to the filtered items sorted by descending sentiment.</returns>
public ActionResult Index(string providerSearch, string categorySearch)
{
    //default view is to show last x hours of news
    var startDate = DateTime.Now.AddHours(-24);
    var endDate = DateTime.Now;  // NOTE(review): endDate is never used below.
    CloudTable table = DataAccess.GetDataTable("newsItems");
    CloudTable tableMeta = DataAccess.GetDataTable("newsItemsMeta");

    #region getFilterCounts
    // Unfiltered window query: feeds the provider/category sentiment breakdowns.
    var queryFilterAreas = table.CreateQuery <NewsItem>()
                           .Where(d => d.PartitionKey == "newsFeels" && d.DatePublished >= startDate);
    var FilterCounts = queryFilterAreas.ToList();
    // provider/category name -> list of sentiment percentages (0-100 scale).
    Dictionary <string, List <double> > providers = new Dictionary <string, List <double> >();
    Dictionary <string, List <double> > categories = new Dictionary <string, List <double> >();
    foreach (var item in FilterCounts)
    {
        string provider = item.Provider;
        if (provider == null)
        {
            provider = "Unknown";
        }
        string category = item.Category;
        double sentiment = item.Sentiment * 100;
        if (category == null)
        {
            category = "General";
        }
        if (providers.ContainsKey(provider))
        {
            providers[provider].Add(sentiment);
        }
        else
        {
            providers.Add(provider, new List <double> {
                sentiment
            });
        }
        if (categories.ContainsKey(category))
        {
            categories[category].Add(sentiment);
        }
        else
        {
            categories.Add(category, new List <double> {
                sentiment
            });
        }
    }
    ViewBag.providers = providers;
    ViewBag.categories = categories;
    ViewBag.allSentimentAverage = FilterCounts.Average(p => p.Sentiment) * 100;
    #endregion

    // Create a query: in this example I use the DynamicTableEntity class
    var query = table.CreateQuery <NewsItem>()
                .Where(d => d.PartitionKey == "newsFeels" && d.DatePublished >= startDate);
    // Apply the optional user filters on top of the window query.
    if (!String.IsNullOrEmpty(providerSearch))
    {
        query = query.Where(d => d.Provider == providerSearch);
    }
    if (!String.IsNullOrEmpty(categorySearch))
    {
        query = query.Where(d => d.Category == categorySearch);
    }
    //var newsItems = table.ExecuteQuery(query).ToList();
    //var newsItems = table.Execute(query)
    var newsItems = query.ToList();
    var sortedData = newsItems.OrderByDescending(c => c.Sentiment).ToList();
    //get YTD
    string YTD = "YTD";
    TableOperation retrieveOperation = TableOperation.Retrieve <newsItemMeta>("newsFeelsMeta", YTD.GetHashCode().ToString());
    // Execute the retrieve operation.
    TableResult retrievedResult = tableMeta.Execute(retrieveOperation);
    ViewBag.YTD = Math.Round(((newsItemMeta)retrievedResult.Result).Average, 2) * 100;
    string today = DateTime.Now.ToShortDateString();
    TableOperation retrieveOperationToday = TableOperation.Retrieve <newsItemMeta>("newsFeelsMeta", today.GetHashCode().ToString());
    // Execute the retrieve operation.
    TableResult retrievedResultToday = tableMeta.Execute(retrieveOperationToday);
    //get today
    // Today's figure is computed from the filtered result set; the meta-table
    // lookup above is fetched but its result is only used in the commented line.
    try
    {
        //ViewBag.Today = Math.Round(((newsItemMeta)retrievedResultToday.Result).Average, 2) * 100;
        ViewBag.Today = Math.Round(sortedData.Average(p => p.Sentiment), 2) * 100;
    }
    catch
    {
        // Average() throws on an empty set; fall back to zero.
        ViewBag.Today = 0;
    }
    return(View(sortedData));
}
/// <summary>
/// Persists changes to a diagnostics source by merging its entity form into the table.
/// </summary>
/// <param name="source">Source whose entity representation is merged.</param>
public void UpdateSource(DiagnosticsSource source)
{
    var mergeOperation = TableOperation.Merge(source.ToEntity());
    _table.Execute(mergeOperation);
}
/// <summary>
/// Upserts the given entity. NOTE: despite the "Insert" name this issues
/// InsertOrReplace, so an existing row with the same keys is overwritten.
/// </summary>
/// <typeparam name="T">Table entity type.</typeparam>
/// <param name="entity">Entity to insert or replace.</param>
public void InsertEntity <T>(T entity) where T : ITableEntity
{
    var upsert = TableOperation.InsertOrReplace(entity);
    Table.Execute(upsert);
}
/// <summary>
/// For each active company, gathers leads that have exceeded their stage's
/// maximum duration, builds a localized HTML digest e-mail per user (grouped
/// by stage via the StringBuilder markup below), stores it as a SendEmail row
/// in the "Message" table, and enqueues a "PartitionKey,RowKey," pointer
/// message on the "azuremailqueue" queue for the mail-sending role.
/// Per-company/per-lead failures are accumulated into logdata and persisted
/// at the end via saveEmailData rather than aborting the whole run.
/// (Original summary read "for saing delay lead email data in message table".)
/// NOTE(review): lead data arrives as '|'-separated records of ','-separated
/// fields (leadId, dayDifference, title, stageId, stageName) — confirm against
/// GetCompanyDataForEmail's producer.
/// </summary>
public void sendDelayLeadNotification() { Trace.TraceInformation("Worker Role DelayLead sendDelayLeadNotification() called {0}", DateTime.Now); try { try { List <int> objallcompany = objLeadNotifcationBusiness.GetAllActiveCompanies(); string culture = ""; string UserName = ""; int StageDuration = 0; foreach (int companyId in objallcompany) { IList <ErucaCRM.Domain.LeadEmailNotificationModel> objLeadNotificationModel; try { objLeadNotificationModel = objLeadNotifcationBusiness.GetCompanyDataForEmail(companyId); } catch (Exception exception) { hasError = true; logdata.Append("\n"); logdata.Append("," + WorkerRoleDelayLead.UserId + "," + ErucaCRM.Utility.Enums.ResponseResult.Failure.ToString() + "," + exception.Message + ".Error Occured while Fetching Company data for Email."); continue; } for (int emailNotificationIndex = 0; emailNotificationIndex < objLeadNotificationModel.Count; emailNotificationIndex++) { culture = objLeadNotificationModel[emailNotificationIndex].CultureName; UserName = objLeadNotificationModel[emailNotificationIndex].Name; WorkerRoleDelayLead.UserId = objLeadNotificationModel[emailNotificationIndex].UserId; string[] LeadsData = objLeadNotificationModel[emailNotificationIndex].LeadIds.Split('|'); string currentStageName = ""; string leadId = ""; string dayDifference = ""; string title = ""; string stageId = ""; string stageName = ""; bool loopReachedEnd = false; StringBuilder objstringbuilder = new StringBuilder(); for (int leadDataIndex = 0; leadDataIndex < LeadsData.Count(); leadDataIndex++) { string[] LeadSeparated = LeadsData[leadDataIndex].Split(','); if (checkStageLeadData(LeadSeparated.Count())) { leadId = LeadSeparated[0]; dayDifference = LeadSeparated[1]; title = LeadSeparated[2]; stageId = LeadSeparated[3]; stageName = LeadSeparated[4]; int stageIdForLead = Convert.ToInt32(stageId); LeadNotifcationBusiness leadNotificationBusiness = new 
LeadNotifcationBusiness(unitOfWork); try { StageDuration = leadNotificationBusiness.GetStageLeadDuration(stageIdForLead); } catch (Exception exception) { hasError = true; logdata.Append("\n"); logdata.Append("," + WorkerRoleDelayLead.UserId + "," + ErucaCRM.Utility.Enums.ResponseResult.Failure.ToString() + "," + exception.Message + "." + "Error Occured while Fetching Lead data for StageDuration."); continue; } try { if (currentStageName != stageName) { if (currentStageName != "") { loopReachedEnd = true; } else { loopReachedEnd = false; } if (currentStageName != stageName && loopReachedEnd) { objstringbuilder.Append("</ul></div>"); } currentStageName = stageName; { objstringbuilder.Append("<div style='background-color:mintcream;border:1px solid Grey;border-radius:5px; float:left; margin-right:10px; margin-top:30px; width:800px; box-shadow:0 0 3px #666; padding:5px;'>"); objstringbuilder.Append("<p><b>" + CommonFunctions.GetGlobalizedLabel("Lead", "StageName", culture) + " :</b>" + currentStageName + " <i> (" + CommonFunctions.GetGlobalizedLabel("Lead", "MaxDuration", culture) + ":" + StageDuration + CommonFunctions.GetGlobalizedLabel("Lead", "Days", culture) + ") </i></p><br/><b style='float:left'>" + CommonFunctions.GetGlobalizedLabel("Lead", "Leads", culture) + "</b>"); objstringbuilder.Append("<ul style='float:left'><li><b><a href='" + ReadConfiguration.ErucaCRMURL + "#" + Convert.ToInt32(leadId).Encrypt() + "'> " + title + " </a></b><i> (" + CommonFunctions.GetGlobalizedLabel("Lead", "TotalTimeSpent", culture) + " :" + dayDifference + CommonFunctions.GetGlobalizedLabel("Lead", "Days", culture) + ") </i></li>"); // objstringbuilder.Append("<p><b>" + "Test Check" + " :</b>" + currentStageName + " <i> (" + "Test Check" + ":" + StageDuration + "Test Check" + ") </i></p><br/><b style='float:left'>" + "Test Check" + "</b>"); // objstringbuilder.Append("<ul style='float:left'><li><b><a href='" + ReadConfiguration.ErucaCRMURL + "#" + Convert.ToInt32(leadId).Encrypt() + 
"'> " + title + " </a></b><i> (" + "Test Check" + " :" + dayDifference + "Test Check" + ") </i></li>"); } } else { objstringbuilder.Append("<li><b><a href='" + ReadConfiguration.ErucaCRMURL + "#" + Convert.ToInt32(leadId).Encrypt() + "'>" + title + "</a></b><i> (" + CommonFunctions.GetGlobalizedLabel("Lead", "TotalTimeSpent", culture) + " :" + dayDifference + CommonFunctions.GetGlobalizedLabel("Lead", "Days", culture) + ") </i></li>"); } } catch (Exception exception) { hasError = true; logdata.Append("\n"); logdata.Append(companyId + "," + WorkerRoleDelayLead.UserId + "," + ErucaCRM.Utility.Enums.ResponseResult.Failure.ToString() + "," + exception.Message + "." + "Error Occured While Fetching Data From EXCEL File or Configuration File."); } } } objstringbuilder.Append("</ul></div>"); objmailhelper.ToAddress = objLeadNotificationModel[emailNotificationIndex].EmailId; objmailhelper.RecipientName = objLeadNotificationModel[emailNotificationIndex].Name; objmailhelper.Subject = Constants.LEADS_TIMEEXCEED_NOTIFICATION_SUBJECT; try { objmailhelper.Body = "<p>" + CommonFunctions.GetGlobalizedLabel("Lead", "Hi", culture) + " " + UserName + ",</p><br/><br/><p>" + CommonFunctions.GetGlobalizedLabel("Lead", "EmailTopMsg", culture) + ":</br></br>"; objmailhelper.Body = objmailhelper.Body + objstringbuilder.ToString() + "<div style='clear:both;'></div><div style='width:700px;margin-top:20px;'><p>Regards</p></BR><P>Administration</P></div>"; logdata.Append("\n"); CloudStorageAccount storageAccount = CloudStorageAccount.Parse( CloudConfigurationManager.GetSetting("StorageConnectionString")); CloudTableClient tableClient = storageAccount.CreateCloudTableClient(); CloudTable table = tableClient.GetTableReference("Message"); table.CreateIfNotExists(); Message message = new Message(); AutoMapper.Mapper.Map(objmailhelper, message); EmailBody = message.Body; var sendEmailRow = new SendEmail { PartitionKey = message.RecipientName, RowKey = message.ToAddress, EmailAddress = 
message.ToAddress, EmailSent = false, MessageBody = message.Body, ScheduledDate = DateTime.Now, FromEmailAddress = ReadConfiguration.EmailForScheduler, SubjectLine = message.Subject, }; try { Trace.TraceInformation("Worker Role DelayLead saved data in message table {0}", DateTime.Now); TableOperation insertOperation = TableOperation.InsertOrReplace(sendEmailRow); table.Execute(insertOperation); } catch (Exception ex) { string err = "Error creating SendEmail row: " + ex.Message; if (ex.InnerException != null) { err += " Inner Exception: " + ex.InnerException; } Trace.TraceError(err); } string queueMessageString = sendEmailRow.PartitionKey + "," + sendEmailRow.RowKey + ","; CloudQueueClient queueClient = storageAccount.CreateCloudQueueClient(); var queueMessage = new CloudQueueMessage(queueMessageString); sendEmailQueue = queueClient.GetQueueReference("azuremailqueue"); sendEmailQueue.AddMessage(queueMessage); Trace.TraceInformation("Worker Role DelayLead saved data in queue table {0}", DateTime.Now); logdata.Append(companyId + "," + WorkerRoleDelayLead.UserId + "," + ErucaCRM.Utility.Enums.ResponseResult.Success.ToString() + "," + objmailhelper.Subject); } catch (System.Net.Mail.SmtpException exception) { hasError = true; logdata.Append("\n"); logdata.Append(companyId + "," + WorkerRoleDelayLead.UserId + "," + ErucaCRM.Utility.Enums.ResponseResult.Failure.ToString() + "," + exception.Message + "." + "Error Occured on Sending Email to the the User"); } catch (Exception exception) { hasError = true; logdata.Append("\n"); logdata.Append(companyId + "," + WorkerRoleDelayLead.UserId + "," + ErucaCRM.Utility.Enums.ResponseResult.Failure.ToString() + "," + exception.Message + "." 
+ "Error Occured on Sending Email to the the User"); } } } saveEmailData(hasError, logdata, startdate); // await messageTable.ExecuteAsync(returnInsertOperation); } catch (Exception exception) { hasError = true; logdata.Append("\n"); logdata.Append("," + WorkerRoleDelayLead.UserId + "," + ErucaCRM.Utility.Enums.ResponseResult.Failure.ToString() + "," + exception.Message + "." + "Error Occured while Fetching Company."); saveEmailData(hasError, logdata, startdate); } } catch (Exception ex) { // NOTE(review): outer exception is silently swallowed (original "// Add(ex, true);" left disabled) — confirm this is intended. } }
// bank
/// <summary>
/// Upserts the bank entity: inserts it, or replaces an existing row with the same keys.
/// </summary>
/// <param name="Bank">Bank entity to store.</param>
public void AddOrReplaceBank(Bank Bank)
{
    var upsert = TableOperation.InsertOrReplace(Bank);
    _tableBank.Execute(upsert);
}
/// <summary>
/// Issues a strict Insert operation for the entity against the repository's table.
/// </summary>
/// <param name="entity">Entity to insert.</param>
public void Insert(T entity)
{
    Table.Execute(TableOperation.Insert(entity));
}
/// <summary>
/// Updates the Description of an image record on the account's master
/// image-record table and, when <paramref name="isListing"/> is true, on the
/// matching listing table as well. Uses a linear retry policy (4 tries, 1s apart).
/// </summary>
/// <param name="accountId">Account whose table-storage namespace is targeted.</param>
/// <param name="storagePartition">Storage partition hosting the account's data.</param>
/// <param name="imageGroupTypeNameKey">Selects the table (product / category / account images).</param>
/// <param name="objectId">PartitionKey of the image record.</param>
/// <param name="imageGroupNameKey">First half of the record's ImageKey.</param>
/// <param name="imageFormatNameKey">Second half of the record's ImageKey.</param>
/// <param name="newDescription">Replacement description text.</param>
/// <param name="isListing">Whether the listing copy of the record must be updated too.</param>
/// <returns>A response whose isSuccess reflects the outcome; never null.</returns>
internal static DataAccessResponseType UpdateImageRecordDescription(string accountId, string storagePartition, string imageGroupTypeNameKey, string objectId, string imageGroupNameKey, string imageFormatNameKey, string newDescription, bool isListing)
{
    var response = new DataAccessResponseType();

    CloudTableClient cloudTableClient = Settings.Azure.Storage.GetStoragePartitionAccount(storagePartition).CreateCloudTableClient();

    //Create and set retry policy--------
    IRetryPolicy linearRetryPolicy = new LinearRetry(TimeSpan.FromSeconds(1), 4);
    cloudTableClient.DefaultRequestOptions.RetryPolicy = linearRetryPolicy;

    // RowKey-equivalent used by both the master and listing lookups.
    var imageKey = imageGroupNameKey + "-" + imageFormatNameKey;

    #region Process on master image record

    //<-- accxxxxxproductimages / accxxxxxcategoryimages / accxxxxxaccountimages
    CloudTable cloudTable = cloudTableClient.GetTableReference(Sahara.Core.Common.Methods.SchemaNames.AccountIdToTableStorageName(accountId) + Internal.ImageRecordTableStorage.ImageRecordTableName(imageGroupTypeNameKey));

    //Get the entity to update
    var imageRecordEntity = (from record in cloudTable.CreateQuery <ImageRecordTableEntity>().Where(p => p.PartitionKey == objectId && p.ImageKey == imageKey) select record).FirstOrDefault();

    // FIX: the original dereferenced the query result unconditionally, throwing a
    // NullReferenceException when no record matched; report a failure instead.
    if (imageRecordEntity == null)
    {
        response.isSuccess = false;
        return response;
    }

    imageRecordEntity.Description = newDescription;

    //Replace the record
    TableOperation operation = TableOperation.Replace((imageRecordEntity as TableEntity));

    try
    {
        cloudTable.Execute(operation);
        response.isSuccess = true;
    }
    catch (Exception)
    {
        // FIX: the original did "return(null)" here, hiding the failure from callers
        // that expect a DataAccessResponseType; return the failed response instead.
        response.isSuccess = false;
        return response;
    }

    #endregion

    #region Process on Listing image (if applicable)

    if (isListing)
    {
        //<-- accxxxxxproductimages / accxxxxxcategoryimages / accxxxxxaccountimages
        CloudTable cloudTable2 = cloudTableClient.GetTableReference(Sahara.Core.Common.Methods.SchemaNames.AccountIdToTableStorageName(accountId) + Internal.ImageRecordTableStorage.ImageRecordListingTableName(imageGroupTypeNameKey));

        //Get the listing entity to update
        var imageRecordEntity2 = (from record in cloudTable2.CreateQuery <ImageRecordTableEntity>().Where(p => p.PartitionKey == objectId && p.ImageKey == imageKey) select record).FirstOrDefault();

        // FIX: same null guard as above for a missing listing record.
        if (imageRecordEntity2 == null)
        {
            response.isSuccess = false;
            return response;
        }

        imageRecordEntity2.Description = newDescription;

        //Replace the record
        TableOperation operation2 = TableOperation.Replace((imageRecordEntity2 as TableEntity));

        try
        {
            cloudTable2.Execute(operation2);
            response.isSuccess = true;
        }
        catch (Exception)
        {
            response.isSuccess = false;
        }
    }

    #endregion

    return(response);
}
/// <summary>
/// Writes an inter-role log entry to the log table via a strict Insert.
/// </summary>
/// <param name="newLog">Log entry to persist.</param>
public void AddLog(LogInterrole newLog)
{
    _table.Execute(TableOperation.Insert(newLog));
}