public static async Task Process(CloudTable settingTable, ILogger log)
{
    // Create necessary objects to get information from the datastore
    IEntity config = new Config();

    // Get all the settings for the CentralLogging partition
    DataService ds = new DataService(settingTable, config);
    Configs config_store = await ds.GetAll(Constants.ConfigStorePartitionKey);
    Configs central_logging = await ds.GetAll("central_logging");

    // Create an instance of the LogAnalyticsWriter
    LogAnalyticsWriter log_analytics_writer = new LogAnalyticsWriter(log, config_store, central_logging);

    // Get the Automate token and FQDN from the config store
    string token_setting = config_store.logging_automate_token;
    string fqdn_setting = config_store.automate_fqdn;

    // Set the time at which the count was performed
    DateTime time = DateTime.UtcNow;

    // Request the NodeCount data from the Automate server
    NodeCount node_count = await GetData("node", token_setting, fqdn_setting, log);
    node_count.time = time;
    node_count.subscriptionId = config_store.subscription_id;
    node_count.customerName = config_store.customer_name;

    // Submit the node count
    log_analytics_writer.Submit(node_count, "ChefAutomateAMAInfraNodeCount");

    // Request the UserCount data from the Automate server
    UserCount user_count = await GetData("user", token_setting, fqdn_setting, log);
    user_count.time = time;
    user_count.subscriptionId = config_store.subscription_id;
    user_count.customerName = config_store.customer_name;

    // Submit the user count
    log_analytics_writer.Submit(user_count, "ChefAutomateAMAUserCount");
}
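Process is not itself bound to a trigger; it expects a caller to supply the settings table and a logger. A minimal sketch of how it might be invoked from a timer-triggered Azure Function is shown below; the function name, class name, and five-minute schedule are illustrative assumptions, not taken from the original source.

using System.Threading.Tasks;
using Microsoft.Azure.WebJobs;
using Microsoft.Extensions.Logging;
using Microsoft.WindowsAzure.Storage.Table;

public static partial class AutomateCounts
{
    // Hypothetical timer-triggered entry point that hands the bound
    // "settings" table and logger to the Process method shown above.
    [FunctionName("AutomateCounts")]
    public static async Task Run(
        [TimerTrigger("0 */5 * * * *")] TimerInfo timer,
        [Table("settings")] CloudTable settingTable,
        ILogger log)
    {
        await Process(settingTable, log);
    }
}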
public static async Task Run(
    [QueueTrigger("chef-statsd", Connection = "AzureWebJobsStorage")] string rawmetric,
    [Table("settings")] CloudTable settingTable,
    ILogger log)
{
    // Instantiate objects to get relevant data from the configuration store
    IEntity config = new Config();
    DataService ds = new DataService(settingTable, config);

    // Get all the settings for the CentralLogging partition
    Configs config_store = await ds.GetAll(Constants.ConfigStorePartitionKey);
    Configs central_logging = await ds.GetAll("central_logging");

    // Create an instance of the LogAnalyticsWriter
    LogAnalyticsWriter log_analytics_writer = new LogAnalyticsWriter(log, config_store, central_logging);

    // Create a DateTime variable set to the Linux epoch
    System.DateTime dateTime = new System.DateTime(1970, 1, 1, 0, 0, 0, 0);

    // Parse the raw metric JSON into an object
    JObject statsd = JObject.Parse(rawmetric);

    // Iterate over the series data and create a message for each entry
    var metrics = (JArray)statsd["series"];
    foreach (JObject metric in metrics)
    {
        // Create the message to send to Log Analytics
        ChefMetricMessage message = new ChefMetricMessage();

        // Determine the time of the event
        DateTime time = dateTime.AddSeconds((double)metric["points"][0][0]);

        // Set the properties of the message
        message.metricName = (string)metric["metric"];
        message.metricType = (string)metric["type"];
        message.metricHost = (string)metric["host"];
        message.time = time;
        message.metricValue = (double)metric["points"][0][1];
        message.customerName = config_store.customer_name;
        message.subscriptionId = config_store.subscription_id;

        // Submit the metric to Log Analytics
        log_analytics_writer.Submit(message, "statsd_log");
    }
}
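The parsing above assumes the queued message follows a statsd/Datadog-style "series" layout in which each point is a [timestamp, value] pair. The snippet below is a standalone sketch of a payload that would satisfy that code path; the metric name, host, and values are hypothetical.

using System;
using Newtonsoft.Json.Linq;

public static class StatsdSample
{
    public static void Main()
    {
        // Hypothetical queue message in the "series" layout the Run function expects:
        // each entry in "points" is a [timestamp, value] pair.
        string rawmetric = @"{
          ""series"": [
            {
              ""metric"": ""chef.run.duration"",
              ""type"": ""gauge"",
              ""host"": ""chef-automate"",
              ""points"": [[1589210400, 42.5]]
            }
          ]
        }";

        JObject statsd = JObject.Parse(rawmetric);
        foreach (JObject metric in (JArray)statsd["series"])
        {
            // Same field-access pattern as the function above
            DateTime time = new DateTime(1970, 1, 1).AddSeconds((double)metric["points"][0][0]);
            Console.WriteLine($"{(string)metric["metric"]} = {(double)metric["points"][0][1]} at {time:u}");
        }
    }
}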
public async Task<HttpResponseMessage> Process(HttpRequest req, CloudTable table, ILogger log, string category)
{
    HttpResponseMessage msg;

    // Only respond to an HTTP POST
    if (req.Method == "POST")
    {
        // Create a dataservice to access data in the config table
        Config config = new Config();
        DataService ds = new DataService(table, config);

        // Get all the settings for the CentralLogging partition
        Configs config_store = await ds.GetAll(category);
        Configs central_logging = await ds.GetAll("centralLogging");

        // Get the body of the request
        string body = await new StreamReader(req.Body).ReadToEndAsync();
        string[] logs = body.Split('}');

        // Create an instance of the LogAnalyticsWriter
        LogAnalyticsWriter log_analytics_writer = new LogAnalyticsWriter(log, config_store, central_logging);

        // Create an instance of AutomateLog which will hold the submitted data
        AutomateLog data = new AutomateLog();

        // Iterate over each item in the logs
        string appended_item;
        string log_name;
        foreach (string item in logs)
        {
            // Restore the closing brace removed by the split
            appended_item = item;
            if (!appended_item.EndsWith("}"))
            {
                appended_item += "}";
            }

            // Output the item to the console
            log.LogInformation(item);

            // If the item is not empty, process it
            if (!string.IsNullOrEmpty(item))
            {
                // Deserialise the item into the AutomateLog object
                data = JsonConvert.DeserializeObject<AutomateLog>(appended_item);

                // From this data create an AutomateMessage object
                AutomateMessage automate_message = AutomateLogParser.ParseGenericLogMessage(
                    data.MESSAGE_s,
                    config_store.customer_name,
                    config_store.subscription_id,
                    log);

                // If the message is known, submit it to Log Analytics
                if (automate_message.sourcePackage.ToLower() != "uknown entry")
                {
                    // Determine the log name for the message
                    log_name = automate_message.sourcePackage.Replace("-", "") + "log";

                    // Submit the data
                    log_analytics_writer.Submit(automate_message, log_name);
                }
            }
        }

        _response.SetMessage("Log data accepted");
        msg = _response.CreateResponse();
    }
    else
    {
        _response.SetError("HTTP Method not supported", true, HttpStatusCode.BadRequest);
        msg = _response.CreateResponse();
    }

    return msg;
}
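The Split('}') logic assumes the request body arrives as a stream of concatenated JSON objects rather than a JSON array, so each fragment has to have its closing brace restored before it can be deserialised. The following is a self-contained sketch of that behaviour against a sample body; the stand-in entry class and the MESSAGE_s values are illustrative, not the real AutomateLog type or genuine Automate output.

using System;
using Newtonsoft.Json;

public class AutomateLogSampleEntry
{
    // Only MESSAGE_s appears in the listing above; the rest of the real
    // AutomateLog type is not shown, so this stand-in carries just that field.
    public string MESSAGE_s { get; set; }
}

public static class BodySplitSample
{
    public static void Main()
    {
        // Hypothetical request body: two concatenated JSON objects with no separator,
        // which is the shape the Split('}') loop above expects.
        string body = "{\"MESSAGE_s\":\"compliance-service run completed\"}" +
                      "{\"MESSAGE_s\":\"ingest-service pipeline started\"}";

        foreach (string item in body.Split('}'))
        {
            if (string.IsNullOrEmpty(item))
            {
                continue;
            }

            // Restore the brace removed by the split, then deserialise
            string json = item.EndsWith("}") ? item : item + "}";
            var entry = JsonConvert.DeserializeObject<AutomateLogSampleEntry>(json);
            Console.WriteLine(entry.MESSAGE_s);
        }
    }
}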