/// <summary>
/// Loads blob change-feed events for the given storage account between
/// <paramref name="start"/> and <paramref name="end"/> and bulk-inserts them
/// into dbo.AzureStorageChangeFeed.
/// </summary>
public ProcessChangeFeed(string BlobStorageAccountName, DateTimeOffset start, DateTimeOffset end)
{
    TaskMetaDataDatabase metaDataDb = new TaskMetaDataDatabase();
    // NOTE(review): .Result blocks synchronously on the async fetch; constructors cannot
    // await, but confirm there is no sync-context deadlock risk in the hosting environment.
    var events = ProcessChangeFeedTask(BlobStorageAccountName, start, end).Result;

    DataTable changeFeedTable = new DataTable();
    changeFeedTable.Columns.Add("EventTime", typeof(DateTimeOffset));
    changeFeedTable.Columns.Add("EventType", typeof(string));
    changeFeedTable.Columns.Add("Subject", typeof(string));
    changeFeedTable.Columns.Add("Topic", typeof(string));
    changeFeedTable.Columns.Add("EventData.BlobOperationName", typeof(string));
    changeFeedTable.Columns.Add("EventData.BlobType", typeof(string));

    foreach (var changeEvent in events)
    {
        // Values are positional; order matches the column declarations above.
        changeFeedTable.Rows.Add(
            changeEvent.EventTime,
            changeEvent.EventType.ToString(),
            changeEvent.Subject,
            changeEvent.Topic,
            changeEvent.EventData.BlobOperationName.ToString(),
            changeEvent.EventData.BlobType.ToString());
    }

    Table destination = new Table
    {
        Name = "AzureStorageChangeFeed",
        Schema = "dbo"
    };
    metaDataDb.BulkInsert(changeFeedTable, destination, true);
}
/// <summary>
/// HTTP entry point for running framework tasks. Authorised callers invoke the
/// RunFrameworkTasksCore worker via the FrameworkRunner; unauthorised callers get the
/// task runner reset and a 400 response.
/// </summary>
public async Task <IActionResult> Run([HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = null)] HttpRequest req, ILogger log, ExecutionContext context, System.Security.Claims.ClaimsPrincipal principal)
{
    bool isAuthorised = _sap.IsAuthorised(req, log);
    if (!isAuthorised)
    {
        // Reject early: release the task runner so it is not left marked as busy.
        log.LogWarning("User is not authorised to call RunFrameworkTasksHttpTrigger.");
        TaskMetaDataDatabase metaDataDb = new TaskMetaDataDatabase();
        short taskRunnerId = System.Convert.ToInt16(req.Query["TaskRunnerId"]);
        metaDataDb.ExecuteSql(string.Format("exec [dbo].[UpdFrameworkTaskRunner] {0}", taskRunnerId));
        return new BadRequestObjectResult(new { Error = "User is not authorised to call this API...." });
    }

    Guid executionId = context.InvocationId;
    using FrameworkRunner runner = new FrameworkRunner(log, executionId);
    FrameworkRunner.FrameworkRunnerWorkerWithHttpRequest worker = RunFrameworkTasks.RunFrameworkTasksCore;
    FrameworkRunner.FrameworkRunnerResult result = runner.Invoke(req, "RunFrameworkTasksHttpTrigger", worker);
    return result.Succeeded
        ? (IActionResult)new OkObjectResult(JObject.Parse(result.ReturnObject))
        : new BadRequestObjectResult(new { Error = "Execution Failed...." });
}
/// <summary>
/// Test-only helper: inserts a new Execution row, then randomly marks a subset of the
/// currently in-progress task instances as Completed (simulating task runners finishing).
/// </summary>
/// <param name="logging">Logging helper carrying the current ExecutionUid.</param>
/// <returns>An empty anonymous object (the payload is unused by callers).</returns>
public static dynamic Testing_MarkTasksCompleteCore(Logging logging)
{
    TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
    // Record this execution with an effectively open-ended end date (999 years out).
    TMD.ExecuteSql(string.Format("Insert into Execution values ('{0}', '{1}', '{2}')", logging.DefaultActivityLogItem.ExecutionUid, DateTimeOffset.Now.ToString("u"), DateTimeOffset.Now.AddYears(999).ToString("u")));
    // Picks a random count between 1 and the number of running tasks, then marks that many
    // of the lowest TaskInstanceIds as Completed and releases their task runner.
    int res = TMD.GetSqlConnection().Execute(@" Declare @RunningTasks int = ( Select Count(*) from TaskInstance where TaskRunnerId is not null aND LastExecutionStatus = 'InProgress') Declare @TasksToBeMarkedComplete int = ( SELECT ROUND(1 + (RAND() * @RunningTasks),0) AS RAND_1_100) Update TaskInstance Set LastExecutionStatus = 'Completed', TaskRunnerId = null from TaskInstance a inner join ( Select *, Rn = ROW_NUMBER() over (order by TaskInstanceId) from TaskInstance where TaskRunnerId is not null aND LastExecutionStatus = 'InProgress' ) b on a.TaskInstanceId = b.TaskInstanceId where a.TaskRunnerId is not null and a.LastExecutionStatus = 'InProgress' and b.Rn <= @TasksToBeMarkedComplete ");
    return (new { });
}
/// <summary>
/// Persists the change-feed cursor.
/// NOTE(review): <paramref name="cursor"/> is currently unused and the SQL is a no-op
/// ("Select null") — presumably a placeholder; confirm the intended persistence logic.
/// </summary>
/// <param name="cursor">Change-feed continuation cursor (currently ignored).</param>
private void SaveCursor(string cursor)
{
    TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
    using SqlConnection _con = TMD.GetSqlConnection();
    // Fix: reuse the connection opened above instead of leaving it idle while ExecuteSql
    // opens a second connection internally (same overload used elsewhere in this file).
    TMD.ExecuteSql("Select null", _con);
}
/// <summary>
/// Core of the PrepareFrameworkTasks stage: records a new Execution row, creates task
/// instances from TaskMaster/schedules, distributes available concurrency slots across
/// active task groups, then assigns task runners via the UpdateTaskInstancesWithTaskRunner
/// SQL template.
/// </summary>
/// <param name="logging">Logging helper carrying the current ExecutionUid.</param>
/// <returns>An empty anonymous object (the payload is unused by callers).</returns>
public static dynamic PrepareFrameworkTasksCore(Logging logging)
{
    TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
    // Record this execution with an effectively open-ended end date (999 years out).
    TMD.ExecuteSql(string.Format("Insert into Execution values ('{0}', '{1}', '{2}')", logging.DefaultActivityLogItem.ExecutionUid, DateTimeOffset.Now.ToString("u"), DateTimeOffset.Now.AddYears(999).ToString("u")));

    //Check status of running pipelines and calculate available "slots" based on max concurrency settings
    short _FrameworkWideMaxConcurrency = Shared.GlobalConfigs.GetInt16Config("FrameworkWideMaxConcurrency");

    //ToDo: Write Pipelines that need to be checked to Queue for now I have just reduced to only those tasks that have been running for longer than x minutes.
    //CheckLongRunningPipelines(logging);

    //Get Count of All runnning pipelines directly from the database
    short _RunnningPipelines = CountRunnningPipelines(logging);
    short _AvailableConcurrencySlots = (short)(_FrameworkWideMaxConcurrency - _RunnningPipelines);

    //Generate new task instances based on task master and schedules
    CreateTaskInstance(logging);

    //Is there is Available Slots Proceed
    if (_AvailableConcurrencySlots > 0)
    {
        List<AdsGoFast.TaskMetaData.TaskGroup> _TaskGroups = TaskGroupsStatic.GetActive();
        if (_TaskGroups.Count > 0)
        {
            short _ConcurrencySlotsAllocated = 0;
            short _DefaultTasksPerGroup = 0;
            short _DistributionLoopCounter = 1;
            //Distribute Concurrency Slots
            while (_AvailableConcurrencySlots > 0)
            {
                DistributeConcurrencySlots(ref _TaskGroups, ref _DefaultTasksPerGroup, ref _ConcurrencySlotsAllocated, ref _AvailableConcurrencySlots, _DistributionLoopCounter);
                _DistributionLoopCounter += 1;
            }
            // Temp table name is suffixed with the ExecutionUid to avoid collisions.
            Table TempTarget = new Table
            {
                Schema = "dbo",
                Name = "#TempGroups" + logging.DefaultActivityLogItem.ExecutionUid.ToString()
            };
            // Fix: the connection was previously never closed/disposed (leak). The temp table
            // must stay on this one connection for both the bulk insert and the update below.
            using SqlConnection _con = TMD.GetSqlConnection();
            TMD.BulkInsert(_TaskGroups.ToDataTable(), TempTarget, true, _con);
            Dictionary<string, string> _params = new Dictionary<string, string>
            {
                { "TempTable", TempTarget.QuotedSchemaAndName() }
            };
            string _sql = GenerateSQLStatementTemplates.GetSQL(Shared.GlobalConfigs.GetStringConfig("SQLTemplateLocation"), "UpdateTaskInstancesWithTaskRunner", _params);
            TMD.ExecuteSql(_sql, _con);
        }
    }
    return (new { });
}
/// <summary>
/// Checks the Data Factory status of each long-running pipeline and bulk-merges the
/// observed statuses into the TaskInstanceExecution table.
/// </summary>
/// <param name="logging">Logging helper.</param>
/// <returns>The number of pipelines still observed as running.</returns>
public static short CheckLongRunningPipelines(Logging logging)
{
    dynamic _ActivePipelines = ActivePipelines.GetLongRunningPipelines(logging);
    short RunningPipelines = 0;
    short FinishedPipelines = 0;
    // Staging table mirroring the TaskInstanceExecution merge target.
    DataTable dt = new DataTable();
    dt.Columns.Add(new DataColumn("TaskInstanceId", typeof(string)));
    dt.Columns.Add(new DataColumn("ExecutionUid", typeof(Guid)));
    dt.Columns.Add(new DataColumn("PipelineName", typeof(string)));
    dt.Columns.Add(new DataColumn("DatafactorySubscriptionUid", typeof(Guid)));
    dt.Columns.Add(new DataColumn("DatafactoryResourceGroup", typeof(string)));
    dt.Columns.Add(new DataColumn("DatafactoryName", typeof(string)));
    dt.Columns.Add(new DataColumn("RunUid", typeof(Guid)));
    dt.Columns.Add(new DataColumn("Status", typeof(string)));
    dt.Columns.Add(new DataColumn("SimpleStatus", typeof(string)));
    //Check Each Running Pipeline
    foreach (dynamic _Pipeline in _ActivePipelines)
    {
        // Query ADF for the pipeline's current status.
        dynamic _PipelineStatus = CheckPipelineStatus.CheckPipelineStatusMethod(_Pipeline.DatafactorySubscriptionUid.ToString(), _Pipeline.DatafactoryResourceGroup.ToString(), _Pipeline.DatafactoryName.ToString(), _Pipeline.PipelineName.ToString(), _Pipeline.AdfRunUid.ToString(), logging);
        // NOTE(review): "Runnning" (triple 'n') must match the SimpleStatus values produced by
        // CheckPipelineStatusMethod — confirm this spelling is intentional project-wide.
        if (_PipelineStatus["SimpleStatus"].ToString() == "Runnning")
        {
            RunningPipelines += 1;
        }
        if (_PipelineStatus["SimpleStatus"].ToString() == "Done")
        {
            FinishedPipelines += 1;
        }
        DataRow dr = dt.NewRow();
        dr["TaskInstanceId"] = _Pipeline.TaskInstanceId;
        dr["ExecutionUid"] = _Pipeline.ExecutionUid;
        dr["DatafactorySubscriptionUid"] = _Pipeline.DatafactorySubscriptionUid;
        dr["DatafactoryResourceGroup"] = _Pipeline.DatafactoryResourceGroup;
        dr["DatafactoryName"] = _Pipeline.DatafactoryName;
        dr["Status"] = _PipelineStatus["Status"];
        dr["SimpleStatus"] = _PipelineStatus["SimpleStatus"];
        dr["RunUid"] = (Guid)_PipelineStatus["RunId"];
        dr["PipelineName"] = _PipelineStatus["PipelineName"];
        dt.Rows.Add(dr);
    }
    // GUID suffix avoids temp-table collisions between concurrent runs.
    string TempTableName = "#Temp" + Guid.NewGuid().ToString();
    TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
    //Todo: Update both the TaskInstanceExecution and the TaskInstance;
    TMD.AutoBulkInsertAndMerge(dt, TempTableName, "TaskInstanceExecution");
    return (RunningPipelines);
}
/// <summary>
/// Updates the watermark for a task master from the JSON request body
/// ({TaskMasterId, TaskMasterWaterMarkColumnType, WaterMarkValue}): stages a single row and
/// merges it into dbo.TaskMasterWaterMark. An empty WaterMarkValue is a no-op.
/// </summary>
/// <param name="req">HTTP request whose body carries the watermark payload.</param>
/// <param name="LogHelper">Logging helper (currently unused in this method).</param>
/// <returns>{"Result": "Complete"} regardless of whether an update occurred.</returns>
/// <exception cref="System.ArgumentException">Unknown TaskMasterWaterMarkColumnType.</exception>
public static JObject UpdateWaterMark(HttpRequest req, Logging LogHelper)
{
    string requestBody = new StreamReader(req.Body).ReadToEndAsync().Result;
    dynamic data = JsonConvert.DeserializeObject(requestBody);
    // Fix: parse the payload once instead of re-parsing the whole body for every field.
    JObject payload = JObject.Parse(data.ToString());
    dynamic _TaskMasterId = payload["TaskMasterId"];
    dynamic _TaskMasterWaterMarkColumnType = payload["TaskMasterWaterMarkColumnType"];
    dynamic _WaterMarkValue = payload["WaterMarkValue"];
    if (!string.IsNullOrEmpty(_WaterMarkValue.ToString()))
    {
        // Single-row staging table matching the TaskMasterWaterMark merge target.
        DataTable dt = new DataTable();
        dt.Columns.Add(new DataColumn("TaskMasterId", typeof(long)));
        dt.Columns.Add(new DataColumn("TaskMasterWaterMarkColumnType", typeof(string)));
        dt.Columns.Add(new DataColumn("TaskMasterWaterMark_DateTime", typeof(DateTime)));
        dt.Columns.Add(new DataColumn("TaskMasterWaterMark_BigInt", typeof(long)));
        dt.Columns.Add(new DataColumn("ActiveYN", typeof(bool)));
        dt.Columns.Add(new DataColumn("UpdatedOn", typeof(DateTime)));
        DataRow dr = dt.NewRow();
        dr["TaskMasterId"] = _TaskMasterId;
        dr["TaskMasterWaterMarkColumnType"] = _TaskMasterWaterMarkColumnType;
        // Exactly one of the two watermark columns is populated, per the declared column type.
        if (_TaskMasterWaterMarkColumnType == "DateTime")
        {
            dr["TaskMasterWaterMark_DateTime"] = _WaterMarkValue;
            dr["TaskMasterWaterMark_BigInt"] = DBNull.Value;
        }
        else if (_TaskMasterWaterMarkColumnType == "BigInt")
        {
            dr["TaskMasterWaterMark_DateTime"] = DBNull.Value;
            dr["TaskMasterWaterMark_BigInt"] = _WaterMarkValue;
        }
        else
        {
            throw new System.ArgumentException(string.Format("Invalid WaterMark ColumnType = '{0}'", _TaskMasterWaterMarkColumnType));
        }
        dr["ActiveYN"] = 1;
        dr["UpdatedOn"] = DateTime.UtcNow;
        dt.Rows.Add(dr);
        // GUID suffix avoids temp-table collisions between concurrent requests.
        string TempTableName = "#Temp_TaskMasterWaterMark_" + Guid.NewGuid().ToString();
        TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
        TMD.AutoBulkInsertAndMerge(dt, TempTableName, "TaskMasterWaterMark");
    }
    JObject Root = new JObject
    {
        ["Result"] = "Complete"
    };
    return (Root);
}
/// <summary>
/// Sends the alert emails configured on the task's Target.Alerts array via SendGrid
/// (plain-text and HTML bodies rendered from template files), then marks the task
/// instance Complete; on any error the instance is marked FailedNoRetry.
/// </summary>
/// <param name="task">Task payload carrying Source, Target and Alerts configuration.</param>
/// <param name="logging">Logging helper used to record failures.</param>
public static void SendAlert(JObject task, Logging logging)
{
    TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
    try
    {
        if ((JObject)task["Target"] != null)
        {
            if ((JArray)task["Target"]["Alerts"] != null)
            {
                foreach (JObject Alert in (JArray)task["Target"]["Alerts"])
                {
                    //Only Send out for Operator Level Alerts
                    //if (Alert["AlertCategory"].ToString() == "Task Specific Operator Alert")
                    {
                        //Get Plain Text and Email Subject from Template Files
                        // Substitution parameters for the {Key} placeholders in the templates.
                        System.Collections.Generic.Dictionary<string, string> Params = new System.Collections.Generic.Dictionary<string, string>();
                        Params.Add("Source.RelativePath", task["Source"]["RelativePath"].ToString());
                        Params.Add("Source.DataFileName", task["Source"]["DataFileName"].ToString());
                        Params.Add("Alert.EmailRecepientName", Alert["EmailRecepientName"].ToString());
                        string _plainTextContent = System.IO.File.ReadAllText(System.IO.Path.Combine(Shared._ApplicationBasePath, Shared._ApplicationOptions.LocalPaths.HTMLTemplateLocation, Alert["EmailTemplateFileName"].ToString() + ".txt"));
                        _plainTextContent = _plainTextContent.FormatWith(Params, MissingKeyBehaviour.ThrowException, null, '{', '}');
                        string _htmlContent = System.IO.File.ReadAllText(System.IO.Path.Combine(Shared._ApplicationBasePath, Shared._ApplicationOptions.LocalPaths.HTMLTemplateLocation, Alert["EmailTemplateFileName"].ToString() + ".html"));
                        _htmlContent = _htmlContent.FormatWith(Params, MissingKeyBehaviour.ThrowException, null, '{', '}');
                        var apiKey = System.Environment.GetEnvironmentVariable("SENDGRID_APIKEY");
                        var client = new SendGridClient(apiKey);
                        var msg = new SendGridMessage()
                        {
                            From = new EmailAddress(task["Target"]["SenderEmail"].ToString(), task["Target"]["SenderDescription"].ToString()),
                            Subject = Alert["EmailSubject"].ToString(),
                            PlainTextContent = _plainTextContent,
                            HtmlContent = _htmlContent
                        };
                        msg.AddTo(new EmailAddress(Alert["EmailRecepient"].ToString(), Alert["EmailRecepientName"].ToString()));
                        // NOTE(review): synchronous block on the async send (.Result) — confirm
                        // acceptable in this host; failures surface via the catch below.
                        var res = client.SendEmailAsync(msg).Result;
                    }
                }
            }
            // Mark the instance Complete once all configured alerts have been sent.
            TMD.LogTaskInstanceCompletion(System.Convert.ToInt64(task["TaskInstanceId"]), Guid.Parse(task["ExecutionUid"].ToString()), TaskMetaData.BaseTasks.TaskStatus.Complete, System.Guid.Empty, "");
        }
    }
    catch (Exception e)
    {
        logging.LogErrors(e);
        TMD.LogTaskInstanceCompletion(System.Convert.ToInt64(task["TaskInstanceId"]), Guid.Parse(task["ExecutionUid"].ToString()), TaskMetaData.BaseTasks.TaskStatus.FailedNoRetry, System.Guid.Empty, "Failed to send email");
    }
}
/// <summary>
/// Counts task instances that are currently active: in progress, queued, or retryable
/// ('Untried'/'FailedRetry') with a task runner still assigned.
/// </summary>
/// <param name="logging">Logging helper.</param>
/// <returns>The number of active task instances.</returns>
public static short CountActivePipelines(Logging logging)
{
    logging.LogDebug("Get CountActivePipelines called.");
    TaskMetaDataDatabase metaDataDb = new TaskMetaDataDatabase();
    IEnumerable<short> activeCounts = metaDataDb.GetSqlConnection().QueryWithRetry<short>(@" Select count(*) ActiveCount from [dbo].[TaskInstance] where LastExecutionStatus in ('InProgress', 'Queued') or (LastExecutionStatus in ('Untried', 'FailedRetry') and TaskRunnerId is not null) ");
    return activeCounts.First();
}
/// <summary>
/// Bulk-inserts <paramref name="dt"/> into a staging table and then merges it into the
/// target table using the GenericMerge SQL template (update on primary-key match,
/// insert otherwise).
/// </summary>
/// <param name="dt">Rows to merge; column names must match the target table.</param>
/// <param name="StagingTableName">Name of the (typically #temp) staging table to create and load.</param>
/// <param name="TargetTableName">Name of the merge target table in schema dbo.</param>
public void AutoBulkInsertAndMerge(DataTable dt, string StagingTableName, string TargetTableName)
{
    TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
    using (SqlConnection conn = TMD.GetSqlConnection())
    {
        // Both tables share this one connection so a #temp staging table remains visible
        // for the merge. The staging table has no schema (temp-table semantics).
        Table SourceTable = new Table
        {
            Name = StagingTableName,
            Schema = null,
            PersistedCon = conn
        };
        Table TargetTable = new Table
        {
            Name = TargetTableName,
            Schema = "dbo",
            PersistedCon = conn
        };
        TargetTable.GetColumnsFromExistingDB(true);
        TMD.BulkInsert(dt, SourceTable, true, conn);
        SourceTable.GetColumnsFromExistingDB(true);
        // Build the merge-statement fragments from the two tables' column metadata.
        // NOTE(review): the long positional boolean flag lists are part of the Snippets API —
        // see GenerateColumnJoinOrUpdateSnippet's definition for each flag's meaning.
        string PrimaryKeyJoin = Snippets.GenerateColumnJoinOrUpdateSnippet(SourceTable, TargetTable, "a", "b", "=", " and ", true, true, false, false, false, null, false, false);
        string ColList = Snippets.GenerateColumnJoinOrUpdateSnippet(SourceTable, TargetTable, "", "", "=", ",", true, true, false, false, true, null, true, false);
        string SelectListForInsert = Snippets.GenerateColumnJoinOrUpdateSnippet(SourceTable, TargetTable, "b", "", "", ",", true, false, false, false, true, null, true, false);
        string InsertList = Snippets.GenerateColumnJoinOrUpdateSnippet(SourceTable, TargetTable, "", "", "", ",", true, false, false, false, true, null, true, false);
        string UpdateClause = Snippets.GenerateColumnJoinOrUpdateSnippet(SourceTable, TargetTable, "b", "", "=", ",", false, false, false, false, true, null, false, false);
        // Parameters substituted into the GenericMerge template.
        Dictionary<string, string> SqlParams = new Dictionary<string, string>
        {
            { "TargetFullName", TargetTable.QuotedSchemaAndName() },
            { "SourceFullName", SourceTable.QuotedSchemaAndName() },
            { "PrimaryKeyJoin_AB", PrimaryKeyJoin },
            { "UpdateClause", UpdateClause },
            { "SelectListForInsert", SelectListForInsert },
            { "InsertList", InsertList }
        };
        string MergeSQL = GenerateSQLStatementTemplates.GetSQL(Shared.GlobalConfigs.GetStringConfig("SQLTemplateLocation"), "GenericMerge", SqlParams);
        conn.Execute(MergeSQL);
    }
}
/// <summary>
/// Returns TaskInstanceExecution rows that have been InProgress or Queued for more than
/// 30 minutes, newest first.
/// </summary>
/// <param name="logging">Logging helper.</param>
/// <returns>Dynamic result set of long-running execution rows.</returns>
public static dynamic GetLongRunningPipelines(Logging logging)
{
    logging.LogDebug("Get GetActivePipelines called.");
    TaskMetaDataDatabase metaDataDb = new TaskMetaDataDatabase();
    dynamic longRunningRows = metaDataDb.GetSqlConnection().QueryWithRetry(@" Select * from [dbo].[TaskInstanceExecution] where Status in ('InProgress', 'Queued') and datediff(minute, StartDateTime, GetUtcDate()) > 30 order by StartDateTime desc ");
    return longRunningRows;
}
/// <summary>
/// Timer-triggered dispatcher: every two minutes (when EnableRunFrameworkTasks is set),
/// fetches the registered framework task runners and invokes the
/// RunFrameworkTasksHttpTrigger function for each one with a bearer token.
/// </summary>
public static async Task Run([TimerTrigger("0 */2 * * * *")] TimerInfo myTimer, ILogger log, ExecutionContext context)
{
    if (Shared.GlobalConfigs.GetBoolConfig("EnableRunFrameworkTasks"))
    {
        TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
        using (var client = new System.Net.Http.HttpClient())
        {
            using (SqlConnection _con = TMD.GetSqlConnection())
            {
                var ftrs = _con.QueryWithRetry("Exec dbo.GetFrameworkTaskRunners");
                foreach (var ftr in ftrs)
                {
                    int TaskRunnerId = ((dynamic)ftr).TaskRunnerId;
                    try
                    {
                        //Lets get an access token based on MSI or Service Principal
                        var secureFunctionAPIURL = string.Format("{0}/api/RunFrameworkTasksHttpTrigger?TaskRunnerId={1}", Shared.GlobalConfigs.GetStringConfig("AzureFunctionURL"), TaskRunnerId.ToString());
                        var accessToken = Shared.Azure.AzureSDK.GetAzureRestApiToken(Shared.GlobalConfigs.GetStringConfig("AzureFunctionURL"));
                        using HttpRequestMessage httpRequestMessage = new HttpRequestMessage
                        {
                            Method = HttpMethod.Get,
                            RequestUri = new Uri(secureFunctionAPIURL),
                            Headers = { { System.Net.HttpRequestHeader.Authorization.ToString(), "Bearer " + accessToken } }
                        };
                        //Todo Add some error handling in case function cannot be reached. Note Wait time is there to provide sufficient time to complete post before the HttpClient is disposed.
                        var HttpTask = client.SendAsync(httpRequestMessage).Wait(3000);
                    }
                    catch (Exception)
                    {
                        // Reset the task runner so it can be picked up on the next cycle.
                        _con.ExecuteWithRetry($"[dbo].[UpdFrameworkTaskRunner] {TaskRunnerId.ToString()}");
                        // Fix: rethrow with "throw;" to preserve the original stack trace
                        // ("throw e;" resets it — CA2200).
                        throw;
                    }
                }
            }
        }
    }
}
/// <summary>
/// Builds a SQL MERGE statement from the staging/target column definitions supplied in the
/// request body. Temporary tables (suffixed with a fresh GUID) are created so the merge
/// generator can read real column metadata; the temp prefixes/suffixes are then swapped
/// back to the caller-supplied table names in the returned statement.
/// </summary>
/// <param name="req">HTTP request whose JSON body carries Stage/Target column arrays and table names.</param>
/// <param name="logging">Logging helper.</param>
/// <returns>{"MergeStatement": "..."} containing the generated MERGE SQL.</returns>
public static JObject GetSQLMergeStatementCore(HttpRequest req, Logging logging)
{
    string requestBody = new StreamReader(req.Body).ReadToEndAsync().Result;
    dynamic data = JsonConvert.DeserializeObject(requestBody);
    JObject Root = new JObject();
    TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
    using (SqlConnection _con = TMD.GetSqlConnection())
    {
        string _token = Shared._AzureAuthenticationCredentialProvider.GetAzureRestApiToken("https://database.windows.net/");
        // Unique suffix so concurrent requests don't collide on temp-table names.
        String g = Guid.NewGuid().ToString().Replace("-", "");
        _con.AccessToken = _token;
        // Create the staging temp table from the supplied column definitions.
        JArray arrStage = (JArray)data["Stage"];
        string _StagingTableSchema = data["StagingTableSchema"].ToString();
        string _StagingTableName = "#Temp_" + data["StagingTableName"].ToString() + g.ToString();
        string _CreateStatementStage = GenerateSQLStatementTemplates.GetCreateTable(arrStage, _StagingTableSchema, _StagingTableName, false);
        TMD.ExecuteSql(_CreateStatementStage, _con);
        // Create the target temp table from the supplied column definitions.
        JArray arrTarget = (JArray)data["Target"];
        string _TargetTableSchema = data["TargetTableSchema"].ToString();
        string _TargetTableName = "#Temp_" + data["TargetTableName"].ToString() + g.ToString();
        string _CreateStatementTarget = GenerateSQLStatementTemplates.GetCreateTable(arrTarget, _TargetTableSchema, _TargetTableName, false);
        TMD.ExecuteSql(_CreateStatementTarget, _con);
        string _MergeStatement = TMD.GenerateMergeSQL(_StagingTableSchema, _StagingTableName, _TargetTableSchema, _TargetTableName, _con, true, logging);
        // Strip the temp prefix and GUID suffix to recover the real table names.
        string fullStagingTableName = string.Format("[{0}].[{1}]", _StagingTableSchema, _StagingTableName.Replace("#Temp_", "").Replace(g.ToString(), ""));
        string fullTargetTableName = string.Format("[{0}].[{1}]", _TargetTableSchema, _TargetTableName.Replace("#Temp_", "").Replace(g.ToString(), ""));
        _MergeStatement = _MergeStatement.Replace(_TargetTableName, fullTargetTableName);
        _MergeStatement = _MergeStatement.Replace(_StagingTableName, fullStagingTableName);
        //Add Select for ADF Lookup Activity
        _MergeStatement += Environment.NewLine + "Select 1 ";
        Root["MergeStatement"] = _MergeStatement;
        logging.LogInformation("GetSQLMergeStatement Function complete.");
    }
    return (Root);
}
/// <summary>
/// Loads all active rows from dbo.TaskTypeMapping into a typed DataTable.
/// </summary>
/// <param name="logging">Logging helper.</param>
/// <returns>DataTable with one row per active task type mapping.</returns>
public static DataTable GetTaskTypeMapping(Logging logging)
{
    logging.LogDebug("Load TaskTypeMapping called.");
    TaskMetaDataDatabase metaDataDb = new TaskMetaDataDatabase();
    dynamic mappingRows = metaDataDb.GetSqlConnection().QueryWithRetry(@"select * from [dbo].[TaskTypeMapping] Where ActiveYN = 1");

    DataTable mappingTable = new DataTable();
    mappingTable.Columns.Add(new DataColumn("TaskTypeId", typeof(int)));
    mappingTable.Columns.Add(new DataColumn("MappingType", typeof(string)));
    mappingTable.Columns.Add(new DataColumn("MappingName", typeof(string)));
    mappingTable.Columns.Add(new DataColumn("SourceSystemType", typeof(string)));
    mappingTable.Columns.Add(new DataColumn("SourceType", typeof(string)));
    mappingTable.Columns.Add(new DataColumn("TargetSystemType", typeof(string)));
    mappingTable.Columns.Add(new DataColumn("TargetType", typeof(string)));
    mappingTable.Columns.Add(new DataColumn("TaskDatafactoryIR", typeof(string)));

    foreach (dynamic mapping in mappingRows)
    {
        // Values are positional; order matches the column declarations above.
        mappingTable.Rows.Add(
            mapping.TaskTypeId,
            mapping.MappingType,
            mapping.MappingName,
            mapping.SourceSystemType,
            mapping.SourceType,
            mapping.TargetSystemType,
            mapping.TargetType,
            mapping.TaskDatafactoryIR);
    }
    logging.LogDebug("Load TaskTypeMapping complete.");
    return mappingTable;
}
/// <summary>
/// Pulls activity-level logs from Application Insights (KQL over the REST API) that are
/// newer than the latest timestamp already stored, stages them in a temp table and merges
/// them into dbo.ActivityLevelLogs via the MergeIntoActivityLevelLogs template.
/// </summary>
/// <param name="logging">Logging helper.</param>
/// <returns>An empty anonymous object (the payload is unused by callers).</returns>
public dynamic GetActivityLevelLogsCore(Logging logging)
{
    string AppInsightsWorkspaceId = _appOptions.Value.ServiceConnections.AppInsightsWorkspaceId;
    using var client = _appInsightsContext.httpClient.CreateClient(_appInsightsContext.httpClientName);
    TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
    using SqlConnection _conRead = TMD.GetSqlConnection();
    //Get Last Request Date
    var MaxTimesGenQuery = _conRead.QueryWithRetry(@" select max([timestamp]) maxtimestamp from ActivityLevelLogs");
    foreach (var datafactory in MaxTimesGenQuery)
    {
        // Default lookback window applies when no logs have been stored yet.
        DateTimeOffset MaxAllowedLogTimeGenerated = DateTimeOffset.UtcNow.AddDays(-1 * _appOptions.Value.ServiceConnections.AppInsightsMaxNumberOfDaysToRequest);
        DateTimeOffset MaxObservedLogTimeGenerated = DateTimeOffset.UtcNow.AddDays(-1 * _appOptions.Value.ServiceConnections.AppInsightsMaxNumberOfDaysToRequest);
        if (datafactory.maxtimestamp != null)
        {
            // Overlap slightly with the last observed timestamp to avoid gaps between pulls.
            MaxObservedLogTimeGenerated = ((DateTimeOffset)datafactory.maxtimestamp).AddMinutes(-1 * _appOptions.Value.ServiceConnections.AppInsightsMinutesOverlap);
            //Make sure that we don't get more than max to ensure we dont get timeouts etc.
            if ((MaxObservedLogTimeGenerated) <= MaxAllowedLogTimeGenerated)
            {
                MaxObservedLogTimeGenerated = MaxAllowedLogTimeGenerated;
            }
        }
        // Parameters substituted into the KQL template.
        Dictionary<string, object> KqlParams = new Dictionary<string, object>
        {
            { "MaxLogTimeGenerated", MaxObservedLogTimeGenerated.ToString("yyyy-MM-dd HH:mm:ss.ff K") }
        };
        string KQL = System.IO.File.ReadAllText(System.IO.Path.Combine(Shared._ApplicationBasePath, Shared._ApplicationOptions.LocalPaths.KQLTemplateLocation, "GetActivityLevelLogs.kql"));
        KQL = KQL.FormatWith(KqlParams, FormatWith.MissingKeyBehaviour.ThrowException, null, '{', '}');
        JObject JsonContent = new JObject();
        JsonContent["query"] = KQL;
        var postContent = new StringContent(JsonContent.ToString(), System.Text.Encoding.UTF8, "application/json");
        var response = client.PostAsync($"https://api.applicationinsights.io/v1/apps/{AppInsightsWorkspaceId}/query", postContent).Result;
        if (response.StatusCode == System.Net.HttpStatusCode.OK)
        {
            //Start to parse the response content
            HttpContent responseContent = response.Content;
            var content = response.Content.ReadAsStringAsync().Result;
            var tables = ((JArray)(JObject.Parse(content)["tables"]));
            if (tables.Count > 0)
            {
                // Rebuild the Kusto result as a DataTable using the declared column types.
                DataTable dt = new DataTable();
                var rows = (JArray)(tables[0]["rows"]);
                var columns = (JArray)(tables[0]["columns"]);
                foreach (JObject c in columns)
                {
                    DataColumn dc = new DataColumn();
                    dc.ColumnName = c["name"].ToString();
                    dc.DataType = KustoDataTypeMapper[c["type"].ToString()];
                    dt.Columns.Add(dc);
                }
                foreach (JArray r in rows)
                {
                    DataRow dr = dt.NewRow();
                    for (int i = 0; i < columns.Count; i++)
                    {
                        if (((Newtonsoft.Json.Linq.JValue)r[i]).Value != null)
                        {
                            dr[i] = ((Newtonsoft.Json.Linq.JValue)r[i]).Value;
                        }
                        else
                        {
                            dr[i] = DBNull.Value;
                        }
                    }
                    dt.Rows.Add(dr);
                }
                Table t = new Table();
                t.Schema = "dbo";
                string TableGuid = Guid.NewGuid().ToString();
                // Fix: the original assigned the literal "#ActivityLevelLogs{TableGuid}" (the '$'
                // interpolation prefix was missing), so TableGuid was never used and every run
                // targeted the same temp-table name.
                t.Name = $"#ActivityLevelLogs{TableGuid}";
                using (SqlConnection _conWrite = TMD.GetSqlConnection())
                {
                    TMD.BulkInsert(dt, t, true, _conWrite);
                    Dictionary<string, string> SqlParams = new Dictionary<string, string>
                    {
                        { "TempTable", t.QuotedSchemaAndName() },
                        { "DatafactoryId", "1" }
                    };
                    string MergeSQL = GenerateSQLStatementTemplates.GetSQL(System.IO.Path.Combine(Shared._ApplicationBasePath, Shared._ApplicationOptions.LocalPaths.SQLTemplateLocation), "MergeIntoActivityLevelLogs", SqlParams);
                    logging.LogInformation(MergeSQL.ToString());
                    _conWrite.ExecuteWithRetry(MergeSQL);
                    _conWrite.Close();
                    _conWrite.Dispose();
                }
            }
            else
            {
                // NOTE(review): this branch covers an empty "tables" payload on an HTTP 200;
                // non-OK responses are currently ignored silently — confirm that is intended.
                logging.LogErrors(new Exception("Kusto query failed getting ADFPipeline Stats."));
            }
        }
    }
    return new { };
}
/// <summary>
/// Receives a logging payload (from ADF or AF callers), copies the supplied fields onto the
/// default activity log item, and — for "Data-Movement-Master" activities — records task
/// instance completion with a status derived from the payload and the framework retry limit.
/// </summary>
/// <param name="req">HTTP request whose JSON body carries the log fields.</param>
/// <param name="LogHelper">Logging helper that receives the parsed values.</param>
/// <returns>{"Result": "Complete"}.</returns>
public static JObject LogCore(HttpRequest req, Logging LogHelper)
{
    short _FrameworkNumberOfRetries = Shared.GlobalConfigs.GetInt16Config("FrameworkNumberOfRetries");
    string requestBody = new StreamReader(req.Body).ReadToEndAsync().Result;
    dynamic data = JsonConvert.DeserializeObject(requestBody);
    // NOTE(review): the body is re-parsed for every field below — functionally fine, but a
    // single JObject.Parse would suffice.
    dynamic _TaskInstanceId = JObject.Parse(data.ToString())["TaskInstanceId"];
    dynamic _NumberOfRetries = JObject.Parse(data.ToString())["NumberOfRetries"];
    dynamic _PostObjectExecutionUid = JObject.Parse(data.ToString())["ExecutionUid"];
    dynamic _AdfRunUid = JObject.Parse(data.ToString())["RunId"];
    dynamic _LogTypeId = JObject.Parse(data.ToString())["LogTypeId"]; //1 Error, 2 Warning, 3 Info, 4 Performance, 5 Debug
    dynamic _LogSource = JObject.Parse(data.ToString())["LogSource"]; //ADF, AF
    dynamic _ActivityType = JObject.Parse(data.ToString())["ActivityType"];
    dynamic _StartDateTimeOffSet = JObject.Parse(data.ToString())["StartDateTimeOffSet"];
    dynamic _Status = JObject.Parse(data.ToString())["Status"]; //Started Failed Completed
    dynamic _Comment = JObject.Parse(data.ToString())["Comment"];
    // Strip single quotes from the comment (it is embedded in downstream SQL text).
    _Comment = _Comment == null ? null : _Comment.ToString().Replace("'", "");
    dynamic _EndDateTimeOffSet = JObject.Parse(data.ToString())["EndDateTimeOffSet"];
    dynamic _RowsInserted = JObject.Parse(data.ToString())["RowsInserted"];
    // Copy supplied fields onto the default activity log item; nulls leave existing values.
    if (_TaskInstanceId != null)
    {
        LogHelper.DefaultActivityLogItem.TaskInstanceId = (long?)_TaskInstanceId;
    }
    if (_LogSource != null)
    {
        LogHelper.DefaultActivityLogItem.LogSource = (string)_LogSource;
    }
    if (_LogTypeId != null)
    {
        LogHelper.DefaultActivityLogItem.LogTypeId = (short?)_LogTypeId;
    }
    if (_StartDateTimeOffSet != null)
    {
        LogHelper.DefaultActivityLogItem.StartDateTimeOffset = (DateTimeOffset)_StartDateTimeOffSet;
    }
    if (_Status != null)
    {
        LogHelper.DefaultActivityLogItem.Status = (string)_Status;
    }
    if (_EndDateTimeOffSet != null)
    {
        LogHelper.DefaultActivityLogItem.EndDateTimeOffset = (DateTimeOffset)_EndDateTimeOffSet;
    }
    if (_PostObjectExecutionUid != null)
    {
        LogHelper.DefaultActivityLogItem.ExecutionUid = (Guid?)_PostObjectExecutionUid;
    }
    LogHelper.LogInformation(_Comment);
    TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
    BaseTasks.TaskStatus TaskStatus = new BaseTasks.TaskStatus();
    // Only Data-Movement-Master activities drive task instance completion records.
    if (_ActivityType == "Data-Movement-Master")
    {
        if (_Status == "Failed")
        {
            //Todo Put Max Number of retries in DB at TaskMasterLevel -- This has now been done. Have left logic in function as stored procedure handles with all failed statuses.
            _NumberOfRetries = (_NumberOfRetries == null) ? 0 : (int)_NumberOfRetries + 1;
            // Retry until the framework-wide retry limit is exhausted.
            TaskStatus = ((_NumberOfRetries < _FrameworkNumberOfRetries) ? BaseTasks.TaskStatus.FailedRetry : BaseTasks.TaskStatus.FailedNoRetry);
        }
        else
        {
            // Non-failure statuses must map to a known TaskStatus enum value; unknown
            // statuses are logged and treated as FailedNoRetry.
            if (Enum.TryParse<BaseTasks.TaskStatus>(_Status.ToString(), out TaskStatus) == false)
            {
                string InvalidStatus = "TaskStatus Enum does not exist for: " + _Status.ToString();
                LogHelper.LogErrors(new Exception("TaskStatus Enum does not exist for: " + _Status.ToString()));
                _Comment = _Comment.ToString() + "." + InvalidStatus;
                TaskStatus = BaseTasks.TaskStatus.FailedNoRetry;
            }
        }
        TMD.LogTaskInstanceCompletion((Int64)_TaskInstanceId, (System.Guid)_PostObjectExecutionUid, TaskStatus, (System.Guid)_AdfRunUid, (String)_Comment);
    }
    JObject Root = new JObject
    {
        ["Result"] = "Complete"
    };
    return (Root);
}
/// <summary>
/// Queries ADF pipeline runs belonging to a single run group within [startDT, endDT],
/// pages through the results via the continuation token, collects pipeline-run rows
/// (and, per run, activity-run rows via QueryActivityRuns) into DataTables, and
/// bulk-merges them into the ADFPipelineRun / ADFActivity metadata tables.
/// </summary>
/// <param name="subscriptionId">Azure subscription containing the data factory.</param>
/// <param name="resourceGroup">Resource group of the data factory.</param>
/// <param name="factoryName">Name of the data factory.</param>
/// <param name="_rungroupid">RunGroupId used to filter the pipeline-run query.</param>
/// <param name="startDT">LastUpdatedAfter bound for the run query.</param>
/// <param name="endDT">LastUpdatedBefore bound for the run query.</param>
/// <param name="logging">Framework logging helper.</param>
public static void QueryPipelineRuns(string subscriptionId, string resourceGroup, string factoryName, string _rungroupid, DateTime startDT, DateTime endDT, Logging logging)
{
    #region QueryPipelineRuns
    logging.LogInformation("Query ADF Pipeline Runs.");

    // Staging table for pipeline runs. Column names (including the "IsLastest" typo)
    // must match the target ADFPipelineRun table consumed by AutoBulkInsertAndMerge,
    // so they are left exactly as-is.
    DataTable dt = new DataTable();
    dt.Columns.Add(new DataColumn("ExecutionUid", typeof(Guid)));
    dt.Columns.Add(new DataColumn("TaskInstanceId", typeof(Int64)));
    dt.Columns.Add(new DataColumn("TaskMasterId", typeof(Int64)));
    dt.Columns.Add(new DataColumn("DurationInMs", typeof(Int64)));
    dt.Columns.Add(new DataColumn("IsLastest", typeof(Boolean)));
    dt.Columns.Add(new DataColumn("LastUpdated", typeof(DateTime)));
    dt.Columns.Add(new DataColumn("RunId", typeof(Guid)));
    dt.Columns.Add(new DataColumn("PipelineName", typeof(String)));
    dt.Columns.Add(new DataColumn("RunStart", typeof(DateTime)));
    dt.Columns.Add(new DataColumn("RunEnd", typeof(DateTime)));
    dt.Columns.Add(new DataColumn("RunDimensions", typeof(String)));
    dt.Columns.Add(new DataColumn("Status", typeof(String)));

    // Staging table for activity runs; populated by QueryActivityRuns (passed by ref below).
    DataTable ActivityDt = new DataTable();
    ActivityDt.Columns.Add(new DataColumn("ActivityName", typeof(String)));
    ActivityDt.Columns.Add(new DataColumn("RunId", typeof(Guid)));
    ActivityDt.Columns.Add(new DataColumn("ActivityRunStart", typeof(DateTime)));
    ActivityDt.Columns.Add(new DataColumn("ActivityRunEnd", typeof(DateTime)));
    ActivityDt.Columns.Add(new DataColumn("ActivityRunId", typeof(Guid)));
    ActivityDt.Columns.Add(new DataColumn("ActivityType", typeof(String)));
    ActivityDt.Columns.Add(new DataColumn("DurationInMs", typeof(Int64)));
    ActivityDt.Columns.Add(new DataColumn("OutPut", typeof(String)));
    ActivityDt.Columns.Add(new DataColumn("PipelineName", typeof(String)));
    ActivityDt.Columns.Add(new DataColumn("PipelineRunId", typeof(String)));
    ActivityDt.Columns.Add(new DataColumn("Status", typeof(String)));

    using (var client = DataFactoryClient.CreateDataFactoryClient(subscriptionId))
    {
        PipelineRunsQueryResponse pipelineRunsQueryResponse;

        // Activity-run filter deliberately extends 2h past endDT so activities that finish
        // after their parent pipeline's window are still picked up.
        RunFilterParameters filterParameterActivityRuns = new RunFilterParameters();
        filterParameterActivityRuns.LastUpdatedAfter = startDT;
        filterParameterActivityRuns.LastUpdatedBefore = endDT.AddHours(+2);

        RunFilterParameters filterParameter = new RunFilterParameters();
        filterParameter.LastUpdatedAfter = startDT;
        filterParameter.LastUpdatedBefore = endDT;
        IList<string> rungroupid = new List<string> { _rungroupid };
        IList<RunQueryFilter> filter = new List<RunQueryFilter>();
        filter.Add(new RunQueryFilter { Operand = RunQueryFilterOperand.RunGroupId, OperatorProperty = RunQueryFilterOperator.Equals, Values = rungroupid });
        filterParameter.Filters = filter;

        logging.LogInformation(String.Format("API PipelineRuns.QueryByFactory Start"));
        pipelineRunsQueryResponse = client.PipelineRuns.QueryByFactory(resourceGroup, factoryName, filterParameter);
        logging.LogInformation(String.Format("API PipelineRuns.QueryByFactory End"));

        var enumerator = pipelineRunsQueryResponse.Value.GetEnumerator();
        PipelineRun pipelineRuns;
        string runId = String.Empty;
        int item = 0;
        // Outer loop follows the API continuation token; inner loop drains one page.
        while (true)
        {
            for (bool hasMoreRuns = enumerator.MoveNext(); hasMoreRuns;)
            {
                pipelineRuns = enumerator.Current;
                hasMoreRuns = enumerator.MoveNext();
                runId = pipelineRuns.RunId;
                item += 1;
                logging.LogInformation(String.Format("PipelineRuns.QueryByFactory RunId {0} Current Item {1} of {2}", runId, item, pipelineRunsQueryResponse.Value.Count));
                DataRow dr = dt.NewRow();
                // The framework passes the whole TaskObject as the first (and only expected)
                // pipeline parameter; its JSON carries ExecutionUid / TaskInstanceId / TaskMasterId.
                string _param = string.Empty;
                foreach (var element in pipelineRuns.Parameters)
                {
                    _param = element.Value;
                    break;
                }
                dr["ExecutionUid"] = Shared.JsonHelpers.GetStringValueFromJSON(logging, "ExecutionUid", JObject.Parse(_param), null, true);
                dr["TaskInstanceId"] = Shared.JsonHelpers.GetStringValueFromJSON(logging, "TaskInstanceId", JObject.Parse(_param), null, true);
                dr["TaskMasterId"] = Shared.JsonHelpers.GetStringValueFromJSON(logging, "TaskMasterId", JObject.Parse(_param), null, true);
                dr["DurationInMs"] = pipelineRuns.DurationInMs ?? (object)DBNull.Value;
                dr["IsLastest"] = pipelineRuns.IsLatest ?? (object)DBNull.Value;
                dr["LastUpdated"] = pipelineRuns.LastUpdated ?? (object)DBNull.Value;
                dr["RunId"] = pipelineRuns.RunId;
                dr["PipelineName"] = pipelineRuns.PipelineName ?? (object)DBNull.Value;
                dr["RunStart"] = pipelineRuns.RunStart ?? (object)DBNull.Value;
                dr["RunEnd"] = pipelineRuns.RunEnd ?? (object)DBNull.Value;
                // FIX: this column previously stored pipelineRuns.PipelineName (copy/paste error).
                // Serialize the run-dimensions dictionary instead; NULL when the run has none.
                // NOTE(review): assumes the referenced ADF SDK version exposes PipelineRun.RunDimensions — confirm.
                dr["RunDimensions"] = pipelineRuns.RunDimensions == null ? (object)DBNull.Value : JObject.FromObject(pipelineRuns.RunDimensions).ToString();
                dr["Status"] = pipelineRuns.Status ?? (object)DBNull.Value;
                dt.Rows.Add(dr);
                // Fetch the activity runs for this pipeline run into ActivityDt.
                QueryActivityRuns(subscriptionId, resourceGroup, factoryName, runId, runId, filterParameterActivityRuns, logging, ref ActivityDt);
            }
            if (pipelineRunsQueryResponse.ContinuationToken == null)
            {
                break;
            }
            filterParameter.ContinuationToken = pipelineRunsQueryResponse.ContinuationToken;
            pipelineRunsQueryResponse = client.PipelineRuns.QueryByFactory(resourceGroup, factoryName, filterParameter);
            enumerator = pipelineRunsQueryResponse.Value.GetEnumerator();
            item = 0;
        }
    }

    if (ActivityDt.Rows.Count > 0)
    {
        string TempTableName = "#Temp_ADFActivities_" + Guid.NewGuid().ToString();
        TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
        TMD.AutoBulkInsertAndMerge(ActivityDt, TempTableName, "ADFActivity");
    }
    if (dt.Rows.Count > 0)
    {
        string TempTableName = "#Temp_ADFPipelineRun_" + Guid.NewGuid().ToString();
        TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
        TMD.AutoBulkInsertAndMerge(dt, TempTableName, "ADFPipelineRun");
    }
    #endregion
}
/// <summary>
/// For every data factory registered in the DataFactory table, runs the GetADFActivityErrors
/// KQL template against that factory's Log Analytics workspace (REST API) and merges the
/// resulting error rows into the ADFActivityErrors table.
/// </summary>
/// <param name="logging">Framework logging helper.</param>
/// <returns>An empty anonymous object; callers ignore the result.</returns>
public static dynamic GetADFActivityErrors(Logging logging)
{
    using var client = new HttpClient();
    string token = Shared.Azure.AzureSDK.GetAzureRestApiToken("https://api.loganalytics.io");
    client.DefaultRequestHeaders.Accept.Clear();
    client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
    client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", token);

    TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
    using SqlConnection _conRead = TMD.GetSqlConnection();
    //Get Last Request Date
    //ToDo Add DataFactoryId field to ADFActivityErrors
    var MaxTimesGen = _conRead.QueryWithRetry(@"
        Select a.*, MaxTimeGenerated MaxTimeGenerated from
        Datafactory a
        left join
        (
            Select DataFactoryId, MaxTimeGenerated = Max(TimeGenerated)
            from ADFActivityErrors b
            group by DataFactoryId
        ) b on a.Id = b.DatafactoryId ");

    foreach (var datafactory in MaxTimesGen)
    {
        // FIX: the watermark default was declared outside the loop, so a factory with no
        // prior error rows inherited the previous factory's watermark. Reset it per factory:
        // default to a 30-day lookback, otherwise resume 5 minutes before the stored maximum
        // (overlap guards against late-arriving records).
        DateTimeOffset MaxTimeGenerated = DateTimeOffset.UtcNow.AddDays(-30);
        if (datafactory.MaxTimeGenerated != null)
        {
            MaxTimeGenerated = ((DateTimeOffset)datafactory.MaxTimeGenerated).AddMinutes(-5);
        }
        string workspaceId = datafactory.LogAnalyticsWorkspaceId.ToString();

        // Substitution values for the {Placeholder} tokens in the KQL template.
        Dictionary<string, object> KqlParams = new Dictionary<string, object>
        {
            { "MaxActivityTimeGenerated", MaxTimeGenerated.ToString("yyyy-MM-dd HH:mm:ss.ff K") },
            { "SubscriptionId", ((string)datafactory.SubscriptionUid.ToString()).ToUpper() },
            { "ResourceGroupName", ((string)datafactory.ResourceGroup.ToString()).ToUpper() },
            { "DataFactoryName", ((string)datafactory.Name.ToString()).ToUpper() },
            { "DatafactoryId", datafactory.Id.ToString() }
        };

        string KQL = System.IO.File.ReadAllText(Shared.GlobalConfigs.GetStringConfig("KQLTemplateLocation") + "GetADFActivityErrors.kql");
        KQL = KQL.FormatWith(KqlParams, FormatWith.MissingKeyBehaviour.ThrowException, null, '{', '}');

        JObject JsonContent = new JObject();
        JsonContent["query"] = KQL;
        var postContent = new StringContent(JsonContent.ToString(), System.Text.Encoding.UTF8, "application/json");
        // NOTE(review): .Result blocks on the async call; acceptable here only because this
        // method is synchronous end-to-end.
        var response = client.PostAsync($"https://api.loganalytics.io/v1/workspaces/{workspaceId}/query", postContent).Result;
        if (response.StatusCode == System.Net.HttpStatusCode.OK)
        {
            //Start to parse the response content
            HttpContent responseContent = response.Content;
            var content = response.Content.ReadAsStringAsync().Result;
            var tables = ((JArray)(JObject.Parse(content)["tables"]));
            if (tables.Count > 0)
            {
                // Rebuild the first result table as a DataTable, mapping Kusto column
                // types to CLR types via KustoDataTypeMapper.
                DataTable dt = new DataTable();
                var rows = (JArray)(tables[0]["rows"]);
                var columns = (JArray)(tables[0]["columns"]);
                foreach (JObject c in columns)
                {
                    DataColumn dc = new DataColumn();
                    dc.ColumnName = c["name"].ToString();
                    dc.DataType = KustoDataTypeMapper[c["type"].ToString()];
                    dt.Columns.Add(dc);
                }
                foreach (JArray r in rows)
                {
                    DataRow dr = dt.NewRow();
                    for (int i = 0; i < columns.Count; i++)
                    {
                        if (((Newtonsoft.Json.Linq.JValue)r[i]).Value != null)
                        {
                            dr[i] = ((Newtonsoft.Json.Linq.JValue)r[i]).Value;
                        }
                        else
                        {
                            dr[i] = DBNull.Value;
                        }
                    }
                    dt.Rows.Add(dr);
                }

                // Bulk-insert into a session-scoped temp table, then merge into
                // ADFActivityErrors with the templated merge statement.
                Table t = new Table();
                t.Schema = "dbo";
                string TableGuid = Guid.NewGuid().ToString();
                t.Name = $"#ADFActivityErrors{TableGuid}";
                using (SqlConnection _conWrite = TMD.GetSqlConnection())
                {
                    TMD.BulkInsert(dt, t, true, _conWrite);
                    Dictionary<string, string> SqlParams = new Dictionary<string, string>
                    {
                        { "TempTable", t.QuotedSchemaAndName() },
                        { "DatafactoryId", datafactory.Id.ToString() }
                    };
                    string MergeSQL = GenerateSQLStatementTemplates.GetSQL(Shared.GlobalConfigs.GetStringConfig("SQLTemplateLocation"), "MergeIntoADFActivityErrors", SqlParams);
                    _conWrite.ExecuteWithRetry(MergeSQL);
                    _conWrite.Close();
                    _conWrite.Dispose();
                }
            }
            else
            {
                logging.LogErrors(new Exception("Kusto query failed getting ADFPipeline Stats."));
            }
        }
        // NOTE(review): non-OK HTTP responses are silently skipped for this factory — confirm
        // whether a logged error is wanted here.
    }
    return (new { });
}
/// <summary>
/// Core of the framework task runner: records a new Execution row, fetches the active task
/// instances assigned to the given TaskRunnerId, and dispatches each one — ADF tasks via the
/// "Master" pipeline, AF tasks via fire-and-forget HTTP calls to sibling Azure Functions.
/// Always resets the task runner back to idle (UpdFrameworkTaskRunner), even on failure.
/// </summary>
/// <param name="req">HTTP request; TaskRunnerId is read from the query string.</param>
/// <param name="logging">Framework logging helper carrying the ExecutionUid.</param>
/// <returns>JObject {"Succeeded": true} on success; rethrows on runner-level failure.</returns>
public static dynamic RunFrameworkTasksCore(HttpRequest req, Logging logging)
{
    TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
    short TaskRunnerId = System.Convert.ToInt16(req.Query["TaskRunnerId"]);
    try
    {
        // Register this execution window. Values are framework-generated (Guid + timestamps),
        // not user input, so the string-built SQL is not an injection vector here.
        TMD.ExecuteSql(string.Format("Insert into Execution values ('{0}', '{1}', '{2}')", logging.DefaultActivityLogItem.ExecutionUid, DateTimeOffset.Now.ToString("u"), DateTimeOffset.Now.AddYears(999).ToString("u")));

        //Fetch Top # tasks
        JArray _Tasks = AdsGoFast.TaskMetaData.TaskInstancesStatic.GetActive_ADFJSON((Guid)logging.DefaultActivityLogItem.ExecutionUid, TaskRunnerId, logging);
        // Retained for the (currently commented-out) day-partitioned test-file path below.
        var UtcCurDay = DateTime.UtcNow.ToString("yyyyMMdd");

        foreach (JObject _Task in _Tasks)
        {
            long _TaskInstanceId = System.Convert.ToInt64(Shared.JsonHelpers.GetDynamicValueFromJSON(logging, "TaskInstanceId", _Task, null, true));
            logging.DefaultActivityLogItem.TaskInstanceId = _TaskInstanceId;

            //TO DO: Update TaskInstance to UnTried if failed
            string _pipelinename = _Task["DataFactory"]["ADFPipeline"].ToString();
            System.Collections.Generic.Dictionary<string, object> _pipelineparams = new System.Collections.Generic.Dictionary<string, object>();

            logging.LogInformation(string.Format("Executing ADF Pipeline for TaskInstanceId {0} ", _TaskInstanceId.ToString()));

            //Check Task Type and execute appropriate ADF Pipeline
            //Todo: Potentially extract switch into metadata
            if (Shared._ApplicationOptions.TestingOptions.GenerateTaskObjectTestFiles)
            {
                // Test mode: dump the task object to disk and mark the instance complete
                // instead of executing it.
                string FileFullPath = Shared._ApplicationOptions.TestingOptions.TaskObjectTestFileLocation + /*UtcCurDay +*/ "/";
                if (!System.IO.Directory.Exists(FileFullPath))
                {
                    System.IO.DirectoryInfo di = System.IO.Directory.CreateDirectory(FileFullPath);
                }
                FileFullPath = FileFullPath + _Task["TaskType"].ToString() + "_" + _pipelinename.ToString() + "_" + _Task["TaskMasterId"].ToString() + ".json";
                System.IO.File.WriteAllText(FileFullPath, _Task.ToString());
                TMD.LogTaskInstanceCompletion(_TaskInstanceId, (Guid)logging.DefaultActivityLogItem.ExecutionUid, TaskMetaData.BaseTasks.TaskStatus.Complete, System.Guid.Empty, "Complete");
            }
            else
            {
                try
                {
                    if (_Task["TaskExecutionType"].ToString() == "ADF")
                    {
                        // All ADF tasks run through the "Master" pipeline, passing the full
                        // task object as a single parameter.
                        _pipelinename = "Master";
                        _pipelineparams.Add("TaskObject", _Task);
                        if (_pipelinename != "")
                        {
                            JObject _pipelineresult = ExecutePipeline.ExecutePipelineMethod(_Task["DataFactory"]["SubscriptionId"].ToString(), _Task["DataFactory"]["ResourceGroup"].ToString(), _Task["DataFactory"]["Name"].ToString(), _pipelinename, _pipelineparams, logging);
                            logging.DefaultActivityLogItem.AdfRunUid = Guid.Parse(_pipelineresult["RunId"].ToString());
                            // Parameterized insert (Dapper) — records the ADF run for tracking.
                            TMD.GetSqlConnection().Execute(@"
                                INSERT INTO TaskInstanceExecution (
                                    [ExecutionUid]
                                    ,[TaskInstanceId]
                                    ,[DatafactorySubscriptionUid]
                                    ,[DatafactoryResourceGroup]
                                    ,[DatafactoryName]
                                    ,[PipelineName]
                                    ,[AdfRunUid]
                                    ,[StartDateTime]
                                    ,[Status]
                                    ,[Comment]
                                )
                                VALUES (
                                    @ExecutionUid
                                    ,@TaskInstanceId
                                    ,@DatafactorySubscriptionUid
                                    ,@DatafactoryResourceGroup
                                    ,@DatafactoryName
                                    ,@PipelineName
                                    ,@AdfRunUid
                                    ,@StartDateTime
                                    ,@Status
                                    ,@Comment
                                )",
                                new
                                {
                                    ExecutionUid = logging.DefaultActivityLogItem.ExecutionUid.ToString(),
                                    TaskInstanceId = System.Convert.ToInt64(_Task["TaskInstanceId"]),
                                    DatafactorySubscriptionUid = _Task["DataFactory"]["SubscriptionId"].ToString(),
                                    DatafactoryResourceGroup = _Task["DataFactory"]["ResourceGroup"].ToString(),
                                    DatafactoryName = _Task["DataFactory"]["Name"].ToString(),
                                    PipelineName = _pipelineresult["PipelineName"].ToString(),
                                    AdfRunUid = Guid.Parse(_pipelineresult["RunId"].ToString()),
                                    StartDateTime = DateTimeOffset.UtcNow,
                                    Status = _pipelineresult["Status"].ToString(),
                                    Comment = ""
                                });
                        }
                        //To Do // Batch to make less "chatty"
                        //To Do // Upgrade to stored procedure call
                    }
                    else if (_Task["TaskExecutionType"].ToString() == "AF")
                    {
                        //The "AF" branch is for calling Azure Function Based Tasks that do not require ADF. Calls are made async (just like the ADF calls) and calls are made using "AsyncHttp" requests even though at present the "AF" based Tasks reside in the same function app. This is to "future proof" as it is expected that these AF based tasks will be moved out to a separate function app in the future.
                        switch (_pipelinename)
                        {
                            case "AZ-Storage-SAS-Uri-SMTP-Email":
                                using (var client = new System.Net.Http.HttpClient())
                                {
                                    //Lets get an access token based on MSI or Service Principal
                                    var secureFunctionAPIURL = string.Format("{0}/api/GetSASUriSendEmailHttpTrigger", Shared._ApplicationOptions.ServiceConnections.CoreFunctionsURL);
                                    var accessToken = Shared._AzureAuthenticationCredentialProvider.GetAzureRestApiToken(secureFunctionAPIURL);
                                    using HttpRequestMessage httpRequestMessage = new HttpRequestMessage
                                    {
                                        Method = HttpMethod.Post,
                                        RequestUri = new Uri(secureFunctionAPIURL),
                                        Content = new StringContent(_Task.ToString(), System.Text.Encoding.UTF8, "application/json"),
                                        Headers = { { System.Net.HttpRequestHeader.Authorization.ToString(), "Bearer " + accessToken } }
                                    };
                                    //Todo Add some error handling in case function cannot be reached. Note Wait time is there to provide sufficient time to complete post before the HttpClient is disposed.
                                    var HttpTask = client.SendAsync(httpRequestMessage).Wait(3000);
                                }
                                break;
                            case "AZ-Storage-Cache-File-List":
                                using (var client = new System.Net.Http.HttpClient())
                                {
                                    //Lets get an access token based on MSI or Service Principal
                                    var secureFunctionAPIURL = string.Format("{0}/api/AZStorageCacheFileListHttpTrigger", Shared._ApplicationOptions.ServiceConnections.CoreFunctionsURL);
                                    var accessToken = Shared._AzureAuthenticationCredentialProvider.GetAzureRestApiToken(secureFunctionAPIURL);
                                    using HttpRequestMessage httpRequestMessage = new HttpRequestMessage
                                    {
                                        Method = HttpMethod.Post,
                                        RequestUri = new Uri(secureFunctionAPIURL),
                                        Content = new StringContent(_Task.ToString(), System.Text.Encoding.UTF8, "application/json"),
                                        Headers = { { System.Net.HttpRequestHeader.Authorization.ToString(), "Bearer " + accessToken } }
                                    };
                                    //Todo Add some error handling in case function cannot be reached. Note Wait time is there to provide sufficient time to complete post before the HttpClient is disposed.
                                    var HttpTask = client.SendAsync(httpRequestMessage).Wait(3000);
                                }
                                break;
                            case "StartAndStopVMs":
                                using (var client = new System.Net.Http.HttpClient())
                                {
                                    //Lets get an access token based on MSI or Service Principal
                                    var accessToken = GetSecureFunctionToken(_pipelinename);
                                    using HttpRequestMessage httpRequestMessage = new HttpRequestMessage
                                    {
                                        Method = HttpMethod.Post,
                                        RequestUri = new Uri(GetSecureFunctionURI(_pipelinename)),
                                        Content = new StringContent(_Task.ToString(), System.Text.Encoding.UTF8, "application/json"),
                                        Headers = { { System.Net.HttpRequestHeader.Authorization.ToString(), "Bearer " + accessToken } }
                                    };
                                    //Todo Add some error handling in case function cannot be reached. Note Wait time is there to provide sufficient time to complete post before the HttpClient is disposed.
                                    var HttpTask = client.SendAsync(httpRequestMessage).Wait(3000);
                                }
                                break;
                            case "Cache-File-List-To-Email-Alert":
                                // NOTE(review): the HttpClient here is created but never used —
                                // SendAlert does the work in-process.
                                using (var client = new System.Net.Http.HttpClient())
                                {
                                    SendAlert(_Task, logging);
                                }
                                break;
                            default:
                                var msg = $"Could not find execution path for Task Type of {_pipelinename} and Execution Type of {_Task["TaskExecutionType"].ToString()}";
                                logging.LogErrors(new Exception(msg));
                                TMD.LogTaskInstanceCompletion((Int64)_TaskInstanceId, (System.Guid)logging.DefaultActivityLogItem.ExecutionUid, BaseTasks.TaskStatus.FailedNoRetry, Guid.Empty, (String)msg);
                                break;
                        }
                        //To Do // Batch to make less "chatty"
                        //To Do // Upgrade to stored procedure call
                    }
                }
                catch (Exception TaskException)
                {
                    // A single failed task must not kill the runner: mark the instance
                    // failed (no retry) and continue with the next task.
                    logging.LogErrors(TaskException);
                    TMD.LogTaskInstanceCompletion((Int64)_TaskInstanceId, (System.Guid)logging.DefaultActivityLogItem.ExecutionUid, BaseTasks.TaskStatus.FailedNoRetry, Guid.Empty, (String)"Runner failed to execute task.");
                }
            }
        }
    }
    catch (Exception RunnerException)
    {
        //Set Runner back to Idle
        TMD.ExecuteSql(string.Format("exec [dbo].[UpdFrameworkTaskRunner] {0}", TaskRunnerId));
        logging.LogErrors(RunnerException);
        // FIX: was `throw RunnerException;`, which resets the stack trace; bare rethrow preserves it.
        throw;
    }
    //Set Runner back to Idle
    TMD.ExecuteSql(string.Format("exec [dbo].[UpdFrameworkTaskRunner] {0}", TaskRunnerId));
    //Return success
    JObject Root = new JObject
    {
        ["Succeeded"] = true
    };
    return (Root);
}
/// <summary>
/// Creates ScheduleInstance rows for every active schedule whose cron expression says it is
/// due, then creates TaskInstance rows for the active task masters (via dbo.GetTaskMaster),
/// building each instance's TaskInstanceJson (relative paths, watermark info). Both sets are
/// bulk-inserted into temp tables and persisted by the InsertScheduleInstance_TaskInstance
/// SQL template.
/// </summary>
/// <param name="logging">Framework logging helper.</param>
public static void CreateTaskInstance(Logging logging)
{
    logging.LogInformation("Create ScheduleInstance called.");
    TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
    DateTimeOffset _date = DateTimeOffset.Now;

    // Staging table for new schedule instances.
    DataTable dtScheduleInstance = new DataTable();
    dtScheduleInstance.Columns.Add(new DataColumn("ScheduleMasterId", typeof(long)));
    dtScheduleInstance.Columns.Add(new DataColumn("ScheduledDateUtc", typeof(DateTime)));
    dtScheduleInstance.Columns.Add(new DataColumn("ScheduledDateTimeOffset", typeof(DateTimeOffset)));
    dtScheduleInstance.Columns.Add(new DataColumn("ActiveYN", typeof(bool)));

    // Active schedules that have at least one active task master, plus the most recent
    // instance datetime (1900-01-01 when the schedule has never fired).
    dynamic resScheduleInstance = TMD.GetSqlConnection().QueryWithRetry(@"
        Select
            SM.ScheduleMasterId,
            SM.ScheduleCronExpression,
            Coalesce(SI.MaxScheduledDateTimeOffset,cast('1900-01-01' as datetimeoffset)) as MaxScheduledDateTimeOffset
        from ScheduleMaster SM
        join (
            Select distinct ScheduleMasterId from TaskMaster TM where TM.ActiveYN = 1) TM on TM.ScheduleMasterId = SM.ScheduleMasterId
        left outer join (
            Select ScheduleMasterId, Max(ScheduledDateTimeOffset) MaxScheduledDateTimeOffset
            From ScheduleInstance
            Where ActiveYN = 1
            Group By ScheduleMasterId
        ) SI on SM.ScheduleMasterId = SI.ScheduleMasterId
        Where SM.ActiveYN = 1");

    foreach (dynamic _row in resScheduleInstance)
    {
        DateTimeOffset? nextUtc;
        if (_row.ScheduleCronExpression.ToString() == "N/A")
        {
            // "N/A" schedules are always due: pretend the next occurrence was a minute ago.
            nextUtc = DateTime.UtcNow.AddMinutes(-1);
        }
        else
        {
            CronExpression _cronExpression = CronExpression.Parse(_row.ScheduleCronExpression.ToString(), CronFormat.IncludeSeconds);
            nextUtc = _cronExpression.GetNextOccurrence(_row.MaxScheduledDateTimeOffset, TimeZoneInfo.Utc);
        }
        // Only create an instance when the next occurrence has already passed.
        if (nextUtc?.DateTime <= DateTime.UtcNow)
        {
            DataRow dr = dtScheduleInstance.NewRow();
            dr["ScheduleMasterId"] = _row.ScheduleMasterId;
            dr["ScheduledDateUtc"] = _date.Date;
            dr["ScheduledDateTimeOffset"] = _date;
            dr["ActiveYN"] = true;
            dtScheduleInstance.Rows.Add(dr);
        }
    }

    //Persist TEMP ScheduleInstance
    SqlConnection _con = TMD.GetSqlConnection();
    Table tmpScheduleInstanceTargetTable = new Table
    {
        Name = "#Temp" + Guid.NewGuid().ToString()
    };
    TMD.BulkInsert(dtScheduleInstance, tmpScheduleInstanceTargetTable, true, _con);

    //Create TaskInstance
    logging.LogInformation("Create TaskInstance called.");
    DataTable dtTaskInstance = new DataTable();
    dtTaskInstance.Columns.Add(new DataColumn("ExecutionUid", typeof(Guid)));
    dtTaskInstance.Columns.Add(new DataColumn("TaskMasterId", typeof(long)));
    dtTaskInstance.Columns.Add(new DataColumn("ScheduleInstanceId", typeof(long)));
    dtTaskInstance.Columns.Add(new DataColumn("ADFPipeline", typeof(string)));
    dtTaskInstance.Columns.Add(new DataColumn("TaskInstanceJson", typeof(string)));
    dtTaskInstance.Columns.Add(new DataColumn("LastExecutionStatus", typeof(string)));
    dtTaskInstance.Columns.Add(new DataColumn("ActiveYN", typeof(bool)));

    dynamic resTaskInstance = TMD.GetSqlConnection().QueryWithRetry(@"Exec dbo.GetTaskMaster");
    DataTable dtTaskTypeMapping = GetTaskTypeMapping(logging);
    foreach (dynamic _row in resTaskInstance)
    {
        DataRow drTaskInstance = dtTaskInstance.NewRow();
        logging.DefaultActivityLogItem.TaskInstanceId = _row.TaskInstanceId;
        logging.DefaultActivityLogItem.TaskMasterId = _row.TaskMasterId;
        // Per-row try/catch: one malformed task master must not stop instance creation
        // for the remaining rows.
        try
        {
            dynamic sourceSystemJson = JsonConvert.DeserializeObject(_row.SourceSystemJSON);
            dynamic taskMasterJson = JsonConvert.DeserializeObject(_row.TaskMasterJSON);
            dynamic targetSystemJson = JsonConvert.DeserializeObject(_row.TargetSystemJSON);

            string _ADFPipeline = GetTaskTypeMappingName(logging, _row.TaskExecutionType.ToString(), dtTaskTypeMapping, _row.TaskTypeId, _row.SourceSystemType.ToString(), taskMasterJson?.Source.Type.ToString(), _row.TargetSystemType.ToString(), taskMasterJson?.Target.Type.ToString(), _row.TaskDatafactoryIR);

            drTaskInstance["TaskMasterId"] = _row.TaskMasterId ?? DBNull.Value;
            drTaskInstance["ScheduleInstanceId"] = 0;//_row.ScheduleInstanceId == null ? DBNull.Value : _row.ScheduleInstanceId;
            drTaskInstance["ExecutionUid"] = logging.DefaultActivityLogItem.ExecutionUid;
            drTaskInstance["ADFPipeline"] = _ADFPipeline;
            drTaskInstance["LastExecutionStatus"] = "Untried";
            drTaskInstance["ActiveYN"] = true;

            // Build the TaskInstanceJson payload: date-transformed relative paths for
            // blob/ADLS systems plus watermark details for incremental loads.
            JObject Root = new JObject();
            if (_row.SourceSystemType == "ADLS" || _row.SourceSystemType == "Azure Blob")
            {
                if (taskMasterJson?.Source.Type.ToString() != "Filelist")
                {
                    Root["SourceRelativePath"] = TaskInstancesStatic.TransformRelativePath(JObject.Parse(_row.TaskMasterJSON)["Source"]["RelativePath"].ToString(), _date.DateTime);
                }
            }
            if (_row.TargetSystemType == "ADLS" || _row.TargetSystemType == "Azure Blob")
            {
                if (JObject.Parse(_row.TaskMasterJSON)["Target"]["RelativePath"] != null)
                {
                    Root["TargetRelativePath"] = TaskInstancesStatic.TransformRelativePath(JObject.Parse(_row.TaskMasterJSON)["Target"]["RelativePath"].ToString(), _date.DateTime);
                }
            }
            // NOTE(review): dynamic-bound comparison of a JSON token against "Watermark" —
            // confirm it behaves as intended for the token types produced here.
            if (JObject.Parse(_row.TaskMasterJSON)["Source"]["IncrementalType"] == "Watermark")
            {
                Root["IncrementalField"] = _row.TaskMasterWaterMarkColumn;
                Root["IncrementalColumnType"] = _row.TaskMasterWaterMarkColumnType;
                if (_row.TaskMasterWaterMarkColumnType == "DateTime")
                {
                    Root["IncrementalValue"] = _row.TaskMasterWaterMark_DateTime ?? "1900-01-01";
                }
                else if (_row.TaskMasterWaterMarkColumnType == "BigInt")
                {
                    Root["IncrementalValue"] = _row.TaskMasterWaterMark_BigInt ?? -1;
                }
            }
            // Root is always instantiated above, so the previous `if (Root == null)` branch
            // was unreachable and has been removed.
            drTaskInstance["TaskInstanceJson"] = Root;
            dtTaskInstance.Rows.Add(drTaskInstance);
        }
        catch (Exception e)
        {
            // FIX: the message refers to a TaskMasterId but previously logged TaskInstanceId.
            logging.LogErrors(new Exception(string.Format("Failed to create new task instances for TaskMasterId '{0}'.", logging.DefaultActivityLogItem.TaskMasterId)));
            logging.LogErrors(e);
        }
    }

    //Persist TMP TaskInstance
    Table tmpTaskInstanceTargetTable = new Table
    {
        Name = "#Temp" + Guid.NewGuid().ToString()
    };
    TMD.BulkInsert(dtTaskInstance, tmpTaskInstanceTargetTable, true, _con);

    // Single templated statement moves both temp tables into their real tables.
    Dictionary<string, string> SqlParams = new Dictionary<string, string>
    {
        { "tmpScheduleInstance", tmpScheduleInstanceTargetTable.QuotedSchemaAndName() },
        { "tmpTaskInstance", tmpTaskInstanceTargetTable.QuotedSchemaAndName() }
    };
    string InsertSQL = GenerateSQLStatementTemplates.GetSQL(Shared.GlobalConfigs.GetStringConfig("SQLTemplateLocation"), "InsertScheduleInstance_TaskInstance", SqlParams);
    _con.ExecuteWithRetry(InsertSQL);
    _con.Close();
}
/// <summary>
/// Reads the "Filelist" Azure Table of the source storage account (SAS-token auth) for rows
/// newer than the last stored partition key, merges them into AzureStorageListing, optionally
/// fires operator alert emails, and logs task-instance completion/failure.
/// </summary>
/// <param name="req">HTTP request whose body is the framework task object JSON.</param>
/// <param name="logging">Framework logging helper.</param>
/// <returns>Empty anonymous object on success; JObject {"Result":"Failed"} on error.</returns>
public static dynamic GetAzureStorageListingsCore(HttpRequest req, Logging logging)
{
    string requestBody = new System.IO.StreamReader(req.Body).ReadToEndAsync().Result;
    dynamic taskInformation = JsonConvert.DeserializeObject(requestBody);
    string _TaskInstanceId = taskInformation["TaskInstanceId"].ToString();
    string _ExecutionUid = taskInformation["ExecutionUid"].ToString();
    try
    {
        string _storageAccountName = taskInformation["Source"]["StorageAccountName"];
        //The name is actually the base url so we need to parse it to get the name only
        _storageAccountName = _storageAccountName.Split('.')[0].Replace("https://", "");
        string _storageAccountToken = taskInformation["Source"]["StorageAccountToken"];
        Int64 _SourceSystemId = taskInformation["Source"]["SystemId"];

        TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
        using SqlConnection _con = TMD.GetSqlConnection();
        // Incremental watermark: continue from the max partition key already loaded for this
        // system, defaulting to a one-day lookback.
        var res = _con.QueryWithRetry(string.Format("Select Max(PartitionKey) MaxPartitionKey from AzureStorageListing where SystemId = {0}", _SourceSystemId.ToString()));
        // NOTE(review): "hh" is the 12-hour clock with no AM/PM marker — confirm the writer of
        // the Filelist table uses the same format, otherwise the GreaterThan filter is unreliable.
        string MaxPartitionKey = DateTime.UtcNow.AddDays(-1).ToString("yyyy-MM-dd hh:mm");
        foreach (var r in res)
        {
            if (r.MaxPartitionKey != null)
            {
                // Overlap one minute to avoid missing rows written at the boundary.
                MaxPartitionKey = DateTime.Parse(r.MaxPartitionKey).AddMinutes(-1).ToString("yyyy-MM-dd hh:mm");
            }
        }
        using (HttpClient SourceClient = new HttpClient())
        {
            //Now use the SAS URI to connect rather than the MSI / Service Principal as AD Based Auth not yet avail for tables
            var _storageCredentials = new StorageCredentials(_storageAccountToken);
            var SourceStorageAccount = new CloudStorageAccount(storageCredentials: _storageCredentials, accountName: _storageAccountName, endpointSuffix: "core.windows.net", useHttps: true);
            var client = SourceStorageAccount.CreateCloudTableClient();
            CloudTable table = client.GetTableReference("Filelist");
            TableQuery<DynamicTableEntity> query = new TableQuery<DynamicTableEntity>().Where(TableQuery.GenerateFilterCondition("PartitionKey", QueryComparisons.GreaterThan, MaxPartitionKey.ToString()));

            DataTable dt = new DataTable();
            DataColumn dc = new DataColumn();
            dc.ColumnName = "PartitionKey";
            dc.DataType = typeof(string);
            dt.Columns.Add(dc);
            DataColumn dc1 = new DataColumn();
            dc1.ColumnName = "RowKey";
            dc1.DataType = typeof(string);
            dt.Columns.Add(dc1);
            DataColumn dc2 = new DataColumn();
            dc2.ColumnName = "SystemId";
            dc2.DataType = typeof(Int64);
            dt.Columns.Add(dc2);
            DataColumn dc3 = new DataColumn();
            dc3.ColumnName = "FilePath";
            dc3.DataType = typeof(string);
            dt.Columns.Add(dc3);

            string Filelist = "";
            TableContinuationToken token = null;
            // Drain all result segments of the table query.
            do
            {
                TableQuerySegment<DynamicTableEntity> resultSegment = table.ExecuteQuerySegmentedAsync(query, token).Result;
                token = resultSegment.ContinuationToken;
                //load into data table
                foreach (var entity in resultSegment.Results)
                {
                    DataRow dr = dt.NewRow();
                    dr["PartitionKey"] = entity.PartitionKey;
                    dr["RowKey"] = entity.RowKey;
                    dr["SystemId"] = _SourceSystemId;
                    dr["FilePath"] = entity.Properties["FilePath"].StringValue;
                    dt.Rows.Add(dr);
                    Filelist += entity.Properties["FilePath"].StringValue + System.Environment.NewLine;
                }
            } while (token != null);

            if (dt.Rows.Count > 0)
            {
                // Bulk insert into a temp table then merge into AzureStorageListing.
                Table t = new Table();
                t.Schema = "dbo";
                string TableGuid = Guid.NewGuid().ToString();
                t.Name = $"#AzureStorageListing{TableGuid}";
                TMD.BulkInsert(dt, t, true, _con);
                Dictionary<string, string> SqlParams = new Dictionary<string, string>
                {
                    { "TempTable", t.QuotedSchemaAndName() },
                    { "SourceSystemId", _SourceSystemId.ToString() }
                };
                string MergeSQL = GenerateSQLStatementTemplates.GetSQL(Shared.GlobalConfigs.GetStringConfig("SQLTemplateLocation"), "MergeIntoAzureStorageListing", SqlParams);
                _con.ExecuteWithRetry(MergeSQL, 120);
                if ((JArray)taskInformation["Alerts"] != null)
                {
                    foreach (JObject Alert in (JArray)taskInformation["Alerts"])
                    {
                        //Only Send out for Operator Level Alerts
                        if (Alert["AlertCategory"].ToString() == "Task Specific Operator Alert")
                        {
                            AlertOperator(_SourceSystemId, Alert["AlertEmail"].ToString(), "", Filelist);
                        }
                    }
                }
            }
            _con.Close();
            _con.Dispose();
            TMD.LogTaskInstanceCompletion(System.Convert.ToInt64(_TaskInstanceId), System.Guid.Parse(_ExecutionUid), TaskMetaData.BaseTasks.TaskStatus.Complete, Guid.Empty, "");
        }
    }
    catch (Exception e)
    {
        logging.LogErrors(e);
        TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
        // FIX: failure comment previously said "Failed when trying to Generate Sas URI and Send
        // Email" — copy/paste from SendEmailSASUri; corrected to describe this task.
        TMD.LogTaskInstanceCompletion(System.Convert.ToInt64(_TaskInstanceId), System.Guid.Parse(_ExecutionUid), TaskMetaData.BaseTasks.TaskStatus.FailedRetry, Guid.Empty, "Failed when trying to get Azure Storage listings");
        JObject Root = new JObject
        {
            ["Result"] = "Failed"
        };
        return (Root);
    }
    return (new { });
}
/// <summary>
/// Generates a SAS URI for a source blob, renders the configured email template (text + HTML)
/// with the SAS token and uploader details, sends it via SendGrid, and logs task-instance
/// completion or failure.
/// </summary>
/// <param name="req">HTTP request whose body is the framework task object JSON.</param>
/// <param name="logging">Framework logging helper.</param>
/// <returns>JObject {"Result":"Complete"} on success, {"Result":"Failed"} on error.</returns>
public static async Task<JObject> SendEmailSASUri(HttpRequest req, Logging logging)
{
    // FIX: was .ReadToEndAsync().Result — blocking on async inside an async method risks
    // thread-pool starvation/deadlock; await instead.
    string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
    dynamic taskInformation = JsonConvert.DeserializeObject(requestBody);
    string _TaskInstanceId = taskInformation["TaskInstanceId"].ToString();
    string _ExecutionUid = taskInformation["ExecutionUid"].ToString();
    try
    {
        //Get SAS URI
        string _blobStorageAccountName = taskInformation["Source"]["StorageAccountName"].ToString();
        string _blobStorageContainerName = taskInformation["Source"]["StorageAccountContainer"].ToString();
        string _blobStorageFolderPath = taskInformation["Source"]["RelativePath"].ToString();
        string _dataFileName = taskInformation["Source"]["DataFileName"].ToString();
        int _accessDuration = (int)taskInformation["Source"]["SasURIDaysValid"];
        string _targetSystemUidInPHI = taskInformation["Source"]["TargetSystemUidInPHI"];
        string _FileUploaderWebAppURL = taskInformation["Source"]["FileUploaderWebAppURL"];
        string SASUri = Storage.CreateSASToken(_blobStorageAccountName, _blobStorageContainerName, _blobStorageFolderPath, _dataFileName, _accessDuration);

        //Send Email
        string _emailRecipient = taskInformation["Target"]["EmailRecipient"].ToString();
        string _emailRecipientName = taskInformation["Target"]["EmailRecipientName"].ToString();
        string _emailTemplateFileName = taskInformation["Target"]["EmailTemplateFileName"].ToString();
        string _senderEmail = taskInformation["Target"]["SenderEmail"].ToString();
        string _senderDescription = taskInformation["Target"]["SenderDescription"].ToString();
        string _subject = taskInformation["Target"]["EmailSubject"].ToString();

        //Get Plain Text and Email Subject from Template Files
        // Substitution values for the {Placeholder} tokens in the email templates.
        Dictionary<string, string> Params = new Dictionary<string, string>
        {
            { "NAME", _emailRecipientName },
            { "SASTOKEN", SASUri },
            { "FileUploaderUrl", _FileUploaderWebAppURL },
            { "TargetSystemUidInPHI", _targetSystemUidInPHI },
        };
        string _plainTextContent = System.IO.File.ReadAllText(Shared.GlobalConfigs.GetStringConfig("HTMLTemplateLocation") + _emailTemplateFileName + ".txt");
        _plainTextContent = _plainTextContent.FormatWith(Params, MissingKeyBehaviour.ThrowException, null, '{', '}');
        string _htmlContent = System.IO.File.ReadAllText(Shared.GlobalConfigs.GetStringConfig("HTMLTemplateLocation") + _emailTemplateFileName + ".html");
        _htmlContent = _htmlContent.FormatWith(Params, MissingKeyBehaviour.ThrowException, null, '{', '}');

        var apiKey = System.Environment.GetEnvironmentVariable("SENDGRID_APIKEY");
        var client = new SendGridClient(new SendGridClientOptions { ApiKey = apiKey, HttpErrorAsException = true });
        var msg = new SendGridMessage()
        {
            From = new EmailAddress(_senderEmail, _senderDescription),
            Subject = _subject,
            PlainTextContent = _plainTextContent,
            HtmlContent = _htmlContent
        };
        msg.AddTo(new EmailAddress(_emailRecipient, _emailRecipientName));
        try
        {
            var response = await client.SendEmailAsync(msg).ConfigureAwait(false);
            logging.LogInformation($"SendGrid Response StatusCode - {response.StatusCode}");
        }
        catch (Exception ex)
        {
            // HttpErrorAsException=true makes SendGrid surface failures as exceptions whose
            // Message is the JSON error payload.
            // NOTE(review): if ex.Message is not valid JSON this deserialize will itself throw,
            // replacing the "Could not send email" exception — confirm intended.
            SendGridErrorResponse errorResponse = JsonConvert.DeserializeObject<SendGridErrorResponse>(ex.Message);
            logging.LogInformation($"Error Message - {ex.Message}");
            throw new Exception("Could not send email");
        }
        //Update Task Instace
        TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
        TMD.LogTaskInstanceCompletion(System.Convert.ToInt64(_TaskInstanceId), System.Guid.Parse(_ExecutionUid), TaskMetaData.BaseTasks.TaskStatus.Complete, Guid.Empty, "");
        JObject Root = new JObject
        {
            ["Result"] = "Complete"
        };
        return (Root);
    }
    catch (Exception TaskException)
    {
        // Mark the task instance failed-with-retry so the framework can try again.
        logging.LogErrors(TaskException);
        TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
        TMD.LogTaskInstanceCompletion(System.Convert.ToInt64(_TaskInstanceId), System.Guid.Parse(_ExecutionUid), TaskMetaData.BaseTasks.TaskStatus.FailedRetry, Guid.Empty, "Failed when trying to Generate Sas URI and Send Email");
        JObject Root = new JObject
        {
            ["Result"] = "Failed"
        };
        return (Root);
    }
}
/// <summary>
/// Fetches ADF activity-run records from each registered data factory's Log Analytics
/// workspace (from that factory's last stored MaxActivityTimeGenerated, re-reading a
/// 5-minute overlap window) and merges them into dbo.ADFActivityRun via a temp-table
/// bulk insert followed by the MergeIntoADFActivityRun SQL template.
/// </summary>
/// <param name="logging">Framework logger used to report query failures.</param>
/// <returns>An empty anonymous object; callers use the return only as a completion marker.</returns>
public dynamic GetADFActivityRuns(Logging logging)
{
    using var client = _logAnalyticsContext.httpClient.CreateClient(_logAnalyticsContext.httpClientName);
    TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
    using SqlConnection _conRead = TMD.GetSqlConnection();

    //Get Last Request Date - high-water mark per data factory so only new rows are fetched
    var MaxTimesGen = _conRead.QueryWithRetry(@" Select a.*, MaxActivityTimeGenerated from DataFactory a left join ( Select b.DataFactoryId, MaxActivityTimeGenerated = Max(MaxActivityTimeGenerated) from ADFActivityRun b group by b.DatafactoryId) b on a.Id = b.DatafactoryId ");

    // Default look-back for factories that have never been harvested.
    DateTimeOffset MaxActivityTimeGenerated = DateTimeOffset.UtcNow.AddDays(-30);
    foreach (var datafactory in MaxTimesGen)
    {
        if (datafactory.MaxActivityTimeGenerated != null)
        {
            // Re-read a 5 minute overlap to avoid missing late-arriving log rows.
            MaxActivityTimeGenerated = ((DateTimeOffset)datafactory.MaxActivityTimeGenerated).AddMinutes(-5);
        }

        string workspaceId = datafactory.LogAnalyticsWorkspaceId.ToString();
        Dictionary<string, object> KqlParams = new Dictionary<string, object>
        {
            { "MaxActivityTimeGenerated", MaxActivityTimeGenerated.ToString("yyyy-MM-dd HH:mm:ss.ff K") },
            { "SubscriptionId", ((string)datafactory.SubscriptionUid.ToString()).ToUpper() },
            { "ResourceGroupName", ((string)datafactory.ResourceGroup.ToString()).ToUpper() },
            { "DataFactoryName", ((string)datafactory.Name.ToString()).ToUpper() },
            { "DatafactoryId", datafactory.Id.ToString() }
        };

        //Add in the rates from ADFServiceRates.json
        string ADFRatesStr = System.IO.File.ReadAllText(Path.Combine(Path.Combine(Shared._ApplicationBasePath, Shared._ApplicationOptions.LocalPaths.KQLTemplateLocation), "ADFServiceRates.json"));
        JObject ADFRates = JObject.Parse(ADFRatesStr);
        foreach (JProperty p in ADFRates.Properties())
        {
            KqlParams.Add(p.Name, p.Value.ToString());
        }

        string KQL = System.IO.File.ReadAllText(Path.Combine(Path.Combine(Shared._ApplicationBasePath, Shared._ApplicationOptions.LocalPaths.KQLTemplateLocation), "GetADFActivityRuns.kql"));
        KQL = KQL.FormatWith(KqlParams, FormatWith.MissingKeyBehaviour.ThrowException, null, '{', '}');

        JObject JsonContent = new JObject();
        JsonContent["query"] = KQL;
        var postContent = new StringContent(JsonContent.ToString(), System.Text.Encoding.UTF8, "application/json");
        // NOTE(review): .Result blocks the calling thread; kept because the method's
        // synchronous signature must not change, but converting to async is preferable.
        var response = client.PostAsync($"https://api.loganalytics.io/v1/workspaces/{workspaceId}/query", postContent).Result;
        if (response.StatusCode == System.Net.HttpStatusCode.OK)
        {
            //Start to parse the response content
            var content = response.Content.ReadAsStringAsync().Result;
            var tables = ((JArray)(JObject.Parse(content)["tables"]));
            if (tables.Count > 0)
            {
                // Build a DataTable whose columns mirror the Kusto result schema.
                DataTable dt = new DataTable();
                var rows = (JArray)(tables[0]["rows"]);
                var columns = (JArray)(tables[0]["columns"]);
                foreach (JObject c in columns)
                {
                    DataColumn dc = new DataColumn();
                    dc.ColumnName = c["name"].ToString();
                    dc.DataType = GetADFStats.KustoDataTypeMapper[c["type"].ToString()];
                    dt.Columns.Add(dc);
                }
                foreach (JArray r in rows)
                {
                    DataRow dr = dt.NewRow();
                    for (int i = 0; i < columns.Count; i++)
                    {
                        // Fix: a null Kusto cell previously threw ArgumentNullException -
                        // DataRow cells must be set to DBNull.Value, not null.
                        dr[i] = ((Newtonsoft.Json.Linq.JValue)r[i]).Value ?? (object)DBNull.Value;
                    }
                    dt.Rows.Add(dr);
                }

                // Bulk insert into a per-call temp table, then merge into dbo.ADFActivityRun.
                Table t = new Table();
                t.Schema = "dbo";
                string TableGuid = Guid.NewGuid().ToString();
                t.Name = $"#ADFActivityRun{TableGuid}";
                using (SqlConnection _conWrite = TMD.GetSqlConnection())
                {
                    TMD.BulkInsert(dt, t, true, _conWrite);
                    Dictionary<string, string> SqlParams = new Dictionary<string, string>
                    {
                        { "TempTable", t.QuotedSchemaAndName() },
                        { "DatafactoryId", datafactory.Id.ToString() }
                    };
                    string MergeSQL = GenerateSQLStatementTemplates.GetSQL(System.IO.Path.Combine(Shared._ApplicationBasePath, Shared._ApplicationOptions.LocalPaths.SQLTemplateLocation), "MergeIntoADFActivityRun", SqlParams);
                    _conWrite.ExecuteWithRetry(MergeSQL, 120);
                    // using block disposes the connection; redundant Close()/Dispose() removed.
                }
            }
            else
            {
                // Fix: message previously said "ADFPipeline Stats" (copy-paste) and claimed the
                // query failed; an OK response with no tables means no data was returned.
                logging.LogErrors(new Exception("Kusto query for ADF Activity Runs returned no result tables."));
            }
        }
        else
        {
            // Fix: a non-OK response was previously ignored silently - surface the failure.
            logging.LogErrors(new Exception($"Kusto query failed getting ADF Activity Runs. Status code: {response.StatusCode}"));
        }
    }
    return (new { });
}
/// <summary>
/// Starts or deallocates the Azure VM described by the request body's Target element
/// (SubscriptionUid, VMname, ResourceGroup, Action), then records task instance
/// completion in the task metadata database.
/// </summary>
/// <param name="req">HTTP request whose JSON body carries TaskInstanceId, ExecutionUid and Target.</param>
/// <param name="logging">Framework logger.</param>
/// <returns>JObject whose Result is "Complete", "Failed", or a guidance message for a bad Target.</returns>
public static async Task<JObject> StartAndStopVMsCore(HttpRequest req, Logging logging)
{
    string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
    JObject data = JsonConvert.DeserializeObject<JObject>(requestBody);
    string _TaskInstanceId = data["TaskInstanceId"].ToString();
    string _ExecutionUid = data["ExecutionUid"].ToString();
    try
    {
        logging.LogInformation("StartAndStopVMs function processed a request.");

        // Fix: validate the Target element BEFORE using it. The original null check on VmName
        // sat after the values had already been dereferenced, so a missing field threw first
        // and the intended guidance message / FailedRetry log were unreachable dead code.
        string Subscription = data["Target"]?["SubscriptionUid"]?.ToString();
        string VmName = data["Target"]?["VMname"]?.ToString();
        string VmResourceGroup = data["Target"]?["ResourceGroup"]?.ToString();
        string VmAction = data["Target"]?["Action"]?.ToString();
        TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
        if (Subscription == null || VmName == null || VmResourceGroup == null || VmAction == null)
        {
            JObject ErrorRoot = new JObject
            {
                ["Result"] = "Please pass a name, resourcegroup and action to request body"
            };
            TMD.LogTaskInstanceCompletion(System.Convert.ToInt64(_TaskInstanceId), System.Guid.Parse(_ExecutionUid), TaskMetaData.BaseTasks.TaskStatus.FailedRetry, System.Guid.Empty, "Task missing VMname, ResourceGroup or SubscriptionUid in Target element.");
            return (ErrorRoot);
        }

        Microsoft.Azure.Management.Fluent.Azure.IAuthenticated azureAuth = Microsoft.Azure.Management.Fluent.Azure.Configure().WithLogLevel(HttpLoggingDelegatingHandler.Level.BodyAndHeaders).Authenticate(Shared.Azure.AzureSDK.GetAzureCreds(Shared._ApplicationOptions.UseMSI));
        IAzure azure = azureAuth.WithSubscription(Subscription);
        logging.LogInformation("Selected subscription: " + azure.SubscriptionId);

        IVirtualMachine vm = azure.VirtualMachines.GetByResourceGroup(VmResourceGroup, VmName);

        // Only start a VM that is fully deallocated; only stop one that is not.
        // Ordinal comparison fixes the culture-sensitive ToLower() == "start" check.
        if (vm.PowerState == Microsoft.Azure.Management.Compute.Fluent.PowerState.Deallocated && string.Equals(VmAction, "start", StringComparison.OrdinalIgnoreCase))
        {
            logging.LogInformation("VM State is: " + vm.PowerState.Value.ToString());
            // Wait at most 5s - the task only needs to INITIATE the operation, not see it finish.
            vm.StartAsync().Wait(5000);
            logging.LogInformation("VM Start Initiated: " + vm.Name);
        }
        if (vm.PowerState != Microsoft.Azure.Management.Compute.Fluent.PowerState.Deallocated && string.Equals(VmAction, "stop", StringComparison.OrdinalIgnoreCase))
        {
            logging.LogInformation("VM State is: " + vm.PowerState.Value.ToString());
            vm.DeallocateAsync().Wait(5000);
            logging.LogInformation("VM Stop Initiated: " + vm.Name);
        }

        JObject Root = new JObject
        {
            ["Result"] = "Complete"
        };
        //Update Task Instance
        TMD.LogTaskInstanceCompletion(System.Convert.ToInt64(_TaskInstanceId), System.Guid.Parse(_ExecutionUid), TaskMetaData.BaseTasks.TaskStatus.Complete, System.Guid.Empty, "");
        return (Root);
    }
    catch (System.Exception TaskException)
    {
        logging.LogErrors(TaskException);
        TaskMetaDataDatabase TMD = new TaskMetaDataDatabase();
        TMD.LogTaskInstanceCompletion(System.Convert.ToInt64(_TaskInstanceId), System.Guid.Parse(_ExecutionUid), TaskMetaData.BaseTasks.TaskStatus.FailedRetry, System.Guid.Empty, "Failed when trying to start or stop VM");
        JObject Root = new JObject
        {
            ["Result"] = "Failed"
        };
        return (Root);
    }
}