static void Main(string[] args)
{
    AzureDataFactoryFoundry adfFoundry = new AzureDataFactoryFoundry();
    DualLoadUtil util = new DualLoadUtil();

    //adfFoundry.tearDown(util);
    //adfFoundry.test();
    //adfFoundry.initialize(util);
}
public void tearDown(DualLoadUtil util)
{
    Console.WriteLine("Tearing down " + DualLoadConfig.DATAFACTORY_Name);

    DataFactoryManagementClient client = ADFLoginController.createDataFactoryManagementClient();

    // Deleting the data factory removes all of its pipelines and datasets in one call.
    client.DataFactories.Delete(DualLoadConfig.RESOURCEGROUP_Name, DualLoadConfig.DATAFACTORY_Name);

    //util.tearDown(client, DualLoadConfig.PIPELINE_INIT);
    //util.tearDown(client, DualLoadConfig.PIPELINE_LOADPROCESS);
    //util.tearDown(client, DualLoadConfig.PIPELINE_ARCHIVE);
}
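// A possible sketch of the commented-out per-pipeline teardown helper on DualLoadUtil.
// Its shape is an assumption inferred from the commented calls above, not the project's
// actual implementation: it deletes one named pipeline instead of dropping the whole
// data factory. Pipelines.Delete is part of the ADF (V1) management SDK.
public void tearDown(DataFactoryManagementClient client, string pipelineName)
{
    Console.WriteLine("Deleting pipeline " + pipelineName);

    client.Pipelines.Delete(DualLoadConfig.RESOURCEGROUP_Name,
                            DualLoadConfig.DATAFACTORY_Name,
                            pipelineName);
}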
private void createPipelines(DualLoadUtil util, String basePipelineName)
{
    DualLoadActivities dLActivities = new DualLoadActivities();
    int i = 0; // i represents the number of Init pipelines that will get created

    foreach (string file in SOURCE_FOLDER_FILELIST)
    {
        DateTime PipelineActivePeriodStartTime = new DateTime(2014, 8, 9, 1, 0, 0, 0, DateTimeKind.Local).AddMinutes(60);
        DateTime PipelineActivePeriodEndTime = PipelineActivePeriodStartTime.AddMinutes(60);

        Console.WriteLine("file being processed: " + file);
        String pipelineName = basePipelineName + "_" + i;

        util.getDatasets().createDataSet_SourceFolder(CONTROL_PROCESS, SOURCE_FOLDER_PATH, file, i);
        util.getDatasets().createDataSet_ToBeProcessedPath(CONTROL_PROCESS, TOBEPROCESSED_FOLDER_PATH, file, pipelineName, i);
        util.getDatasets().createDataSet_Init_SqlDummy(i);

        Console.WriteLine("Creating Pipeline: " + pipelineName);

        util.getClient().Pipelines.CreateOrUpdate(DualLoadConfig.RESOURCEGROUP_Name, DualLoadConfig.DATAFACTORY_Name,
            new PipelineCreateOrUpdateParameters()
            {
                Pipeline = new Pipeline()
                {
                    Name = pipelineName,
                    Properties = new PipelineProperties()
                    {
                        Description = "DualLoadInit pipeline pulls all files to be processed into a central location",

                        // Initial value for the pipeline's active period. With this, you won't need to set slice status.
                        Start = PipelineActivePeriodStartTime,
                        End = PipelineActivePeriodEndTime,

                        Activities = new List<Activity>()
                        {
                            dLActivities.create_Activity_Init_3(CONTROL_ID, file, i),
                            dLActivities.create_Activity_Init_4(DualLoadConfig.DATASET_SOURCEFOLDER + "_" + i,
                                                                DualLoadConfig.DATASET_ToBeProcessedFolder + "_" + pipelineName, i)
                        }
                    }
                }
            }
        );

        util.getADFMonitor().monitorPipelineOutput(PipelineActivePeriodStartTime, PipelineActivePeriodEndTime,
            DualLoadConfig.DATASET_ToBeProcessedFolder + "_" + pipelineName);

        i++;
        storageController.deleteBlob(CONTROL_PROCESS, SOURCE_FOLDER_PATH, file);
    }
}
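// A minimal sketch of what create_Activity_Init_4 could look like, assuming it is a plain
// blob-to-blob copy from the source-folder dataset to the to-be-processed dataset. The
// activity name and the sink settings below are illustrative assumptions, not taken from
// the project; the Activity/CopyActivity/BlobSource/BlobSink types are ADF V1 SDK models.
public Activity create_Activity_Init_4(string inputDatasetName, string outputDatasetName, int index)
{
    return new Activity()
    {
        Name = "Init_CopyToToBeProcessed_" + index,   // hypothetical activity name
        Inputs = new List<ActivityInput>() { new ActivityInput() { Name = inputDatasetName } },
        Outputs = new List<ActivityOutput>() { new ActivityOutput() { Name = outputDatasetName } },
        TypeProperties = new CopyActivity()
        {
            Source = new BlobSource(),
            Sink = new BlobSink()
            {
                // Illustrative sink tuning values.
                WriteBatchSize = 10000,
                WriteBatchTimeout = TimeSpan.FromMinutes(10)
            }
        }
    };
}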
static void Main(string[] args)
{
    LoadProcessWorkflow loadProcessWorkflow = new LoadProcessWorkflow();
    DualLoadUtil util = new DualLoadUtil();

    DataFactoryManagementClient client = ADFLoginController.createDataFactoryManagementClient();
    util.setADFMonitor(new ADFOutputMonitor(client));

    //util.tearDown(client, DualLoadConfig.PIPELINE_LOADPROCESS);

    DualLoadDatasets datasets = loadProcessWorkflow.createDatasets(client);
    util.setDatasets(datasets);

    loadProcessWorkflow.createPipelines(util, DualLoadConfig.PIPELINE_LOADPROCESS);

    Console.WriteLine("\nPress any key to exit.");
    Console.ReadKey();
}
static void Main(string[] args)
{
    ArchiveWorkflow archiveWorkflow = new ArchiveWorkflow();
    DualLoadUtil util = new DualLoadUtil();

    DataFactoryManagementClient client = ADFLoginController.createDataFactoryManagementClient();

    //util.tearDown(client, DualLoadConfig.PIPELINE_ARCHIVE);

    util.setADFMonitor(new ADFOutputMonitor(client));

    DualLoadDatasets datasets = archiveWorkflow.createDatasets(client);
    util.setDatasets(datasets);

    archiveWorkflow.initStorageController();
    archiveWorkflow.createPipelines(util, DualLoadConfig.PIPELINE_ARCHIVE);

    Console.WriteLine("\nPress any key to exit.");
    Console.ReadKey();
}
static void Main(string[] args)
{
    InitWorkflow initWorkflow = new InitWorkflow();
    DualLoadUtil util = new DualLoadUtil();

    DataFactoryManagementClient client = ADFLoginController.createDataFactoryManagementClient();

    //util.tearDown(client, DualLoadConfig.PIPELINE_INIT);

    util.setADFMonitor(new ADFOutputMonitor(client));

    initWorkflow.executeDBQuery_Step1();
    initWorkflow.executeStorageQuery_Step2();

    DualLoadDatasets datasets = initWorkflow.createDatasets(client);
    util.setDatasets(datasets);

    initWorkflow.createPipelines(util, DualLoadConfig.PIPELINE_INIT);

    Console.WriteLine("\nPress any key to exit.");
    Console.ReadKey();
}
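// A rough sketch of how ADFOutputMonitor.monitorPipelineOutput might poll the output
// dataset until its slice finishes, based on the monitoring pattern from the ADF V1 .NET
// SDK samples (DataSlices.List / DataSliceState). The timeout, polling interval, and the
// assumption that "client" is the DataFactoryManagementClient passed to the constructor
// are illustrative; this is not the project's actual implementation.
public void monitorPipelineOutput(DateTime activePeriodStart, DateTime activePeriodEnd, string datasetName)
{
    DateTime start = DateTime.Now;
    bool done = false;

    while (DateTime.Now - start < TimeSpan.FromMinutes(10) && !done)
    {
        Console.WriteLine("Pulling the slice status for " + datasetName);
        Thread.Sleep(1000 * 12); // wait before the next status check

        var sliceListResponse = client.DataSlices.List(
            DualLoadConfig.RESOURCEGROUP_Name, DualLoadConfig.DATAFACTORY_Name, datasetName,
            new DataSliceListParameters()
            {
                DataSliceRangeStartTime = activePeriodStart.ConvertToISO8601DateTimeString(),
                DataSliceRangeEndTime = activePeriodEnd.ConvertToISO8601DateTimeString()
            });

        foreach (DataSlice slice in sliceListResponse.DataSlices)
        {
            if (slice.State == DataSliceState.Failed || slice.State == DataSliceState.Ready)
            {
                Console.WriteLine("Slice execution is done with status: " + slice.State);
                done = true;
                break;
            }
            Console.WriteLine("Slice status is: " + slice.State);
        }
    }
}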
public void initialize(DualLoadUtil util)
{
    util.createDataFactory();
    util.createLinkedService_BlobStorage();
    util.createLinkedService_ControlDB();
}
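// Possible sketches of the first two helpers called above, using the ADF V1 management SDK.
// The region, the linked-service name, the "client" field on DualLoadUtil, and the assumed
// DualLoadConfig.CONNECTION_STRING_BlobStorage member are illustrative assumptions; only
// the SDK types and calls themselves are known API.
public void createDataFactory()
{
    client.DataFactories.CreateOrUpdate(DualLoadConfig.RESOURCEGROUP_Name,
        new DataFactoryCreateOrUpdateParameters()
        {
            DataFactory = new DataFactory()
            {
                Name = DualLoadConfig.DATAFACTORY_Name,
                Location = "westus",                      // assumed region
                Properties = new DataFactoryProperties()
            }
        });
}

public void createLinkedService_BlobStorage()
{
    client.LinkedServices.CreateOrUpdate(DualLoadConfig.RESOURCEGROUP_Name, DualLoadConfig.DATAFACTORY_Name,
        new LinkedServiceCreateOrUpdateParameters()
        {
            LinkedService = new LinkedService()
            {
                Name = "AzureStorageLinkedService",       // assumed linked-service name
                Properties = new LinkedServiceProperties(
                    new AzureStorageLinkedService(DualLoadConfig.CONNECTION_STRING_BlobStorage)) // assumed config member
            }
        });
}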
private void createPipelines(DualLoadUtil util, String basePipelineName)
{
    DualLoadActivities dLActivities = new DualLoadActivities();
    AzureSQLController sqlController = new AzureSQLController();

    List<Dictionary<string, object>> resultList = sqlController.executeSQLQuery(
        DualLoadConfig.CONNECTION_STRING_ControlDB,
        DualLoadConfig.QUERY_LOADPROCESS_2.Replace("$PdwId", "1").Replace("$ControlProcess", "'DimEmployee'"));

    List<Object> controlIdList = new List<Object>();

    foreach (Dictionary<string, object> result in resultList)
    {
        foreach (KeyValuePair<string, object> kvp in result)
        {
            string key = kvp.Key;
            object value = kvp.Value;
            Console.WriteLine("Key: " + key + ", value: " + value);

            if ("ETLControlDetailID".Equals(key))
            {
                controlIdList.Add(value);
            }
        }
    }

    for (int i = 0; i < controlIdList.Count; i++) // i represents the number of Load Process pipelines that will get created
    {
        DateTime PipelineActivePeriodStartTime = new DateTime(2014, 8, 9, 1, 0, 0, 0, DateTimeKind.Local).AddMinutes(60);
        DateTime PipelineActivePeriodEndTime = PipelineActivePeriodStartTime.AddMinutes(60);

        string controlId = controlIdList.ElementAt(i).ToString();
        Console.WriteLine("controlId " + controlId);
        Console.WriteLine("Creating Pipeline: " + basePipelineName + "_" + i);

        util.getDatasets().createDataSet_Load_1_SqlDummy(i);
        util.getDatasets().createDataSet_Load_2_SqlDummy(i);

        util.getClient().Pipelines.CreateOrUpdate(DualLoadConfig.RESOURCEGROUP_Name, DualLoadConfig.DATAFACTORY_Name,
            new PipelineCreateOrUpdateParameters()
            {
                Pipeline = new Pipeline()
                {
                    Name = basePipelineName + "_" + i,
                    Properties = new PipelineProperties()
                    {
                        Description = "DualLoad LoadProcess pipeline runs the load steps for each control ID",

                        // Initial value for the pipeline's active period. With this, you won't need to set slice status.
                        Start = PipelineActivePeriodStartTime,
                        End = PipelineActivePeriodEndTime,

                        Activities = new List<Activity>()
                        {
                            dLActivities.create_Activity_LoadProcess_3(controlId, i),
                            dLActivities.create_Activity_LoadProcess_5(controlId, i)
                        }
                    }
                }
            }
        );

        util.getADFMonitor().monitorPipelineOutput(PipelineActivePeriodStartTime, PipelineActivePeriodEndTime,
            DualLoadConfig.DATASET_LOAD_2_SQLDUMMY + "_" + i);
    }
}
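// A minimal sketch of what AzureSQLController.executeSQLQuery could look like, assuming it
// simply runs the query against the control database (System.Data.SqlClient) and returns
// each row as a column-name/value dictionary. The shape is inferred from how the result is
// consumed above; it is not the project's actual implementation.
public List<Dictionary<string, object>> executeSQLQuery(string connectionString, string query)
{
    List<Dictionary<string, object>> rows = new List<Dictionary<string, object>>();

    using (SqlConnection connection = new SqlConnection(connectionString))
    using (SqlCommand command = new SqlCommand(query, connection))
    {
        connection.Open();
        using (SqlDataReader reader = command.ExecuteReader())
        {
            while (reader.Read())
            {
                // One dictionary per row, keyed by column name.
                Dictionary<string, object> row = new Dictionary<string, object>();
                for (int col = 0; col < reader.FieldCount; col++)
                {
                    row[reader.GetName(col)] = reader.GetValue(col);
                }
                rows.Add(row);
            }
        }
    }

    return rows;
}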
private void createPipelines(DualLoadUtil util, String basePipelineName)
{
    DualLoadActivities dLActivities = new DualLoadActivities();
    int i = 0;

    AzureSQLController sqlController = new AzureSQLController();
    List<Dictionary<string, object>> resultList = sqlController.executeSQLQuery(
        DualLoadConfig.CONNECTION_STRING_ControlDB,
        DualLoadConfig.QUERY_ARCHIVE_1.Replace("$ControlProcess", "'dimEmployee'"));

    foreach (Dictionary<string, object> result in resultList)
    {
        foreach (KeyValuePair<string, object> kvp in result)
        {
            string key = kvp.Key;
            object value = kvp.Value;
            //Console.WriteLine("Key: " + key + ", value: " + value);

            CONTROLDETAIL_ID = ("ETLControlDetailID".Equals(key)) ? (int)value : CONTROLDETAIL_ID;
            FILENAME = ("FileName".Equals(key)) ? value.ToString() : FILENAME;
            ARCHIVED_FOLDER_PATH = ("ArchivePath".Equals(key)) ? value.ToString() : ARCHIVED_FOLDER_PATH;

            Console.WriteLine("CONTROLDETAIL_ID = " + CONTROLDETAIL_ID);
            Console.WriteLine("FILENAME = " + FILENAME);
            Console.WriteLine("ARCHIVED_FOLDER_PATH = " + ARCHIVED_FOLDER_PATH);
        }

        DateTime PipelineActivePeriodStartTime = new DateTime(2014, 8, 9, 1, 0, 0, 0, DateTimeKind.Local).AddMinutes(60);
        DateTime PipelineActivePeriodEndTime = PipelineActivePeriodStartTime.AddMinutes(60);

        Console.WriteLine("file being processed: " + FILENAME);
        String pipelineName = basePipelineName + "_" + i;

        util.getDatasets().createDataSet_ToBeProcessedPath(CONTROL_PROCESS, TOBEPROCESSED_FOLDER_PATH, FILENAME, pipelineName, i);
        util.getDatasets().createDataSet_ArchivedFolder(CONTROL_PROCESS, ARCHIVED_FOLDER_PATH, FILENAME, i);
        util.getDatasets().createDataSet_Archive_1_SqlDummy(i);

        Console.WriteLine("Creating Pipeline: " + pipelineName);

        util.getClient().Pipelines.CreateOrUpdate(DualLoadConfig.RESOURCEGROUP_Name, DualLoadConfig.DATAFACTORY_Name,
            new PipelineCreateOrUpdateParameters()
            {
                Pipeline = new Pipeline()
                {
                    Name = pipelineName,
                    Properties = new PipelineProperties()
                    {
                        Description = "Archive pipeline moves processed files to the archived location",

                        // Initial value for the pipeline's active period. With this, you won't need to set slice status.
                        Start = PipelineActivePeriodStartTime,
                        End = PipelineActivePeriodEndTime,

                        Activities = new List<Activity>()
                        {
                            dLActivities.create_Activity_Archive_2(DualLoadConfig.DATASET_ToBeProcessedFolder + "_" + pipelineName,
                                                                   DualLoadConfig.DATASET_ArchivedFolder, i),
                            dLActivities.create_Activity_Archive_3(CONTROLDETAIL_ID, FILENAME, i)
                        }
                    }
                }
            }
        );

        util.getADFMonitor().monitorPipelineOutput(PipelineActivePeriodStartTime, PipelineActivePeriodEndTime,
            DualLoadConfig.DATASET_ArchivedFolder + "_" + i);

        i++;
        storageController.deleteBlob(CONTROL_PROCESS, TOBEPROCESSED_FOLDER_PATH, FILENAME);
    }
}
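// A possible sketch of storageController.deleteBlob, assuming it removes a single blob at
// "<folderPath>/<fileName>" inside a container named after the control process, using the
// classic Microsoft.WindowsAzure.Storage client. The container-naming convention and the
// assumed DualLoadConfig.CONNECTION_STRING_BlobStorage member are illustrative only.
public void deleteBlob(string controlProcess, string folderPath, string fileName)
{
    CloudStorageAccount account = CloudStorageAccount.Parse(DualLoadConfig.CONNECTION_STRING_BlobStorage);
    CloudBlobClient blobClient = account.CreateCloudBlobClient();

    // Assumed convention: one container per control process, lower-cased.
    CloudBlobContainer container = blobClient.GetContainerReference(controlProcess.ToLower());

    CloudBlockBlob blob = container.GetBlockBlobReference(folderPath + "/" + fileName);
    blob.DeleteIfExists();

    Console.WriteLine("Deleted blob: " + folderPath + "/" + fileName);
}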