Example 1
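        // Creates one Init pipeline per file in SOURCE_FOLDER_FILELIST: registers the source,
        // to-be-processed and SQL-dummy datasets, submits a pipeline containing the Init_3 and
        // Init_4 activities, monitors its output, and deletes the source blob once processed.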
        private void createPipelines(DualLoadUtil util, String basePipelineName)
        {
            DualLoadActivities dLActivities = new DualLoadActivities();
            int i = 0; //i represents # of Init pipelines that will get created

            foreach (string file in SOURCE_FOLDER_FILELIST)
            {
                DateTime PipelineActivePeriodStartTime = new DateTime(2014, 8, 9, 1, 0, 0, 0, DateTimeKind.Local).AddMinutes(60);
                DateTime PipelineActivePeriodEndTime   = PipelineActivePeriodStartTime.AddMinutes(60);
                Console.WriteLine("file being processed: " + file);
                String pipelineName = basePipelineName + "_" + i;

                util.getDatasets().createDataSet_SourceFolder(CONTROL_PROCESS, SOURCE_FOLDER_PATH, file, i);
                util.getDatasets().createDataSet_ToBeProcessedPath(CONTROL_PROCESS, TOBEPROCESSED_FOLDER_PATH, file, pipelineName, i);
                util.getDatasets().createDataSet_Init_SqlDummy(i);
                Console.WriteLine("Creating Pipeline: " + pipelineName);



                util.getClient().Pipelines.CreateOrUpdate(DualLoadConfig.RESOURCEGROUP_Name, DualLoadConfig.DATAFACTORY_Name,
                                                          new PipelineCreateOrUpdateParameters()
                {
                    Pipeline = new Pipeline()
                    {
                        Name       = pipelineName,
                        Properties = new PipelineProperties()
                        {
                            Description = "DualLoadInit Pipeline will pull all files to be processed in central location",

                            // Initial value for pipeline's active period. With this, you won't need to set slice status

                            Start = PipelineActivePeriodStartTime,
                            End   = PipelineActivePeriodEndTime,


                            Activities = new List <Activity>()
                            {
                                dLActivities.create_Activity_Init_3(CONTROL_ID, file, i),
                                dLActivities.create_Activity_Init_4(DualLoadConfig.DATASET_SOURCEFOLDER + "_" + i, DualLoadConfig.DATASET_ToBeProcessedFolder + "_" + pipelineName, i)
                            }
                        }
                    }
                });
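                // Monitor the pipeline's output dataset, then remove the processed file from the source folder.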
                util.getADFMonitor().monitorPipelineOutput(PipelineActivePeriodStartTime, PipelineActivePeriodEndTime, DualLoadConfig.DATASET_ToBeProcessedFolder + "_" + pipelineName);
                i++;
                storageController.deleteBlob(CONTROL_PROCESS, SOURCE_FOLDER_PATH, file);
            }
        }
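        // Creates one LoadProcess pipeline per ETLControlDetailID returned by QUERY_LOADPROCESS_2:
        // registers the two SQL-dummy datasets, submits a pipeline containing the LoadProcess_3 and
        // LoadProcess_5 activities, and monitors the Load_2 SQL-dummy output dataset.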
        private void createPipelines(DualLoadUtil util, String basePipelineName)
        {
            DualLoadActivities dLActivities = new DualLoadActivities();

            AzureSQLController sqlController = new AzureSQLController();

            List <Dictionary <string, object> > resultList = sqlController.executeSQLQuery(DualLoadConfig.CONNECTION_STRING_ControlDB, DualLoadConfig.QUERY_LOADPROCESS_2.Replace("$PdwId", "1").Replace("$ControlProcess", "'DimEmployee'"));

            List <Object> controlIdList = new List <Object>();

            foreach (Dictionary <string, object> result in resultList)
            {
                foreach (KeyValuePair <string, object> kvp in result)
                {
                    string key   = kvp.Key;
                    object value = kvp.Value;
                    Console.WriteLine("Key: " + key + ", value: " + value);
                    if ("ETLControlDetailID".Equals(key))
                    {
                        controlIdList.Add(value);
                    }
                }
            }

            for (int i = 0; i < controlIdList.Count; i++) //i represents # of Load Process pipelines that will get created
            {
                DateTime PipelineActivePeriodStartTime = new DateTime(2014, 8, 9, 1, 0, 0, 0, DateTimeKind.Local).AddMinutes(60);
                DateTime PipelineActivePeriodEndTime   = PipelineActivePeriodStartTime.AddMinutes(60);
                string   controlId = controlIdList.ElementAt(i).ToString();
                Console.WriteLine("controlId " + controlId);
                Console.WriteLine("Creating Pipeline: " + basePipelineName + "_" + i);
                util.getDatasets().createDataSet_Load_1_SqlDummy(i);
                util.getDatasets().createDataSet_Load_2_SqlDummy(i);

                util.getClient().Pipelines.CreateOrUpdate(DualLoadConfig.RESOURCEGROUP_Name, DualLoadConfig.DATAFACTORY_Name,
                                                          new PipelineCreateOrUpdateParameters()
                {
                    Pipeline = new Pipeline()
                    {
                        Name       = basePipelineName + "_" + i,
                        Properties = new PipelineProperties()
                        {
                            Description = "DualLoadInit Pipeline will pull all files to be processed in central location",

                            // Initial value for pipeline's active period. With this, you won't need to set slice status
                            Start = PipelineActivePeriodStartTime,
                            End   = PipelineActivePeriodEndTime,

                            Activities = new List <Activity>()
                            {
                                dLActivities.create_Activity_LoadProcess_3(controlId, i),
                                dLActivities.create_Activity_LoadProcess_5(controlId, i)
                            }
                        }
                    }
                });
                util.getADFMonitor().monitorPipelineOutput(PipelineActivePeriodStartTime, PipelineActivePeriodEndTime, DualLoadConfig.DATASET_LOAD_2_SQLDUMMY + "_" + i);
            }
        }
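        // Creates one Archive pipeline per row returned by QUERY_ARCHIVE_1: reads the control detail ID,
        // file name and archive path from each row, registers the to-be-processed, archived and SQL-dummy
        // datasets, submits a pipeline containing the Archive_2 and Archive_3 activities, monitors its
        // output, and deletes the file from the to-be-processed folder.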
        private void createPipelines(DualLoadUtil util, String basePipelineName)
        {
            DualLoadActivities dLActivities = new DualLoadActivities();
            int i = 0;
            AzureSQLController sqlController = new AzureSQLController();
            List <Dictionary <string, object> > resultList = sqlController.executeSQLQuery(DualLoadConfig.CONNECTION_STRING_ControlDB, DualLoadConfig.QUERY_ARCHIVE_1.Replace("$ControlProcess", "'dimEmployee'"));

            foreach (Dictionary <string, object> result in resultList)
            {
                foreach (KeyValuePair <string, object> kvp in result)
                {
                    string key   = kvp.Key;
                    object value = kvp.Value;
                    //Console.WriteLine("Key: " + key + ", value: " + value);
                    CONTROLDETAIL_ID     = ("ETLControlDetailID".Equals(key)) ? (int)value : CONTROLDETAIL_ID;
                    FILENAME             = ("FileName".Equals(key)) ? value.ToString() : FILENAME;
                    ARCHIVED_FOLDER_PATH = ("ArchivePath".Equals(key)) ? value.ToString() : ARCHIVED_FOLDER_PATH;
                    Console.WriteLine("CONTROLDETAIL_ID = " + CONTROLDETAIL_ID);
                    Console.WriteLine("FILENAME = " + FILENAME);
                    Console.WriteLine("ARCHIVED_FOLDER_PATH = " + ARCHIVED_FOLDER_PATH);
                }

                DateTime PipelineActivePeriodStartTime = new DateTime(2014, 8, 9, 1, 0, 0, 0, DateTimeKind.Local).AddMinutes(60);
                DateTime PipelineActivePeriodEndTime   = PipelineActivePeriodStartTime.AddMinutes(60);

                Console.WriteLine("file being processed: " + FILENAME);
                String pipelineName = basePipelineName + "_" + i;

                util.getDatasets().createDataSet_ToBeProcessedPath(CONTROL_PROCESS, TOBEPROCESSED_FOLDER_PATH, FILENAME, pipelineName, i);
                util.getDatasets().createDataSet_ArchivedFolder(CONTROL_PROCESS, ARCHIVED_FOLDER_PATH, FILENAME, i);
                util.getDatasets().createDataSet_Archive_1_SqlDummy(i);

                Console.WriteLine("Creating Pipeline: " + pipelineName);


                util.getClient().Pipelines.CreateOrUpdate(DualLoadConfig.RESOURCEGROUP_Name, DualLoadConfig.DATAFACTORY_Name,
                                                          new PipelineCreateOrUpdateParameters()
                {
                    Pipeline = new Pipeline()
                    {
                        Name       = pipelineName,
                        Properties = new PipelineProperties()
                        {
                            Description = "Archive Pipeline will pull all files to be processed in archived location",

                            // Initial value for pipeline's active period. With this, you won't need to set slice status
                            Start = PipelineActivePeriodStartTime,
                            End   = PipelineActivePeriodEndTime,

                            Activities = new List <Activity>()
                            {
                                dLActivities.create_Activity_Archive_2(DualLoadConfig.DATASET_ToBeProcessedFolder + "_" + pipelineName, DualLoadConfig.DATASET_ArchivedFolder, i),
                                dLActivities.create_Activity_Archive_3(CONTROLDETAIL_ID, FILENAME, i)
                            }
                        }
                    }
                });
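                // Monitor the Archive pipeline's output dataset, then remove the file from the to-be-processed folder.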
                util.getADFMonitor().monitorPipelineOutput(PipelineActivePeriodStartTime, PipelineActivePeriodEndTime, DualLoadConfig.DATASET_ArchivedFolder + "_" + i);
                i++;

                storageController.deleteBlob(CONTROL_PROCESS, TOBEPROCESSED_FOLDER_PATH, FILENAME);
            }
        }