Example #1
        private void executeDBQuery_Step1()
        {
            AzureSQLController sqlController = new AzureSQLController();
            List<Dictionary<string, object>> resultList = sqlController.executeSQLQuery(DualLoadConfig.CONNECTION_STRING_ControlDB, DualLoadConfig.QUERY_INIT_1);

            foreach (Dictionary<string, object> result in resultList)
            {
                foreach (KeyValuePair<string, object> kvp in result)
                {
                    string key   = kvp.Key;
                    object value = kvp.Value;
                    //Console.WriteLine("Key: " + key + ", value: " + value);
                    // Map each returned column onto the matching class-level field
                    CONTROL_ID                = ("id".Equals(key)) ? (int)value : CONTROL_ID;
                    LASTRUN_DATE              = ("LastRunDate".Equals(key)) ? (DateTime)value : LASTRUN_DATE;
                    FILENAME_LIKE             = ("FileNameLike".Equals(key)) ? value.ToString() : FILENAME_LIKE;
                    CONTROL_PROCESS           = ("ControlProcess".Equals(key)) ? value.ToString() : CONTROL_PROCESS;
                    SOURCE_FOLDER_PATH        = ("FilePath".Equals(key)) ? value.ToString() : SOURCE_FOLDER_PATH;
                    TOBEPROCESSED_FOLDER_PATH = ("ToBeProcessedPath".Equals(key)) ? value.ToString() : TOBEPROCESSED_FOLDER_PATH;
                    ARCHIVE_FOLDER_PATH       = ("ArchivePath".Equals(key)) ? value.ToString() : ARCHIVE_FOLDER_PATH;
                }
            }

            Console.WriteLine("CONTROL_ID = " + CONTROL_ID);
            Console.WriteLine("CONTROL_PROCESS = " + CONTROL_PROCESS);
            Console.WriteLine("LASTRUN_DATE = " + LASTRUN_DATE);
            Console.WriteLine("FILENAME_LIKE = " + FILENAME_LIKE);
            Console.WriteLine("SOURCE_FOLDER_PATH = " + SOURCE_FOLDER_PATH);
            Console.WriteLine("TOBEPROCESSED_FOLDER_PATH = " + TOBEPROCESSED_FOLDER_PATH);
            Console.WriteLine("ARCHIVE_FOLDER_PATH = " + ARCHIVE_FOLDER_PATH);
        }
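
For reference, executeSQLQuery itself is not shown in these examples. A minimal sketch of what it might look like, assuming plain System.Data.SqlClient (add "using System.Data.SqlClient;") and that each row comes back as a column-name/value dictionary; this is an illustration, not the actual AzureSQLController implementation:

        // Hypothetical sketch: run a query and return each row as a column-name/value map.
        // Assumes System.Data.SqlClient; not the real AzureSQLController code.
        public List<Dictionary<string, object>> executeSQLQuery(string connectionString, string query)
        {
            List<Dictionary<string, object>> rows = new List<Dictionary<string, object>>();
            using (SqlConnection connection = new SqlConnection(connectionString))
            using (SqlCommand command = new SqlCommand(query, connection))
            {
                connection.Open();
                using (SqlDataReader reader = command.ExecuteReader())
                {
                    while (reader.Read())
                    {
                        // One dictionary per row, keyed by column name
                        Dictionary<string, object> row = new Dictionary<string, object>();
                        for (int i = 0; i < reader.FieldCount; i++)
                        {
                            row[reader.GetName(i)] = reader.GetValue(i);
                        }
                        rows.Add(row);
                    }
                }
            }
            return rows;
        }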
Example #2
        // Main method for testing SQL queries (assumes executeDBQuery_Step1 is exposed on AzureSQLController)
        static void Main(string[] args)
        {
            AzureSQLController sqlController = new AzureSQLController();

            sqlController.executeDBQuery_Step1();
            Console.ReadKey();
        }
        private void createPipelines(DualLoadUtil util, String basePipelineName)
        {
            DualLoadActivities dLActivities = new DualLoadActivities();


            AzureSQLController sqlController = new AzureSQLController();

            List<Dictionary<string, object>> resultList = sqlController.executeSQLQuery(DualLoadConfig.CONNECTION_STRING_ControlDB, DualLoadConfig.QUERY_LOADPROCESS_2.Replace("$PdwId", "1").Replace("$ControlProcess", "'DimEmployee'"));

            List<object> controlIdList = new List<object>();

            foreach (Dictionary<string, object> result in resultList)
            {
                foreach (KeyValuePair<string, object> kvp in result)
                {
                    string key   = kvp.Key;
                    object value = kvp.Value;
                    Console.WriteLine("Key: " + key + ", value: " + value);
                    if ("ETLControlDetailID".Equals(key))
                    {
                        controlIdList.Add(value);
                    }
                }
            }

            for (int i = 0; i < controlIdList.Count; i++) // one Load Process pipeline is created per control ID
            {
                // Hard-coded one-hour active window (2014-08-09 02:00 to 03:00 local time)
                DateTime PipelineActivePeriodStartTime = new DateTime(2014, 8, 9, 1, 0, 0, 0, DateTimeKind.Local).AddMinutes(60);
                DateTime PipelineActivePeriodEndTime   = PipelineActivePeriodStartTime.AddMinutes(60);
                string   controlId = controlIdList.ElementAt(i).ToString();
                Console.WriteLine("controlId " + controlId);
                Console.WriteLine("Creating Pipeline: " + basePipelineName + "_" + i);
                util.getDatasets().createDataSet_Load_1_SqlDummy(i);
                util.getDatasets().createDataSet_Load_2_SqlDummy(i);

                util.getClient().Pipelines.CreateOrUpdate(DualLoadConfig.RESOURCEGROUP_Name, DualLoadConfig.DATAFACTORY_Name,
                                                          new PipelineCreateOrUpdateParameters()
                {
                    Pipeline = new Pipeline()
                    {
                        Name       = basePipelineName + "_" + i,
                        Properties = new PipelineProperties()
                        {
                            Description = "DualLoadInit Pipeline will pull all files to be processed in central location",

                            // Initial value for pipeline's active period. With this, you won't need to set slice status
                            Start = PipelineActivePeriodStartTime,
                            End   = PipelineActivePeriodEndTime,

                            Activities = new List<Activity>()
                            {
                                dLActivities.create_Activity_LoadProcess_3(controlId, i),
                                dLActivities.create_Activity_LoadProcess_5(controlId, i)
                            }
                        }
                    }
                });
                util.getADFMonitor().monitorPipelineOutput(PipelineActivePeriodStartTime, PipelineActivePeriodEndTime, DualLoadConfig.DATASET_LOAD_2_SQLDUMMY + "_" + i);
            }
        }
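
The QUERY_LOADPROCESS_2 constant used above is a SQL string with $PdwId and $ControlProcess placeholder tokens that get filled in via String.Replace before execution. The real constant lives in DualLoadConfig and is not shown; a hypothetical stand-in to illustrate the convention:

        // Hypothetical stand-in for the real DualLoadConfig.QUERY_LOADPROCESS_2 (not shown here).
        private const string QUERY_LOADPROCESS_2_SKETCH =
            "SELECT ETLControlDetailID FROM ETLControlDetail " +
            "WHERE PdwId = $PdwId AND ControlProcess = $ControlProcess";

        private static string buildLoadProcessQuery(string pdwId, string controlProcess)
        {
            // Same token-substitution pattern as the call above. Note that raw string
            // replacement is open to SQL injection; SqlParameter is the safer route
            // when the values come from outside the program.
            return QUERY_LOADPROCESS_2_SKETCH
                   .Replace("$PdwId", pdwId)
                   .Replace("$ControlProcess", "'" + controlProcess + "'");
        }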
        private void createPipelines(DualLoadUtil util, String basePipelineName)
        {
            DualLoadActivities dLActivities = new DualLoadActivities();
            int i = 0;
            AzureSQLController sqlController = new AzureSQLController();
            List<Dictionary<string, object>> resultList = sqlController.executeSQLQuery(DualLoadConfig.CONNECTION_STRING_ControlDB, DualLoadConfig.QUERY_ARCHIVE_1.Replace("$ControlProcess", "'dimEmployee'"));


            foreach (Dictionary<string, object> result in resultList)
            {
                foreach (KeyValuePair<string, object> kvp in result)
                {
                    string key   = kvp.Key;
                    object value = kvp.Value;
                    //Console.WriteLine("Key: " + key + ", value: " + value);
                    CONTROLDETAIL_ID     = ("ETLControlDetailID".Equals(key)) ? (int)value : CONTROLDETAIL_ID;
                    FILENAME             = ("FileName".Equals(key)) ? value.ToString() : FILENAME;
                    ARCHIVED_FOLDER_PATH = ("ArchivePath".Equals(key)) ? value.ToString() : ARCHIVED_FOLDER_PATH;
                    Console.WriteLine("CONTROLDETAIL_ID = " + CONTROLDETAIL_ID);
                    Console.WriteLine("FILENAME = " + FILENAME);
                    Console.WriteLine("ARCHIVED_FOLDER_PATH = " + ARCHIVED_FOLDER_PATH);
                }

                DateTime PipelineActivePeriodStartTime = new DateTime(2014, 8, 9, 1, 0, 0, 0, DateTimeKind.Local).AddMinutes(60);
                DateTime PipelineActivePeriodEndTime   = PipelineActivePeriodStartTime.AddMinutes(60);

                Console.WriteLine("file being processed: " + FILENAME);
                String pipelineName = basePipelineName + "_" + i;

                util.getDatasets().createDataSet_ToBeProcessedPath(CONTROL_PROCESS, TOBEPROCESSED_FOLDER_PATH, FILENAME, pipelineName, i);
                util.getDatasets().createDataSet_ArchivedFolder(CONTROL_PROCESS, ARCHIVED_FOLDER_PATH, FILENAME, i);
                util.getDatasets().createDataSet_Archive_1_SqlDummy(i);

                Console.WriteLine("Creating Pipeline: " + pipelineName);


                util.getClient().Pipelines.CreateOrUpdate(DualLoadConfig.RESOURCEGROUP_Name, DualLoadConfig.DATAFACTORY_Name,
                                                          new PipelineCreateOrUpdateParameters()
                {
                    Pipeline = new Pipeline()
                    {
                        Name       = pipelineName,
                        Properties = new PipelineProperties()
                        {
                            Description = "Archive Pipeline will pull all files to be processed in archived location",

                            // Initial value for pipeline's active period. With this, you won't need to set slice status
                            Start = PipelineActivePeriodStartTime,
                            End   = PipelineActivePeriodEndTime,

                            Activities = new List<Activity>()
                            {
                                dLActivities.create_Activity_Archive_2(DualLoadConfig.DATASET_ToBeProcessedFolder + "_" + pipelineName, DualLoadConfig.DATASET_ArchivedFolder, i),
                                dLActivities.create_Activity_Archive_3(CONTROLDETAIL_ID, FILENAME, i)
                            }
                        }
                    }
                });
                util.getADFMonitor().monitorPipelineOutput(PipelineActivePeriodStartTime, PipelineActivePeriodEndTime, DualLoadConfig.DATASET_ArchivedFolder + "_" + i);
                i++;

                // Remove the source copy now that the file has been archived
                // (storageController is presumably a class-level field, not shown here).
                storageController.deleteBlob(CONTROL_PROCESS, TOBEPROCESSED_FOLDER_PATH, FILENAME);
            }
        }
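
The final deleteBlob call removes the source file once its archive pipeline has been scheduled. The AzureStorageController is not shown; a minimal sketch of what deleteBlob might do, assuming the classic Microsoft.WindowsAzure.Storage SDK, that the first argument names the container, and a hypothetical CONNECTION_STRING_Storage config constant:

        // Hypothetical sketch of deleteBlob, assuming the classic Microsoft.WindowsAzure.Storage
        // SDK; the real AzureStorageController implementation is not shown in these examples.
        public void deleteBlob(string containerName, string folderPath, string fileName)
        {
            // CONNECTION_STRING_Storage is an assumed config constant, mirroring CONNECTION_STRING_ControlDB.
            CloudStorageAccount account = CloudStorageAccount.Parse(DualLoadConfig.CONNECTION_STRING_Storage);
            CloudBlobClient blobClient = account.CreateCloudBlobClient();
            CloudBlobContainer container = blobClient.GetContainerReference(containerName.ToLowerInvariant());
            CloudBlockBlob blob = container.GetBlockBlobReference(folderPath + "/" + fileName);
            blob.DeleteIfExists(); // no-op if the blob is already gone
        }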