/// <summary>
/// Starts a run of the named Data Factory pipeline, passing the input/output
/// file system and directory values through as pipeline parameters.
/// </summary>
/// <param name="inputFileSystem">Value for the pipeline's "inputFileSystem" parameter.</param>
/// <param name="inputFileDirectory">Value for the pipeline's "inputFileDirectory" parameter.</param>
/// <param name="outputFileSystem">Value for the pipeline's "outputFileSystem" parameter.</param>
/// <param name="outputFileDirectory">Value for the pipeline's "outputFileDirectory" parameter.</param>
/// <param name="pipelineName">Name of the pipeline to run.</param>
/// <param name="resourceGroup">Resource group containing the data factory.</param>
/// <param name="dataFactoryName">Name of the data factory.</param>
/// <param name="subscriptionId">Subscription hosting the data factory.</param>
/// <param name="log">Logger used to report the created run ID.</param>
public static void StartPipeline(string inputFileSystem, string inputFileDirectory, string outputFileSystem, string outputFileDirectory, string pipelineName, string resourceGroup, string dataFactoryName, string subscriptionId, Microsoft.Extensions.Logging.ILogger log)
{
    // Uses MSI to get an Azure AD token: you can run locally if you have a
    // domain-joined computer and your domain is synced with Azure AD.
    // The Function App must be in the Contributor role (RBAC) for the target.
    var tokenProvider = new AzureServiceTokenProvider();

    // FIX: .GetAwaiter().GetResult() instead of .Result so failures surface as
    // the original exception rather than being wrapped in an AggregateException.
    string accessToken = tokenProvider.GetAccessTokenAsync("https://management.azure.com/").GetAwaiter().GetResult();
    ServiceClientCredentials cred = new TokenCredentials(accessToken);
    var client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };

    // Create a pipeline run with the caller-supplied parameters.
    Console.WriteLine("Creating pipeline run...");
    Dictionary<string, object> parameters = new Dictionary<string, object>
    {
        { "inputFileSystem", inputFileSystem },
        { "inputFileDirectory", inputFileDirectory },
        { "outputFileSystem", outputFileSystem },
        { "outputFileDirectory", outputFileDirectory }
    };
    CreateRunResponse runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(resourceGroup, dataFactoryName, pipelineName, parameters: parameters).GetAwaiter().GetResult().Body;
    log.LogInformation("Pipeline run ID: " + runResponse.RunId);
} // StartPipeline
/// <summary>
/// Prompts the operator for a pipeline name, starts a run of that pipeline in
/// the configured Data Factory, then polls the run until it leaves the
/// "InProgress" state, printing the status on each poll.
/// </summary>
/// <param name="client">An authenticated Data Factory management client.</param>
public static void correrPipe(DataFactoryManagementClient client)
{
    Console.Write("\nNombre del pipe a correr:");
    string nombrePipe = Console.ReadLine();

    // Create a pipeline run.
    // FIX: .GetAwaiter().GetResult() instead of .Result so a failure surfaces
    // as the original exception rather than an AggregateException wrapper.
    Console.WriteLine("\nCreating pipeline run...");
    CreateRunResponse runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(DatosGrales.resourceGroup, DatosGrales.dataFactoryName, nombrePipe).GetAwaiter().GetResult().Body;
    Console.WriteLine("Pipeline run ID: " + runResponse.RunId); // e.g. Pipeline-LlamarSSIS

    // Known pipelines:
    //   Pipeline-LlamarSSIS
    //   Pipeline-Sql-DataLake-Tarea

    // Monitor the pipeline run until it is no longer in progress.
    Console.WriteLine("Checking pipeline run status...");
    PipelineRun pipelineRun;
    while (true)
    {
        pipelineRun = client.PipelineRuns.Get(DatosGrales.resourceGroup, DatosGrales.dataFactoryName, runResponse.RunId);
        Console.WriteLine("Status: " + pipelineRun.Status);
        if (pipelineRun.Status == "InProgress")
        {
            System.Threading.Thread.Sleep(10000); // poll every 10 seconds
        }
        else
        {
            break;
        }
    }
    Console.WriteLine("Pipeline run ID: " + runResponse.RunId + "\n");
}
/// <summary>
/// Starts a run of the named pipeline, forwarding all optional run options
/// (parameters, reference run for recovery, start activity), and returns the
/// new run's identifier.
/// </summary>
public virtual string CreatePipelineRun(string resourceGroupName, string dataFactoryName, string pipelineName, Dictionary <string, object> paramDictionary, string referencePipelineRunId, bool?isRecovery, string startActivityName, bool?startFromFailure)
{
    var createRunResponse = this.DataFactoryManagementClient.Pipelines.CreateRun(
        resourceGroupName,
        dataFactoryName,
        pipelineName,
        parameters: paramDictionary,
        referencePipelineRunId: referencePipelineRunId,
        isRecovery: isRecovery,
        startActivityName: startActivityName,
        startFromFailure: startFromFailure);

    return createRunResponse.RunId;
}
/// <summary>
/// Creates (or updates) a pipeline whose only activity runs the referenced
/// Spark job definition, then starts a run of that pipeline.
/// </summary>
public async Task CreateAndRunPipeline()
{
    const string PipelineName = "Test-Pipeline";
    const string JobName = "SparkJobName";
    const string ActivityName = "ActivityName";

    string workspaceUrl = TestEnvironment.WorkspaceUrl;
    var pipelineClient = new PipelineClient(endpoint: new Uri(workspaceUrl), credential: new DefaultAzureCredential());

    // Build the single-activity pipeline definition.
    var sparkJobReference = new SynapseSparkJobReference(SparkJobReferenceType.SparkJobDefinitionReference, JobName);
    var sparkActivity = new SynapseSparkJobDefinitionActivity(ActivityName, sparkJobReference);
    var resource = new PipelineResource();
    resource.Activities.Add(sparkActivity);

    Console.WriteLine("Create pipeline if not already exists.");
    await pipelineClient.StartCreateOrUpdatePipelineAsync(PipelineName, resource);
    Console.WriteLine("Pipeline created");

    Console.WriteLine("Running pipeline.");
    CreateRunResponse run = await pipelineClient.CreatePipelineRunAsync(PipelineName);
    Console.WriteLine("Run started. ID: {0}", run.RunId);
}
/// <summary>
/// HTTP-triggered Azure Function: authenticates with a service principal,
/// starts a run of the configured Data Factory pipeline, and returns the run
/// ID in the HTTP response.
/// </summary>
/// <param name="req">The triggering HTTP request.</param>
/// <param name="log">Function trace writer (currently unused).</param>
/// <returns>200 OK with the created pipeline run ID.</returns>
public static async Task<HttpResponseMessage> Run([HttpTrigger(AuthorizationLevel.Function, "get", Route = null)] HttpRequestMessage req, TraceWriter log)
{
    // Set variables.
    // NOTE(security): populate these from app settings or Key Vault at
    // deployment time; never commit real credentials to source.
    string tenantID = ""; // your tenant ID
    string applicationId = ""; // your application ID
    string authenticationKey = ""; // your authentication key for the application
    string subscriptionId = ""; // your subscription ID where the data factory resides
    string resourceGroup = ""; // your resource group where the data factory resides
    string dataFactoryName = ""; // specify the name of data factory to create. It must be globally unique
    string pipelineName = ""; // name of the pipeline

    // Authenticate and create a data factory management client.
    // FIX: this method is async, so the token acquisition and run creation are
    // awaited instead of blocked on with .Result — blocking inside async code
    // risks deadlock and wraps failures in AggregateException.
    var context = new AuthenticationContext("https://login.windows.net/" + tenantID);
    ClientCredential cc = new ClientCredential(applicationId, authenticationKey);
    AuthenticationResult result = await context.AcquireTokenAsync("https://management.azure.com/", cc);
    ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);
    var client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };

    CreateRunResponse runResponse = (await client.Pipelines.CreateRunWithHttpMessagesAsync(resourceGroup, dataFactoryName, pipelineName)).Body;
    return req.CreateResponse(HttpStatusCode.OK, "Pipeline run ID: " + runResponse.RunId);
}
/// <summary>
/// Entry point: authenticates with a service principal and starts a run of the
/// configured Data Factory pipeline, printing the run ID.
/// </summary>
static void Main(string[] args)
{
    // SECURITY FIX: the previous revision committed a live tenant ID,
    // application ID, client secret and subscription ID to source control.
    // Those credentials must be considered compromised and rotated; they are
    // now read from environment variables instead.
    string tenantID = Environment.GetEnvironmentVariable("AZURE_TENANT_ID");
    string applicationId = Environment.GetEnvironmentVariable("AZURE_CLIENT_ID");
    string authenticationKey = Environment.GetEnvironmentVariable("AZURE_CLIENT_SECRET");
    string subscriptionId = Environment.GetEnvironmentVariable("AZURE_SUBSCRIPTION_ID");
    string resourceGroup = "demo"; // resource group where the data factory resides
    string dataFactoryName = "adflab1"; // globally-unique data factory name
    string pipelineName = "pipeline2"; // name of the pipeline to run

    // Authenticate and create a data factory management client.
    var context = new AuthenticationContext("https://login.windows.net/" + tenantID);
    ClientCredential cc = new ClientCredential(applicationId, authenticationKey);
    // GetAwaiter().GetResult() rather than .Result: surfaces the original
    // exception instead of an AggregateException wrapper.
    AuthenticationResult result = context.AcquireTokenAsync("https://management.azure.com/", cc).GetAwaiter().GetResult();
    ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);
    var client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };

    CreateRunResponse runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(resourceGroup, dataFactoryName, pipelineName).GetAwaiter().GetResult().Body;
    Console.WriteLine("Pipeline run ID: " + runResponse.RunId);

    // Wait in the console window on completion.
    Console.ReadKey();
}
/// <summary>
/// Replays the recorded "Pipelines_CreateRun" example: creates a run using the
/// example's typed parameters and validates the response body.
/// </summary>
public void Pipelines_CreateRun()
{
    RunTest("Pipelines_CreateRun", (example, client, responseCode) =>
    {
        var typedParameters = GetTypedParameter <Dictionary <string, object> >(example, client, "parameters");
        CreateRunResponse response = client.Pipelines.CreateRun(RGN(example), FN(example), PN(example), parameters: typedParameters);
        CheckResponseBody(example, client, responseCode, response);
    });
}
/// <summary>
/// Verifies that creating a run for a freshly-created disposable pipeline
/// yields a non-null run ID.
/// </summary>
public async Task TestPipelineRun()
{
    PipelineClient client = CreateClient();
    await using DisposablePipeline disposablePipeline = await DisposablePipeline.Create(client, this.Recording);

    CreateRunResponse run = await client.CreatePipelineRunAsync(disposablePipeline.Name);

    Assert.NotNull(run.RunId);
}
/// <summary>
/// Entry point: authenticates with a service principal, starts a run of the
/// configured pipeline, and polls the run until it leaves "InProgress".
/// </summary>
static void Main(string[] args)
{
    // SECURITY FIX: tenant/subscription/application IDs and the client secret
    // were previously hard-coded here and are compromised — rotate them. They
    // are now read from environment variables.
    string tenantID = Environment.GetEnvironmentVariable("AZURE_TENANT_ID");
    string subscriptionId = Environment.GetEnvironmentVariable("AZURE_SUBSCRIPTION_ID");
    string applicationId = Environment.GetEnvironmentVariable("AZURE_CLIENT_ID");
    string authenticationKey = Environment.GetEnvironmentVariable("AZURE_CLIENT_SECRET");
    string resourceGroup = "";
    string dataFactoryName = "";
    string pipelineName = "";

    // Authenticate and create a data factory management client.
    var context = new AuthenticationContext("https://login.windows.net/" + tenantID);
    ClientCredential cc = new ClientCredential(applicationId, authenticationKey);
    AuthenticationResult result = context.AcquireTokenAsync("https://management.azure.com/", cc).GetAwaiter().GetResult();
    ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);
    var client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };

    // FIX: removed `client.uri = new uri` and a stray line of prose pasted into
    // the code (neither compiled), and removed a second CreateRun(...) call that
    // started the same pipeline twice on every invocation. Also removed the
    // unused `region` local.
    Console.WriteLine("Creating Pipeline run...");
    CreateRunResponse runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(resourceGroup, dataFactoryName, pipelineName).GetAwaiter().GetResult().Body;
    Console.WriteLine("Pipeline run ID: " + runResponse.RunId);

    // Monitor the pipeline run until it is no longer in progress.
    Console.WriteLine("Checking Pipeline Run Status...");
    PipelineRun pipelineRun;
    while (true)
    {
        pipelineRun = client.PipelineRuns.Get(resourceGroup, dataFactoryName, runResponse.RunId);
        Console.WriteLine("Status: " + pipelineRun.Status);
        if (pipelineRun.Status == "InProgress")
        {
            System.Threading.Thread.Sleep(15000); // poll every 15 seconds
        }
        else
        {
            break;
        }
    }
}
/// <summary>
/// Starts a run of the named pipeline against the test environment's endpoint
/// and prints the resulting run ID.
/// </summary>
private async Task RunPipeline(string pipelineName)
{
    var client = new PipelineClient(endpoint: new Uri(TestEnvironment.EndpointUrl), credential: new DefaultAzureCredential());

    Console.WriteLine("Running pipeline.");
    CreateRunResponse run = await client.CreatePipelineRunAsync(pipelineName);
    Console.WriteLine("Run started. ID: {0}", run.RunId);
}
/// <summary>
/// Starts a run of the "Test-Pipeline" pipeline in the test workspace and
/// prints the resulting run ID.
/// </summary>
public async Task RunPipeline()
{
    const string PipelineName = "Test-Pipeline";

    var client = new PipelineClient(endpoint: new Uri(TestEnvironment.WorkspaceUrl), credential: new DefaultAzureCredential());

    Console.WriteLine("Running pipeline.");
    CreateRunResponse run = await client.CreatePipelineRunAsync(PipelineName);
    Console.WriteLine("Run started. ID: {0}", run.RunId);
}
/// <summary>
/// End-to-end driver: authenticates, provisions the data factory artifacts via
/// the ADF helper class, starts a pipeline run, then monitors the run and its
/// activity. Each step depends on the one before it — do not reorder.
/// </summary>
public static void initialise()
{
    DataFactoryManagementClient client = ADF.GetAuthUser();
    ADF.CreateADF(client);
    ADF.CreateBlobLS(client);   // helper names suggest: blob linked service,
    ADF.CreateDS(client);       // dataset,
    ADF.CreatePipeline(client); // and pipeline — confirm against ADF class
    CreateRunResponse runResponse = ADF.CreatePipelineRun(client);
    PipelineRun pipelineRun = ADF.MonitorPipelineStatus(client, runResponse);
    ADF.MonitorActivityStatus(client, runResponse, pipelineRun);
}
/// <summary>
/// Documentation-sample walkthrough: create a pipeline client, then create,
/// retrieve, run, list and delete a pipeline. The #region Snippet: markers
/// delimit spans extracted into published docs — keep the code inside them
/// unchanged and minimal.
/// </summary>
public async Task RunPipeline()
{
    #region Snippet:CreatePipelineClientPrep
#if SNIPPET
    // Replace the string below with your actual endpoint url.
    string endpoint = "<my-endpoint-url>";
#else
    string endpoint = TestEnvironment.EndpointUrl;
#endif
    string pipelineName = "Test-Pipeline";
    #endregion

    #region Snippet:CreatePipelineClient
    var client = new PipelineClient(endpoint: new Uri(endpoint), credential: new DefaultAzureCredential());
    #endregion

    #region Snippet:CreatePipeline
    PipelineCreateOrUpdatePipelineOperation operation = client.StartCreateOrUpdatePipeline(pipelineName, new PipelineResource());
    Response <PipelineResource> createdPipeline = await operation.WaitForCompletionAsync();
    #endregion

    #region Snippet:RetrievePipeline
    PipelineResource retrievedPipeline = client.GetPipeline(pipelineName);
    Console.WriteLine("Pipeline ID: {0}", retrievedPipeline.Id);
    #endregion

    #region Snippet:RunPipeline
    Console.WriteLine("Running pipeline.");
    CreateRunResponse runOperation = await client.CreatePipelineRunAsync(pipelineName);
    Console.WriteLine("Run started. ID: {0}", runOperation.RunId);
    #endregion

    #region Snippet:ListPipelines
    Pageable <PipelineResource> pipelines = client.GetPipelinesByWorkspace();
    foreach (PipelineResource pipeline in pipelines)
    {
        Console.WriteLine(pipeline.Name);
    }
    #endregion

    #region Snippet:DeletePipeline
    PipelineDeletePipelineOperation deleteOperation = client.StartDeletePipeline(pipelineName);
    await deleteOperation.WaitForCompletionResponseAsync();
    #endregion
}
/// <summary>
/// Starts a run for a disposable pipeline, then verifies that querying runs
/// across the workspace returns at least one result.
/// </summary>
public async Task TestQueryRuns()
{
    PipelineClient pipelineClient = CreatePipelineClient();
    PipelineRunClient runClient = CreatePipelineRunClient();
    await using DisposablePipeline disposablePipeline = await DisposablePipeline.Create(pipelineClient, this.Recording);

    CreateRunResponse run = await pipelineClient.CreatePipelineRunAsync(disposablePipeline.Name);
    Assert.NotNull(run.RunId);

    // Query over the widest possible time window so the run just created is included.
    var filter = new RunFilterParameters(DateTimeOffset.MinValue, DateTimeOffset.MaxValue);
    PipelineRunsQueryResponse queryResponse = await runClient.QueryPipelineRunsByWorkspaceAsync(filter);
    Assert.GreaterOrEqual(queryResponse.Value.Count, 1);
}
/// <summary>
/// Starts a run for a disposable pipeline and verifies the run can be
/// cancelled successfully.
/// </summary>
public async Task TestCancelRun()
{
    PipelineClient pipelineClient = CreatePipelineClient();
    PipelineRunClient runClient = CreatePipelineRunClient();
    await using DisposablePipeline disposablePipeline = await DisposablePipeline.Create(pipelineClient, this.Recording);

    CreateRunResponse run = await pipelineClient.CreatePipelineRunAsync(disposablePipeline.Name);
    Assert.NotNull(run.RunId);

    Response cancelResponse = await runClient.CancelPipelineRunAsync(run.RunId);
    cancelResponse.AssertSuccess();
}
/// <summary>
/// Starts a run of the configured pipeline, passing the blob input/output
/// paths from <c>Constants</c> as parameters, and returns the run response.
/// </summary>
/// <param name="client">An authenticated Data Factory management client.</param>
/// <returns>The response describing the created run (including its ID).</returns>
public static CreateRunResponse CreatePipelineRun(DataFactoryManagementClient client)
{
    Console.WriteLine("Creating pipeline run...");

    var runParameters = new Dictionary <string, object>
    {
        { "inputPath", Constants.inputBlobPath },
        { "outputPath", Constants.outputBlobPath }
    };

    CreateRunResponse response = client.Pipelines.CreateRunWithHttpMessagesAsync(
        Constants.resourceGroup,
        Constants.dataFactoryName,
        Constants.pipelineName,
        parameters: runParameters).Result.Body;

    Console.WriteLine("Pipeline run ID: " + response.RunId);
    return response;
}
/// <summary>
/// Starts a run for a disposable pipeline and verifies that fetching the run
/// by ID returns the same run with a populated status.
/// </summary>
public async Task TestGet()
{
    PipelineClient pipelineClient = CreatePipelineClient();
    PipelineRunClient runClient = CreatePipelineRunClient();
    await using DisposablePipeline pipeline = await DisposablePipeline.Create(pipelineClient, this.Recording);

    CreateRunResponse runResponse = await pipelineClient.CreatePipelineRunAsync(pipeline.Name);
    Assert.NotNull(runResponse.RunId);

    PipelineRun run = await runClient.GetPipelineRunAsync(runResponse.RunId);
    // FIX: Assert.AreEqual takes (expected, actual) — the arguments were
    // previously swapped, which produces misleading failure messages.
    Assert.AreEqual(runResponse.RunId, run.RunId);
    Assert.NotNull(run.Status);
}
/// <summary>
/// Records the "Pipelines_CreateRun" example: starts a pipeline run whose
/// "OutputBlobNameList" parameter holds the single output blob name, and
/// returns the new run's ID.
/// </summary>
private string CapturePipelines_CreateRun()
{
    interceptor.CurrentExampleName = "Pipelines_CreateRun";

    // The service expects the blob names as a JSON array.
    JArray outputBlobNames = JArray.FromObject(new[] { outputBlobName });
    var arguments = new Dictionary <string, object> { { "OutputBlobNameList", outputBlobNames } };

    CreateRunResponse response = client.Pipelines.CreateRun(secrets.ResourceGroupName, secrets.FactoryName, pipelineName, parameters: arguments);
    return response.RunId;
}
/// <summary>
/// Queries the activity runs for the given pipeline run (within a ±10-minute
/// window around now) and prints the first activity's output when the pipeline
/// succeeded, or its error otherwise; then waits for a key press.
/// </summary>
public static void MonitorActivityStatus(DataFactoryManagementClient client, CreateRunResponse runResponse, PipelineRun pipelineRun)
{
    Console.WriteLine("Checking copy activity run details...");

    var window = new RunFilterParameters(DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow.AddMinutes(10));
    ActivityRunsQueryResponse queryResponse = client.ActivityRuns.QueryByPipelineRun(
        Constants.resourceGroup, Constants.dataFactoryName, runResponse.RunId, window);

    var firstActivity = queryResponse.Value.First();
    if (pipelineRun.Status == "Succeeded")
    {
        Console.WriteLine(firstActivity.Output);
    }
    else
    {
        Console.WriteLine(firstActivity.Error);
    }

    Console.WriteLine("\nPress any key to exit...");
    Console.ReadKey();
}
/// <summary>
/// Records the "Pipelines_CreateRun" example pair: starts an initial run,
/// waits two minutes, then starts a recovery rerun that references the first
/// run, and returns the rerun's ID.
/// </summary>
private string CapturePipelines_CreateRun()
{
    interceptor.CurrentExampleName = "Pipelines_CreateRun";

    JArray outputBlobNames = JArray.FromObject(new[] { outputBlobName });
    var arguments = new Dictionary <string, object> { { "OutputBlobNameList", outputBlobNames } };

    CreateRunResponse firstRun = client.Pipelines.CreateRun(secrets.ResourceGroupName, secrets.FactoryName, pipelineName, parameters: arguments);

    // Give the first run time to progress before requesting a recovery rerun.
    System.Threading.Thread.Sleep(TimeSpan.FromSeconds(120));

    CreateRunResponse rerun = client.Pipelines.CreateRun(secrets.ResourceGroupName, secrets.FactoryName, pipelineName, isRecovery: true, referencePipelineRunId: firstRun.RunId);
    return rerun.RunId;
}
/// <summary>
/// Starts a run of the named Data Factory pipeline (with or without
/// parameters) and returns a JSON object of the form
/// { "PipelineName": ..., "RunId": ..., "Status": "InProgress" }.
/// </summary>
/// <param name="subscriptionId">Subscription hosting the data factory.</param>
/// <param name="resourceGroup">Resource group containing the data factory.</param>
/// <param name="factoryName">Name of the data factory.</param>
/// <param name="pipelineName">Name of the pipeline to run.</param>
/// <param name="parameters">Pipeline parameters; may be empty.</param>
/// <param name="logging">Logger for progress messages.</param>
public static JObject ExecutePipelineMethod(string subscriptionId, string resourceGroup, string factoryName, string pipelineName, Dictionary <string, object> parameters, Logging logging)
{
    #region CreatePipelineRun
    // Create a data factory management client.
    logging.LogInformation("Creating ADF connectivity client.");
    string outputString = string.Empty;
    using (var client = DataFactoryClient.CreateDataFactoryClient(subscriptionId))
    {
        // Run the pipeline, forwarding parameters only when some were supplied.
        CreateRunResponse runResponse;
        if (parameters.Count == 0)
        {
            logging.LogInformation("Called pipeline without parameters.");
            runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(
                resourceGroup, factoryName, pipelineName).Result.Body;
        }
        else
        {
            logging.LogInformation("Called pipeline with parameters.");
            logging.LogInformation("Number of parameters provided: " + parameters.Count);
            System.Threading.Thread.Sleep(1000);
            runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(
                resourceGroup, factoryName, pipelineName, parameters: parameters).Result.Body;
            // BUG FIX: the previous revision overwrote runResponse here with a
            // new CreateRunResponse containing a random GUID, discarding the
            // real run ID returned by the service. That debug leftover has been
            // removed so callers receive the genuine run ID.
        }
        logging.LogInformation("Pipeline run ID: " + runResponse.RunId);

        // Status is reported as "InProgress" without polling; the commented-out
        // polling loop from the previous revision was removed as dead code.
        logging.LogInformation("Checking ADF pipeline status.");
        outputString = "{ \"PipelineName\": \"" + pipelineName
            + "\", \"RunId\": \"" + runResponse.RunId
            + "\", \"Status\": \"" + "InProgress" + "\" }";
    }
    #endregion
    JObject outputJson = JObject.Parse(outputString);
    return(outputJson);
}
/// <summary>
/// Entry point: authenticates with the svc_adf service principal, provisions a
/// data factory plus its linked services, datasets and a copy pipeline, runs
/// the pipeline once, and installs and starts a daily schedule trigger.
/// </summary>
static void Main(string[] args)
{
    // Authenticate against Azure with the svc_adf application (service principal).
    AuthenticationContext context = new AuthenticationContext("https://login.windows.net/" + _repertoireId);
    ClientCredential cc = new ClientCredential(_applicationId, _applicationKey);
    AuthenticationResult result = context.AcquireTokenAsync("https://management.azure.com/", cc).Result;
    ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);
    DataFactoryManagementClient ADFclient = new DataFactoryManagementClient(cred) { SubscriptionId = _abonnementId };

    // Create the Azure Data Factory and wait until provisioning completes.
    Factory dataFactory = new Factory
    {
        Location = _region,
        Identity = new FactoryIdentity()
    };
    ADFclient.Factories.CreateOrUpdate(_ressourceGroup, _dataFactoryName, dataFactory);
    Console.WriteLine(SafeJsonConvert.SerializeObject(dataFactory, ADFclient.SerializationSettings));
    var toto = ADFclient.Factories.Get(_ressourceGroup, _dataFactoryName).ProvisioningState; // NOTE(review): unused local — candidate for removal
    while (ADFclient.Factories.Get(_ressourceGroup, _dataFactoryName).ProvisioningState == "PendingCreation")
    {
        System.Threading.Thread.Sleep(1000);
        Console.WriteLine("*");
    }

    // Creation of a self-hosted Integration Runtime (kept disabled).
    //IntegrationRuntimeResource integrationRuntimeResource = new IntegrationRuntimeResource(
    //new SelfHostedIntegrationRuntime
    //{
    //    Description = "L'Integration Runtime du projet ..."
    //}
    //);
    //ADFclient.IntegrationRuntimes.CreateOrUpdate(_ressourceGroup, _dataFactoryName, _IntegrationRuntimeName, integrationRuntimeResource);
    //Console.WriteLine(SafeJsonConvert.SerializeObject(integrationRuntimeResource, ADFclient.SerializationSettings));
    //Console.WriteLine("Authkey : " + ADFclient.IntegrationRuntimes.ListAuthKeys(_ressourceGroup, _dataFactoryName, _IntegrationRuntimeName).AuthKey1);

    // Linked service for the on-premises file share, reached through the
    // self-hosted integration runtime.
    IntegrationRuntimeReference integrationRuntimeReference = new IntegrationRuntimeReference(_IntegrationRuntimeName);
    // NOTE(security): hard-coded password committed to source — rotate it and
    // load it from Key Vault or configuration instead.
    SecureString secureString = new SecureString("MonPassword");
    LinkedServiceResource FS_PartageOnPremise = new LinkedServiceResource(
        new FileServerLinkedService
        {
            Description = "Service lié référençant un espace partagé dans le réseau privé de l'entreprise",
            ConnectVia = integrationRuntimeReference,
            Host = @"\\IRAutoHeberge\Dépôt",
            UserId = "chsauget",
            Password = secureString
        }
    );
    ADFclient.LinkedServices.CreateOrUpdate(_ressourceGroup, _dataFactoryName, _FS_PartageOnPremiseName, FS_PartageOnPremise);
    Console.WriteLine(SafeJsonConvert.SerializeObject(FS_PartageOnPremise, ADFclient.SerializationSettings));

    // Linked service for the Azure SQL database.
    // NOTE(security): connection string with embedded credentials committed to
    // source — rotate and externalize.
    SecureString SQLsecureString = new SecureString("integrated security=False;encrypt=True;connection timeout=30;data source=adflivre.database.windows.net;initial catalog=advwrks;user id=chsauget;Password=toto");
    LinkedServiceResource SQDB_AdventureWorks = new LinkedServiceResource(
        new AzureSqlDatabaseLinkedService
        {
            Description = "Service lié référençant un espace partagé dans le réseau privé de l'entreprise",
            ConnectionString = SQLsecureString,
        }
    );
    ADFclient.LinkedServices.CreateOrUpdate(_ressourceGroup, _dataFactoryName, _SQDB_AdventureWorksName, SQDB_AdventureWorks);
    Console.WriteLine(SafeJsonConvert.SerializeObject(SQDB_AdventureWorks, ADFclient.SerializationSettings));

    // Dataset FS_Customer: tab-separated Customer.csv on the file share.
    DatasetResource FS_Customer = new DatasetResource(
        new FileShareDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = _FS_PartageOnPremiseName }
            , FolderPath = "AdventureWorks CSV"
            , FileName = "Customer.csv"
            , Format = new TextFormat { ColumnDelimiter = "\t", RowDelimiter = "\n", FirstRowAsHeader = false }
            , Structure = new List <DatasetDataElement>
            {
                new DatasetDataElement { Name = "CustomerID", Type = "Int32" },
                new DatasetDataElement { Name = "PersonID", Type = "Int32" },
                new DatasetDataElement { Name = "StoreID", Type = "Int32" },
                new DatasetDataElement { Name = "TerritoryID", Type = "Int32" },
                new DatasetDataElement { Name = "AccountNumber", Type = "String" },
                new DatasetDataElement { Name = "rowguid", Type = "String" },
                new DatasetDataElement { Name = "ModifiedDate", Type = "DateTime" }
            }
        }
        , name: _FS_CustomerName
    );
    ADFclient.Datasets.CreateOrUpdate(_ressourceGroup, _dataFactoryName, _FS_CustomerName, FS_Customer);
    Console.WriteLine(SafeJsonConvert.SerializeObject(FS_Customer, ADFclient.SerializationSettings));

    // Dataset SQDB_Col_Customer: the col.Customer table in the Azure SQL database.
    DatasetResource SQDB_Col_Customer = new DatasetResource(
        new AzureSqlTableDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = _SQDB_AdventureWorksName },
            TableName = "col.Customer"
        }
        , name: _SQDB_Col_CustomerName
    );
    // NOTE(review): this registers the dataset under _SQDB_AdventureWorksName
    // (the linked-service name); _SQDB_Col_CustomerName looks intended —
    // confirm before changing.
    ADFclient.Datasets.CreateOrUpdate(_ressourceGroup, _dataFactoryName, _SQDB_AdventureWorksName, SQDB_Col_Customer);
    Console.WriteLine(SafeJsonConvert.SerializeObject(SQDB_Col_Customer, ADFclient.SerializationSettings));

    // Copy activity moving the Customer file into the SQL table.
    CopyActivity CustomerCopy = new CopyActivity
    {
        Name = "Copy - Customer"
        , Inputs = new List <DatasetReference> { new DatasetReference() { ReferenceName = _FS_CustomerName } }
        , Outputs = new List <DatasetReference> { new DatasetReference() { ReferenceName = _SQDB_Col_CustomerName } }
        , Source = new FileSystemSource { }
        // NOTE(review): the sink is AzureTableSink while the output dataset is
        // an Azure SQL table — SqlSink looks intended; confirm.
        , Sink = new AzureTableSink { }
    };

    // Pipeline hosting the copy activity.
    PipelineResource PipelineCustomer = new PipelineResource
    {
        Activities = new List <Activity> { CustomerCopy }
        , Folder = new PipelineFolder { Name = "AdventureWorks" }
    };
    ADFclient.Pipelines.CreateOrUpdate(_ressourceGroup, _dataFactoryName, "Col_Customer", PipelineCustomer);
    Console.WriteLine(SafeJsonConvert.SerializeObject(PipelineCustomer, ADFclient.SerializationSettings));

    // Request one run of the pipeline.
    CreateRunResponse runResponse = ADFclient.Pipelines.CreateRunWithHttpMessagesAsync(_ressourceGroup, _dataFactoryName, "Col_Customer").Result.Body;

    // Monitor the pipeline run.
    // NOTE(review): this loop polls with no delay between requests — consider
    // adding a Thread.Sleep to avoid hammering the service.
    PipelineRun run = ADFclient.PipelineRuns.Get(_ressourceGroup, _dataFactoryName, runResponse.RunId);
    while (run.Status == "InProgress")
    {
        run = ADFclient.PipelineRuns.Get(_ressourceGroup, _dataFactoryName, runResponse.RunId);
        Console.WriteLine("Status: " + run.Status);
    }

    // Daily schedule trigger (01:00), created and then started.
    TriggerResource scheduleTrigger = new TriggerResource(
        new ScheduleTrigger
        {
            Pipelines = new List <TriggerPipelineReference> { new TriggerPipelineReference { PipelineReference = new PipelineReference("Col_Customer") } }
            , Recurrence = new ScheduleTriggerRecurrence
            {
                StartTime = DateTime.Parse("2019-03-30T01:00:00Z")
                , Frequency = "Day"
                , Interval = 1
            }
        }
        , name: "Daily_01h_Schedule"
    );
    ADFclient.Triggers.CreateOrUpdate(_ressourceGroup, _dataFactoryName, "Daily_01h_Schedule", scheduleTrigger);
    Console.WriteLine(SafeJsonConvert.SerializeObject(scheduleTrigger, ADFclient.SerializationSettings));
    ADFclient.Triggers.BeginStart(_ressourceGroup, _dataFactoryName, "Daily_01h_Schedule");
}
/// <summary>
/// Quickstart entry point: creates a data factory, an Azure Storage linked
/// service, a parameterized blob dataset and a blob-to-blob copy pipeline,
/// then runs the pipeline and monitors the run and its copy activity.
/// </summary>
static async System.Threading.Tasks.Task Main(string[] args)
{
    // SECURITY FIX: the storage account key was previously committed to source
    // (and must be rotated); it is now read from an environment variable. The
    // unused service-principal locals (tenant, user, app ID, key) were removed —
    // authentication below uses AzureServiceTokenProvider (managed identity).
    string subscriptionId = "b67993e8-0937-4812-af89-520a495da302";
    string resourceGroup = "vchmeha";
    string region = "West Central US";
    string dataFactoryName = "ADFv2HaHu";
    string storageAccount = "fromblob";
    string storageKey = Environment.GetEnvironmentVariable("STORAGE_ACCOUNT_KEY");

    // Container/folder all files are copied from, and the destination folder.
    string inputBlobPath = "adftutorial/input";
    string outputBlobPath = "adftutorial/output";

    // Names of the Azure Storage linked service, blob dataset, and pipeline.
    string storageLinkedServiceName = "AzureStorageLinkedService";
    string blobDatasetName = "BlobDataset";
    string pipelineName = "Adfv2QuickStartPipeline";

    // Authenticate via managed identity and create a management client.
    var azureServiceTokenProvider = new AzureServiceTokenProvider();
    var token = await azureServiceTokenProvider.GetAccessTokenAsync("https://management.azure.com/");
    var cred = new TokenCredentials(token);
    var client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };

    // Create the data factory and wait for provisioning to finish.
    Console.WriteLine("Creating data factory " + dataFactoryName + "...");
    Factory dataFactory = new Factory { Location = region, Identity = new FactoryIdentity() };
    client.Factories.CreateOrUpdate(resourceGroup, dataFactoryName, dataFactory);
    Console.WriteLine(SafeJsonConvert.SerializeObject(dataFactory, client.SerializationSettings));
    while (client.Factories.Get(resourceGroup, dataFactoryName).ProvisioningState == "PendingCreation")
    {
        System.Threading.Thread.Sleep(1000);
    }

    // Create an Azure Storage linked service.
    Console.WriteLine("Creating linked service " + storageLinkedServiceName + "...");
    LinkedServiceResource storageLinkedService = new LinkedServiceResource(
        new AzureStorageLinkedService
        {
            ConnectionString = new SecureString(
                "DefaultEndpointsProtocol=https;AccountName=" + storageAccount + ";AccountKey=" + storageKey)
        }
    );
    client.LinkedServices.CreateOrUpdate(resourceGroup, dataFactoryName, storageLinkedServiceName, storageLinkedService);
    Console.WriteLine(SafeJsonConvert.SerializeObject(storageLinkedService, client.SerializationSettings));

    // Create a blob dataset whose folder path is supplied via a "path" parameter.
    Console.WriteLine("Creating dataset " + blobDatasetName + "...");
    DatasetResource blobDataset = new DatasetResource(
        new AzureBlobDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = storageLinkedServiceName },
            FolderPath = new Expression { Value = "@{dataset().path}" },
            Parameters = new Dictionary<string, ParameterSpecification>
            {
                { "path", new ParameterSpecification { Type = ParameterType.String } }
            }
        }
    );
    client.Datasets.CreateOrUpdate(resourceGroup, dataFactoryName, blobDatasetName, blobDataset);
    Console.WriteLine(SafeJsonConvert.SerializeObject(blobDataset, client.SerializationSettings));

    // Create a pipeline with a single blob-to-blob copy activity; the input and
    // output paths are pipeline parameters forwarded to the dataset parameter.
    Console.WriteLine("Creating pipeline " + pipelineName + "...");
    PipelineResource pipeline = new PipelineResource
    {
        Parameters = new Dictionary<string, ParameterSpecification>
        {
            { "inputPath", new ParameterSpecification { Type = ParameterType.String } },
            { "outputPath", new ParameterSpecification { Type = ParameterType.String } }
        },
        Activities = new List<Activity>
        {
            new CopyActivity
            {
                Name = "CopyFromBlobToBlob",
                Inputs = new List<DatasetReference>
                {
                    new DatasetReference()
                    {
                        ReferenceName = blobDatasetName,
                        Parameters = new Dictionary<string, object> { { "path", "@pipeline().parameters.inputPath" } }
                    }
                },
                Outputs = new List<DatasetReference>
                {
                    new DatasetReference
                    {
                        ReferenceName = blobDatasetName,
                        Parameters = new Dictionary<string, object> { { "path", "@pipeline().parameters.outputPath" } }
                    }
                },
                Source = new BlobSource { },
                Sink = new BlobSink { }
            }
        }
    };
    client.Pipelines.CreateOrUpdate(resourceGroup, dataFactoryName, pipelineName, pipeline);
    Console.WriteLine(SafeJsonConvert.SerializeObject(pipeline, client.SerializationSettings));

    // Start a pipeline run with the concrete input/output paths.
    Console.WriteLine("Creating pipeline run...");
    Dictionary<string, object> parameters = new Dictionary<string, object>
    {
        { "inputPath", inputBlobPath },
        { "outputPath", outputBlobPath }
    };
    // FIX: await rather than block on .Result — this method is already async.
    CreateRunResponse runResponse = (await client.Pipelines.CreateRunWithHttpMessagesAsync(
        resourceGroup, dataFactoryName, pipelineName, parameters: parameters)).Body;
    Console.WriteLine("Pipeline run ID: " + runResponse.RunId);

    // Poll the run until it leaves the InProgress/Queued states.
    Console.WriteLine("Checking pipeline run status...");
    PipelineRun pipelineRun;
    while (true)
    {
        pipelineRun = client.PipelineRuns.Get(resourceGroup, dataFactoryName, runResponse.RunId);
        Console.WriteLine("Status: " + pipelineRun.Status);
        if (pipelineRun.Status == "InProgress" || pipelineRun.Status == "Queued")
        {
            System.Threading.Thread.Sleep(15000);
        }
        else
        {
            break;
        }
    }

    // Show the copy activity's output on success, or its error otherwise.
    Console.WriteLine("Checking copy activity run details...");
    RunFilterParameters filterParams = new RunFilterParameters(DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow.AddMinutes(10));
    ActivityRunsQueryResponse queryResponse = client.ActivityRuns.QueryByPipelineRun(resourceGroup, dataFactoryName, runResponse.RunId, filterParams);
    if (pipelineRun.Status == "Succeeded")
    {
        Console.WriteLine(queryResponse.Value.First().Output);
    }
    else
    {
        Console.WriteLine(queryResponse.Value.First().Error);
    }

    Console.WriteLine("\nPress any key to exit...");
    Console.ReadKey();
}
// Quickstart sample: creates an ADF v2 data factory, an Azure Storage linked service,
// a parameterized blob dataset, and a blob-to-blob copy pipeline, then runs the
// pipeline and polls it to completion.
// NOTE(review): blocks on .Result throughout — acceptable in a console sample,
// a deadlock risk in UI/ASP.NET contexts.
static void Main(string[] args)
{
    // Set variables
    string tenantID = "<your tenant ID>";
    string applicationId = "<your application ID>";
    string authenticationKey = "<your authentication key for the application>";
    string subscriptionId = "<your subscription ID where the data factory resides>";
    string resourceGroup = "<your resource group where the data factory resides>";
    string region = "<the location of your resource group>";
    string dataFactoryName = "<specify the name of data factory to create. It must be globally unique.>";
    string storageAccount = "<your storage account name to copy data>";
    string storageKey = "<your storage account key>";
    // specify the container and input folder from which all files
    // need to be copied to the output folder.
    string inputBlobPath = "<path to existing blob(s) to copy data from, e.g. containername/inputdir>";
    // specify the container and output folder where the files are copied
    string outputBlobPath = "<the blob path to copy data to, e.g. containername/outputdir>";
    // name of the Azure Storage linked service, blob dataset, and the pipeline
    string storageLinkedServiceName = "AzureStorageLinkedService";
    string blobDatasetName = "BlobDataset";
    string pipelineName = "Adfv2QuickStartPipeline";

    // Authenticate with a service principal and create a data factory management client
    var context = new AuthenticationContext("https://login.microsoftonline.com/" + tenantID);
    ClientCredential cc = new ClientCredential(applicationId, authenticationKey);
    AuthenticationResult result = context.AcquireTokenAsync(
        "https://management.azure.com/", cc).Result;
    ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);
    var client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };

    // Create a data factory
    Console.WriteLine("Creating data factory " + dataFactoryName + "...");
    Factory dataFactory = new Factory { Location = region, Identity = new FactoryIdentity() };
    client.Factories.CreateOrUpdate(resourceGroup, dataFactoryName, dataFactory);
    Console.WriteLine(
        SafeJsonConvert.SerializeObject(dataFactory, client.SerializationSettings));
    // Poll until the factory has finished provisioning before creating child resources
    while (client.Factories.Get(resourceGroup, dataFactoryName).ProvisioningState == "PendingCreation")
    {
        System.Threading.Thread.Sleep(1000);
    }

    // Create an Azure Storage linked service
    Console.WriteLine("Creating linked service " + storageLinkedServiceName + "...");
    LinkedServiceResource storageLinkedService = new LinkedServiceResource(
        new AzureStorageLinkedService
        {
            // NOTE(review): account key is embedded in the connection string; prefer
            // Key Vault references in real deployments.
            ConnectionString = new SecureString(
                "DefaultEndpointsProtocol=https;AccountName=" + storageAccount +
                ";AccountKey=" + storageKey)
        }
    );
    client.LinkedServices.CreateOrUpdate(
        resourceGroup, dataFactoryName, storageLinkedServiceName, storageLinkedService);
    Console.WriteLine(SafeJsonConvert.SerializeObject(
        storageLinkedService, client.SerializationSettings));

    // Create an Azure Blob dataset; the folder path is supplied per-run via the
    // "path" dataset parameter (referenced by the @{dataset().path} expression)
    Console.WriteLine("Creating dataset " + blobDatasetName + "...");
    DatasetResource blobDataset = new DatasetResource(
        new AzureBlobDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = storageLinkedServiceName },
            FolderPath = new Expression { Value = "@{dataset().path}" },
            Parameters = new Dictionary<string, ParameterSpecification>
            {
                { "path", new ParameterSpecification { Type = ParameterType.String } }
            }
        }
    );
    client.Datasets.CreateOrUpdate(
        resourceGroup, dataFactoryName, blobDatasetName, blobDataset);
    Console.WriteLine(
        SafeJsonConvert.SerializeObject(blobDataset, client.SerializationSettings));

    // Create a pipeline with a copy activity; the same blob dataset is used for both
    // input and output, bound to different paths through pipeline parameters
    Console.WriteLine("Creating pipeline " + pipelineName + "...");
    PipelineResource pipeline = new PipelineResource
    {
        Parameters = new Dictionary<string, ParameterSpecification>
        {
            { "inputPath", new ParameterSpecification { Type = ParameterType.String } },
            { "outputPath", new ParameterSpecification { Type = ParameterType.String } }
        },
        Activities = new List<Activity>
        {
            new CopyActivity
            {
                Name = "CopyFromBlobToBlob",
                Inputs = new List<DatasetReference>
                {
                    new DatasetReference()
                    {
                        ReferenceName = blobDatasetName,
                        Parameters = new Dictionary<string, object>
                        {
                            { "path", "@pipeline().parameters.inputPath" }
                        }
                    }
                },
                Outputs = new List<DatasetReference>
                {
                    new DatasetReference
                    {
                        ReferenceName = blobDatasetName,
                        Parameters = new Dictionary<string, object>
                        {
                            { "path", "@pipeline().parameters.outputPath" }
                        }
                    }
                },
                Source = new BlobSource { },
                Sink = new BlobSink { }
            }
        }
    };
    client.Pipelines.CreateOrUpdate(resourceGroup, dataFactoryName, pipelineName, pipeline);
    Console.WriteLine(SafeJsonConvert.SerializeObject(pipeline, client.SerializationSettings));

    // Create a pipeline run, passing the concrete blob paths as run parameters
    Console.WriteLine("Creating pipeline run...");
    Dictionary<string, object> parameters = new Dictionary<string, object>
    {
        { "inputPath", inputBlobPath },
        { "outputPath", outputBlobPath }
    };
    CreateRunResponse runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(
        resourceGroup, dataFactoryName, pipelineName, parameters: parameters
    ).Result.Body;
    Console.WriteLine("Pipeline run ID: " + runResponse.RunId);

    // Monitor the pipeline run: keep polling while it is queued or in progress
    Console.WriteLine("Checking pipeline run status...");
    PipelineRun pipelineRun;
    while (true)
    {
        pipelineRun = client.PipelineRuns.Get(
            resourceGroup, dataFactoryName, runResponse.RunId);
        Console.WriteLine("Status: " + pipelineRun.Status);
        if (pipelineRun.Status == "InProgress" || pipelineRun.Status == "Queued")
        {
            System.Threading.Thread.Sleep(15000);
        }
        else
        {
            break;
        }
    }

    // Check the copy activity run details; the +/-10 minute window filters the
    // activity-run query around "now"
    Console.WriteLine("Checking copy activity run details...");
    RunFilterParameters filterParams = new RunFilterParameters(
        DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow.AddMinutes(10));
    ActivityRunsQueryResponse queryResponse = client.ActivityRuns.QueryByPipelineRun(
        resourceGroup, dataFactoryName, runResponse.RunId, filterParams);
    if (pipelineRun.Status == "Succeeded")
    {
        Console.WriteLine(queryResponse.Value.First().Output);
    }
    else
    {
        Console.WriteLine(queryResponse.Value.First().Error);
    }

    Console.WriteLine("\nPress any key to exit...");
    Console.ReadKey();
}
// Tutorial sample: creates a data factory with an Azure Storage and an Azure SQL
// Database linked service, a delimited-text blob dataset and a SQL table dataset,
// then runs a blob-to-SQL copy pipeline and polls it to completion.
// NOTE(review): blocks on .Result throughout — fine for a console sample only.
static void Main(string[] args)
{
    // Set variables
    string tenantID = "<tenant ID>";
    string applicationId = "<Activity directory application ID>";
    string authenticationKey = "<Activity directory application authentication key>";
    string subscriptionId = "<subscription ID>";
    string resourceGroup = "<resource group name>";
    // Note that the data stores (Azure Storage, Azure SQL Database, etc.) and computes (HDInsight, etc.) used by data factory can be in other regions.
    string region = "East US";
    string dataFactoryName = "<name of the data factory>"; //must be globally unique

    // Specify the source Azure Blob information
    string storageAccount = "<name of Azure Storage account>";
    string storageKey = "<key for your Azure Storage account>";
    string inputBlobPath = "adfv2tutorial/";
    string inputBlobName = "inputEmp.txt";

    // Specify the sink Azure SQL Database information.
    // NOTE(review): credentials are embedded in this connection string; use
    // Key Vault or managed identity in real deployments.
    string azureSqlConnString = "Server=tcp:<name of Azure SQL Server>.database.windows.net,1433;Database=spsqldb;User ID=spelluru;Password=Sowmya123;Trusted_Connection=False;Encrypt=True;Connection Timeout=30";
    string azureSqlTableName = "dbo.emp";

    string storageLinkedServiceName = "AzureStorageLinkedService";
    string sqlDbLinkedServiceName = "AzureSqlDbLinkedService";
    string blobDatasetName = "BlobDataset";
    string sqlDatasetName = "SqlDataset";
    string pipelineName = "Adfv2TutorialBlobToSqlCopy";

    // Authenticate with a service principal and create a data factory management client
    var context = new AuthenticationContext("https://login.windows.net/" + tenantID);
    ClientCredential cc = new ClientCredential(applicationId, authenticationKey);
    AuthenticationResult result = context.AcquireTokenAsync("https://management.azure.com/", cc).Result;
    ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);
    var client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };

    // Create data factory and wait for provisioning to finish
    Console.WriteLine("Creating data factory " + dataFactoryName + "...");
    Factory dataFactory = new Factory { Location = region, Identity = new FactoryIdentity() };
    client.Factories.CreateOrUpdate(resourceGroup, dataFactoryName, dataFactory);
    Console.WriteLine(SafeJsonConvert.SerializeObject(dataFactory, client.SerializationSettings));
    while (client.Factories.Get(resourceGroup, dataFactoryName).ProvisioningState == "PendingCreation")
    {
        System.Threading.Thread.Sleep(1000);
    }

    // Create an Azure Storage linked service
    Console.WriteLine("Creating linked service " + storageLinkedServiceName + "...");
    LinkedServiceResource storageLinkedService = new LinkedServiceResource(
        new AzureStorageLinkedService
        {
            ConnectionString = new SecureString("DefaultEndpointsProtocol=https;AccountName=" + storageAccount + ";AccountKey=" + storageKey)
        }
    );
    client.LinkedServices.CreateOrUpdate(resourceGroup, dataFactoryName, storageLinkedServiceName, storageLinkedService);
    Console.WriteLine(SafeJsonConvert.SerializeObject(storageLinkedService, client.SerializationSettings));

    // Create an Azure SQL Database linked service
    Console.WriteLine("Creating linked service " + sqlDbLinkedServiceName + "...");
    LinkedServiceResource sqlDbLinkedService = new LinkedServiceResource(
        new AzureSqlDatabaseLinkedService
        {
            ConnectionString = new SecureString(azureSqlConnString)
        }
    );
    client.LinkedServices.CreateOrUpdate(resourceGroup, dataFactoryName, sqlDbLinkedServiceName, sqlDbLinkedService);
    Console.WriteLine(SafeJsonConvert.SerializeObject(sqlDbLinkedService, client.SerializationSettings));

    // Create an Azure Blob dataset: pipe-delimited text with a two-column schema
    Console.WriteLine("Creating dataset " + blobDatasetName + "...");
    DatasetResource blobDataset = new DatasetResource(
        new AzureBlobDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = storageLinkedServiceName },
            FolderPath = inputBlobPath,
            FileName = inputBlobName,
            Format = new TextFormat { ColumnDelimiter = "|" },
            Structure = new List<DatasetDataElement>
            {
                new DatasetDataElement { Name = "FirstName", Type = "String" },
                new DatasetDataElement { Name = "LastName", Type = "String" }
            }
        }
    );
    client.Datasets.CreateOrUpdate(resourceGroup, dataFactoryName, blobDatasetName, blobDataset);
    Console.WriteLine(SafeJsonConvert.SerializeObject(blobDataset, client.SerializationSettings));

    // Create an Azure SQL Database dataset pointing at the sink table
    Console.WriteLine("Creating dataset " + sqlDatasetName + "...");
    DatasetResource sqlDataset = new DatasetResource(
        new AzureSqlTableDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = sqlDbLinkedServiceName },
            TableName = azureSqlTableName
        }
    );
    client.Datasets.CreateOrUpdate(resourceGroup, dataFactoryName, sqlDatasetName, sqlDataset);
    Console.WriteLine(SafeJsonConvert.SerializeObject(sqlDataset, client.SerializationSettings));

    // Create a pipeline with a single blob-to-SQL copy activity
    Console.WriteLine("Creating pipeline " + pipelineName + "...");
    PipelineResource pipeline = new PipelineResource
    {
        Activities = new List<Activity>
        {
            new CopyActivity
            {
                Name = "CopyFromBlobToSQL",
                Inputs = new List<DatasetReference> { new DatasetReference() { ReferenceName = blobDatasetName } },
                Outputs = new List<DatasetReference> { new DatasetReference { ReferenceName = sqlDatasetName } },
                Source = new BlobSource { },
                Sink = new SqlSink { }
            }
        }
    };
    client.Pipelines.CreateOrUpdate(resourceGroup, dataFactoryName, pipelineName, pipeline);
    Console.WriteLine(SafeJsonConvert.SerializeObject(pipeline, client.SerializationSettings));

    // Create a pipeline run
    Console.WriteLine("Creating pipeline run...");
    CreateRunResponse runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(resourceGroup, dataFactoryName, pipelineName).Result.Body;
    Console.WriteLine("Pipeline run ID: " + runResponse.RunId);

    // Monitor the pipeline run.
    // FIX: also keep polling while the run is "Queued" — previously a queued run
    // fell out of the loop immediately and was reported through the failure branch
    // (the other samples in this file already handle both states).
    Console.WriteLine("Checking pipeline run status...");
    PipelineRun pipelineRun;
    while (true)
    {
        pipelineRun = client.PipelineRuns.Get(resourceGroup, dataFactoryName, runResponse.RunId);
        Console.WriteLine("Status: " + pipelineRun.Status);
        if (pipelineRun.Status == "InProgress" || pipelineRun.Status == "Queued")
        {
            System.Threading.Thread.Sleep(15000);
        }
        else
        {
            break;
        }
    }

    // Check the copy activity run details within a +/-10 minute window around "now"
    Console.WriteLine("Checking copy activity run details...");
    List<ActivityRun> activityRuns = client.ActivityRuns.ListByPipelineRun(
        resourceGroup, dataFactoryName, runResponse.RunId,
        DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow.AddMinutes(10),
        pipelineName).ToList();
    // FIX: guard against an empty result so .First() cannot throw
    if (activityRuns.Count == 0)
    {
        Console.WriteLine("No activity runs found for pipeline run " + runResponse.RunId);
    }
    else if (pipelineRun.Status == "Succeeded")
    {
        Console.WriteLine(activityRuns.First().Output);
        //SaveToJson(SafeJsonConvert.SerializeObject(activityRuns.First().Output, client.SerializationSettings), "ActivityRunResult.json", folderForJsons);
    }
    else
    {
        Console.WriteLine(activityRuns.First().Error);
    }

    Console.WriteLine("\nPress any key to exit...");
    Console.ReadKey();
}
// Event handler: authenticates to Azure with the service-principal details entered
// in the form, then triggers the named Data Factory pipeline with the parsed
// "name=value;name=value" parameters and polls the run to completion.
// NOTE(review): blocks on .Result on the UI thread — the form freezes while the
// pipeline runs; consider an async handler in a follow-up.
private void TriggerPipeline(object sender, EventArgs e)
{
    string tenantID = tenant.Text;
    string applicationId = appID.Text;
    string authenticationKey = authKey.Text;
    string subscriptionId = subId.Text;
    string resourceGroup = resourceGrp.Text;
    string dataFactoryName = dsName.Text;
    string pipelineName = plName.Text;
    // Strip all whitespace so "a = 1; b = 2" parses the same as "a=1;b=2"
    string pipelineParameters = Regex.Replace(plParam.Text, @"\s+", string.Empty);

    // Authorize Azure
    try
    {
        outputDetails.Text = string.Empty;
        outputDetails.Text = "Initiating Azure Authorization...";
        AuthenticationContext context = new AuthenticationContext("https://login.windows.net/" + tenantID);
        ClientCredential cc = new ClientCredential(applicationId, authenticationKey);
        AuthenticationResult result = context.AcquireTokenAsync("https://management.azure.com/", cc).Result;
        ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);
        client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };
        outputDetails.Text = outputDetails.Text + "\r\nAzure Authorization Complete...";
    }
    catch (Exception ex)
    {
        outputDetails.Text = outputDetails.Text + "\r\n Azure Authorization failed..." + "\r\n" + ex.InnerException;
        // FIX: previously execution fell through after a failed authorization and
        // went on to use an uninitialized (or stale) client.
        return;
    }

    // Set parameters {parameter_name, value}
    Dictionary<string, object> parameters = new Dictionary<string, object>();
    string[] items = pipelineParameters.TrimEnd(';').Split(';');
    foreach (string item in items)
    {
        // FIX: skip blank entries and malformed pairs instead of throwing
        // IndexOutOfRangeException when the textbox is empty or lacks '='.
        if (string.IsNullOrEmpty(item))
        {
            continue;
        }
        string[] keyValue = item.Split('=');
        if (keyValue.Length >= 2)
        {
            parameters.Add(keyValue[0], keyValue[1]);
        }
    }

    // Trigger Azure Datafactory Pipeline
    try
    {
        outputDetails.Text = outputDetails.Text + "\r\nInitiating pipeline run...";
        CreateRunResponse runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(
            resourceGroup, dataFactoryName, pipelineName, parameters: parameters
        ).Result.Body;
        outputDetails.Text = outputDetails.Text + "\r\nPipeline run ID: " + runResponse.RunId;

        // Poll the run until it leaves the InProgress/Queued states
        outputDetails.Text = outputDetails.Text + "\r\nChecking pipeline run status...";
        PipelineRun pipelineRun;
        while (true)
        {
            pipelineRun = client.PipelineRuns.Get(
                resourceGroup, dataFactoryName, runResponse.RunId);
            outputDetails.Text = outputDetails.Text + "\r\nStatus: " + pipelineRun.Status;
            if (pipelineRun.Status == "InProgress" || pipelineRun.Status == "Queued")
            {
                System.Threading.Thread.Sleep(15000);
            }
            else
            {
                break;
            }
        }
    }
    catch (Exception ex)
    {
        // FIX: removed the stray empty statement (";;")
        outputDetails.Text = "Pipeline Trigger failed..." + "\r\n" + ex.InnerException;
    }
}
// Sample: creates a data factory, Storage + SQL linked services, a 14-column
// delimited blob dataset and a SQL table dataset, then runs a blob-to-SQL copy
// pipeline and polls it to completion.
// SECURITY NOTE(review): this block hardcodes live-looking credentials (service
// principal key, storage account key, SQL password). These are committed secrets
// and must be rotated and moved to Key Vault / configuration — do not ship as-is.
static void Main(string[] args)
{
    // Set variables
    string tenantID = "da67ef1b-ca59-4db2-9a8c-aa8d94617a16";
    string applicationId = "b7fd00c9-74b7-40db-9505-f8a899663e59";
    string authenticationKey = "phBWjqjTfa9qHv0WbozCfMwHlD5SFbD7ugL2R/JbqZY="; // SECURITY: hardcoded SP key
    string subscriptionId = "61727f05-e5e1-4271-b29c-66b7d81a7729";
    string resourceGroup = "dilipazureprojectresourcegroup";
    string region = "EAST US";
    string dataFactoryName = "dilipdatafactoryy";

    // Specify the source Azure Blob information
    string storageAccount = "dilipazurestorageaccount";
    string storageKey = "usk5E1K7Sj2HfX2V/eZRq/hU5k/oZs0n6WgcixaYK+Im6IKGk0er7vFLTbv6yBe5rNdcEqqpHh1YvNUY3vugvw=="; // SECURITY: hardcoded storage key
    string inputBlobPath = "dilipazurecontainer/";
    string inputBlobName = "PATIENT.txt";

    // Specify the sink Azure SQL Database information
    // SECURITY: connection string embeds a SQL login and password
    string azureSqlConnString = "Server=tcp:dilipazureprojectdbserver.database.windows.net,1433;Initial Catalog=dilipazureprojectdb;Persist Security Info=False;User [email protected];Password=password143#;MultipleActiveResultSets=False;Encrypt=True;TrustServerCertificate=False;Connection Timeout=30";
    string azureSqlTableName = "dbo.Patient";

    string storageLinkedServiceName = "AzureStorageLinkedService";
    string sqlDbLinkedServiceName = "AzureSqlDbLinkedService";
    string blobDatasetName = "BlobDataset";
    string sqlDatasetName = "SqlDataset";
    string pipelineName = "Adfv2TutorialBlobToSqlCopyy";

    // Authenticate and create a data factory management client
    var context = new AuthenticationContext("https://login.windows.net/" + tenantID);
    ClientCredential cc = new ClientCredential(applicationId, authenticationKey);
    AuthenticationResult result = context.AcquireTokenAsync("https://management.azure.com/", cc).Result;
    ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);
    var client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };

    // Create a data factory and wait until provisioning completes
    Console.WriteLine("Creating a data factory " + dataFactoryName + "...");
    Factory dataFactory = new Factory { Location = region, Identity = new FactoryIdentity() };
    client.Factories.CreateOrUpdate(resourceGroup, dataFactoryName, dataFactory);
    Console.WriteLine(SafeJsonConvert.SerializeObject(dataFactory, client.SerializationSettings));
    while (client.Factories.Get(resourceGroup, dataFactoryName).ProvisioningState == "PendingCreation")
    {
        System.Threading.Thread.Sleep(1000);
    }

    // Create an Azure Storage linked service
    Console.WriteLine("Creating linked service " + storageLinkedServiceName + "...");
    LinkedServiceResource storageLinkedService = new LinkedServiceResource(
        new AzureStorageLinkedService
        {
            ConnectionString = new SecureString("DefaultEndpointsProtocol=https;AccountName=" + storageAccount + ";AccountKey=" + storageKey)
        }
    );
    client.LinkedServices.CreateOrUpdate(resourceGroup, dataFactoryName, storageLinkedServiceName, storageLinkedService);
    Console.WriteLine(SafeJsonConvert.SerializeObject(storageLinkedService, client.SerializationSettings));

    // Create an Azure SQL Database linked service
    Console.WriteLine("Creating linked service " + sqlDbLinkedServiceName + "...");
    LinkedServiceResource sqlDbLinkedService = new LinkedServiceResource(
        new AzureSqlDatabaseLinkedService
        {
            ConnectionString = new SecureString(azureSqlConnString)
        }
    );
    client.LinkedServices.CreateOrUpdate(resourceGroup, dataFactoryName, sqlDbLinkedServiceName, sqlDbLinkedService);
    Console.WriteLine(SafeJsonConvert.SerializeObject(sqlDbLinkedService, client.SerializationSettings));

    // Create an Azure Blob dataset: pipe-delimited patient file with explicit schema
    Console.WriteLine("Creating dataset " + blobDatasetName + "...");
    DatasetResource blobDataset = new DatasetResource(
        new AzureBlobDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = storageLinkedServiceName },
            FolderPath = inputBlobPath,
            FileName = inputBlobName,
            Format = new TextFormat { ColumnDelimiter = "|" },
            Structure = new List<DatasetDataElement>
            {
                new DatasetDataElement { Name = "ENTERPRISEID", Type = "String" },
                new DatasetDataElement { Name = "LAST_NAME", Type = "String" },
                new DatasetDataElement { Name = "FIRST_NAME", Type = "String" },
                new DatasetDataElement { Name = "DOB", Type = "DATE" },
                new DatasetDataElement { Name = "GENDER", Type = "String" },
                new DatasetDataElement { Name = "SSN", Type = "String" },
                new DatasetDataElement { Name = "ADDRESS1", Type = "String" },
                new DatasetDataElement { Name = "ZIP", Type = "String" },
                new DatasetDataElement { Name = "MRN", Type = "String" },
                new DatasetDataElement { Name = "CITY", Type = "String" },
                new DatasetDataElement { Name = "STATE", Type = "String" },
                new DatasetDataElement { Name = "PHONE", Type = "String" },
                new DatasetDataElement { Name = "EMAIL", Type = "String" },
                new DatasetDataElement { Name = "PRVDR_ID", Type = "String" }
            }
        }
    );
    client.Datasets.CreateOrUpdate(resourceGroup, dataFactoryName, blobDatasetName, blobDataset);
    Console.WriteLine(SafeJsonConvert.SerializeObject(blobDataset, client.SerializationSettings));

    // Create an Azure SQL Database dataset pointing at the sink table
    Console.WriteLine("Creating dataset " + sqlDatasetName + "...");
    DatasetResource sqlDataset = new DatasetResource(
        new AzureSqlTableDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = sqlDbLinkedServiceName },
            TableName = azureSqlTableName
        }
    );
    client.Datasets.CreateOrUpdate(resourceGroup, dataFactoryName, sqlDatasetName, sqlDataset);
    Console.WriteLine(SafeJsonConvert.SerializeObject(sqlDataset, client.SerializationSettings));

    // Create a pipeline with a single blob-to-SQL copy activity
    Console.WriteLine("Creating pipeline " + pipelineName + "...");
    PipelineResource pipeline = new PipelineResource
    {
        Activities = new List<Activity>
        {
            new CopyActivity
            {
                Name = "CopyFromBlobToSQL",
                Inputs = new List<DatasetReference> { new DatasetReference() { ReferenceName = blobDatasetName } },
                Outputs = new List<DatasetReference> { new DatasetReference { ReferenceName = sqlDatasetName } },
                Source = new BlobSource { },
                Sink = new SqlSink { }
            }
        }
    };
    client.Pipelines.CreateOrUpdate(resourceGroup, dataFactoryName, pipelineName, pipeline);
    Console.WriteLine(SafeJsonConvert.SerializeObject(pipeline, client.SerializationSettings));

    // Create a pipeline run
    Console.WriteLine("Creating pipeline run...");
    CreateRunResponse runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(resourceGroup, dataFactoryName, pipelineName).Result.Body;
    Console.WriteLine("Pipeline run ID: " + runResponse.RunId);

    // Monitor the pipeline run.
    // NOTE(review): this loop only waits on "InProgress"; a run still "Queued"
    // would exit immediately (other samples in this file also check "Queued") — confirm.
    Console.WriteLine("Checking pipeline run status...");
    PipelineRun pipelineRun;
    while (true)
    {
        pipelineRun = client.PipelineRuns.Get(resourceGroup, dataFactoryName, runResponse.RunId);
        Console.WriteLine("Status: " + pipelineRun.Status);
        if (pipelineRun.Status == "InProgress")
        {
            System.Threading.Thread.Sleep(15000);
        }
        else
        {
            break;
        }
    }

    // Check the copy activity run details within a +/-10 minute window around "now"
    Console.WriteLine("Checking copy activity run details...");
    List<ActivityRun> activityRuns = client.ActivityRuns.ListByPipelineRun(
        resourceGroup, dataFactoryName, runResponse.RunId,
        DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow.AddMinutes(10)).ToList();
    if (pipelineRun.Status == "Succeeded")
    {
        Console.WriteLine(activityRuns.First().Output);
    }
    else
    {
        Console.WriteLine(activityRuns.First().Error);
    }

    Console.WriteLine("\nPress any key to exit...");
    Console.ReadKey();
}
public virtual string CreatePipelineRun(string resourceGroupName, string dataFactoryName, string pipelineName, Dictionary <string, object> paramDictionary) { CreateRunResponse response = this.DataFactoryManagementClient.Pipelines.CreateRun(resourceGroupName, dataFactoryName, pipelineName, paramDictionary); return(response.RunId); }
public static PipelineRun MonitorPipelineStatus(DataFactoryManagementClient client, CreateRunResponse runResponse) { Console.WriteLine("Checking pipeline run status..."); PipelineRun pipelineRun; while (true) { pipelineRun = client.PipelineRuns.Get( Constants.resourceGroup, Constants.dataFactoryName, runResponse.RunId); Console.WriteLine("Status: " + pipelineRun.Status); if (pipelineRun.Status == "InProgress") { System.Threading.Thread.Sleep(5000); } else { break; } } return(pipelineRun); }
public PSCreateRunResponse(CreateRunResponse response) { this.RunId = response?.RunId; }