/// <summary>
/// Converts this PS model into its SDK <see cref="PipelineResource"/> equivalent,
/// copying the typed properties, the collection members, and any unrecognized
/// additional properties not already covered by a typed property.
/// </summary>
/// <returns>A populated SDK <see cref="PipelineResource"/>.</returns>
public PipelineResource ToSdkObject()
{
    PipelineResource pipeline = new PipelineResource
    {
        Description = this.Description,
        Concurrency = this.Concurrency,
        Folder = this.Folder?.ToSdkObject()
    };
    this.ActivitiesForCreate?.ForEach(item => pipeline.Activities.Add(item?.ToSdkObject()));
    this.Variables?.ForEach(item => pipeline.Variables.Add(item.Key, item.Value?.ToSdkObject()));
    this.Annotations?.ForEach(item => pipeline.Annotations.Add(item));
    this.RunDimensions?.ForEach(item => pipeline.RunDimensions.Add(item));
    this.Parameters?.ForEach(item => pipeline.Parameters.Add(item.Key, item.Value?.ToSdkObject()));
    // Property keys already mapped above; anything else in AdditionalProperties is passed
    // through verbatim. HashSet gives O(1) membership checks (was a List with O(n) Contains).
    HashSet<string> properties = new HashSet<string>
    {
        "description", "activities", "variables", "concurrency",
        "annotations", "runDimensions", "folder", "parameters"
    };
    if (this.AdditionalProperties != null)
    {
        foreach (var item in this.AdditionalProperties)
        {
            if (!properties.Contains(item.Key))
            {
                pipeline.Add(item.Key, item.Value);
            }
        }
    }
    return pipeline;
}
/// <summary>
/// End-to-end sample exercising the pipeline CRUD surface: create the client, then
/// create, retrieve, list and delete a pipeline. The "Snippet:" #region blocks are
/// presumably extracted into package docs — keep their contents stable.
/// </summary>
public async Task PipelineSample()
{
    #region Snippet:CreatePipelineClient
    // Replace the string below with your actual endpoint url.
    string endpoint = "<my-endpoint-url>";
    /*@@*/ endpoint = TestEnvironment.EndpointUrl;
    PipelineClient client = new PipelineClient(endpoint: new Uri(endpoint), credential: new DefaultAzureCredential());
    #endregion

    #region Snippet:CreatePipeline
    PipelineCreateOrUpdatePipelineOperation operation = client.StartCreateOrUpdatePipeline("MyPipeline", new PipelineResource());
    Response<PipelineResource> createdPipeline = await operation.WaitForCompletionAsync();
    #endregion

    #region Snippet:RetrievePipeline
    PipelineResource retrievedPipeline = client.GetPipeline("MyPipeline");
    #endregion

    #region Snippet:ListPipelines
    Pageable<PipelineResource> pipelines = client.GetPipelinesByWorkspace();
    foreach (PipelineResource pipeline in pipelines)
    {
        System.Console.WriteLine(pipeline.Name);
    }
    #endregion

    #region Snippet:DeletePipeline
    // NOTE(review): the delete LRO is started but never awaited to completion here.
    client.StartDeletePipeline("MyPipeline");
    #endregion
}
/// <summary>
/// Deserializes <paramref name="rawJsonContent"/> into a <see cref="PipelineResource"/>
/// and creates or updates the named pipeline in the given data factory.
/// </summary>
/// <param name="resourceGroupName">Resource group containing the data factory.</param>
/// <param name="dataFactoryName">Target data factory name.</param>
/// <param name="pipelineName">Name of the pipeline to create or update.</param>
/// <param name="rawJsonContent">JSON representation of the pipeline resource.</param>
/// <returns>The created or updated pipeline as returned by the service.</returns>
/// <exception cref="ArgumentNullException">Content is null, empty or whitespace.</exception>
/// <exception cref="ArgumentException">Content is not valid JSON.</exception>
public virtual PipelineResource CreateOrUpdatePipeline(string resourceGroupName, string dataFactoryName, string pipelineName, string rawJsonContent)
{
    if (string.IsNullOrWhiteSpace(rawJsonContent))
    {
        throw new ArgumentNullException(nameof(rawJsonContent));
    }

    PipelineResource pipeline;
    try
    {
        pipeline = SafeJsonConvert.DeserializeObject<PipelineResource>(rawJsonContent, this.DataFactoryManagementClient.DeserializationSettings);
    }
    catch (Exception ex)
    {
        // Keep the original failure as InnerException so the stack trace isn't lost
        // (previously it was only flattened into the message text).
        throw new ArgumentException(string.Format("Json is not valid. Details: '{0}'", ex), ex);
    }

    PipelineResource response = this.DataFactoryManagementClient.Pipelines.CreateOrUpdate(
        resourceGroupName, dataFactoryName, pipelineName, pipeline);
    return response;
}
/// <summary>
/// Doc-snippet sample: create (or update) a pipeline and block until the operation completes.
/// </summary>
public void CreatePipeline()
{
    #region Snippet:CreatePipeline
    PipelineCreateOrUpdatePipelineOperation operation = PipelineClient.StartCreateOrUpdatePipeline("MyPipeline", new PipelineResource());
    // NOTE(review): sync-over-async (GetAwaiter().GetResult()) — tolerable in a synchronous
    // sample, but a deadlock risk in library/UI code; prefer an async sample where possible.
    PipelineResource pipeline = operation.WaitForCompletionAsync().ConfigureAwait(true).GetAwaiter().GetResult();
    #endregion
}
/// <summary>
/// Deserializes <paramref name="rawJsonContent"/> into a <see cref="PipelineResource"/>
/// and creates or updates the pipeline, blocking until the long-running operation completes.
/// </summary>
/// <param name="pipelineName">Name of the pipeline to create or update.</param>
/// <param name="rawJsonContent">JSON representation of the pipeline resource.</param>
/// <returns>The created or updated pipeline.</returns>
public PipelineResource CreateOrUpdatePipeline(string pipelineName, string rawJsonContent)
{
    PipelineResource pipeline = JsonConvert.DeserializeObject<PipelineResource>(rawJsonContent);
    var operation = _pipelineClient.StartCreateOrUpdatePipeline(pipelineName, pipeline);
    // A single Poll() may return before the service has finished the operation; keep
    // polling until a value is actually available.
    while (!operation.HasValue)
    {
        operation.UpdateStatus();
    }
    return operation.Value;
}
/// <summary>
/// Creates (or updates) a pipeline containing a single Spark job definition activity,
/// waits for the create operation to finish, then starts a pipeline run.
/// </summary>
public async Task CreateAndRunPipeline()
{
    const string PipelineName = "Test-Pipeline";
    const string JobName = "SparkJobName";
    const string ActivityName = "ActivityName";

    string workspaceUrl = TestEnvironment.WorkspaceUrl;
    var pipelineClient = new PipelineClient(endpoint: new Uri(workspaceUrl), credential: new DefaultAzureCredential());

    var sparkJob = new SynapseSparkJobReference(SparkJobReferenceType.SparkJobDefinitionReference, JobName);
    var activity = new SynapseSparkJobDefinitionActivity(ActivityName, sparkJob);
    var pipelineResource = new PipelineResource();
    pipelineResource.Activities.Add(activity);

    Console.WriteLine("Create pipeline if not already exists.");
    // StartCreateOrUpdatePipelineAsync only *starts* the long-running operation; wait for
    // completion so the pipeline definitely exists before we try to run it.
    PipelineCreateOrUpdatePipelineOperation createOperation = await pipelineClient.StartCreateOrUpdatePipelineAsync(PipelineName, pipelineResource);
    await createOperation.WaitForCompletionAsync();
    Console.WriteLine("Pipeline created");

    Console.WriteLine("Running pipeline.");
    CreateRunResponse runOperation = await pipelineClient.CreatePipelineRunAsync(PipelineName);
    Console.WriteLine("Run started. ID: {0}", runOperation.RunId);
}
// Builds a single pipeline whose activities each execute an existing per-table
// "ConCompresion" pipeline via ExecutePipelineActivity, then publishes it.
private static void corregirPrueba(DataFactoryManagementClient client)
{
    // Create pipelines that upload more than one table each. (translated)
    int cantidadTablasPorPipe = 1; // NOTE(review): unused — confirm before removing
    int ayudaRecorrido = 0; // NOTE(review): unused — confirm before removing
    string[] nombreTablas = DatosGrales.traerTablas(true);
    string[] nombreTablasParaCompresion = DatosGrales.traerTablas(false); // NOTE(review): unused
    List<Activity> la = new List<Activity>();
    PipelineReference pipeRef;
    // Only the first table is wired up (loop bound is hard-coded to 1) — presumably a
    // targeted fix/test pass; verify before extending.
    for (int i = 0; i < 1; i++)
    {
        pipeRef = new PipelineReference("Pipeline-Sql-DataLake-ConCompresion-Claim-" + nombreTablas[i], "Pipeline-Sql-DataLake-ConCompresion-Claim-" + nombreTablas[i]);
        Dictionary<String, object> diccionarioParams = new Dictionary<String, object>();
        diccionarioParams.Add("Param1", 1);
        ExecutePipelineActivity epa = new ExecutePipelineActivity("ExecPipe-" + nombreTablas[i], pipeRef, diccionarioParams, "Llama al pipe para " + nombreTablas[i], null, diccionarioParams, false);
        la.Add(epa);
    }
    PipelineResource pipe1 = new PipelineResource();
    pipe1.Activities = la;
    client.Pipelines.CreateOrUpdate(DatosGrales.resourceGroup, DatosGrales.dataFactoryName, "Pipeline-ManyActivs-Claim-1", pipe1);
    Console.Write("Mensaje");
}
/// <summary>
/// Creates or updates the named pipeline through the shared client; a no-op when the
/// shared client has not been initialized.
/// </summary>
public static void CreateOrUpdate(string resourceName, PipelineResource pipelineResource)
{
    Interface?.Pipelines.CreateOrUpdate(resourceGroup, dataFactoryName, resourceName, pipelineResource);
}
/// <summary>
/// PowerShell-facing wrapper over an SDK <see cref="PipelineResource"/>; copies every
/// typed property and gathers the SDK object's untyped extras into AdditionalProperties.
/// </summary>
/// <param name="pipelineResource">SDK resource to wrap; may be null (members stay null).</param>
/// <param name="workspaceName">Synapse workspace the pipeline belongs to.</param>
public PSPipelineResource(PipelineResource pipelineResource, string workspaceName)
    : base(pipelineResource?.Id, pipelineResource?.Name, pipelineResource?.Type, pipelineResource?.Etag)
{
    this.WorkspaceName = workspaceName;
    this.Description = pipelineResource?.Description;
    this.Activities = pipelineResource?.Activities;
    // Re-wrap each variable specification in its PS counterpart.
    this.Variables = pipelineResource?.Variables?
        .Select(element => new KeyValuePair<string, PSVariableSpecification>(element.Key, new PSVariableSpecification(element.Value)))
        .ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
    this.Concurrency = pipelineResource?.Concurrency;
    this.Annotations = pipelineResource?.Annotations;
    this.RunDimensions = pipelineResource?.RunDimensions;
    this.Folder = new PSPipelineFolder(pipelineResource?.Folder);
    // Re-wrap each parameter specification in its PS counterpart.
    this.Parameters = pipelineResource?.Parameters?
        .Select(element => new KeyValuePair<string, PSParameterSpecification>(element.Key, new PSParameterSpecification(element.Value)))
        .ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
    // The SDK type exposes unrecognized JSON properties through its enumerator; copy them
    // across so a later round-trip back to the SDK does not drop them.
    var propertiesEnum = pipelineResource?.GetEnumerator();
    if (propertiesEnum != null)
    {
        this.AdditionalProperties = new Dictionary<string, object>();
        while (propertiesEnum.MoveNext())
        {
            this.AdditionalProperties.Add(propertiesEnum.Current);
        }
    }
}
/// <summary>Recorded-example test: Pipelines_Update round-trips through CreateOrUpdate.</summary>
public void Pipelines_Update()
{
    RunTest("Pipelines_Update", (example, client, responseCode) =>
    {
        PipelineResource updated = client.Pipelines.CreateOrUpdate(
            RGN(example), FN(example), PN(example), PR(example, client));
        CheckResponseBody(example, client, responseCode, updated);
    });
}
/// <summary>Recorded-example test: Pipelines_Get fetches the pipeline and validates the body.</summary>
public void Pipelines_Get()
{
    RunTest("Pipelines_Get", (example, client, responseCode) =>
    {
        PipelineResource fetched = client.Pipelines.Get(
            RGN(example), FN(example), PN(example));
        CheckResponseBody(example, client, responseCode, fetched);
    });
}
/// <summary>Creates a pipeline with a generated name and verifies the returned name matches.</summary>
public async Task TestCreatePipeline()
{
    string pipelineName = Recording.GenerateName("Pipeline");

    PipelineCreateOrUpdatePipelineOperation createOperation =
        await PipelineClient.StartCreateOrUpdatePipelineAsync(pipelineName, new PipelineResource());
    PipelineResource created = await createOperation.WaitForCompletionAsync();

    Assert.AreEqual(pipelineName, created.Name);
}
// Creates one SQL -> Data Lake copy pipeline per non-special table.
private static void crearPipesSubidaNormal(DataFactoryManagementClient client)
{
    var nombreTablas = DatosGrales.traerTablas(true); // table names (with schema prefix)
    var nombreSinSchema = DatosGrales.traerTablas(false); // same tables without schema prefix
    List<Activity> la;
    CopyActivity ca;
    List<DatasetReference> inp;
    DatasetReference dr;
    List<DatasetReference> outp;
    DatasetReference drO;
    PipelineResource pipe;
    string nombreTablaParaConsulta;
    string consulta;
    string nombreBD = DatosGrales.nombreBD;
    for (int i = 0; i < nombreTablas.Length; i++)
    {
        if (esTablaEspecial(nombreTablas[i]))
        {
            // Create nothing here because the table is special. (translated)
        }
        else
        {
            // "schema-table" -> "schema.table" so the name is usable inside the SQL query.
            nombreTablaParaConsulta = nombreTablas[i].Replace('-', '.');
            consulta = DatosGrales.queryMagica(nombreTablaParaConsulta, 10000);
            la = new List<Activity>();
            ca = new CopyActivity();
            ca.Name = "CopyPipeline-Sql-Lake-" + nombreTablas[i];
            ca.Source = new SqlSource(null, 3, null, consulta);
            ca.Sink = new SqlSink();
            // Input: the SQL dataset for this table.
            inp = new List<DatasetReference>();
            dr = new DatasetReference("Dataset_" + nombreBD + "_" + nombreTablas[i]);
            inp.Add(dr);
            ca.Inputs = inp;
            // Output: the matching Data Lake Store dataset (schema-less name).
            outp = new List<DatasetReference>();
            drO = new DatasetReference("Dataset_" + nombreBD + "_DataLakeStore_" + nombreSinSchema[i]);
            outp.Add(drO);
            ca.Outputs = outp;
            la.Add(ca);
            pipe = new PipelineResource();
            pipe.Activities = la;
            client.Pipelines.CreateOrUpdate(DatosGrales.resourceGroup, DatosGrales.dataFactoryName, "Pipeline-Copy-" + nombreBD + "-" + nombreTablas[i], pipe);
            Console.Write((i + 1) + ". Pipeline-Copy-" + nombreBD + "-" + nombreTablas[i] + " creado.\n");
        }
    }
}
// NOTE(review): despite the name, this test creates and deletes a *pipeline* (the name
// looks copied from a notebook test) — consider renaming to TestDeletePipeline.
public async Task TestDeleteNotebook()
{
    PipelineClient client = CreateClient();
    PipelineResource resource = await DisposablePipeline.CreateResource(client, this.Recording);
    // Start the delete LRO and assert it finishes successfully.
    PipelineDeletePipelineOperation operation = await client.StartDeletePipelineAsync(resource.Name);
    await operation.WaitAndAssertSuccessfulCompletion();
}
/// <summary>For every pipeline in the workspace, fetching it by name returns matching identity.</summary>
public async Task TestGetPipeline()
{
    await foreach (PipelineResource expected in PipelineClient.GetPipelinesByWorkspaceAsync())
    {
        PipelineResource actual = await PipelineClient.GetPipelineAsync(expected.Name);

        Assert.AreEqual(expected.Name, actual.Name);
        Assert.AreEqual(expected.Id, actual.Id);
    }
}
/// <summary>
/// Wraps an SDK <see cref="PipelineResource"/> for PowerShell, recording the resource
/// group and data factory it belongs to.
/// </summary>
/// <param name="pipeline">SDK pipeline to wrap; must not be null.</param>
/// <param name="resourceGroupName">Resource group containing the data factory.</param>
/// <param name="factoryName">Data factory containing the pipeline.</param>
/// <exception cref="ArgumentNullException"><paramref name="pipeline"/> is null.</exception>
public PSPipeline(PipelineResource pipeline, string resourceGroupName, string factoryName)
{
    if (pipeline == null)
    {
        // nameof keeps the parameter name refactor-safe (same runtime string as before).
        throw new ArgumentNullException(nameof(pipeline));
    }
    this._pipeline = pipeline;
    this.ResourceGroupName = resourceGroupName;
    this.DataFactoryName = factoryName;
}
/// <summary>
/// Deserializes <paramref name="rawJsonContent"/> into a PS pipeline model, converts it
/// to the SDK shape, and creates or updates the pipeline, blocking until the
/// long-running operation completes.
/// </summary>
/// <param name="pipelineName">Name of the pipeline to create or update.</param>
/// <param name="rawJsonContent">JSON representation of the pipeline resource.</param>
/// <returns>The created or updated pipeline.</returns>
public PipelineResource CreateOrUpdatePipeline(string pipelineName, string rawJsonContent)
{
    PSPipelineResource psPipeline = JsonConvert.DeserializeObject<PSPipelineResource>(rawJsonContent, Settings);
    PipelineResource pipeline = psPipeline.ToSdkObject();
    var operation = _pipelineClient.StartCreateOrUpdatePipeline(pipelineName, pipeline);
    while (!operation.HasValue)
    {
        // Pause between polls — the previous hot spin loop hammered the service with
        // back-to-back status requests while waiting for the LRO to finish.
        System.Threading.Thread.Sleep(1000);
        operation.UpdateStatus();
    }
    return operation.Value;
}
/// <summary>
/// Fetches a pipeline from the service and wraps it as a <see cref="PSPipeline"/>;
/// returns null when the service returns no resource.
/// </summary>
public virtual PSPipeline GetPipeline(string resourceGroupName, string dataFactoryName, string pipelineName)
{
    PipelineResource sdkPipeline = this.DataFactoryManagementClient.Pipelines.Get(
        resourceGroupName, dataFactoryName, pipelineName);

    return sdkPipeline == null
        ? null
        : new PSPipeline(sdkPipeline, resourceGroupName, dataFactoryName);
}
/// <summary>
/// Creates (or updates) the sample copy pipeline: a single blob-to-blob copy activity
/// whose input and output paths are supplied through pipeline parameters.
/// </summary>
public static void CreatePipeline(DataFactoryManagementClient client)
{
    Console.WriteLine("Creating pipeline " + Constants.pipelineName + "...");

    // Copy activity: read from the blob dataset at inputPath, write to it at outputPath.
    var copyActivity = new CopyActivity
    {
        Name = "CopyFromBlobToBlob",
        Inputs = new List<DatasetReference>
        {
            new DatasetReference()
            {
                ReferenceName = Constants.blobDatasetName,
                Parameters = new Dictionary<string, object> { { "path", "@pipeline().parameters.inputPath" } }
            }
        },
        Outputs = new List<DatasetReference>
        {
            new DatasetReference
            {
                ReferenceName = Constants.blobDatasetName,
                Parameters = new Dictionary<string, object> { { "path", "@pipeline().parameters.outputPath" } }
            }
        },
        Source = new BlobSource { },
        Sink = new BlobSink { }
    };

    var pipeline = new PipelineResource
    {
        Parameters = new Dictionary<string, ParameterSpecification>
        {
            { "inputPath", new ParameterSpecification { Type = ParameterType.String } },
            { "outputPath", new ParameterSpecification { Type = ParameterType.String } }
        },
        Activities = new List<Activity> { copyActivity }
    };

    client.Pipelines.CreateOrUpdate(Constants.resourceGroup, Constants.dataFactoryName, Constants.pipelineName, pipeline);
    //Console.WriteLine(SafeJsonConvert.SerializeObject(pipeline, client.SerializationSettings));
}
/// <summary>
/// Sample covering the full pipeline lifecycle: create client, create pipeline, retrieve,
/// run, list, and delete. The "Snippet:" #region blocks are presumably extracted into
/// package docs — keep their contents stable.
/// </summary>
public async Task RunPipeline()
{
    #region Snippet:CreatePipelineClientPrep
#if SNIPPET
    // Replace the string below with your actual endpoint url.
    string endpoint = "<my-endpoint-url>";
#else
    string endpoint = TestEnvironment.EndpointUrl;
#endif
    string pipelineName = "Test-Pipeline";
    #endregion

    #region Snippet:CreatePipelineClient
    var client = new PipelineClient(endpoint: new Uri(endpoint), credential: new DefaultAzureCredential());
    #endregion

    #region Snippet:CreatePipeline
    PipelineCreateOrUpdatePipelineOperation operation = client.StartCreateOrUpdatePipeline(pipelineName, new PipelineResource());
    Response<PipelineResource> createdPipeline = await operation.WaitForCompletionAsync();
    #endregion

    #region Snippet:RetrievePipeline
    PipelineResource retrievedPipeline = client.GetPipeline(pipelineName);
    Console.WriteLine("Pipeline ID: {0}", retrievedPipeline.Id);
    #endregion

    #region Snippet:RunPipeline
    Console.WriteLine("Running pipeline.");
    CreateRunResponse runOperation = await client.CreatePipelineRunAsync(pipelineName);
    Console.WriteLine("Run started. ID: {0}", runOperation.RunId);
    #endregion

    #region Snippet:ListPipelines
    Pageable<PipelineResource> pipelines = client.GetPipelinesByWorkspace();
    foreach (PipelineResource pipeline in pipelines)
    {
        Console.WriteLine(pipeline.Name);
    }
    #endregion

    #region Snippet:DeletePipeline
    PipelineDeletePipelineOperation deleteOperation = client.StartDeletePipeline(pipelineName);
    await deleteOperation.WaitForCompletionResponseAsync();
    #endregion
}
/// <summary>
/// Extension helper: creates or updates a pipeline made of the supplied activities and
/// logs its serialized JSON.
/// </summary>
public static void CreatePipeline(
    this DataFactoryManagementClient client,
    AzureConfig config,
    string pipelineName,
    Activity[] activities)
{
    Console.WriteLine($"Creating pipeline {pipelineName}...");

    var resource = new PipelineResource { Activities = activities };
    client.Pipelines.CreateOrUpdate(config.ResourceGroup, config.DataFactoryName, pipelineName, resource);

    Console.WriteLine(SafeJsonConvert.SerializeObject(resource, client.SerializationSettings));
}
/// <summary>
/// With at least one pipeline present, fetching each listed pipeline by name returns
/// matching identity.
/// </summary>
public async Task TestGetPipeline()
{
    PipelineClient client = CreateClient();
    await using DisposablePipeline pipeline = await DisposablePipeline.Create(client, this.Recording);

    IList<PipelineResource> allPipelines = await client.GetPipelinesByWorkspaceAsync().ToListAsync();
    Assert.GreaterOrEqual(allPipelines.Count, 1);

    foreach (PipelineResource expected in allPipelines)
    {
        PipelineResource actual = await client.GetPipelineAsync(expected.Name);

        Assert.AreEqual(expected.Name, actual.Name);
        Assert.AreEqual(expected.Id, actual.Id);
    }
}
// One-off fix: rebuilds the copy pipeline for the cc_user table with an explicit column
// list (SpatialPointDenorm cast to nvarchar, presumably because the spatial type cannot
// be copied directly — confirm) and no compression.
private static void corregirUserSinCompresion(DataFactoryManagementClient client)
{
    string nombreTabla = "dbo-cc_user";
    string nombreTablaSinEsquema = "cc_user";
    List<Activity> la;
    CopyActivity ca;
    List<DatasetReference> inp;
    DatasetReference dr;
    List<DatasetReference> outp;
    DatasetReference drO;
    PipelineResource pipe;
    // Explicit column list; keep this literal byte-identical — it is executed against SQL.
    string nuevaConsulta = "select top 1000 [LoadCommandID], [OffsetStatsUpdateTime], [PublicID], [CreateTime], [UserSettingsID], cast([SpatialPointDenorm] as nvarchar(MAX)), [SessionTimeoutSecs], [OrganizationID], [VacationStatus], [Department], [UpdateTime], [ExternalUser], [Language], [ExperienceLevel], [Locale], [ID], [LossType], [AuthorityProfileID], [CreateUserID], [BeanVersion], [NewlyAssignedActivities], [Retired], [DefaultPhoneCountry], [ValidationLevel], [PolicyType], [UpdateUserID], [QuickClaim], [CredentialID], [SystemUserType], [DefaultCountry], [TimeZone], [ContactID], [JobTitle] from cc_user";
    la = new List<Activity>();
    ca = new CopyActivity();
    ca.Name = "CopyPipeline-Sql-Lake-" + nombreTabla;
    ca.Source = new SqlSource(null, 3, null, nuevaConsulta);
    ca.Sink = new SqlSink();
    // Input: SQL dataset for the table.
    inp = new List<DatasetReference>();
    dr = new DatasetReference("Dataset_Claim_" + nombreTabla);
    inp.Add(dr);
    ca.Inputs = inp;
    // Output: the matching Data Lake Store dataset (schema-less name).
    outp = new List<DatasetReference>();
    drO = new DatasetReference("Dataset_Claim_DataLakeStore_" + nombreTablaSinEsquema);
    outp.Add(drO);
    ca.Outputs = outp;
    la.Add(ca);
    pipe = new PipelineResource();
    pipe.Activities = la;
    client.Pipelines.CreateOrUpdate(DatosGrales.resourceGroup, DatosGrales.dataFactoryName, "Pipeline-Copy-Claim-" + nombreTabla, pipe);
    Console.Write("Pipeline-Copy-Claim-" + nombreTabla + " modificado.\n");
}
// One-off fix: rebuilds the copy pipeline for the cc_address table with an explicit
// column list (SpatialPoint cast to nvarchar, presumably because the spatial type cannot
// be copied directly — confirm) and no compression.
private static void corregirAddressSinCompresion(DataFactoryManagementClient client)
{
    string nombreTabla = "dbo-cc_address";
    string nombreTablaSinEsquema = "cc_address";
    List<Activity> la;
    CopyActivity ca;
    List<DatasetReference> inp;
    DatasetReference dr;
    List<DatasetReference> outp;
    DatasetReference drO;
    PipelineResource pipe;
    // Explicit column list; keep this literal byte-identical — it is executed against SQL.
    string nuevaConsulta = "select top 1000 [LoadCommandID], [PublicID], [BatchGeocode], [CreateTime], [AddressLine1], [AddressLine2], [County], [AddressLine3], cast([SpatialPoint] as nvarchar(MAX)), [CityKanji], [AddressLine2Kanji], [Admin], [State], [AddressBookUID], [UpdateTime], [Country], [ID], [Ext_StreetType], [ExternalLinkID], [CreateUserID], [ValidUntil], [ArchivePartition], [BeanVersion], [CityDenorm], [Retired], [Ext_StreetNumber], [City], [AddressType], [AddressLine1Kanji], [UpdateUserID], [CEDEXBureau], [GeocodeStatus], [CEDEX], [PostalCodeDenorm], [PostalCode], [Subtype], [Description] from cc_address";
    la = new List<Activity>();
    ca = new CopyActivity();
    ca.Name = "CopyPipeline-Sql-Lake-" + nombreTabla;
    ca.Source = new SqlSource(null, 3, null, nuevaConsulta);
    ca.Sink = new SqlSink();
    // Input: SQL dataset for the table.
    inp = new List<DatasetReference>();
    dr = new DatasetReference("Dataset_Claim_" + nombreTabla);
    inp.Add(dr);
    ca.Inputs = inp;
    // Output: the matching Data Lake Store dataset (schema-less name).
    outp = new List<DatasetReference>();
    drO = new DatasetReference("Dataset_Claim_DataLakeStore_" + nombreTablaSinEsquema);
    outp.Add(drO);
    ca.Outputs = outp;
    la.Add(ca);
    pipe = new PipelineResource();
    pipe.Activities = la;
    client.Pipelines.CreateOrUpdate(DatosGrales.resourceGroup, DatosGrales.dataFactoryName, "Pipeline-Copy-Claim-" + nombreTabla, pipe);
    Console.Write("Pipeline-Copy-Claim-" + nombreTabla + " modificado.\n");
}
/// <summary>
/// PowerShell-facing wrapper over an SDK <see cref="PipelineResource"/>; copies every
/// typed property and carries the SDK object's AdditionalProperties across directly.
/// </summary>
/// <param name="pipelineResource">SDK resource to wrap; may be null (members stay null).</param>
/// <param name="workspaceName">Synapse workspace the pipeline belongs to.</param>
public PSPipelineResource(PipelineResource pipelineResource, string workspaceName)
    : base(pipelineResource?.Id, pipelineResource?.Name, pipelineResource?.Type, pipelineResource?.Etag)
{
    this.WorkspaceName = workspaceName;
    this.Description = pipelineResource?.Description;
    this.Activities = pipelineResource?.Activities;
    // Re-wrap each variable specification in its PS counterpart.
    this.Variables = pipelineResource?.Variables?
        .Select(element => new KeyValuePair<string, PSVariableSpecification>(element.Key, new PSVariableSpecification(element.Value)))
        .ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
    this.Concurrency = pipelineResource?.Concurrency;
    this.Annotations = pipelineResource?.Annotations;
    this.RunDimensions = pipelineResource?.RunDimensions;
    this.Folder = new PSPipelineFolder(pipelineResource?.Folder);
    // Re-wrap each parameter specification in its PS counterpart.
    this.Parameters = pipelineResource?.Parameters?
        .Select(element => new KeyValuePair<string, PSParameterSpecification>(element.Key, new PSParameterSpecification(element.Value)))
        .ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
    // Shared by reference, not copied — mutations on either side are visible to both.
    this.AdditionalProperties = pipelineResource?.AdditionalProperties;
}
/// <summary>
/// Creates (or updates) a pipeline containing one Spark job definition activity and
/// waits for the long-running create operation to finish.
/// </summary>
/// <param name="pipelineName">Name of the pipeline to create or update.</param>
private async Task CreatePipeline(string pipelineName)
{
    const string JobName = "SparkJobName";
    const string ActivityName = "ActivityName";

    string endpoint = TestEnvironment.EndpointUrl;
    var pipelineClient = new PipelineClient(endpoint: new Uri(endpoint), credential: new DefaultAzureCredential());

    var jobReference = new SynapseSparkJobReference(SparkJobReferenceType.SparkJobDefinitionReference, JobName);
    var sparkActivity = new SynapseSparkJobDefinitionActivity(ActivityName, jobReference);
    var resource = new PipelineResource();
    resource.Activities.Add(sparkActivity);

    Console.WriteLine("Create pipeline if not already exists.");
    PipelineCreateOrUpdatePipelineOperation operation = await pipelineClient.StartCreateOrUpdatePipelineAsync(pipelineName, resource);
    await operation.WaitForCompletionAsync();
    Console.WriteLine("Pipeline created");
}
// NOTE(review): despite the name, this test renames a *pipeline* (the name looks copied
// from a linked-service test) — consider renaming to TestRenamePipeline.
public async Task TestRenameLinkedService()
{
    PipelineClient client = CreateClient();
    PipelineResource resource = await DisposablePipeline.CreateResource(client, Recording);
    string newPipelineName = Recording.GenerateId("Pipeline2", 16);
    // Rename and wait for the rename LRO to complete.
    PipelineRenamePipelineOperation renameOperation = await client.StartRenamePipelineAsync(resource.Name, new ArtifactRenameRequest() { NewName = newPipelineName });
    await renameOperation.WaitForCompletionResponseAsync();
    // The pipeline must now be retrievable under the new name.
    PipelineResource pipeline = await client.GetPipelineAsync(newPipelineName);
    Assert.AreEqual(newPipelineName, pipeline.Name);
    // Manual cleanup: the DisposablePipeline wrapper can no longer delete it by the old name.
    PipelineDeletePipelineOperation operation = await client.StartDeletePipelineAsync(newPipelineName);
    await operation.WaitForCompletionResponseAsync();
}
/// <summary>
/// ADFv2 quickstart: creates a data factory, an Azure Storage linked service, a blob
/// dataset, and a parameterized blob-to-blob copy pipeline; then runs the pipeline,
/// monitors the run, and prints the copy activity's result.
/// </summary>
static void Main(string[] args)
{
    // Set variables
    string tenantID = "<your tenant ID>";
    string applicationId = "<your application ID>";
    string authenticationKey = "<your authentication key for the application>";
    string subscriptionId = "<your subscription ID where the data factory resides>";
    string resourceGroup = "<your resource group where the data factory resides>";
    string region = "<the location of your resource group>";
    string dataFactoryName = "<specify the name of data factory to create. It must be globally unique.>";
    string storageAccount = "<your storage account name to copy data>";
    string storageKey = "<your storage account key>";
    // specify the container and input folder from which all files
    // need to be copied to the output folder.
    string inputBlobPath = "<path to existing blob(s) to copy data from, e.g. containername/inputdir>";
    //specify the contains and output folder where the files are copied
    string outputBlobPath = "<the blob path to copy data to, e.g. containername/outputdir>";
    // name of the Azure Storage linked service, blob dataset, and the pipeline
    string storageLinkedServiceName = "AzureStorageLinkedService";
    string blobDatasetName = "BlobDataset";
    string pipelineName = "Adfv2QuickStartPipeline";

    // Authenticate and create a data factory management client
    var context = new AuthenticationContext("https://login.microsoftonline.com/" + tenantID);
    ClientCredential cc = new ClientCredential(applicationId, authenticationKey);
    // NOTE(review): .Result blocks on async work; tolerable in a console Main, avoid elsewhere.
    AuthenticationResult result = context.AcquireTokenAsync(
        "https://management.azure.com/", cc).Result;
    ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);
    var client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };

    // Create a data factory
    Console.WriteLine("Creating data factory " + dataFactoryName + "...");
    Factory dataFactory = new Factory { Location = region, Identity = new FactoryIdentity() };
    client.Factories.CreateOrUpdate(resourceGroup, dataFactoryName, dataFactory);
    Console.WriteLine(
        SafeJsonConvert.SerializeObject(dataFactory, client.SerializationSettings));
    // Poll (1s) until provisioning finishes before creating child resources.
    while (client.Factories.Get(resourceGroup, dataFactoryName).ProvisioningState == "PendingCreation")
    {
        System.Threading.Thread.Sleep(1000);
    }

    // Create an Azure Storage linked service
    Console.WriteLine("Creating linked service " + storageLinkedServiceName + "...");
    LinkedServiceResource storageLinkedService = new LinkedServiceResource(
        new AzureStorageLinkedService
        {
            ConnectionString = new SecureString(
                "DefaultEndpointsProtocol=https;AccountName=" + storageAccount + ";AccountKey=" + storageKey)
        }
    );
    client.LinkedServices.CreateOrUpdate(
        resourceGroup, dataFactoryName, storageLinkedServiceName, storageLinkedService);
    Console.WriteLine(SafeJsonConvert.SerializeObject(
        storageLinkedService, client.SerializationSettings));

    // Create an Azure Blob dataset (folder path supplied via the "path" parameter)
    Console.WriteLine("Creating dataset " + blobDatasetName + "...");
    DatasetResource blobDataset = new DatasetResource(
        new AzureBlobDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = storageLinkedServiceName },
            FolderPath = new Expression { Value = "@{dataset().path}" },
            Parameters = new Dictionary<string, ParameterSpecification>
            {
                { "path", new ParameterSpecification { Type = ParameterType.String } }
            }
        }
    );
    client.Datasets.CreateOrUpdate(
        resourceGroup, dataFactoryName, blobDatasetName, blobDataset);
    Console.WriteLine(
        SafeJsonConvert.SerializeObject(blobDataset, client.SerializationSettings));

    // Create a pipeline with a copy activity
    Console.WriteLine("Creating pipeline " + pipelineName + "...");
    PipelineResource pipeline = new PipelineResource
    {
        Parameters = new Dictionary<string, ParameterSpecification>
        {
            { "inputPath", new ParameterSpecification { Type = ParameterType.String } },
            { "outputPath", new ParameterSpecification { Type = ParameterType.String } }
        },
        Activities = new List<Activity>
        {
            new CopyActivity
            {
                Name = "CopyFromBlobToBlob",
                Inputs = new List<DatasetReference>
                {
                    new DatasetReference()
                    {
                        ReferenceName = blobDatasetName,
                        Parameters = new Dictionary<string, object>
                        {
                            { "path", "@pipeline().parameters.inputPath" }
                        }
                    }
                },
                Outputs = new List<DatasetReference>
                {
                    new DatasetReference
                    {
                        ReferenceName = blobDatasetName,
                        Parameters = new Dictionary<string, object>
                        {
                            { "path", "@pipeline().parameters.outputPath" }
                        }
                    }
                },
                Source = new BlobSource { },
                Sink = new BlobSink { }
            }
        }
    };
    client.Pipelines.CreateOrUpdate(resourceGroup, dataFactoryName, pipelineName, pipeline);
    Console.WriteLine(SafeJsonConvert.SerializeObject(pipeline, client.SerializationSettings));

    // Create a pipeline run
    Console.WriteLine("Creating pipeline run...");
    Dictionary<string, object> parameters = new Dictionary<string, object>
    {
        { "inputPath", inputBlobPath },
        { "outputPath", outputBlobPath }
    };
    CreateRunResponse runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(
        resourceGroup, dataFactoryName, pipelineName, parameters: parameters
    ).Result.Body;
    Console.WriteLine("Pipeline run ID: " + runResponse.RunId);

    // Monitor the pipeline run: poll every 15s while it is in progress or queued.
    Console.WriteLine("Checking pipeline run status...");
    PipelineRun pipelineRun;
    while (true)
    {
        pipelineRun = client.PipelineRuns.Get(
            resourceGroup, dataFactoryName, runResponse.RunId);
        Console.WriteLine("Status: " + pipelineRun.Status);
        if (pipelineRun.Status == "InProgress" || pipelineRun.Status == "Queued")
        {
            System.Threading.Thread.Sleep(15000);
        }
        else
        {
            break;
        }
    }

    // Check the copy activity run details within a +/-10 minute window around now.
    Console.WriteLine("Checking copy activity run details...");
    RunFilterParameters filterParams = new RunFilterParameters(
        DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow.AddMinutes(10));
    ActivityRunsQueryResponse queryResponse = client.ActivityRuns.QueryByPipelineRun(
        resourceGroup, dataFactoryName, runResponse.RunId, filterParams);
    if (pipelineRun.Status == "Succeeded")
    {
        Console.WriteLine(queryResponse.Value.First().Output);
    }
    else
    {
        Console.WriteLine(queryResponse.Value.First().Error);
    }

    Console.WriteLine("\nPress any key to exit...");
    Console.ReadKey();
}
/// <summary>
/// Records the "Pipelines_Get" example: the interceptor captures the request/response
/// pair produced by fetching the pipeline; the result itself is not needed.
/// </summary>
private void CapturePipelines_Get()
{
    interceptor.CurrentExampleName = "Pipelines_Get";
    _ = client.Pipelines.Get(secrets.ResourceGroupName, secrets.FactoryName, pipelineName);
}
/// <summary>
/// Records the "Pipelines_Update" example: the interceptor captures the request/response
/// pair produced by the CreateOrUpdate call; the result itself is not needed.
/// </summary>
private void CapturePipelines_Update()
{
    interceptor.CurrentExampleName = "Pipelines_Update";
    PipelineResource resourceIn = GetPipelineResource("Example description");
    _ = client.Pipelines.CreateOrUpdate(secrets.ResourceGroupName, secrets.FactoryName, pipelineName, resourceIn);
}