// Replays the "Datasets_Get" recorded example: fetches the dataset and validates
// the captured response body against the service result.
public void Datasets_Get()
{
    RunTest("Datasets_Get", (example, client, responseCode) =>
    {
        var resource = client.Datasets.Get(RGN(example), FN(example), DSN(example));
        CheckResponseBody(example, client, responseCode, resource);
    });
}
// Replays the "Datasets_Create" recorded example: issues CreateOrUpdate with the
// example's dataset definition and validates the captured response body.
public void Datasets_Create()
{
    RunTest("Datasets_Create", (example, client, responseCode) =>
    {
        var resource = client.Datasets.CreateOrUpdate(RGN(example), FN(example), DSN(example), DSR(example, client));
        CheckResponseBody(example, client, responseCode, resource);
    });
}
// Wraps an SDK DatasetResource together with the Synapse workspace it belongs to.
// A null dataset is tolerated: all SDK-derived members then default to null.
public PSDatasetResource(DatasetResource dataset, string workspaceName)
    : base(dataset?.Id, dataset?.Name, dataset?.Type, dataset?.Etag)
{
    WorkspaceName = workspaceName;
    Properties = dataset?.Properties;
}
// Creates a dataset bound to the workspace default storage linked service and
// verifies the service echoes back the generated dataset name.
public async Task TestCreateDataset()
{
    string datasetName = Recording.GenerateName("Dataset");
    var storageReference = new LinkedServiceReference(
        LinkedServiceReferenceType.LinkedServiceReference,
        TestEnvironment.WorkspaceName + "-WorkspaceDefaultStorage");
    var operation = await DatasetClient.StartCreateOrUpdateDatasetAsync(
        datasetName,
        new DatasetResource(new Dataset(storageReference)));
    DatasetResource created = await operation.WaitForCompletionAsync();
    Assert.AreEqual(datasetName, created.Name);
}
// Registers a placeholder SQL Server table dataset ("DummyDatasetForSSIS")
// pointing at dbo.Ayuda_SSIS through the SSIS linked service.
private static void createDummyDatasetForSSIS(DataFactoryManagementClient client)
{
    var tableDefinition = new SqlServerTableDataset(
        new LinkedServiceReference(DatosGrales.linkedServiceSSIS),
        "dbo.Ayuda_SSIS");
    var dsResult = client.Datasets.CreateOrUpdate(
        DatosGrales.resourceGroup,
        DatosGrales.dataFactoryName,
        "DummyDatasetForSSIS",
        new DatasetResource(tableDefinition));
}
// Round-trips every dataset in the workspace: enumerates the list endpoint, then
// fetches each dataset by name and compares Name and Id.
public async Task TestGetDataset()
{
    await foreach (DatasetResource expected in DatasetClient.GetDatasetsByWorkspaceAsync())
    {
        DatasetResource actual = await DatasetClient.GetDatasetAsync(expected.Name);
        Assert.AreEqual(expected.Name, actual.Name);
        Assert.AreEqual(expected.Id, actual.Id);
    }
}
/// <summary>
/// Deserializes the raw dataset JSON into a <c>PSDatasetResource</c>, converts it to the
/// SDK model, and synchronously creates or updates the dataset, polling the long-running
/// operation until it completes.
/// </summary>
/// <param name="datasetName">Name of the dataset to create or update.</param>
/// <param name="rawJsonContent">Raw JSON definition of the dataset.</param>
/// <returns>The created or updated dataset resource.</returns>
public DatasetResource CreateOrUpdateDataset(string datasetName, string rawJsonContent)
{
    PSDatasetResource pSDatasetResource = JsonConvert.DeserializeObject <PSDatasetResource>(rawJsonContent, Settings);
    DatasetResource dataset = pSDatasetResource.ToSdkObject();
    var operation = _datasetClient.StartCreateOrUpdateDataset(datasetName, dataset);
    // Poll with a delay between status checks. Each UpdateStatus() issues an HTTP
    // request; the previous tight loop hammered the service and pegged a CPU core.
    while (!operation.HasValue)
    {
        operation.UpdateStatus();
        if (!operation.HasValue)
        {
            System.Threading.Thread.Sleep(1000);
        }
    }
    return operation.Value;
}
/// <summary>
/// Wraps an SDK DatasetResource together with the resource group and factory it
/// belongs to. A dataset with no Properties gets an empty <c>Dataset</c> payload so
/// downstream property access never hits null.
/// </summary>
/// <param name="dataset">The SDK dataset resource to wrap; must not be null.</param>
/// <param name="resourceGroupName">Resource group containing the data factory.</param>
/// <param name="factoryName">Name of the data factory.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="dataset"/> is null.</exception>
public PSDataset(DatasetResource dataset, string resourceGroupName, string factoryName)
{
    if (dataset == null)
    {
        // nameof keeps the exception message correct if the parameter is renamed.
        throw new ArgumentNullException(nameof(dataset));
    }
    if (dataset.Properties == null)
    {
        dataset.Properties = new Dataset();
    }
    this.dataset = dataset;
    this.ResourceGroupName = resourceGroupName;
    this.DataFactoryName = factoryName;
}
// Creates one SQL Server table dataset per table returned by traerTablas(true),
// named "Dataset_<db>_<table>", logging each creation to the console.
private static void createDatasetsSQLServer(DataFactoryManagementClient client)
{
    var nombreTablas = DatosGrales.traerTablas(true);
    string nombreBD = DatosGrales.nombreBD;
    for (int i = 0; i < nombreTablas.Length; i++)
    {
        var sqlDataset = new DatasetResource(
            new SqlServerTableDataset(
                new LinkedServiceReference(DatosGrales.linkedServiceSQLServer),
                nombreTablas[i]));
        var dsResult = client.Datasets.CreateOrUpdate(
            DatosGrales.resourceGroup,
            DatosGrales.dataFactoryName,
            "Dataset_" + nombreBD + "_" + nombreTablas[i],
            sqlDataset);
        Console.Write((i + 1) + ". Dataset_" + nombreBD + "_" + nombreTablas[i] + " creado.\n");
    }
}
// End-to-end sample exercised by published documentation: create a DatasetClient,
// then create, retrieve, list, and delete a dataset. The #region Snippet:* markers
// are extracted verbatim into the docs — keep them (and the code inside) intact.
public async Task DatasetSample()
{
    #region Snippet:CreateDatasetClientPrep
#if SNIPPET
    // Replace the string below with your actual endpoint url.
    string endpoint = "<my-endpoint-url>";
    string storageName = "<my-storage-name>";
#else
    // Test runs pull the endpoint and storage name from the recorded test environment.
    string endpoint = TestEnvironment.EndpointUrl;
    string storageName = TestEnvironment.WorkspaceName + "-WorkspaceDefaultStorage";
#endif
    string dataSetName = "Test-Dataset";
    #endregion

    #region Snippet:CreateDatasetClient
    DatasetClient client = new DatasetClient(endpoint: new Uri(endpoint), credential: new DefaultAzureCredential());
    #endregion

    #region Snippet:CreateDataset
    // Bind the new dataset to the workspace default storage linked service.
    Dataset data = new Dataset(new LinkedServiceReference(LinkedServiceReferenceType.LinkedServiceReference, storageName));
    DatasetCreateOrUpdateDatasetOperation operation = client.StartCreateOrUpdateDataset(dataSetName, new DatasetResource(data));
    Response <DatasetResource> createdDataset = await operation.WaitForCompletionAsync();
    #endregion

    #region Snippet:RetrieveDataset
    DatasetResource retrievedDataset = client.GetDataset(dataSetName);
    #endregion

    #region Snippet:ListDatasets
    Pageable <DatasetResource> datasets = client.GetDatasetsByWorkspace();
    foreach (DatasetResource dataset in datasets)
    {
        System.Console.WriteLine(dataset.Name);
    }
    #endregion

    #region Snippet:DeleteDataset
    // Delete is also a long-running operation; wait for the final response.
    DatasetDeleteDatasetOperation deleteDatasetOperation = client.StartDeleteDataset(dataSetName);
    await deleteDatasetOperation.WaitForCompletionResponseAsync();
    #endregion
}
// Extension helper: creates (or updates) an Azure Table dataset bound to the given
// storage linked service, then echoes the serialized resource to the console.
public static void CreateAzureTableDataset(
    this DataFactoryManagementClient client,
    AzureConfig config,
    string storageLinkedServiceName,
    string storageDatasetName,
    string tableName)
{
    Console.WriteLine($"Creating dataset {storageDatasetName}...");
    var tableDataset = new AzureTableDataset
    {
        LinkedServiceName = new LinkedServiceReference { ReferenceName = storageLinkedServiceName },
        TableName = tableName
    };
    var blobDataset = new DatasetResource(tableDataset);
    client.Datasets.CreateOrUpdate(config.ResourceGroup, config.DataFactoryName, storageDatasetName, blobDataset);
    Console.WriteLine(SafeJsonConvert.SerializeObject(blobDataset, client.SerializationSettings));
}
// Creates one gzip-compressed Data Lake Store CSV dataset per table, named
// "Dataset_Descompresion_<db>_DataLakeStore_<table>", under "Transient Data/<db>/".
// NOTE(review): the loop starts at i = 1190, apparently to resume a previously
// interrupted run — confirm this offset is still intended before re-running.
// NOTE(review): a single AzureDataLakeStoreDataset instance is reused and mutated
// across iterations; each CreateOrUpdate call happens before the next mutation, but
// confirm the SDK does not hold a reference to it.
private static void createLakeDatasetsCompresion(DataFactoryManagementClient client)
{
    var nombreTablas = DatosGrales.traerTablas(false);
    AzureDataLakeStoreDataset dlsd = new AzureDataLakeStoreDataset();
    TextFormat txtfrm;
    DatasetResource DataLakeDataset;
    string nombreBD = DatosGrales.nombreBD;
    for (int i = 1190; i < nombreTablas.Length; i++)
    {
        dlsd.LinkedServiceName = new LinkedServiceReference(DatosGrales.linkedServiceLake);
        dlsd.FolderPath = "Transient Data/" + nombreBD + "/";
        dlsd.FileName = nombreTablas[i] + ".csv.gz";
        dlsd.Compression = new DatasetGZipCompression(null, "Optimal");
        txtfrm = new TextFormat();
        txtfrm.ColumnDelimiter = "|";
        txtfrm.EncodingName = "Windows-1252";
        txtfrm.FirstRowAsHeader = true;
        //txtfrm.NullValue = "";
        txtfrm.TreatEmptyAsNull = true;
        //txtfrm.QuoteChar = "{";
        dlsd.Format = txtfrm;
        DataLakeDataset = new DatasetResource(dlsd);
        // NOTE(review): this rename persists across iterations (nombreBD is declared
        // outside the loop), so after the first pass with "ClaimCenter" both the
        // dataset name AND the FolderPath of all later iterations use "Claim" —
        // confirm that is intended.
        if (nombreBD == "ClaimCenter") { nombreBD = "Claim"; }
        client.Datasets.CreateOrUpdate(DatosGrales.resourceGroup, DatosGrales.dataFactoryName, "Dataset_Descompresion_" + nombreBD + "_DataLakeStore_" + nombreTablas[i], DataLakeDataset);
        Console.Write((i + 1) + ". Dataset_Descompresion_" + nombreBD + "_DataLakeStore_" + nombreTablas[i] + " creado.\n");
    }
}
// Creates one uncompressed Data Lake Store CSV dataset per table, named
// "Dataset_<db>_DataLakeStore_<table>", under "Raw Data/<db>/".
// NOTE(review): the loop starts at i = 1050, apparently to resume a previously
// interrupted run — confirm this offset is still intended before re-running.
private static void createLakeDatasetsDescomp(DataFactoryManagementClient client)
{
    var nombreTablas = DatosGrales.traerTablas(false);
    AzureDataLakeStoreDataset dlsd1;
    TextFormat txtfrm1;
    DatasetResource DataLakeDataset1;
    for (int i = 1050; i < nombreTablas.Length; i++)
    {
        // nombreBD is re-read every iteration, so the "ClaimCenter" -> "Claim" rename
        // below only affects the current iteration's dataset name — FolderPath is
        // built before the rename and keeps the original name.
        string nombreBD = DatosGrales.nombreBD;
        dlsd1 = new AzureDataLakeStoreDataset();
        dlsd1.LinkedServiceName = new LinkedServiceReference(DatosGrales.linkedServiceLake);
        dlsd1.FolderPath = "Raw Data/" + nombreBD + "/";
        dlsd1.FileName = nombreTablas[i] + ".csv";
        txtfrm1 = new TextFormat();
        txtfrm1.ColumnDelimiter = "|";
        txtfrm1.EncodingName = "Windows-1252"; // default is utf-8, but it does not accept accents.
        txtfrm1.FirstRowAsHeader = true;
        txtfrm1.TreatEmptyAsNull = true;
        //txtfrm1.NullValue = "";
        dlsd1.Format = txtfrm1;
        DataLakeDataset1 = new DatasetResource(dlsd1);
        if (nombreBD == "ClaimCenter") { nombreBD = "Claim"; }
        client.Datasets.CreateOrUpdate(DatosGrales.resourceGroup, DatosGrales.dataFactoryName, "Dataset_" + nombreBD + "_DataLakeStore_" + nombreTablas[i], DataLakeDataset1);
        Console.Write((i + 1) + ". Dataset_" + nombreBD + "_DataLakeStore_" + nombreTablas[i] + " creado.\n");
    }
}
// Records the "Datasets_Get" example by performing a live Get through the interceptor.
private void CaptureDatasets_Get()
{
    interceptor.CurrentExampleName = "Datasets_Get";
    var resource = client.Datasets.Get(secrets.ResourceGroupName, secrets.FactoryName, datasetName);
}
// Records the "Datasets_Update" example by re-creating the dataset with a new description.
private void CaptureDatasets_Update()
{
    interceptor.CurrentExampleName = "Datasets_Update";
    DatasetResource resourceIn = GetDatasetResource("Example description");
    DatasetResource resource = client.Datasets.CreateOrUpdate(
        secrets.ResourceGroupName, secrets.FactoryName, datasetName, resourceIn);
}
// Tutorial entry point: provisions a data factory, a blob + Azure SQL linked-service
// pair, matching datasets, and a copy pipeline, then triggers a run, polls it to
// completion, and prints the copy activity's output or error.
static void Main(string[] args)
{
    // Set variables
    string tenantID = "<tenant ID>";
    string applicationId = "<Activity directory application ID>";
    string authenticationKey = "<Activity directory application authentication key>";
    string subscriptionId = "<subscription ID>";
    string resourceGroup = "<resource group name>";
    // Note that the data stores (Azure Storage, Azure SQL Database, etc.) and computes (HDInsight, etc.) used by data factory can be in other regions.
    string region = "East US";
    string dataFactoryName = "<name of the data factory>"; //must be globally unique

    // Specify the source Azure Blob information
    string storageAccount = "<name of Azure Storage account>";
    string storageKey = "<key for your Azure Storage account>";
    string inputBlobPath = "adfv2tutorial/";
    string inputBlobName = "inputEmp.txt";

    // Specify the sink Azure SQL Database information
    // NOTE(review): this connection string embeds a user id and password in source —
    // replace with a Key Vault / secret-store reference before real use.
    string azureSqlConnString = "Server=tcp:<name of Azure SQL Server>.database.windows.net,1433;Database=spsqldb;User ID=spelluru;Password=Sowmya123;Trusted_Connection=False;Encrypt=True;Connection Timeout=30";
    string azureSqlTableName = "dbo.emp";

    string storageLinkedServiceName = "AzureStorageLinkedService";
    string sqlDbLinkedServiceName = "AzureSqlDbLinkedService";
    string blobDatasetName = "BlobDataset";
    string sqlDatasetName = "SqlDataset";
    string pipelineName = "Adfv2TutorialBlobToSqlCopy";

    // Authenticate and create a data factory management client
    var context = new AuthenticationContext("https://login.windows.net/" + tenantID);
    ClientCredential cc = new ClientCredential(applicationId, authenticationKey);
    AuthenticationResult result = context.AcquireTokenAsync("https://management.azure.com/", cc).Result;
    ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);
    var client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };

    // Create data factory
    Console.WriteLine("Creating data factory " + dataFactoryName + "...");
    Factory dataFactory = new Factory { Location = region, Identity = new FactoryIdentity() };
    client.Factories.CreateOrUpdate(resourceGroup, dataFactoryName, dataFactory);
    Console.WriteLine(SafeJsonConvert.SerializeObject(dataFactory, client.SerializationSettings));
    // Block until provisioning completes, polling once per second.
    while (client.Factories.Get(resourceGroup, dataFactoryName).ProvisioningState == "PendingCreation")
    {
        System.Threading.Thread.Sleep(1000);
    }

    // Create an Azure Storage linked service
    Console.WriteLine("Creating linked service " + storageLinkedServiceName + "...");
    LinkedServiceResource storageLinkedService = new LinkedServiceResource(
        new AzureStorageLinkedService
        {
            ConnectionString = new SecureString("DefaultEndpointsProtocol=https;AccountName=" + storageAccount + ";AccountKey=" + storageKey)
        }
    );
    client.LinkedServices.CreateOrUpdate(resourceGroup, dataFactoryName, storageLinkedServiceName, storageLinkedService);
    Console.WriteLine(SafeJsonConvert.SerializeObject(storageLinkedService, client.SerializationSettings));

    // Create an Azure SQL Database linked service
    Console.WriteLine("Creating linked service " + sqlDbLinkedServiceName + "...");
    LinkedServiceResource sqlDbLinkedService = new LinkedServiceResource(
        new AzureSqlDatabaseLinkedService
        {
            ConnectionString = new SecureString(azureSqlConnString)
        }
    );
    client.LinkedServices.CreateOrUpdate(resourceGroup, dataFactoryName, sqlDbLinkedServiceName, sqlDbLinkedService);
    Console.WriteLine(SafeJsonConvert.SerializeObject(sqlDbLinkedService, client.SerializationSettings));

    // Create a Azure Blob dataset (pipe-delimited text with a two-column schema)
    Console.WriteLine("Creating dataset " + blobDatasetName + "...");
    DatasetResource blobDataset = new DatasetResource(
        new AzureBlobDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = storageLinkedServiceName },
            FolderPath = inputBlobPath,
            FileName = inputBlobName,
            Format = new TextFormat { ColumnDelimiter = "|" },
            Structure = new List <DatasetDataElement>
            {
                new DatasetDataElement { Name = "FirstName", Type = "String" },
                new DatasetDataElement { Name = "LastName", Type = "String" }
            }
        }
    );
    client.Datasets.CreateOrUpdate(resourceGroup, dataFactoryName, blobDatasetName, blobDataset);
    Console.WriteLine(SafeJsonConvert.SerializeObject(blobDataset, client.SerializationSettings));

    // Create a Azure SQL Database dataset
    Console.WriteLine("Creating dataset " + sqlDatasetName + "...");
    DatasetResource sqlDataset = new DatasetResource(
        new AzureSqlTableDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = sqlDbLinkedServiceName },
            TableName = azureSqlTableName
        }
    );
    client.Datasets.CreateOrUpdate(resourceGroup, dataFactoryName, sqlDatasetName, sqlDataset);
    Console.WriteLine(SafeJsonConvert.SerializeObject(sqlDataset, client.SerializationSettings));

    // Create a pipeline with copy activity (blob source -> SQL sink)
    Console.WriteLine("Creating pipeline " + pipelineName + "...");
    PipelineResource pipeline = new PipelineResource
    {
        Activities = new List <Activity>
        {
            new CopyActivity
            {
                Name = "CopyFromBlobToSQL",
                Inputs = new List <DatasetReference> { new DatasetReference() { ReferenceName = blobDatasetName } },
                Outputs = new List <DatasetReference> { new DatasetReference { ReferenceName = sqlDatasetName } },
                Source = new BlobSource { },
                Sink = new SqlSink { }
            }
        }
    };
    client.Pipelines.CreateOrUpdate(resourceGroup, dataFactoryName, pipelineName, pipeline);
    Console.WriteLine(SafeJsonConvert.SerializeObject(pipeline, client.SerializationSettings));

    // Create a pipeline run
    Console.WriteLine("Creating pipeline run...");
    CreateRunResponse runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(resourceGroup, dataFactoryName, pipelineName).Result.Body;
    Console.WriteLine("Pipeline run ID: " + runResponse.RunId);

    // Monitor the pipeline run, polling every 15 seconds while it is in progress.
    Console.WriteLine("Checking pipeline run status...");
    PipelineRun pipelineRun;
    while (true)
    {
        pipelineRun = client.PipelineRuns.Get(resourceGroup, dataFactoryName, runResponse.RunId);
        Console.WriteLine("Status: " + pipelineRun.Status);
        if (pipelineRun.Status == "InProgress")
        {
            System.Threading.Thread.Sleep(15000);
        }
        else
        {
            break;
        }
    }

    // Check the copy activity run details
    Console.WriteLine("Checking copy activity run details...");
    List <ActivityRun> activityRuns = client.ActivityRuns.ListByPipelineRun(
        resourceGroup, dataFactoryName, runResponse.RunId,
        DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow.AddMinutes(10), pipelineName).ToList();
    if (pipelineRun.Status == "Succeeded")
    {
        Console.WriteLine(activityRuns.First().Output);
        //SaveToJson(SafeJsonConvert.SerializeObject(activityRuns.First().Output, client.SerializationSettings), "ActivityRunResult.json", folderForJsons);
    }
    else
    {
        Console.WriteLine(activityRuns.First().Error);
    }

    Console.WriteLine("\nPress any key to exit...");
    Console.ReadKey();
}
// End-to-end scenario test: factory -> linked service -> dataset -> pipeline -> trigger,
// each created, updated, and verified in order, then deleted in reverse dependency
// order. The ordering matters: each resource references the one created before it.
public async Task DataFactoryE2E()
{
    Factory expectedFactory = new Factory(location: FactoryLocation);
    Func <DataFactoryManagementClient, Task> action = async(client) =>
    {
        #region DataFactoryTests
        await DataFactoryScenarioTests.Create(client, this.ResourceGroupName, this.DataFactoryName, expectedFactory);
        var tags = new Dictionary <string, string>
        {
            { "exampleTag", "exampleValue" }
        };
        await DataFactoryScenarioTests.Update(client, this.ResourceGroupName, this.DataFactoryName, expectedFactory, new FactoryUpdateParameters { Tags = tags });
        #endregion
        #region LinkedServiceTests
        // Create with no description, update with one, then verify via Get/List.
        var expectedLinkedService = LinkedServiceScenarioTests.GetLinkedServiceResource(null);
        await LinkedServiceScenarioTests.Create(client, this.ResourceGroupName, this.DataFactoryName, linkedServiceName, expectedLinkedService);
        var updatedLinkedService = LinkedServiceScenarioTests.GetLinkedServiceResource("linkedService description");
        await LinkedServiceScenarioTests.Update(client, this.ResourceGroupName, this.DataFactoryName, linkedServiceName, updatedLinkedService);
        await LinkedServiceScenarioTests.GetList(client, this.ResourceGroupName, this.DataFactoryName, linkedServiceName, updatedLinkedService);
        #endregion
        #region DatasetTests
        // Same create/update/verify cycle for a dataset bound to the linked service above.
        DatasetResource expectedDataset = DatasetScenarioTests.GetDatasetResource(null, linkedServiceName);
        await DatasetScenarioTests.Create(client, this.ResourceGroupName, this.DataFactoryName, datasetName, expectedDataset);
        await DatasetScenarioTests.GetList(client, this.ResourceGroupName, this.DataFactoryName, datasetName, expectedDataset);
        DatasetResource updatedDataset = DatasetScenarioTests.GetDatasetResource("dataset description", linkedServiceName);
        await DatasetScenarioTests.Update(client, this.ResourceGroupName, this.DataFactoryName, datasetName, updatedDataset);
        await DatasetScenarioTests.GetList(client, this.ResourceGroupName, this.DataFactoryName, datasetName, updatedDataset);
        #endregion
        #region PipelineTests
        // Same cycle for a pipeline referencing the dataset above.
        PipelineResource expectedPipeline = PipelineScenarioTests.GetPipelineResource(null, datasetName);
        await PipelineScenarioTests.Create(client, this.ResourceGroupName, this.DataFactoryName, pipelineName, expectedPipeline);
        await PipelineScenarioTests.GetList(client, this.ResourceGroupName, this.DataFactoryName, pipelineName, expectedPipeline);
        PipelineResource updatedPipeline = PipelineScenarioTests.GetPipelineResource("pipeline description", datasetName);
        await PipelineScenarioTests.Update(client, this.ResourceGroupName, this.DataFactoryName, pipelineName, updatedPipeline);
        await PipelineScenarioTests.GetList(client, this.ResourceGroupName, this.DataFactoryName, pipelineName, updatedPipeline);
        #endregion
        #region TriggerTests
        // Same cycle for a trigger referencing the pipeline above.
        TriggerResource expectedTrigger = TriggerScenarioTests.GetTriggerResource(null, pipelineName, outputBlobName);
        await TriggerScenarioTests.Create(client, this.ResourceGroupName, this.DataFactoryName, triggerName, expectedTrigger);
        await TriggerScenarioTests.GetList(client, this.ResourceGroupName, this.DataFactoryName, triggerName, expectedTrigger);
        TriggerResource updatedTrigger = TriggerScenarioTests.GetTriggerResource("trigger description", pipelineName, outputBlobName);
        await TriggerScenarioTests.Update(client, this.ResourceGroupName, this.DataFactoryName, triggerName, updatedTrigger);
        await TriggerScenarioTests.GetList(client, this.ResourceGroupName, this.DataFactoryName, triggerName, updatedTrigger);
        #endregion
        #region TestCleanup
        // Delete in reverse dependency order so no resource is removed while referenced.
        await TriggerScenarioTests.Delete(client, this.ResourceGroupName, this.DataFactoryName, triggerName);
        await PipelineScenarioTests.Delete(client, this.ResourceGroupName, this.DataFactoryName, pipelineName);
        await DatasetScenarioTests.Delete(client, this.ResourceGroupName, this.DataFactoryName, datasetName);
        await LinkedServiceScenarioTests.Delete(client, this.ResourceGroupName, this.DataFactoryName, linkedServiceName);
        await DataFactoryScenarioTests.Delete(client, this.ResourceGroupName, this.DataFactoryName);
        #endregion
    };
    Func <DataFactoryManagementClient, Task> finallyAction = async(client) => { };
    await this.RunTest(action, finallyAction);
}
// Sample entry point: provisions a data factory, an on-premises file-share linked
// service, an Azure SQL DB linked service, the two Customer datasets, a copy
// pipeline, and a daily schedule trigger, then runs the pipeline once.
// NOTE(review): credentials ("MonPassword", the SQL connection string password) are
// hard-coded in source — move them to a secret store / Key Vault before real use.
static void Main(string[] args)
{
    // Authenticate against Azure with the svc_adf application.
    AuthenticationContext context = new AuthenticationContext("https://login.windows.net/" + _repertoireId);
    ClientCredential cc = new ClientCredential(_applicationId, _applicationKey);
    AuthenticationResult result = context.AcquireTokenAsync("https://management.azure.com/", cc).Result;
    ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);
    DataFactoryManagementClient ADFclient = new DataFactoryManagementClient(cred) { SubscriptionId = _abonnementId };

    // Create the Azure Data Factory and wait for provisioning to finish.
    Factory dataFactory = new Factory { Location = _region, Identity = new FactoryIdentity() };
    ADFclient.Factories.CreateOrUpdate(_ressourceGroup, _dataFactoryName, dataFactory);
    Console.WriteLine(SafeJsonConvert.SerializeObject(dataFactory, ADFclient.SerializationSettings));
    var toto = ADFclient.Factories.Get(_ressourceGroup, _dataFactoryName).ProvisioningState;
    while (ADFclient.Factories.Get(_ressourceGroup, _dataFactoryName).ProvisioningState == "PendingCreation")
    {
        System.Threading.Thread.Sleep(1000);
        Console.WriteLine("*");
    }

    //// Create a self-hosted Integration Runtime
    //IntegrationRuntimeResource integrationRuntimeResource = new IntegrationRuntimeResource(
    //new SelfHostedIntegrationRuntime
    //{
    //    Description = "L'Integration Runtime du projet ..."
    //}
    //);
    //ADFclient.IntegrationRuntimes.CreateOrUpdate(_ressourceGroup, _dataFactoryName, _IntegrationRuntimeName, integrationRuntimeResource);
    //Console.WriteLine(SafeJsonConvert.SerializeObject(integrationRuntimeResource, ADFclient.SerializationSettings));
    //Console.WriteLine("Authkey : " + ADFclient.IntegrationRuntimes.ListAuthKeys(_ressourceGroup, _dataFactoryName, _IntegrationRuntimeName).AuthKey1);

    // Linked service for the on-premises file share (reached via the self-hosted IR).
    IntegrationRuntimeReference integrationRuntimeReference = new IntegrationRuntimeReference(_IntegrationRuntimeName);
    SecureString secureString = new SecureString("MonPassword");
    LinkedServiceResource FS_PartageOnPremise = new LinkedServiceResource(
        new FileServerLinkedService
        {
            Description = "Service lié référençant un espace partagé dans le réseau privé de l'entreprise",
            ConnectVia = integrationRuntimeReference,
            Host = @"\\IRAutoHeberge\Dépôt",
            UserId = "chsauget",
            Password = secureString
        }
    );
    ADFclient.LinkedServices.CreateOrUpdate(_ressourceGroup, _dataFactoryName, _FS_PartageOnPremiseName, FS_PartageOnPremise);
    Console.WriteLine(SafeJsonConvert.SerializeObject(FS_PartageOnPremise, ADFclient.SerializationSettings));

    // Linked service for the Azure SQL database.
    SecureString SQLsecureString = new SecureString("integrated security=False;encrypt=True;connection timeout=30;data source=adflivre.database.windows.net;initial catalog=advwrks;user id=chsauget;Password=toto");
    LinkedServiceResource SQDB_AdventureWorks = new LinkedServiceResource(
        new AzureSqlDatabaseLinkedService
        {
            Description = "Service lié référençant un espace partagé dans le réseau privé de l'entreprise",
            ConnectionString = SQLsecureString,
        }
    );
    ADFclient.LinkedServices.CreateOrUpdate(_ressourceGroup, _dataFactoryName, _SQDB_AdventureWorksName, SQDB_AdventureWorks);
    Console.WriteLine(SafeJsonConvert.SerializeObject(SQDB_AdventureWorks, ADFclient.SerializationSettings));

    // Source dataset: Customer.csv on the on-premises share.
    DatasetResource FS_Customer = new DatasetResource(
        new FileShareDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = _FS_PartageOnPremiseName },
            FolderPath = "AdventureWorks CSV",
            FileName = "Customer.csv",
            Format = new TextFormat { ColumnDelimiter = "\t", RowDelimiter = "\n", FirstRowAsHeader = false },
            Structure = new List <DatasetDataElement>
            {
                new DatasetDataElement { Name = "CustomerID", Type = "Int32" },
                new DatasetDataElement { Name = "PersonID", Type = "Int32" },
                new DatasetDataElement { Name = "StoreID", Type = "Int32" },
                new DatasetDataElement { Name = "TerritoryID", Type = "Int32" },
                new DatasetDataElement { Name = "AccountNumber", Type = "String" },
                new DatasetDataElement { Name = "rowguid", Type = "String" },
                new DatasetDataElement { Name = "ModifiedDate", Type = "DateTime" }
            }
        },
        name: _FS_CustomerName
    );
    ADFclient.Datasets.CreateOrUpdate(_ressourceGroup, _dataFactoryName, _FS_CustomerName, FS_Customer);
    Console.WriteLine(SafeJsonConvert.SerializeObject(FS_Customer, ADFclient.SerializationSettings));

    // Sink dataset: col.Customer table in the Azure SQL database.
    DatasetResource SQDB_Col_Customer = new DatasetResource(
        new AzureSqlTableDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = _SQDB_AdventureWorksName },
            TableName = "col.Customer"
        },
        name: _SQDB_Col_CustomerName
    );
    // BUGFIX: this dataset was previously registered under _SQDB_AdventureWorksName
    // (the linked-service name), so the _SQDB_Col_CustomerName dataset referenced by
    // the copy activity below was never actually created.
    ADFclient.Datasets.CreateOrUpdate(_ressourceGroup, _dataFactoryName, _SQDB_Col_CustomerName, SQDB_Col_Customer);
    Console.WriteLine(SafeJsonConvert.SerializeObject(SQDB_Col_Customer, ADFclient.SerializationSettings));

    // Copy activity: Customer.csv -> col.Customer.
    CopyActivity CustomerCopy = new CopyActivity
    {
        Name = "Copy - Customer",
        Inputs = new List <DatasetReference> { new DatasetReference() { ReferenceName = _FS_CustomerName } },
        Outputs = new List <DatasetReference> { new DatasetReference() { ReferenceName = _SQDB_Col_CustomerName } },
        Source = new FileSystemSource { },
        // BUGFIX: the sink targets an Azure SQL table, so it must be a SqlSink —
        // the previous AzureTableSink is for Azure Table Storage and would fail at run time.
        Sink = new SqlSink { }
    };

    // Pipeline hosting the copy activity.
    PipelineResource PipelineCustomer = new PipelineResource
    {
        Activities = new List <Activity> { CustomerCopy },
        Folder = new PipelineFolder { Name = "AdventureWorks" }
    };
    ADFclient.Pipelines.CreateOrUpdate(_ressourceGroup, _dataFactoryName, "Col_Customer", PipelineCustomer);
    Console.WriteLine(SafeJsonConvert.SerializeObject(PipelineCustomer, ADFclient.SerializationSettings));

    // Trigger a pipeline run and poll it until it leaves the InProgress state.
    CreateRunResponse runResponse = ADFclient.Pipelines.CreateRunWithHttpMessagesAsync(_ressourceGroup, _dataFactoryName, "Col_Customer").Result.Body;
    PipelineRun run = ADFclient.PipelineRuns.Get(_ressourceGroup, _dataFactoryName, runResponse.RunId);
    while (run.Status == "InProgress")
    {
        // BUGFIX: pause between polls — the original loop issued back-to-back Get
        // calls with no delay, hammering the service.
        System.Threading.Thread.Sleep(1000);
        run = ADFclient.PipelineRuns.Get(_ressourceGroup, _dataFactoryName, runResponse.RunId);
        Console.WriteLine("Status: " + run.Status);
    }

    // Daily schedule trigger starting 2019-03-30 at 01:00 UTC.
    TriggerResource scheduleTrigger = new TriggerResource(
        new ScheduleTrigger
        {
            Pipelines = new List <TriggerPipelineReference>
            {
                new TriggerPipelineReference { PipelineReference = new PipelineReference("Col_Customer") }
            },
            Recurrence = new ScheduleTriggerRecurrence
            {
                StartTime = DateTime.Parse("2019-03-30T01:00:00Z"),
                Frequency = "Day",
                Interval = 1
            }
        },
        name: "Daily_01h_Schedule"
    );
    ADFclient.Triggers.CreateOrUpdate(_ressourceGroup, _dataFactoryName, "Daily_01h_Schedule", scheduleTrigger);
    Console.WriteLine(SafeJsonConvert.SerializeObject(scheduleTrigger, ADFclient.SerializationSettings));
    ADFclient.Triggers.BeginStart(_ressourceGroup, _dataFactoryName, "Daily_01h_Schedule");
}
// Synchronously creates or updates the named dataset, wrapping the REST call in a
// client-diagnostics scope so failures are recorded before being rethrown.
public virtual Response <DatasetResource> CreateOrUpdateDataset(string datasetName, DatasetResource dataset, string ifMatch = null, CancellationToken cancellationToken = default)
{
    using var scope = _clientDiagnostics.CreateScope("DatasetClient.CreateOrUpdateDataset");
    scope.Start();
    try
    {
        Response <DatasetResource> response = RestClient.CreateOrUpdateDataset(datasetName, dataset, ifMatch, cancellationToken);
        return response;
    }
    catch (Exception e)
    {
        // Mark the scope as failed for telemetry, then preserve the original stack trace.
        scope.Failed(e);
        throw;
    }
}
/// <summary>
/// Creates or updates a dataset.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='factoryName'>
/// The factory name.
/// </param>
/// <param name='datasetName'>
/// The dataset name.
/// </param>
/// <param name='dataset'>
/// Dataset resource definition.
/// </param>
/// <param name='ifMatch'>
/// ETag of the dataset entity. Should only be specified for update, for which
/// it should match existing entity or can be * for unconditional update.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task <DatasetResource> CreateOrUpdateAsync(this IDatasetsOperations operations, string resourceGroupName, string factoryName, string datasetName, DatasetResource dataset, string ifMatch = default(string), CancellationToken cancellationToken = default(CancellationToken))
{
    var httpResponse = await operations.CreateOrUpdateWithHttpMessagesAsync(resourceGroupName, factoryName, datasetName, dataset, ifMatch, null, cancellationToken).ConfigureAwait(false);
    // Dispose the HTTP operation response after extracting the body.
    using (httpResponse)
    {
        return httpResponse.Body;
    }
}
// Asserts the dataset returned by the service has the expected ARM sub-resource
// identity and carries an AzureBlobDataset payload.
private static void ValidateDataset(DataFactoryManagementClient client, string resourceGroupName, string dataFactoryName, DatasetResource expected, DatasetResource actual, string expectedName)
{
    ValidateSubResource(client, resourceGroupName, actual, dataFactoryName, expectedName, "datasets");
    Assert.IsType <AzureBlobDataset>(actual.Properties);
}
// Verifies both the point Get and the ListByFactory paths return the expected
// dataset with HTTP 200.
internal static async Task GetList(DataFactoryManagementClient client, string resourceGroupName, string dataFactoryName, string datasetName, DatasetResource expectedDataset)
{
    var getResponse = await client.Datasets.GetWithHttpMessagesAsync(resourceGroupName, dataFactoryName, datasetName);
    ValidateDataset(client, resourceGroupName, dataFactoryName, expectedDataset, getResponse.Body, datasetName);
    Assert.Equal(HttpStatusCode.OK, getResponse.Response.StatusCode);

    var listResponse = await client.Datasets.ListByFactoryWithHttpMessagesAsync(resourceGroupName, dataFactoryName);
    ValidateDataset(client, resourceGroupName, dataFactoryName, expectedDataset, listResponse.Body.First(), datasetName);
    Assert.Equal(HttpStatusCode.OK, listResponse.Response.StatusCode);
}
/// <summary>
/// Tutorial sample: copies a pipe-delimited blob file into an Azure SQL
/// Database table via a Data Factory copy pipeline, then polls the run and
/// prints the copy activity's output or error.
/// </summary>
static void Main(string[] args)
{
    // SECURITY FIX: the original version of this sample committed a live AAD
    // application secret, a storage account key and a SQL password directly
    // in source. Those credentials must be rotated; all secrets are now read
    // from environment variables instead of being hard-coded.
    string tenantID = Environment.GetEnvironmentVariable("AZURE_TENANT_ID");
    string applicationId = Environment.GetEnvironmentVariable("AZURE_CLIENT_ID");
    string authenticationKey = Environment.GetEnvironmentVariable("AZURE_CLIENT_SECRET");
    string subscriptionId = Environment.GetEnvironmentVariable("AZURE_SUBSCRIPTION_ID");
    string resourceGroup = "dilipazureprojectresourcegroup";
    string region = "EAST US";
    string dataFactoryName = "dilipdatafactoryy";

    // Specify the source Azure Blob information.
    string storageAccount = "dilipazurestorageaccount";
    string storageKey = Environment.GetEnvironmentVariable("AZURE_STORAGE_KEY");
    string inputBlobPath = "dilipazurecontainer/";
    string inputBlobName = "PATIENT.txt";

    // Specify the sink Azure SQL Database information. The connection string
    // embeds the password, so it comes from the environment as well.
    string azureSqlConnString = Environment.GetEnvironmentVariable("AZURE_SQL_CONNSTRING");
    string azureSqlTableName = "dbo.Patient";

    string storageLinkedServiceName = "AzureStorageLinkedService";
    string sqlDbLinkedServiceName = "AzureSqlDbLinkedService";
    string blobDatasetName = "BlobDataset";
    string sqlDatasetName = "SqlDataset";
    string pipelineName = "Adfv2TutorialBlobToSqlCopyy";

    // Authenticate with AAD client credentials and create a data factory
    // management client bound to the subscription.
    var context = new AuthenticationContext("https://login.windows.net/" + tenantID);
    ClientCredential cc = new ClientCredential(applicationId, authenticationKey);
    AuthenticationResult result = context.AcquireTokenAsync("https://management.azure.com/", cc).Result;
    ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);
    var client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };

    // Create the data factory and poll until provisioning completes.
    Console.WriteLine("Creating a data factory " + dataFactoryName + "...");
    Factory dataFactory = new Factory { Location = region, Identity = new FactoryIdentity() };
    client.Factories.CreateOrUpdate(resourceGroup, dataFactoryName, dataFactory);
    Console.WriteLine(SafeJsonConvert.SerializeObject(dataFactory, client.SerializationSettings));
    while (client.Factories.Get(resourceGroup, dataFactoryName).ProvisioningState == "PendingCreation")
    {
        System.Threading.Thread.Sleep(1000);
    }

    // Create an Azure Storage linked service (the copy source).
    Console.WriteLine("Creating linked service " + storageLinkedServiceName + "...");
    LinkedServiceResource storageLinkedService = new LinkedServiceResource(
        new AzureStorageLinkedService
        {
            ConnectionString = new SecureString("DefaultEndpointsProtocol=https;AccountName=" + storageAccount + ";AccountKey=" + storageKey)
        }
    );
    client.LinkedServices.CreateOrUpdate(resourceGroup, dataFactoryName, storageLinkedServiceName, storageLinkedService);
    Console.WriteLine(SafeJsonConvert.SerializeObject(storageLinkedService, client.SerializationSettings));

    // Create an Azure SQL Database linked service (the copy sink).
    Console.WriteLine("Creating linked service " + sqlDbLinkedServiceName + "...");
    LinkedServiceResource sqlDbLinkedService = new LinkedServiceResource(
        new AzureSqlDatabaseLinkedService { ConnectionString = new SecureString(azureSqlConnString) }
    );
    client.LinkedServices.CreateOrUpdate(resourceGroup, dataFactoryName, sqlDbLinkedServiceName, sqlDbLinkedService);
    Console.WriteLine(SafeJsonConvert.SerializeObject(sqlDbLinkedService, client.SerializationSettings));

    // Create an Azure Blob dataset describing the pipe-delimited patient file,
    // including an explicit column structure for the copy mapping.
    Console.WriteLine("Creating dataset " + blobDatasetName + "...");
    DatasetResource blobDataset = new DatasetResource(
        new AzureBlobDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = storageLinkedServiceName },
            FolderPath = inputBlobPath,
            FileName = inputBlobName,
            Format = new TextFormat { ColumnDelimiter = "|" },
            Structure = new List<DatasetDataElement>
            {
                new DatasetDataElement { Name = "ENTERPRISEID", Type = "String" },
                new DatasetDataElement { Name = "LAST_NAME", Type = "String" },
                new DatasetDataElement { Name = "FIRST_NAME", Type = "String" },
                new DatasetDataElement { Name = "DOB", Type = "DATE" },
                new DatasetDataElement { Name = "GENDER", Type = "String" },
                new DatasetDataElement { Name = "SSN", Type = "String" },
                new DatasetDataElement { Name = "ADDRESS1", Type = "String" },
                new DatasetDataElement { Name = "ZIP", Type = "String" },
                new DatasetDataElement { Name = "MRN", Type = "String" },
                new DatasetDataElement { Name = "CITY", Type = "String" },
                new DatasetDataElement { Name = "STATE", Type = "String" },
                new DatasetDataElement { Name = "PHONE", Type = "String" },
                new DatasetDataElement { Name = "EMAIL", Type = "String" },
                new DatasetDataElement { Name = "PRVDR_ID", Type = "String" }
            }
        }
    );
    client.Datasets.CreateOrUpdate(resourceGroup, dataFactoryName, blobDatasetName, blobDataset);
    Console.WriteLine(SafeJsonConvert.SerializeObject(blobDataset, client.SerializationSettings));

    // Create an Azure SQL Database dataset pointing at the sink table.
    Console.WriteLine("Creating dataset " + sqlDatasetName + "...");
    DatasetResource sqlDataset = new DatasetResource(
        new AzureSqlTableDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = sqlDbLinkedServiceName },
            TableName = azureSqlTableName
        }
    );
    client.Datasets.CreateOrUpdate(resourceGroup, dataFactoryName, sqlDatasetName, sqlDataset);
    Console.WriteLine(SafeJsonConvert.SerializeObject(sqlDataset, client.SerializationSettings));

    // Create a pipeline with a single blob-to-SQL copy activity.
    Console.WriteLine("Creating pipeline " + pipelineName + "...");
    PipelineResource pipeline = new PipelineResource
    {
        Activities = new List<Activity>
        {
            new CopyActivity
            {
                Name = "CopyFromBlobToSQL",
                Inputs = new List<DatasetReference> { new DatasetReference() { ReferenceName = blobDatasetName } },
                Outputs = new List<DatasetReference> { new DatasetReference { ReferenceName = sqlDatasetName } },
                Source = new BlobSource { },
                Sink = new SqlSink { }
            }
        }
    };
    client.Pipelines.CreateOrUpdate(resourceGroup, dataFactoryName, pipelineName, pipeline);
    Console.WriteLine(SafeJsonConvert.SerializeObject(pipeline, client.SerializationSettings));

    // Trigger a pipeline run.
    Console.WriteLine("Creating pipeline run...");
    CreateRunResponse runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(resourceGroup, dataFactoryName, pipelineName).Result.Body;
    Console.WriteLine("Pipeline run ID: " + runResponse.RunId);

    // Monitor the pipeline run until it leaves the in-progress states.
    Console.WriteLine("Checking pipeline run status...");
    PipelineRun pipelineRun;
    while (true)
    {
        pipelineRun = client.PipelineRuns.Get(resourceGroup, dataFactoryName, runResponse.RunId);
        Console.WriteLine("Status: " + pipelineRun.Status);
        // BUGFIX: keep polling while the run is still queued as well (the
        // other samples in this file already do). Previously a "Queued" run
        // fell straight through to the result check below.
        if (pipelineRun.Status == "InProgress" || pipelineRun.Status == "Queued")
        {
            System.Threading.Thread.Sleep(15000);
        }
        else
        {
            break;
        }
    }

    // Check the copy activity run details within a +/-10 minute window.
    Console.WriteLine("Checking copy activity run details...");
    List<ActivityRun> activityRuns = client.ActivityRuns.ListByPipelineRun(
        resourceGroup, dataFactoryName, runResponse.RunId,
        DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow.AddMinutes(10)).ToList();
    if (pipelineRun.Status == "Succeeded")
    {
        Console.WriteLine(activityRuns.First().Output);
    }
    else
    {
        Console.WriteLine(activityRuns.First().Error);
    }

    Console.WriteLine("\nPress any key to exit...");
    Console.ReadKey();
}
/// <summary>
/// Quick-start sample: creates a data factory, a parameterized blob dataset
/// and a blob-to-blob copy pipeline, runs the pipeline and reports the
/// activity result. Authenticates via AzureServiceTokenProvider.
/// </summary>
static async System.Threading.Tasks.Task Main(string[] args)
{
    // Set variables.
    // SECURITY FIX: the original version committed a real storage account key
    // in source; that key must be rotated, and secrets now come from the
    // environment. The unused tenantID/User/applicationId/authenticationKey
    // variables were removed: authentication goes through
    // AzureServiceTokenProvider, which never read them.
    string subscriptionId = "b67993e8-0937-4812-af89-520a495da302";
    string resourceGroup = "vchmeha";
    string region = "West Central US";
    string dataFactoryName = "ADFv2HaHu";
    string storageAccount = "fromblob";
    string storageKey = Environment.GetEnvironmentVariable("AZURE_STORAGE_KEY");
    // specify the container and input folder from which all files
    // need to be copied to the output folder.
    string inputBlobPath = "adftutorial/input";
    // specify the container and output folder where the files are copied
    string outputBlobPath = "adftutorial/output";
    // name of the Azure Storage linked service, blob dataset, and the pipeline
    string storageLinkedServiceName = "AzureStorageLinkedService";
    string blobDatasetName = "BlobDataset";
    string pipelineName = "Adfv2QuickStartPipeline";

    // Authenticate and create a data factory management client.
    var azureServiceTokenProvider = new AzureServiceTokenProvider();
    var token = await azureServiceTokenProvider.GetAccessTokenAsync("https://management.azure.com/");
    var cred = new TokenCredentials(token);
    var client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };

    // Create a data factory and poll until it leaves PendingCreation.
    Console.WriteLine("Creating data factory " + dataFactoryName + "...");
    Factory dataFactory = new Factory { Location = region, Identity = new FactoryIdentity() };
    client.Factories.CreateOrUpdate(resourceGroup, dataFactoryName, dataFactory);
    Console.WriteLine(
        SafeJsonConvert.SerializeObject(dataFactory, client.SerializationSettings));
    while (client.Factories.Get(resourceGroup, dataFactoryName).ProvisioningState == "PendingCreation")
    {
        // BUGFIX: await instead of Thread.Sleep -- don't block inside async Main.
        await System.Threading.Tasks.Task.Delay(1000);
    }

    // Create an Azure Storage linked service (account-key auth).
    Console.WriteLine("Creating linked service " + storageLinkedServiceName + "...");
    LinkedServiceResource storageLinkedService = new LinkedServiceResource(
        new AzureStorageLinkedService
        {
            ConnectionString = new SecureString(
                "DefaultEndpointsProtocol=https;AccountName=" + storageAccount + ";AccountKey=" + storageKey)
        }
    );
    client.LinkedServices.CreateOrUpdate(
        resourceGroup, dataFactoryName, storageLinkedServiceName, storageLinkedService);
    Console.WriteLine(SafeJsonConvert.SerializeObject(
        storageLinkedService, client.SerializationSettings));

    // Create an Azure Blob dataset whose folder path is a runtime parameter
    // ("path"), so the same dataset serves as both copy input and output.
    Console.WriteLine("Creating dataset " + blobDatasetName + "...");
    DatasetResource blobDataset = new DatasetResource(
        new AzureBlobDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = storageLinkedServiceName },
            FolderPath = new Expression { Value = "@{dataset().path}" },
            Parameters = new Dictionary<string, ParameterSpecification>
            {
                { "path", new ParameterSpecification { Type = ParameterType.String } }
            }
        }
    );
    client.Datasets.CreateOrUpdate(
        resourceGroup, dataFactoryName, blobDatasetName, blobDataset);
    Console.WriteLine(
        SafeJsonConvert.SerializeObject(blobDataset, client.SerializationSettings));

    // Create a pipeline with a copy activity; inputPath/outputPath pipeline
    // parameters are forwarded into the dataset's "path" parameter.
    Console.WriteLine("Creating pipeline " + pipelineName + "...");
    PipelineResource pipeline = new PipelineResource
    {
        Parameters = new Dictionary<string, ParameterSpecification>
        {
            { "inputPath", new ParameterSpecification { Type = ParameterType.String } },
            { "outputPath", new ParameterSpecification { Type = ParameterType.String } }
        },
        Activities = new List<Activity>
        {
            new CopyActivity
            {
                Name = "CopyFromBlobToBlob",
                Inputs = new List<DatasetReference>
                {
                    new DatasetReference()
                    {
                        ReferenceName = blobDatasetName,
                        Parameters = new Dictionary<string, object>
                        {
                            { "path", "@pipeline().parameters.inputPath" }
                        }
                    }
                },
                Outputs = new List<DatasetReference>
                {
                    new DatasetReference
                    {
                        ReferenceName = blobDatasetName,
                        Parameters = new Dictionary<string, object>
                        {
                            { "path", "@pipeline().parameters.outputPath" }
                        }
                    }
                },
                Source = new BlobSource { },
                Sink = new BlobSink { }
            }
        }
    };
    client.Pipelines.CreateOrUpdate(resourceGroup, dataFactoryName, pipelineName, pipeline);
    Console.WriteLine(SafeJsonConvert.SerializeObject(pipeline, client.SerializationSettings));

    // Create a pipeline run with concrete parameter values.
    Console.WriteLine("Creating pipeline run...");
    Dictionary<string, object> parameters = new Dictionary<string, object>
    {
        { "inputPath", inputBlobPath },
        { "outputPath", outputBlobPath }
    };
    // BUGFIX: await the call instead of blocking on .Result inside async Main.
    CreateRunResponse runResponse = (await client.Pipelines.CreateRunWithHttpMessagesAsync(
        resourceGroup, dataFactoryName, pipelineName, parameters: parameters)).Body;
    Console.WriteLine("Pipeline run ID: " + runResponse.RunId);

    // Monitor the pipeline run until it is neither queued nor in progress.
    Console.WriteLine("Checking pipeline run status...");
    PipelineRun pipelineRun;
    while (true)
    {
        pipelineRun = client.PipelineRuns.Get(
            resourceGroup, dataFactoryName, runResponse.RunId);
        Console.WriteLine("Status: " + pipelineRun.Status);
        if (pipelineRun.Status == "InProgress" || pipelineRun.Status == "Queued")
        {
            await System.Threading.Tasks.Task.Delay(15000);
        }
        else
        {
            break;
        }
    }

    // Check the copy activity run details within a +/-10 minute window.
    Console.WriteLine("Checking copy activity run details...");
    RunFilterParameters filterParams = new RunFilterParameters(
        DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow.AddMinutes(10));
    ActivityRunsQueryResponse queryResponse = client.ActivityRuns.QueryByPipelineRun(
        resourceGroup, dataFactoryName, runResponse.RunId, filterParams);
    if (pipelineRun.Status == "Succeeded")
    {
        Console.WriteLine(queryResponse.Value.First().Output);
    }
    else
    {
        Console.WriteLine(queryResponse.Value.First().Error);
    }

    Console.WriteLine("\nPress any key to exit...");
    Console.ReadKey();
}
/// <summary>
/// Sample: fetches a single dataset by name.
/// NOTE(review): the #region Snippet marker presumably feeds doc/snippet
/// extraction tooling, so the code inside the region is kept verbatim --
/// confirm before restructuring.
/// </summary>
public void RetrieveDataset()
{
    #region Snippet:RetrieveDataset
    DatasetResource dataset = DatasetClient.GetDataset("MyDataset");
    #endregion
}
/// <summary>
/// Starts a create-or-update operation for the named dataset and returns a
/// long-running-operation handle the caller can poll to completion.
/// </summary>
/// <param name="datasetName">Name of the dataset to create or update.</param>
/// <param name="dataset">Dataset resource definition to send.</param>
/// <param name="ifMatch">Optional ETag for conditional update.</param>
/// <param name="cancellationToken">Token used to cancel the initial request.</param>
public virtual DatasetCreateOrUpdateDatasetOperation StartCreateOrUpdateDataset(string datasetName, DatasetResource dataset, string ifMatch = null, CancellationToken cancellationToken = default)
{
    // Guard clauses: both the name and the payload are required.
    if (datasetName == null)
    {
        throw new ArgumentNullException(nameof(datasetName));
    }
    if (dataset == null)
    {
        throw new ArgumentNullException(nameof(dataset));
    }

    using var scope = _clientDiagnostics.CreateScope("DatasetClient.StartCreateOrUpdateDataset");
    scope.Start();
    try
    {
        // Issue the initial request, then hand the operation object the same
        // request so it can poll for completion.
        var initialResponse = RestClient.CreateOrUpdateDataset(datasetName, dataset, ifMatch, cancellationToken);
        var request = RestClient.CreateCreateOrUpdateDatasetRequest(datasetName, dataset, ifMatch).Request;
        return new DatasetCreateOrUpdateDatasetOperation(_clientDiagnostics, _pipeline, request, initialResponse);
    }
    catch (Exception e)
    {
        // Record the failure on the diagnostic scope, then rethrow unchanged.
        scope.Failed(e);
        throw;
    }
}
// Pushes the expected dataset definition through CreateOrUpdate and verifies
// that the service echoes it back with an HTTP 200 status.
internal static async Task Update(DataFactoryManagementClient client, string resourceGroupName, string dataFactoryName, string datasetName, DatasetResource expectedDataset)
{
    AzureOperationResponse<DatasetResource> response =
        await client.Datasets.CreateOrUpdateWithHttpMessagesAsync(resourceGroupName, dataFactoryName, datasetName, expectedDataset);

    ValidateDataset(client, resourceGroupName, dataFactoryName, expectedDataset, response.Body, datasetName);
    Assert.Equal(HttpStatusCode.OK, response.Response.StatusCode);
}
/// <summary>
/// Quick-start sample: creates a data factory, a parameterized blob dataset
/// and a blob-to-blob copy pipeline, runs the pipeline and reports the copy
/// activity's output or error. All identifiers below are placeholders to be
/// filled in before running.
/// </summary>
static void Main(string[] args)
{
    // Set variables (placeholders).
    string tenantID = "<your tenant ID>";
    string applicationId = "<your application ID>";
    string authenticationKey = "<your authentication key for the application>";
    string subscriptionId = "<your subscription ID where the data factory resides>";
    string resourceGroup = "<your resource group where the data factory resides>";
    string region = "<the location of your resource group>";
    string dataFactoryName = "<specify the name of data factory to create. It must be globally unique.>";
    string storageAccount = "<your storage account name to copy data>";
    string storageKey = "<your storage account key>";
    // specify the container and input folder from which all files
    // need to be copied to the output folder.
    string inputBlobPath = "<path to existing blob(s) to copy data from, e.g. containername/inputdir>";
    // specify the container and output folder where the files are copied
    string outputBlobPath = "<the blob path to copy data to, e.g. containername/outputdir>";
    // name of the Azure Storage linked service, blob dataset, and the pipeline
    string storageLinkedServiceName = "AzureStorageLinkedService";
    string blobDatasetName = "BlobDataset";
    string pipelineName = "Adfv2QuickStartPipeline";

    // Authenticate with AAD client credentials and create a data factory
    // management client bound to the subscription.
    var context = new AuthenticationContext("https://login.microsoftonline.com/" + tenantID);
    ClientCredential cc = new ClientCredential(applicationId, authenticationKey);
    AuthenticationResult result = context.AcquireTokenAsync(
        "https://management.azure.com/", cc).Result;
    ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);
    var client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };

    // Create a data factory and poll until it leaves the PendingCreation state.
    Console.WriteLine("Creating data factory " + dataFactoryName + "...");
    Factory dataFactory = new Factory { Location = region, Identity = new FactoryIdentity() };
    client.Factories.CreateOrUpdate(resourceGroup, dataFactoryName, dataFactory);
    Console.WriteLine(
        SafeJsonConvert.SerializeObject(dataFactory, client.SerializationSettings));
    while (client.Factories.Get(resourceGroup, dataFactoryName).ProvisioningState == "PendingCreation")
    {
        System.Threading.Thread.Sleep(1000);
    }

    // Create an Azure Storage linked service (account-key authentication).
    Console.WriteLine("Creating linked service " + storageLinkedServiceName + "...");
    LinkedServiceResource storageLinkedService = new LinkedServiceResource(
        new AzureStorageLinkedService
        {
            ConnectionString = new SecureString(
                "DefaultEndpointsProtocol=https;AccountName=" + storageAccount + ";AccountKey=" + storageKey)
        }
    );
    client.LinkedServices.CreateOrUpdate(
        resourceGroup, dataFactoryName, storageLinkedServiceName, storageLinkedService);
    Console.WriteLine(SafeJsonConvert.SerializeObject(
        storageLinkedService, client.SerializationSettings));

    // Create an Azure Blob dataset whose folder path is a runtime parameter
    // ("path"), so the same dataset serves as both copy input and output.
    Console.WriteLine("Creating dataset " + blobDatasetName + "...");
    DatasetResource blobDataset = new DatasetResource(
        new AzureBlobDataset
        {
            LinkedServiceName = new LinkedServiceReference { ReferenceName = storageLinkedServiceName },
            FolderPath = new Expression { Value = "@{dataset().path}" },
            Parameters = new Dictionary <string, ParameterSpecification>
            {
                { "path", new ParameterSpecification { Type = ParameterType.String } }
            }
        }
    );
    client.Datasets.CreateOrUpdate(
        resourceGroup, dataFactoryName, blobDatasetName, blobDataset);
    Console.WriteLine(
        SafeJsonConvert.SerializeObject(blobDataset, client.SerializationSettings));

    // Create a pipeline with a copy activity; the inputPath/outputPath
    // pipeline parameters are forwarded into the dataset's "path" parameter.
    Console.WriteLine("Creating pipeline " + pipelineName + "...");
    PipelineResource pipeline = new PipelineResource
    {
        Parameters = new Dictionary <string, ParameterSpecification>
        {
            { "inputPath", new ParameterSpecification { Type = ParameterType.String } },
            { "outputPath", new ParameterSpecification { Type = ParameterType.String } }
        },
        Activities = new List <Activity>
        {
            new CopyActivity
            {
                Name = "CopyFromBlobToBlob",
                Inputs = new List <DatasetReference>
                {
                    new DatasetReference()
                    {
                        ReferenceName = blobDatasetName,
                        Parameters = new Dictionary <string, object>
                        {
                            { "path", "@pipeline().parameters.inputPath" }
                        }
                    }
                },
                Outputs = new List <DatasetReference>
                {
                    new DatasetReference
                    {
                        ReferenceName = blobDatasetName,
                        Parameters = new Dictionary <string, object>
                        {
                            { "path", "@pipeline().parameters.outputPath" }
                        }
                    }
                },
                Source = new BlobSource { },
                Sink = new BlobSink { }
            }
        }
    };
    client.Pipelines.CreateOrUpdate(resourceGroup, dataFactoryName, pipelineName, pipeline);
    Console.WriteLine(SafeJsonConvert.SerializeObject(pipeline, client.SerializationSettings));

    // Create a pipeline run, supplying concrete values for the parameters.
    Console.WriteLine("Creating pipeline run...");
    Dictionary <string, object> parameters = new Dictionary <string, object>
    {
        { "inputPath", inputBlobPath },
        { "outputPath", outputBlobPath }
    };
    CreateRunResponse runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(
        resourceGroup, dataFactoryName, pipelineName, parameters: parameters
    ).Result.Body;
    Console.WriteLine("Pipeline run ID: " + runResponse.RunId);

    // Monitor the pipeline run until it is neither queued nor in progress.
    Console.WriteLine("Checking pipeline run status...");
    PipelineRun pipelineRun;
    while (true)
    {
        pipelineRun = client.PipelineRuns.Get(
            resourceGroup, dataFactoryName, runResponse.RunId);
        Console.WriteLine("Status: " + pipelineRun.Status);
        if (pipelineRun.Status == "InProgress" || pipelineRun.Status == "Queued")
        {
            System.Threading.Thread.Sleep(15000);
        }
        else
        {
            break;
        }
    }

    // Check the copy activity run details within a +/-10 minute window around now.
    Console.WriteLine("Checking copy activity run details...");
    RunFilterParameters filterParams = new RunFilterParameters(
        DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow.AddMinutes(10));
    ActivityRunsQueryResponse queryResponse = client.ActivityRuns.QueryByPipelineRun(
        resourceGroup, dataFactoryName, runResponse.RunId, filterParams);
    if (pipelineRun.Status == "Succeeded")
    {
        Console.WriteLine(queryResponse.Value.First().Output);
    }
    else
    {
        Console.WriteLine(queryResponse.Value.First().Error);
    }

    Console.WriteLine("\nPress any key to exit...");
    Console.ReadKey();
}
/// <summary>
/// Creates or updates a dataset. Synchronous convenience wrapper over the
/// asynchronous <c>CreateOrUpdateAsync</c> extension method.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='factoryName'>
/// The factory name.
/// </param>
/// <param name='datasetName'>
/// The dataset name.
/// </param>
/// <param name='dataset'>
/// Dataset resource definition.
/// </param>
/// <param name='ifMatch'>
/// ETag of the dataset entity. Should only be specified for update, for which
/// it should match existing entity or can be * for unconditional update.
/// </param>
public static DatasetResource CreateOrUpdate(this IDatasetsOperations operations, string resourceGroupName, string factoryName, string datasetName, DatasetResource dataset, string ifMatch = default(string))
{
    // Block on the async implementation. GetAwaiter().GetResult() surfaces
    // the original exception rather than an AggregateException.
    var pending = operations.CreateOrUpdateAsync(resourceGroupName, factoryName, datasetName, dataset, ifMatch);
    return pending.GetAwaiter().GetResult();
}