public static void CreateObjects( string[] parameters, DataFactoryManagementClient client, IList<UpdateResourceEndpoint> endpoints) { CreateLinkedService( DataFactoryConfig.ResourceGroupName, DataFactoryConfig.DataFactoryName, client, endpoints, parameters); string inputDataset; IList<string> outputModelDatasets; IList<string> outputPlaceholderDatasets; CreateInputOutputDatasets( DataFactoryConfig.ResourceGroupName, DataFactoryConfig.DataFactoryName, client, out inputDataset, out outputModelDatasets, out outputPlaceholderDatasets, parameters, endpoints); CreatePipelines( DataFactoryConfig.ResourceGroupName, DataFactoryConfig.DataFactoryName, client, inputDataset, outputModelDatasets, outputPlaceholderDatasets, parameters, endpoints); }
private static void CreateInputOutputDatasets( string resourceGroupName, string dataFactoryName, DataFactoryManagementClient client, out string inputDataset, out IList<string> outputDatasets, string[] parameters) { inputDataset = "InputDatasetScoring"; outputDatasets = new List<string>(); foreach (string parameter in parameters) { string[] parameterList = parameter.Split(','); string region = parameterList[0]; string tableName = String.Format("outputScoring_{0}", region); client.Datasets.CreateOrUpdate(resourceGroupName, dataFactoryName, new DatasetCreateOrUpdateParameters() { Dataset = new Dataset() { Name = tableName, Properties = new DatasetProperties() { LinkedServiceName = "LinkedService-AzureStorage", TypeProperties = new AzureBlobDataset() { FolderPath = String.Format("outputscoring/{0}/", region) + "{Slice}", FileName = "output.csv", Format = new TextFormat() { ColumnDelimiter = "," }, PartitionedBy = new Collection<Partition>() { new Partition() { Name = "Slice", Value = new DateTimePartitionValue() { Date = "SliceStart", Format = "yyyyMMdd-HHmmss" } } } }, Availability = new Availability() { Frequency = SchedulePeriod.Minute, Interval = DataFactoryConfig.PipelineFrequencyInMinutes, }, } } }); outputDatasets.Add(tableName); } }
internal ComputeTypeGetResponse( CoreRegistrationModel.ComputeTypeGetResponse internalResponse, DataFactoryManagementClient client) : this() { DataFactoryOperationUtilities.CopyRuntimeProperties(internalResponse, this); this.ComputeType = ((ComputeTypeOperations)client.ComputeTypes).Converter.ToWrapperType(internalResponse.ComputeType); }
internal ActivityTypeCreateOrUpdateResponse( CoreRegistrationModel.ActivityTypeCreateOrUpdateResponse internalResponse, DataFactoryManagementClient client) : this() { DataFactoryOperationUtilities.CopyRuntimeProperties(internalResponse, this); this.ActivityType = ((ActivityTypeOperations)client.ActivityTypes).Converter.ToWrapperType(internalResponse.ActivityType); this.Status = internalResponse.Status; }
internal TableGetResponse(Core.Models.TableGetResponse internalResponse, DataFactoryManagementClient client) : this() { Ensure.IsNotNull(internalResponse, "internalResponse"); Ensure.IsNotNull(internalResponse.Table, "internalResponse.Table"); DataFactoryUtilities.CopyRuntimeProperties(internalResponse, this); this.Table = ((TableOperations)client.Tables).Converter.ToWrapperType(internalResponse.Table); }
internal DatasetGetResponse(Core.Models.DatasetGetResponse internalResponse, DataFactoryManagementClient client) : this() { Ensure.IsNotNull(internalResponse, "internalResponse"); Ensure.IsNotNull(internalResponse.Dataset, "internalResponse.Dataset"); DataFactoryOperationUtilities.CopyRuntimeProperties(internalResponse, this); this.Dataset = ((DatasetOperations)client.Datasets).Converter.ToWrapperType(internalResponse.Dataset); }
public static void GetDetails(this DataFactoryManagementClient client, AzureConfig config, string pipelineRunId, string status) { Console.WriteLine("Checking copy activity run details..."); var filterParams = new RunFilterParameters(DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow.AddMinutes(10)); var queryResponse = client.ActivityRuns.QueryByPipelineRun(config.ResourceGroup, config.DataFactoryName, pipelineRunId, filterParams); Console.WriteLine(status == "Succeeded" ? queryResponse.Value.First().Output : queryResponse.Value.First().Error); }
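A minimal usage sketch for the extension above (illustrative only): it assumes a config object of the AzureConfig type from the signature and a pipelineRunId returned by a CreateRun call, polls the run until it leaves the InProgress state, and then reuses GetDetails with the final status.

// Sketch: poll the run, then print its activity output or error via the GetDetails extension.
PipelineRun run = client.PipelineRuns.Get(config.ResourceGroup, config.DataFactoryName, pipelineRunId);
while (run.Status == "InProgress")
{
    System.Threading.Thread.Sleep(15000);
    run = client.PipelineRuns.Get(config.ResourceGroup, config.DataFactoryName, pipelineRunId);
}
client.GetDetails(config, pipelineRunId, run.Status);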
internal ActivityTypeListResponse( CoreRegistrationModel.ActivityTypeListResponse internalResponse, DataFactoryManagementClient client) { DataFactoryOperationUtilities.CopyRuntimeProperties(internalResponse, this); this.NextLink = internalResponse.NextLink; this.ActivityTypes = internalResponse.ActivityTypes.Select( internalActivityType => ((ActivityTypeOperations)client.ActivityTypes).Converter.ToWrapperType(internalActivityType)) .ToList(); }
internal static async Task Delete(DataFactoryManagementClient client, string resourceGroupName, string dataFactoryName) { AzureOperationResponse deleteResponse = await client.Factories.DeleteWithHttpMessagesAsync(resourceGroupName, dataFactoryName); Assert.Equal(HttpStatusCode.OK, deleteResponse.Response.StatusCode); deleteResponse = await client.Factories.DeleteWithHttpMessagesAsync(resourceGroupName, dataFactoryName); Assert.Equal(HttpStatusCode.NoContent, deleteResponse.Response.StatusCode); }
public DataFactoryClient(IAzureContext context) { if (context == null) { throw new ApplicationException(Resources.InvalidDefaultSubscription); } _subscriptionId = Guid.Parse(context.Subscription.Id); this.DataFactoryManagementClient = DataFactoryClient.CreateAdfClient(context); }
internal TableListResponse(Core.Models.TableListResponse internalResponse, DataFactoryManagementClient client) { Ensure.IsNotNull(internalResponse, "internalResponse"); Ensure.IsNotNull(internalResponse.Tables, "internalResponse.Tables"); DataFactoryUtilities.CopyRuntimeProperties(internalResponse, this); this.NextLink = internalResponse.NextLink; this.Tables = internalResponse.Tables.Select( internalTable => ((TableOperations)client.Tables).Converter.ToWrapperType(internalTable)).ToList(); }
internal DatasetListResponse(Core.Models.DatasetListResponse internalResponse, DataFactoryManagementClient client) { Ensure.IsNotNull(internalResponse, "internalResponse"); Ensure.IsNotNull(internalResponse.Datasets, "internalResponse.Datasets"); DataFactoryOperationUtilities.CopyRuntimeProperties(internalResponse, this); this.NextLink = internalResponse.NextLink; this.Datasets = internalResponse.Datasets.Select( internalDataset => ((DatasetOperations)client.Datasets).Converter.ToWrapperType(internalDataset)).ToList(); }
static void Main(string[] args)
{
    // Set variables
    string tenantID = "111e6fe8367-24af-48c2-a0ee-b5f386408512";
    string subscriptionId = "11144357e6b-77a0-4b60-a817-27e62ffb6fdd";
    string applicationId = "1110da14751-0058-4a91-ae16-b8722b2b74d1";
    string authenticationKey = "111CbA5ryJyhbFi4_g/bmM]VM4-QynEOU1p";
    string resourceGroup = "";
    string region = "";
    string dataFactoryName = "";
    string pipelineName = "";

    // Authenticate and create a data factory management client
    var context = new AuthenticationContext("https://login.windows.net/" + tenantID);
    ClientCredential cc = new ClientCredential(applicationId, authenticationKey);
    AuthenticationResult result = context.AcquireTokenAsync("https://management.azure.com/", cc).Result;
    ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);
    var client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };

    // Create a pipeline run
    Console.WriteLine("Creating pipeline run...");
    CreateRunResponse runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(resourceGroup, dataFactoryName, pipelineName).Result.Body;
    Console.WriteLine("Pipeline run ID: " + runResponse.RunId);

    // Monitor the pipeline run
    Console.WriteLine("Checking pipeline run status...");
    PipelineRun pipelineRun;
    while (true)
    {
        pipelineRun = client.PipelineRuns.Get(resourceGroup, dataFactoryName, runResponse.RunId);
        Console.WriteLine("Status: " + pipelineRun.Status);
        if (pipelineRun.Status == "InProgress")
        {
            System.Threading.Thread.Sleep(15000);
        }
        else
        {
            break;
        }
    }
}
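A possible follow-up once the monitoring loop exits, not part of the original sample: the same SDK exposes ActivityRuns.QueryByPipelineRun, so activity-level details of the finished run can be listed by reusing the variables declared in Main.

// Sketch (assumed follow-up): list the activity runs of the finished pipeline run.
var filter = new RunFilterParameters(DateTime.UtcNow.AddMinutes(-60), DateTime.UtcNow.AddMinutes(10));
var activityRuns = client.ActivityRuns.QueryByPipelineRun(resourceGroup, dataFactoryName, runResponse.RunId, filter);
foreach (var activityRun in activityRuns.Value)
{
    Console.WriteLine(activityRun.ActivityName + ": " + activityRun.Status);
}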
internal PipelineGetResponse( Core.Models.PipelineGetResponse internalResponse, DataFactoryManagementClient client) : this() { Ensure.IsNotNull(internalResponse, "internalResponse"); Ensure.IsNotNull(internalResponse.Pipeline, "internalResponse.Pipeline"); DataFactoryOperationUtilities.CopyRuntimeProperties(internalResponse, this); this.Pipeline = ((PipelineOperations)client.Pipelines).Converter.ToWrapperType(internalResponse.Pipeline); }
private static void CreateLinkedService( string resourceGroupName, string dataFactoryName, DataFactoryManagementClient client, IList <UpdateResourceEndpoint> endpoints, string[] parameters) { // create Azure ML training linked services Console.WriteLine("Creating Azure ML training linked service"); client.LinkedServices.CreateOrUpdate(resourceGroupName, dataFactoryName, new LinkedServiceCreateOrUpdateParameters() { LinkedService = new LinkedService() { Name = "LinkedServiceRetraining-AzureML", Properties = new LinkedServiceProperties ( new AzureMLLinkedService(DataFactoryConfig.RetrainingEndPoint, DataFactoryConfig.RetrainingApiKey) { } ) } } ); int i = 0; foreach (UpdateResourceEndpoint endpoint in endpoints) { string[] parameterList = parameters[i].Split(','); string region = parameterList[0]; // create Azure ML scoring linked services Console.WriteLine("Creating Azure ML scoring linked service for {0}", endpoint); client.LinkedServices.CreateOrUpdate(resourceGroupName, dataFactoryName, new LinkedServiceCreateOrUpdateParameters() { LinkedService = new LinkedService() { // Note: The linked service names generated here are also used by the scoring pipeline. Name = Utilities.GetScoringLinkedServiceName(DataFactoryConfig.ScoringLinkedServiceNamePrefix, region), Properties = new LinkedServiceProperties ( new AzureMLLinkedService(endpoint.mlEndpoint, endpoint.apiKey) { UpdateResourceEndpoint = endpoint.updateResourceEndpointUrl } ) } } ); i++; } }
public void ConstructorWithCredentialsAndBaseUriTest() { Mock <TokenCloudCredentials> mockCredentials = CreateMockCredentials(); SubscriptionCloudCredentials creds = mockCredentials.Object; var client = new DataFactoryManagementClient(creds, TestBaseUri); ValidateClientWithCredentials(client, creds, mockCredentials); Assert.Equal(TestBaseUri, client.BaseUri); Assert.Equal(TestBaseUri, client.InternalClient.BaseUri); }
private static void createAzureSSIS(DataFactoryManagementClient client) { var IR = new IntegrationRuntimeReference(DatosGrales.onPremiseIntegrationRuntime); var pass = new SecureString(DatosGrales.passwordWarehouse); LinkedServiceResource SqlServerLinkedServiceSSIS = new LinkedServiceResource( new SqlServerLinkedService( new SecureString(@"Data Source=sqlsrvbi00.database.windows.net;Initial Catalog=SSISDB;Integrated Security=False"), null, IR, "SQL Server that hosts SSIS", DatosGrales.usuarioWarehouse, pass)); client.LinkedServices.CreateOrUpdate(DatosGrales.resourceGroup, DatosGrales.dataFactoryName, DatosGrales.linkedServiceSSIS, SqlServerLinkedServiceSSIS); }
private static void createSQLServers(DataFactoryManagementClient client) { var IR = new IntegrationRuntimeReference(DatosGrales.onPremiseIntegrationRuntime); string nombreBD = DatosGrales.nombreBD; var pass = new SecureString(DatosGrales.passwordOnPremise); LinkedServiceResource SqlServerLinkedServiceClaim = new LinkedServiceResource( new SqlServerLinkedService( new SecureString(@"Data Source=ROW2K12SQL11;Initial Catalog=" + nombreBD + ";Integrated Security=True"), null, IR, "Local SQL - " + nombreBD, DatosGrales.usuarioOnPremise, pass)); client.LinkedServices.CreateOrUpdate(DatosGrales.resourceGroup, DatosGrales.dataFactoryName, "SqlServerLinkedService-" + nombreBD, SqlServerLinkedServiceClaim); }
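Both helpers above reference a self-hosted integration runtime only by name (DatosGrales.onPremiseIntegrationRuntime); the runtime itself is registered elsewhere. The following is just a sketch of how that registration could look with the standard IntegrationRuntimes.CreateOrUpdate call; the description text is an assumption.

// Sketch: register the self-hosted integration runtime that the IntegrationRuntimeReference points to.
var irResource = new IntegrationRuntimeResource(
    new SelfHostedIntegrationRuntime { Description = "Self-hosted IR for on-premises SQL Server" });
client.IntegrationRuntimes.CreateOrUpdate(
    DatosGrales.resourceGroup,
    DatosGrales.dataFactoryName,
    DatosGrales.onPremiseIntegrationRuntime,
    irResource);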
private static void ValidateDataFlow(DataFactoryManagementClient client, string resourceGroupName, string dataFactoryName, DataFlowResource expected, DataFlowResource actual, string expectedName) { ValidateSubResource(client, resourceGroupName, actual, dataFactoryName, expectedName, "dataflows"); if (string.Equals(expectedName, "TestPowerQuery", StringComparison.InvariantCultureIgnoreCase)) { Assert.IsType <WranglingDataFlow>(actual.Properties); } else { Assert.IsType <MappingDataFlow>(actual.Properties); } }
public static DataFactoryManagementClient CreateDataFactoryClient(string SubscriptionId) { string token = Shared.Azure.AzureSDK.GetAzureRestApiToken("https://management.azure.com/"); ServiceClientCredentials cred = new TokenCredentials(token); DataFactoryManagementClient adfClient = new DataFactoryManagementClient(cred) { SubscriptionId = SubscriptionId }; return(adfClient); }
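A quick way to verify the client returned above is usable (illustrative only; the subscription id placeholder is an assumption) is to enumerate the data factories visible to the subscription.

// Sketch: list the data factories in the subscription with the freshly created client.
DataFactoryManagementClient adfClient = CreateDataFactoryClient("<subscription id>");
foreach (Factory factory in adfClient.Factories.List())
{
    Console.WriteLine(factory.Name + " (" + factory.Location + ")");
}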
internal static async Task GetList(DataFactoryManagementClient client, string resourceGroupName, string dataFactoryName, string datasetName, TriggerResource expectedTrigger) { AzureOperationResponse <TriggerResource> getTriggerResponse = await client.Triggers.GetWithHttpMessagesAsync(resourceGroupName, dataFactoryName, datasetName); ValidateTrigger(client, resourceGroupName, dataFactoryName, expectedTrigger, getTriggerResponse.Body, datasetName); Assert.Equal(HttpStatusCode.OK, getTriggerResponse.Response.StatusCode); AzureOperationResponse <IPage <TriggerResource> > listTriggerResponse = await client.Triggers.ListByFactoryWithHttpMessagesAsync(resourceGroupName, dataFactoryName); ValidateTrigger(client, resourceGroupName, dataFactoryName, expectedTrigger, listTriggerResponse.Body.First(), datasetName); Assert.Equal(HttpStatusCode.OK, listTriggerResponse.Response.StatusCode); }
public void ConstructorWithCredentialsAndHttpClientTest() { var httpClient = new HttpClient(); Mock <TokenCloudCredentials> mockCredentials = CreateMockCredentials(); SubscriptionCloudCredentials creds = mockCredentials.Object; var client = new DataFactoryManagementClient(creds, httpClient); ValidateClientWithCredentials(client, creds, mockCredentials); Assert.Equal(httpClient, client.HttpClient); Assert.Equal(httpClient, client.InternalClient.HttpClient); }
internal static async Task GetList(DataFactoryManagementClient client, string resourceGroupName, string dataFactoryName, string dataFlowName, DataFlowResource expectedDataFlow) { AzureOperationResponse <DataFlowResource> getDataFlowResponse = await client.DataFlows.GetWithHttpMessagesAsync(resourceGroupName, dataFactoryName, dataFlowName); ValidateDataFlow(client, resourceGroupName, dataFactoryName, expectedDataFlow, getDataFlowResponse.Body, dataFlowName); Assert.Equal(HttpStatusCode.OK, getDataFlowResponse.Response.StatusCode); AzureOperationResponse <IPage <DataFlowResource> > listDataFlowResponse = await client.DataFlows.ListByFactoryWithHttpMessagesAsync(resourceGroupName, dataFactoryName); ValidateDataFlow(client, resourceGroupName, dataFactoryName, expectedDataFlow, listDataFlowResponse.Body.First(), dataFlowName); Assert.Equal(HttpStatusCode.OK, listDataFlowResponse.Response.StatusCode); }
internal static async Task GetList(DataFactoryManagementClient client, string resourceGroupName, string dataFactoryName, string pipelineName, PipelineResource expectedPipeline) { AzureOperationResponse <PipelineResource> getPipelineResponse = await client.Pipelines.GetWithHttpMessagesAsync(resourceGroupName, dataFactoryName, pipelineName); ValidatePipeline(client, resourceGroupName, dataFactoryName, expectedPipeline, getPipelineResponse.Body, pipelineName); Assert.Equal(HttpStatusCode.OK, getPipelineResponse.Response.StatusCode); AzureOperationResponse <IPage <PipelineResource> > listPipelineResponse = await client.Pipelines.ListByFactoryWithHttpMessagesAsync(resourceGroupName, dataFactoryName); ValidatePipeline(client, resourceGroupName, dataFactoryName, expectedPipeline, listPipelineResponse.Body.First(), pipelineName); Assert.Equal(HttpStatusCode.OK, listPipelineResponse.Response.StatusCode); }
/// <summary> /// Gets the data factory management client. /// </summary> private DataFactoryManagementClient GetDataFactoryManagementClient() { TokenCloudCredentials aadTokenCredentials = new TokenCloudCredentials(settingsContext.SubscriptionId, AzureAccessUtilities.GetAuthorizationHeaderNoPopup(settingsContext)); Uri resourceManagerUri = new Uri(resourceManagerEndpoint); DataFactoryManagementClient client = new DataFactoryManagementClient(aadTokenCredentials, resourceManagerUri); return(client); }
internal LinkedServiceGetResponse( Core.Models.LinkedServiceGetResponse internalResponse, DataFactoryManagementClient client) : this() { Ensure.IsNotNull(internalResponse, "internalResponse"); Ensure.IsNotNull(internalResponse.LinkedService, "internalResponse.LinkedService"); DataFactoryOperationUtilities.CopyRuntimeProperties(internalResponse, this); this.LinkedService = ((LinkedServiceOperations)client.LinkedServices).Converter.ToWrapperType(internalResponse.LinkedService); }
private async Task <string> RunAdHocPipeline(DataFactoryManagementClient client, ManualTriggerRequest inputRequest) { AzureOperationResponse <CreateRunResponse> runResponse = await client.Pipelines. CreateRunWithHttpMessagesAsync( inputRequest.ResourceGroup, inputRequest.DataFactoryName, inputRequest.PipelineName, parameters : inputRequest.PipelineParams ); return(runResponse.Body.RunId); }
private static void listarPipelines(DataFactoryManagementClient client) { var pl = client.Pipelines.ListByFactory(DatosGrales.resourceGroup, DatosGrales.dataFactoryName); PipelineResource[] pipes = pl.ToArray <PipelineResource>(); Console.Write("\nList of pipelines: \n"); for (int i = 0; i < pipes.Length; i++) { Console.Write("" + (i + 1) + ": " + pipes[i].Name + "\n"); } Console.Write("\n"); }
internal static async Task GetList(DataFactoryManagementClient client, string resourceGroupName, string dataFactoryName, string linkedServiceName, LinkedServiceResource expectedLinkedService) { AzureOperationResponse <LinkedServiceResource> getResponse = await client.LinkedServices.GetWithHttpMessagesAsync(resourceGroupName, dataFactoryName, linkedServiceName); ValidateLinkedService(client, resourceGroupName, dataFactoryName, expectedLinkedService, getResponse.Body, linkedServiceName); Assert.Equal(HttpStatusCode.OK, getResponse.Response.StatusCode); AzureOperationResponse <IPage <LinkedServiceResource> > listResponse = await client.LinkedServices.ListByFactoryWithHttpMessagesAsync(resourceGroupName, dataFactoryName); ValidateLinkedService(client, resourceGroupName, dataFactoryName, expectedLinkedService, listResponse.Body.First(), linkedServiceName); Assert.Equal(HttpStatusCode.OK, listResponse.Response.StatusCode); }
public static void CreatePipeline(DataFactoryManagementClient client) { Console.WriteLine("Creating pipeline " + Constants.pipelineName + "..."); PipelineResource pipeline = new PipelineResource { Parameters = new Dictionary <string, ParameterSpecification> { { "inputPath", new ParameterSpecification { Type = ParameterType.String } }, { "outputPath", new ParameterSpecification { Type = ParameterType.String } } }, Activities = new List <Activity> { new CopyActivity { Name = "CopyFromBlobToBlob", Inputs = new List <DatasetReference> { new DatasetReference() { ReferenceName = Constants.blobDatasetName, Parameters = new Dictionary <string, object> { { "path", "@pipeline().parameters.inputPath" } } } }, Outputs = new List <DatasetReference> { new DatasetReference { ReferenceName = Constants.blobDatasetName, Parameters = new Dictionary <string, object> { { "path", "@pipeline().parameters.outputPath" } } } }, Source = new BlobSource { }, Sink = new BlobSink { } } } }; client.Pipelines.CreateOrUpdate(Constants.resourceGroup, Constants.dataFactoryName, Constants.pipelineName, pipeline); //Console.WriteLine(SafeJsonConvert.SerializeObject(pipeline, client.SerializationSettings)); }
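The pipeline above declares inputPath and outputPath parameters, so a run has to supply both. The sketch below shows one way to start it; the blob paths are placeholders, not values taken from the sample.

// Sketch: trigger the pipeline created above, supplying its two declared parameters.
var runParameters = new Dictionary<string, object>
{
    { "inputPath", "<container>/<input folder>" },
    { "outputPath", "<container>/<output folder>" }
};
CreateRunResponse runResponse = client.Pipelines.CreateRunWithHttpMessagesAsync(
    Constants.resourceGroup, Constants.dataFactoryName, Constants.pipelineName,
    parameters: runParameters).Result.Body;
Console.WriteLine("Pipeline run ID: " + runResponse.RunId);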
private async Task InitializeClient() { var token = await _azureAppAuthenticationProvider.LoginAsync(); var tokenCredentials = new TokenCredentials(token); _dataFactoryClient = new DataFactoryManagementClient(tokenCredentials) { SubscriptionId = _subscriptionId }; await Task.CompletedTask; }
private static DataFactoryManagementClient CreateADFClient(string applicationId, string authenticationKey, string tenantId, string subscriptionId) { var context = new Microsoft.IdentityModel.Clients.ActiveDirectory.AuthenticationContext("https://login.windows.net/" + tenantId); ClientCredential cc = new ClientCredential(applicationId, authenticationKey); AuthenticationResult result = context.AcquireTokenAsync("https://management.azure.com/", cc).Result; ServiceClientCredentials cred = new TokenCredentials(result.AccessToken); var client = new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId }; return(client); }
public static void initialise() { DataFactoryManagementClient client = ADF.GetAuthUser(); ADF.CreateADF(client); ADF.CreateBlobLS(client); ADF.CreateDS(client); ADF.CreatePipeline(client); CreateRunResponse runResponse = ADF.CreatePipelineRun(client); PipelineRun pipelineRun = ADF.MonitorPipelineStatus(client, runResponse); ADF.MonitorActivityStatus(client, runResponse, pipelineRun); }
internal DatasetCreateOrUpdateResponse( Core.Models.DatasetCreateOrUpdateResponse internalResponse, DataFactoryManagementClient client) : this() { Ensure.IsNotNull(internalResponse, "internalResponse"); Ensure.IsNotNull(internalResponse.Dataset, "internalResponse.Dataset"); DataFactoryOperationUtilities.CopyRuntimeProperties(internalResponse, this); this.Dataset = ((DatasetOperations)client.Datasets).Converter.ToWrapperType(internalResponse.Dataset); this.Location = internalResponse.Location; this.Status = internalResponse.Status; }
public static DataFactoryManagementClient CreateManagementClientInstance() { //IMPORTANT: generate a security token for the subscription and AAD app TokenCloudCredentials aadTokenCredentials = new TokenCloudCredentials(ConfigurationSettings.AppSettings["SubscriptionId"], GetAuthorizationHeader().Result); Uri resourceManagerUri = new Uri(ConfigurationSettings.AppSettings["ResourceManagerEndpoint"]); // create data factory management client client = new DataFactoryManagementClient(aadTokenCredentials, resourceManagerUri); return(client); }
private static void listarLinkedServices(DataFactoryManagementClient client) { var listaLink = client.LinkedServices.ListByFactory(DatosGrales.resourceGroup, DatosGrales.dataFactoryName); LinkedServiceResource[] lsr = listaLink.ToArray <LinkedServiceResource>(); Console.Write("\nList of linked services:\n"); for (int i = 0; i < lsr.Length; i++) { Console.Write("" + (i + 1) + ": " + lsr[i].Name + "\n"); } Console.Write("\n"); }
internal PipelineCreateOrUpdateResponse( Core.Models.PipelineCreateOrUpdateResponse internalResponse, DataFactoryManagementClient client) : this() { Ensure.IsNotNull(internalResponse, "internalResponse"); Ensure.IsNotNull(internalResponse.Pipeline, "internalResponse.Pipeline"); DataFactoryUtilities.CopyRuntimeProperties(internalResponse, this); this.Pipeline = ((PipelineOperations)client.Pipelines).Converter.ToWrapperType(internalResponse.Pipeline); this.Location = internalResponse.Location; this.Status = internalResponse.Status; }
private static void ValidateDefaultClientProperties(DataFactoryManagementClient client) { Assert.NotNull(client); Core.DataFactoryManagementClient internalClient = client.InternalClient; Assert.NotNull(internalClient); Assert.Equal(internalClient.LongRunningOperationInitialTimeout, client.LongRunningOperationInitialTimeout); Assert.Equal(internalClient.LongRunningOperationRetryTimeout, client.LongRunningOperationRetryTimeout); Assert.NotNull(client.HttpClient); Assert.NotNull(client.InternalClient.HttpClient); Assert.Equal(internalClient.HttpClient.Timeout, client.HttpClient.Timeout); }
public async Task <DataFactoryManagementClient> GetClient() { var context = new AuthenticationContext($"https://login.windows.net/{_azureConfig.TenantId}"); var cc = new ClientCredential(_azureConfig.ApplicationId, _azureConfig.AuthenticationKey); var result = await context.AcquireTokenAsync("https://management.azure.com/", cc); var client = new DataFactoryManagementClient(new TokenCredentials(result.AccessToken)) { SubscriptionId = _azureConfig.SubscriptionId }; return(client); }
protected override bool CheckExistence() { var found = false; if (Parameters.Properties.ResourceGroupExists) { using (var client = new DataFactoryManagementClient(GetCredentials())) { var result = client.DataFactories.ListAsync(Parameters.Tenant.SiteName).Result; found = result.DataFactories.Any(d => d.Name.Equals(Parameters.Tenant.SiteName)); } } return found; }
/// <summary> /// Creates a data factory with the given name and resource group in the specified deployment region /// </summary> public static void CreateDataFactory(string resourceGroupName, string dataFactoryName, DataFactoryManagementClient client) { // create a data factory Console.WriteLine("Creating a data factory"); client.DataFactories.CreateOrUpdate(resourceGroupName, new DataFactoryCreateOrUpdateParameters() { DataFactory = new DataFactory() { Name = dataFactoryName, Location = DataFactoryConfig.DeploymentRegion, Properties = new DataFactoryProperties() { } } } ); }
/// <summary> /// Create the storage linked service. The same storage account is used for both retraining and scoring outputs. /// </summary> /// <param name="resourceGroupName"></param> /// <param name="dataFactoryName"></param> /// <param name="client"></param> public static void CreateStorageLinkedService(string resourceGroupName, string dataFactoryName, DataFactoryManagementClient client) { // create a linked service Console.WriteLine("Creating a linked service"); client.LinkedServices.CreateOrUpdate(resourceGroupName, dataFactoryName, new LinkedServiceCreateOrUpdateParameters() { LinkedService = new LinkedService() { Name = "LinkedService-AzureStorage", Properties = new LinkedServiceProperties ( new AzureStorageLinkedService(String.Format("DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1}", DataFactoryConfig.StorageAccountName, DataFactoryConfig.StorageAccountKey)) ) } } ); }
public static void CreateObjects( string[] parameters, DataFactoryManagementClient client) { string Dataset_Source; string Dataset_Destination; CreateInputOutputDatasets( DataFactoryConfig.ResourceGroupName, DataFactoryConfig.DataFactoryName, client, out Dataset_Source, out Dataset_Destination); CreatePipelines( DataFactoryConfig.ResourceGroupName, DataFactoryConfig.DataFactoryName, client, Dataset_Source, Dataset_Destination, parameters); }
public static void CreateObjects( string[] parameters, DataFactoryManagementClient client) { string inputDataset; IList<string> outputDatasets; CreateInputOutputDatasets( DataFactoryConfig.ResourceGroupName, DataFactoryConfig.DataFactoryName, client, out inputDataset, out outputDatasets, parameters); CreatePipelines( DataFactoryConfig.ResourceGroupName, DataFactoryConfig.DataFactoryName, client, inputDataset, outputDatasets, parameters); }
static void Main(string[] args) { // Read region values from the parameters.txt file. // We will create one retraining pipeline and one scoring pipeline for each region value. // Note: You will typically replace this with code that reads the values from your data store. string[] parameters = System.IO.File.ReadAllLines(@"parameters.txt"); // Create the data factory management client. This will pop up a UI for // Azure login for your subscription. TokenCloudCredentials aadTokenCredentials = new TokenCloudCredentials( ConfigurationManager.AppSettings["SubscriptionId"], Utilities.GetAuthorizationHeader()); Uri resourceManagerUri = new Uri(ConfigurationManager.AppSettings["ResourceManagerEndpoint"]); DataFactoryManagementClient client = new DataFactoryManagementClient(aadTokenCredentials, resourceManagerUri); Utilities.CreateDataFactory(DataFactoryConfig.ResourceGroupName, DataFactoryConfig.DataFactoryName, client); // We use the same storage account to store the results of both retraining and scoring. Hence create the shared // linked service for the storage account before creating the 2 pipelines. Utilities.CreateStorageLinkedService(DataFactoryConfig.ResourceGroupName, DataFactoryConfig.DataFactoryName, client); // Note: CreateMLEndpoints creates as many endpoint entries in the endpoints collection as there are regions. // In this sample we are using the same hardwired value of the endpoint for each entry in the collection. You will need // to replace this code with code that programmatically creates multiple endpoints. IList<UpdateResourceEndpoint> mlEndpoints; Utilities.CreateMLEndpoints(out mlEndpoints, parameters.Length); // Note: the retraining pipeline generation code creates the scoring linked services that are referenced by // the scoring pipeline. Hence the retraining pipeline code MUST run before the scoring pipeline code. RetrainingPipeline.CreateObjects(parameters, client, mlEndpoints); ScoringPipeline.CreateObjects(parameters, client); // We don't have a copy step. This is commented-out bonus code for copying data if you also need data movement. //CopyPipeline.CreateObjects(parameters, client); Console.WriteLine("Press Enter to exit"); Console.ReadLine(); }
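The pipeline-generation code in this sample splits each line of parameters.txt on commas and only uses the first field as the region name, so a minimal file (layout assumed, not shown in the source) could simply list one region per line, for example:

eastus
westus
northeurope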
private void CreatePipeline_PrepareSampleData(DataFactoryManagementClient client) { // Setup Pipeline Parameters var parameters = new PipelineCreateOrUpdateParameters() { Pipeline = new Pipeline() { Name = "PrepareSampleDataPipeline", Properties = new PipelineProperties() { Description = "Prepare Sample Data for Personalized Product Recommendation Use Case", Start = _pipelineStartDate, End = _pipelineEndDate, Activities = new List<Activity>() { new Activity() { Name = "PrepareSampleDataActivity", Description = "Prepare Sample Data for Personalized Product Recommendation Use Case", Outputs = new List<ActivityOutput>() { new ActivityOutput("RawProductsUsageTable") }, LinkedServiceName = "HDInsightLinkedService", TypeProperties = new DotNetActivity() { AssemblyName = "ProductRecDataGenerator.dll", EntryPoint = "ProductRecDataGenerator.DataGenerator", PackageLinkedService = "StorageLinkedService", PackageFile = "packages/ProductRecDataGenerator.zip", ExtendedProperties = new Dictionary<string, string>() { { "sliceStart", "$$Text.Format('{0:yyyyMMddHHmm}', Time.AddMinutes(SliceStart, 0))" } } }, Policy = new ActivityPolicy() { Concurrency = 1, ExecutionPriorityOrder = "NewestFirst", Retry = 1, Timeout = new TimeSpan(0, 2, 0, 0) } } }, } } }; // Create PipeLine client.Pipelines.CreateOrUpdateAsync(Parameters.Tenant.SiteName, Parameters.Tenant.SiteName, parameters).Wait(); }
private void CreatePipeline_PrepareMahoutUsage(DataFactoryManagementClient client) { // Setup Pipeline Parameters var parameters = new PipelineCreateOrUpdateParameters() { Pipeline = new Pipeline() { Name = "PrepareMahoutUsagePipeline", Properties = new PipelineProperties() { Description = "This is a pipeline to prepare the data for feeding into the Mahout engine", Start = _pipelineStartDate, End = _pipelineEndDate, Activities = new List<Activity>() { new Activity() { Name = "PrepareMahoutInputHiveActivity", Description = "Prepare Mahout Input Hive Activity", Inputs = new List<ActivityInput>() { new ActivityInput("PartitionedProductsUsageTable") }, Outputs = new List<ActivityOutput>() { new ActivityOutput("MahoutInputProductsUsageTable") }, LinkedServiceName = "HDInsightLinkedService", TypeProperties = new HDInsightHiveActivity() { ScriptPath = "scripts\\preparemahoutinput.hql", ScriptLinkedService = "StorageLinkedService", Defines = new Dictionary<string, string>() { { "MAHOUTINPUT", UpdateParameters("$$Text.Format('wasb://productrec@<account name>.blob.core.windows.net/mahoutinput/yearno={0:yyyy}/monthno={0:%M}/', SliceStart)") }, { "PARTITIONEDOUTPUT", UpdateParameters("$$Text.Format('wasb://productrec@<account name>.blob.core.windows.net/partitionedusageevents/yearno={0:yyyy}/monthno={0:%M}/', SliceStart)") } }, }, Policy = new ActivityPolicy() { Concurrency = 1, ExecutionPriorityOrder = "NewestFirst", Retry = 1, Timeout = new TimeSpan(0, 1, 0, 0) } } }, } } }; // Create PipeLine client.Pipelines.CreateOrUpdateAsync(Parameters.Tenant.SiteName, Parameters.Tenant.SiteName, parameters).Wait(); }
private void CreatePipeline_PartitionProductUsage(DataFactoryManagementClient client) { // Setup Pipeline Parameters var parameters = new PipelineCreateOrUpdateParameters() { Pipeline = new Pipeline() { Name = "PartitionProductsUsagePipeline", Properties = new PipelineProperties() { Description = "This is a pipeline to prepare the raw Products Usage data for further processing (v1.0)", Start = _pipelineStartDate, End = _pipelineEndDate, Activities = new List<Activity>() { new Activity() { Name = "BlobPartitionHiveActivity", Description = "Blob Partition Hive Activity", Inputs = new List<ActivityInput>() { new ActivityInput("RawProductsUsageTable") }, Outputs = new List<ActivityOutput>() { new ActivityOutput("PartitionedProductsUsageTable") }, LinkedServiceName = "HDInsightLinkedService", TypeProperties = new HDInsightHiveActivity() { ScriptPath = "scripts\\partitionproductusage.hql", ScriptLinkedService = "StorageLinkedService", Defines = new Dictionary<string, string>() { { "RAWINPUT", UpdateParameters("wasb://productrec@<account name>.blob.core.windows.net/rawusageevents/") }, { "PARTITIONEDOUTPUT", UpdateParameters("wasb://productrec@<account name>.blob.core.windows.net/partitionedusageevents/") }, { "Year", "$$Text.Format('{0:yyyy}',SliceStart)" }, { "Month", "$$Text.Format('{0:%M}',SliceStart)" }, { "Day", "$$Text.Format('{0:%d}',SliceStart)" } }, }, Policy = new ActivityPolicy() { Concurrency = 1, ExecutionPriorityOrder = "NewestFirst", Retry = 2, Timeout = new TimeSpan(0, 1, 0, 0) } } }, } } }; // Create PipeLine client.Pipelines.CreateOrUpdateAsync(Parameters.Tenant.SiteName, Parameters.Tenant.SiteName, parameters).Wait(); }
private void CreatePipeline_MapSimilarProducts(DataFactoryManagementClient client) { // Setup Pipeline Parameters var parameters = new PipelineCreateOrUpdateParameters() { Pipeline = new Pipeline() { Name = "MapSimilarProductsPipeline", Properties = new PipelineProperties() { Description = "This is a pipeline to map the similar productids generated as part of Mahout recommendations to the product name", Start = _pipelineStartDate, End = _pipelineEndDate, Activities = new List<Activity>() { new Activity() { Name = "MapSimilarProductsHiveActivity", Description = "Map Similar Hive Activity", Inputs = new List<ActivityInput>() { new ActivityInput("ProductsSimilarityTable") }, Outputs = new List<ActivityOutput>() { new ActivityOutput("ProductsSimilarityOutputTable") }, LinkedServiceName = "HDInsightLinkedService", TypeProperties = new HDInsightHiveActivity() { ScriptPath = "scripts\\selectsimilarproducts.hql", ScriptLinkedService = "StorageLinkedService", Defines = new Dictionary<string, string>() { { "MAHOUTOUTPUT", UpdateParameters("$$Text.Format('wasb://productrec@<account name>.blob.core.windows.net/itemsimilarity/yearno={0:yyyy}/monthno={0:%M}/', SliceStart)") }, { "SIMILARPRODUCTSOUTPUT", UpdateParameters("$$Text.Format('wasb://productrec@<account name>.blob.core.windows.net/itemsimilarityoutput/yearno={0:yyyy}/monthno={0:%M}/', SliceStart)") } }, }, Policy = new ActivityPolicy() { Concurrency = 1, ExecutionPriorityOrder = "NewestFirst", Retry = 0, Timeout = new TimeSpan(0, 1, 0, 0) } } }, } } }; // Create PipeLine client.Pipelines.CreateOrUpdateAsync(Parameters.Tenant.SiteName, Parameters.Tenant.SiteName, parameters).Wait(); }
private static void CreatePipelines( string resourceGroupName, string dataFactoryName, DataFactoryManagementClient client, string inputDataset, IList<string> outputModelDatasets, IList<string> outputPlaceholderDatasets, string[] parameters, IList<UpdateResourceEndpoint> endpoints) { int i = 0; foreach (string parameter in parameters) { string[] parameterList = parameter.Split(','); string region = parameterList[0]; // create a pipeline DateTime PipelineActivePeriodStartTime = DateTime.Parse(DataFactoryConfig.PipelineStartTime); DateTime PipelineActivePeriodEndTime = PipelineActivePeriodStartTime.AddMinutes(DataFactoryConfig.MinutesToAddToStartTimeForEndTime); string PipelineName = String.Format("RetrainingPipeline_{0}", region); Console.WriteLine("Creating a pipeline {0}", PipelineName); client.Pipelines.CreateOrUpdate(resourceGroupName, dataFactoryName, new PipelineCreateOrUpdateParameters() { Pipeline = new Pipeline() { Name = PipelineName, Properties = new PipelineProperties() { Description = "Pipeline for retraining", // Initial value for pipeline's active period. Start = PipelineActivePeriodStartTime, End = PipelineActivePeriodEndTime, Activities = new List<Activity>() { new Activity() { Name = "AzureMLBatchExecution", Inputs = new List<ActivityInput>() { new ActivityInput() { Name = inputDataset } }, Outputs = new List<ActivityOutput>() { new ActivityOutput() { Name = outputModelDatasets[i] } }, LinkedServiceName = "LinkedServiceRetraining-AzureML", TypeProperties = new AzureMLBatchExecutionActivity() { WebServiceInput = inputDataset, WebServiceOutputs = new Dictionary<string, string> { {"output1", outputModelDatasets[i]} }, GlobalParameters = new Dictionary<string, string> { {"starttime", "$$Text.Format('\\'{0:yyyy-MM-dd HH:mm:ss}\\'', WindowStart)"}, {"endtime", "$$Text.Format('\\'{0:yyyy-MM-dd HH:mm:ss}\\'', WindowEnd)"}, {"cpf", region} } } }, new Activity() { Name = "AzureMLUpdateResource", Inputs = new List<ActivityInput>() { new ActivityInput() { Name = outputModelDatasets[i] } }, Outputs = new List<ActivityOutput>() { new ActivityOutput() { Name = outputPlaceholderDatasets[i] } }, LinkedServiceName = "LinkedServiceScoring-AzureML-" + region, TypeProperties = new AzureMLUpdateResourceActivity() { TrainedModelName = "Trained model for facility " + region, TrainedModelDatasetName = outputModelDatasets[i] } } }, } } }); i++; } }
private static void CreatePipelines(string resourceGroupName, string dataFactoryName, DataFactoryManagementClient client, string Dataset_Source, string Dataset_Destination, string[] parameters) { foreach (string parameter in parameters) { string[] parameterList = parameter.Split(','); string region = parameterList[0]; // create a pipeline DateTime PipelineActivePeriodStartTime = DateTime.Parse(DataFactoryConfig.PipelineStartTime); DateTime PipelineActivePeriodEndTime = PipelineActivePeriodStartTime.AddMinutes(DataFactoryConfig.MinutesToAddToStartTimeForEndTime); string PipelineName = String.Format("Sample_{0}", region); Console.WriteLine("Creating a pipeline {0}", PipelineName); client.Pipelines.CreateOrUpdate(resourceGroupName, dataFactoryName, new PipelineCreateOrUpdateParameters() { Pipeline = new Pipeline() { Name = PipelineName, Properties = new PipelineProperties() { Description = "Demo Pipeline for data transfer between blobs", // Initial value for pipeline's active period. Start = PipelineActivePeriodStartTime, End = PipelineActivePeriodEndTime, Activities = new List<Activity>() { new Activity() { Name = "BlobToBlob", Inputs = new List<ActivityInput>() { new ActivityInput() { Name = Dataset_Source } }, Outputs = new List<ActivityOutput>() { new ActivityOutput() { Name = Dataset_Destination } }, TypeProperties = new CopyActivity() { Source = new BlobSource(), Sink = new BlobSink() { WriteBatchSize = 10000, WriteBatchTimeout = TimeSpan.FromMinutes(10) } } } }, } } }); } }
private static void CreateInputOutputDatasets( string resourceGroupName, string dataFactoryName, DataFactoryManagementClient client, out string inputDataset, out IList<string> outputModelDatasets, out IList<string> outputPlaceholderDatasets, string[] parameters, IList<UpdateResourceEndpoint> endpoints) { // create input and output tables Console.WriteLine("Creating input and output tables"); inputDataset = "InputDatasetScoring"; outputModelDatasets = new List<string>(); outputPlaceholderDatasets = new List<string>(); client.Datasets.CreateOrUpdate(resourceGroupName, dataFactoryName, new DatasetCreateOrUpdateParameters() { Dataset = new Dataset() { Name = inputDataset, Properties = new DatasetProperties() { LinkedServiceName = "LinkedService-AzureStorage", TypeProperties = new AzureBlobDataset() { FolderPath = "inputdatascoring/", FileName = "input.csv", Format = new TextFormat() { ColumnDelimiter = "," } }, External = true, Availability = new Availability() { Frequency = SchedulePeriod.Minute, Interval = 15, }, Policy = new Policy() { Validation = new ValidationPolicy() { MinimumRows = 1 } } } } }); foreach (string parameter in parameters) { string[] parameterList = parameter.Split(','); string region = parameterList[0]; string outputModelDataset = String.Format("outputModel_{0}", region); client.Datasets.CreateOrUpdate(resourceGroupName, dataFactoryName, new DatasetCreateOrUpdateParameters() { Dataset = new Dataset() { Name = outputModelDataset, Properties = new DatasetProperties() { LinkedServiceName = "LinkedService-AzureStorage", TypeProperties = new AzureBlobDataset() { FolderPath = String.Format("outputmodel/{0}/", region) + "{Slice}", FileName = "model.ilearner", Format = new TextFormat() { }, PartitionedBy = new Collection<Partition>() { new Partition() { Name = "Slice", Value = new DateTimePartitionValue() { Date = "SliceStart", Format = "yyyyMMdd-HHmmss" } } } }, Availability = new Availability() { Frequency = SchedulePeriod.Minute, Interval = DataFactoryConfig.PipelineFrequencyInMinutes, }, } } }); outputModelDatasets.Add(outputModelDataset); string outputPlaceholderDataset = String.Format("outputplaceholder_{0}", region); client.Datasets.CreateOrUpdate(resourceGroupName, dataFactoryName, new DatasetCreateOrUpdateParameters() { Dataset = new Dataset() { Name = outputPlaceholderDataset, Properties = new DatasetProperties() { LinkedServiceName = "LinkedService-AzureStorage", TypeProperties = new AzureBlobDataset() { FolderPath = "any", Format = new TextFormat() { }, }, Availability = new Availability() { Frequency = SchedulePeriod.Minute, Interval = 15, }, } } }); outputPlaceholderDatasets.Add(outputPlaceholderDataset); } }
private static void CreateInputOutputDatasets( string resourceGroupName, string dataFactoryName, DataFactoryManagementClient client, out string Dataset_Source, out string Dataset_Destination) { // create input and output tables Console.WriteLine("Creating input and output tables"); Dataset_Source = "DatasetBlobSource"; Dataset_Destination = "DatasetBlobDestination"; client.Datasets.CreateOrUpdate(resourceGroupName, dataFactoryName, new DatasetCreateOrUpdateParameters() { Dataset = new Dataset() { Name = Dataset_Source, Properties = new DatasetProperties() { LinkedServiceName = "LinkedService-AzureStorage", TypeProperties = new AzureBlobDataset() { FolderPath = "sample/", FileName = "input.txt" }, External = true, Availability = new Availability() { Frequency = SchedulePeriod.Minute, Interval = 15, }, Policy = new Policy() { Validation = new ValidationPolicy() { MinimumRows = 1 } } } } }); client.Datasets.CreateOrUpdate(resourceGroupName, dataFactoryName, new DatasetCreateOrUpdateParameters() { Dataset = new Dataset() { Name = Dataset_Destination, Properties = new DatasetProperties() { LinkedServiceName = "LinkedService-AzureStorage", TypeProperties = new AzureBlobDataset() { FolderPath = "sample/output/{Slice}", PartitionedBy = new Collection<Partition>() { new Partition() { Name = "Slice", Value = new DateTimePartitionValue() { Date = "SliceStart", Format = "yyyyMMdd-HHmm" } } } }, Availability = new Availability() { Frequency = SchedulePeriod.Minute, Interval = 15, }, } } }); }
private void CreatePipeline_ProductsRecommenderMahout(DataFactoryManagementClient client) { // Setup Pipeline Parameters var parameters = new PipelineCreateOrUpdateParameters() { Pipeline = new Pipeline() { Name = "ProductsRecommenderMahoutPipeline", Properties = new PipelineProperties() { Description = "Pipeline to Run a Mahout Custom Map Reduce Jar to generate Recommendations.", Start = _pipelineStartDate, End = _pipelineEndDate, Activities = new List<Activity>() { new Activity() { Name = "MahoutActivity", Description = "Custom Map Reduce to generate Mahout result", Inputs = new List<ActivityInput>() { new ActivityInput("MahoutInputProductsUsageTable") }, Outputs = new List<ActivityOutput>() { new ActivityOutput("ProductsRecommendationTable") }, LinkedServiceName = "HDInsightLinkedService", TypeProperties = new HDInsightMapReduceActivity() { ClassName = "org.apache.mahout.cf.taste.hadoop.similarity.item.ItemSimilarityJob", JarFilePath = "jars/mahout/mahout-core-0.9.0.2.1.12.0-2329-job.jar", JarLinkedService = "StorageLinkedService", Arguments = new List<string>() { UpdateParameters("-s"), UpdateParameters("SIMILARITY_COOCCURRENCE"), UpdateParameters("--input"), UpdateParameters("$$Text.Format('wasb://productrec@<account name>.blob.core.windows.net/mahoutinput/yearno={0:yyyy}/monthno={0:%M}/', SliceStart)"), UpdateParameters("--output"), UpdateParameters("$$Text.Format('wasb://productrec@<account name>.blob.core.windows.net/recommendations/yearno={0:yyyy}/monthno={0:%M}/', SliceStart)"), UpdateParameters("--tempDir"), UpdateParameters("$$Text.Format('wasb://productrec@<account name>.blob.core.windows.net/temprecommendationsdir/yearno={0:yyyy}/monthno={0:%M}/', SliceStart)") }, }, Policy = new ActivityPolicy() { Concurrency = 1, ExecutionPriorityOrder = "NewestFirst", Retry = 1, Timeout = new TimeSpan(0, 1, 0, 0) } } }, } } }; // Create PipeLine client.Pipelines.CreateOrUpdateAsync(Parameters.Tenant.SiteName, Parameters.Tenant.SiteName, parameters).Wait(); }
private static void CreatePipelines( string resourceGroupName, string dataFactoryName, DataFactoryManagementClient client, string inputDataset, IList<string> outputDatasets, string[] parameters) { int i = 0; foreach (string parameter in parameters) { string[] parameterList = parameter.Split(','); string region = parameterList[0]; // create a pipeline DateTime PipelineActivePeriodStartTime = DateTime.Parse(DataFactoryConfig.PipelineStartTime); DateTime PipelineActivePeriodEndTime = PipelineActivePeriodStartTime.AddMinutes(DataFactoryConfig.MinutesToAddToStartTimeForEndTime); string PipelineName = String.Format("ScoringPipeline_{0}", region); Console.WriteLine("Creating a pipeline {0}", PipelineName); client.Pipelines.CreateOrUpdate(resourceGroupName, dataFactoryName, new PipelineCreateOrUpdateParameters() { Pipeline = new Pipeline() { Name = PipelineName, Properties = new PipelineProperties() { Description = "Pipeline for scoring", // Initial value for pipeline's active period. Start = PipelineActivePeriodStartTime, End = PipelineActivePeriodEndTime, Activities = new List<Activity>() { new Activity() { Name = "AzureMLBatchScoring", Inputs = new List<ActivityInput>() { new ActivityInput() { Name = inputDataset } }, Outputs = new List<ActivityOutput>() { new ActivityOutput() { Name = outputDatasets[i] } }, // Note: The linked service names referenced here are generated previously by the retraining pipeline code. LinkedServiceName = Utilities.GetScoringLinkedServiceName(DataFactoryConfig.ScoringLinkedServiceNamePrefix, region), TypeProperties = new AzureMLBatchExecutionActivity() { WebServiceInput = inputDataset, WebServiceOutputs = new Dictionary<string, string> { {"output1", outputDatasets[i]} }, GlobalParameters = new Dictionary<string, string> { {"starttime", "$$Text.Format('\\'{0:yyyy-MM-dd HH:mm:ss}\\'', WindowStart)"}, {"endtime", "$$Text.Format('\\'{0:yyyy-MM-dd HH:mm:ss}\\'', WindowEnd)"}, {"cpf", region} } } } }, } } }); i++; } }
protected UnitTestBase() { this.Client = new DataFactoryManagementClient(); }
private static void CreateLinkedService( string resourceGroupName, string dataFactoryName, DataFactoryManagementClient client, IList<UpdateResourceEndpoint> endpoints, string[] parameters) { // create Azure ML training linked services Console.WriteLine("Creating Azure ML training linked service"); client.LinkedServices.CreateOrUpdate(resourceGroupName, dataFactoryName, new LinkedServiceCreateOrUpdateParameters() { LinkedService = new LinkedService() { Name = "LinkedServiceRetraining-AzureML", Properties = new LinkedServiceProperties ( new AzureMLLinkedService(DataFactoryConfig.RetrainingEndPoint, DataFactoryConfig.RetrainingApiKey ) { } ) } } ); int i = 0; foreach (UpdateResourceEndpoint endpoint in endpoints) { string[] parameterList = parameters[i].Split(','); string region = parameterList[0]; // create Azure ML scoring linked services Console.WriteLine("Creating Azure ML scoring linked service for {0}", endpoint); client.LinkedServices.CreateOrUpdate(resourceGroupName, dataFactoryName, new LinkedServiceCreateOrUpdateParameters() { LinkedService = new LinkedService() { // Note: The linked service names generated here are also used by the scoring pipeline. Name = Utilities.GetScoringLinkedServiceName(DataFactoryConfig.ScoringLinkedServiceNamePrefix, region), Properties = new LinkedServiceProperties ( new AzureMLLinkedService(endpoint.mlEndpoint, endpoint.apiKey) { UpdateResourceEndpoint = endpoint.updateResourceEndpointUrl } ) } } ); i++; } }