/// <summary>
/// Wraps an existing <see cref="LinkedService"/> instance.
/// </summary>
/// <param name="linkedService">The linked service to wrap; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="linkedService"/> is null.</exception>
public PSLinkedService(LinkedService linkedService)
{
    if (linkedService == null)
    {
        // nameof is refactor-safe; the original passed the parameter name as a string literal.
        throw new ArgumentNullException(nameof(linkedService));
    }

    this.linkedService = linkedService;
}
/// <summary>
/// Resolves a block blob reference from an Azure Storage linked service and a
/// "container/path/to/blob" style path, creating the container if it does not exist.
/// </summary>
/// <param name="linkedService">Linked service whose type properties must be <see cref="AzureStorageLinkedService"/>.</param>
/// <param name="filepath">Blob path whose first '/'-delimited segment is the container name.</param>
/// <returns>A <see cref="CloudBlockBlob"/> reference for the requested path.</returns>
/// <exception cref="ArgumentException">
/// Thrown when the linked service is not an Azure Storage linked service, or when
/// <paramref name="filepath"/> contains no '/' separator.
/// </exception>
public static CloudBlockBlob GetBlob(LinkedService linkedService, string filepath)
{
    var azStorage = linkedService.Properties.TypeProperties as AzureStorageLinkedService;
    if (azStorage == null)
        throw new ArgumentException("LinkedService is not AzureStorageLinkedService");

    var account = CloudStorageAccount.Parse(azStorage.ConnectionString);
    var client = account.CreateCloudBlobClient();

    // The first path segment is the container name; everything after it is the blob name.
    var index = filepath.IndexOf('/');
    if (index < 0)
    {
        // The original code threw an opaque ArgumentOutOfRangeException from
        // Substring(0, -1) when no separator was present; fail with a clear message instead.
        throw new ArgumentException("filepath must be of the form 'container/blobName'", nameof(filepath));
    }

    var container = client.GetContainerReference(filepath.Substring(0, index));
    container.CreateIfNotExists();
    return container.GetBlockBlobReference(filepath.Substring(index + 1));
}
/// <summary>
/// Captures the linked service, dataset, and slice date parts (year/month/day strings)
/// that together identify a blob location.
/// </summary>
/// <param name="linkedService">Linked service providing storage access.</param>
/// <param name="dataset">Dataset describing the blob layout.</param>
/// <param name="sliceYear">Year component of the slice, as a string.</param>
/// <param name="sliceMonth">Month component of the slice, as a string.</param>
/// <param name="sliceDay">Day component of the slice, as a string.</param>
public BlobLocation(LinkedService linkedService, Dataset dataset, string sliceYear, string sliceMonth, string sliceDay)
{
    // Plain field capture; no validation was performed by the original and none is added here.
    _dataset = dataset;
    _linkedService = linkedService;
    _sliceDay = sliceDay;
    _sliceMonth = sliceMonth;
    _sliceYear = sliceYear;
}
/// <summary>
/// Verifies the happy path of the create-linked-service cmdlet: the client reads the
/// JSON definition, calls CreateOrUpdateLinkedService, and the cmdlet writes a
/// PSLinkedService with the expected identifiers to the runtime exactly once.
/// </summary>
public void CanCreateLinkedService()
{
    // Arrange
    // Expected service comes back with ProvisioningState "Succeeded" so the cmdlet
    // completes without throwing (contrast with CanThrowIfLinkedServiceProvisioningFailed).
    LinkedService expected =
        new LinkedService()
        {
            Name = linkedServiceName,
            Properties = new HDInsightBYOCLinkedService() { ProvisioningState = "Succeeded" }
        };

    dataFactoriesClientMock.Setup(c => c.ReadJsonFileContent(It.IsAny<string>()))
        .Returns(rawJsonContent)
        .Verifiable();

    // CallBase lets the real CreatePSLinkedService run while still verifying the
    // parameters it was invoked with.
    dataFactoriesClientMock.Setup(
        c =>
            c.CreatePSLinkedService(
                It.Is<CreatePSLinkedServiceParameters>(
                    parameters =>
                        parameters.Name == linkedServiceName &&
                        parameters.ResourceGroupName == ResourceGroupName &&
                        parameters.DataFactoryName == DataFactoryName)))
        .CallBase()
        .Verifiable();

    dataFactoriesClientMock.Setup(
        c =>
            c.CreateOrUpdateLinkedService(ResourceGroupName, DataFactoryName, linkedServiceName, rawJsonContent))
        .Returns(expected)
        .Verifiable();

    // Action
    // Force suppresses the confirmation prompt so ExecuteCmdlet runs unattended.
    cmdlet.File = filePath;
    cmdlet.Force = true;
    cmdlet.ExecuteCmdlet();

    // Assert
    dataFactoriesClientMock.VerifyAll();

    commandRuntimeMock.Verify(
        f =>
            f.WriteObject(
                It.Is<PSLinkedService>(
                    ls =>
                        ResourceGroupName == ls.ResourceGroupName &&
                        DataFactoryName == ls.DataFactoryName &&
                        expected.Name == ls.LinkedServiceName &&
                        expected.Properties == ls.Properties)),
        Times.Once());
}
/// <summary>
/// Extracts the storage connection string from a linked service.
/// Returns null when the asset is null, when its properties are not an
/// <see cref="AzureStorageLinkedService"/>, or when no connection string is set.
/// </summary>
/// <param name="asset">The linked service to inspect; may be null.</param>
/// <returns>The connection string, or null when it cannot be resolved.</returns>
private static string GetConnectionString(LinkedService asset)
{
    // Null-conditional chain is semantically identical to the original's
    // explicit null checks: any failure along the way yields null.
    return (asset?.Properties as AzureStorageLinkedService)?.ConnectionString;
}
/// <summary>
/// Builds a database-backed dataset provider from a custom dataset definition:
/// downloads the assembly named in the dataset from blob storage, loads it, and
/// instantiates its DbContext with the SQL connection string of the linked service.
/// </summary>
/// <param name="dataset">Custom dataset whose extra properties name the instance, context type, and assembly file.</param>
/// <param name="linkedService">Linked service whose type properties must be <see cref="AzureSqlDatabaseLinkedService"/>.</param>
/// <param name="linkedServiceResolver">Resolves the storage linked service that hosts the assembly package.</param>
/// <exception cref="ArgumentException">Thrown when the dataset or linked service is not of the expected type.</exception>
public CustomDbDatasetProvider(Dataset dataset, LinkedService linkedService, Func<string, LinkedService> linkedServiceResolver)
{
    // Fail fast with clear messages instead of the NullReferenceExceptions the
    // original unchecked 'as' casts would have produced on unexpected types.
    var props = dataset.Properties.TypeProperties as CustomDataset;
    if (props == null)
        throw new ArgumentException("Dataset is not a CustomDataset", nameof(dataset));

    var sqlService = linkedService.Properties.TypeProperties as AzureSqlDatabaseLinkedService;
    if (sqlService == null)
        throw new ArgumentException("LinkedService is not AzureSqlDatabaseLinkedService", nameof(linkedService));

    var packageLinkedServiceName = props.ServiceExtraProperties["packageLinkedService"].ToString();
    Dataset = new CustomDbDataset
    {
        InstanceName = props.ServiceExtraProperties["instanceName"].ToString(),
        DbContextName = props.ServiceExtraProperties["dbContextName"].ToString(),
        AssemblyFile = props.ServiceExtraProperties["assemblyFile"].ToString()
    };
    InstanceName = Dataset.InstanceName;

    // Download the assembly that contains the DbContext implementation next to the process.
    var blob = Helpers.GetBlob(linkedServiceResolver(packageLinkedServiceName), Dataset.AssemblyFile);
    var path = Path.Combine(Environment.CurrentDirectory, Path.GetFileName(Dataset.AssemblyFile));
    blob.DownloadToFile(path, FileMode.Create);

    // Load the downloaded assembly and construct the named DbContext with the SQL connection string.
    var assembly = Assembly.LoadFrom(path);
    Context = (DbContext)Activator.CreateInstance(assembly.GetType(Dataset.DbContextName), sqlService.ConnectionString);
}
/// <summary>
/// Builds a blob-backed provider for a dataset: resolves the partitioned blob path for
/// the given slice, obtains the blob reference, and derives a CSV configuration from
/// the dataset's text format (delimiter and encoding) when one is declared.
/// </summary>
/// <param name="dataset">Dataset whose type properties must be an AzureBlobDataset.</param>
/// <param name="linkedService">Storage linked service used to resolve the blob.</param>
/// <param name="slice">Slice whose time window drives partition substitution in the path.</param>
public AzureBlobProvider(Dataset dataset, LinkedService linkedService, Slice slice)
{
    InstanceName = dataset.Name;
    // NOTE(review): unchecked 'as' cast — throws NullReferenceException if Properties
    // is not DatasetProperties; presumably guaranteed by the caller — confirm.
    Structure = (dataset.Properties as DatasetProperties).Structure;
    var azblobDataset = dataset.Properties.TypeProperties as AzureBlobDataset;
    // NOTE(review): Path.Combine uses the OS separator ('\' on Windows) while blob
    // paths expect '/'; Helpers.GetBlob splits on '/' — verify this runs where it works.
    var filepath = Path.Combine(azblobDataset.FolderPath, azblobDataset.FileName);
    // Substitute partition tokens (e.g. year/month/day) for this slice.
    filepath = Helpers.ReplaceByPatition(filepath, azblobDataset.PartitionedBy, slice);
    Blob = Helpers.GetBlob(linkedService, filepath);
    var format = azblobDataset.Format as TextFormat;
    // Only TextFormat carries delimiter/encoding; anything else falls back to defaults.
    if (format != null)
        Configuration = new CsvConfiguration { Delimiter = format.ColumnDelimiter, Encoding = Encoding.GetEncoding(format.EncodingName) };
    else
        Configuration = new CsvConfiguration { };
}
/// <summary>
/// Builds a blob provider from a custom dataset definition: reads the instance name,
/// optional encoding, and a templated file path from ServiceExtraProperties, expands
/// {token} placeholders from the slice window, and resolves the blob and its format.
/// </summary>
/// <param name="dataset">Custom dataset whose extra properties drive configuration.</param>
/// <param name="linkedService">Storage linked service used to resolve the blob.</param>
/// <param name="slice">Slice whose Start/End feed the path token substitution.</param>
public CustomAzureBlobProvider(Dataset dataset, LinkedService linkedService, Slice slice)
{
    var props = dataset.Properties.TypeProperties as CustomDataset;
    if (props.ServiceExtraProperties.ContainsKey("instanceName"))
        InstanceName = props.ServiceExtraProperties["instanceName"].ToString();
    // Optional encoding name; an unrecognized name is deliberately ignored (best-effort).
    var enc = props.ServiceExtraProperties.ContainsKey("encoding") ? props.ServiceExtraProperties["encoding"].ToString() : null;
    if (!string.IsNullOrEmpty(enc))
    {
        try { Encoding = Encoding.GetEncoding(enc); } catch (ArgumentException){}
    }
    var filepath = props.ServiceExtraProperties["filePath"].ToString();
    // Find {token} placeholders; each token names another extra property of the form
    // "<dateFormat>, <sliceStart|sliceEnd>" describing how to render the slice boundary.
    var regex = new Regex(@"\{(.+?)\}");
    var matches = regex.Matches(filepath);
    foreach(Match m in matches)
    {
        var key = m.Groups[1].Value;
        if (props.ServiceExtraProperties.ContainsKey(key))
        {
            var formatAndName = props.ServiceExtraProperties[key].ToString().Split(',').Select(_ => _.Trim()).ToArray();
            var value = formatAndName[1].ToLower() == "slicestart"
                ? slice.Start.ToString(formatAndName[0])
                : formatAndName[1].ToLower() == "sliceend" ? slice.End.ToString(formatAndName[0]) : "";
            // NOTE(review): this replaces "{<formatAndName[1]>}" (e.g. "{sliceStart}"),
            // not "{<key>}" that was matched — works only when the placeholder text
            // equals the slice-boundary name; confirm this is the intended contract.
            filepath = filepath.Replace($"{{{formatAndName[1]}}}", value);
        }
    }
    Blob = Helpers.GetBlob(linkedService, filepath);
    // "json" (case-insensitive) is the only recognized format; anything else is Unknown.
    var format = props.ServiceExtraProperties["format"];
    Format = format == null ? CustomAzureBlobFormat.Unknown : format.ToString().ToLower() == "json" ? CustomAzureBlobFormat.Json : CustomAzureBlobFormat.Unknown;
    // NOTE(review): leftover debug output — consider removing the "test" probe.
    if (props.ServiceExtraProperties.ContainsKey("test"))
        Console.Write(props.ServiceExtraProperties["test"].ToString());
}
/// <summary>
/// Initializes the wrapper around a fresh, empty <see cref="LinkedService"/>.
/// </summary>
public PSLinkedService()
{
    this.linkedService = new LinkedService();
}
/// <summary>
/// Verifies the failure path of the create-linked-service cmdlet: when the service
/// comes back with ProvisioningState "Failed", ExecuteCmdlet surfaces a
/// ProvisioningFailedException rather than writing the object to the pipeline.
/// </summary>
public void CanThrowIfLinkedServiceProvisioningFailed()
{
    // Arrange
    // Same fixture as the success test, except the returned service reports "Failed".
    LinkedService expected =
        new LinkedService()
        {
            Name = linkedServiceName,
            Properties = new HDInsightBYOCLinkedService() { ProvisioningState = "Failed" }
        };

    dataFactoriesClientMock.Setup(c => c.ReadJsonFileContent(It.IsAny<string>()))
        .Returns(rawJsonContent)
        .Verifiable();

    // CallBase lets the real CreatePSLinkedService run so it can observe the
    // failed provisioning state and throw.
    dataFactoriesClientMock.Setup(
        c =>
            c.CreatePSLinkedService(
                It.Is<CreatePSLinkedServiceParameters>(
                    parameters =>
                        parameters.Name == linkedServiceName &&
                        parameters.ResourceGroupName == ResourceGroupName &&
                        parameters.DataFactoryName == DataFactoryName)))
        .CallBase()
        .Verifiable();

    dataFactoriesClientMock.Setup(
        c =>
            c.CreateOrUpdateLinkedService(ResourceGroupName, DataFactoryName, linkedServiceName, rawJsonContent))
        .Returns(expected)
        .Verifiable();

    // Action
    cmdlet.File = filePath;
    cmdlet.Force = true;

    // Assert
    Assert.Throws<ProvisioningFailedException>(() => cmdlet.ExecuteCmdlet());
}
/// <summary>
/// Determines whether this provider handles the pairing: a custom dataset backed by
/// an Azure SQL Database linked service.
/// </summary>
/// <param name="dataset">Candidate dataset.</param>
/// <param name="linkedService">Candidate linked service.</param>
/// <returns>True when both type discriminators match; otherwise false.</returns>
public static bool IsMatch(Dataset dataset, LinkedService linkedService)
{
    // Guard-clause form of the original conjunction; evaluation order is preserved.
    if (dataset.Properties.Type != "CustomDataset")
    {
        return false;
    }

    return linkedService.Properties.Type == "AzureSqlDatabase";
}
/// <summary>
/// Determines whether this provider handles the pairing: a custom dataset whose extra
/// "type" property is "AzureBlob", backed by an Azure Storage linked service.
/// </summary>
/// <param name="dataset">Candidate dataset.</param>
/// <param name="linkedService">Candidate linked service.</param>
/// <returns>True when all three discriminators match; otherwise false.</returns>
public static bool IsMatch(Dataset dataset, LinkedService linkedService)
{
    // Guard-clause form of the original conjunction; evaluation order is preserved,
    // so the cast below only runs once the dataset type is known to be CustomDataset.
    if (dataset.Properties.Type != "CustomDataset")
    {
        return false;
    }
    if (linkedService.Properties.Type != "AzureStorage")
    {
        return false;
    }

    var customProps = (CustomDataset)dataset.Properties.TypeProperties;
    return customProps.ServiceExtraProperties["type"].ToString() == "AzureBlob";
}
/// <summary>
/// Parses a pipeline JSON definition and materializes, for the named activity, the
/// model objects needed to run it locally: its linked services, its input/output
/// datasets, and the activity itself. Sibling dataset and linked-service JSON files
/// are resolved from the pipeline file's directory as "&lt;name&gt;.json".
/// </summary>
/// <param name="pipelinePath">Path to the pipeline JSON file.</param>
/// <param name="activityName">Name of the activity within the pipeline to load.</param>
/// <param name="linkedServices">Receives the storage and compute linked services referenced by the activity.</param>
/// <param name="datasets">Receives the activity's input and output datasets.</param>
/// <param name="activity">Receives the populated activity model.</param>
/// <exception cref="Exception">
/// Thrown when a dataset declares an unexpected type, or when no activity with
/// <paramref name="activityName"/> exists in the pipeline.
/// </exception>
public static void InitParameters(
    string pipelinePath,
    string activityName,
    out List<Models.LinkedService> linkedServices,
    out List<Models.Dataset> datasets,
    out Models.Activity activity)
{
    // init the parameters
    linkedServices = new List<Models.LinkedService>();
    datasets = new List<Models.Dataset>();
    activity = new Models.Activity();
    // parse the pipeline json source
    var pipelineJson = File.ReadAllText(pipelinePath);
    var dummyPipeline = JsonConvert.DeserializeObject<Dummy.Pipeline>(pipelineJson);
    foreach (var dummyActivity in dummyPipeline.Properties.Activities)
    {
        // find the relevant activity in the pipeline
        if (dummyActivity.Name != activityName)
        {
            continue;
        }
        activity.Name = dummyActivity.Name;
        // get the input and output tables; the HashSet de-duplicates a dataset
        // that appears as both input and output
        var dummyDatasets = new HashSet<Dummy.ActivityData>();
        dummyDatasets.UnionWith(dummyActivity.Inputs);
        dummyDatasets.UnionWith(dummyActivity.Outputs);
        var dummyServices = new HashSet<Dummy.LinkedService>();
        // init the data tables
        foreach (var dummyDataset in dummyDatasets)
        {
            // parse the table json source (located next to the pipeline file)
            var dataPath = Path.Combine(Path.GetDirectoryName(pipelinePath), dummyDataset.Name + ".json");
            var dataJson = File.ReadAllText(dataPath);
            var dummyTable = JsonConvert.DeserializeObject<Dummy.Table>(dataJson);
            {
                // initialize dataset properties from the declared dataset type
                Models.DatasetTypeProperties datasetProperties;
                switch (dummyTable.Properties.Type)
                {
                    case "AzureBlob":
                        // init the azure model
                        var blobDataset = new Models.AzureBlobDataset();
                        blobDataset.FolderPath = dummyTable.Properties.TypeProperties.FolderPath;
                        blobDataset.FileName = dummyTable.Properties.TypeProperties.FileName;
                        datasetProperties = blobDataset;
                        break;
                    case "AzureTable":
                        var tableDataset = new Models.AzureTableDataset();
                        tableDataset.TableName = dummyTable.Properties.TypeProperties.TableName;
                        datasetProperties = tableDataset;
                        break;
                    default:
                        throw new Exception(string.Format("Unexpected Dataset.Type {0}", dummyTable.Properties.Type));
                }
                // initialize dataset
                {
                    var dataDataset = new Models.Dataset(
                        dummyDataset.Name,
                        new Models.DatasetProperties(
                            datasetProperties,
                            new CommonModels.Availability(),
                            ""
                        )
                    );
                    dataDataset.Properties.LinkedServiceName = dummyTable.Properties.LinkedServiceName;
                    datasets.Add(dataDataset);
                }
            }
            // register the input or output in the activity, based on the
            // concrete ActivityData subtype
            if (dummyDataset is Dummy.ActivityInput)
            {
                activity.Inputs.Add(new CommonModels.ActivityInput(dummyDataset.Name));
            }
            if (dummyDataset is Dummy.ActivityOutput)
            {
                activity.Outputs.Add(new CommonModels.ActivityOutput(dummyDataset.Name));
            }
            // parse the linked service json source for later use
            var servicePath = Path.Combine(Path.GetDirectoryName(pipelinePath), dummyTable.Properties.LinkedServiceName + ".json");
            var serviceJson = File.ReadAllText(servicePath);
            var storageService = JsonConvert.DeserializeObject<Dummy.StorageService>(serviceJson);
            dummyServices.Add(storageService);
        }
        // parse the hd insight service json source (the activity's own linked service)
        {
            var servicePath = Path.Combine(Path.GetDirectoryName(pipelinePath), dummyActivity.LinkedServiceName + ".json");
            var serviceJson = File.ReadAllText(servicePath);
            var computeService = JsonConvert.DeserializeObject<Dummy.ComputeService>(serviceJson);
            dummyServices.Add(computeService);
        }
        // init the services
        foreach (var dummyService in dummyServices)
        {
            // NOTE(review): if a service is neither StorageService nor ComputeService
            // this stays null and a null entry is added to linkedServices — confirm
            // downstream consumers tolerate that.
            Models.LinkedService linkedService = null;
            // init if it is a storage service
            if (dummyService is Dummy.StorageService)
            {
                var dummyStorageService = dummyService as Dummy.StorageService;
                var service = new Models.AzureStorageLinkedService();
                service.ConnectionString = dummyStorageService.Properties.TypeProperties.ConnectionString;
                linkedService = new Models.LinkedService(
                    dummyService.Name,
                    new Models.LinkedServiceProperties(service)
                );
            }
            // init if it is a hd insight service
            if (dummyService is Dummy.ComputeService)
            {
                var service = new Models.HDInsightLinkedService();
                linkedService = new Models.LinkedService(
                    dummyService.Name,
                    new Models.LinkedServiceProperties(service)
                );
            }
            linkedServices.Add(linkedService);
        }
    }
    // Name is only assigned when a matching activity was found above.
    if (activity.Name == null)
    {
        throw new Exception(string.Format("Activity {0} not found.", activityName));
    }
}