private static void corregirClaimSinCompresion(DataFactoryManagementClient client)
{
    // Copy activity that loads the uncompressed cc_history claim files from
    // Data Lake Store into the warehouse landing table via PolyBase.
    var la = new List<Activity>();
    var ca = new CopyActivity();
    ca.Name = "CopyPipeline-Lake-DW-" + "cc_history";
    ca.Source = new AzureDataLakeStoreSource(recursive: false);

    // SQL DW sink configured for PolyBase loading.
    var ware = new SqlDWSink();
    ware.AllowPolyBase = true;
    ware.WriteBatchSize = 1000;
    var poly = new PolybaseSettings();
    poly.RejectValue = 0;
    poly.RejectType = "percentage";
    poly.RejectSampleValue = 0;
    poly.UseTypeDefault = true;
    ware.PolyBaseSettings = poly;
    ca.Sink = ware;

    // Stage the copy through blob storage and skip incompatible rows.
    ca.EnableStaging = true;
    ca.CloudDataMovementUnits = 0;
    ca.EnableSkipIncompatibleRow = true;
    var stg = new StagingSettings();
    stg.Path = "adfstagingcopydata";
    LinkedServiceReference lsIntermedio = new LinkedServiceReference("temp_StagingStorage-c0p");
    stg.LinkedServiceName = lsIntermedio;
    ca.StagingSettings = stg;

    var trans = new TabularTranslator();
    //trans.ColumnMappings = DatosGrales.traerCamposPolybase("cc_history");
    ca.Translator = trans;

    // Input and output dataset references.
    var inp = new List<DatasetReference>();
    var dr = new DatasetReference("Dataset_Descompresion_Claim_DataLakeStore_cc_history");
    inp.Add(dr);
    ca.Inputs = inp;
    var outp = new List<DatasetReference>();
    var drO = new DatasetReference("Dataset_Warehouse_landing-pruebaDFv2_cc_history");
    outp.Add(drO);
    ca.Outputs = outp;

    // Wrap the activity in a pipeline and publish it to the data factory.
    la.Add(ca);
    var pipe = new PipelineResource();
    pipe.Activities = la;
    client.Pipelines.CreateOrUpdate(DatosGrales.resourceGroup, DatosGrales.dataFactoryName,
        "Pipeline-Copy-Lake-ADW-cc_history", pipe);
    Console.Write("Pipeline-Copy-Lake-ADW-cc_history created.\n");
}
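
// A minimal usage sketch (not part of the original code): how a caller might build the
// DataFactoryManagementClient that these methods expect, following the ADF v2 .NET quickstart
// pattern (assumes using Microsoft.IdentityModel.Clients.ActiveDirectory, Microsoft.Rest and
// Microsoft.Rest.Azure.Authentication-style token credentials). The tenantId, applicationId,
// authenticationKey and subscriptionId values are hypothetical placeholders.
private static DataFactoryManagementClient crearClienteEjemplo()
{
    string tenantId = "<tenant-id>";
    string applicationId = "<service-principal-app-id>";
    string authenticationKey = "<service-principal-key>";
    string subscriptionId = "<subscription-id>";

    // Acquire a token for the Azure management endpoint with a service principal.
    var context = new AuthenticationContext("https://login.microsoftonline.com/" + tenantId);
    var credential = new ClientCredential(applicationId, authenticationKey);
    AuthenticationResult token = context.AcquireTokenAsync("https://management.azure.com/", credential).Result;

    // Wrap the token and point the client at the target subscription.
    ServiceClientCredentials cred = new TokenCredentials(token.AccessToken);
    return new DataFactoryManagementClient(cred) { SubscriptionId = subscriptionId };
}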
private static void crearPipesLakeaWarehouse(DataFactoryManagementClient client)
{
    string[] tablasWarehouse = DatosGrales.tablasLakeaWarehouse;

    for (int i = 0; i < tablasWarehouse.Length; i++)
    {
        // "schema.table" -> table name without schema, plus a dash-separated variant for dataset names.
        string nombreTablaSinSchema = tablasWarehouse[i].Split('.')[1];
        string nombreSinPunto = tablasWarehouse[i].Replace('.', '-');

        var la = new List<Activity>();
        var ca = new CopyActivity();
        ca.Name = "CopyPipeline-Lake-DW-" + nombreTablaSinSchema;
        var sor = new AzureDataLakeStoreSource(recursive: false);
        ca.Source = sor;

        // SQL DW sink: PolyBase load that truncates the landing table before copying.
        var ware = new SqlDWSink();
        ware.AllowPolyBase = true;
        ware.WriteBatchSize = 1000;
        var poly = new PolybaseSettings();
        poly.RejectValue = 1000;
        poly.RejectType = "value";
        //poly.RejectSampleValue = 0;
        poly.UseTypeDefault = false;
        ware.PolyBaseSettings = poly;
        ware.PreCopyScript = "truncate table landing." + nombreTablaSinSchema;
        ca.Sink = ware;

        // Stage the copy through blob storage and skip incompatible rows.
        ca.EnableStaging = true;
        ca.CloudDataMovementUnits = 0;
        ca.EnableSkipIncompatibleRow = true;
        var stg = new StagingSettings();
        stg.Path = "adfstagingcopydata";
        LinkedServiceReference lsIntermedio = new LinkedServiceReference("StagingStorageLakeToWarehouse");
        stg.LinkedServiceName = lsIntermedio;
        ca.StagingSettings = stg;

        var trans = new TabularTranslator();
        //trans.ColumnMappings = DatosGrales.traerCamposPolybase("cc_history");
        ca.Translator = trans;

        // Input and output dataset references for this table.
        var inp = new List<DatasetReference>();
        var dr = new DatasetReference("Dataset_Datastaging_DataLakeStore_" + nombreTablaSinSchema);
        inp.Add(dr);
        ca.Inputs = inp;
        var outp = new List<DatasetReference>();
        var drO = new DatasetReference("Dataset_Warehouse_" + nombreSinPunto);
        outp.Add(drO);
        ca.Outputs = outp;

        la.Add(ca);
        var pipe = new PipelineResource();
        pipe.Activities = la;

        // Currently publishes only the landing.ccst_RAJ pipeline; remove this check to publish one per table.
        if (tablasWarehouse[i] == "landing.ccst_RAJ")
        {
            client.Pipelines.CreateOrUpdate(DatosGrales.resourceGroup, DatosGrales.dataFactoryName,
                "Pipeline-Copy-Lake-ADW-" + nombreTablaSinSchema, pipe);
            Console.Write((i + 1) + ". Pipeline-Copy-Lake-ADW-" + nombreTablaSinSchema + " created.\n");
        }
    }
}
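
// A minimal sketch (assumption, not part of the original code) of how one of the published
// pipelines could be run and monitored with the same client, following the ADF v2 quickstart
// pattern. The pipeline name below is the one created above for landing.ccst_RAJ; resource
// group and factory name come from DatosGrales, as in the methods above.
private static void ejecutarPipelineEjemplo(DataFactoryManagementClient client)
{
    string pipelineName = "Pipeline-Copy-Lake-ADW-ccst_RAJ";

    // Trigger a run and capture its run id.
    CreateRunResponse runResponse = client.Pipelines
        .CreateRunWithHttpMessagesAsync(DatosGrales.resourceGroup, DatosGrales.dataFactoryName, pipelineName)
        .Result.Body;
    Console.Write("Run started: " + runResponse.RunId + "\n");

    // Poll until the copy finishes (Succeeded or Failed).
    PipelineRun run;
    while (true)
    {
        run = client.PipelineRuns.Get(DatosGrales.resourceGroup, DatosGrales.dataFactoryName, runResponse.RunId);
        if (run.Status == "InProgress" || run.Status == "Queued")
            System.Threading.Thread.Sleep(15000);
        else
            break;
    }
    Console.Write("Run " + runResponse.RunId + " finished with status: " + run.Status + "\n");
}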