/// <summary>
/// Executes the Razor template against the model (with the current slice
/// attached as <c>model.Slice</c>) and returns the template's Exports entry
/// from the view bag.
/// </summary>
/// <param name="template">Razor template source; null is treated as empty.</param>
/// <param name="model">Dynamic model the template renders against.</param>
/// <param name="slice">Current slice window, exposed to the template.</param>
public dynamic Transform(string template, dynamic model, Slice slice)
{
    model.Slice = slice;
    var bag = new DynamicViewBag();
    Razor.RunWithTemplate(template ?? "", model, bag);
    return ((dynamic)bag).Exports;
}
/// <summary>
/// Persists the transform result into the database context. Each property of
/// <paramref name="data"/> is treated as a collection of rows for the entity
/// type derived from the property's name; rows are upserted and the whole
/// batch is committed in a single transaction.
/// </summary>
/// <param name="slice">Current slice (not read in this implementation).</param>
/// <param name="data">Dynamic object whose properties hold row collections.</param>
public override void Save(Slice slice, dynamic data)
{
    var dict = ToDictionary(data);
    var properties = data.GetType().GetProperties() as PropertyInfo[];
    foreach (var property in properties)
    {
        // One shared guid per property batch; used as a surrogate key when the
        // item itself is converted to a dictionary below.
        var guid = Guid.NewGuid().ToString();
        var type = GetTypeFromPropertyName(property.Name);
        var keyProps = GetKeys(Context, type);
        foreach (var item in dict[property.Name] as IEnumerable<dynamic>)
        {
            // ExpandoObjects are already key/value shaped; others are flattened first.
            var props = item is ExpandoObject ? item : ToDictionary(item, guid);
            var obj = CreateAndCopy(type, props);
            AddOrModify(type, obj, keyProps, props);
        }
    }
    using (var tran = Context.Database.BeginTransaction())
    {
        try
        {
            Context.SaveChanges();
            tran.Commit();
        }
        // NOTE(review): a failed save is rolled back but silently swallowed —
        // callers cannot tell the write failed. Consider logging or rethrowing.
        catch (DbUpdateException)
        {
            tran.Rollback();
        }
    }
}
/// <summary>
/// Downloads the blob's text and deserializes it according to <c>Format</c>.
/// Only JSON is currently supported; any other format yields an empty list.
/// </summary>
public override dynamic Load(Slice slice)
{
    var text = Blob.DownloadText(Encoding);
    if (Format == CustomAzureBlobFormat.Json)
    {
        return (IEnumerable<dynamic>)Jil.JSON.DeserializeDynamic(text);
    }
    return new List<dynamic>();
}
/// <summary>
/// Serializes <paramref name="data"/> according to <c>Format</c> and uploads
/// the result to the blob. Unknown formats upload an empty string.
/// </summary>
public override void Save(Slice slice, dynamic data)
{
    var text = Format == CustomAzureBlobFormat.Json
        ? JsonConvert.SerializeObject(data)
        : "";
    Blob.UploadText(text ?? "", Encoding);
}
/// <summary>
/// Reads the blob as CSV and converts each record to an object shaped by
/// <c>Structure</c>. Returns an empty list when the blob does not exist.
/// </summary>
public override dynamic Load(Slice slice)
{
    var rows = new List<dynamic>();
    if (!Blob.Exists())
    {
        return rows;
    }
    var reader = new StreamReader(Blob.OpenRead(), Configuration.Encoding);
    using (var csv = new CsvHelper.CsvReader(reader))
    {
        foreach (IDictionary<string, object> record in csv.GetRecords<dynamic>())
        {
            rows.Add(Helpers.DictionaryToObject(record, Structure));
        }
    }
    return rows;
}
/// <summary>
/// Substitutes each "{partitionName}" token in <paramref name="str"/> with the
/// slice start/end time formatted per that partition's DateTimePartitionValue.
/// Partitions whose value is not a DateTimePartitionValue are skipped, and an
/// unrecognized marker (neither "slicestart" nor "sliceend") substitutes "".
/// </summary>
/// <param name="str">Path or name containing "{name}" placeholders.</param>
/// <param name="partitionedBy">Partition definitions from the dataset.</param>
/// <param name="slice">Slice whose Start/End supply the timestamp values.</param>
/// <returns>The input with all recognized placeholders replaced.</returns>
public static string ReplaceByPatition(string str, IEnumerable<Partition> partitionedBy, Slice slice)
{
    foreach (var partition in partitionedBy)
    {
        // BUG FIX: the original dereferenced the result of an `as` cast without
        // a null check, throwing NullReferenceException for any partition whose
        // Value is not a DateTimePartitionValue. Skip those instead.
        var datePartition = partition.Value as DateTimePartitionValue;
        if (datePartition == null || datePartition.Date == null) continue;

        // Invariant lowering: the marker is a machine token, not user text.
        var marker = datePartition.Date.ToLowerInvariant();
        string value;
        if (marker == "slicestart") value = slice.Start.ToString(datePartition.Format);
        else if (marker == "sliceend") value = slice.End.ToString(datePartition.Format);
        else value = "";

        str = str.Replace($"{{{partition.Name}}}", value);
    }
    return str;
}
/// <summary>
/// Builds a provider for a standard Azure blob dataset: resolves the blob path
/// (expanding partition placeholders for the slice) and derives the CSV
/// configuration from the dataset's TextFormat, when present.
/// </summary>
public AzureBlobProvider(Dataset dataset, LinkedService linkedService, Slice slice)
{
    InstanceName = dataset.Name;
    Structure = (dataset.Properties as DatasetProperties).Structure;

    var blobDataset = dataset.Properties.TypeProperties as AzureBlobDataset;
    var path = Path.Combine(blobDataset.FolderPath, blobDataset.FileName);
    path = Helpers.ReplaceByPatition(path, blobDataset.PartitionedBy, slice);
    Blob = Helpers.GetBlob(linkedService, path);

    var textFormat = blobDataset.Format as TextFormat;
    Configuration = textFormat == null
        ? new CsvConfiguration { }
        : new CsvConfiguration
          {
              Delimiter = textFormat.ColumnDelimiter,
              Encoding = Encoding.GetEncoding(textFormat.EncodingName)
          };
}
/// <summary>
/// Builds a provider from a custom dataset's extra properties: resolves the
/// instance name and text encoding, expands "{placeholder}" tokens in
/// "filePath" (each placeholder's property value is "&lt;dateFormat&gt;, slicestart"
/// or "&lt;dateFormat&gt;, sliceend"), and detects the serialization format.
/// </summary>
public CustomAzureBlobProvider(Dataset dataset, LinkedService linkedService, Slice slice)
{
    var props = dataset.Properties.TypeProperties as CustomDataset;

    if (props.ServiceExtraProperties.ContainsKey("instanceName"))
        InstanceName = props.ServiceExtraProperties["instanceName"].ToString();

    var enc = props.ServiceExtraProperties.ContainsKey("encoding")
        ? props.ServiceExtraProperties["encoding"].ToString()
        : null;
    if (!string.IsNullOrEmpty(enc))
    {
        // An invalid encoding name falls back to the provider's default.
        try { Encoding = Encoding.GetEncoding(enc); }
        catch (ArgumentException) { }
    }

    var filepath = props.ServiceExtraProperties["filePath"].ToString();
    var regex = new Regex(@"\{(.+?)\}");
    foreach (Match m in regex.Matches(filepath))
    {
        var key = m.Groups[1].Value;
        if (!props.ServiceExtraProperties.ContainsKey(key)) continue;

        var formatAndName = props.ServiceExtraProperties[key].ToString()
            .Split(',')
            .Select(_ => _.Trim())
            .ToArray();
        // BUG FIX: a spec without a comma used to throw IndexOutOfRangeException.
        if (formatAndName.Length < 2) continue;

        var marker = formatAndName[1].ToLower();
        var value = marker == "slicestart" ? slice.Start.ToString(formatAndName[0])
                  : marker == "sliceend" ? slice.End.ToString(formatAndName[0])
                  : "";
        // BUG FIX: replace the actual placeholder ("{key}"); the original
        // replaced "{slicestart}"/"{sliceend}", which was a no-op unless the
        // placeholder happened to be named after the slice marker itself.
        filepath = filepath.Replace($"{{{key}}}", value);
    }
    Blob = Helpers.GetBlob(linkedService, filepath);

    // BUG FIX: indexing a missing "format" key threw KeyNotFoundException; the
    // original's null check never ran. Probe the key before reading it.
    var formatText = props.ServiceExtraProperties.ContainsKey("format")
                     && props.ServiceExtraProperties["format"] != null
        ? props.ServiceExtraProperties["format"].ToString().ToLower()
        : null;
    Format = formatText == "json" ? CustomAzureBlobFormat.Json : CustomAzureBlobFormat.Unknown;

    // Preserved diagnostic echo of the optional "test" property.
    if (props.ServiceExtraProperties.ContainsKey("test"))
        Console.Write(props.ServiceExtraProperties["test"].ToString());
}
/// <summary>
/// Returns the database context itself as the loaded value; no rows are
/// materialized here. The slice argument is not used by this implementation.
/// </summary>
public override dynamic Load(Slice slice) => Context;
/// <summary>
/// Loads this dataset's content for the given slice. The concrete shape of the
/// returned value is provider-specific (e.g. a list of dynamic rows for blob
/// providers, or a database context for the db provider).
/// </summary>
/// <param name="slice">The slice window to load data for.</param>
public abstract dynamic Load(Slice slice);
/// <summary>
/// Persists the transformed data to this dataset for the given slice.
/// </summary>
/// <param name="slice">The slice window being written.</param>
/// <param name="data">The transform output to persist; shape is provider-specific.</param>
public abstract void Save(Slice slice, dynamic data);
/// <summary>
/// Entry point of the custom ADF activity: builds a provider for every input
/// and output dataset, loads all inputs into a dynamic model, runs the Razor
/// transform over it for the current slice, and saves the result to every
/// output. Datasets with no matching provider type are logged and skipped.
/// </summary>
/// <returns>An empty property bag; the activity exposes no extended outputs.</returns>
public IDictionary<string, string> Execute(
    IEnumerable<LinkedService> linkedServices,
    IEnumerable<Dataset> datasets,
    ADFActivity activity,
    IActivityLogger logger)
{
    logger.Write("Activity start.\n"); // typo fix: was "Actiity start."
    Func<string, LinkedService> linkedServiceResolver = name => linkedServices.Single(_ => _.Name == name);
    IEnumerable<string> inputNames = activity.Inputs.Select(_ => _.Name);
    IEnumerable<string> outputNames = activity.Outputs.Select(_ => _.Name);
    IList<CustomDbDataset> dbDatasets = new List<CustomDbDataset>();

    // Slice window and transform template arrive via the activity's extended properties.
    var dotNetActivity = (DotNetActivity)activity.TypeProperties;
    Slice slice = new Slice
    {
        Start = Convert.ToDateTime(dotNetActivity.ExtendedProperties["SliceStart"].ToString()),
        End = Convert.ToDateTime(dotNetActivity.ExtendedProperties["SliceEnd"].ToString())
    };
    string transform = dotNetActivity.ExtendedProperties["transform"].ToString();
    logger.Write("Slice from {0} to {1}\n", slice.Start, slice.End);

    // Create one provider per referenced dataset; unknown dataset types yield
    // a null provider and are filtered out of the dictionary below.
    logger.Write("create providers\n");
    IDictionary<string, IDatasetProvider> providers = inputNames.Concat(outputNames)
        .Distinct()
        .Select(datasetName =>
        {
            IDatasetProvider provider = null;
            var dataset = datasets.Single(_ => _.Name == datasetName);
            var linkedService = linkedServiceResolver(dataset.Properties.LinkedServiceName);
            if (CustomDbDatasetProvider.IsMatch(dataset, linkedService))
            {
                provider = new CustomDbDatasetProvider(dataset, linkedService, linkedServiceResolver);
                dbDatasets.Add((provider as CustomDbDatasetProvider).Dataset);
                logger.Write("{0} is CustomDbDataset\n", datasetName);
            }
            else if (CustomAzureBlobProvider.IsMatch(dataset, linkedService))
            {
                provider = new CustomAzureBlobProvider(dataset, linkedService, slice);
                logger.Write("{0} is CustomAzureBlobDataset\n", datasetName);
            }
            else if (AzureBlobProvider.IsMatch(dataset, linkedService))
            {
                provider = new AzureBlobProvider(dataset, linkedService, slice);
                logger.Write("{0} is AzureBlobDataset\n", datasetName);
            }
            else
            {
                logger.Write("{0} is UnknownDataset\n", datasetName);
            }
            return new { dataset = datasetName, provider = provider };
        })
        .Where(_ => _.provider != null)
        .ToDictionary(_ => _.dataset, _ => _.provider);
    inputNames = inputNames.Where(name => providers.ContainsKey(name)).ToList();
    // BUG FIX: outputs must be filtered the same way as inputs; an output
    // dataset with no matching provider previously caused a
    // KeyNotFoundException in the save loop below.
    outputNames = outputNames.Where(name => providers.ContainsKey(name)).ToList();

    // Build the transform model: one entry per input, keyed by instance name.
    dynamic model = new ExpandoObject();
    var dict = (IDictionary<string, dynamic>)model;
    foreach (var name in inputNames)
    {
        logger.Write("Try load {0}\n", name);
        dict[providers[name].InstanceName] = providers[name].Load(slice);
        logger.Write("Success\n");
    }

    // Run the Razor transform over the combined model.
    logger.Write("Start transform\n");
    var transformer = new FactoryWorkerTransformer(dbDatasets, logger);
    dynamic transformed = transformer.Transform(transform, model, slice);
    logger.Write("End\n");

    // Persist the transformed result to every output dataset.
    foreach (var name in outputNames)
    {
        logger.Write("Try save {0}\n", name);
        providers[name].Save(slice, transformed);
        logger.Write("Success\n");
    }
    return new Dictionary<string, string>();
}
/// <summary>
/// Renders <paramref name="data"/> as CSV using the provider's configuration
/// and uploads the text to the blob.
/// </summary>
public override void Save(Slice slice, dynamic data)
{
    var content = CreateCsv((IEnumerable<dynamic>)data, Configuration);
    Blob.UploadText(content ?? "");
}