/// <summary>
/// Reserves a new numeric ID from the shared counter document, stores
/// <paramref name="longUrl"/> under that ID, and returns the short-URL string.
/// </summary>
/// <param name="longUrl">The original URL to shorten.</param>
/// <returns>The string form of the generated <c>ShortUrl</c>.</returns>
/// <exception cref="CouchbaseDuplicateKeyException">The generated key already exists.</exception>
/// <exception cref="Exception">Any other insert failure (NOTE(review): a raw Exception is
/// unidiomatic, but callers may catch it — left unchanged).</exception>
public static async Task <string> GetShortUrl(string longUrl)
{
    // Reserve the next ID from the counter document.
    var id = await _bucket.IncrementAsync(Config.PrimaryKey);

    // Persist the long URL keyed by the new ID.
    var insert = await _bucket.InsertAsync(id.Value.ToString(), new { url = longUrl });

    if (!insert.Success)
    {
        if (insert.Status == ResponseStatus.KeyExists)
        {
            throw new CouchbaseDuplicateKeyException(insert.Message);
        }

        throw new Exception(insert.Message);
    }

    var shortUrl = new ShortUrl((long)id.Value);
    return shortUrl.ToString();
}
/// <summary>
/// Builds a materialized view document from the source document <paramref name="id"/>:
/// copies only the attributes listed in <paramref name="model"/>, tags it with the
/// entity name and type, and upserts it under the key "type::name::counter".
/// </summary>
/// <param name="id">Key of the source document in <paramref name="_bucket"/>.</param>
/// <param name="name">Entity name stored in the "Name" field and used in the target key.</param>
/// <param name="type">Entity type; also prefixes the per-type counter and the target key.</param>
/// <param name="model">Declares which attributes to project into the materialized document.</param>
/// <param name="_bucket">Bucket holding the raw source document.</param>
/// <param name="materializedBucket">Bucket receiving the materialized document and its counter.</param>
private async Task MaterializeSingleEntity(string id, string name, string type, EntityModel model, IBucket _bucket, IBucket materializedBucket)
{
    // Start the document fetch and the per-type counter increment concurrently,
    // then await both before building the projection.
    var docT = _bucket.GetAsync <string>(id);
    var counterT = materializedBucket.IncrementAsync(type + "::counter");
    var counter = await counterT;
    var doc = await docT;

    var raw = JsonConvert.DeserializeObject <Dictionary <string, object> >(doc.Value);

    var entity = new Dictionary <string, object>();
    entity.Add("Name", name);
    entity.Add("Type", type);

    // TryGetValue avoids the original ContainsKey + indexer double lookup.
    foreach (var attribute in model.Attributes)
    {
        if (raw.TryGetValue(attribute, out var value))
        {
            entity.Add(attribute, value);
        }
    }

    await materializedBucket.UpsertAsync(type + "::" + name + "::" + counter.Value.ToString(), entity);
}
/// <summary>
/// Parses the CSV log file at <paramref name="path"/> on a worker thread into a
/// dictionary of counter-keyed JSON row objects plus a "schema" entry listing the
/// (merged) header fields. Returns an empty dictionary when the file does not exist.
/// </summary>
/// <param name="path">Path to the CSV log file.</param>
/// <returns>Map of counter value → serialized row, plus "schema" → serialized field list.</returns>
private static Task <Dictionary <string, string> > ParseLogAsync(string path)
{
    return Task.Run(async() =>
    {
        Dictionary <string, string> json = new Dictionary <string, string>();
        if (!File.Exists(path))
        {
            return json;
        }

        using (StreamReader sr = new StreamReader(path))
        {
            var config = new CsvConfiguration();
            config.HasHeaderRecord = true;
            config.TrimFields = true;
            config.TrimHeaders = true;
            var parser = new CsvParser(sr, config);

            // First read is the header row.
            string[] row = parser.Read();
            var fields = UniqueHeaders(row);

            #region Debug
            // Single RNG for the whole parse: the original built a clock-seeded
            // `new Random()` per row, which can repeat sequences on fast iterations.
            var jitter = _debug ? new Random() : null;
            #endregion

            // Read-then-process: the original started IncrementAsync BEFORE reading
            // the next row, so it leaked one counter value at end-of-file per parse.
            while ((row = parser.Read()) != null)
            {
                // Reserve the row's key; overlaps the increment with row processing.
                var counterT = _bucket.IncrementAsync("counter");

                if (fields.Length != row.Length)
                {
                    Console.WriteLine("Warning, header count does not match line count. Headers: {0}, Lines: {1}", fields.Length, row.Length);
                }

                Dictionary <string, object> dict = new Dictionary <string, object>();
                for (int i = 0; i < fields.Length && i < row.Length; i++)
                {
                    dict.Add(fields[i], ParseValue(row[i]));
                }

                #region Debug
                if (_debug && dict.ContainsKey("Log_Time"))
                {
                    // Scatter timestamps ±15 days (43200 minutes = 30-day window) for demo data.
                    var date = DateTime.Parse(dict["Log_Time"].ToString());
                    var newDate = date.AddMinutes(jitter.Next(43200) - 43200 / 2);
                    dict["Log_Time"] = newDate.ToString("yyyy-MM-dd HH:mm:ss");
                }
                #endregion

                var counter = await counterT;
                json.Add(counter.Value.ToString(), JsonConvert.SerializeObject(dict));
            }

            fields = MergeSchema(fields);
            json.Add("schema", JsonConvert.SerializeObject(fields));
        }
        return json;
    });
}