/// <summary>Updates an existing workshop addon via gmpublish.exe using the stored workshop id.</summary>
private void button_actions_gmpublish_update_Click(object sender, EventArgs e)
{
    SaveSettings();

    // > Get paths
    string gmpublish_path = textbox_settings_paths_gmpublish.Text;
    string addon_path = textbox_settings_paths_addon.Text;
    string icon_path = StringPath.RemoveExtension(addon_path) + ".jpg";
    string id_path = StringPath.RemoveExtension(addon_path) + ".id";

    // > Check .gma (validate BEFORE reading the id file — previously ReadAllText ran
    // unconditionally and threw FileNotFoundException before any validation)
    if (!CheckAddonPath(true)) { return; }
    if (!CheckGMA(addon_path)) { return; }

    // > Check id file written by a previous publish
    if (!File.Exists(id_path))
    {
        Notification.NoFileError(id_path);
        return;
    }
    string id = File.ReadAllText(id_path).Trim();

    // > Check icon
    //if( !File.Exists( icon_path ) )
    //{
    //    Notification.NoFileError( icon_path );
    //}

    // > Publish — paths are quoted so spaces don't split the arguments
    string stdout = Shell.Execute(gmpublish_path, $"update -icon \"{icon_path}\" -addon \"{addon_path}\" -id {id}");
    Notification.Information("gmpublish.exe:\n" + stdout);
}
/// <summary>Lists the immediate children (delimiter "/") of <paramref name="path"/> in the container.</summary>
/// <exception cref="NotImplementedException">Thrown when the result is paged (NextMarker present).</exception>
public async Task<ICollection<StringPath>> List(StringPath path)
{
    var prefix = BasePathSansContainer;
    var listUri = new UriBuilder(Storage.BlobEndpoint)
        .WithPathSegment(ContainerName)
        .WithParameter("restype", "container")
        .WithParameter("comp", "list")
        .WithParameter("prefix", prefix.Add(path) + "/")
        .WithParameter("delimiter", "/")
        .Uri;
    var request = listUri.Get().WithBlobHeaders(Storage);
    var response = await H.SendAsync(request);
    response.EnsureSuccessStatusCode();
    var body = await response.ContentAsStream();
    var xml = body.ReadToEnd();
    var listed = (ListBlobsResponse) new XmlSerializer(typeof(ListBlobsResponse))
        .Deserialize(new StringReader(xml));
    if (listed.NextMarker.HasValue())
        throw new NotImplementedException("paging for listing blobs not implemented");
    return listed.Blobs.Select(b => new StringPath(b.Name).RelativePath(prefix)).ToList();
}
// > gmpublish functions
/// <summary>Publishes a new workshop addon via gmpublish.exe and stores the returned workshop id.</summary>
private void button_actions_gmpublish_publish_Click(object sender, EventArgs e)
{
    SaveSettings();

    // > Get paths
    string gmpublish_path = textbox_settings_paths_gmpublish.Text;
    string addon_path = textbox_settings_paths_addon.Text;
    string icon_path = StringPath.RemoveExtension(addon_path) + ".jpg";

    // > Check .gma
    if (!CheckAddonPath(true)) { return; }
    if (!CheckGMA(addon_path)) { return; }

    // > Check icon (NOTE(review): this warns but deliberately does not return — confirm
    // whether publish should proceed without an icon)
    if (!File.Exists(icon_path))
    {
        Notification.NoFileError(icon_path);
    }

    // > Publish — paths are quoted so spaces don't split the arguments
    string stdout = Shell.Execute(gmpublish_path, $"create -icon \"{icon_path}\" -addon \"{addon_path}\"");
    Notification.Information("gmpublish.exe:\n" + stdout);

    // > Write ID. The previous pattern @"[id=](\d+)" was a character class matching a
    // single 'i', 'd' or '=' — not the literal "id=". Match the literal prefix and
    // capture only the digits.
    Match match = Regex.Match(stdout, @"id=(\d+)");
    string id = match.Success ? match.Groups[1].Value : "null";
    File.WriteAllText(StringPath.RemoveExtension(addon_path) + ".id", id);
}
/// <summary>Uploads the local file to the blob at <paramref name="path"/>, populating standard blob properties first.</summary>
public async Task Save(StringPath path, FPath file)
{
    var targetBlob = BlobRef(path);
    AutoPopulateProps(path, targetBlob);
    await targetBlob.UploadFromFileAsync(file.FullPath);
}
/// <summary>Serializes <typeparamref name="T"/> as gzipped JSON and uploads it to blob storage at
/// <paramref name="path"/> + ".json.gz", retrying transient HTTP failures.</summary>
public async Task Set <T>(StringPath path, T item)
{
    using (var memStream = new MemoryStream())
    {
        // leaveOpen: true keeps memStream alive after the gzip writer is disposed;
        // disposing the writers here flushes all compressed bytes into memStream
        using (var zipWriter = new GZipStream(memStream, CompressionLevel.Optimal, true))
        using (var tw = new StreamWriter(zipWriter, Encoding.UTF8))
            JsonExtensions.DefaultSerializer.Serialize(new JsonTextWriter(tw), item);

        // local factory: an HttpRequestMessage cannot be re-sent, so each retry builds
        // a fresh request and rewinds the payload stream to the start
        HttpRequestMessage Request()
        {
            memStream.Seek(0, SeekOrigin.Begin);
            var fullPath = path.WithExtension(".json.gz");
            var req = BlobUri(fullPath).Put().WithStreamContent(memStream).WithBlobHeaders(Storage);
            return(req);
        }
        try
        {
            // up to 3 retries with exponential backoff on network errors or transient status codes
            await Policy
                .Handle <HttpRequestException>()
                .OrResult <HttpResponseMessage>(r => r.StatusCode.IsTransient())
                .RetryAsync(3, (r, i) => i.ExponentialBackoff())
                .ExecuteAsync(() => H.SendAsync(Request()));
        }
        catch (Exception ex)
        {
            // wrap with the destination URI for diagnosability
            throw new InvalidOperationException($"Unable to write to blob storage '{Request().RequestUri}'", ex);
        }
    }
}
/// <summary>Copies source rows to blob files and loads them into <paramref name="loadTable"/> as they
/// appear, deleting each file after it is loaded. Returns the total bytes loaded.</summary>
async Task <ByteSize> LoadBLobData(SyncTableCfg tableCfg, ILogger log, string loadId, string sourceSql, object maxTs, TableId loadTable)
{
    var path = StringPath.Relative("sync", tableCfg.Name, loadId);
    var copyTask = Source.CopyTo(path, sourceSql, tableCfg, maxTs);
    var loadedFiles = new KeyedCollection <StringPath, FileListItem>(f => f.Path);
    while (true) // load as the files are created
    {
        if (copyTask.IsFaulted) { break; }
        var toLoad = (await Store.List(path).SelectManyList())
            .Where(f => !loadedFiles.ContainsKey(f.Path)).ToArray();
        if (toLoad.None())
        {
            if (copyTask.IsCompleted) { break; }
            await 5.Seconds().Delay();
            continue;
        }
        log.Debug("Sync {Table} - loading: {Files}", tableCfg.Name, toLoad.Join("|", l => l.Path.ToString()));
        await Dest.LoadFrom(toLoad.Select(f => f.Path), loadTable);
        loadedFiles.AddRange(toLoad);
        await toLoad.BlockAction(f => Store.Delete(f.Path, log), parallel : 8);
    }
    // Observe the copy task. Previously a faulted copy only broke out of the loop and its
    // exception was silently swallowed, reporting a partial load as success; awaiting here
    // propagates the failure to the caller.
    await copyTask;
    log.Information("Sync {Table} - copied {Files} files ({Size})", tableCfg.Name, loadedFiles.Count,
        loadedFiles.Sum(f => f.Bytes).Bytes().Humanize("#,#"));
    return(loadedFiles.Sum(f => f.Bytes).Bytes());
}
/// <summary>Returns files under <paramref name="path"/> (all directories) whose version equals <paramref name="fromVersion"/>.</summary>
async Task <List <StoreFileMd> > FilesToUpgrade(StringPath path, int fromVersion)
{
    var allFiles = await Store.List(path, true).SelectManyList();
    var candidates = new List<StoreFileMd>();
    foreach (var item in allFiles)
    {
        var md = StoreFileMd.FromFileItem(item);
        // a missing version is treated as version 0
        if ((md.Version ?? "0").ParseInt() == fromVersion)
            candidates.Add(md);
    }
    return candidates;
}
/// <summary>Streams pages of S3 keys under <paramref name="path"/>, each key returned relative to the
/// bucket prefix with its extension stripped.</summary>
public async IAsyncEnumerable <ICollection <StringPath> > ListKeys(StringPath path)
{
    var fullPrefix = BasePath.Add(path);
    var req = new ListObjectsV2Request { BucketName = Cfg.Bucket, Prefix = fullPrefix };
    var morePages = true;
    while (morePages)
    {
        var res = await S3.ListObjectsV2Async(req);
        var page = res.S3Objects
            .Select(o => new StringPath(o.Key).RelativePath(fullPrefix).WithoutExtension())
            .ToList();
        yield return page;
        // continue from the marker until S3 reports the listing is complete
        morePages = res.IsTruncated;
        if (morePages) req.ContinuationToken = res.NextContinuationToken;
    }
}
/// <summary>PUTs the stream contents to the blob at <paramref name="path"/>.</summary>
public async Task Save(StringPath path, Stream contents)
{
    var putRequest = BlobUri(path).Put().WithStreamContent(contents).WithBlobHeaders(Storage);
    var putResponse = await H.SendAsync(putRequest);
    putResponse.EnsureSuccessStatusCode();
}
/// <summary>Downloads and deserializes the gzipped-JSON blob at <paramref name="path"/>.
/// Returns null when the blob does not exist.</summary>
public async Task <T> Get <T>(StringPath path) where T : class
{
    var blob = BlobRef(path);
    await using var buffer = new MemoryStream();
    try
    {
        await blob.DownloadToStreamAsync(buffer);
    }
    catch (Exception)
    {
        // a missing blob is an expected "not found" → null; anything else is a real failure
        if (!await blob.ExistsAsync()) return null;
        throw;
    }
    buffer.Position = 0;
    await using var unzipped = new GZipStream(buffer, CompressionMode.Decompress);
    using var reader = new StreamReader(unzipped, Encoding.UTF8);
    var json = await JObject.LoadAsync(new JsonTextReader(reader));
    return json.ToObject <T>(JsonExtensions.DefaultSerializer);
}
/// <summary>Downloads and deserializes the gzipped-JSON object at <paramref name="path"/> from S3.
/// Returns null when the bucket or key does not exist.</summary>
public async Task <T> Get <T>(StringPath path) where T : class
{
    GetObjectResponse response = null;
    try
    {
        response = await S3Policy.ExecuteAsync(() =>
            S3.GetObjectAsync(new GetObjectRequest { BucketName = Cfg.Bucket, Key = FilePath(path) }));
    }
    catch (AmazonS3Exception e)
    {
        // expected "not found" conditions map to null; everything else propagates
        if (e.ErrorCode == "NoSuchBucket" || e.ErrorCode == "NotFound" || e.ErrorCode == "NoSuchKey")
        {
            return(null);
        }
        throw;
    }
    // dispose the response too — it owns the underlying network stream and was
    // previously never disposed (connection leak)
    using (response)
    using (var zr = new GZipStream(response.ResponseStream, CompressionMode.Decompress))
    using (var tr = new StreamReader(zr, Encoding.UTF8))
    {
        var jObject = await JObject.LoadAsync(new JsonTextReader(tr));
        var r = jObject.ToObject <T>(JsonExtensions.DefaultSerializer);
        return(r);
    }
}
/// <summary>Uploads the stream to the blob at <paramref name="path"/>, populating standard blob properties first.</summary>
public async Task Save(StringPath path, Stream contents)
{
    var target = BlobRef(path);
    AutoPopulateProps(path, target);
    await target.UploadFromStreamAsync(contents);
}
/// <summary>Serializes item into the object store</summary>
/// <param name="path">The path to the object (no extensions)</param>
/// <param name="zip">When true the JSON payload is gzip-compressed and saved as ".json.gz".</param>
public static async Task Set <T>(this ISimpleFileStore store, StringPath path, T item, bool zip = true, ILogger log = default, JsonSerializerSettings jCfg = default)
{
    await using var buffer = new MemoryStream();
    var serializer = jCfg == null ? JsonExtensions.DefaultSerializer : JsonSerializer.Create(jCfg);
    if (zip)
    {
        // dispose the writers before uploading so every compressed byte is flushed;
        // leaveOpen keeps the backing MemoryStream usable afterwards
        await using (var gz = new GZipStream(buffer, CompressionLevel.Optimal, true))
        {
            await using var writer = new StreamWriter(gz, Encoding.UTF8);
            serializer.Serialize(new JsonTextWriter(writer), item);
        }
    }
    else
    {
        await using (var writer = new StreamWriter(buffer, Encoding.UTF8, leaveOpen: true))
            serializer.Serialize(new JsonTextWriter(writer), item);
    }
    buffer.Seek(0, SeekOrigin.Begin);
    await store.Save(path.AddJsonExtention(zip), buffer, log);
}
/// <summary>File metadata: path, timestamp token, last-modified time and schema version.</summary>
public StoreFileMd(StringPath path, string ts, DateTime modified, string version)
{
    Version = version;
    Modified = modified;
    Ts = ts;
    Path = path;
}
/// <summary>Result of building a blob index: the index metadata, where it lives, and stale files to delete.</summary>
public BlobIndexResult(BlobIndexMeta index, StringPath indexPath, StringPath indexFilesPath, StringPath[] toDelete)
{
    Index = index;
    IndexPath = indexPath;
    IndexFilesPath = indexFilesPath;
    ToDelete = toDelete;
}
/// <summary>Returns the most recent file within this path (any child directories)</summary>
public async Task <StoreFileMd> LatestFile(StringPath path = null)
{
    var allFiles = await Files(path, allDirectories : true).SelectManyList();
    // "most recent" is determined by the timestamp encoded in each file's path
    return allFiles.OrderByDescending(f => StoreFileMd.GetTs(f.Path)).FirstOrDefault();
}
/// <summary>File metadata: path, timestamp token, last-modified time, size in bytes and optional version.</summary>
public StoreFileMd(StringPath path, string ts, DateTime modified, long bytes, string version = null)
{
    Path = path;
    Bytes = bytes;
    Ts = ts;
    Modified = modified;
    Version = version;
}
/// <summary>Opens a writable stream to the blob at <paramref name="path"/>, replacing any existing blob.</summary>
public async Task <Stream> OpenForWrite(StringPath path, ILogger log = null)
{
    var target = BlobRef(path);
    // remove any existing blob so the write starts from a clean slate
    await target.DeleteIfExistsAsync();
    AutoPopulateProps(path, target);
    return await target.OpenWriteAsync();
}
/// <summary>Creates a store from a connection string; the first token of <paramref name="path"/> is the container name.</summary>
/// <exception cref="InvalidOperationException">When <paramref name="path"/> is null or empty.</exception>
public AzureBlobFileStore(string cs, StringPath path, ILogger log) : this(path, log)
{
    var container = path?.Tokens.FirstOrDefault()
        ?? throw new InvalidOperationException("path needs to be provided and start with a container name");
    var account = CloudStorageAccount.Parse(cs);
    Container = new CloudBlobClient(account.BlobEndpoint, account.Credentials)
        .GetContainerReference(container);
}
/// <summary>Append-only collection persisted to <paramref name="store"/> under <paramref name="path"/>;
/// <paramref name="getTs"/> extracts each item's timestamp token.</summary>
public AppendCollectionStore(ISimpleFileStore store, StringPath path, Func <T, string> getTs, string version, ILogger log)
{
    Log = log;
    Version = version;
    GetTs = getTs;
    Path = path;
    Store = store;
}
/// <summary>Returns the most recent file within this path (any child directories)</summary>
async Task <StoreFileMd> LatestFile(StringPath path)
{
    var allFiles = await Files(path, true);
    // "most recent" is determined by the timestamp encoded in each file's path
    return allFiles.OrderByDescending(f => StoreFileMd.GetTs(f.Path)).FirstOrDefault();
}
/// <summary>Base constructor: sets up logging, the HTTP client, and the base path.</summary>
AzureBlobFileStore(StringPath path, ILogger log)
{
    Log = log;
    // generous timeout for large blob transfers
    H = new HttpClient { Timeout = 10.Minutes() };
    // NOTE(review): "Emtpy" is the (misspelled) member name declared on StringPath elsewhere in the project
    BasePath = path ?? StringPath.Emtpy;
}
/// <summary>Saves the unparseable raw HTML to the log store (dated folder, keyed by video id) and logs a warning with its URL.</summary>
async Task LogParseError(string msg, Exception ex, string videoId, string rawHtml, ILogger log)
{
    var htmlPath = StringPath.Relative(DateTime.UtcNow.ToString("yyyy-MM-dd"), $"{videoId}.html");
    var htmlUrl = LogStore.Url(htmlPath);
    await LogStore.Save(htmlPath, rawHtml.AsStream(), log);
    log.Warning(ex, "WebScraper - {VideoId} - saved html that we could not parse '{msg}' ({Url}). error: {Error}",
        videoId, msg, htmlUrl, ex?.ToString());
}
/// <summary>Cached collection of items persisted to a file store, keyed by <paramref name="getId"/>.</summary>
public FileCollection(ISimpleFileStore s3, Expression <Func <T, string> > getId, StringPath path,
    CollectionCacheType cacheType = CollectionCacheType.Memory, FPath localCacheDir = null)
{
    Store = s3;
    Path = path;
    CacheType = cacheType;
    LocalCacheDir = localCacheDir;
    GetId = getId.Compile();
    // thread-safe in-memory cache keyed by the id selector
    Cache = new KeyedCollection <string, T>(getId, theadSafe : true);
}
/// <summary>A unit of blob-indexing work: the destination path, index columns, the row stream and its
/// approximate size, plus an optional per-row callback.</summary>
public BlobIndexWork(StringPath path, IndexCol[] cols, IAsyncEnumerable <JObject> rows, ByteSize size, Action <JObject> onProcessed = null)
{
    Path = path;
    Cols = cols;
    Rows = rows;
    Size = size;
    OnProcessed = onProcessed;
}
/// <summary>Downloads the blob at <paramref name="path"/> into memory and returns the stream rewound to the start.</summary>
public async Task <Stream> Load(StringPath path)
{
    var blob = BlobRef(path);
    var buffer = new MemoryStream();
    await blob.DownloadToStreamAsync(buffer);
    // rewind so the caller reads from the beginning
    buffer.Seek(0, SeekOrigin.Begin);
    return buffer;
}
/// <summary>
/// Check if the given file path is a .gma file.
/// </summary>
/// <param name="path">File path</param>
/// <returns>Whether the file is a .gma file.</returns>
private bool CheckGMA(string path)
{
    // case-insensitive: the previous '==' rejected upper-case extensions like ".GMA",
    // which are equivalent on Windows filesystems
    if (!string.Equals(StringPath.GetExtension(path), ".gma", StringComparison.OrdinalIgnoreCase))
    {
        Notification.Error($"File '{path}' isn't a .gma file! Please specify a correct file path!");
        return(false);
    }
    return(true);
}
/// <summary>PUTs the local file's contents to the blob at <paramref name="path"/>.</summary>
public async Task Save(StringPath path, FPath file)
{
    using var source = File.OpenRead(file.FullPath);
    var putRequest = BlobUri(path).Put().WithStreamContent(source).WithBlobHeaders(Storage);
    var putResponse = await H.SendAsync(putRequest);
    putResponse.EnsureSuccessStatusCode();
}
/// <summary>Creates a store from a connection string; the first token of <paramref name="path"/> is the container name.</summary>
/// <exception cref="InvalidOperationException">When <paramref name="path"/> has no tokens.</exception>
public AzureBlobFileStore(string cs, StringPath path)
{
    ContainerName = path.Tokens.FirstOrDefault()
        ?? throw new InvalidOperationException("path needs to at least have a container");
    BasePath = path;
    Storage = CloudStorageAccount.Parse(cs);
    // generous timeout for large blob transfers
    H = new HttpClient { Timeout = 10.Minutes() };
}
/// <summary>Uploads the stream to the blob at <paramref name="path"/> and logs the saved URI.</summary>
public async Task Save(StringPath path, Stream contents, ILogger log = null)
{
    var logger = log ?? Log;
    var blob = BlobRef(path);
    AutoPopulateProps(path, blob);
    await blob.UploadFromStreamAsync(contents);
    logger.Debug("Saved {Path}", blob.Uri);
}