public async Task<bool> Cleanup(StatusFile sf)
{
    if (sf.obsoletes.IsNullOrEmpty())
        return true;

    var bkp = st.status.ToBucketKeyPair();
    var prefix = $"{bkp.key}/{S3HashStoreStatus.UploadStatusFilePrefix}";
    var success = true;
    await ParallelEx.ForEachAsync(sf.obsoletes, async file =>
    {
        var cts = new CancellationTokenSource();
        var id = file.TrimStart(prefix).TrimEnd(".json").ToLongOrDefault(0); // status-file timestamp, currently unused
        var folderBKP = st.destination.ToBucketKeyPair();
        var result = await s3h.DeleteObjectAsync(
            bucketName: folderBKP.bucket,
            key: file,
            throwOnFailure: false,
            cancellationToken: cts.Token).TryCancelAfter(cts.Token, msTimeout: st.timeout);

        if (result)
        {
            Console.WriteLine($"Status file: '{folderBKP.bucket}/{file}' was removed.");
        }
        else
        {
            success = false;
            Console.WriteLine($"Failed to remove status file: '{folderBKP.bucket}/{file}'.");
        }
    }, maxDegreeOfParallelism: parallelism);
    return success;
}
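Every snippet in this set funnels its work through ParallelEx.ForEachAsync. Below is a minimal, self-contained sketch of such a helper, assuming only the signature visible in these call sites (items, an async body, and an optional maxDegreeOfParallelism); the real AsmodatStandard implementation may differ.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

public static class ParallelExSketch
{
    // Throttles concurrent executions of 'body' with a semaphore; mirrors the
    // ParallelEx.ForEachAsync(items, body, maxDegreeOfParallelism) calls above.
    public static async Task ForEachAsync<T>(IEnumerable<T> items, Func<T, Task> body, int maxDegreeOfParallelism = 8)
    {
        using (var gate = new SemaphoreSlim(maxDegreeOfParallelism))
        {
            var tasks = items.Select(async item =>
            {
                await gate.WaitAsync();
                try { await body(item); }
                finally { gate.Release(); }
            }).ToArray(); // materialize so all tasks start before WhenAll
            await Task.WhenAll(tasks);
        }
    }
}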
public async Task Processing()
{
    Log($"Loading github configuration files...");
    var files = await _GIT.GetGitHubTrees();
    var instancesPath = Environment.GetEnvironmentVariable("instances_path");
    var instanceObjects = files.GetObjectsByPath(path: instancesPath)?.Where(x => x.IsTree());
    var configs = new List<EC2InstanceConfig>();
    foreach (var instanceObject in instanceObjects)
    {
        var configObject = instanceObject?.objects.FirstOrDefault(x => x.path.ToLower() == "config.json");
        if (configObject?.IsBlob() != true)
        {
            Log($"Failed to load config file from {instancesPath}/{instanceObject?.path ?? "undefined"}");
            continue;
        }

        var blob = await _GIT.GetGitHubBlob(configObject);
        var cfg = blob.JsonDeserialize<EC2InstanceConfig>();
        configs.Add(cfg);
    }

    var parallelism = Environment.GetEnvironmentVariable("parallelism").ToIntOrDefault(1);
    Log($"Loading instances...");
    var instances = await _EC2.ListInstances();
    instances = await TagsClenup(instances);
    Log($"Found {configs.Count} configuration files and {instances?.Count() ?? 0} active instances.");

    if (!configs.IsNullOrEmpty())
    {
        await ParallelEx.ForEachAsync(configs, async cfg =>
        {
            /*/// TESTNET
            await Launcher(instances, cfg);
            /*/// PRODUCTION
            try
            {
                Log($"Processing '{cfg?.name ?? "undefined"}' config...");
                await Launcher(instances, cfg);
            }
            catch (Exception ex)
            {
                _logger.Log($"Failed to process {cfg?.name ?? "undefined"}, error: {ex.JsonSerializeAsPrettyException()}");
            }
            //*/
        }, maxDegreeOfParallelism: parallelism);
    }
    Log($"Done, all configuration files were processed.");
}
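The TESTNET/PRODUCTION switch above is a comment toggle rather than an #if symbol. Reduced to a skeleton (names are illustrative):

/*
RunTestnet();      // commented out: swallowed by the opening block comment
/*/
RunProduction();   // active: the "/*/" line closes the comment just above
//*/

Adding one more leading slash to the first line (making it "//*") flips which branch compiles, without touching the code itself.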
public async Task FunctionHandler(ILambdaContext context)
{
    var sw = Stopwatch.StartNew();
    context.Logger.Log($"{context?.FunctionName} => {nameof(FunctionHandler)} => Started");
    try
    {
        var instances = await _EC2.ListInstances();
        if (instances.Length <= 0)
            context.Logger.Log($"AWSTerminator can't process tags, not a single EC2 Instance was found.");

        await ParallelEx.ForEachAsync(instances, async instance => await Process(instance, context.Logger));
    }
    finally
    {
        context.Logger.Log($"{context?.FunctionName} => {nameof(FunctionHandler)} => Stopped, Evaluated within: {sw.ElapsedMilliseconds} [ms]");
    }
}
public async Task<bool> DeleteDirectoryAsync(
    string bucketName,
    string prefix = null,
    string versionId = null,
    bool throwOnFailure = true,
    CancellationToken cancellationToken = default(CancellationToken))
{
    var list = await this.ListObjectsAsync(bucketName, prefix, cancellationToken: cancellationToken);
    bool success = true;
    await ParallelEx.ForEachAsync(list, async obj =>
    {
        var result = await this.DeleteObjectAsync(
            bucketName: bucketName,
            key: obj.Key,
            throwOnFailure: throwOnFailure,
            cancellationToken: cancellationToken);

        lock (_locker)
            success = success && result;
    }, maxDegreeOfParallelism: _maxDegreeOfParalelism);
    return success;
}
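The lock above exists because the delete lambdas all mutate the captured success flag concurrently. A lock-free drop-in for the same loop body, sketched under the assumption that only failure counting matters:

int failed = 0;
await ParallelEx.ForEachAsync(list, async obj =>
{
    var result = await this.DeleteObjectAsync(
        bucketName: bucketName,
        key: obj.Key,
        throwOnFailure: throwOnFailure,
        cancellationToken: cancellationToken);
    if (!result)
        Interlocked.Increment(ref failed); // System.Threading; atomic, no monitor needed
}, maxDegreeOfParallelism: _maxDegreeOfParalelism);
return failed == 0;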
public async Task<long> Execute()
{
    var sw = Stopwatch.StartNew();
    Log($"{_context?.FunctionName} => {nameof(FunctionHandler)} => Execute");
    try
    {
        // Select instances that carry a Route53 tag key
        var instances = (await _EC2.ListInstances())
            .Where(instance => instance.Tags.Any(x => x.Key.Contains("Route53 Name"))).ToArray();
        var running = instances.Where(instance => instance.State.Code == 16);      // running
        var not_running = instances.Where(instance => instance.State.Code != 16);  // not running
        var blacklist = new List<string>();

        // Only process running instances if there are stopped ones with the same 'Route53 Name'
        if (!running.IsNullOrEmpty() && !not_running.IsNullOrEmpty())
        {
            foreach (var live in running)
            {
                var names_live = live.Tags.Where(x => x.Key.Contains("Route53 Name") && !x.Value.IsNullOrEmpty()).Select(x => x.Value);
                var zones_live = live.Tags.Where(x => x.Key.Contains("Route53 Zone") && !x.Value.IsNullOrEmpty()).Select(x => x.Value);
                foreach (var stopped in not_running)
                {
                    if (blacklist.Contains(stopped.InstanceId) || live.InstanceId == stopped.InstanceId)
                        continue; // don't re-process blacklisted instances or compare an instance with itself

                    var names_stopped = stopped.Tags.Where(x => x.Key.Contains("Route53 Name") && !x.Value.IsNullOrEmpty()).Select(x => x.Value);
                    var zones_stopped = stopped.Tags.Where(x => x.Key.Contains("Route53 Zone") && !x.Value.IsNullOrEmpty()).Select(x => x.Value);

                    if (names_live.IntersectAny(names_stopped) && zones_live.IntersectAny(zones_stopped))
                    {
                        Console.WriteLine($"Blacklisting instance '{stopped.InstanceId}' from processing, found overlapping zones and names with already running instance '{live.InstanceId}'.");
                        blacklist.Add(stopped.InstanceId);
                    }
                }
            }
        }

        // Keep only non-blacklisted instances
        instances = instances.Where(instance => !blacklist.Contains(instance.InstanceId)).ToArray();

        var zones = await _R53.GetRecordSets();
        if (zones.Count <= 0)
            Log($"AWSRouter53 can't process any tags, not a single Route53 Zone was found.");
        else
            Log($"Validating routes for {zones.Count} zones and {instances.Length} instances...");

        await ParallelEx.ForEachAsync(instances, async instance => await Process(zones, instance));
        return sw.ElapsedMilliseconds;
    }
    finally
    {
        Log($"{_context?.FunctionName} => {nameof(FunctionHandler)} => Stopped, Evaluated within: {sw.ElapsedMilliseconds} [ms]");
    }
}
public async Task<SyncResult> DownloadAWS(SyncTarget st)
{
    var bkp = st.source.ToBucketKeyPair();
    var bucket = bkp.bucket;
    var timestamp = DateTimeEx.UnixTimestampNow();

    if (bucket.IsNullOrEmpty())
        throw new Exception($"Source '{st.source ?? "undefined"}' does not contain bucket name.");

    var destination = st.destination?.ToDirectoryInfo();
    if (destination?.TryCreate() != true)
        throw new Exception($"Destination '{st.destination ?? "undefined"}' does not exist and couldn't be created.");

    var status = await GetStatusFile(st, st.minTimestamp, st.maxTimestamp);
    var downloadStatus = await GetStatusFile(st, DownloadStatusFilePrefix);

    if (status == null)
        throw new Exception($"Could not download latest data from the source '{st.source}', status file was not found in '{st?.status ?? "undefined"}' within time range of <{st.minTimestamp.ToDateTimeFromTimestamp().ToLongDateTimeString()},{st.maxTimestamp.ToDateTimeFromTimestamp().ToLongDateTimeString()}>");

    if (downloadStatus.finalized)
    {
        var elapsed = DateTimeEx.UnixTimestampNow() - downloadStatus.timestamp;
        Console.WriteLine($"Download sync file '{st.status}' was already finalized {elapsed}s ago.");
        await Task.Delay(millisecondsDelay: 1000);
        return new SyncResult(success: true);
    }

    _syncInfo[st.id] = new SyncInfo(st);
    _syncInfo[st.id].total = status.files.Sum(x => x?.Length ?? 0);
    _syncInfo[st.id].timestamp = timestamp;

    int counter = 0;
    var directories = new List<DirectoryInfo>();
    directories.Add(st.destination.ToDirectoryInfo());
    foreach (var dir in status.directories)
    {
        if (dir == null)
            continue;

        var relativeDir = dir.FullName.TrimStart(status.source);
        var downloadDir = PathEx.RuntimeCombine(st.destination, relativeDir).ToDirectoryInfo();

        if (!downloadDir.Exists && st.verbose >= 1)
            Console.WriteLine($"Creating Directory [{++counter}/{status.directories.Length}] '{downloadDir.FullName}' ...");

        if (downloadDir?.TryCreate() != true)
            throw new Exception($"Could not find or create directory '{downloadDir?.FullName ?? "undefined"}'.");

        directories.Add(downloadDir);
    }

    if (st.wipe)
    {
        counter = 0;
        var currentDirectories = st.destination.ToDirectoryInfo().GetDirectories(recursive: st.recursive);
        foreach (var dir in currentDirectories)
        {
            if (!directories.Any(x => x.FullName == dir.FullName))
            {
                Console.WriteLine($"Removing Directory [{++counter}/{currentDirectories.Length - directories.Count}] '{dir.FullName}' ...");
                dir.Delete(recursive: st.recursive);
            }
        }
    }

    counter = 0;
    var files = new List<FileInfo>();
    var speedList = new List<double>();
    await ParallelEx.ForEachAsync(status.files, async file =>
    {
        if (file == null)
            return;

        try
        {
            var relativePath = file.FullName.TrimStart(status.source);
            var downloadPath = PathEx.RuntimeCombine(st.destination, relativePath).ToFileInfo();

            lock (_locker) // 'files' is shared across the parallel lambdas
                files.Add(downloadPath);

            if (downloadPath.Exists && downloadPath.MD5().ToHexString() == file.MD5)
                return; // file already exists

            if (downloadPath.Exists && downloadPath.TryDelete() != true)
                throw new Exception($"Obsolete file was found in '{downloadPath?.FullName ?? "undefined"}' but couldn't be deleted.");

            var key = $"{st.source.TrimEnd('/')}/{file.MD5}".ToBucketKeyPair().key;
            ++counter;

            if (st.verbose >= 1)
                Console.WriteLine($"Downloading [{counter}/{status.files.Length}][{file.Length}B] '{bucket}/{key}' => '{downloadPath.FullName}' ...");

            var sw = Stopwatch.StartNew();
            var stream = await _S3Helper.DownloadObjectAsync(bucketName: bucket, key: key, throwIfNotFound: true)
                .Timeout(msTimeout: st.timeout);

            if (!downloadPath.Directory.TryCreate())
                throw new Exception($"Failed to create directory '{downloadPath?.Directory.FullName ?? "undefined"}'.");

            using (var fs = File.Create(downloadPath.FullName))
                stream.CopyTo(fs);

            downloadPath.Refresh();
            if (!downloadPath.Exists)
                throw new Exception($"Failed download '{bucket}/{key}' -/-> '{downloadPath.FullName}'.");

            if (st.verify)
            {
                var md5 = downloadPath.MD5().ToHexString();
                if (md5 != file.MD5)
                    throw new Exception($"Failed download '{bucket}/{key}' -/-> '{downloadPath.FullName}', expected MD5 to be '{file.MD5 ?? "undefined"}' but was '{md5 ?? "undefined"}'.");

                lock (_locker)
                {
                    var megabytes = (double)(file.Length + (md5.Length + bucket.Length + key.Length) * sizeof(char)) / (1024 * 1024);
                    var seconds = (double)(sw.ElapsedMilliseconds + 1) / 1000;
                    speedList.Add(megabytes / seconds);
                }
            }
        }
        finally
        {
            _syncInfo[st.id].processed += file.Length;
            _syncInfo[st.id].progress = ((double)_syncInfo[st.id].processed / _syncInfo[st.id].total) * 100;
        }
    }, maxDegreeOfParallelism: st.parallelism);

    if (st.wipe)
    {
        counter = 0;
        var currentFiles = st.destination.ToDirectoryInfo().GetFiles("*", recursive: st.recursive);
        foreach (var file in currentFiles)
        {
            if (!files.Any(x => x.FullName == file.FullName))
            {
                Console.WriteLine($"Removing File [{++counter}/{currentFiles.Length - files.Count}] '{file.FullName}' ...");
                file.Delete();
            }
        }
    }

    downloadStatus.finalized = true;
    var uploadResult = await _S3Helper.UploadJsonAsync(downloadStatus.bucket, downloadStatus.key, downloadStatus)
        .Timeout(msTimeout: st.timeout)
        .TryCatchRetryAsync(maxRepeats: st.retry);

    var avgSpeed = speedList.IsNullOrEmpty() ? double.NaN : speedList.Average();
    Console.WriteLine($"SUCCESS, processed '{st.status}', all {status.files.Length} files and {status.directories.Length} directories were updated.");
    Console.WriteLine($"Average Download Speed: {avgSpeed} MB/s");
    return new SyncResult(success: true, speed: avgSpeed);
}
public async Task<SyncInfo> Upload()
{
    var si = new SyncInfo(st);
    var bkp = st.destination.ToBucketKeyPair();
    var bucket = bkp.bucket;
    var key = bkp.key;
    si.start = DateTimeEx.UnixTimestampNow();

    if (bucket.IsNullOrEmpty())
        throw new Exception($"Destination '{st.destination ?? "undefined"}' does not contain bucket name.");

    var path = st.destination;
    var sourceInfo = st.GetSourceInfo();
    if (sourceInfo.rootDirectory == null)
        return si;

    var directory = st.source.ToDirectoryInfo();
    var prefix = directory.FullName;
    var counter = 0;
    var status = await S3HashStoreStatus.GetStatusFile(s3h, st, S3HashStoreStatus.UploadStatusFilePrefix);
    var elapsed = DateTimeEx.UnixTimestampNow() - status.timestamp;

    if (status.finalized)
    {
        var remaining = st.retention - elapsed;
        Console.WriteLine($"Upload sync file '{st.status}' was already finalized {elapsed}s ago. Next sync in {remaining}s.");
        await Task.Delay(millisecondsDelay: 1000);
        si.success = true;
        return si;
    }

    si.total = sourceInfo.files.Sum(x => x?.Length ?? 0);
    var cleanup = st.cleanup ? Cleanup(status) : null;
    var isStatusFileUpdated = false;
    var files = new List<SilyFileInfo>();
    var uploadedBytes = new List<long>();
    double compressionSum = 0;

    await ParallelEx.ForEachAsync(sourceInfo.files, async file =>
    {
        double compression = 1;
        try
        {
            var sw = Stopwatch.StartNew();
            var uploadedFile = status.files?.FirstOrDefault(x => x.FullNameEqual(file));
            string localMD5;
            string destination;

            if (uploadedFile != null) // file was already uploaded to AWS
            {
                if (uploadedFile.LastWriteTime == file.LastWriteTime.ToUnixTimestamp())
                {
                    if (st.verbose)
                        Console.WriteLine($"Skipping upload of '{file.FullName}', file did not change since last upload.");

                    await ss.LockAsync(() =>
                    {
                        files.Add(uploadedFile);
                        ++counter;
                    });
                    return; // do not upload, file did not change
                }

                localMD5 = file.MD5().ToHexString();
                destination = $"{key}/{localMD5}";

                if (localMD5 == uploadedFile.MD5)
                {
                    if (st.verbose)
                        Console.WriteLine($"Skipping upload of '{file.FullName}', file already exists in the '{bucket}/{destination}'.");

                    await ss.LockAsync(() =>
                    {
                        ++counter;
                        files.Add(uploadedFile);
                    });
                    return;
                }
            }
            else // file was not uploaded to AWS yet
            {
                localMD5 = file.MD5().ToHexString();
                destination = $"{key}/{localMD5}";
                var metadata = await s3h.ObjectMetadataAsync(
                    bucketName: bucket,
                    key: $"{key}/{localMD5}",
                    throwIfNotFound: false)
                    .Timeout(msTimeout: st.timeout)
                    .TryCatchRetryAsync(maxRepeats: st.retry);

                if (metadata != null) // file exists
                {
                    await ss.LockAsync(() =>
                    {
                        ++counter;
                        var sfi = file.ToSilyFileInfo(md5: localMD5);

                        if (sfi.Length >= (metadata.ContentLength + 128))
                            sfi.TrySetProperty("compress", "zip");

                        files.Add(sfi);
                    });

                    if (st.verbose)
                        Console.WriteLine($"Skipping upload of '{file.FullName}', file was found in the '{bucket}/{destination}'.");
                    return;
                }
            }

            await ss.LockAsync(async () =>
            {
                if (!isStatusFileUpdated) // update status file
                {
                    status.timestamp = si.start;
                    status.version = status.version + 1;
                    status.finalized = false;
                    var statusUploadResult = await s3h.UploadJsonAsync(status.bucket, status.key, status)
                        .Timeout(msTimeout: st.timeout)
                        .TryCatchRetryAsync(maxRepeats: st.retry);
                    isStatusFileUpdated = true;
                }
                ++counter;
            });

            async Task<string> UploadFile()
            {
                file?.Refresh();
                if (file == null || !file.Exists)
                    return null;

                var shareMode = EnumEx.ToEnum<FileShare>(st.filesShare);
                FileInfo compressedFile = null;
                await ss.LockAsync(() =>
                {
                    if (st.compress)
                    {
                        compressedFile = PathEx.RuntimeCombine(st.sync, localMD5).ToFileInfo();
                        file.Zip(compressedFile);
                        compressedFile.Refresh();

                        if ((compressedFile.Length + 128) < file.Length)
                            compression = (double)compressedFile.Length / Math.Max(file.Length, 1);
                        else
                            compression = 1;
                    }
                });

                FileStream fs = null;
                await ss.LockAsync(() =>
                {
                    fs = File.Open( // upload new file to AWS
                        compression < 1 ? compressedFile.FullName : file.FullName,
                        FileMode.Open,
                        FileAccess.Read,
                        shareMode);
                });

                var hash = await s3h.UploadStreamAsync(
                    bucketName: bucket,
                    key: destination,
                    inputStream: fs,
                    throwIfAlreadyExists: false,
                    msTimeout: st.timeout).TryCatchRetryAsync(maxRepeats: st.retry);

                fs.Close();

                if (compressedFile != null && !compressedFile.TryDelete())
                    throw new Exception($"Failed to remove temporary file '{compressedFile?.FullName ?? "undefined"}' after upload.");

                return hash.IsNullOrEmpty() ? null : hash;
            }

            if (st.verbose)
                Console.WriteLine($"Uploading [{counter}/{sourceInfo.files.Length}][{file.Length.ToPrettyBytes()}] '{file.FullName}' => '{bucket}/{destination}' ...");

            var md5 = await UploadFile().TryCatchRetryAsync(maxRepeats: st.retry).Timeout(msTimeout: st.timeout);
            if (md5.IsNullOrEmpty())
                throw new Exception($"FAILED, Upload '{file.FullName}' => '{bucket}/{destination}'");

            var silyFile = file.ToSilyFileInfo(localMD5);

            if (compression < 1)
            {
                if (st.verbose)
                    Console.WriteLine($"File size reduced by [{compression * 100:0.00} %], file: '{file.FullName}' ({md5})");

                silyFile.TrySetProperty("compress", "zip");
                compressionSum += compression;
            }
            else
            {
                if (md5 != localMD5 && st.verbose)
                    Console.WriteLine($"Warning! file hash changed during upload '{file.FullName}' {localMD5} => {md5}.");

                compressionSum += 1;
            }

            await ss.LockAsync(() =>
            {
                files.Add(silyFile);
                si.transferred += (long)(file.Length * compression); // per-file ratio, not the running sum
            });
        }
        finally
        {
            await ss.LockAsync(() =>
            {
                si.processed += file.Length;
                si.progress = ((double)si.processed / si.total) * 100;
                st.WriteInfoFile(si);
            });
        }
    }, maxDegreeOfParallelism: st.parallelism);

    var directories = sourceInfo.directories.Select(x => x.ToSilyDirectoryInfo()).ToArray();
    si.stop = DateTimeEx.UnixTimestampNow(); // set before computing speed
    si.speed = (double)si.transferred / Math.Max(si.stop - si.start, 1);
    si.compression = (double)si.transferred / si.total;
    si.success = true;

    if (cleanup != null)
        await cleanup;

    if (isStatusFileUpdated ||                       // if modifications were made to files
        !status.directories.JsonEquals(directories)) // or directories
    {
        status.files = files.ToArray();
        status.finalized = true;
        status.directories = directories;
        status.source = st.source;
        status.destination = st.destination;
        var uploadResult = await s3h.UploadJsonAsync(status.bucket, status.key, status)
            .Timeout(msTimeout: st.timeout)
            .TryCatchRetryAsync(maxRepeats: st.retry);

        if (st.verbose)
        {
            Console.WriteLine($"SUCCESS, processed '{st.status}', all {status.files.Length} files and {status.directories.Length} directories were updated.");
            Console.WriteLine($"Uploaded {si.transferred.ToPrettyBytes()}, Speed: {si.speed.ToPrettyBytes()}/s, Compressed: {si.compression * 100:0.00}%");
        }
    }

    return si;
}
public async Task FunctionHandler(ILambdaContext context)
{
    _sw = Stopwatch.StartNew();
    _context = context;
    _logger = _context.Logger;
    _logger.Log($"{context?.FunctionName} => {nameof(FunctionHandler)} => Started");

    _verbose = Environment.GetEnvironmentVariable("verbose").ToBoolOrDefault(true);
    _masterChatId = new ChatId(Environment.GetEnvironmentVariable("MASTER_CHAT_ID").ToLongOrDefault(-1001261081309));
    _maxParallelism = Environment.GetEnvironmentVariable("MAX_PARALLELISM").ToIntOrDefault(0);
    _cosmosHubClientTimeout = Environment.GetEnvironmentVariable("HUB_CLIENT_TIMEOUT").ToIntOrDefault(7);
    _maxMessageAge = Environment.GetEnvironmentVariable("MAX_MESSAGE_AGE").ToDoubleOrDefault(24 * 3600);
    _bucket = Environment.GetEnvironmentVariable("BUCKET_NAME");
    _lambdaTime = Environment.GetEnvironmentVariable("LAMBDA_TIME").ToIntOrDefault((15 * 60 * 1000) - 5000);
    var secretName = Environment.GetEnvironmentVariable("SECRET_NAME") ?? "KiraFaucetBot";

    if (Environment.GetEnvironmentVariable("test_connection").ToBoolOrDefault(false))
        Log($"Your Internet Connection is {(SilyWebClientEx.CheckInternetAccess(timeout: 5000) ? "" : "NOT")} available.");

    var secret = JObject.Parse(await _SM.GetSecret(secretName));

#if (TEST)
    var accessToken = secret["test_token"]?.ToString();
    //_mnemonic = (secret["test_mnemonic"]?.ToString()).ToSecureString();
    _mnemonic = (secret["mnemonic"]?.ToString()).ToSecureString();
    _bucket = "kira-faucet-test";
#elif (PUBLISH)
    var accessToken = secret["token"]?.ToString();
    _mnemonic = (secret["mnemonic"]?.ToString()).ToSecureString();
#endif

    _TBC = new TelegramBotClient(accessToken);
    _bot = await _TBC.GetMeAsync();
    Log($"[INFO] {_bot.FirstName} {_version} started! Bot Name: @{_bot.Username ?? "undefined"}, Bot Id: '{_bot.Id}', Master Chat: '{_masterChatId.Identifier}'");

    _TBC.OnMessage += Tbc_OnMessage;
    _TBC.OnCallbackQuery += _TBC_OnCallbackQuery;
    _TBC.OnInlineQuery += _TBC_OnInlineQuery;
    _TBC.OnInlineResultChosen += _TBC_OnInlineResultChosen;
    _TBC.StartReceiving();

    try
    {
        Log($"Processing...");
        var finalize = false;
        while (true)
        {
#if (PUBLISH)
            if (!finalize && _sw.ElapsedMilliseconds >= _lambdaTime)
            {
                _TBC.StopReceiving();
                finalize = true;
                _logger.Log($"Finalizing, elapsed {_sw.ElapsedMilliseconds} / {_lambdaTime} [ms] ...");
            }
#endif
            if (_messages.IsNullOrEmpty() && _callbacks.IsNullOrEmpty())
            {
                if (finalize)
                {
                    _logger.Log($"Lambda was finalized gracefully within {_lambdaTime - _sw.ElapsedMilliseconds} ms.");
                    return;
                }
                else
                {
                    await Task.Delay(100);
                    continue;
                }
            }

            Message[] msgArr = null;
            _ssMsgLocker.Lock(() =>
            {
                msgArr = _messages.ToArray().DeepCopy();
                _messages.Clear();
            });

            var t0 = ParallelEx.ForEachAsync(msgArr, async msg =>
            {
                async Task ProcessUser(Message m)
                {
                    var user = m.From;
                    var replyUser = m.ReplyToMessage?.From;

                    if (user != null)
                        await UpdateUserData(user);

                    if (replyUser != null && user?.Id != replyUser.Id)
                        await UpdateUserData(replyUser);
                }

#if (TEST)
                await ProcessUser(msg);
#elif (PUBLISH)
                try
                {
                    await ProcessUser(msg);
                }
                catch (Exception ex)
                {
                    _logger.Log($"[USR ERROR] => Failed ('{msg?.Chat?.Id ?? 0}') to save user status: '{ex.JsonSerializeAsPrettyException(Newtonsoft.Json.Formatting.Indented)}'");
                }
#endif
            });

            var t1 = ParallelEx.ForEachAsync(msgArr, async msg =>
            {
#if (TEST)
                await ProcessMessage(msg);
#elif (PUBLISH)
                try
                {
                    await ProcessMessage(msg);
                }
                catch (Exception ex)
                {
                    _logger.Log($"[MSG ERROR] => Failed ('{msg?.Chat?.Id ?? 0}') to process message ({msg?.MessageId}): '{ex.JsonSerializeAsPrettyException(Newtonsoft.Json.Formatting.Indented)}'");
                    await _TBC.SendTextMessageAsync(chatId: msg.Chat, $"Something went wrong, visit {await GetMasterChatInviteLink()} to find help.", replyToMessageId: msg.MessageId, parseMode: Telegram.Bot.Types.Enums.ParseMode.Markdown);
                }
#endif
            }, maxDegreeOfParallelism: _maxParallelism);

            CallbackQuery[] cbqArr = null;
            _ssCbqLocker.Lock(() =>
            {
                cbqArr = _callbacks.ToArray().DeepCopy();
                _callbacks.Clear();
            });

            var t2 = ParallelEx.ForEachAsync(cbqArr, async cbq =>
            {
#if (TEST)
                await ProcessCallbacks(cbq);
#elif (PUBLISH)
                try
                {
                    await ProcessCallbacks(cbq);
                }
                catch (Exception ex)
                {
                    _logger.Log($"[CBQ ERROR] => Failed ('{cbq.Message?.Chat?.Id ?? 0}') to process callback ({cbq.Id}): '{ex.JsonSerializeAsPrettyException(Newtonsoft.Json.Formatting.Indented)}'");
                    await _TBC.SendTextMessageAsync(chatId: cbq.Message.Chat, $"Something went wrong, visit {await GetMasterChatInviteLink()} to find help.", parseMode: Telegram.Bot.Types.Enums.ParseMode.Markdown);
                }
#endif
            }, maxDegreeOfParallelism: _maxParallelism);

            await Task.WhenAll(t0, t1, t2);
        }
    }
    finally
    {
        _logger.Log($"{context?.FunctionName} => {nameof(FunctionHandler)} => Stopped, Evaluated within: {_sw.ElapsedMilliseconds} [ms]");
    }
}
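The handler's configuration parsing relies on the To*OrDefault string extensions. Their likely shape, sketched as an assumption (the actual AsmodatStandard helpers may behave differently, e.g. around whitespace or culture):

using System;

public static class ParseExtensionsSketch
{
    // Tolerant parsers: return the fallback instead of throwing on null or junk input.
    public static int ToIntOrDefault(this string s, int @default = 0)
        => int.TryParse(s, out var v) ? v : @default;

    public static long ToLongOrDefault(this string s, long @default = 0)
        => long.TryParse(s, out var v) ? v : @default;

    public static double ToDoubleOrDefault(this string s, double @default = 0)
        => double.TryParse(s, out var v) ? v : @default;

    public static bool ToBoolOrDefault(this string s, bool @default = false)
        => bool.TryParse(s, out var v) ? v : @default;
}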
public async Task<SyncInfo> Download()
{
    var bkp = st.source.ToBucketKeyPair();
    var bucket = bkp.bucket;
    si = new SyncInfo(st);
    si.start = DateTimeEx.UnixTimestampNow();

    if (bucket.IsNullOrEmpty())
        throw new Exception($"Source '{st.source ?? "undefined"}' does not contain bucket name.");

    var destination = st.destination?.ToDirectoryInfo();
    if (destination?.TryCreate() != true)
        throw new Exception($"Destination '{st.destination ?? "undefined"}' does not exist and couldn't be created.");

    if (st.verbose)
        Console.WriteLine($"Processing Download Target: '{st?.id ?? "undefined"}'");

    var status = await S3HashStoreStatus.GetStatusFile(s3h, st, st.minTimestamp, st.maxTimestamp);
    var downloadStatus = st.ReadSyncFile();

    if (status == null)
        throw new Exception($"Could not download latest data from the source '{st.source}', status file was not found in '{st?.status ?? "undefined"}' within time range of <{st.minTimestamp.ToDateTimeFromTimestamp().ToLongDateTimeString()},{st.maxTimestamp.ToDateTimeFromTimestamp().ToLongDateTimeString()}>");

    status.files = status?.files?.Where(x => x != null)?.ToArray() ?? new SilyFileInfo[0];
    si.total = status.files.Sum(x => x?.Length ?? 0);

    if (downloadStatus.finalized)
    {
        var elapsed = DateTimeEx.UnixTimestampNow() - si.start;
        if (st.verbose)
            Console.WriteLine($"Download sync file '{st.status}' was already finalized {elapsed}s ago.");

        await Task.Delay(millisecondsDelay: 1000);
        si.success = true;
        return si;
    }

    if (st.verbose)
        Console.WriteLine($"Download Target: '{st?.id ?? "undefined"}' status indicates that target is not finalized");

    int counter = 0;
    var directories = new List<DirectoryInfo>();
    directories.Add(st.destination.ToDirectoryInfo());
    foreach (var dir in status.directories)
    {
        if (dir == null)
            continue;

        var relativeDir = dir.FullName.ToRuntimePath().TrimStart(status.source.ToRuntimePath());
        var downloadDir = PathEx.RuntimeCombine(st.destination, relativeDir).ToDirectoryInfo();

        if (!downloadDir.Exists && st.verbose)
            Console.WriteLine($"Creating Directory [{++counter}/{status.directories.Length}] '{downloadDir.FullName}' ...");

        if (downloadDir?.TryCreate() != true)
            throw new Exception($"Could not find or create directory '{downloadDir?.FullName ?? "undefined"}'.");

        directories.Add(downloadDir);
    }

    if (st.wipe)
    {
        counter = 0;
        var currentDirectories = st.destination.ToDirectoryInfo().GetDirectories(recursive: st.recursive);
        foreach (var dir in currentDirectories)
        {
            if (!directories.Any(x => x.FullName == dir.FullName))
            {
                Console.WriteLine($"Removing Directory [{++counter}/{currentDirectories.Length - directories.Count}] '{dir.FullName}' ...");
                dir.Delete(recursive: st.recursive);
            }
        }
    }

    if (st.verbose)
        Console.WriteLine($"Found {status.files.Length} files and {status.directories.Length} directories for target '{st?.id ?? "undefined"}'.");

    counter = 1;
    var files = new List<FileInfo>();
    await ParallelEx.ForEachAsync(status.files, async file =>
    {
        try
        {
            var relativePath = file.FullName.ToRuntimePath().TrimStart(status.source.ToRuntimePath());
            var downloadPath = PathEx.RuntimeCombine(st.destination, relativePath).ToFileInfo();
            files.Add(downloadPath);

            if (downloadPath.Exists && downloadPath.MD5().ToHexString() == file.MD5)
            {
                if (st.verbose)
                    Console.WriteLine($"Found [{counter}/{status.files.Length}][{file.Length.ToPrettyBytes()}], file '{downloadPath.FullName}' ({file.MD5}) already exists.");
                return; // file already exists
            }

            var key = $"{st.source.TrimEnd('/')}/{file.MD5}".ToBucketKeyPair().key;

            if (st.verbose)
                Console.WriteLine($"Downloading [{counter}/{status.files.Length}][{file.Length.ToPrettyBytes()}] '{bucket}/{key}' => '{downloadPath.FullName}' ...");

            var sw = Stopwatch.StartNew();

            async Task DownloadFile()
            {
                downloadPath.Refresh();
                if (downloadPath.Exists && downloadPath.TryDelete() != true)
                    throw new Exception($"Obsolete file was found in '{downloadPath?.FullName ?? "undefined"}' but couldn't be deleted.");

                using (var stream = await s3h.DownloadObjectAsync(bucketName: bucket, key: key, throwIfNotFound: true))
                {
                    var compressed = file.TryGetProperty("compress") == "zip";

                    if (!downloadPath.Directory.TryCreate())
                        throw new Exception($"Failed to create directory '{downloadPath?.Directory.FullName ?? "undefined"}'.");

                    if (compressed)
                    {
                        if (st.verbose)
                            Console.WriteLine($"UnZipping '{downloadPath.FullName}' ...");

                        downloadPath.UnZipStream(stream);
                    }
                    else
                    {
                        using (var fs = File.Create(downloadPath.FullName))
                            stream.CopyTo(fs);
                    }
                }

                downloadPath.Refresh();
                if (!downloadPath.Exists)
                    throw new Exception($"Failed download '{bucket}/{key}' -/-> '{downloadPath.FullName}'.");

                if (st.verify)
                {
                    var md5 = downloadPath.MD5().ToHexString();
                    if (md5 != file.MD5)
                        throw new Exception($"Failed download '{bucket}/{key}' -/-> '{downloadPath.FullName}', expected MD5 to be '{file.MD5 ?? "undefined"}' but was '{md5 ?? "undefined"}'.");
                }

                await ss.LockAsync(() =>
                {
                    si.transferred += file.Length;
                });
            }

            await DownloadFile().TryCatchRetryAsync(maxRepeats: st.retry).Timeout(msTimeout: st.timeout);
        }
        finally
        {
            await ss.LockAsync(() =>
            {
                ++counter;
                si.processed += file.Length;
                si.progress = ((double)si.processed / si.total) * 100;
                st.WriteInfoFile(si);
            });
        }
    }, maxDegreeOfParallelism: st.parallelism);

    if (st.wipe)
    {
        counter = 0;
        var currentFiles = st.destination.ToDirectoryInfo().GetFiles("*", recursive: st.recursive);
        foreach (var file in currentFiles)
        {
            if (!files.Any(x => x.FullName == file.FullName))
            {
                if (st.verbose)
                    Console.WriteLine($"Removing File [{++counter}/{currentFiles.Length - files.Count}] '{file.FullName}' ...");

                file.Delete();
            }
        }
    }

    downloadStatus.finalized = true;
    si.stop = DateTimeEx.UnixTimestampNow();
    si.speed = (double)si.transferred / Math.Max(si.stop - si.start, 1);
    si.success = true;
    st.WriteSyncFile(downloadStatus);

    if (st.verbose)
    {
        Console.WriteLine($"SUCCESS, processed '{st.status}', all {status.files.Length} files and {status.directories.Length} directories were updated.");
        Console.WriteLine($"Average Download Speed: {si.speed.ToPrettyBytes()}/s");
    }

    return si;
}
private static async Task<bool> executeScheduler(string[] args)
{
    var nArgs = CLIHelper.GetNamedArguments(args);
    switch (args[1]?.ToLower())
    {
        case "github":
        {
            if (Environment.GetEnvironmentVariable("test_connection").ToBoolOrDefault(false))
                Console.WriteLine($"Your Internet Connection is {(SilyWebClientEx.CheckInternetAccess(timeout: 5000) ? "" : "NOT")} available.");

            var userWhitelist = nArgs.GetValueOrDefault("users")?.Split(',');
            var repositoriesWhitelist = nArgs.GetValueOrDefault("repositories")?.Split(',');

            Console.WriteLine($"Fetching scheduler info.");
            var workingDirectory = (await GetVariableByKey("working_directory", nArgs: nArgs)).ToDirectoryInfo();
            var githubSchedule = await GetVariableByKey("github_schedule", nArgs: nArgs);
            var user = GITWrapper.GitHubHelperEx.GetUserFromUrl(githubSchedule);

            if (!userWhitelist.IsNullOrEmpty() && !userWhitelist.Any(x => x == user))
                throw new Exception($"A user whitelist was specified, but user '{user ?? "undefined"}' was not present among the whitelisted users: {userWhitelist.JsonSerialize()}");

            var accessToken = await GetSecretHexToken("github_token", nArgs);
            var repo = GITWrapper.GitHubHelperEx.GetRepoFromUrl(githubSchedule);

            if (!repositoriesWhitelist.IsNullOrEmpty() && !repositoriesWhitelist.Any(x => x == repo))
                throw new Exception($"A repository whitelist was specified, but repo '{repo ?? "undefined"}' was not present among the whitelisted repositories: {repositoriesWhitelist.JsonSerialize()}");

            var branch = GITWrapper.GitHubHelperEx.GetBranchFromUrl(githubSchedule);
            var scheduleLocation = GITWrapper.GitHubHelperEx.GetFileFromUrl(githubSchedule);
            var git = new GITWrapper.GitHubHelper(new GITWrapper.Models.GitHubRepoConfig()
            {
                accessToken = accessToken,
                user = user,
                repository = repo,
                branch = branch
            });

            var contentDirectory = PathEx.RuntimeCombine(workingDirectory.FullName, repo).ToDirectoryInfo();
            var statusDirectory = PathEx.RuntimeCombine(workingDirectory.FullName, "status").ToDirectoryInfo();
            var logsDirectory = PathEx.RuntimeCombine(workingDirectory.FullName, "logs").ToDirectoryInfo();
            var scheduleFileInfo = PathEx.RuntimeCombine(contentDirectory.FullName, scheduleLocation).ToFileInfo();

            contentDirectory.TryDelete(recursive: true, exception: out var contentDirectoryException);
            Console.WriteLine($"Removing content directory '{contentDirectory.FullName}' {(contentDirectory.Exists ? $"did NOT succeed, error: {contentDirectoryException.JsonSerializeAsPrettyException()}" : "succeeded")}.");
            statusDirectory.TryCreate();

            CommandOutput result;
            var pullCommand = $"git clone https://{accessToken}@github.com/{user}/{repo}.git --branch {branch}";
            result = CLIHelper.Console(pullCommand, workingDirectory: workingDirectory.FullName);
            Console.WriteLine(result.JsonSerialize());

            var gitDirectory = PathEx.RuntimeCombine(contentDirectory.FullName, ".git").ToDirectoryInfo();
            gitDirectory.TryDelete(recursive: true);
            Console.WriteLine($"Removing git directory '{gitDirectory.FullName}' {(gitDirectory.Exists ? "did NOT succeed" : "succeeded")}.");

            if (!RuntimeEx.IsWindows())
            {
                result = CLIHelper.Console($"chmod 777 -R ./{repo}", workingDirectory: workingDirectory.FullName);
                Console.WriteLine(result.JsonSerialize());
            }

            if (!scheduleFileInfo.Exists)
            {
                Console.WriteLine($"FAILURE, schedule file '{scheduleFileInfo.FullName}' does not exist or was not defined.");
                return false;
            }

            var deploymentConfig = scheduleFileInfo.DeserialiseJson<DeploymentConfig>();
            var deploymentConfigOld = deploymentConfig.LoadDeploymentConfig(statusDirectory);

            if (deploymentConfig?.enable != true || deploymentConfig.schedules.IsNullOrEmpty())
            {
                Console.WriteLine($"Deployment config '{scheduleFileInfo.FullName}' was not enabled or schedules were not defined.");
                return false;
            }

            // Defines whether schedule executions should be triggered
            var masterTrigger = deploymentConfig.IsTriggered(deploymentConfigOld);

            var serialSchedules = deploymentConfig.schedules
                .Where(x => !(x?.id).IsNullOrEmpty() && x.parallelizable == false)
                ?.OrderBy(x => x.priority)?.DistinctBy(x => x.id)?.ToArray();
            var parallelSchedules = deploymentConfig.schedules
                .Where(x => !(x?.id).IsNullOrEmpty() && x.parallelizable == true)
                ?.OrderBy(x => x.priority)?.DistinctBy(x => x.id)?.ToArray();

            var breakAll = false;
            async Task TryCatchExecute(ExecutionSchedule s)
            {
                var sOld = s.LoadExecutionSchedule(statusDirectory);
                if (s == null || sOld == null)
                {
                    Console.WriteLine($"New or old schedule could not be found.");
                    return;
                }

                if (!s.IsTriggered(sOld, masterTrigger))
                {
                    Console.WriteLine($"WARNING, schedule '{s?.id ?? "undefined"}' execution was not triggered.");
                    return;
                }

                Console.WriteLine($"Processing execution schedule '{s.id}', parallelized: {s.parallelizable}, cron: {s.cron ?? "null"}, trigger: {s.trigger}/{sOld.trigger}.");

                if (s.delay > 0)
                    await Task.Delay(s.delay);

                if (_debug)
                {
                    Console.WriteLine($"WARNING! github schedule will be processed in DEBUG mode");
                    await ProcessSchedule(s, sOld, contentDirectory, statusDirectory, logsDirectory, masterTrigger: masterTrigger);
                    return;
                }

                try
                {
                    await ProcessSchedule(s, sOld, contentDirectory, statusDirectory, logsDirectory, masterTrigger: masterTrigger);
                    breakAll = s.breakAllOnFinalize;

                    if (s.sleep > 0)
                        await Task.Delay(s.sleep);
                }
                catch (Exception ex)
                {
                    try
                    {
                        if (deploymentConfig.throwOnFailure == true)
                        {
                            if (deploymentConfig.finalizeOnFailure)
                            {
                                deploymentConfig.UpdateDeploymentConfig(statusDirectory);
                                breakAll = s.breakAllOnFinalize;
                            }
                            throw;
                        }
                        Console.WriteLine($"FAILED! execution of schedule '{s.id}', parallelized: {s.parallelizable}, error: {ex.JsonSerializeAsPrettyException()}.");
                    }
                    finally
                    {
                        var logPath = PathEx.Combine(logsDirectory.FullName, $"{s.GetFileSafeId() ?? "tmp.log"}.log").ToFileInfo();
                        if (logPath.TryCreate())
                            logPath.AppendAllText(ex.JsonSerializeAsPrettyException());
                    }
                }
            }

            if (deploymentConfig.delay > 0)
                await Task.Delay(deploymentConfig.delay);

            var sum = 0;
            if (!serialSchedules.IsNullOrEmpty())
            {
                sum += serialSchedules.Length;
                foreach (var s in serialSchedules)
                    await TryCatchExecute(s);
            }

            if (!parallelSchedules.IsNullOrEmpty())
            {
                sum += parallelSchedules.Length;
                await ParallelEx.ForEachAsync(parallelSchedules, s => TryCatchExecute(s), maxDegreeOfParallelism: parallelSchedules.Count());
            }

            deploymentConfig.UpdateDeploymentConfig(statusDirectory);
            Console.WriteLine($"SUCCESS, {sum} github schedule/s was/were executed out of {deploymentConfig.schedules.Length}.");

            if (deploymentConfig.sleep > 0)
                await Task.Delay(deploymentConfig.sleep);

            return true;
        }
        case "help":
        case "--help":
        case "-help":
        case "-h":
        case "h":
        {
            HelpPrinter($"{args[0]}", "Command Deployment",
                ("github", "Accepts params: working_directory, github_schedule, github_token"));
            return true;
        }
        default:
        {
            Console.WriteLine($"Try '{args[0]} help' to find out list of available commands.");
            throw new Exception($"Unknown String command: '{args[0]} {args[1]}'");
        }
    }
}
public async Task<SyncResult> UploadAWS(SyncTarget st)
{
    var bkp = st.destination.ToBucketKeyPair();
    var bucket = bkp.bucket;
    var key = bkp.key;
    var timestamp = DateTimeEx.UnixTimestampNow();

    if (bucket.IsNullOrEmpty())
        throw new Exception($"Destination '{st.destination ?? "undefined"}' does not contain bucket name.");

    var path = st.destination;
    var sourceInfo = st.GetSourceInfo();
    if (sourceInfo.rootDirectory == null)
        return new SyncResult(success: false); // failed to get source info

    var directory = st.source.ToDirectoryInfo();
    var prefix = directory.FullName;
    var counter = 0;
    var status = await GetStatusFile(st, UploadStatusFilePrefix);
    var elapsed = DateTimeEx.UnixTimestampNow() - status.timestamp;

    if (status.finalized)
    {
        var remaining = st.retention - elapsed;
        Console.WriteLine($"Upload sync file '{st.status}' was already finalized {elapsed}s ago. Next sync in {remaining}s.");
        await Task.Delay(millisecondsDelay: 1000);
        return new SyncResult(success: true);
    }

    _syncInfo[st.id] = new SyncInfo(st);
    _syncInfo[st.id].total = sourceInfo.files.Sum(x => x?.Length ?? 0);
    _syncInfo[st.id].timestamp = timestamp;

    var cleanup = st.cleanup ? Cleanup(st, status) : null;
    var isStatusFileUpdated = false;
    var files = new List<SilyFileInfo>();
    var speedList = new List<double>();

    await ParallelEx.ForEachAsync(sourceInfo.files, async file =>
    {
        try
        {
            var sw = Stopwatch.StartNew();
            var uploadedFile = status.files?.FirstOrDefault(x => x.FullNameEqual(file));
            string localMD5;
            string destination;

            if (uploadedFile != null) // file was already uploaded to AWS
            {
                if (uploadedFile.LastWriteTime == file.LastWriteTime.ToUnixTimestamp())
                {
                    if (st.verbose > 1)
                        Console.WriteLine($"Skipping upload of '{file.FullName}', file did not change since last upload.");

                    lock (_locker)
                    {
                        files.Add(uploadedFile);
                        ++counter;
                    }
                    return; // do not upload, file did not change
                }

                localMD5 = file.MD5().ToHexString();
                destination = $"{key}/{localMD5}";

                if (localMD5 == uploadedFile.MD5)
                {
                    if (st.verbose > 1)
                        Console.WriteLine($"Skipping upload of '{file.FullName}', file already exists in the '{bucket}/{destination}'.");

                    lock (_locker)
                    {
                        ++counter;
                        files.Add(uploadedFile);
                    }
                    return;
                }
            }
            else // file was not uploaded to AWS yet
            {
                localMD5 = file.MD5().ToHexString();
                destination = $"{key}/{localMD5}";

                if (await _S3Helper.ObjectExistsAsync(bucketName: bucket, key: $"{key}/{localMD5}")
                    .Timeout(msTimeout: st.timeout)
                    .TryCatchRetryAsync(maxRepeats: st.retry))
                {
                    lock (_locker)
                    {
                        ++counter;
                        files.Add(file.ToSilyFileInfo(md5: localMD5));
                    }

                    if (st.verbose > 1)
                        Console.WriteLine($"Skipping upload of '{file.FullName}', file was found in the '{bucket}/{destination}'.");
                    return;
                }
            }

            lock (_locker)
            {
                if (!isStatusFileUpdated) // update status file
                {
                    status.timestamp = timestamp;
                    status.version = status.version + 1;
                    status.finalized = false;
                    var statusUploadResult = _S3Helper.UploadJsonAsync(status.bucket, status.key, status)
                        .Timeout(msTimeout: st.timeout)
                        .TryCatchRetryAsync(maxRepeats: st.retry).Result; // blocking wait: await is not allowed inside a lock
                    isStatusFileUpdated = true;
                }
                ++counter;
            }

            async Task<string> UploadFile()
            {
                file?.Refresh();
                if (file == null || !file.Exists)
                    return null;

                using (var fs = File.Open( // upload new file to AWS
                    file.FullName,
                    FileMode.Open,
                    FileAccess.Read,
                    EnumEx.ToEnum<FileShare>(st.filesShare)))
                {
                    var hash = await _S3Helper.UploadStreamAsync(
                        bucketName: bucket,
                        key: destination,
                        inputStream: fs,
                        throwIfAlreadyExists: false,
                        msTimeout: st.timeout).TryCatchRetryAsync(maxRepeats: st.retry);

                    fs.Close();
                    return hash.IsNullOrEmpty() ? null : hash;
                }
            }

            Console.WriteLine($"Uploading [{counter}/{sourceInfo.files.Length}][{file.Length}B] '{file.FullName}' => '{bucket}/{destination}' ...");
            var md5 = await UploadFile().TryCatchRetryAsync(maxRepeats: st.retry);

            if (md5 != localMD5)
                Console.WriteLine($"Warning! file changed during upload '{file.FullName}' => '{bucket}/{destination}'.");

            if (!md5.IsNullOrEmpty())
            {
                lock (_locker)
                {
                    files.Add(file.ToSilyFileInfo(md5));
                    var megabytes = (double)(file.Length + (md5.Length + bucket.Length + key.Length) * sizeof(char)) / (1024 * 1024);
                    var seconds = (double)(sw.ElapsedMilliseconds + 1) / 1000;
                    speedList.Add(megabytes / seconds);
                }
            }
            else
            {
                Console.WriteLine($"FAILED, Upload '{file.FullName}' => '{bucket}/{destination}'");
            }
        }
        finally
        {
            _syncInfo[st.id].processed += file.Length;
            _syncInfo[st.id].progress = ((double)_syncInfo[st.id].processed / _syncInfo[st.id].total) * 100;
        }
    }, maxDegreeOfParallelism: st.parallelism);

    var directories = sourceInfo.directories.Select(x => x.ToSilyDirectoryInfo()).ToArray();
    var avgSpeed = speedList.IsNullOrEmpty() ? double.NaN : speedList.Average();

    if (cleanup != null)
        await cleanup;

    if (isStatusFileUpdated ||                       // if modifications were made to files
        !status.directories.JsonEquals(directories)) // or directories
    {
        status.files = files.ToArray();
        status.finalized = true;
        status.directories = directories;
        status.source = st.source;
        status.destination = st.destination;
        var uploadResult = await _S3Helper.UploadJsonAsync(status.bucket, status.key, status)
            .Timeout(msTimeout: st.timeout)
            .TryCatchRetryAsync(maxRepeats: st.retry);

        Console.WriteLine($"SUCCESS, processed '{st.status}', all {status.files.Length} files and {status.directories.Length} directories were updated.");
        Console.WriteLine($"Average Upload Speed: {avgSpeed} MB/s");
    }

    return new SyncResult(success: true, speed: avgSpeed);
}
public async Task Process()
{
    var syncTargets = _cfg.GetSyncTargets();
    if (syncTargets.IsNullOrEmpty())
        return;

    foreach (var st in syncTargets)
    {
        if (st.source.IsNullOrEmpty())
            throw new Exception("SyncTarget 'source' was not defined");

        if (st.destination.IsNullOrEmpty())
            throw new Exception("SyncTarget 'destination' was not defined");

        if (st.id.IsNullOrEmpty())
            st.id = Guid.NewGuid().ToString();
    }

    if (_syncResult == null || _syncResult.Count != syncTargets.Length || syncTargets.Any(x => !_syncResult.ContainsKey(x.id)))
    {
        _syncResult = new ConcurrentDictionary<string, SyncResult>();
        foreach (var st in syncTargets)
            _syncResult[st.id] = null; // seed via the indexer; see the note below
    }

    if (_syncInfo == null || _syncInfo.Count != syncTargets.Length || syncTargets.Any(x => !_syncInfo.ContainsKey(x.id)))
    {
        _syncInfo = new ConcurrentDictionary<string, SyncInfo>();
        foreach (var st in syncTargets)
            _syncInfo[st.id] = null;
    }

    ++_run;
    await ParallelEx.ForEachAsync(syncTargets, async st =>
    {
        var sw = Stopwatch.StartNew();
        if (st.type == SyncTarget.types.none)
            return;

        _S3Helper = st.profile.IsNullOrEmpty()
            ? new S3Helper()
            : new S3Helper(AWSWrapper.Extensions.Helper.GetAWSCredentials(st.profile));

        SyncResult result;
        if (st.type == SyncTarget.types.awsUpload)
        {
            /* // debug only
            result = await UploadAWS(st);
            /*/
            result = TryProcessUploadAWS(st);
            //*/
        }
        else if (st.type == SyncTarget.types.awsDownload)
        {
            /* // debug only
            result = await DownloadAWS(st);
            /*/
            result = TryProcessDownloadAWS(st);
            //*/
        }
        else
        {
            throw new Exception($"SyncTarget type '{st.type.ToString()}' was not defined");
        }

        result.run = _run;
        result.duration = sw.ElapsedMilliseconds / 1000;
        _syncResult[st.id] = result;

        if (st.sleep >= 0)
        {
            Console.WriteLine($"Sync Task {st.id} was completed, result: {(result.success ? "success" : "failure")}, sleep: {st.sleep} [ms].");
            await Task.Delay(st.sleep);
        }
    }, maxDegreeOfParallelism: _cfg.parallelism);
}
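A note on the dictionary seeding in Process(): if the fields are typed as the concrete ConcurrentDictionary<TKey, TValue>, a plain .Add(key, value) call does not compile, because ConcurrentDictionary implements IDictionary<TKey, TValue>.Add only explicitly. The indexer (or TryAdd) is the idiomatic way to pre-seed entries:

var results = new System.Collections.Concurrent.ConcurrentDictionary<string, SyncResult>();
foreach (var st in syncTargets)
    results.TryAdd(st.id, null); // or: results[st.id] = null;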