Example #1
        public async Task <SyncInfo> Upload()
        {
            var si     = new SyncInfo(st);
            var bkp    = st.destination.ToBucketKeyPair();
            var bucket = bkp.bucket;
            var key    = bkp.key;

            si.start = DateTimeEx.UnixTimestampNow();

            if (bucket.IsNullOrEmpty())
            {
                throw new Exception($"Destination '{st.destination ?? "undefined"}' does not contain bucket name.");
            }

            var path       = st.destination;
            var sourceInfo = st.GetSourceInfo();

            if (sourceInfo.rootDirectory == null)
            {
                return(si);
            }

            var directory = st.source.ToDirectoryInfo();
            var prefix    = directory.FullName;
            var counter   = 0;

            var status = await S3HashStoreStatus.GetStatusFile(s3h, st, S3HashStoreStatus.UploadStatusFilePrefix);

            var elapsed = DateTimeEx.UnixTimestampNow() - status.timestamp;

            if (status.finalized)
            {
                var remaining = st.retention - elapsed;
                Console.WriteLine($"Upload sync file '{st.status}' was already finalized {elapsed}s ago. Next sync in {remaining}s.");
                await Task.Delay(millisecondsDelay : 1000);

                si.success = true;
                return(si);
            }

            si.total = sourceInfo.files.Sum(x => x?.Length ?? 0);
            var    cleanup             = st.cleanup ? Cleanup(status) : null;
            var    isStatusFileUpdated = false;
            var    files          = new List <SilyFileInfo>();
            var    uploadedBytes  = new List <long>();
            double compressionSum = 0;

            await ParallelEx.ForEachAsync(sourceInfo.files, async file =>
            {
                double compression = 1;

                try
                {
                    var sw           = Stopwatch.StartNew();
                    var uploadedFile = status.files?.FirstOrDefault(x => x.FullNameEqual(file));

                    string localMD5;
                    string destination;
                    if (uploadedFile != null) //file was already uploaded to AWS
                    {
                        if (uploadedFile.LastWriteTime == file.LastWriteTime.ToUnixTimestamp())
                        {
                            if (st.verbose)
                            {
                                Console.WriteLine($"Skipping upload of '{file.FullName}', file did not changed since last upload.");
                            }

                            await ss.LockAsync(() =>
                            {
                                files.Add(uploadedFile);
                                ++counter;
                            });
                            return; //do not upload, file has not changed
                        }

                        localMD5    = file.MD5().ToHexString();
                        destination = $"{key}/{localMD5}";
                        if (localMD5 == uploadedFile.MD5)
                        {
                            if (st.verbose)
                            {
                                Console.WriteLine($"Skipping upload of '{file.FullName}', file alredy exists in the '{bucket}/{destination}'.");
                            }

                            await ss.LockAsync(() =>
                            {
                                ++counter;
                                files.Add(uploadedFile);
                            });
                            return;
                        }
                    }
                    else //file was not uploaded to AWS yet
                    {
                        localMD5     = file.MD5().ToHexString();
                        destination  = $"{key}/{localMD5}";
                        var metadata = await s3h.ObjectMetadataAsync(
                            bucketName: bucket,
                            key: $"{key}/{localMD5}",
                            throwIfNotFound: false)
                                       .Timeout(msTimeout: st.timeout)
                                       .TryCatchRetryAsync(maxRepeats: st.retry);

                        if (metadata != null) //file exists
                        {
                            await ss.LockAsync(() =>
                            {
                                ++counter;
                                var sfi = file.ToSilyFileInfo(md5: localMD5);

                                if (sfi.Length >= (metadata.ContentLength + 128)) //remote object is noticeably smaller, assume it was stored compressed
                                {
                                    sfi.TrySetProperty("compress", "zip");
                                }

                                files.Add(sfi);
                            });

                            if (st.verbose)
                            {
                                Console.WriteLine($"Skipping upload of '{file.FullName}', file was found in the '{bucket}/{destination}'.");
                            }
                            return;
                        }
                    }

                    await ss.LockAsync(async() =>
                    {
                        if (!isStatusFileUpdated) //update status file
                        {
                            status.timestamp       = si.start;
                            status.version         = status.version + 1;
                            status.finalized       = false;
                            var statusUploadResult = await s3h.UploadJsonAsync(status.bucket, status.key, status)
                                                     .Timeout(msTimeout: st.timeout)
                                                     .TryCatchRetryAsync(maxRepeats: st.retry);

                            isStatusFileUpdated = true;
                        }

                        ++counter;
                    });

                    async Task <string> UploadFile()
                    {
                        file?.Refresh();
                        if (file == null || !file.Exists)
                        {
                            return(null);
                        }

                        var shareMode = EnumEx.ToEnum <FileShare>(st.filesShare);

                        FileInfo compressedFile = null;

                        await ss.LockAsync(() =>
                        {
                            if (st.compress)
                            {
                                compressedFile = PathEx.RuntimeCombine(st.sync, localMD5).ToFileInfo();
                                file.Zip(compressedFile);
                                compressedFile.Refresh();

                                if ((compressedFile.Length + 128) < file.Length)
                                {
                                    compression = (double)compressedFile.Length / Math.Max(file.Length, 1);
                                }
                                else
                                {
                                    compression = 1;
                                }
                            }
                        });

                        FileStream fs = null;

                        await ss.LockAsync(() =>
                        {
                            fs = File.Open( //upload new file to AWS
                                compression < 1 ? compressedFile.FullName : file.FullName,
                                FileMode.Open,
                                FileAccess.Read,
                                shareMode);
                        });

                        var hash = await s3h.UploadStreamAsync(bucketName: bucket,
                                                               key: destination,
                                                               inputStream: fs,
                                                               throwIfAlreadyExists: false, msTimeout: st.timeout).TryCatchRetryAsync(maxRepeats: st.retry);

                        fs.Close();

                        if (compressedFile?.TryDelete() == false) //guard against null when st.compress is false
                        {
                            throw new Exception($"Failed to remove temporary file '{compressedFile.FullName}' after upload.");
                        }

                        return(hash.IsNullOrEmpty() ? null : hash);
                    }

                    if (st.verbose)
                    {
                        Console.WriteLine($"Uploading [{counter}/{sourceInfo.files.Length}][{file.Length.ToPrettyBytes()}] '{file.FullName}' => '{bucket}/{destination}' ...");
                    }

                    var md5 = await UploadFile().TryCatchRetryAsync(maxRepeats: st.retry).Timeout(msTimeout: st.timeout);

                    if (md5.IsNullOrEmpty())
                    {
                        throw new Exception($"FAILED, Upload '{file.FullName}' => '{bucket}/{destination}'");
                    }

                    var silyFile = file.ToSilyFileInfo(localMD5);

                    if (compression < 1)
                    {
                        if (st.verbose)
                        {
                            Console.WriteLine($"File size reduced by [{compression * 100:0.00} %], file: '{file.FullName}' ({md5})");
                        }
                        silyFile.TrySetProperty("compress", "zip");
                        compressionSum += compression;
                    }
                    else
                    {
                        if (md5 != localMD5 && st.verbose)
                        {
                            Console.WriteLine($"Warning! file hash changed during upload '{file.FullName}' {localMD5} => {md5}.");
                        }

                        compressionSum += 1;
                    }

                    await ss.LockAsync(() =>
                    {
                        files.Add(silyFile);
                        si.transferred += (long)(file.Length * compression); //bytes actually sent for this file
                    });
                }
                finally
                {
                    await ss.LockAsync(() =>
                    {
                        si.processed += file.Length;
                        si.progress   = ((double)si.processed / si.total) * 100;
                        st.WriteInfoFile(si);
                    });
                }
            }, maxDegreeOfParallelism : st.parallelism);

            var directories = sourceInfo.directories.Select(x => x.ToSilyDirectoryInfo()).ToArray();

            si.stop        = DateTimeEx.UnixTimestampNow();
            si.speed       = (double)si.transferred / Math.Max(si.stop - si.start, 1);
            si.success     = true;
            si.compression = (double)si.transferred / Math.Max(si.total, 1);

            if (cleanup != null)
            {
                await cleanup;
            }

            if (isStatusFileUpdated ||                       //if modifications were made to files
                !status.directories.JsonEquals(directories)) // or directories
            {
                status.files       = files.ToArray();
                status.finalized   = true;
                status.directories = directories;
                status.source      = st.source;
                status.destination = st.destination;
                var uploadResult = await s3h.UploadJsonAsync(status.bucket, status.key, status)
                                   .Timeout(msTimeout: st.timeout)
                                   .TryCatchRetryAsync(maxRepeats: st.retry);

                if (st.verbose)
                {
                    Console.WriteLine($"SUCCESS, processed '{st.status}', all {status.files.Length} files and {status.directories.Length} directories were updated.");
                    Console.WriteLine($"Uploaded {si.transferred.ToPrettyBytes()}, Speed: {si.speed.ToPrettyBytes()}/s, Compressed: {si.compression*100:0.00}%");
                }
            }

            return(si);
        }
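
A minimal call-site sketch for the Upload() method above. The SyncTarget fields mirror the ones the method reads (source, destination, compress, parallelism, retry, timeout, verbose); the S3HashStore host type and its constructor are assumptions.

        // Hypothetical driver; the S3HashStore wrapper holding `st`, `s3h` and `ss` is assumed.
        var st = new SyncTarget
        {
            source      = "C:/data/backup",    // local directory to upload
            destination = "my-bucket/backups", // parsed by ToBucketKeyPair()
            compress    = true,
            parallelism = 4,
            retry       = 3,
            timeout     = 30000,               // [ms], used by .Timeout(...)
            verbose     = true
        };

        var si = await new S3HashStore(st).Upload();
        Console.WriteLine($"Transferred {si.transferred}/{si.total} bytes, progress {si.progress:0.00}%.");
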
Example #2
        public async Task <SyncResult> DownloadAWS(SyncTarget st)
        {
            var bkp       = st.source.ToBucketKeyPair();
            var bucket    = bkp.bucket;
            var timestamp = DateTimeEx.UnixTimestampNow();

            if (bucket.IsNullOrEmpty())
            {
                throw new Exception($"Source '{st.source ?? "undefined"}' does not contain bucket name.");
            }

            var destination = st.destination?.ToDirectoryInfo();

            if (destination?.TryCreate() != true)
            {
                throw new Exception($"Destination '{st.destination ?? "undefined"}' does not exist and coudn't be created.");
            }

            var status = await GetStatusFile(st, st.minTimestamp, st.maxTimestamp);

            var downloadStatus = await GetStatusFile(st, DownloadStatusFilePrefix);

            if (status == null)
            {
                throw new Exception($"Could not download latest data from the source '{st.source}', status file was not found in '{st?.status ?? "undefined"}' within time range of <{st.minTimestamp.ToDateTimeFromTimestamp().ToLongDateTimeString()},{st.maxTimestamp.ToDateTimeFromTimestamp().ToLongDateTimeString()}>");
            }

            if (downloadStatus.finalized)
            {
                var elapsed = DateTimeEx.UnixTimestampNow() - downloadStatus.timestamp;
                Console.WriteLine($"Download sync file '{st.status}' was already finalized {elapsed}s ago.");
                await Task.Delay(millisecondsDelay : 1000);

                return(new SyncResult(success: true));
            }

            _syncInfo[st.id]           = new SyncInfo(st);
            _syncInfo[st.id].total     = status.files.Sum(x => x?.Length ?? 0);
            _syncInfo[st.id].timestamp = timestamp;

            int counter     = 0;
            var directories = new List <DirectoryInfo>();

            directories.Add(st.destination.ToDirectoryInfo());
            foreach (var dir in status.directories)
            {
                if (dir == null)
                {
                    continue;
                }

                var relativeDir = dir.FullName.TrimStart(status.source);
                var downloadDir = PathEx.RuntimeCombine(st.destination, relativeDir).ToDirectoryInfo();

                if (!downloadDir.Exists && st.verbose >= 1)
                {
                    Console.WriteLine($"Creating Directory [{++counter}/{status.directories.Length}] '{downloadDir.FullName}' ...");
                }

                if (downloadDir?.TryCreate() != true)
                {
                    throw new Exception($"Could not find or create directory '{downloadDir?.FullName ?? "undefined"}'.");
                }

                directories.Add(downloadDir);
            }

            if (st.wipe)
            {
                counter = 0;
                var currentDirectories = st.destination.ToDirectoryInfo().GetDirectories(recursive: st.recursive);
                foreach (var dir in currentDirectories)
                {
                    if (!directories.Any(x => x.FullName == dir.FullName))
                    {
                        Console.WriteLine($"Removing Directory [{++counter}/{currentDirectories.Length - directories.Count}] '{dir.FullName}' ...");
                        dir.Delete(recursive: st.recursive);
                    }
                }
            }

            counter = 0;
            var files     = new List <FileInfo>();
            var speedList = new List <double>();
            await ParallelEx.ForEachAsync(status.files, async file =>
            {
                if (file == null)
                {
                    return;
                }

                try
                {
                    var relativePath = file.FullName.TrimStart(status.source);
                    var downloadPath = PathEx.RuntimeCombine(st.destination, relativePath).ToFileInfo();
                    lock (_locker)
                    {
                        files.Add(downloadPath); //List<T> is not thread-safe inside the parallel loop
                    }

                    if (downloadPath.Exists && downloadPath.MD5().ToHexString() == file.MD5)
                    {
                        return; //file already exists
                    }
                    if (downloadPath.Exists && downloadPath.TryDelete() != true)
                    {
                        throw new Exception($"Obsolete file was found in '{downloadPath?.FullName ?? "undefined"}' but couldn't be deleted.");
                    }

                    var key = $"{st.source.TrimEnd('/')}/{file.MD5}".ToBucketKeyPair().key;

                    Interlocked.Increment(ref counter); //counter is shared across parallel workers
                    if (st.verbose >= 1)
                    {
                        Console.WriteLine($"Downloading [{counter}/{status.files.Length}][{file.Length}B] '{bucket}/{key}' => '{downloadPath.FullName}' ...");
                    }

                    var sw     = Stopwatch.StartNew();
                    var stream = await _S3Helper.DownloadObjectAsync(bucketName: bucket, key: key, throwIfNotFound: true)
                                 .Timeout(msTimeout: st.timeout);

                    if (!downloadPath.Directory.TryCreate())
                    {
                        throw new Exception($"Failed to create directory '{downloadPath?.Directory.FullName ?? "undefined"}'.");
                    }

                    using (stream) //dispose the S3 response stream as well
                    using (var fs = File.Create(downloadPath.FullName))
                        stream.CopyTo(fs);

                    downloadPath.Refresh();
                    if (!downloadPath.Exists)
                    {
                        throw new Exception($"Failed download '{bucket}/{key}'-/-> '{downloadPath.FullName}'.");
                    }

                    if (st.verify)
                    {
                        var md5 = downloadPath.MD5().ToHexString();
                        if (md5 != file.MD5)
                        {
                            throw new Exception($"Failed download '{bucket}/{key}'-/-> '{downloadPath.FullName}', expected MD5 to be '{md5 ?? "undefined"}' but was '{file.MD5 ?? "undefined"}'.");
                        }
                        else
                        {
                            lock (_locker)
                            {
                                var megabytes = (double)(file.Length + (md5.Length + bucket.Length + key.Length) * sizeof(char)) / (1024 * 1024);
                                var seconds   = (double)(sw.ElapsedMilliseconds + 1) / 1000;
                                var speed     = megabytes / seconds;
                                speedList.Add(speed);
                            }
                        }
                    }
                }
                finally
                {
                    lock (_locker)
                    {
                        _syncInfo[st.id].processed += file.Length;
                        _syncInfo[st.id].progress   = ((double)_syncInfo[st.id].processed / _syncInfo[st.id].total) * 100;
                    }
                }
            }, maxDegreeOfParallelism : st.parallelism);

            if (st.wipe)
            {
                counter = 0;
                var currentFiles = st.destination.ToDirectoryInfo().GetFiles("*", recursive: st.recursive);
                foreach (var file in currentFiles)
                {
                    if (!files.Any(x => x.FullName == file.FullName))
                    {
                        Console.WriteLine($"Removing File [{++counter}/{currentFiles.Length - files.Count}] '{file.FullName}' ...");
                        file.Delete();
                    }
                }
            }

            downloadStatus.finalized = true;
            var uploadResult = await _S3Helper.UploadJsonAsync(downloadStatus.bucket, downloadStatus.key, downloadStatus)
                               .Timeout(msTimeout: st.timeout)
                               .TryCatchRetryAsync(maxRepeats: st.retry);

            var avgSpeed = speedList.IsNullOrEmpty() ? double.NaN : speedList.Average();

            Console.WriteLine($"SUCCESS, processed '{st.status}', all {status.files.Length} files and {status.directories.Length} directories were updated.");
            Console.WriteLine($"Average Download Speed: {avgSpeed} MB/s");
            return(new SyncResult(success: true, speed: avgSpeed));
        }
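
For comparison, a sketch of invoking DownloadAWS; every SyncTarget field below is referenced in the method above, while the host instance `sync` and the SyncResult member names are assumptions.

        // Hypothetical call; `sync` is the assumed object exposing DownloadAWS.
        var st = new SyncTarget
        {
            source       = "my-bucket/backups",
            destination  = "C:/data/restore",
            minTimestamp = 0,
            maxTimestamp = DateTimeEx.UnixTimestampNow(),
            wipe         = true,  // remove local files/directories absent from the status file
            verify       = true,  // re-hash every downloaded file
            recursive    = true,
            parallelism  = 4,
            timeout      = 30000,
            retry        = 3
        };

        var result = await sync.DownloadAWS(st);
        Console.WriteLine($"Success: {result.success}, Avg speed: {result.speed:0.00} MB/s");
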
Example #3
        public async Task FunctionHandler(ILambdaContext context)
        {
            _sw      = Stopwatch.StartNew();
            _context = context;
            _logger  = _context.Logger;
            _logger.Log($"{context?.FunctionName} => {nameof(FunctionHandler)} => Started");
            _verbose                = Environment.GetEnvironmentVariable("verbose").ToBoolOrDefault(true);
            _masterChatId           = new ChatId(Environment.GetEnvironmentVariable("MASTER_CHAT_ID").ToLongOrDefault(-1001261081309));
            _maxParallelism         = Environment.GetEnvironmentVariable("MAX_PARALLELISM").ToIntOrDefault(0);
            _cosmosHubClientTimeout = Environment.GetEnvironmentVariable("HUB_CLIENT_TIMEOUT").ToIntOrDefault(7);
            _maxMessageAge          = Environment.GetEnvironmentVariable("MAX_MESSAGE_AGE").ToDoubleOrDefault(24 * 3600);
            _bucket     = Environment.GetEnvironmentVariable("BUCKET_NAME");
            _lambdaTime = Environment.GetEnvironmentVariable("LAMBDA_TIME").ToIntOrDefault((15 * 60 * 1000) - 5000);

            var secretName = Environment.GetEnvironmentVariable("SECRET_NAME") ?? "KiraFaucetBot";

            if (Environment.GetEnvironmentVariable("test_connection").ToBoolOrDefault(false))
            {
                Log($"Your Internet Connection is {(SilyWebClientEx.CheckInternetAccess(timeout: 5000) ? "" : "NOT")} available.");
            }

            var secret = JObject.Parse(await _SM.GetSecret(secretName));

#if (TEST)
            var accessToken = secret["test_token"]?.ToString();
            //_mnemonic = (secret["test_mnemonic"]?.ToString()).ToSecureString();
            _mnemonic = (secret["mnemonic"]?.ToString()).ToSecureString();
            _bucket   = "kira-faucet-test";
#elif (PUBLISH)
            var accessToken = secret["token"]?.ToString();
            _mnemonic = (secret["mnemonic"]?.ToString()).ToSecureString();
#endif
            _TBC = new TelegramBotClient(accessToken);
            _bot = await _TBC.GetMeAsync();

            Log($"[INFO] {_bot.FirstName} {_version} started! Bot Name: @{_bot.Username ?? "undefined"}, Bot Id: '{_bot.Id}', Master Chat: '{_masterChatId.Identifier}'");

            _TBC.OnMessage            += Tbc_OnMessage;
            _TBC.OnCallbackQuery      += _TBC_OnCallbackQuery;
            _TBC.OnInlineQuery        += _TBC_OnInlineQuery;
            _TBC.OnInlineResultChosen += _TBC_OnInlineResultChosen;
            _TBC.StartReceiving();

            try
            {
                Log($"Processing...");
                var finalize = false;
                while (true)
                {
#if (PUBLISH)
                    if (!finalize && _sw.ElapsedMilliseconds >= _lambdaTime)
                    {
                        _TBC.StopReceiving();
                        finalize = true;
                        _logger.Log($"Finalizing, elapsed {_sw.ElapsedMilliseconds} / {_lambdaTime} [ms] ...");
                    }
#endif

                    if (_messages.IsNullOrEmpty() && _callbacks.IsNullOrEmpty())
                    {
                        if (finalize)
                        {
                            _logger.Log($"Lambda was finalized gracefully within {_lambdaTime - _sw.ElapsedMilliseconds} ms.");
                            return;
                        }
                        else
                        {
                            await Task.Delay(100);

                            continue;
                        }
                    }

                    Message[] msgArr = null;
                    _ssMsgLocker.Lock(() =>
                    {
                        msgArr = _messages.ToArray().DeepCopy();
                        _messages.Clear();
                    });

                    var t0 = ParallelEx.ForEachAsync(msgArr, async msg => {
                        async Task ProcessUser(Message m)
                        {
                            var user      = m.From;
                            var replyUser = m.ReplyToMessage?.From;

                            if (user != null)
                            {
                                await UpdateUserData(user);
                            }

                            if (replyUser != null && user?.Id != replyUser.Id)
                            {
                                await UpdateUserData(replyUser);
                            }
                        }

#if (TEST)
                        await ProcessUser(msg);
#elif (PUBLISH)
                        try
                        {
                            await ProcessUser(msg);
                        }
                        catch (Exception ex)
                        {
                            _logger.Log($"[USR ERROR] => Filed ('{msg?.Chat?.Id ?? 0}') to save user status: '{ex.JsonSerializeAsPrettyException(Newtonsoft.Json.Formatting.Indented)}'");
                        }
#endif
                    });

                    var t1 = ParallelEx.ForEachAsync(msgArr, async msg =>
                    {
#if (TEST)
                        await ProcessMessage(msg);
#elif (PUBLISH)
                        try
                        {
                            await ProcessMessage(msg);
                        }
                        catch (Exception ex)
                        {
                            _logger.Log($"[MSG ERROR] => Filed ('{msg?.Chat?.Id ?? 0}') to process message ({msg?.MessageId}): '{ex.JsonSerializeAsPrettyException(Newtonsoft.Json.Formatting.Indented)}'");
                            await _TBC.SendTextMessageAsync(chatId: msg.Chat,
                                                            $"Something went wrong, visit {await GetMasterChatInviteLink()} to find help.",
                                                            replyToMessageId: msg.MessageId,
                                                            parseMode: Telegram.Bot.Types.Enums.ParseMode.Markdown);
                        }
#endif
                    }, maxDegreeOfParallelism: _maxParallelism);

                    CallbackQuery[] cbqArr = null;
                    _ssCbqLocker.Lock(() =>
                    {
                        cbqArr = _callbacks.ToArray().DeepCopy();
                        _callbacks.Clear();
                    });
                    var t2 = ParallelEx.ForEachAsync(cbqArr, async cbq =>
                    {
#if (TEST)
                        await ProcessCallbacks(cbq);
#elif (PUBLISH)
                        try
                        {
                            await ProcessCallbacks(cbq);
                        }
                        catch (Exception ex)
                        {
                            _logger.Log($"[CBQ ERROR] => Filed ('{cbq.Message?.Chat?.Id ?? 0}') to process callback ({cbq.Id}): '{ex.JsonSerializeAsPrettyException(Newtonsoft.Json.Formatting.Indented)}'");
                            await _TBC.SendTextMessageAsync(chatId: cbq.Message.Chat,
                                                            $"Something went wrong, visit {await GetMasterChatInviteLink()} to find help.",
                                                            parseMode: Telegram.Bot.Types.Enums.ParseMode.Markdown);
                        }
#endif
                    }, maxDegreeOfParallelism: _maxParallelism);

                    await Task.WhenAll(t0, t1, t2);
                }
            }
            finally
            {
                _logger.Log($"{context?.FunctionName} => {nameof(FunctionHandler)} => Stopped, Eveluated within: {_sw.ElapsedMilliseconds} [ms]");
            }
        }
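
A local smoke-test sketch for the handler above. It assumes the containing class is named Function and that the Amazon.Lambda.TestUtilities package is referenced; the environment variable names are the ones the handler actually reads. Note that under the TEST build the message loop runs until the process is stopped.

        // Hypothetical local harness (class name `Function` is an assumption).
        Environment.SetEnvironmentVariable("verbose", "true");
        Environment.SetEnvironmentVariable("SECRET_NAME", "KiraFaucetBot");
        Environment.SetEnvironmentVariable("MAX_PARALLELISM", "2");
        Environment.SetEnvironmentVariable("LAMBDA_TIME", "60000"); // finalize after one minute

        var function = new Function();
        await function.FunctionHandler(new Amazon.Lambda.TestUtilities.TestLambdaContext());
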
Example #4
        public static PeakTransitionGroupFeatureSet GetPeakFeatures(this SrmDocument document,
                                                                    IList <IPeakFeatureCalculator> calcs,
                                                                    IProgressMonitor progressMonitor = null,
                                                                    bool verbose = false)
        {
            // Get features for each peptide
            int             totalPeptides  = document.MoleculeCount;
            int             currentPeptide = 0;
            IProgressStatus status         = new ProgressStatus(Resources.PeakFeatureEnumerator_GetPeakFeatures_Calculating_peak_group_scores);

            // Set up run ID dictionary
            var runEnumDict   = new Dictionary <int, int>();
            var chromatograms = document.Settings.MeasuredResults.Chromatograms;

            foreach (var fileInfo in chromatograms.SelectMany(c => c.MSDataFileInfos))
            {
                runEnumDict.Add(fileInfo.FileIndex, runEnumDict.Count + 1);
            }

            // Using Parallel.For is quicker, but order needs to be maintained
            var moleculeGroupPairs = document.GetMoleculeGroupPairs();
            var peakFeatureLists   = new PeakTransitionGroupFeatures[moleculeGroupPairs.Length][];
            int peakFeatureCount   = 0;

            ParallelEx.For(0, moleculeGroupPairs.Length, i =>
            {
                var pair         = moleculeGroupPairs[i];
                var nodePepGroup = pair.NodeMoleculeGroup;
                var nodePep      = pair.NodeMolecule;
                if (nodePep.TransitionGroupCount == 0)
                {
                    return;
                }

                // Exclude standard peptides
                if (nodePep.GlobalStandardType != null)
                {
                    return;
                }

                if (progressMonitor != null)
                {
                    if (progressMonitor.IsCanceled)
                    {
                        throw new OperationCanceledException();
                    }

                    int? percentComplete = ProgressStatus.ThreadsafeIncementPercent(ref currentPeptide, totalPeptides);
                    if (percentComplete.HasValue && percentComplete.Value < 100)
                    {
                        progressMonitor.UpdateProgress(status = status.ChangePercentComplete(percentComplete.Value));
                    }
                }

                var peakFeatureList = new List <PeakTransitionGroupFeatures>();
                foreach (var peakFeature in document.GetPeakFeatures(nodePepGroup, nodePep, calcs, runEnumDict, verbose))
                {
                    if (peakFeature.PeakGroupFeatures.Any())
                    {
                        peakFeatureList.Add(peakFeature);
                        Interlocked.Increment(ref peakFeatureCount);
                    }
                }
                peakFeatureLists[i] = peakFeatureList.ToArray();
            });

            var result             = new PeakTransitionGroupFeatures[peakFeatureCount];
            int peakFeatureCurrent = 0;
            int decoyCount         = 0;

            foreach (var peakFeatureList in peakFeatureLists)
            {
                if (peakFeatureList == null)
                {
                    continue;
                }

                foreach (var peakFeature in peakFeatureList)
                {
                    result[peakFeatureCurrent++] = peakFeature;
                    if (peakFeature.IsDecoy)
                    {
                        decoyCount++;
                    }
                }
            }

            if (progressMonitor != null)
            {
                progressMonitor.UpdateProgress(status.ChangePercentComplete(100));
            }
            return(new PeakTransitionGroupFeatureSet(decoyCount, result));
        }
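
A sketch of how this extension method might be driven; `document` is an open SrmDocument with imported results, and both the calculator source (PeakFeatureCalculator.Calculators) and the DecoyCount member are assumptions.

        // Hypothetical call site; calculator source and result member names are assumptions.
        IList<IPeakFeatureCalculator> calcs = PeakFeatureCalculator.Calculators.ToList();
        var featureSet = document.GetPeakFeatures(calcs, progressMonitor, verbose: true);
        Console.WriteLine($"Scored feature set with {featureSet.DecoyCount} decoy groups.");
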
Example #5
        /// <summary>
        /// Constructs batches and makes predictions in parallel
        /// </summary>
        /// <param name="predictionClient">Client to use for prediction</param>
        /// <param name="progressMonitor">Monitor to show progress in UI</param>
        /// <param name="progressStatus"/>
        /// <param name="settings">Settings to use for constructing inputs and outputs</param>
        /// <param name="inputs">List of inputs to predict</param>
        /// <param name="token">Token for cancelling prediction</param>
        /// <returns>Predictions from Prosit</returns>
        public TSkylineOutput PredictBatches(PredictionService.PredictionServiceClient predictionClient,
                                             IProgressMonitor progressMonitor, ref IProgressStatus progressStatus, SrmSettings settings, IList <TSkylineInputRow> inputs, CancellationToken token)
        {
            const int CONSTRUCTING_INPUTS_FRACTION = 50;

            progressMonitor.UpdateProgress(progressStatus = progressStatus
                                                            .ChangeMessage(PrositResources.PrositModel_BatchPredict_Constructing_Prosit_inputs)
                                                            .ChangePercentComplete(0));


            inputs = inputs.Distinct().ToArray();

            var processed  = 0;
            var totalCount = inputs.Count;

            var inputLock       = new object();
            var inputsList      = new List <TPrositIn>();
            var validInputsList = new List <List <TSkylineInputRow> >();

            // Construct batch inputs in parallel
            var localProgressStatus = progressStatus;

            ParallelEx.ForEach(PrositHelpers.EnumerateBatches(inputs, PrositConstants.BATCH_SIZE),
                               batchEnumerable =>
            {
                var batch = batchEnumerable.ToArray();

                var batchInputs        = new List <TPrositInputRow>(batch.Length);
                var validSkylineInputs = new List <TSkylineInputRow>(batch.Length);

                foreach (var singleInput in batch)
                {
                    var input = CreatePrositInputRow(settings, singleInput, out _);
                    if (input != null)
                    {
                        batchInputs.Add(input);
                        validSkylineInputs.Add(singleInput);
                    }
                }

                lock (inputLock)
                {
                    inputsList.Add(CreatePrositInput(batchInputs));
                    validInputsList.Add(validSkylineInputs);

                    // ReSharper disable AccessToModifiedClosure
                    processed += batch.Length;
                    progressMonitor.UpdateProgress(localProgressStatus.ChangePercentComplete(CONSTRUCTING_INPUTS_FRACTION * processed / totalCount));
                    // ReSharper restore AccessToModifiedClosure
                }
            });

            processed  = 0;
            totalCount = inputsList.Sum(pi => pi.InputRows.Count);

            const int REQUESTING_INPUTS_FRACTION = 100 - CONSTRUCTING_INPUTS_FRACTION;

            progressStatus = progressStatus
                             .ChangeMessage(PrositResources.PrositModel_BatchPredict_Requesting_predictions_from_Prosit)
                             .ChangePercentComplete(CONSTRUCTING_INPUTS_FRACTION);
            progressMonitor.UpdateProgress(progressStatus);

            // Make predictions batch by batch in sequence and merge the outputs
            var prositOutputAll = new TPrositOut();

            foreach (var prositIn in inputsList)
            {
                var prositOutput = Predict(predictionClient, prositIn, token);
                prositOutputAll = prositOutputAll.MergeOutputs(prositOutput);

                processed     += prositIn.InputRows.Count;
                progressStatus = progressStatus.ChangeMessage(TextUtil.SpaceSeparate(
                                                                  PrositResources.PrositModel_BatchPredict_Requesting_predictions_from_Prosit,
                                                                  processed.ToString(), @"/", totalCount.ToString()))
                                 .ChangePercentComplete(CONSTRUCTING_INPUTS_FRACTION +
                                                        REQUESTING_INPUTS_FRACTION * processed / totalCount);
                progressMonitor.UpdateProgress(progressStatus);
            }

            return(CreateSkylineOutput(settings, validInputsList.SelectMany(i => i).ToArray(), prositOutputAll));
        }
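
A sketch of the call under the same generic parameters; the concrete model instance, gRPC client, settings, and input rows are assumed to be built elsewhere.

        // Hypothetical invocation; `model`, `client`, `settings` and `inputs` are assumptions.
        IProgressStatus status = new ProgressStatus(string.Empty);
        var output = model.PredictBatches(client, progressMonitor, ref status,
                                          settings, inputs, CancellationToken.None);
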
Example #6
        public async Task <SyncInfo> Download()
        {
            var bkp    = st.source.ToBucketKeyPair();
            var bucket = bkp.bucket;

            si       = new SyncInfo(st);
            si.start = DateTimeEx.UnixTimestampNow();

            if (bucket.IsNullOrEmpty())
            {
                throw new Exception($"Source '{st.source ?? "undefined"}' does not contain bucket name.");
            }

            var destination = st.destination?.ToDirectoryInfo();

            if (destination?.TryCreate() != true)
            {
                throw new Exception($"Destination '{st.destination ?? "undefined"}' does not exist and coudn't be created.");
            }

            if (st.verbose)
            {
                Console.WriteLine($"Processing Download Target: '{st?.id ?? "undefined"}'");
            }

            var status = await S3HashStoreStatus.GetStatusFile(s3h, st, st.minTimestamp, st.maxTimestamp);

            var downloadStatus = st.ReadSyncFile();

            if (status == null)
            {
                throw new Exception($"Could not download latest data from the source '{st.source}', status file was not found in '{st?.status ?? "undefined"}' within time range of <{st.minTimestamp.ToDateTimeFromTimestamp().ToLongDateTimeString()},{st.maxTimestamp.ToDateTimeFromTimestamp().ToLongDateTimeString()}>");
            }

            status.files = status?.files?.Where(x => x != null)?.ToArray() ?? new SilyFileInfo[0];
            si.total     = status.files.Sum(x => x?.Length ?? 0);

            if (downloadStatus.finalized)
            {
                var elapsed = DateTimeEx.UnixTimestampNow() - si.start;
                if (st.verbose)
                {
                    Console.WriteLine($"Download sync file '{st.status}' was already finalized {elspased}s ago.");
                }
                await Task.Delay(millisecondsDelay : 1000);

                si.success = true;
                return(si);
            }

            if (st.verbose)
            {
                Console.WriteLine($"Download Target: '{st?.id ?? "undefined"}' status indicates that targt is not finalized");
            }

            int counter     = 0;
            var directories = new List <DirectoryInfo>();

            directories.Add(st.destination.ToDirectoryInfo());
            foreach (var dir in status.directories)
            {
                if (dir == null)
                {
                    continue;
                }

                var relativeDir = dir.FullName.ToRuntimePath().TrimStart(status.source.ToRuntimePath());
                var downloadDir = PathEx.RuntimeCombine(st.destination, relativeDir).ToDirectoryInfo();

                if (!downloadDir.Exists && st.verbose)
                {
                    Console.WriteLine($"Creating Directory [{++counter}/{status.directories.Length}] '{downloadDir.FullName}' ...");
                }

                if (downloadDir?.TryCreate() != true)
                {
                    throw new Exception($"Could not find or create directory '{downloadDir?.FullName ?? "undefined"}'.");
                }

                directories.Add(downloadDir);
            }

            if (st.wipe)
            {
                counter = 0;
                var currentDirectories = st.destination.ToDirectoryInfo().GetDirectories(recursive: st.recursive);
                foreach (var dir in currentDirectories)
                {
                    if (!directories.Any(x => x.FullName == dir.FullName))
                    {
                        Console.WriteLine($"Removing Directory [{++counter}/{currentDirectories.Length - directories.Count}] '{dir.FullName}' ...");
                        dir.Delete(recursive: st.recursive);
                    }
                }
            }

            if (st.verbose)
            {
                Console.WriteLine($"Found {status.files} files and {status.directories} directories for target '{st?.id ?? "undefined"}'.");
            }

            counter = 1;
            var files = new List <FileInfo>();
            await ParallelEx.ForEachAsync(status.files, async file =>
            {
                try
                {
                    var relativePath = file.FullName.ToRuntimePath().TrimStart(status.source.ToRuntimePath());
                    var downloadPath = PathEx.RuntimeCombine(st.destination, relativePath).ToFileInfo();
                    await ss.LockAsync(() => files.Add(downloadPath)); //List<T> is not thread-safe inside the parallel loop

                    if (downloadPath.Exists && downloadPath.MD5().ToHexString() == file.MD5)
                    {
                        if (st.verbose)
                        {
                            Console.WriteLine($"Found [{counter}/{status.files.Length}][{file.Length.ToPrettyBytes()}], file '{downloadPath.FullName}' ({file.MD5}) already exists.");
                        }
                        return; //file already exists
                    }

                    var key = $"{st.source.TrimEnd('/')}/{file.MD5}".ToBucketKeyPair().key;

                    if (st.verbose)
                    {
                        Console.WriteLine($"Downloading [{counter}/{status.files.Length}][{file.Length.ToPrettyBytes()}] '{bucket}/{key}' => '{downloadPath.FullName}' ...");
                    }

                    var sw = Stopwatch.StartNew();

                    async Task DownloadFile()
                    {
                        downloadPath.Refresh();
                        if (downloadPath.Exists && downloadPath.TryDelete() != true)
                        {
                            throw new Exception($"Obsolete file was found in '{downloadPath?.FullName ?? "undefined"}' but couldn't be deleted.");
                        }

                        using (var stream = await s3h.DownloadObjectAsync(bucketName: bucket, key: key, throwIfNotFound: true))
                        {
                            var compressed = file.TryGetProperty("compress") == "zip";

                            if (!downloadPath.Directory.TryCreate())
                            {
                                throw new Exception($"Failed to create directory '{downloadPath?.Directory.FullName ?? "undefined"}'.");
                            }

                            if (compressed)
                            {
                                if (st.verbose)
                                {
                                    Console.WriteLine($"UnZipping '{downloadPath.FullName}' ...");
                                }
                                downloadPath.UnZipStream(stream);
                            }
                            else
                            {
                                using (var fs = File.Create(downloadPath.FullName))
                                    stream.CopyTo(fs);
                            }
                        }

                        downloadPath.Refresh();
                        if (!downloadPath.Exists)
                        {
                            throw new Exception($"Failed download '{bucket}/{key}'-/-> '{downloadPath.FullName}'.");
                        }

                        if (st.verify)
                        {
                            var md5 = downloadPath.MD5().ToHexString();
                            if (md5 != file.MD5)
                            {
                                throw new Exception($"Failed download '{bucket}/{key}'-/-> '{downloadPath.FullName}', expected MD5 to be '{md5 ?? "undefined"}' but was '{file.MD5 ?? "undefined"}'.");
                            }
                        }

                        await ss.LockAsync(() =>
                        {
                            si.transferred += file.Length;
                        });
                    }

                    await DownloadFile().TryCatchRetryAsync(maxRepeats: st.retry).Timeout(msTimeout: st.timeout);
                }
                finally
                {
                    await ss.LockAsync(() =>
                    {
                        ++counter;
                        si.processed += file.Length;
                        si.progress   = ((double)si.processed / si.total) * 100;
                        st.WriteInfoFile(si);
                    });
                }
            }, maxDegreeOfParallelism : st.parallelism);

            if (st.wipe)
            {
                counter = 0;
                var currentFiles = st.destination.ToDirectoryInfo().GetFiles("*", recursive: st.recursive);
                foreach (var file in currentFiles)
                {
                    if (!files.Any(x => x.FullName == file.FullName))
                    {
                        if (st.verbose)
                        {
                            Console.WriteLine($"Removing File [{++counter}/{currentFiles.Length - files.Count}] '{file.FullName}' ...");
                        }
                        file.Delete();
                    }
                }
            }

            downloadStatus.finalized = true;
            si.stop    = DateTimeEx.UnixTimestampNow();
            si.speed   = (double)si.transferred / Math.Max(si.stop - si.start, 1);
            si.success = true;

            st.WriteSyncFile(downloadStatus);

            if (st.verbose)
            {
                Console.WriteLine($"SUCCESS, processed '{st.status}', all {status.files.Length} files and {status.directories.Length} directories were updated.");
                Console.WriteLine($"Average Download Speed: {si.speed.ToPrettyBytes()}/s");
            }

            return(si);
        }
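
The download counterpart can be exercised much like Example #1; the S3HashStore host type is again an assumption, while the SyncTarget fields all appear in the method body.

        // Hypothetical driver for Download(); host type `S3HashStore` is an assumption.
        var st = new SyncTarget
        {
            source      = "my-bucket/backups",
            destination = "C:/data/restore",
            verify      = true,
            wipe        = false,
            parallelism = 4,
            retry       = 3,
            timeout     = 30000,
            verbose     = true
        };

        var si = await new S3HashStore(st).Download();
        Console.WriteLine($"Downloaded {si.transferred.ToPrettyBytes()} at {si.speed.ToPrettyBytes()}/s.");
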
Example #7
        private static async Task <bool> executeScheduler(string[] args)
        {
            var nArgs = CLIHelper.GetNamedArguments(args);

            switch (args[1]?.ToLower())
            {
            case "github":
            {
                if (Environment.GetEnvironmentVariable("test_connection").ToBoolOrDefault(false))
                {
                    Console.WriteLine($"Your Internet Connection is {(SilyWebClientEx.CheckInternetAccess(timeout: 5000) ? "" : "NOT")} available.");
                }

                var userWhitelist         = nArgs.GetValueOrDefault("users")?.Split(',');
                var repositoriesWhitelist = nArgs.GetValueOrDefault("repositories")?.Split(',');

                Console.WriteLine($"Fetching scheduler info.");
                var workingDirectory = (await GetVariableByKey("working_directory", nArgs: nArgs)).ToDirectoryInfo();
                var githubSchedule   = await GetVariableByKey("github_schedule", nArgs : nArgs);

                var user = GITWrapper.GitHubHelperEx.GetUserFromUrl(githubSchedule);

                if (!userWhitelist.IsNullOrEmpty() && !userWhitelist.Any(x => x == user))
                {
                    throw new Exception($"User was specified but, user '{user ?? "undefined"}' was not present among whitelisted users: {userWhitelist.JsonSerialize()}");
                }

                var accessToken = await GetSecretHexToken("github_token", nArgs);

                var repo = GITWrapper.GitHubHelperEx.GetRepoFromUrl(githubSchedule);


                if (!repositoriesWhitelist.IsNullOrEmpty() && !repositoriesWhitelist.Any(x => x == repo)) //compare against the repo, not the user
                {
                    throw new Exception($"Repository was specified but, repo '{repo ?? "undefined"}' was not present among whitelisted repositories: {repositoriesWhitelist.JsonSerialize()}");
                }

                var branch           = GITWrapper.GitHubHelperEx.GetBranchFromUrl(githubSchedule);
                var scheduleLocation = GITWrapper.GitHubHelperEx.GetFileFromUrl(githubSchedule);

                var git = new GITWrapper.GitHubHelper(new GITWrapper.Models.GitHubRepoConfig()
                    {
                        accessToken = accessToken,
                        user        = user,
                        repository  = repo,
                        branch      = branch
                    });

                var contentDirectory = PathEx.RuntimeCombine(workingDirectory.FullName, repo).ToDirectoryInfo();
                var statusDirectory  = PathEx.RuntimeCombine(workingDirectory.FullName, "status").ToDirectoryInfo();
                var logsDirectory    = PathEx.RuntimeCombine(workingDirectory.FullName, "logs").ToDirectoryInfo();
                var scheduleFileInfo = PathEx.RuntimeCombine(contentDirectory.FullName, scheduleLocation).ToFileInfo();

                contentDirectory.TryDelete(recursive: true, exception: out var contentDirectoryException);
                Console.WriteLine($"Removing git directory '{contentDirectory.FullName}' {(contentDirectory.Exists ? $"did NOT suceeded, error: {contentDirectoryException.JsonSerializeAsPrettyException()}" : "succeded")}.");

                statusDirectory.TryCreate();
                CommandOutput result;

                var pullCommand = $"git clone https://{accessToken}@github.com/{user}/{repo}.git --branch {branch}";
                result = CLIHelper.Console(pullCommand, workingDirectory: workingDirectory.FullName);
                Console.WriteLine(result.JsonSerialize());

                var gitDirectory = PathEx.RuntimeCombine(contentDirectory.FullName, ".git").ToDirectoryInfo();
                gitDirectory.TryDelete(recursive: true);
                Console.WriteLine($"Removing git directory '{gitDirectory.FullName}' {(gitDirectory.Exists ? "did NOT" : "")} succeded.");

                if (!RuntimeEx.IsWindows())
                {
                    result = CLIHelper.Console($"chmod 777 -R ./{repo}", workingDirectory: workingDirectory.FullName);
                    Console.WriteLine(result.JsonSerialize());
                }

                if (!scheduleFileInfo.Exists)
                {
                    Console.WriteLine($"FAILURE, schedule file '{scheduleFileInfo.FullName}' does not exist or was not defined.");
                    return(false);
                }

                var deploymentConfig    = scheduleFileInfo.DeserialiseJson <DeploymentConfig>();
                var deploymentConfigOld = deploymentConfig.LoadDeploymentConfig(statusDirectory);

                if (deploymentConfig?.enable != true || deploymentConfig.schedules.IsNullOrEmpty())
                {
                    Console.WriteLine($"Deployment config '{scheduleFileInfo.FullName}' was not enabled or schedules were not defined.");
                    return(false);
                }

                //Determines whether schedule executions should be triggered
                var masterTrigger = deploymentConfig.IsTriggered(deploymentConfigOld);

                var serialSchedules = deploymentConfig.schedules
                                      .Where(x => !(x?.id).IsNullOrEmpty() && x.parallelizable == false)
                                      ?.OrderBy(x => x.priority)?.DistinctBy(x => x.id)?.ToArray();

                var parallelSchedules = deploymentConfig.schedules
                                        .Where(x => !(x?.id).IsNullOrEmpty() && x.parallelizable == true)
                                        ?.OrderBy(x => x.priority)?.DistinctBy(x => x.id)?.ToArray();

                var breakAll = false;
                async Task TryCatchExecute(ExecutionSchedule s)
                {
                    var sOld = s.LoadExecutionSchedule(statusDirectory);

                    if (s == null || sOld == null)
                    {
                        Console.WriteLine($"New or old schedule could not be found.");
                        return;
                    }

                    if (!s.IsTriggered(sOld, masterTrigger))
                    {
                        Console.WriteLine($"WARNING, schedule '{s?.id ?? "undefined"}' execution was not triggered.");
                        return;
                    }


                    Console.WriteLine($"Processing executioon schedule '{s.id}', parralelized: {s.parallelizable}, cron: {s.cron ?? "null"}, trigger: {s.trigger}/{sOld.trigger}.");

                    if (s.delay > 0)
                    {
                        await Task.Delay(s.delay);
                    }

                    if (_debug)
                    {
                        Console.WriteLine($"WARNING! github schedule will be processed in DEBUG mode");
                        await ProcessSchedule(s, sOld, contentDirectory, statusDirectory, logsDirectory, masterTrigger : masterTrigger);

                        return;
                    }

                    try
                    {
                        await ProcessSchedule(s, sOld, contentDirectory, statusDirectory, logsDirectory, masterTrigger : masterTrigger);

                        breakAll = s.breakAllOnFinalize;

                        if (s.sleep > 0)
                        {
                            await Task.Delay(s.sleep);
                        }
                    }
                    catch (Exception ex)
                    {
                        try
                        {
                            if (deploymentConfig.throwOnFailure == true)
                            {
                                if (deploymentConfig.finalizeOnFailure)
                                {
                                    deploymentConfig.UpdateDeploymentConfig(statusDirectory);
                                    breakAll = s.breakAllOnFinalize;
                                }

                                throw;
                            }

                            Console.WriteLine($"FAILED! execution of schedule '{s.id}', parralelized: {s.parallelizable}, error: {ex.JsonSerializeAsPrettyException()}.");
                        }
                        finally
                        {
                            var logPath = PathEx.Combine(logsDirectory.FullName, $"{s.GetFileSafeId() ?? "tmp.log"}.log").ToFileInfo();
                            if (logPath.TryCreate())
                            {
                                logPath.AppendAllText(ex.JsonSerializeAsPrettyException());
                            }
                        }
                    }
                }

                if (deploymentConfig.delay > 0)
                {
                    await Task.Delay(deploymentConfig.delay);
                }

                var sum = 0;
                if (!serialSchedules.IsNullOrEmpty())
                {
                    sum += serialSchedules.Length;
                    foreach (var s in serialSchedules)
                    {
                        await TryCatchExecute(s);
                    }
                }

                if (!parallelSchedules.IsNullOrEmpty())
                {
                    sum += parallelSchedules.Length; //count the parallel batch, not the serial one
                    await ParallelEx.ForEachAsync(parallelSchedules,
                                                  s => TryCatchExecute(s), maxDegreeOfParallelism : parallelSchedules.Count());
                }

                deploymentConfig.UpdateDeploymentConfig(statusDirectory);

                Console.WriteLine($"SUCCESS, {sum} github schedule/s was/were executed out of {deploymentConfig.schedules.Length}.");

                if (deploymentConfig.sleep > 0)
                {
                    await Task.Delay(deploymentConfig.sleep);
                }

                return(true);
            }

            case "help":
            case "--help":
            case "-help":
            case "-h":
            case "h":
            {
                HelpPrinter($"{args[0]}", "Command Deployment",
                            ("github", "Accepts params: working_directory, github_schedule, github_token"));
                return(true);
            }

            default:
            {
                Console.WriteLine($"Try '{args[0]} help' to find out list of available commands.");
                throw new Exception($"Unknown String command: '{args[0]} {args[1]}'");
            }
            }
        }
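
A sketch of invoking the scheduler command from the same class; the `--key=value` argument style is an assumption about CLIHelper.GetNamedArguments, and the URL and names are illustrative only.

        // Hypothetical invocation; the named-argument format is an assumption.
        var ok = await executeScheduler(new[]
        {
            "scheduler", "github",
            "--working_directory=/var/run/scheduler",
            "--github_schedule=https://github.com/example-user/example-repo/blob/master/schedule.json",
            "--users=example-user",
            "--repositories=example-repo"
        });
        Console.WriteLine(ok ? "Scheduler pass completed." : "Scheduler pass skipped or failed.");
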
Example #8
        public async Task <SyncResult> UploadAWS(SyncTarget st)
        {
            var bkp       = st.destination.ToBucketKeyPair();
            var bucket    = bkp.bucket;
            var key       = bkp.key;
            var timestamp = DateTimeEx.UnixTimestampNow();

            if (bucket.IsNullOrEmpty())
            {
                throw new Exception($"Destination '{st.destination ?? "undefined"}' does not contain bucket name.");
            }

            var path       = st.destination;
            var sourceInfo = st.GetSourceInfo();

            if (sourceInfo.rootDirectory == null)
            {
                return(new SyncResult(success: false)); //failed to get source info
            }
            var directory = st.source.ToDirectoryInfo();
            var prefix    = directory.FullName;
            var counter   = 0;

            var status = await GetStatusFile(st, UploadStatusFilePrefix);

            var elapsed = DateTimeEx.UnixTimestampNow() - status.timestamp;

            if (status.finalized)
            {
                var remaining = st.retention - elapsed;
                Console.WriteLine($"Upload sync file '{st.status}' was already finalized {elapsed}s ago. Next sync in {remaining}s.");
                await Task.Delay(millisecondsDelay : 1000);

                return(new SyncResult(success: true));
            }

            _syncInfo[st.id]           = new SyncInfo(st);
            _syncInfo[st.id].total     = sourceInfo.files.Sum(x => x?.Length ?? 0);
            _syncInfo[st.id].timestamp = timestamp;

            var cleanup             = st.cleanup ? Cleanup(st, status) : null;
            var isStatusFileUpdated = false;
            var files     = new List <SilyFileInfo>();
            var speedList = new List <double>();

            await ParallelEx.ForEachAsync(sourceInfo.files, async file =>
            {
                try
                {
                    var sw           = Stopwatch.StartNew();
                    var uploadedFile = status.files?.FirstOrDefault(x => x.FullNameEqual(file));

                    string localMD5;
                    string destination;
                    if (uploadedFile != null) //file was already uploaded to AWS
                    {
                        if (uploadedFile.LastWriteTime == file.LastWriteTime.ToUnixTimestamp())
                        {
                            if (st.verbose > 1)
                            {
                                Console.WriteLine($"Skipping upload of '{file.FullName}', file did not changed since last upload.");
                            }

                            lock (_locker)
                            {
                                files.Add(uploadedFile);
                                ++counter;
                            }
                            return; //do not upload, file has not changed
                        }

                        localMD5    = file.MD5().ToHexString();
                        destination = $"{key}/{localMD5}";
                        if (localMD5 == uploadedFile.MD5)
                        {
                            if (st.verbose > 1)
                            {
                                Console.WriteLine($"Skipping upload of '{file.FullName}', file alredy exists in the '{bucket}/{destination}'.");
                            }

                            lock (_locker)
                            {
                                ++counter;
                                files.Add(uploadedFile);
                            }
                            return;
                        }
                    }
                    else //file was not uploaded to AWS yet
                    {
                        localMD5    = file.MD5().ToHexString();
                        destination = $"{key}/{localMD5}";
                        if (await _S3Helper.ObjectExistsAsync(bucketName: bucket, key: $"{key}/{localMD5}")
                            .Timeout(msTimeout: st.timeout)
                            .TryCatchRetryAsync(maxRepeats: st.retry))
                        {
                            lock (_locker)
                            {
                                ++counter;
                                files.Add(file.ToSilyFileInfo(md5: localMD5));
                            }
                            if (st.verbose > 1)
                            {
                                Console.WriteLine($"Skipping upload of '{file.FullName}', file was found in the '{bucket}/{destination}'.");
                            }
                            return;
                        }
                    }

                    lock (_locker)
                    {
                        if (!isStatusFileUpdated) //update status file
                        {
                            status.timestamp       = timestamp;
                            status.version         = status.version + 1;
                            status.finalized       = false;
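                            //.Result (blocking wait) because 'await' is not permitted inside a lock statement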
                            var statusUploadResult = _S3Helper.UploadJsonAsync(status.bucket, status.key, status)
                                                     .Timeout(msTimeout: st.timeout)
                                                     .TryCatchRetryAsync(maxRepeats: st.retry).Result;

                            isStatusFileUpdated = true;
                        }

                        ++counter;
                    }

                    async Task <string> UploadFile()
                    {
                        file?.Refresh();
                        if (file == null || !file.Exists)
                        {
                            return(null);
                        }

                        using (var fs = File.Open( //upload new file to AWS
                                   file.FullName,
                                   FileMode.Open,
                                   FileAccess.Read,
                                   EnumEx.ToEnum <FileShare>(st.filesShare)))
                        {
                            var hash = await _S3Helper.UploadStreamAsync(bucketName: bucket,
                                                                         key: destination,
                                                                         inputStream: fs,
                                                                         throwIfAlreadyExists: false, msTimeout: st.timeout).TryCatchRetryAsync(maxRepeats: st.retry);

                            return(hash.IsNullOrEmpty() ? null : hash); //the using block disposes (and closes) the stream
                        }
                    }

                    Console.WriteLine($"Uploading [{counter}/{sourceInfo.files.Length}][{file.Length}B] '{file.FullName}' => '{bucket}/{destination}' ...");
                    var md5 = await UploadFile().TryCatchRetryAsync(maxRepeats: st.retry);

                    if (!md5.IsNullOrEmpty() && md5 != localMD5) //guard: a null md5 means the upload failed, not that the file changed
                    {
                        Console.WriteLine($"Warning! file changed during upload '{file.FullName}' => '{bucket}/{destination}'.");
                    }

                    if (!md5.IsNullOrEmpty())
                    {
                        lock (_locker)
                        {
                            files.Add(file.ToSilyFileInfo(md5));
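                            //payload estimate: file bytes plus the UTF-16 size of the md5/bucket/key strings (sizeof(char) == 2)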
                            var megabytes = (double)(file.Length + (md5.Length + bucket.Length + key.Length) * sizeof(char)) / (1024 * 1024);
                            var seconds   = (double)(sw.ElapsedMilliseconds + 1) / 1000;
                            var speed     = megabytes / seconds;
                            speedList.Add(speed);
                        }
                    }
                    else
                    {
                        Console.WriteLine($"FAILED, Upload '{file.FullName}' => '{bucket}/{destination}'");
                    }
                }
                finally
                {
                    _syncInfo[st.id].processed += file.Length;
                    _syncInfo[st.id].progress   = ((double)_syncInfo[st.id].processed / _syncInfo[st.id].total) * 100;
                }
            }, maxDegreeOfParallelism : st.parallelism);

            var directories = sourceInfo.directories.Select(x => x.ToSilyDirectoryInfo()).ToArray();
            var avgSpeed    = speedList.IsNullOrEmpty() ? double.NaN : speedList.Average();

            if (cleanup != null)
            {
                await cleanup;
            }

            if (isStatusFileUpdated ||                       //if modifications were made to files
                !status.directories.JsonEquals(directories)) // or directories
            {
                status.files       = files.ToArray();
                status.finalized   = true;
                status.directories = directories;
                status.source      = st.source;
                status.destination = st.destination;
                var uploadResult = await _S3Helper.UploadJsonAsync(status.bucket, status.key, status)
                                   .Timeout(msTimeout: st.timeout)
                                   .TryCatchRetryAsync(maxRepeats: st.retry);

                Console.WriteLine($"SUCCESS, processed '{st.status}', all {status.files.Length} files and {status.directories.Length} directories were updated.");
                Console.WriteLine($"Average Upload Speed: {avgSpeed} MB/s");
            }

            return(new SyncResult(success: true, speed: avgSpeed));
        }
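
UploadAWS stores every file under a content-addressed key ('{key}/{md5}') and probes the bucket before transferring bytes. _S3Helper is a project wrapper whose internals are not shown; under the assumption that ObjectExistsAsync boils down to a metadata probe, a minimal sketch against the raw AWS SDK (the class and method names here are hypothetical) could look like this:

using System;
using System.IO;
using System.Net;
using System.Security.Cryptography;
using System.Threading.Tasks;
using Amazon.S3;

public static class ContentAddressedS3
{
    //computes the lowercase hex MD5 used as the object key suffix
    public static string Md5Hex(string path)
    {
        using (var md5 = MD5.Create())
        using (var fs = File.OpenRead(path))
        {
            return BitConverter.ToString(md5.ComputeHash(fs)).Replace("-", "").ToLowerInvariant();
        }
    }

    //probes for an existing object; a 404 means the content still has to be uploaded
    public static async Task<bool> ObjectExistsAsync(IAmazonS3 s3, string bucket, string key)
    {
        try
        {
            await s3.GetObjectMetadataAsync(bucket, key);
            return true;
        }
        catch (AmazonS3Exception ex) when (ex.StatusCode == HttpStatusCode.NotFound)
        {
            return false;
        }
    }
}

With this layout, two different local paths holding identical bytes resolve to the same '{key}/{md5}' object, which is what lets the sync skip re-uploads after nothing more than an MD5 comparison.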
Example #9
        public async Task Process()
        {
            var syncTargets = _cfg.GetSyncTargets();

            if (syncTargets.IsNullOrEmpty())
            {
                return;
            }

            foreach (var st in syncTargets)
            {
                if (st.source.IsNullOrEmpty())
                {
                    throw new Exception("SyncTarget 'source' was not defined");
                }
                if (st.destination.IsNullOrEmpty())
                {
                    throw new Exception("SyncTarget 'destination' was not defined");
                }

                if (st.id.IsNullOrEmpty())
                {
                    st.id = Guid.NewGuid().ToString();
                }
            }

            if (_syncResult == null || _syncResult.Count != syncTargets.Length || syncTargets.Any(x => !_syncResult.ContainsKey(x.id)))
            {
                _syncResult = new ConcurrentDictionary <string, SyncResult>();
                foreach (var st in syncTargets)
                {
                    _syncResult[st.id] = null; //ConcurrentDictionary has no public Add; use the indexer (or TryAdd)
                }
            }

            if (_syncInfo == null || _syncInfo.Count != syncTargets.Length || syncTargets.Any(x => !_syncInfo.ContainsKey(x.id)))
            {
                _syncInfo = new ConcurrentDictionary <string, SyncInfo>();
                foreach (var st in syncTargets)
                {
                    _syncInfo[st.id] = null;
                }
            }

            ++_run;
            await ParallelEx.ForEachAsync(syncTargets, async st => {
                var sw = Stopwatch.StartNew();

                if (st.type == SyncTarget.types.none)
                {
                    return;
                }

                _S3Helper = st.profile.IsNullOrEmpty() ?
                            new S3Helper() :
                            new S3Helper(AWSWrapper.Extensions.Helper.GetAWSCredentials(st.profile));

                SyncResult result;
                if (st.type == SyncTarget.types.awsUpload)
                {
                    result = TryProcessUploadAWS(st); //for debugging, call 'await UploadAWS(st)' directly
                }
                else if (st.type == SyncTarget.types.awsDownload)
                {
                    result = TryProcessDownloadAWS(st); //for debugging, call 'await DownloadAWS(st)' directly
                }
                else
                {
                    throw new Exception($"SyncTarget type '{st.type.ToString()}' was not defined");
                }

                result.run         = _run;
                result.duration    = sw.ElapsedMilliseconds / 1000;
                _syncResult[st.id] = result;

                if (st.sleep >= 0)
                {
                    Console.WriteLine($"Sync Task {st.id} was completed, result: {(result.success ? "success":"failure")}, sleep: {st.sleep} [ms].");
                    await Task.Delay(st.sleep);
                }
            }, maxDegreeOfParallelism : _cfg.parallelism);
        }
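
Process seeds one result slot per SyncTarget before the parallel loop. ConcurrentDictionary exposes no public Add method (only TryAdd and the indexer), which is why the seeding above uses the indexer; isolated as a self-contained sketch (SyncResult is stubbed in purely for illustration):

using System.Collections.Concurrent;

public sealed class SyncResult { public bool success; }

public static class SyncBookkeeping
{
    //seeds one (initially null) result slot per target id;
    //the indexer inserts or overwrites, whereas TryAdd would skip existing keys
    public static ConcurrentDictionary<string, SyncResult> Seed(string[] targetIds)
    {
        var results = new ConcurrentDictionary<string, SyncResult>();
        foreach (var id in targetIds)
        {
            results[id] = null;
        }
        return results;
    }
}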