Example #1
        public async Task <SyncInfo> Upload()
        {
            var si     = new SyncInfo(st);
            var bkp    = st.destination.ToBucketKeyPair();
            var bucket = bkp.bucket;
            var key    = bkp.key;

            si.start = DateTimeEx.UnixTimestampNow();

            if (bucket.IsNullOrEmpty())
            {
                throw new Exception($"Destination '{st.destination ?? "undefined"}' does not contain bucket name.");
            }

            var path       = st.destination;
            var sourceInfo = st.GetSourceInfo();

            if (sourceInfo.rootDirectory == null)
            {
                return(si);
            }

            var directory = st.source.ToDirectoryInfo();
            var prefix    = directory.FullName;
            var counter   = 0;

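            // load the previously persisted upload status from S3 so files that were already uploaded can be skipped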
            var status = await S3HashStoreStatus.GetStatusFile(s3h, st, S3HashStoreStatus.UploadStatusFilePrefix);

            var elapsed = DateTimeEx.UnixTimestampNow() - status.timestamp;

            if (status.finalized)
            {
                var remaining = st.retention - elapsed;
                Console.WriteLine($"Upload sync file '{st.status}' was already finalized {elapsed}s ago. Next sync in {remaining}s.");
                await Task.Delay(millisecondsDelay : 1000);

                si.success = true;
                return(si);
            }

            si.total = sourceInfo.files.Sum(x => x?.Length ?? 0);
            var    cleanup             = st.cleanup ? Cleanup(status) : null;
            var    isStatusFileUpdated = false;
            var    files          = new List <SilyFileInfo>();
            var    uploadedBytes  = new List <long>();
            double compressionSum = 0;

            await ParallelEx.ForEachAsync(sourceInfo.files, async file =>
            {
                double compression = 1;

                try
                {
                    var sw           = Stopwatch.StartNew();
                    var uploadedFile = status.files?.FirstOrDefault(x => x.FullNameEqual(file));

                    string localMD5;
                    string destination;
                    if (uploadedFile != null) //file was already uploaded to AWS
                    {
                        if (uploadedFile.LastWriteTime == file.LastWriteTime.ToUnixTimestamp())
                        {
                            if (st.verbose)
                            {
                                Console.WriteLine($"Skipping upload of '{file.FullName}', file has not changed since last upload.");
                            }

                            await ss.LockAsync(() =>
                            {
                                files.Add(uploadedFile);
                                ++counter;
                            });
                            return; //do not upload, file has not changed
                        }

                        localMD5    = file.MD5().ToHexString();
                        destination = $"{key}/{localMD5}";
                        if (localMD5 == uploadedFile.MD5)
                        {
                            if (st.verbose)
                            {
                                Console.WriteLine($"Skipping upload of '{file.FullName}', file already exists in the '{bucket}/{destination}'.");
                            }

                            await ss.LockAsync(() =>
                            {
                                ++counter;
                                files.Add(uploadedFile);
                            });
                            return;
                        }
                    }
                    else //file was not uploaded to AWS yet
                    {
                        localMD5     = file.MD5().ToHexString();
                        destination  = $"{key}/{localMD5}";
                        var metadata = await s3h.ObjectMetadataAsync(
                            bucketName: bucket,
                            key: $"{key}/{localMD5}",
                            throwIfNotFound: false)
                                       .Timeout(msTimeout: st.timeout)
                                       .TryCatchRetryAsync(maxRepeats: st.retry);

                        if (metadata != null) //file exists
                        {
                            await ss.LockAsync(() =>
                            {
                                ++counter;
                                var sfi = file.ToSilyFileInfo(md5: localMD5);

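                                // the stored object is noticeably smaller than the local file, so it was uploaded compressed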
                                if (sfi.Length >= (metadata.ContentLength + 128))
                                {
                                    sfi.TrySetProperty("compress", "zip");
                                }

                                files.Add(sfi);
                            });

                            if (st.verbose)
                            {
                                Console.WriteLine($"Skipping upload of '{file.FullName}', file was found in the '{bucket}/{destination}'.");
                            }
                            return;
                        }
                    }

                    await ss.LockAsync(async() =>
                    {
                        if (!isStatusFileUpdated) //update status file
                        {
                            status.timestamp       = si.start;
                            status.version         = status.version + 1;
                            status.finalized       = false;
                            var statusUploadResult = await s3h.UploadJsonAsync(status.bucket, status.key, status)
                                                     .Timeout(msTimeout: st.timeout)
                                                     .TryCatchRetryAsync(maxRepeats: st.retry);

                            isStatusFileUpdated = true;
                        }

                        ++counter;
                    });

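                    // uploads this file (zip-compressed when it saves space) and returns the remote hash, or null if the file disappeared or the upload failed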
                    async Task <string> UploadFile()
                    {
                        file?.Refresh();
                        if (file == null || !file.Exists)
                        {
                            return(null);
                        }

                        var shareMode = EnumEx.ToEnum <FileShare>(st.filesShare);

                        FileInfo compressedFile = null;

                        await ss.LockAsync(() =>
                        {
                            if (st.compress)
                            {
                                compressedFile = PathEx.RuntimeCombine(st.sync, localMD5).ToFileInfo();
                                file.Zip(compressedFile);
                                compressedFile.Refresh();

                                if ((compressedFile.Length + 128) < file.Length)
                                {
                                    compression = (double)compressedFile.Length / Math.Max(file.Length, 1);
                                }
                                else
                                {
                                    compression = 1;
                                }
                            }
                        });

                        FileStream fs = null;

                        await ss.LockAsync(() =>
                        {
                            fs = File.Open( //upload new file to AWS
                                compression < 1 ? compressedFile.FullName : file.FullName,
                                FileMode.Open,
                                FileAccess.Read,
                                shareMode);
                        });

                        var hash = await s3h.UploadStreamAsync(bucketName: bucket,
                                                               key: destination,
                                                               inputStream: fs,
                                                               throwIfAlreadyExists: false, msTimeout: st.timeout).TryCatchRetryAsync(maxRepeats: st.retry);

                        fs.Close();

                        if (compressedFile != null && !compressedFile.TryDelete())
                        {
                            throw new Exception($"Failed to remove temporary file '{compressedFile.FullName}' after upload.");
                        }

                        return(hash.IsNullOrEmpty() ? null : hash);
                    }

                    if (st.verbose)
                    {
                        Console.WriteLine($"Uploading [{counter}/{sourceInfo.files.Length}][{file.Length.ToPrettyBytes()}] '{file.FullName}' => '{bucket}/{destination}' ...");
                    }

                    var md5 = await UploadFile().TryCatchRetryAsync(maxRepeats: st.retry).Timeout(msTimeout: st.timeout);

                    if (md5.IsNullOrEmpty())
                    {
                        throw new Exception($"FAILED, Upload '{file.FullName}' => '{bucket}/{destination}'");
                    }

                    var silyFile = file.ToSilyFileInfo(localMD5);

                    if (compression < 1)
                    {
                        if (st.verbose)
                        {
                            Console.WriteLine($"File size reduced to [{compression * 100:0.00}%] of the original, file: '{file.FullName}' ({md5})");
                        }
                        silyFile.TrySetProperty("compress", "zip");
                        compressionSum += compression;
                    }
                    else
                    {
                        if (md5 != localMD5 && st.verbose)
                        {
                            Console.WriteLine($"Warning! file hash changed during upload '{file.FullName}' {localMD5} => {md5}.");
                        }

                        compressionSum += 1;
                    }

                    await ss.LockAsync(() =>
                    {
                        files.Add(silyFile);
                        si.transferred += (long)(file.Length * compression);
                    });
                }
                finally
                {
                    await ss.LockAsync(() =>
                    {
                        si.processed += file.Length;
                        si.progress   = ((double)si.processed / si.total) * 100;
                        st.WriteInfoFile(si);
                    });
                }
            }, maxDegreeOfParallelism : st.parallelism);

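            // snapshot of the source directory tree, used below to detect structural changes against the previous status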
            var directories = sourceInfo.directories.Select(x => x.ToSilyDirectoryInfo()).ToArray();

            si.stop        = DateTimeEx.UnixTimestampNow();
            si.speed       = (double)si.transferred / Math.Max(si.stop - si.start, 1);
            si.success     = true;
            si.compression = (double)si.transferred / Math.Max(si.total, 1);

            if (cleanup != null)
            {
                await cleanup;
            }

            if (isStatusFileUpdated ||                       //if modifications were made to files
                !status.directories.JsonEquals(directories)) // or directories
            {
                status.files       = files.ToArray();
                status.finalized   = true;
                status.directories = directories;
                status.source      = st.source;
                status.destination = st.destination;
                var uploadResult = await s3h.UploadJsonAsync(status.bucket, status.key, status)
                                   .Timeout(msTimeout: st.timeout)
                                   .TryCatchRetryAsync(maxRepeats: st.retry);

                if (st.verbose)
                {
                    Console.WriteLine($"SUCCESS, processed '{st.status}', all {status.files.Length} files and {status.directories.Length} directories were updated.");
                    Console.WriteLine($"Uploaded {si.transferred.ToPrettyBytes()}, Speed: {si.speed.ToPrettyBytes()}/s, Compressed: {si.compression*100:0.00}%");
                }
            }

            return(si);
        }
Example #2
        public async Task <SyncInfo> Download()
        {
            var bkp    = st.source.ToBucketKeyPair();
            var bucket = bkp.bucket;

            si       = new SyncInfo(st);
            si.start = DateTimeEx.UnixTimestampNow();

            if (bucket.IsNullOrEmpty())
            {
                throw new Exception($"Source '{st.source ?? "undefined"}' does not contain bucket name.");
            }

            var destination = st.destination?.ToDirectoryInfo();

            if (destination?.TryCreate() != true)
            {
                throw new Exception($"Destination '{st.destination ?? "undefined"}' does not exist and couldn't be created.");
            }

            if (st.verbose)
            {
                Console.WriteLine($"Processing Download Target: '{st?.id ?? "undefined"}'");
            }

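            // fetch the remote status file that falls within the requested timestamp window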
            var status = await S3HashStoreStatus.GetStatusFile(s3h, st, st.minTimestamp, st.maxTimestamp);

            var downloadStatus = st.ReadSyncFile();

            if (status == null)
            {
                throw new Exception($"Could not download latest data from the source '{st.source}', status file was not found in '{st?.status ?? "undefined"}' within time range of <{st.minTimestamp.ToDateTimeFromTimestamp().ToLongDateTimeString()},{st.maxTimestamp.ToDateTimeFromTimestamp().ToLongDateTimeString()}>");
            }

            status.files = status.files?.Where(x => x != null).ToArray() ?? new SilyFileInfo[0];
            si.total     = status.files.Sum(x => x?.Length ?? 0);

            if (downloadStatus.finalized)
            {
                var elapsed = DateTimeEx.UnixTimestampNow() - si.start;
                if (st.verbose)
                {
                    Console.WriteLine($"Download sync file '{st.status}' was already finalized {elapsed}s ago.");
                }
                await Task.Delay(millisecondsDelay : 1000);

                si.success = true;
                return(si);
            }

            if (st.verbose)
            {
                Console.WriteLine($"Download Target: '{st?.id ?? "undefined"}' status indicates that the target is not finalized.");
            }

            int counter     = 0;
            var directories = new List <DirectoryInfo>();

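            // recreate the remote directory layout under the local destination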
            directories.Add(st.destination.ToDirectoryInfo());
            foreach (var dir in status.directories)
            {
                if (dir == null)
                {
                    continue;
                }

                var relativeDir = dir.FullName.ToRuntimePath().TrimStart(status.source.ToRuntimePath());
                var downloadDir = PathEx.RuntimeCombine(st.destination, relativeDir).ToDirectoryInfo();

                if (!downloadDir.Exists && st.verbose)
                {
                    Console.WriteLine($"Creating Directory [{++counter}/{status.directories.Length}] '{downloadDir.FullName}' ...");
                }

                if (downloadDir?.TryCreate() != true)
                {
                    throw new Exception($"Could not find or create directory '{downloadDir?.FullName ?? "undefined"}'.");
                }

                directories.Add(downloadDir);
            }

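            // with wipe enabled, remove local directories that are no longer present in the source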
            if (st.wipe)
            {
                counter = 0;
                var currentDirectories = st.destination.ToDirectoryInfo().GetDirectories(recursive: st.recursive);
                foreach (var dir in currentDirectories)
                {
                    if (!directories.Any(x => x.FullName == dir.FullName))
                    {
                        Console.WriteLine($"Removing Directory [{++counter}/{currentDirectories.Length - directories.Count}] '{dir.FullName}' ...");
                        dir.Delete(recursive: st.recursive);
                    }
                }
            }

            if (st.verbose)
            {
                Console.WriteLine($"Found {status.files.Length} files and {status.directories.Length} directories for target '{st?.id ?? "undefined"}'.");
            }

            counter = 1;
            var files = new List <FileInfo>();
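            // download files in parallel; progress counters are updated under ss.LockAsync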
            await ParallelEx.ForEachAsync(status.files, async file =>
            {
                try
                {
                    var relativePath = file.FullName.ToRuntimePath().TrimStart(status.source.ToRuntimePath());
                    var downloadPath = PathEx.RuntimeCombine(st.destination, relativePath).ToFileInfo();
                    await ss.LockAsync(() => files.Add(downloadPath));

                    if (downloadPath.Exists && downloadPath.MD5().ToHexString() == file.MD5)
                    {
                        if (st.verbose)
                        {
                            Console.WriteLine($"Found [{counter}/{status.files.Length}][{file.Length.ToPrettyBytes()}], file '{downloadPath.FullName}' ({file.MD5}) already exists.");
                        }
                        return; //file already exists
                    }

                    var key = $"{st.source.TrimEnd('/')}/{file.MD5}".ToBucketKeyPair().key;

                    if (st.verbose)
                    {
                        Console.WriteLine($"Downloading [{counter}/{status.files.Length}][{file.Length.ToPrettyBytes()}] '{bucket}/{key}' => '{downloadPath.FullName}' ...");
                    }

                    var sw = Stopwatch.StartNew();

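                    // downloads a single object, unzips it when the "compress" property is set, and optionally verifies its MD5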
                    async Task DownloadFile()
                    {
                        downloadPath.Refresh();
                        if (downloadPath.Exists && downloadPath.TryDelete() != true)
                        {
                            throw new Exception($"Obsolete file was found in '{downloadPath?.FullName ?? "undefined"}' but couldn't be deleted.");
                        }

                        using (var stream = await s3h.DownloadObjectAsync(bucketName: bucket, key: key, throwIfNotFound: true))
                        {
                            var compressed = file.TryGetProperty("compress") == "zip";

                            if (!downloadPath.Directory.TryCreate())
                            {
                                throw new Exception($"Failed to create directory '{downloadPath?.Directory.FullName ?? "undefined"}'.");
                            }

                            if (compressed)
                            {
                                if (st.verbose)
                                {
                                    Console.WriteLine($"UnZipping '{downloadPath.FullName}' ...");
                                }
                                downloadPath.UnZipStream(stream);
                            }
                            else
                            {
                                using (var fs = File.Create(downloadPath.FullName))
                                    stream.CopyTo(fs);
                            }
                        }

                        downloadPath.Refresh();
                        if (!downloadPath.Exists)
                        {
                            throw new Exception($"Failed download '{bucket}/{key}'-/-> '{downloadPath.FullName}'.");
                        }

                        if (st.verify)
                        {
                            var md5 = downloadPath.MD5().ToHexString();
                            if (md5 != file.MD5)
                            {
                                throw new Exception($"Failed download '{bucket}/{key}'-/-> '{downloadPath.FullName}', expected MD5 to be '{file.MD5 ?? "undefined"}' but was '{md5 ?? "undefined"}'.");
                            }
                        }

                        await ss.LockAsync(() =>
                        {
                            si.transferred += file.Length;
                        });
                    }

                    await DownloadFile().TryCatchRetryAsync(maxRepeats: st.retry).Timeout(msTimeout: st.timeout);
                }
                finally
                {
                    await ss.LockAsync(() =>
                    {
                        ++counter;
                        si.processed += file.Length;
                        si.progress   = ((double)si.processed / si.total) * 100;
                        st.WriteInfoFile(si);
                    });
                }
            }, maxDegreeOfParallelism : st.parallelism);

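            // with wipe enabled, remove local files that were not part of this download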
            if (st.wipe)
            {
                counter = 0;
                var currentFiles = st.destination.ToDirectoryInfo().GetFiles("*", recursive: st.recursive);
                foreach (var file in currentFiles)
                {
                    if (!files.Any(x => x.FullName == file.FullName))
                    {
                        if (st.verbose)
                        {
                            Console.WriteLine($"Removing File [{++counter}/{currentFiles.Length - files.Count}] '{file.FullName}' ...");
                        }
                        file.Delete();
                    }
                }
            }

            downloadStatus.finalized = true;
            si.stop    = DateTimeEx.UnixTimestampNow();
            si.speed   = (double)si.transferred / Math.Max(si.stop - si.start, 1);
            si.success = true;

            st.WriteSyncFile(downloadStatus);

            if (st.verbose)
            {
                Console.WriteLine($"SUCCESS, processed '{st.status}', all {status.files.Length} files and {status.directories.Length} directories were updated.");
                Console.WriteLine($"Average Download Speed: {si.speed.ToPrettyBytes()}/s");
            }

            return(si);
        }