Example #1
 /// <summary>
 /// Swaps two instances of temporary files, equivalent to renaming the files but requires no IO
 /// </summary>
 /// <param name="tf">The temp file to swap with</param>
 public void Swap(TempFile tf)
 {
     string p = m_path;
     m_path = tf.m_path;
     tf.m_path = p;
 }
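A hedged usage sketch (the helper below and both TempFile instances are hypothetical; only Swap and Dispose come from the example above): build a replacement in a scratch file, then exchange it with the current one without touching the disk.

 using Duplicati.Library.Utility;

 // Hypothetical helper: after writing new content to "scratch", make "current"
 // point at it. Only the internal paths are exchanged, so nothing is copied or
 // renamed on disk; disposing "scratch" afterwards removes the old file.
 static void ReplaceWithScratch(TempFile current, TempFile scratch)
 {
     current.Swap(scratch);
     scratch.Dispose();
 }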
Example #2
 public void Encrypt(Library.Interface.IEncryption encryption, IBackendWriter stat)
 {
     if (encryption != null && !this.Encrypted)
     {
         var tempfile = new Library.Utility.TempFile();
         encryption.Encrypt(this.LocalFilename, tempfile);
         this.DeleteLocalFile(stat);
         this.LocalTempfile = tempfile;
         this.Hash = null;
         this.Size = 0;
         this.Encrypted = true;
     }
 }
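For context, a sketch of how a caller might drive this method. The module name, passphrase, item and stat instances are placeholders; the DynamicLoader.EncryptionLoader.GetModule call mirrors the one in Example #8 below.

 using System.Collections.Generic;

 // Hypothetical caller: load an encryption module (here "aes" with an empty
 // option set, both placeholders) and let Encrypt wrap the local file.
 static void EncryptIfNeeded(FileEntryItem item, IBackendWriter stat, string passphrase)
 {
     var encryption = DynamicLoader.EncryptionLoader.GetModule(
         "aes", passphrase, new Dictionary<string, string>());
     item.Encrypt(encryption, stat);   // no-op if the item is already encrypted
 }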
Example #3
 public void SetLocalfilename(string name)
 {
     this.LocalTempfile = Library.Utility.TempFile.WrapExistingFile(name);
     this.LocalTempfile.Protected = true;
 }
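A minimal sketch of the same pattern in isolation (the path is hypothetical): wrapping an existing file lets it be passed wherever a TempFile is expected, and, per the usage in the other examples, Protected keeps the wrapper from deleting the file on dispose.

 // Hypothetical usage of WrapExistingFile + Protected:
 using (var tf = Duplicati.Library.Utility.TempFile.WrapExistingFile(@"C:\data\already-downloaded.zip"))
 {
     tf.Protected = true;   // disposing the wrapper should not delete the file
     // ... hand "tf" to code that expects a TempFile ...
 }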
Example #4
        private TempFile coreDoGetPiping(FileEntryItem item, Interface.IEncryption useDecrypter, out long retDownloadSize, out string retHashcode)
        {
            // With piping allowed, we will parallelize the operation with buffered pipes to maximize throughput:
            // Separated: Download (only for streaming) - Hashing - Decryption
            // The idea is to use DirectStreamLinks inserted into the stream stack, creating a fork on which to run
            // the crypto operations.

            retDownloadSize = -1;
            retHashcode = null;

            bool enableStreaming = (m_backend is Library.Interface.IStreamingBackend && !m_options.DisableStreamingTransfers);

            System.Threading.Tasks.Task<string> taskHasher = null;
            DirectStreamLink linkForkHasher = null;
            System.Threading.Tasks.Task taskDecrypter = null;
            DirectStreamLink linkForkDecryptor = null;

            // keep potential temp files and their streams for cleanup (cannot use using here).
            TempFile retTarget = null, dlTarget = null, decryptTarget = null;
            System.IO.Stream dlToStream = null, decryptToStream = null;
            try
            {
                System.IO.Stream nextTierWriter = null; // target of our stacked streams
                if (!enableStreaming) // we will always need dlTarget if not streaming...
                    dlTarget = new TempFile();
                else if (enableStreaming && useDecrypter == null)
                {
                    dlTarget = new TempFile();
                    dlToStream = System.IO.File.OpenWrite(dlTarget);
                    nextTierWriter = dlToStream; // actually write through to file.
                }

                // setup decryption: fork off a StreamLink from stack, and setup decryptor task
                if (useDecrypter != null)
                {
                    linkForkDecryptor = new DirectStreamLink(1 << 16, false, false, nextTierWriter);
                    nextTierWriter = linkForkDecryptor.WriterStream;
                    linkForkDecryptor.SetKnownLength(item.Size, false); // Set length to allow AES-decryption (not streamable yet)
                    decryptTarget = new TempFile();
                    decryptToStream = System.IO.File.OpenWrite(decryptTarget);
                    taskDecrypter = new System.Threading.Tasks.Task(() =>
                            {
                                using (var input = linkForkDecryptor.ReaderStream)
                                using (var output = decryptToStream)
                                    lock (m_encryptionLock) { useDecrypter.Decrypt(input, output); }
                            }
                        );
                }

                // setup hashing: fork off a StreamLink from stack, then task computes hash
                linkForkHasher = new DirectStreamLink(1 << 16, false, false, nextTierWriter);
                nextTierWriter = linkForkHasher.WriterStream;
                taskHasher = new System.Threading.Tasks.Task<string>(() =>
                        {
                            using (var input = linkForkHasher.ReaderStream)
                                return CalculateFileHash(input);
                        }
                    );

                // OK, forks with tasks are set up, so let's do the download, which is performed in the main thread.
                bool hadException = false;
                try
                {
                    if (enableStreaming)
                    {
                        using (var ss = new ShaderStream(nextTierWriter, false))
                        {
                            using (var ts = new ThrottledStream(ss, m_options.MaxUploadPrSecond, m_options.MaxDownloadPrSecond))
                            using (var pgs = new Library.Utility.ProgressReportingStream(ts, item.Size, HandleProgress))
                            {
                                taskHasher.Start(); // We do not start tasks earlier to be sure the input always gets closed.
                                if (taskDecrypter != null) taskDecrypter.Start();
                                ((Library.Interface.IStreamingBackend)m_backend).Get(item.RemoteFilename, pgs);
                            }
                            retDownloadSize = ss.TotalBytesWritten;
                        }
                    }
                    else
                    {
                        m_backend.Get(item.RemoteFilename, dlTarget);
                        retDownloadSize = new System.IO.FileInfo(dlTarget).Length;
                        using (dlToStream = System.IO.File.OpenRead(dlTarget))
                        {
                            taskHasher.Start(); // We do not start tasks earlier to be sure the input always gets closed.
                            if (taskDecrypter != null) taskDecrypter.Start();
                            new DirectStreamLink.DataPump(dlToStream, nextTierWriter).Run();
                        }
                    }
                }
                catch (Exception)
                { hadException = true; throw; }
                finally
                {
                    // These nested try-catch-finally blocks make sure we do not miss any exceptions and that all started tasks
                    // are properly ended and tidied up. For what is thrown: if an exception occurred in the main thread (download), it is thrown,
                    // then the hasher task is checked and finally the decrypter. This resembles the old logic.
                    try { retHashcode = taskHasher.Result; }
                    catch (AggregateException ex) { if (!hadException) { hadException = true; throw ex.InnerExceptions[0]; } }
                    finally
                    {
                        if (taskDecrypter != null)
                        {
                            try { taskDecrypter.Wait(); }
                            catch (AggregateException ex)
                            {
                                if (!hadException)
                                {
                                    hadException = true;
                                    if (ex.InnerExceptions[0] is System.Security.Cryptography.CryptographicException)
                                        throw ex.InnerExceptions[0];
                                    else
                                        throw new System.Security.Cryptography.CryptographicException(ex.InnerExceptions[0].Message, ex.InnerExceptions[0]);
                                }
                            }
                        }
                    }
                }

                if (useDecrypter != null) // return decrypted temp file
                { retTarget = decryptTarget; decryptTarget = null; }
                else // return downloaded file
                { retTarget = dlTarget; dlTarget = null; }
            }
            finally
            {
                // Be tidy: manually clean up the temp files, as we could not use using blocks here.
                // Unclosed streams should only occur if we failed even before tasks were started.
                if (dlToStream != null) dlToStream.Dispose();
                if (dlTarget != null) dlTarget.Dispose();
                if (decryptToStream != null) decryptToStream.Dispose();
                if (decryptTarget != null) decryptTarget.Dispose();
            }

            return retTarget;
        }
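The method above forks the download stream so hashing (and optionally decryption) runs in parallel with the transfer, reading the payload only once. As a rough illustration of the single-pass idea, here is a minimal sketch using only the BCL, with no DirectStreamLink, throttling or progress reporting; it is not Duplicati's implementation.

 using System;
 using System.IO;
 using System.Security.Cryptography;

 // Sketch of hashing while writing: the bytes pass through the hash transform
 // and land in the target file in the same pass.
 static string CopyAndHash(Stream source, string targetPath)
 {
     using (var sha256 = SHA256.Create())
     using (var file = File.Create(targetPath))
     using (var hashing = new CryptoStream(file, sha256, CryptoStreamMode.Write))
     {
         source.CopyTo(hashing);      // single pass: write + hash
         hashing.FlushFinalBlock();   // finalize so sha256.Hash is available
         return Convert.ToBase64String(sha256.Hash);
     }
 }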
Example #5
 public void DeleteLocalFile(IBackendWriter stat)
 {
     if (this.LocalTempfile != null)
         try { this.LocalTempfile.Dispose(); }
         catch (Exception ex) { stat.AddWarning(string.Format("Failed to dispose temporary file: {0}", this.LocalTempfile), ex); }
         finally { this.LocalTempfile = null; }
 }
Example #6
 public TempFileStream(TempFile file)
 {
     m_file   = file;
     m_stream = System.IO.File.Open(file, FileMode.Create, FileAccess.ReadWrite, FileShare.None);
 }
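For comparison, a rough BCL-only sketch of a temp file whose backing file disappears when the stream is closed; this is an illustration of the idea, not how TempFileStream itself is implemented.

 using System.IO;

 // A scratch file that is deleted automatically on close, using
 // FileOptions.DeleteOnClose instead of a TempFile wrapper.
 static FileStream CreateSelfDeletingTempStream()
 {
     string path = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
     return new FileStream(path, FileMode.CreateNew, FileAccess.ReadWrite,
                           FileShare.None, 4096, FileOptions.DeleteOnClose);
 }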
Example #7
 public TempFileStream(string file)
     : this(TempFile.WrapExistingFile(file))
 {
 }
Example #8
        public string Backup(string[] sources)
        {
            BackupStatistics bs = new BackupStatistics(DuplicatiOperationMode.Backup);
            SetupCommonOptions(bs);

            BackendWrapper backend = null;
            VerificationFile verification = null;

            if (m_options.DontReadManifests)
                throw new Exception(Strings.Interface.ManifestsMustBeReadOnBackups);
            if (m_options.SkipFileHashChecks)
                throw new Exception(Strings.Interface.CannotSkipHashChecksOnBackup);

            if (sources == null || sources.Length == 0)
                throw new Exception(Strings.Interface.NoSourceFoldersError);

            //Make sure they all have the same format and exist
            for (int i = 0; i < sources.Length; i++)
            {
                sources[i] = Utility.Utility.AppendDirSeparator(System.IO.Path.GetFullPath(sources[i]));

                if (!System.IO.Directory.Exists(sources[i]))
                    throw new System.IO.IOException(String.Format(Strings.Interface.SourceFolderIsMissingError, sources[i]));
            }

            //Sanity check for duplicate folders and multiple inclusions of the same folder
            for (int i = 0; i < sources.Length - 1; i++)
            {
                for (int j = i + 1; j < sources.Length; j++)
                    if (sources[i].Equals(sources[j], Utility.Utility.IsFSCaseSensitive ? StringComparison.CurrentCulture : StringComparison.CurrentCultureIgnoreCase))
                        throw new Exception(string.Format(Strings.Interface.SourceDirIsIncludedMultipleTimesError, sources[i]));
                    else if (sources[i].StartsWith(sources[j], Utility.Utility.IsFSCaseSensitive ? StringComparison.CurrentCulture : StringComparison.CurrentCultureIgnoreCase))
                        throw new Exception(string.Format(Strings.Interface.SourceDirsAreRelatedError, sources[i], sources[j]));
            }

            if (m_options.AsynchronousUpload)
            {
                m_asyncReserved = ASYNC_RESERVED;
                m_allowUploadProgress = false;
            }

            //Unused, but triggers errors in the encryption setup here
            Library.Interface.IEncryption encryptionModule = m_options.NoEncryption ? null : DynamicLoader.EncryptionLoader.GetModule(m_options.EncryptionModule, m_options.Passphrase, m_options.RawOptions);

            using (new Logging.Timer("Backup from " + string.Join(";", sources) + " to " + m_backend))
            {
                try
                {
                    if (OperationStarted != null)
                        OperationStarted(this, DuplicatiOperation.Backup, bs.OperationMode, -1, -1, Strings.Interface.StatusLoadingFilelist, "");
                    OperationProgress(this, DuplicatiOperation.Backup, bs.OperationMode, -1, -1, Strings.Interface.StatusLoadingFilelist, "");

                    CheckLiveControl();

                    bool full = m_options.Full;
                    if (full)
                        bs.SetTypeReason(string.Format(Strings.Interface.FullBecauseFlagWasSet, "full"));

                    backend = new BackendWrapper(bs, m_backend, m_options);
                    backend.ProgressEvent += new Duplicati.Library.Main.RSync.RSyncDir.ProgressEventDelegate(BackupTransfer_ProgressEvent);
                    backend.AsyncItemProcessedEvent += new EventHandler(backend_AsyncItemProcessedEvent);

                    m_progress = 0.0;

                    OperationProgress(this, DuplicatiOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, Strings.Interface.StatusReadingIncrementals, "");

                    CheckLiveControl();

                    List<ManifestEntry> backupsets;

                    if (full)
                    {
                        //This will create the target folder
                        backend.List(false);
                        backupsets = new List<ManifestEntry>();
                    }
                    else
                    {
                        //This will list all files on the backend and create the target folder
                        backupsets = backend.GetBackupSets();
                    }

                    if (backupsets.Count == 0)
                    {
                        if (!full)
                            bs.SetTypeReason(Strings.Interface.FullBecauseBackendIsEmpty);
                        full = true;
                    }
                    else
                    {
                        //A prior backup exists; extract the compression and encryption modules used in the most recent entry
                        string compression = null;
                        string encryption = null;
                        for (int i = backupsets.Count - 1; compression == null && i >= 0; i--)
                        {
                            for (int j = backupsets[i].Incrementals.Count - 1; compression == null && j >= 0; j--)
                                for (int k = backupsets[i].Incrementals[j].Volumes.Count - 1; compression == null && k >= 0; k--)
                                {
                                    compression = backupsets[i].Incrementals[j].Volumes[k].Key.Compression;
                                    encryption = backupsets[i].Incrementals[j].Volumes[k].Key.EncryptionMode;

                                    if (compression != null)
                                        break;
                                }

                            for (int k = backupsets[i].Volumes.Count - 1; compression == null && k >= 0; k--)
                            {
                                compression = backupsets[i].Volumes[k].Key.Compression;
                                encryption = backupsets[i].Volumes[k].Key.EncryptionMode;

                                if (compression != null)
                                    break;
                            }
                        }

                        if (compression != null)
                        {
                            m_options.SetEncryptionModuleDefault(encryption);
                            m_options.SetCompressionModuleDefault(compression);
                        }
                    }

                    string fullCriteria1 = null;
                    string fullCriteria2 = null;
                    if (!full)
                    {
                        full = DateTime.Now > m_options.FullIfOlderThan(backupsets[backupsets.Count - 1].Time);
                        if (full)
                            bs.SetTypeReason(string.Format(Strings.Interface.FullBecauseLastFullIsFrom, backupsets[backupsets.Count - 1].Time, m_options.FullIfOlderThanValue));
                        else if (!string.IsNullOrEmpty(m_options.FullIfOlderThanValue))
                            fullCriteria1 = string.Format(Strings.Interface.IncrementalBecauseLastFullIsFrom, backupsets[backupsets.Count - 1].Time, m_options.FullIfOlderThanValue);
                    }

                    if (!full && m_options.FullIfMoreThanNIncrementals > 0)
                    {
                        full = backupsets[backupsets.Count - 1].Incrementals.Count >= m_options.FullIfMoreThanNIncrementals;
                        if (full)
                            bs.SetTypeReason(string.Format(Strings.Interface.FullBecauseThereAreNIncrementals, backupsets[backupsets.Count - 1].Incrementals.Count, m_options.FullIfMoreThanNIncrementals));
                        else
                            fullCriteria2 = string.Format(Strings.Interface.IncrementalBecauseThereAreNIncrementals, backupsets[backupsets.Count - 1].Incrementals.Count, m_options.FullIfMoreThanNIncrementals);

                    }
                    bs.Full = full;
                    if (!full)
                    {
                        if (fullCriteria1 == null && fullCriteria2 == null)
                            bs.SetTypeReason(Strings.Interface.IncrementalBecauseNoFlagsWereSet);
                        else if (fullCriteria2 == null)
                            bs.SetTypeReason(fullCriteria1);
                        else if (fullCriteria1 == null)
                            bs.SetTypeReason(fullCriteria2);
                        else
                            bs.SetTypeReason(fullCriteria1 + ". " + fullCriteria2);

                    }

                    List<string> controlfiles = new List<string>();
                    if (!string.IsNullOrEmpty(m_options.SignatureControlFiles))
                        controlfiles.AddRange(m_options.SignatureControlFiles.Split(System.IO.Path.PathSeparator));

                    int vol = 0;
                    long totalsize = 0;
                    Manifestfile manifest = new Manifestfile();

                    using (Utility.TempFolder tempfolder = new Duplicati.Library.Utility.TempFolder())
                    {
                        List<KeyValuePair<ManifestEntry, Library.Interface.ICompression>> patches = new List<KeyValuePair<ManifestEntry, Duplicati.Library.Interface.ICompression>>();
                        if (!full)
                        {
                            m_incrementalFraction = INCREMENAL_COST;
                            List<ManifestEntry> entries = new List<ManifestEntry>();
                            entries.Add(backupsets[backupsets.Count - 1]);
                            entries.AddRange(backupsets[backupsets.Count - 1].Incrementals);

                            //Check before we start the download
                            CheckLiveControl();

                            VerifyBackupChainWithFiles(backend, entries[entries.Count - 1]);
                            if (m_options.CreateVerificationFile)
                                verification = new VerificationFile(entries, backend.FilenameStrategy);

                            OperationProgress(this, DuplicatiOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, Strings.Interface.StatusReadingIncrementals, "");

                            patches = FindPatches(backend, entries, tempfolder, false, bs);

                            //Check before we start the download
                            CheckLiveControl();
                            Manifestfile latest = GetManifest(backend, backupsets[backupsets.Count - 1]);

                            //Manifest version 1 does not support multiple folders
                            if (latest.Version == 1)
                                latest.SourceDirs = new string[] { sources[0] };

                            if (latest.SourceDirs.Length != sources.Length)
                            {
                                if (m_options.FullIfSourceFolderChanged)
                                {
                                    Logging.Log.WriteMessage("Source folder count changed, issuing full backup", Duplicati.Library.Logging.LogMessageType.Information);
                                    if (!full)
                                        bs.SetTypeReason(Strings.Interface.FullBecauseSourceFoldersChanged);
                                    full = true;
                                }
                                else
                                    throw new Exception(string.Format(Strings.Interface.NumberOfSourceFoldersHasChangedError, latest.SourceDirs.Length, sources.Length));
                            }
                            else
                            {

                                if (!m_options.AllowSourceFolderChange)
                                {
                                    foreach (string s1 in latest.SourceDirs)
                                    {
                                        bool found = false;
                                        foreach (string s2 in sources)
                                            if (s1.Equals(s2, Utility.Utility.IsFSCaseSensitive ? StringComparison.CurrentCulture : StringComparison.CurrentCultureIgnoreCase))
                                            {
                                                found = true;
                                                break;
                                            }

                                        if (!found)
                                        {
                                            if (m_options.FullIfSourceFolderChanged)
                                            {
                                                Logging.Log.WriteMessage("Source folders changed, issuing full backup", Duplicati.Library.Logging.LogMessageType.Information);
                                                if (!full)
                                                    bs.SetTypeReason(Strings.Interface.FullBecauseSourceFoldersChanged);
                                                full = true;
                                                break; //Exit the folder loop
                                            }
                                            else
                                                throw new Exception(string.Format(Strings.Interface.SourceFoldersHasChangedError, s1));
                                        }
                                    }

                                    manifest.SourceDirs = latest.SourceDirs;
                                }
                                else
                                {
                                    manifest.SourceDirs = sources;
                                }
                            }

                        }

                        DateTime backuptime = DateTime.Now;
                        DateTime backupchaintime;

                        if (full)
                        {
                            patches.Clear();
                            m_incrementalFraction = 0.0;
                            manifest.SourceDirs = sources;
                            if (m_options.CreateVerificationFile)
                                verification = new VerificationFile(new ManifestEntry[0], backend.FilenameStrategy);
                            backupchaintime = backuptime;
                        }
                        else
                        {
                            backupchaintime = patches[0].Key.Time;
                            manifest.PreviousManifestFilename = patches[patches.Count - 1].Key.Filename;
                            manifest.PreviousManifestHash = patches[patches.Count - 1].Key.RemoteHash;
                        }

                        OperationProgress(this, DuplicatiOperation.Backup, bs.OperationMode, -1, -1, Strings.Interface.StatusBuildingFilelist, "");

                        bool completedWithoutChanges;

                        using (RSync.RSyncDir dir = new Duplicati.Library.Main.RSync.RSyncDir(manifest.SourceDirs, bs, m_options.Filter, patches))
                        {
                            CheckLiveControl();

                            dir.ProgressEvent += new Duplicati.Library.Main.RSync.RSyncDir.ProgressEventDelegate(BackupRSyncDir_ProgressEvent);

                            dir.DisableFiletimeCheck = m_options.DisableFiletimeCheck;
                            dir.MaxFileSize = m_options.SkipFilesLargerThan;
                            using (new Logging.Timer("Initiating multipass"))
                                dir.InitiateMultiPassDiff(full, m_options);

                            string tempVolumeFolder = m_options.AsynchronousUpload ? m_options.AsynchronousUploadFolder : m_options.TempDir;

                            bool done = false;
                            while (!done && totalsize < m_options.MaxSize)
                            {
                                using (new Logging.Timer("Multipass " + (vol + 1).ToString()))
                                using (Utility.TempFile signaturefile = new Duplicati.Library.Utility.TempFile(System.IO.Path.Combine(tempVolumeFolder, Guid.NewGuid().ToString())))
                                using (Utility.TempFile contentfile = new Duplicati.Library.Utility.TempFile(System.IO.Path.Combine(tempVolumeFolder, Guid.NewGuid().ToString())))
                                {
                                    OperationProgress(this, DuplicatiOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, string.Format(Strings.Interface.StatusCreatingVolume, vol + 1), "");

                                    CheckLiveControl();

                                    using (Library.Interface.ICompression signaturearchive = DynamicLoader.CompressionLoader.GetModule(m_options.CompressionModule, signaturefile, m_options.RawOptions))
                                    using (Library.Interface.ICompression contentarchive = DynamicLoader.CompressionLoader.GetModule(m_options.CompressionModule, contentfile, m_options.RawOptions))
                                    {
                                        //If we are all out, stop now, this may cause incomplete partial files
                                        if (m_options.MaxSize - totalsize < (contentarchive.FlushBufferSize + backend.FileSizeOverhead))
                                            break;

                                        //Add signature files to archive
                                        foreach (string s in controlfiles)
                                            if (!string.IsNullOrEmpty(s))
                                                using (System.IO.Stream cs = signaturearchive.CreateFile(System.IO.Path.Combine(RSync.RSyncDir.CONTROL_ROOT, System.IO.Path.GetFileName(s))))
                                                using (System.IO.FileStream fs = System.IO.File.OpenRead(s))
                                                    Utility.Utility.CopyStream(fs, cs);

                                        //Only add control files to the very first volume
                                        controlfiles.Clear();

                                        done = dir.MakeMultiPassDiff(signaturearchive, contentarchive, (Math.Min(m_options.VolumeSize, m_options.MaxSize - totalsize)) - backend.FileSizeOverhead);

                                        //TODO: This is not the correct size, we need to account for file size overhead as well
                                        totalsize += signaturearchive.Size;
                                        totalsize += contentarchive.Size;

                                        //TODO: This is not the best way to determine this
                                        if (totalsize >= m_options.MaxSize)
                                            dir.FinalizeMultiPass(signaturearchive, contentarchive, long.MaxValue);

                                    }

                                    completedWithoutChanges = done && !dir.AnyChangesFound;

                                    if (m_options.UploadUnchangedBackups || full)
                                        completedWithoutChanges = false;

                                    if (!completedWithoutChanges)
                                    {

                                        if (m_options.AsynchronousUpload)
                                        {
                                            m_lastProgressMessage = Strings.Interface.StatusWaitingForUpload;
                                            m_allowUploadProgress = true;
                                            m_allowUploadProgressAfter = DateTime.Now.AddSeconds(1);
                                        }
                                        else
                                            OperationProgress(this, DuplicatiOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, string.Format(Strings.Interface.StatusUploadingContentVolume, vol + 1), "");

                                        //Last check before we upload, we do not interrupt transfers
                                        CheckLiveControl();

                                        //The backendwrapper will remove these
                                        signaturefile.Protected = true;
                                        contentfile.Protected = true;

                                        ContentEntry ce = new ContentEntry(backuptime, full, vol + 1);
                                        SignatureEntry se = new SignatureEntry(backuptime, full, vol + 1);

                                        using (new Logging.Timer("Writing delta file " + (vol + 1).ToString()))
                                            backend.Put(ce, contentfile);

                                        if (!m_options.AsynchronousUpload)
                                            OperationProgress(this, DuplicatiOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, string.Format(Strings.Interface.StatusUploadingSignatureVolume, vol + 1), "");

                                        using (new Logging.Timer("Writing remote signatures"))
                                            backend.Put(se, signaturefile);

                                        manifest.AddEntries(ce, se);

                                        if (verification != null)
                                        {
                                            verification.AddFile(ce);
                                            verification.AddFile(se);
                                        }
                                    }
                                }

                                if (!completedWithoutChanges)
                                {
                                    //The backend wrapper will remove these
                                    Utility.TempFile mf = new Duplicati.Library.Utility.TempFile();

                                    using (new Logging.Timer("Writing manifest " + backuptime.ToUniversalTime().ToString("yyyyMMddTHHmmssK")))
                                    {
                                        //Alternate primary/secondary
                                        ManifestEntry mfe = new ManifestEntry(backuptime, full, manifest.SignatureHashes.Count % 2 != 0);
                                        manifest.SelfFilename = backend.GenerateFilename(mfe);
                                        manifest.Save(mf);

                                        if (!m_options.AsynchronousUpload)
                                            OperationProgress(this, DuplicatiOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, string.Format(Strings.Interface.StatusUploadingManifestVolume, vol + 1), "");

                                        //Write the file
                                        mf.Protected = true;
                                        backend.Put(mfe, mf);

                                        if (verification != null)
                                            verification.UpdateManifest(mfe);
                                    }

                                    if (verification != null)
                                    {
                                        using (new Logging.Timer("Writing verification " + backuptime.ToUniversalTime().ToString("yyyyMMddTHHmmssK")))
                                        {
                                            Utility.TempFile vt = new Duplicati.Library.Utility.TempFile();

                                            verification.Save(vt);

                                            if (!m_options.AsynchronousUpload)
                                                OperationProgress(this, DuplicatiOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, Strings.Interface.StatusUploadingVerificationVolume, "");

                                            vt.Protected = true;
                                            backend.Put(new VerificationEntry(backupchaintime), vt);
                                        }
                                    }

                                    if (m_options.AsynchronousUpload)
                                        m_allowUploadProgress = false;

                                    //The file volume counter
                                    vol++;
                                }
                            }
                        }

                        //If we are running asynchronously, we now enter the end-game
                        if (m_options.AsynchronousUpload)
                        {
                            m_lastProgressMessage = Strings.Interface.StatusWaitingForUpload;
                            m_allowUploadProgress = true;
                            m_allowUploadProgressAfter = DateTime.Now;

                            //Before we clear the temp folder, we need to ensure that all volumes are uploaded.
                            //To allow the UI to show some progress while uploading, we perform the remaining
                            // uploads synchronously
                            List<KeyValuePair<BackupEntryBase, string>> pendingUploads = backend.ExtractPendingUploads();

                            //Figure out what volume number we are at
                            foreach (KeyValuePair<BackupEntryBase, string> p in pendingUploads)
                                if (p.Key is ManifestEntry)
                                    vol--;

                            double unitcost = m_asyncReserved / pendingUploads.Count;

                            //Then upload each remaining volume in order
                            foreach (KeyValuePair<BackupEntryBase, string> p in pendingUploads)
                            {
                                string msg;
                                if (p.Key is ManifestEntry)
                                {
                                    vol++;
                                    msg = string.Format(Strings.Interface.StatusUploadingManifestVolume, vol);
                                }
                                else if (p.Key is SignatureEntry)
                                    msg = string.Format(Strings.Interface.StatusUploadingSignatureVolume, ((SignatureEntry)p.Key).Volumenumber);
                                else if (p.Key is ContentEntry)
                                {
                                    msg = string.Format(Strings.Interface.StatusUploadingContentVolume, ((ContentEntry)p.Key).Volumenumber);

                                    //We allow a stop or pause request here
                                    CheckLiveControl();
                                }
                                else if (p.Key is VerificationEntry)
                                    msg = Strings.Interface.StatusUploadingVerificationVolume;
                                else
                                    throw new InvalidOperationException();

                                OperationProgress(this, DuplicatiOperation.Backup, bs.OperationMode, (int)(m_progress * 100), -1, msg, "");
                                backend.Put(p.Key, p.Value);
                                m_asyncReserved -= unitcost;
                                m_progress += unitcost;
                            }
                        }
                    }
                }
                catch(Exception ex)
                {
                    //If this is a controlled user-requested stop, wait for the current upload to complete
                    if (backend != null && ex is LiveControl.ExecutionStoppedException)
                    {
                        try
                        {
                            if (m_options.AsynchronousUpload)
                            {
                                m_lastProgressMessage = Strings.Interface.StatusWaitingForUpload;
                                m_allowUploadProgress = true;
                                m_allowUploadProgressAfter = DateTime.Now;

                                //Wait for the current upload to complete and then delete all remaining temporary files
                                foreach (KeyValuePair<BackupEntryBase, string> p in backend.ExtractPendingUploads())
                                    try
                                    {
                                        if (System.IO.File.Exists(p.Value))
                                            System.IO.File.Delete(p.Value);
                                    }
                                    catch { } //Better to delete as many as possible rather than choke on a single file
                            }

                        }
                        catch { } //We already have an exception, just go with that
                    }

                    if (backend == null || backend.ManifestUploads == 0)
                    {
                        Logging.Log.WriteMessage(string.Format(Strings.Interface.ErrorRunningBackup, ex.Message), Logging.LogMessageType.Error);
                        throw; //This also activates "finally", unlike in other languages...
                    }

                    Logging.Log.WriteMessage(string.Format(Strings.Interface.PartialUploadMessage, backend.ManifestUploads, ex.Message), Logging.LogMessageType.Warning);
                    bs.LogError(string.Format(Strings.Interface.PartialUploadMessage, backend.ManifestUploads, ex.Message), ex);
                }
                finally
                {
                    m_progress = 100.0;
                    if (backend != null)
                        try { backend.Dispose(); }
                        catch { }

                    if (OperationCompleted != null)
                        OperationCompleted(this, DuplicatiOperation.Backup, bs.OperationMode, 100, -1, Strings.Interface.StatusCompleted, "");

                    OperationProgress(this, DuplicatiOperation.Backup, bs.OperationMode, 100, -1, Strings.Interface.StatusCompleted, "");
                }
            }

            bs.EndTime = DateTime.Now;

            return bs.ToString();
        }
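The early part of the method normalizes the source folders and rejects duplicates and nested paths before any work is done. A simplified stand-alone version of that check (with both nesting directions tested and case handling reduced to ordinal comparisons; the messages are placeholders) might look like this:

 using System;
 using System.IO;

 // Simplified sanity check: normalize each source with a trailing separator,
 // then reject any pair that is identical or where one contains the other.
 static void ValidateSources(string[] sources, bool caseSensitiveFileSystem)
 {
     var cmp = caseSensitiveFileSystem ? StringComparison.Ordinal : StringComparison.OrdinalIgnoreCase;
     for (int i = 0; i < sources.Length; i++)
         sources[i] = Path.GetFullPath(sources[i]).TrimEnd(Path.DirectorySeparatorChar)
                      + Path.DirectorySeparatorChar;

     for (int i = 0; i < sources.Length - 1; i++)
         for (int j = i + 1; j < sources.Length; j++)
         {
             if (sources[i].Equals(sources[j], cmp))
                 throw new ArgumentException("Source folder is included multiple times: " + sources[i]);
             if (sources[i].StartsWith(sources[j], cmp) || sources[j].StartsWith(sources[i], cmp))
                 throw new ArgumentException("Source folders are nested: " + sources[i] + " and " + sources[j]);
         }
 }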
Example #9
        public void Put(string remotename, System.IO.Stream stream)
        {
            TempFile tmp = null;

            // A bit dirty, but we need the underlying stream to compute the hash without any interference
            var measure = stream;
            while (measure is OverrideableStream)
                measure = typeof(OverrideableStream).GetField("m_basestream", System.Reflection.BindingFlags.DeclaredOnly | System.Reflection.BindingFlags.Instance | System.Reflection.BindingFlags.NonPublic).GetValue(measure) as System.IO.Stream;

            if (measure == null)
                throw new Exception(string.Format("Unable to unwrap stream from: {0}", stream.GetType()));

            string sha1;
            if (measure.CanSeek)
            {
                // Record the stream position
                var p = measure.Position;

                // Compute the hash
                using(var hashalg = System.Security.Cryptography.HashAlgorithm.Create("sha1"))
                    sha1 = Library.Utility.Utility.ByteArrayAsHexString(hashalg.ComputeHash(measure));

                // Reset the stream position
                measure.Position = p;
            }
            else
            {
                // No seeking possible, use a temp file
                tmp = new TempFile();
                using(var sr = System.IO.File.OpenWrite(tmp))
                using(var hc = new HashCalculatingStream(measure, "sha1"))
                {
                    Library.Utility.Utility.CopyStream(hc, sr);
                    sha1 = hc.GetFinalHashString();
                }

                stream = System.IO.File.OpenRead(tmp);
            }

            if (m_filecache == null)
                List();

            try
            {
                var fileinfo = m_helper.GetJSONData<UploadFileResponse>(
                    UploadUrlData.UploadUrl,
                    req =>
                    {
                        req.Method = "POST";
                        req.Headers["Authorization"] = UploadUrlData.AuthorizationToken;
                        req.Headers["X-Bz-Content-Sha1"] = sha1;
						req.Headers["X-Bz-File-Name"] = m_urlencodedprefix + Utility.Uri.UrlPathEncode(remotename);
                        req.ContentType = "application/octet-stream";
                        req.ContentLength = stream.Length;
                    },

                    req =>
                    {
                        using(var rs = req.GetRequestStream())
                            Utility.Utility.CopyStream(stream, rs);
                    }
                );

                // Delete old versions
                if (m_filecache.ContainsKey(remotename))
                    Delete(remotename);

                m_filecache[remotename] = new List<FileEntity>();                
                m_filecache[remotename].Add(new FileEntity() {
                    FileID = fileinfo.FileID,
                    FileName = fileinfo.FileName,
                    Action = "upload",
                    Size = fileinfo.ContentLength,
                    UploadTimestamp = (long)(DateTime.UtcNow - Utility.Utility.EPOCH).TotalMilliseconds
                });
            }
            catch(Exception ex)
            {
                m_filecache = null;

                var code = (int)B2AuthHelper.GetExceptionStatusCode(ex);
                if (code >= 500 && code <= 599)
                    m_uploadUrl = null;
                
                throw;
            }
            finally
            {
                try
                {
                    if (tmp != null)
                        tmp.Dispose();
                }
                catch
                {
                }
            }
        }
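When the incoming stream cannot seek, the method buffers it to a temp file while hashing, so the SHA1 can be sent ahead of the payload. A stripped-down sketch of that branch using only the BCL (the names and tuple return are illustrative; the real code uses Duplicati's TempFile and HashCalculatingStream):

 using System;
 using System.IO;
 using System.Security.Cryptography;

 // Buffer a non-seekable stream to disk while hashing it in the same pass,
 // then hand back the hex SHA1 and a readable stream over the buffered copy.
 static (string Sha1Hex, Stream UploadStream) BufferAndHash(Stream input)
 {
     string tmp = Path.GetTempFileName();
     string hex;
     using (var sha1 = SHA1.Create())
     {
         using (var file = File.OpenWrite(tmp))
         using (var hashing = new CryptoStream(file, sha1, CryptoStreamMode.Write))
             input.CopyTo(hashing);           // disposing finalizes the hash
         hex = BitConverter.ToString(sha1.Hash).Replace("-", "").ToLowerInvariant();
     }
     // The caller is responsible for deleting "tmp" once the upload is done.
     return (hex, File.OpenRead(tmp));
 }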
Example #10
        /// <summary>
        /// Will attempt to read the manifest file, optionally reverting to the secondary manifest if reading one fails.
        /// </summary>
        /// <param name="backend">The backendwrapper to read from</param>
        /// <param name="entry">The manifest to read</param>
        /// <returns>The parsed manifest</returns>
        private Manifestfile GetManifest(BackendWrapper backend, ManifestEntry entry)
        {
            if (m_options.DontReadManifests)
            {
                Manifestfile mf = new Manifestfile();
                mf.SignatureHashes = null;
                mf.ContentHashes = null;
                return mf;
            }

            if (entry.ParsedManifest != null)
                return entry.ParsedManifest;
            else if (entry.Alternate != null && entry.Alternate.ParsedManifest != null)
                return entry.Alternate.ParsedManifest;

            if (OperationProgress != null && backend.Statistics != null)
                OperationProgress(this, GetOperationType(), backend.Statistics.OperationMode, (int)(m_progress * 100), -1, string.Format(Strings.Interface.StatusReadingManifest, entry.Time.ToShortDateString() + " " + entry.Time.ToShortTimeString()), "");

            bool tryAlternateManifest = false;

            //This method has some very special logic to ensure correct handling of errors
            //The assumption is that it is possible to determine if the error occurred due to a
            // transfer problem or a corrupt file. If the former happens, the operation should
            // be retried, and thus an exception is thrown. If the latter, the file should
            // be ignored and the backup file should be used.
            //
            //We detect a parsing error, either directly or indirectly through CryptographicException,
            // and assume that a parsing error is an indication of a broken file.
            //All other errors are assumed to be transfer problems, and throw exceptions.
            //
            //This holds as long as the backend always throws an exception if a partial file
            // was downloaded. The FTP backend may not honor this, and some webservers
            // may omit the "Content-Length" header, which will cause problems.
            //There is a guard against partial downloads in BackendWrapper.GetInternal()

            using (new Logging.Timer("Get " + entry.Filename))
            using (Utility.TempFile tf = new Duplicati.Library.Utility.TempFile())
            {
                try
                {
                    backend.Get(entry, null, tf, null);

                    //We now have the file decrypted, if the next step fails,
                    // it's broken XML or invalid content
                    tryAlternateManifest = true;
                    Manifestfile mf = new Manifestfile(tf, m_options.SkipFileHashChecks);

                    if (string.IsNullOrEmpty(mf.SelfFilename))
                        mf.SelfFilename = entry.Filename;

                    if (mf.ContentHashes != null && entry.Alternate != null)
                    {
                        //Special case, the manifest has not recorded all volumes,
                        // we must see if the alternate manifest has more volumes
                        if (entry.Volumes.Count > mf.ContentHashes.Count)
                        {
                            //Do not try the alternate, we just did
                            tryAlternateManifest = false;
                            Logging.Log.WriteMessage(string.Format(Strings.Interface.ReadingSecondaryManifestLogMessage, entry.Alternate.Filename), Duplicati.Library.Logging.LogMessageType.Information);

                            Manifestfile amf = null;

                            //Read the alternate file and try to differentiate between a defective file and a partial one
                            bool defectFile = false;

                            try
                            {
                                System.IO.File.Delete(tf);
                                backend.Get(entry.Alternate, null, tf, null);
                            }
                            catch (System.Security.Cryptography.CryptographicException cex)
                            {
                                //We assume that CryptoException means partial file
                                Logging.Log.WriteMessage(string.Format(Strings.Interface.SecondaryManifestReadErrorLogMessage, entry.Alternate.Filename, cex), Duplicati.Library.Logging.LogMessageType.Warning);
                                defectFile = true;
                            }

                            if (!defectFile)
                            {
                                try
                                {
                                    amf = new Manifestfile(tf, m_options.SkipFileHashChecks);
                                }
                                catch (Exception ex)
                                {
                                    //Parsing error means partial file
                                    Logging.Log.WriteMessage(string.Format(Strings.Interface.SecondaryManifestReadErrorLogMessage, entry.Alternate.Filename, ex), Duplicati.Library.Logging.LogMessageType.Warning);
                                    defectFile = true;
                                }
                            }

                            //If the alternate manifest is correct, assign it so we have a copy
                            if (!defectFile && amf != null)
                            {
                                if (string.IsNullOrEmpty(amf.SelfFilename))
                                    amf.SelfFilename = entry.Alternate.Filename;

                                //If the alternate manifest has more files than the primary, we use that one
                                if (amf.ContentHashes != null && amf.ContentHashes.Count > mf.ContentHashes.Count)
                                {
                                    entry.Alternate.ParsedManifest = amf;

                                    if (m_options.SkipFileHashChecks)
                                    {
                                        mf.SignatureHashes = null;
                                        mf.ContentHashes = null;
                                    }

                                    return amf;

                                }
                            }
                        }
                    }

                    if (m_options.SkipFileHashChecks)
                    {
                        mf.SignatureHashes = null;
                        mf.ContentHashes = null;
                    }

                    entry.ParsedManifest = mf;
                    return mf;
                }
                catch (Exception ex)
                {
                    //Only try secondary if the parsing/decrypting fails, not if the transfer fails
                    if (entry.Alternate != null && (ex is System.Security.Cryptography.CryptographicException || tryAlternateManifest))
                    {
                        //TODO: If it is a version error, there is no need to read the alternate version
                        Logging.Log.WriteMessage(string.Format(Strings.Interface.PrimaryManifestReadErrorLogMessage, entry.Filename, ex.Message), Duplicati.Library.Logging.LogMessageType.Warning);
                        try
                        {
                            Logging.Log.WriteMessage(string.Format(Strings.Interface.ReadingSecondaryManifestLogMessage, entry.Alternate.Filename), Duplicati.Library.Logging.LogMessageType.Information);
                            return GetManifest(backend, entry.Alternate);
                        }
                        catch (Exception ex2)
                        {
                            Logging.Log.WriteMessage(string.Format(Strings.Interface.SecondaryManifestReadErrorLogMessage, entry.Alternate.Filename, ex2.Message), Duplicati.Library.Logging.LogMessageType.Warning);
                        }
                    }

                    //Report the original error
                    throw;
                }
            }
        }
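Stripped of the manifest-specific details (the real code only falls back when parsing or decryption fails, not on transfer errors), the error handling reduces to a primary/alternate read where the original error is reported if both fail. A generic sketch of that shape:

 using System;

 // Generic primary/alternate fallback: try the primary source, fall back to
 // the alternate on failure, and surface the primary's error if both fail.
 static T ReadWithFallback<T>(Func<T> readPrimary, Func<T> readAlternate)
 {
     try
     {
         return readPrimary();
     }
     catch (Exception primaryError)
     {
         try { return readAlternate(); }
         catch { throw primaryError; }   // report the original error
     }
 }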
Example #11
        private string RemoveBackupSets(BackendWrapper backend, List<ManifestEntry> entries)
        {
            StringBuilder sb = new StringBuilder();

            sb.Append(backend.FinishDeleteTransaction(false));

            if (entries.Count > 0)
            {
                System.Xml.XmlDocument doc = new System.Xml.XmlDocument();
                System.Xml.XmlNode root = doc.AppendChild(doc.CreateElement("files"));
                root.Attributes.Append(doc.CreateAttribute("version")).Value = "1";

                foreach (ManifestEntry me in entries)
                {
                    if (me.Alternate != null)
                        root.AppendChild(doc.CreateElement("file")).InnerText = me.Alternate.Filename;
                    root.AppendChild(doc.CreateElement("file")).InnerText = me.Filename;

                    if (me.Verification != null)
                        root.AppendChild(doc.CreateElement("file")).InnerText = me.Verification.Filename;

                    foreach (KeyValuePair<SignatureEntry, ContentEntry> kx in me.Volumes)
                    {
                        root.AppendChild(doc.CreateElement("file")).InnerText = kx.Key.Filename;
                        root.AppendChild(doc.CreateElement("file")).InnerText = kx.Value.Filename;
                    }
                }

                if (m_options.Force)
                {
                    using (TempFile tf = new TempFile())
                    {
                        doc.Save(tf);
                        tf.Protected = true;
                        backend.WriteDeleteTransactionFile(tf);
                    }
                }

                foreach (ManifestEntry me in entries)
                {
                    sb.AppendLine(string.Format(Strings.Interface.DeletingBackupSetMessage, me.Time.ToString(System.Globalization.CultureInfo.InvariantCulture)));

                    if (m_options.Force)
                    {
                        //Delete manifest
                        if (me.Alternate != null)
                            backend.Delete(me.Alternate);

                        backend.Delete(me);

                        if (me.Verification != null)
                            backend.Delete(me.Verification);

                        foreach (KeyValuePair<SignatureEntry, ContentEntry> kx in me.Volumes)
                        {
                            backend.Delete(kx.Key);
                            backend.Delete(kx.Value);
                        }
                    }
                }

                if (m_options.Force)
                    backend.RemoveDeleteTransactionFile();

                if (!m_options.Force && entries.Count > 0)
                    sb.AppendLine(Strings.Interface.FilesAreNotForceDeletedMessage);
            }

            return sb.ToString();
        }
Example #12
        public List<KeyValuePair<BackupEntryBase, Exception>> VerifyBackupChain()
        {
            CommunicationStatistics stats = new CommunicationStatistics(DuplicatiOperationMode.Verify);
            SetupCommonOptions(stats);

            List<KeyValuePair<BackupEntryBase, Exception>> results = new List<KeyValuePair<BackupEntryBase, Exception>>();

            if (m_options.DontReadManifests)
                throw new InvalidOperationException(Strings.Interface.ManifestsMustBeRead);
            if (m_options.SkipFileHashChecks)
                throw new InvalidOperationException(Strings.Interface.CannotVerifyWithoutHashes);

            if (!string.IsNullOrEmpty(m_options.SignatureCachePath))
            {
                stats.LogWarning(Strings.Interface.DisablingSignatureCacheForVerification, null);
                m_options.SignatureCachePath = null;
            }

            using (BackendWrapper backend = new BackendWrapper(stats, m_backend, m_options))
            {
                //Find the spot in the chain where we start
                ManifestEntry bestFit = backend.GetBackupSet(m_options.RestoreTime);

                //Get the list of manifests to validate
                List<ManifestEntry> entries = new List<ManifestEntry>();
                entries.Add(bestFit);
                entries.AddRange(bestFit.Incrementals);

                entries.Reverse();

                foreach (ManifestEntry me in entries)
                {
                    Manifestfile mf = null;

                    try
                    {
                        mf = GetManifest(backend, me);
                        VerifyBackupChainWithFiles(backend, me);

                        if (mf.SignatureHashes.Count != me.Volumes.Count)
                            results.Add(new KeyValuePair<BackupEntryBase,Exception>(me, new Exception(string.Format(Strings.Interface.ManifestAndFileCountMismatchError, mf.SelfFilename, mf.SignatureHashes.Count, me.Volumes.Count))));
                        else
                            results.Add(new KeyValuePair<BackupEntryBase,Exception>(me, null));
                    }
                    catch (Exception ex)
                    {
                        results.Add(new KeyValuePair<BackupEntryBase,Exception>(me, ex));
                    }

                    if (mf != null)
                    {
                        int volumes = Math.Min(mf.SignatureHashes.Count, me.Volumes.Count);
                        for (int i = 0; i < volumes; i++)
                        {
                            if (m_options.Verificationlevel == VerificationLevel.Signature || m_options.Verificationlevel == VerificationLevel.Full)
                            {
                                try
                                {
                                    using(Utility.TempFile tf = new Duplicati.Library.Utility.TempFile())
                                        backend.Get(me.Volumes[i].Key, mf, tf, mf.SignatureHashes[i]);
                                    results.Add(new KeyValuePair<BackupEntryBase, Exception>(me.Volumes[i].Key, null));
                                }
                                catch (Exception ex) { results.Add(new KeyValuePair<BackupEntryBase,Exception>(me.Volumes[i].Key, ex)); }
                            }

                            if (m_options.Verificationlevel == VerificationLevel.Full)
                            {
                                try
                                {
                                    using(Utility.TempFile tf = new Duplicati.Library.Utility.TempFile())
                                        backend.Get(me.Volumes[i].Value, mf, tf, mf.ContentHashes[i]);
                                    results.Add(new KeyValuePair<BackupEntryBase, Exception>(me.Volumes[i].Value, null));
                                }
                                catch (Exception ex) { results.Add(new KeyValuePair<BackupEntryBase,Exception>(me.Volumes[i].Value, ex)); }

                            }
                        }
                    }

                }

                //Re-generate verification file
                if (m_options.CreateVerificationFile)
                {
                    //Stop any async operations
                    if (m_options.AsynchronousUpload)
                        backend.ExtractPendingUploads();

                    VerificationFile vf = new VerificationFile(entries, new FilenameStrategy(m_options));
                    using (Utility.TempFile tf = new Duplicati.Library.Utility.TempFile())
                    {
                        vf.Save(tf);
                        tf.Protected = true;
                        backend.Put(new VerificationEntry(entries[entries.Count - 1].Time), tf);
                    }
                }

            }

            return results;
        }
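
The loop above only ever uses the temporary file as a scratch target while the backend checks each volume's hash. Below is a minimal sketch of that download-into-temp-and-verify shape, assuming only the TempFile members already visible in these examples (implicit conversion to a path string, cleanup when the using block ends) plus the standard .NET SHA-256 API; VerifyVolume, the download callback and the base64 hash format are illustrative choices, not Duplicati API.

// Sketch only: 'download' stands in for whatever produces the file contents
// (e.g. a backend.Get call); it is not a Duplicati API.
private static bool VerifyVolume(System.Action<string> download, string expectedBase64Hash)
{
    using (Duplicati.Library.Utility.TempFile tf = new Duplicati.Library.Utility.TempFile())
    {
        download(tf); // TempFile converts implicitly to its path string

        using (System.Security.Cryptography.SHA256 sha = System.Security.Cryptography.SHA256.Create())
        using (System.IO.Stream fs = System.IO.File.OpenRead(tf))
        {
            string actual = System.Convert.ToBase64String(sha.ComputeHash(fs));
            return string.Equals(actual, expectedBase64Hash, System.StringComparison.Ordinal);
        }
    } // the temporary file is expected to be cleaned up when the TempFile is disposed
}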
Exemplo n.º 13
0
        /// <summary>
        /// Restores control files added to a backup.
        /// </summary>
        /// <param name="target">The folder into which to restore the files</param>
        /// <returns>A restore report</returns>
        public string RestoreControlFiles(string target)
        {
            RestoreStatistics rs = new RestoreStatistics(DuplicatiOperationMode.RestoreControlfiles);
            SetupCommonOptions(rs);

            BackendWrapper backend = null;

            using (new Logging.Timer("Restore control files from " + m_backend + " to " + target))
            {
                try
                {
                    if (OperationStarted != null)
                        OperationStarted(this, DuplicatiOperation.Restore, rs.OperationMode, 0, -1, Strings.Interface.StatusStarted, "");

                    backend = new BackendWrapper(rs, m_backend, m_options);

                    List<ManifestEntry> attempts = backend.GetBackupSets();

                    List<ManifestEntry> flatlist = new List<ManifestEntry>();
                    foreach (ManifestEntry be in attempts)
                    {
                        flatlist.Add(be);
                        flatlist.AddRange(be.Incrementals);
                    }

                    flatlist.Reverse();

                    string prefix = Utility.Utility.AppendDirSeparator(RSync.RSyncDir.CONTROL_ROOT);

                    foreach (ManifestEntry be in flatlist)
                    {
                        if (be.Volumes.Count > 0)
                            using(Utility.TempFile z = new Duplicati.Library.Utility.TempFile())
                            {
                                OperationProgress(this, DuplicatiOperation.Restore, rs.OperationMode, 0, -1, string.Format(Strings.Interface.StatusReadingIncrementalFile, be.Volumes[0].Key.Filename), "");

                                Manifestfile mf = GetManifest(backend, be);

                                OperationProgress(this, DuplicatiOperation.Restore, rs.OperationMode, 0, -1, string.Format(Strings.Interface.StatusReadingIncrementalFile, be.Volumes[0].Key.Filename), "");

                                using (new Logging.Timer("Get " + be.Volumes[0].Key.Filename))
                                    backend.Get(be.Volumes[0].Key, mf, z, mf.SignatureHashes == null ? null : mf.SignatureHashes[0]);

                                using(Library.Interface.ICompression fz = DynamicLoader.CompressionLoader.GetModule(be.Volumes[0].Key.Compression, z, m_options.RawOptions))
                                {
                                    bool any = false;
                                    foreach (string f in fz.ListFiles(prefix))
                                    {
                                        any = true;
                                        using (System.IO.Stream s1 = fz.OpenRead(f))
                                        using (System.IO.Stream s2 = System.IO.File.Create(System.IO.Path.Combine(target, f.Substring(prefix.Length))))
                                            Utility.Utility.CopyStream(s1, s2);
                                    }

                                    if (any)
                                        break;

                                    rs.LogError(string.Format(Strings.Interface.FailedToFindControlFilesMessage, be.Volumes[0].Key.Filename), null);
                                }
                            }
                    }

                }
                finally
                {
                    if (backend != null)
                        backend.Dispose();

                    if (OperationCompleted != null)
                        OperationCompleted(this, DuplicatiOperation.Restore, rs.OperationMode, 100, -1, Strings.Interface.StatusCompleted, "");
                }
            }

            rs.EndTime = DateTime.Now;

            return rs.ToString();
        }
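
The work inside the compression block is the part that actually restores the control files. As a condensed restatement (a sketch, not additional Duplicati API: it only reuses the ICompression.ListFiles/OpenRead calls and Utility.CopyStream already shown above, and ExtractPrefixedFiles is an illustrative name), extracting every archive entry under a prefix into a target folder looks like this:

// Illustrative helper; mirrors the inner loop of the example above.
private static bool ExtractPrefixedFiles(Duplicati.Library.Interface.ICompression archive, string prefix, string target)
{
    bool any = false;
    foreach (string f in archive.ListFiles(prefix))
    {
        any = true;
        string destination = System.IO.Path.Combine(target, f.Substring(prefix.Length));
        using (System.IO.Stream source = archive.OpenRead(f))
        using (System.IO.Stream sink = System.IO.File.Create(destination))
            Duplicati.Library.Utility.Utility.CopyStream(source, sink);
    }
    return any; // false: the volume contained no control files under the prefix
}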
Exemplo n.º 14
0
        public string Restore(string[] target)
        {
            RestoreStatistics rs = new RestoreStatistics(DuplicatiOperationMode.Restore);
            SetupCommonOptions(rs);

            m_progress = 0;
            BackendWrapper backend = null;
            m_restorePatches = 0;

            using (new Logging.Timer("Restore from " + m_backend + " to " + string.Join(System.IO.Path.PathSeparator.ToString(), target)))
            {
                try
                {
                    if (OperationStarted != null)
                        OperationStarted(this, DuplicatiOperation.Restore, rs.OperationMode, -1, -1, Strings.Interface.StatusStarted, "");
                    OperationProgress(this, DuplicatiOperation.Restore, rs.OperationMode, -1, -1, Strings.Interface.StatusStarted, "");

                    Utility.FilenameFilter filter = m_options.Filter;

                    //The filter is preferred if both a file list and a filter are specified
                    if (!m_options.HasFilter && !string.IsNullOrEmpty(m_options.FileToRestore))
                    {
                        List<Utility.IFilenameFilter> list = new List<Duplicati.Library.Utility.IFilenameFilter>();
                        list.Add(new Utility.FilelistFilter(true, m_options.FileToRestore.Split(System.IO.Path.PathSeparator)));
                        list.Add(new Utility.RegularExpressionFilter(false, ".*"));

                        filter = new Duplicati.Library.Utility.FilenameFilter(list);
                    }

                    backend = new BackendWrapper(rs, m_backend, m_options);
                    backend.ProgressEvent += new Duplicati.Library.Main.RSync.RSyncDir.ProgressEventDelegate(BackupTransfer_ProgressEvent);

                    OperationProgress(this, DuplicatiOperation.Restore, rs.OperationMode, (int)(m_progress * 100), -1, Strings.Interface.StatusReadingIncrementals, "");

                    ManifestEntry bestFit = backend.GetBackupSet(m_options.RestoreTime);

                    //We will need all the manifests downloaded anyway
                    if (!m_options.DontReadManifests)
                    {
                        if (bestFit.Incrementals.Count > 0)
                            VerifyManifestChain(backend, bestFit.Incrementals[bestFit.Incrementals.Count - 1]);
                        else
                            VerifyManifestChain(backend, bestFit);

                        OperationProgress(this, DuplicatiOperation.Restore, rs.OperationMode, (int)(m_progress * 100), -1, Strings.Interface.StatusReadingIncrementals, "");
                    }

                    m_progress = INCREMENAL_COST;

                    List<ManifestEntry> entries = new List<ManifestEntry>();
                    entries.Add(bestFit);
                    entries.AddRange(bestFit.Incrementals);
                    int patchno = 0;

                    foreach (ManifestEntry be in entries)
                        m_restorePatches += be.Volumes.Count;

                    Manifestfile rootManifest = GetManifest(backend, bestFit);

                    int sourceDirCount = (rootManifest.SourceDirs == null || rootManifest.SourceDirs.Length == 0) ? 1 : rootManifest.SourceDirs.Length;

                    //After reading the first manifest, we know the source folder count
                    if ((rootManifest.SourceDirs == null || rootManifest.SourceDirs.Length == 0) && target.Length > 1)
                    {
                        //V1 support
                        rs.LogWarning(string.Format(Strings.Interface.TooManyTargetFoldersWarning, 1, target.Length), null);
                        Array.Resize(ref target, 1);
                    }
                    else if (target.Length > sourceDirCount)
                    {
                        //If we get too many, we can just cut them off
                        rs.LogWarning(string.Format(Strings.Interface.TooManyTargetFoldersWarning, sourceDirCount, target.Length), null);
                        Array.Resize(ref target, rootManifest.SourceDirs.Length);
                    }
                    else if (target.Length != 1 && target.Length < sourceDirCount)
                    {
                        //If we get too few, we have to bail
                        throw new Exception(string.Format(Strings.Interface.TooFewTargetFoldersError, sourceDirCount, target.Length));
                    }
                    else if (target.Length == 1 && sourceDirCount > 1)
                    {
                        //If there is just one target folder, we automatically compose target subfolders
                        string[] newtargets = new string[rootManifest.SourceDirs.Length];

                        List<string> suggestions = new List<string>();
                        for (int i = 0; i < rootManifest.SourceDirs.Length; i++)
                        {
                            string s = rootManifest.SourceDirs[i];
                            //HACK: We use a leading / in the path name to detect source OS
                            // all paths are absolute, so this detects all unix like systems
                            string dirSepChar = s.StartsWith("/") ? "/" : "\\";

                            if (s.EndsWith(dirSepChar))
                                s = s.Substring(0, s.Length - 1);

                            int lix = s.LastIndexOf(dirSepChar);
                            if (lix < 0 || lix + 1 >= s.Length)
                                s = i.ToString();
                            else
                                s = s.Substring(lix + 1);

                            foreach (char c in System.IO.Path.GetInvalidFileNameChars())
                                s = s.Replace(c, '_');

                            suggestions.Add(s);
                        }

                        Dictionary<string, int> duplicates = new Dictionary<string, int>(Library.Utility.Utility.ClientFilenameStringComparer);
                        for (int i = 0; i < suggestions.Count; i++)
                            if (duplicates.ContainsKey(suggestions[i]))
                                duplicates[suggestions[i]]++;
                            else
                                duplicates[suggestions[i]] = 1;

                        for (int i = 0; i < newtargets.Length; i++)
                        {
                            string suffix = duplicates[suggestions[i]] > 1 ? i.ToString() : suggestions[i];
                            newtargets[i] = System.IO.Path.Combine(target[0], suffix);
                        }

                        target = newtargets;
                    }

                    //Make sure all targets exist
                    foreach(string s in target)
                        if (!System.IO.Directory.Exists(s))
                            System.IO.Directory.CreateDirectory(s);

                    using (RSync.RSyncDir sync = new Duplicati.Library.Main.RSync.RSyncDir(target, rs, filter))
                    {
                        sync.ProgressEvent += new Duplicati.Library.Main.RSync.RSyncDir.ProgressEventDelegate(RestoreRSyncDir_ProgressEvent);

                        foreach (ManifestEntry be in entries)
                        {
                            m_progress = ((1.0 - INCREMENAL_COST) * (patchno / (double)m_restorePatches)) + INCREMENAL_COST;

                            CheckLiveControl();

                            Manifestfile manifest = be == bestFit ? rootManifest : GetManifest(backend, be);

                            CheckLiveControl();

                            foreach (KeyValuePair<SignatureEntry, ContentEntry> vol in be.Volumes)
                            {
                                ContentEntry contentVol = vol.Value;
                                SignatureEntry signatureVol = vol.Key;

                                m_progress = ((1.0 - INCREMENAL_COST) * (patchno / (double)m_restorePatches)) + INCREMENAL_COST;

                                //Skip content volumes that are not listed in the manifest
                                if (manifest.ContentHashes != null && contentVol.Volumenumber > manifest.ContentHashes.Count)
                                {
                                    Logging.Log.WriteMessage(string.Format(Strings.Interface.SkippedContentVolumeLogMessage, contentVol.Volumenumber), Duplicati.Library.Logging.LogMessageType.Warning);
                                    rs.LogWarning(string.Format(Strings.Interface.SkippedContentVolumeLogMessage, contentVol.Volumenumber), null);
                                    patchno++;
                                    continue;
                                }

                                using (Utility.TempFile patchzip = new Duplicati.Library.Utility.TempFile())
                                {
                                    OperationProgress(this, DuplicatiOperation.Restore, rs.OperationMode, (int)(m_progress * 100), -1, string.Format(Strings.Interface.StatusPatching, patchno + 1), "");

                                    CheckLiveControl();

                                    if (m_options.HasFilter || !string.IsNullOrEmpty(m_options.FileToRestore))
                                    {
                                        bool hasFiles = false;

                                        using (Utility.TempFile sigFile = new Duplicati.Library.Utility.TempFile())
                                        {
                                            OperationProgress(this, DuplicatiOperation.Restore, rs.OperationMode, (int)(m_progress * 100), -1, string.Format(Strings.Interface.StatusDownloadingSignatureVolume, patchno + 1), "");

                                            try
                                            {
                                                using (new Logging.Timer("Get " + signatureVol.Filename))
                                                    backend.Get(signatureVol, manifest, sigFile, manifest.SignatureHashes == null ? null : manifest.SignatureHashes[signatureVol.Volumenumber - 1]);
                                            }
                                            catch (BackendWrapper.HashMismathcException hme)
                                            {
                                                hasFiles = true;
                                                rs.LogError(string.Format(Strings.Interface.FileHashFailure, hme.Message), hme);
                                            }

                                            if (!hasFiles)
                                                using (Library.Interface.ICompression patch = DynamicLoader.CompressionLoader.GetModule(signatureVol.Compression, sigFile, m_options.RawOptions))
                                                {
                                                    foreach (KeyValuePair<RSync.RSyncDir.PatchFileType, string> k in sync.ListPatchFiles(patch))
                                                        if (filter.ShouldInclude("", System.IO.Path.DirectorySeparatorChar.ToString() + k.Value))
                                                        {
                                                            //TODO: Perhaps a bit much to download the content archive
                                                            // if the file is only marked for deletion?
                                                            hasFiles = true;
                                                            break;
                                                        }
                                                }
                                        }

                                        if (!hasFiles)
                                        {
                                            //Avoid downloading the content file
                                            patchno++;
                                            continue;
                                        }
                                    }

                                    OperationProgress(this, DuplicatiOperation.Restore, rs.OperationMode, (int)(m_progress * 100), -1, string.Format(Strings.Interface.StatusDownloadingContentVolume, patchno + 1), "");

                                    using (new Logging.Timer("Get " + contentVol.Filename))
                                        backend.Get(contentVol, manifest, patchzip, manifest.ContentHashes == null ? null : manifest.ContentHashes[contentVol.Volumenumber - 1]);

                                    OperationProgress(this, DuplicatiOperation.Restore, rs.OperationMode, (int)(m_progress * 100), -1, string.Format(Strings.Interface.StatusPatching, patchno + 1), "");

                                    using (new Logging.Timer((patchno == 0 ? "Full restore to: " : "Incremental restore " + patchno.ToString() + " to: ") + string.Join(System.IO.Path.PathSeparator.ToString(), target)))
                                    using (Library.Interface.ICompression patch = DynamicLoader.CompressionLoader.GetModule(contentVol.Compression, patchzip, m_options.RawOptions))
                                        sync.Patch(target, patch);
                                }
                                patchno++;
                            }

                            //Make sure there are no partial files, as partial files are not allowed to span backup sets
                            sync.FinalizeRestore();
                        }
                    }
                }
                finally
                {
                    if (backend != null)
                        backend.Dispose();

                    if (OperationCompleted != null)
                        OperationCompleted(this, DuplicatiOperation.Restore, rs.OperationMode, 100, -1, Strings.Interface.StatusCompleted, "");

                    OperationProgress(this, DuplicatiOperation.Restore, rs.OperationMode, 100, -1, Strings.Interface.StatusCompleted, "");
                }
            }

            rs.EndTime = DateTime.Now;
            return rs.ToString();
        }
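
The block guarded by target.Length == 1 && sourceDirCount > 1 is the most intricate part of this example: it derives one restore subfolder per source directory and falls back to an index when names would collide. Below is a distilled sketch of that naming rule; it is a restatement under stated assumptions, not Duplicati code, and in particular it substitutes StringComparer.OrdinalIgnoreCase for the Library.Utility.Utility.ClientFilenameStringComparer used above.

// Distilled restatement of the subfolder-naming rule; SuggestTargetFolders is
// an illustrative name and the duplicate check deliberately uses
// StringComparer.OrdinalIgnoreCase instead of ClientFilenameStringComparer.
private static string[] SuggestTargetFolders(string[] sourceDirs, string restoreRoot)
{
    string[] suggestions = new string[sourceDirs.Length];
    for (int i = 0; i < sourceDirs.Length; i++)
    {
        string s = sourceDirs[i];

        //A leading / marks a unix-style source path (same heuristic as in the example)
        string dirSepChar = s.StartsWith("/") ? "/" : "\\";
        if (s.EndsWith(dirSepChar))
            s = s.Substring(0, s.Length - 1);

        int lix = s.LastIndexOf(dirSepChar);
        s = (lix < 0 || lix + 1 >= s.Length) ? i.ToString() : s.Substring(lix + 1);

        foreach (char c in System.IO.Path.GetInvalidFileNameChars())
            s = s.Replace(c, '_');

        suggestions[i] = s;
    }

    Dictionary<string, int> duplicates = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);
    for (int i = 0; i < suggestions.Length; i++)
        if (duplicates.ContainsKey(suggestions[i]))
            duplicates[suggestions[i]]++;
        else
            duplicates[suggestions[i]] = 1;

    string[] targets = new string[sourceDirs.Length];
    for (int i = 0; i < targets.Length; i++)
        targets[i] = System.IO.Path.Combine(restoreRoot,
            duplicates[suggestions[i]] > 1 ? i.ToString() : suggestions[i]);

    return targets;
}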
Exemplo n.º 15
0
        public string[] ListSourceFolders()
        {
            RestoreStatistics rs = new RestoreStatistics(DuplicatiOperationMode.ListSourceFolders);
            SetupCommonOptions(rs);

            if (m_options.DontReadManifests)
                throw new Exception(Strings.Interface.ManifestsMustBeRead);

            DateTime timelimit = m_options.RestoreTime;

            if (OperationStarted != null)
                OperationStarted(this, DuplicatiOperation.List, rs.OperationMode, 0, -1, Strings.Interface.StatusStarted, "");

            string[] res;

            using (BackendWrapper backend = new BackendWrapper(rs, m_backend, m_options))
            using (Utility.TempFile mfile = new Duplicati.Library.Utility.TempFile())
            {
                ManifestEntry bestFit = backend.GetBackupSet(timelimit);

                backend.Get(bestFit, null, mfile, null);
                res = new Manifestfile(mfile, m_options.SkipFileHashChecks).SourceDirs;
            }

            if (OperationCompleted != null)
                OperationCompleted(this, DuplicatiOperation.List, rs.OperationMode, 100, -1, Strings.Interface.StatusCompleted, "");

            return res;
        }
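
This example hands a TempFile to backend.Get and then reads the downloaded manifest through its path. A minimal sketch of that consume-through-a-temp-file shape, assuming only what the examples show (implicit path conversion, cleanup when the using block ends); ReadThroughTempFile and the fillFile callback are illustrative names, not Duplicati API.

// Illustrative helper; the point is only the TempFile lifetime: the file exists
// for the duration of the using block and is expected to be cleaned up
// afterwards unless Protected is set, exactly as in the examples above.
private static string ReadThroughTempFile(System.Action<string> fillFile)
{
    using (Duplicati.Library.Utility.TempFile mfile = new Duplicati.Library.Utility.TempFile())
    {
        fillFile(mfile);                            // e.g. backend.Get(bestFit, null, mfile, null)
        return System.IO.File.ReadAllText(mfile);   // TempFile converts implicitly to its path
    }
}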
Exemplo n.º 16
0
        /// <summary>
        /// Reads through a backup and finds the most recent backup entry that contains each of the specified files
        /// </summary>
        /// <returns>A list mapping each requested file to the time of the most recent backup that contains it</returns>
        public List<KeyValuePair<string, DateTime>> FindLastFileVersion()
        {
            CommunicationStatistics stats = new CommunicationStatistics(DuplicatiOperationMode.FindLastFileVersion);
            SetupCommonOptions(stats);

            if (m_options.DontReadManifests)
                throw new Exception(Strings.Interface.ManifestsMustBeRead);

            if (string.IsNullOrEmpty(m_options.FileToRestore))
                throw new Exception(Strings.Interface.NoFilesGivenError);

            string[] filesToFind = m_options.FileToRestore.Split(System.IO.Path.PathSeparator);
            KeyValuePair<string, DateTime>[] results = new KeyValuePair<string, DateTime>[filesToFind.Length];
            for (int i = 0; i < results.Length; i++)
                results[i] = new KeyValuePair<string, DateTime>(filesToFind[i], new DateTime(0));

            using (BackendWrapper backend = new BackendWrapper(stats, m_backend, m_options))
            {
                //Extract the full backup set list
                List<ManifestEntry> fulls = backend.GetBackupSets();

                //Flatten the list
                List<ManifestEntry> workList = new List<ManifestEntry>();

                //The list is oldest-first, but this function works newest-first
                fulls.Reverse();
                foreach (ManifestEntry f in fulls)
                {
                    f.Incrementals.Reverse();

                    workList.AddRange(f.Incrementals);
                    workList.Add(f);
                }

                bool warned_manifest_v1 = false;

                foreach (ManifestEntry mf in workList)
                {
                    List<Manifestfile.HashEntry> signatureHashes = null;
                    Manifestfile mfi;

                    using(Utility.TempFile tf = new Duplicati.Library.Utility.TempFile())
                    {
                        backend.Get(mf, null, tf, null);
                        mfi = new Manifestfile(tf, m_options.SkipFileHashChecks);
                        if (!m_options.SkipFileHashChecks)
                            signatureHashes = mfi.SignatureHashes;
                    }

                    //If there are no volumes, don't stop here
                    bool any_unmatched = true;

                    if (stats != null && !warned_manifest_v1 && (mfi.SourceDirs == null || mfi.SourceDirs.Length == 0))
                    {
                        warned_manifest_v1 = true;
                        stats.LogWarning(Strings.Interface.ManifestVersionRequiresRelativeNamesWarning, null);
                    }

                    foreach(KeyValuePair<SignatureEntry, ContentEntry> e in mf.Volumes)
                        using (Utility.TempFile tf = new Duplicati.Library.Utility.TempFile())
                        {
                            //Skip non-approved signature files
                            if (signatureHashes != null && e.Key.Volumenumber > signatureHashes.Count)
                            {
                                stats.LogWarning(string.Format(Strings.Interface.SkippedUnlistedSignatureFileWarning, e.Key.Filename), null);
                                continue;
                            }

                            backend.Get(e.Key, mfi, tf, signatureHashes == null ? null : signatureHashes[e.Key.Volumenumber - 1]);

                            any_unmatched = false;

                            bool[] found = RSync.RSyncDir.ContainsFile(mfi, filesToFind, DynamicLoader.CompressionLoader.GetModule(e.Key.Compression, tf, m_options.RawOptions));

                            for (int i = 0; i < filesToFind.Length; i++)
                            {
                                if (results[i].Value.Ticks == 0 && found[i])
                                    results[i] = new KeyValuePair<string, DateTime>(results[i].Key, mf.Time);
                                else
                                    any_unmatched = true;
                            }

                            if (!any_unmatched)
                                break;
                        }

                    if (!any_unmatched)
                        break;
                }

                return new List<KeyValuePair<string,DateTime>>(results);
            }
        }
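
Stripped of the backend and TempFile plumbing, the search above is: walk the backups newest-first and record the first time each requested file is reported present, stopping once everything has been matched. A schematic, self-contained restatement under that assumption follows; FindNewestVersions is an illustrative name, and the backups are assumed to arrive as ready-made name sets rather than signature volumes.

// Schematic restatement only: the real example builds the per-backup name sets
// by downloading each signature volume into a TempFile and calling
// RSync.RSyncDir.ContainsFile.
private static KeyValuePair<string, DateTime>[] FindNewestVersions(
    List<KeyValuePair<DateTime, HashSet<string>>> backupsNewestFirst, string[] filesToFind)
{
    KeyValuePair<string, DateTime>[] results = new KeyValuePair<string, DateTime>[filesToFind.Length];
    for (int i = 0; i < results.Length; i++)
        results[i] = new KeyValuePair<string, DateTime>(filesToFind[i], new DateTime(0));

    foreach (KeyValuePair<DateTime, HashSet<string>> backup in backupsNewestFirst)
    {
        bool anyUnmatched = false;
        for (int i = 0; i < results.Length; i++)
        {
            if (results[i].Value.Ticks == 0 && backup.Value.Contains(results[i].Key))
                results[i] = new KeyValuePair<string, DateTime>(results[i].Key, backup.Key);
            else if (results[i].Value.Ticks == 0)
                anyUnmatched = true;
        }

        if (!anyUnmatched)
            break; //every requested file has been located
    }

    return results;
}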