Example #1
        /// <summary>
        /// Records an informational message. Nested results forward to the parent; the root
        /// instance logs the message, stores it, notifies the message sink and writes it to the database.
        /// </summary>
        public void AddMessage(string message)
        {
            if (m_parent != null)
            {
                m_parent.AddMessage(message);
            }
            else
            {
                lock (m_lock)
                {
                    Logging.Log.WriteMessage(message, Duplicati.Library.Logging.LogMessageType.Information);
                    m_messages.Add(message);

                    if (MessageSink != null)
                    {
                        MessageSink.MessageEvent(message);
                    }

                    if (m_db != null && !m_db.IsDisposed)
                    {
                        LogDbMessage("Message", message, null);
                    }
                }
            }
        }
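Example #1 shows the delegation pattern used throughout these result objects: a nested result forwards every call to its parent, and only the root instance actually records the message, notifies the sink and writes to the database. Below is a minimal, self-contained sketch of that pattern; the ResultCollector and IMessageSink names are illustrative stand-ins, not Duplicati's actual types.

using System;
using System.Collections.Generic;

public interface IMessageSink
{
    void MessageEvent(string message);
}

public class ResultCollector
{
    private readonly ResultCollector m_parent;
    private readonly object m_lock = new object();
    private readonly List<string> m_messages = new List<string>();

    public IMessageSink MessageSink { get; set; }

    public ResultCollector(ResultCollector parent = null)
    {
        m_parent = parent;
    }

    public void AddMessage(string message)
    {
        // Nested results forward to the root so all messages end up in one place
        if (m_parent != null)
        {
            m_parent.AddMessage(message);
            return;
        }

        lock (m_lock)
        {
            m_messages.Add(message);
            MessageSink?.MessageEvent(message);
        }
    }
}

A caller would construct one root collector and hand child collectors (built with the root as parent) to sub-operations, so every message funnels into a single list.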
Example #2
        /// <summary>
        /// Records an error. Nested results forward to the parent; on the root instance the
        /// m_is_reporting flag prevents re-entrant calls from recursing endlessly.
        /// </summary>
        public void AddError(string message, Exception ex)
        {
            if (m_parent != null)
            {
                m_parent.AddError(message, ex);
            }
            else
            {
                lock (Logging.Log.Lock)
                {
                    if (m_is_reporting)
                    {
                        return;
                    }

                    try
                    {
                        m_is_reporting = true;
                        Logging.Log.WriteMessage(message, Duplicati.Library.Logging.LogMessageType.Error, ex);

                        var s = ex == null ? message : string.Format("{0} => {1}", message, VerboseErrors ? ex.ToString() : ex.Message);
                        m_errors.Add(s);

                        if (MessageSink != null)
                        {
                            MessageSink.ErrorEvent(message, ex);
                        }

                        lock (m_lock)
                            if (m_db != null && !m_db.IsDisposed)
                            {
                                LogDbMessage("Error", message, ex);
                            }
                    }
                    finally
                    {
                        m_is_reporting = false;
                    }
                }
            }
        }
Example #3
        public void AddMessage(string message)
        {
            if (m_parent != null)
            {
                m_parent.AddMessage(message);
            }
            else
            {
                lock (Logging.Log.Lock)
                {
                    if (m_is_reporting)
                    {
                        return;
                    }

                    try
                    {
                        m_is_reporting = true;
                        Logging.Log.WriteMessage(message, Duplicati.Library.Logging.LogMessageType.Information, null);

                        m_messages.Add(message);

                        if (MessageSink != null)
                        {
                            MessageSink.MessageEvent(message);
                        }

                        lock (m_lock)
                            if (m_db != null && !m_db.IsDisposed)
                            {
                                LogDbMessage("Message", message, null);
                            }
                    }
                    finally
                    {
                        m_is_reporting = false;
                    }
                }
            }
        }
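Examples #2 and #3 add a re-entrancy guard on top of the same delegation: the work runs under the shared log lock, and the m_is_reporting flag turns the method into a no-op if reporting the message itself triggers another call (for instance, if a message sink or the database logger reports an error of its own). A reduced sketch of just that guard, with illustrative names:

using System;

public class GuardedReporter
{
    // Stand-in for the shared Logging.Log.Lock that serializes all log output
    private static readonly object s_logLock = new object();
    private bool m_is_reporting;

    public void AddError(string message, Exception ex)
    {
        lock (s_logLock)
        {
            // A call triggered by our own reporting lands here and returns immediately,
            // which is what prevents unbounded recursion
            if (m_is_reporting)
                return;

            try
            {
                m_is_reporting = true;
                Console.Error.WriteLine("{0} => {1}", message, ex == null ? "" : ex.Message);
                // ... forward to sinks / database here; anything they report re-enters
                // AddError and is stopped by the flag above
            }
            finally
            {
                m_is_reporting = false;
            }
        }
    }
}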
Example #4
        public void WriteMessage(LogEntry entry)
        {
            if (m_parent != null)
            {
                m_parent.WriteMessage(entry);
            }
            else
            {
                switch (entry.Level)
                {
                case LogMessageType.Error:
                    m_errors.Add(entry.AsString(false));
                    break;

                case LogMessageType.Warning:
                    m_warnings.Add(entry.AsString(false));
                    break;

                case LogMessageType.Information:
                    m_messages.Add(entry.AsString(false));
                    break;
                }
            }
        }
Example #5
        public void AddError(string message, Exception ex)
        {
            if (m_parent != null)
            {
                m_parent.AddError(message, ex);
            }
            else
            {
                Logging.Log.WriteMessage(message, Duplicati.Library.Logging.LogMessageType.Error, ex);

                var s = ex == null ? message : string.Format("{0} => {1}", message, VerboseErrors ? ex.ToString() : ex.Message);
                m_errors.Add(s);

                if (MessageSink != null)
                {
                    MessageSink.ErrorEvent(message, ex);
                }

                if (m_db != null && !m_db.IsDisposed)
                {
                    LogDbMessage("Error", message, ex);
                }
            }
        }
Example #6
        /// <summary>
        /// Examines a single filesystem entry (symlink, folder or file) and records it in the
        /// backup output; the return value controls whether enumeration recurses into the entry.
        /// </summary>
        private bool HandleFilesystemEntry(string path, System.IO.FileAttributes attributes)
        {
            // If we lost the connection, there is no point in keeping on processing
            if (m_backend.HasDied)
                throw m_backend.LastException;

            try
            {
                m_result.OperationProgressUpdater.StartFile(path, -1);

                if (m_backendLogFlushTimer < DateTime.Now)
                {
                    m_backendLogFlushTimer = DateTime.Now.Add(FLUSH_TIMESPAN);
                    m_backend.FlushDbMessages(m_database, null);
                }

                if ((attributes & FileAttributes.ReparsePoint) == FileAttributes.ReparsePoint)
                {
                    if (m_options.SymlinkPolicy == Options.SymlinkStrategy.Ignore)
                    {
                        m_result.AddVerboseMessage("Ignoring symlink {0}", path);
                        return false;
                    }

                    if (m_options.SymlinkPolicy == Options.SymlinkStrategy.Store)
                    {
                        Dictionary<string, string> metadata;

                        if (m_options.StoreMetadata)
                        {
                            metadata = m_snapshot.GetMetadata(path);
                            if (metadata == null)
                                metadata = new Dictionary<string, string>();

                            if (!metadata.ContainsKey("CoreAttributes"))
                                metadata["CoreAttributes"] = attributes.ToString();
                            if (!metadata.ContainsKey("CoreLastWritetime"))
                                metadata["CoreLastWritetime"] = m_snapshot.GetLastWriteTimeUtc(path).Ticks.ToString();
                        }
                        else
                        {
                            metadata = new Dictionary<string, string>();
                        }

                        if (!metadata.ContainsKey("CoreSymlinkTarget"))
                            metadata["CoreSymlinkTarget"] = m_snapshot.GetSymlinkTarget(path);

                        var metahash = Utility.WrapMetadata(metadata, m_options);
                        AddSymlinkToOutput(path, DateTime.UtcNow, metahash);

                        m_result.AddVerboseMessage("Stored symlink {0}", path);
                        //Do not recurse symlinks
                        return false;
                    }
                }

                if ((attributes & FileAttributes.Directory) == FileAttributes.Directory)
                {
                    IMetahash metahash;

                    if (m_options.StoreMetadata)
                    {
                        Dictionary<string, string> metadata = m_snapshot.GetMetadata(path);
                        if (metadata == null)
                            metadata = new Dictionary<string, string>();

                        if (!metadata.ContainsKey("CoreAttributes"))
                            metadata["CoreAttributes"] = attributes.ToString();
                        if (!metadata.ContainsKey("CoreLastWritetime"))
                            metadata["CoreLastWritetime"] = m_snapshot.GetLastWriteTimeUtc(path).Ticks.ToString();
                        metahash = Utility.WrapMetadata(metadata, m_options);
                    }
                    else
                    {
                        metahash = EMPTY_METADATA;
                    }

                    m_result.AddVerboseMessage("Adding directory {0}", path);
                    AddFolderToOutput(path, DateTime.UtcNow, metahash);
                    return true;
                }

                m_result.OperationProgressUpdater.UpdatefilesProcessed(++m_result.ExaminedFiles, m_result.SizeOfExaminedFiles);

                bool changed = false;

                // The time we scan
                DateTime scantime = DateTime.UtcNow;
                // Last scan time
                DateTime oldScanned;
                // Last file modification
                DateTime lastModified = m_snapshot.GetLastWriteTimeUtc(path);
                var oldId = m_database.GetFileEntry(path, out oldScanned);

                long filestatsize = m_snapshot.GetFileSize(path);
                if ((oldId < 0 || m_options.DisableFiletimeCheck || LocalDatabase.NormalizeDateTime(lastModified) >= oldScanned) && (m_options.SkipFilesLargerThan == long.MaxValue || m_options.SkipFilesLargerThan == 0 || filestatsize < m_options.SkipFilesLargerThan))
                {
                    m_result.AddVerboseMessage("Checking file for changes {0}", path);
                    m_result.OpenedFiles++;

                    long filesize = 0;
                    IMetahash metahashandsize;
                    if (m_options.StoreMetadata)
                    {
                        Dictionary<string, string> metadata = m_snapshot.GetMetadata(path);
                        if (metadata == null)
                            metadata = new Dictionary<string, string>();

                        if (!metadata.ContainsKey("CoreAttributes"))
                            metadata["CoreAttributes"] = attributes.ToString();
                        if (!metadata.ContainsKey("CoreLastWritetime"))
                            metadata["CoreLastWritetime"] = lastModified.Ticks.ToString();

                        metahashandsize = Utility.WrapMetadata(metadata, m_options);
                    }
                    else
                    {
                        metahashandsize = EMPTY_METADATA;
                    }

                    var hint = m_options.GetCompressionHintFromFilename(path);
                    var oldHash = oldId < 0 ? null : m_database.GetFileHash(oldId);

                    using (var blocklisthashes = new Library.Utility.FileBackedStringList())
                    using (var hashcollector = new Library.Utility.FileBackedStringList())
                    {
                        using (var fs = new Blockprocessor(m_snapshot.OpenRead(path), m_blockbuffer))
                        {
                            try { m_result.OperationProgressUpdater.StartFile(path, fs.Length); }
                            catch (Exception ex) { m_result.AddWarning(string.Format("Failed to read file length for file {0}", path), ex); }

                            int blocklistoffset = 0;

                            m_filehasher.Initialize();

                            var offset = 0;
                            var remaining = fs.Readblock();

                            do
                            {
                                var size = Math.Min(m_blocksize, remaining);

                                m_filehasher.TransformBlock(m_blockbuffer, offset, size, m_blockbuffer, offset);
                                var blockkey = m_blockhasher.ComputeHash(m_blockbuffer, offset, size);
                                if (m_blocklistbuffer.Length - blocklistoffset < blockkey.Length)
                                {
                                    var blkey = Convert.ToBase64String(m_blockhasher.ComputeHash(m_blocklistbuffer, 0, blocklistoffset));
                                    blocklisthashes.Add(blkey);
                                    AddBlockToOutput(blkey, m_blocklistbuffer, 0, blocklistoffset, CompressionHint.Noncompressible, true);
                                    blocklistoffset = 0;
                                }

                                Array.Copy(blockkey, 0, m_blocklistbuffer, blocklistoffset, blockkey.Length);
                                blocklistoffset += blockkey.Length;

                                var key = Convert.ToBase64String(blockkey);
                                AddBlockToOutput(key, m_blockbuffer, offset, size, hint, false);
                                hashcollector.Add(key);
                                filesize += size;

                                m_result.OperationProgressUpdater.UpdateFileProgress(filesize);
                                if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
                                    return false;

                                remaining -= size;
                                offset += size;

                                if (remaining == 0)
                                {
                                    offset = 0;
                                    remaining = fs.Readblock();
                                }

                            } while (remaining > 0);

                            //If all fits in a single block, don't bother with blocklists
                            if (hashcollector.Count > 1)
                            {
                                var blkeyfinal = Convert.ToBase64String(m_blockhasher.ComputeHash(m_blocklistbuffer, 0, blocklistoffset));
                                blocklisthashes.Add(blkeyfinal);
                                AddBlockToOutput(blkeyfinal, m_blocklistbuffer, 0, blocklistoffset, CompressionHint.Noncompressible, true);
                            }
                        }

                        m_result.SizeOfOpenedFiles += filesize;
                        m_filehasher.TransformFinalBlock(m_blockbuffer, 0, 0);

                        var filekey = Convert.ToBase64String(m_filehasher.Hash);
                        if (oldHash != filekey)
                        {
                            if (oldHash == null)
                                m_result.AddVerboseMessage("New file {0}", path);
                            else
                                m_result.AddVerboseMessage("File has changed {0}", path);
                            if (oldId < 0)
                            {
                                m_result.AddedFiles++;
                                m_result.SizeOfAddedFiles += filesize;

                                if (m_options.Dryrun)
                                    m_result.AddDryrunMessage(string.Format("Would add new file {0}, size {1}", path, Library.Utility.Utility.FormatSizeString(filesize)));
                            }
                            else
                            {
                                m_result.ModifiedFiles++;
                                m_result.SizeOfModifiedFiles += filesize;

                                if (m_options.Dryrun)
                                    m_result.AddDryrunMessage(string.Format("Would add changed file {0}, size {1}", path, Library.Utility.Utility.FormatSizeString(filesize)));
                            }

                            AddFileToOutput(path, filesize, scantime, metahashandsize, hashcollector, filekey, blocklisthashes);
                            changed = true;
                        }
                        else
                        {
                            // When we write the file to output, update the scan time
                            oldScanned = scantime;
                            m_result.AddVerboseMessage("File has not changed {0}", path);
                        }
                    }
                }
                else
                {
                    if (m_options.SkipFilesLargerThan == long.MaxValue || m_options.SkipFilesLargerThan == 0 || m_snapshot.GetFileSize(path) < m_options.SkipFilesLargerThan)
                        m_result.AddVerboseMessage("Skipped checking file, because timestamp was not updated {0}", path);
                    else
                        m_result.AddVerboseMessage("Skipped checking file, because the size exceeds limit {0}", path);
                }

                if (!changed)
                    AddUnmodifiedFile(oldId, oldScanned);

                m_result.SizeOfExaminedFiles += filestatsize;
                if (filestatsize != 0)
                    m_result.OperationProgressUpdater.UpdatefilesProcessed(m_result.ExaminedFiles, m_result.SizeOfExaminedFiles);
            }
            catch (Exception ex)
            {
                m_result.AddWarning(string.Format("Failed to process path: {0}", path), ex);
                m_result.FilesWithError++;
            }

            return true;
        }
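The heart of Example #6 is the block-splitting scheme: the file is read in fixed-size blocks, every block is hashed and emitted, the raw block hashes are packed into a blocklist buffer, and whenever that buffer fills (or at the end, if the file spans more than one block) the buffer itself is hashed and emitted as a blocklist block. The standalone sketch below reproduces that flow with SHA-256 and a caller-supplied emit callback; BlockSplitter and emitBlock are hypothetical names, and the real method additionally computes a whole-file hash and tracks metadata, progress and the database.

using System;
using System.Collections.Generic;
using System.IO;
using System.Security.Cryptography;

public static class BlockSplitter
{
    // Splits a stream into fixed-size blocks and returns (block hashes, blocklist hashes).
    // emitBlock stands in for whatever stores or uploads the emitted data.
    public static (List<string> blockHashes, List<string> blocklistHashes) Split(
        Stream input, int blocksize, Action<string, byte[], int> emitBlock)
    {
        var blockHashes = new List<string>();
        var blocklistHashes = new List<string>();
        var buffer = new byte[blocksize];
        var blocklistBuffer = new byte[blocksize];
        var blocklistOffset = 0;

        using (var hasher = SHA256.Create())
        {
            int read;
            while ((read = ReadFully(input, buffer)) > 0)
            {
                var blockKey = hasher.ComputeHash(buffer, 0, read);

                // No room left for another hash: hash the blocklist buffer and emit it
                if (blocklistBuffer.Length - blocklistOffset < blockKey.Length)
                {
                    var blKey = Convert.ToBase64String(hasher.ComputeHash(blocklistBuffer, 0, blocklistOffset));
                    blocklistHashes.Add(blKey);
                    emitBlock(blKey, blocklistBuffer, blocklistOffset);
                    blocklistOffset = 0;
                }

                // Append the raw hash bytes to the blocklist and emit the data block itself
                Array.Copy(blockKey, 0, blocklistBuffer, blocklistOffset, blockKey.Length);
                blocklistOffset += blockKey.Length;

                var key = Convert.ToBase64String(blockKey);
                blockHashes.Add(key);
                emitBlock(key, buffer, read);
            }

            // A file that fits in a single block needs no blocklist at all
            if (blockHashes.Count > 1)
            {
                var finalKey = Convert.ToBase64String(hasher.ComputeHash(blocklistBuffer, 0, blocklistOffset));
                blocklistHashes.Add(finalKey);
                emitBlock(finalKey, blocklistBuffer, blocklistOffset);
            }
        }

        return (blockHashes, blocklistHashes);
    }

    // Fills the buffer completely unless the stream ends first
    private static int ReadFully(Stream s, byte[] buffer)
    {
        int total = 0, read;
        while (total < buffer.Length && (read = s.Read(buffer, total, buffer.Length - total)) > 0)
            total += read;
        return total;
    }
}

In the originals the emitted blocks go through AddBlockToOutput or the block output channel rather than a callback, but the buffering and flushing logic is the same.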
Example #7
        /// <summary>
        /// Runs the stream-block splitting process: reads file streams from the input channel,
        /// splits them into hashed blocks and blocklists, emits them on the block output channel
        /// and reports per-file progress events.
        /// </summary>
        public static Task Run(Options options, BackupDatabase database, ITaskReader taskreader)
        {
            return(AutomationExtensions.RunTask(
                       new
            {
                Input = Channels.StreamBlock.ForRead,
                ProgressChannel = Channels.ProgressEvents.ForWrite,
                BlockOutput = Channels.OutputBlocks.ForWrite
            },

                       async self =>
            {
                var blocksize = options.Blocksize;
                var filehasher = Duplicati.Library.Utility.HashAlgorithmHelper.Create(options.FileHashAlgorithm);
                var blockhasher = Duplicati.Library.Utility.HashAlgorithmHelper.Create(options.BlockHashAlgorithm);
                var emptymetadata = Utility.WrapMetadata(new Dictionary <string, string>(), options);
                var maxmetadatasize = (options.Blocksize / (long)options.BlockhashSize) * options.Blocksize;

                if (blockhasher == null)
                {
                    throw new UserInformationException(Strings.Common.InvalidHashAlgorithm(options.BlockHashAlgorithm), "BlockHashAlgorithmNotSupported");
                }
                if (filehasher == null)
                {
                    throw new UserInformationException(Strings.Common.InvalidHashAlgorithm(options.FileHashAlgorithm), "FileHashAlgorithmNotSupported");
                }

                if (!blockhasher.CanReuseTransform)
                {
                    throw new UserInformationException(Strings.Common.InvalidCryptoSystem(options.BlockHashAlgorithm), "BlockHashAlgorithmNotSupported");
                }
                if (!filehasher.CanReuseTransform)
                {
                    throw new UserInformationException(Strings.Common.InvalidCryptoSystem(options.FileHashAlgorithm), "FileHashAlgorithmNotSupported");
                }

                using (var empty_metadata_stream = new MemoryStream(emptymetadata.Blob))
                    while (await taskreader.ProgressAsync)
                    {
                        var send_close = false;
                        var filesize = 0L;
                        var filename = string.Empty;

                        var e = await self.Input.ReadAsync();
                        var cur = e.Result;

                        try
                        {
                            var stream = e.Stream;

                            using (var blocklisthashes = new Library.Utility.FileBackedStringList())
                                using (var hashcollector = new Library.Utility.FileBackedStringList())
                                {
                                    var blocklistbuffer = new byte[blocksize];
                                    var blocklistoffset = 0L;

                                    long fslen = -1;
                                    try { fslen = stream.Length; }
                                    catch (Exception ex) { Logging.Log.WriteWarningMessage(FILELOGTAG, "FileLengthFailure", ex, "Failed to read file length for file {0}", e.Path); }

                                    if (e.IsMetadata && fslen > maxmetadatasize)
                                    {
                                        //TODO: To fix this, the "WriteFileset" method in BackupHandler needs to
                                        // be updated such that it can select sets even when there are multiple
                                        // blocklist hashes for the metadata.
                                        // This could be done such that an extra query is made if the metadata
                                        // spans multiple blocklist hashes, as it is not expected to be common

                                        Logging.Log.WriteWarningMessage(LOGTAG, "TooLargeMetadata", null, "Metadata size is {0}, but the largest accepted size is {1}, recording empty metadata for {2}", fslen, maxmetadatasize, e.Path);
                                        empty_metadata_stream.Position = 0;
                                        stream = empty_metadata_stream;
                                        fslen = stream.Length;
                                    }

                                    await self.ProgressChannel.WriteAsync(new ProgressEvent()
                                    {
                                        Filepath = e.Path, Length = fslen, Type = EventType.FileStarted
                                    });
                                    send_close = true;

                                    filehasher.Initialize();
                                    var lastread = 0;
                                    var buf = new byte[blocksize];
                                    var lastupdate = DateTime.Now;

                                    // Core processing loop, read blocks of data and hash individually
                                    while (((lastread = await stream.ForceStreamReadAsync(buf, blocksize)) != 0))
                                    {
                                        // Run file hashing concurrently to squeeze a little extra concurrency out of it
                                        var pftask = Task.Run(() => filehasher.TransformBlock(buf, 0, lastread, buf, 0));

                                        var hashdata = blockhasher.ComputeHash(buf, 0, lastread);
                                        var hashkey = Convert.ToBase64String(hashdata);

                                        // If we have too many hashes, flush the blocklist
                                        if (blocklistbuffer.Length - blocklistoffset < hashdata.Length)
                                        {
                                            var blkey = Convert.ToBase64String(blockhasher.ComputeHash(blocklistbuffer, 0, (int)blocklistoffset));
                                            blocklisthashes.Add(blkey);
                                            await DataBlock.AddBlockToOutputAsync(self.BlockOutput, blkey, blocklistbuffer, 0, blocklistoffset, CompressionHint.Noncompressible, true);
                                            blocklistoffset = 0;
                                            blocklistbuffer = new byte[blocksize];
                                        }

                                        // Store the current hash in the blocklist
                                        Array.Copy(hashdata, 0, blocklistbuffer, blocklistoffset, hashdata.Length);
                                        blocklistoffset += hashdata.Length;
                                        hashcollector.Add(hashkey);
                                        filesize += lastread;

                                        // Don't spam updates
                                        if ((DateTime.Now - lastupdate).TotalSeconds > 10)
                                        {
                                            await self.ProgressChannel.WriteAsync(new ProgressEvent()
                                            {
                                                Filepath = e.Path, Length = filesize, Type = EventType.FileProgressUpdate
                                            });
                                            lastupdate = DateTime.Now;
                                        }

                                        // Make sure the filehasher is done with the buf instance before we pass it on
                                        await pftask;
                                        await DataBlock.AddBlockToOutputAsync(self.BlockOutput, hashkey, buf, 0, lastread, e.Hint, true);
                                        buf = new byte[blocksize];
                                    }

                                    // If we have more than a single block of data, output the (trailing) blocklist
                                    if (hashcollector.Count > 1)
                                    {
                                        var blkey = Convert.ToBase64String(blockhasher.ComputeHash(blocklistbuffer, 0, (int)blocklistoffset));
                                        blocklisthashes.Add(blkey);
                                        await DataBlock.AddBlockToOutputAsync(self.BlockOutput, blkey, blocklistbuffer, 0, blocklistoffset, CompressionHint.Noncompressible, true);
                                    }

                                    filehasher.TransformFinalBlock(new byte[0], 0, 0);
                                    var filehash = Convert.ToBase64String(filehasher.Hash);
                                    var blocksetid = await database.AddBlocksetAsync(filehash, filesize, blocksize, hashcollector, blocklisthashes);
                                    cur.SetResult(new StreamProcessResult()
                                    {
                                        Streamlength = filesize, Streamhash = filehash, Blocksetid = blocksetid
                                    });
                                    cur = null;
                                }
                        }
                        catch (Exception ex)
                        {
                            try
                            {
                                if (cur != null)
                                {
                                    cur.TrySetException(ex);
                                }
                            }
                            catch { }

                            // Rethrow if the channel has been retired, so the task shuts down
                            if (ex.IsRetiredException())
                            {
                                throw;
                            }
                        }
                        finally
                        {
                            if (cur != null)
                            {
                                try { cur.TrySetCanceled(); }
                                catch { }
                                cur = null;
                            }

                            if (send_close)
                            {
                                await self.ProgressChannel.WriteAsync(new ProgressEvent()
                                {
                                    Filepath = e.Path, Length = filesize, Type = EventType.FileClosed
                                });
                            }
                            send_close = false;
                        }
                    }
            }));
        }
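One detail worth spelling out in Example #7 is the maxmetadatasize limit: a blocklist is itself stored as a single block of Blocksize bytes, so it can hold Blocksize / BlockhashSize hashes, each referencing a data block of Blocksize bytes; metadata is only accepted if it fits under one blocklist. A rough illustration of that arithmetic, assuming a 100 KiB block size and 32-byte SHA-256 block hashes (these values are assumptions and may not match a given configuration):

using System;

public static class MetadataLimit
{
    public static void Main()
    {
        // Assumed values for illustration: 100 KiB blocks, 32-byte (SHA-256) block hashes
        long blocksize = 100 * 1024;
        long blockhashSize = 32;

        // One blocklist block holds blocksize / blockhashSize hashes,
        // and each hash points at a data block of blocksize bytes.
        long hashesPerBlocklist = blocksize / blockhashSize;     // 3200
        long maxMetadataSize = hashesPerBlocklist * blocksize;   // 327,680,000 bytes, roughly 312.5 MiB

        Console.WriteLine($"{hashesPerBlocklist} hashes per blocklist, max metadata {maxMetadataSize:N0} bytes");
    }
}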
Example #8
        /// <summary>
        /// Adds a single block hash to the index
        /// </summary>
        /// <param name="hash">The hash of the block.</param>
        /// <param name="size">The size of the block.</param>
        public void AddBlock(string hash, long size)
        {
            blockHashes.Add(size.ToString() + ":" + hash);
        }
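Example #8 keys each entry as "<size>:<hash>", so two blocks are only treated as identical when both the hash and the length match. A minimal sketch of how such an index might be built and queried; the FileBlockIndex name and the HashSet backing store are assumptions, not Duplicati's implementation:

using System.Collections.Generic;

public class FileBlockIndex
{
    // Keys are "<size>:<base64 hash>", matching the format built in Example #8
    private readonly HashSet<string> blockHashes = new HashSet<string>();

    public void AddBlock(string hash, long size)
    {
        blockHashes.Add(size.ToString() + ":" + hash);
    }

    public bool ContainsBlock(string hash, long size)
    {
        return blockHashes.Contains(size.ToString() + ":" + hash);
    }
}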