/// <summary>
/// Initializes a new instance of the <see cref="WorkerItem"/> class.
/// </summary>
/// <param name="parent">The owning backup worker (supplies the configured backup mode).</param>
/// <param name="sourceFile">The file that should be backed up.</param>
/// <param name="destinationFileName">The target file name for the backup copy.</param>
/// <param name="nameBufferPreText">Pre-text bytes used to evaluate the central name file in fast mode.</param>
public WorkerItem(BackupWorker parent, FileInfo sourceFile, string destinationFileName, byte[] nameBufferPreText)
{
    _parent = parent;
    _sourceFile = sourceFile;
    _destinationFileName = destinationFileName;
    _centralContentFileName = string.Empty;
    _centralContentFolder = string.Empty;
    _centralNameFileName = string.Empty;
    _buffer = null;
    _sha256 = null;

    if (_parent._backupMode == BackupMode.Fast)
    {
        // Fast mode: try to resolve the item via the central name file first,
        // so the item can go straight to the writer station.
        EvaluateCentralNameFileName(nameBufferPreText);
        NextStation = NextStation.Writer;
    }
    else
    {
        // Every other mode: the content has to be read and hashed first.
        NextStation = NextStation.Reader;
    }

    _stream = Stream.Null;
    _streamLength = 0;
}
/// <summary>
/// Initializes a new instance of the <see cref="ItemProducerCollection"/> class.
/// </summary>
/// <param name="parent">The parent worker instance.</param>
/// <param name="entries">All directory entries that should be backuped.</param>
public ItemProducerCollection(BackupWorker parent, Dom.DirectoryEntryReadonly[] entries)
{
    // TODO find out, which entries belong to which hard disk
    // Entries with the same hard disk should be grouped in one CollectorItem, since they should be evaluated sequential
    // Entries with different hard disk can be put in different CollectorItems.
    _collectorItems = new ItemProducer[1];
    var item = new ItemProducer(parent, entries);
    // FIX: the lambda parameter previously reused the name of the enclosing local
    // ('item'), which is compile error CS0136 in C#. Renamed to 'producedItem'.
    item.OutputAvailable += producedItem => ItemAvailable?.Invoke(producedItem);
    _collectorItems[0] = item;
}
/// <summary>
/// Initializes a new instance of the <see cref="ItemProducer"/> class.
/// </summary>
/// <param name="parent">The parent backup worker instance.</param>
/// <param name="entries">The directory entries that should be bundled in this CollectorItem. See class comment
/// in which way the DirectoryEntries should be bundled.</param>
public ItemProducer(BackupWorker parent, Dom.DirectoryEntryReadonly[] entries)
{
    // Capture the collaborators; all real work happens when production is started.
    (_parent, _entries) = (parent, entries);
}