/// <summary>
/// Downloads the content of <paramref name="remoteDocument"/> into the given cache file,
/// resuming a previous partial download if the stored cache state is still valid.
/// </summary>
/// <param name="target">Local cache file to download into.</param>
/// <param name="remoteDocument">Remote document whose content stream is downloaded.</param>
/// <param name="transmission">Transmission object used for progress reporting; its FailedException is set on error.</param>
/// <param name="fsFactory">File system info factory handed to the cache validation.</param>
/// <returns>The hash of the downloaded content as produced by the reused hash algorithm.</returns>
protected byte[] DownloadCacheFile(IFileInfo target, IDocument remoteDocument, Transmission transmission, IFileSystemInfoFactory fsFactory) {
    // If no valid cached partial download exists, discard any stale cache file
    // so the download below starts from scratch.
    if (!this.LoadCacheFile(target, remoteDocument, fsFactory)) {
        if (target.Exists) {
            target.Delete();
        }
    }

    using (var hashAlg = new SHA1Reuse()) {
        // OpenOrCreate + ReadWrite: an existing partial cache file is kept and appended to;
        // FileShare.None guards against concurrent writers.
        using (var filestream = target.Open(FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None))
        using (var downloader = ContentTaskUtils.CreateDownloader()) {
            try {
                // The callback persists checkpoint state (checksum + length) during the
                // download so a later run can resume from it.
                downloader.DownloadFile(remoteDocument, filestream, transmission, hashAlg, (byte[] checksumUpdate, long length) => this.SaveCacheFile(target, remoteDocument, checksumUpdate, length, transmission));

                // Download finished completely: the persisted resume record is obsolete.
                if (this.TransmissionStorage != null) {
                    this.TransmissionStorage.RemoveObjectByRemoteObjectId(remoteDocument.Id);
                }
            } catch (Exception ex) {
                // Record the failure on the transmission before propagating it.
                transmission.FailedException = ex;
                throw;
            }
        }

        target.Refresh();
        // NOTE(review): hashAlg.Hash is read after the downloader is disposed; presumably
        // SHA1Reuse finalizes/retains the hash by then — confirm against SHA1Reuse.
        return(hashAlg.Hash);
    }
}
/// <summary>
/// Requesting an uploader with a positive chunk size must yield a
/// ChunkedUploader configured with exactly that chunk size.
/// </summary>
public void CreateNewChunkedUploader() {
    const long expectedChunkSize = 1024;

    var uploader = ContentTaskUtils.CreateUploader(expectedChunkSize);

    Assert.IsTrue(uploader is ChunkedUploader);
    Assert.AreEqual(expectedChunkSize, (uploader as ChunkedUploader).ChunkSize);
}
/// <summary>
/// PrepareResume must throw an IOException when the local stream cannot
/// deliver the requested number of already-transferred bytes.
/// </summary>
public void PrepareResumeFailsOnTooShortInputStream() {
    var shortStream = new Mock<Stream>();
    // Read always returning 0 simulates end-of-stream before successfulLength bytes were seen.
    shortStream
        .Setup(s => s.Read(It.IsAny<byte[]>(), It.IsAny<int>(), It.IsAny<int>()))
        .Returns(0);

    using (HashAlgorithm sha1 = new SHA1Managed()) {
        Assert.Throws<IOException>(() => ContentTaskUtils.PrepareResume(this.successfulLength, shortStream.Object, sha1));
    }
}
/// <summary>
/// Resuming a download via ContentTaskUtils.PrepareResume must request only the
/// missing byte range from the remote document and end up with a local file whose
/// SHA-1 hash equals the hash of the complete remote content.
/// </summary>
public void ResumeDownloadWithUtils() {
    long successfulLength = 1024;

    // Simulate a partially downloaded local file containing the first 1024 bytes.
    this.localFileStream.Write(this.remoteContent, 0, (int)successfulLength);
    this.localFileStream.Seek(0, SeekOrigin.Begin);

    // The remote chunk holds exactly the bytes that are still missing locally.
    byte[] remoteChunk = new byte[this.remoteLength - successfulLength];
    for (int i = 0; i < remoteChunk.Length; i++) {
        remoteChunk[i] = this.remoteContent[i + successfulLength];
    }

    // FIX: the MemoryStream was previously disposed by a manual Dispose() call after
    // the download block, which leaked the stream whenever the download or one of
    // the assertions threw. A using statement guarantees disposal on every path.
    using (var memStream = new MemoryStream(remoteChunk)) {
        this.mockedStream.Setup(stream => stream.Length).Returns(remoteChunk.Length);
        this.mockedStream.Setup(stream => stream.Stream).Returns(memStream);
        this.mock.Setup(doc => doc.ContentStreamLength).Returns(this.remoteLength);
        this.mock.Setup(doc => doc.ContentStreamId).Returns(this.contentStreamId);

        // GetContentStream must be asked for exactly the range starting at the
        // already-downloaded offset and spanning the remainder of the document.
        this.mock.Setup(doc => doc.GetContentStream(
            It.Is<string>((string s) => s.Equals(this.contentStreamId)),
            It.Is<long?>((long? l) => (l == successfulLength)),
            It.Is<long?>((long? l) => l == remoteChunk.Length)))
            .Returns(this.mockedStream.Object);

        // Progress events may only report positions/lengths inside the resumed range.
        this.transmissionEvent.TransmissionStatus += delegate(object sender, TransmissionProgressEventArgs e) {
            if (e.ActualPosition != null) {
                Assert.GreaterOrEqual((long)e.ActualPosition, successfulLength);
                Assert.LessOrEqual((long)e.ActualPosition, this.remoteLength);
            }

            if (e.Percent != null) {
                Assert.Greater(e.Percent, 0);
                Assert.LessOrEqual(e.Percent, 100);
            }

            if (e.Length != null) {
                Assert.GreaterOrEqual(e.Length, successfulLength);
                Assert.LessOrEqual(e.Length, this.remoteLength);
            }
        };

        using (IFileDownloader downloader = new ChunkedDownloader(this.chunkSize)) {
            ContentTaskUtils.PrepareResume(successfulLength, this.localFileStream, this.hashAlg);
            downloader.DownloadFile(this.mock.Object, this.localFileStream, this.transmissionEvent, this.hashAlg);

            Assert.AreEqual(this.remoteContent.Length, this.localFileStream.Length);
            Assert.AreEqual(SHA1Managed.Create().ComputeHash(this.remoteContent), this.hashAlg.Hash);
            Assert.AreEqual(SHA1Managed.Create().ComputeHash(this.localFileStream.ToArray()), this.hashAlg.Hash);
        }
    }
}
/// <summary>
/// PrepareResume must surface an IOException raised by the local stream while
/// re-reading the already-transferred bytes.
/// </summary>
public void PrepareResumeFailsOnIOException() {
    var streamMock = new Mock<Stream>();
    streamMock.Setup(s => s.Read(It.IsAny<byte[]>(), It.IsAny<int>(), It.IsAny<int>())).Throws(new IOException());

    using (HashAlgorithm hashAlg = new SHA1Managed()) {
        // FIX: the expected failure was previously not asserted at all — the test body
        // simply let the exception escape. Assert it explicitly, consistent with the
        // sibling test PrepareResumeFailsOnTooShortInputStream.
        Assert.Throws<IOException>(() => ContentTaskUtils.PrepareResume(this.successfulLength, streamMock.Object, hashAlg));
    }
}
/// <summary>
/// Preparing a resume at offset 0 over an empty stream must leave the hash
/// identical to the SHA-1 of zero bytes.
/// </summary>
public void PrepareResumeDoesNotChangeHashOnZeroLengthInputStream() {
    var emptyContent = new byte[0];
    byte[] expectedHash = new SHA1Managed().ComputeHash(emptyContent);

    using (MemoryStream emptyStream = new MemoryStream(emptyContent))
    using (HashAlgorithm sha1 = new SHA1Managed()) {
        ContentTaskUtils.PrepareResume(0, emptyStream, sha1);
        // Finalize the incremental hash so sha1.Hash becomes readable.
        sha1.TransformFinalBlock(new byte[0], 0, 0);
        Assert.AreEqual(expectedHash, sha1.Hash);
    }
}
/// <summary>
/// Every CreateDownloader overload — with a storage instance, with an explicit
/// null storage, and without a storage argument — must yield a ChunkedDownloader
/// carrying the requested chunk size.
/// </summary>
public void CreateNewChunkedDownloader() {
    const long chunkSize = 1024;

    var withStorage = ContentTaskUtils.CreateDownloader(chunkSize, Mock.Of<IFileTransmissionStorage>());
    Assert.IsTrue(withStorage is ChunkedDownloader);
    Assert.AreEqual(chunkSize, (withStorage as ChunkedDownloader).ChunkSize);

    var withNullStorage = ContentTaskUtils.CreateDownloader(chunkSize, null);
    Assert.IsTrue(withNullStorage is ChunkedDownloader);
    Assert.AreEqual(chunkSize, (withNullStorage as ChunkedDownloader).ChunkSize);

    var withoutStorage = ContentTaskUtils.CreateDownloader(chunkSize);
    Assert.IsTrue(withoutStorage is ChunkedDownloader);
    Assert.AreEqual(chunkSize, (withoutStorage as ChunkedDownloader).ChunkSize);
}
/// <summary>
/// When the local stream holds exactly the already-transferred byte count,
/// PrepareResume must hash all of it, matching a direct SHA-1 of the content.
/// </summary>
public void PrepareResumeWithExactFittingStream() {
    // Random content exactly as long as the already-transferred prefix.
    var content = new byte[this.successfulLength];
    using (RandomNumberGenerator rng = RandomNumberGenerator.Create()) {
        rng.GetBytes(content);
    }

    byte[] expectedHash = new SHA1Managed().ComputeHash(content);

    using (MemoryStream contentStream = new MemoryStream(content))
    using (HashAlgorithm sha1 = new SHA1Managed()) {
        ContentTaskUtils.PrepareResume(this.successfulLength, contentStream, sha1);
        // Finalize the incremental hash so sha1.Hash becomes readable.
        sha1.TransformFinalBlock(new byte[0], 0, 0);
        Assert.AreEqual(expectedHash, sha1.Hash);
    }
}
/// <summary>
/// Resuming a download at the half-way point must complete the local file and
/// produce the SHA-1 of the full remote content.
/// </summary>
public void ResumeDownloadWithUtils() {
    long resumeOffset = this.remoteLength / 2;
    SetupResumeDownload(resumeOffset);

    using (var chunkStream = new MemoryStream(this.remoteChunk)) {
        this.mockedStream.Setup(stream => stream.Stream).Returns(chunkStream);

        using (IFileDownloader downloader = new ChunkedDownloader(this.chunkSize)) {
            ContentTaskUtils.PrepareResume(resumeOffset, this.localFileStream, this.hashAlg);
            downloader.DownloadFile(this.mock.Object, this.localFileStream, this.transmission, this.hashAlg);

            Assert.AreEqual(this.remoteContent.Length, this.localFileStream.Length);
            Assert.AreEqual(SHA1Managed.Create().ComputeHash(this.remoteContent), this.hashAlg.Hash);
            Assert.AreEqual(SHA1Managed.Create().ComputeHash(this.localFileStream.ToArray()), this.hashAlg.Hash);
        }
    }
}
/// <summary>
/// Resuming an upload after 20% was already transferred must only upload the
/// remainder and still produce matching local/remote content hashes.
/// </summary>
public void ResumeUploadWithUtils() {
    double alreadyUploadedFraction = 0.2;
    int alreadyUploadedBytes = (int)(this.fileLength * alreadyUploadedFraction);
    double minimumPercent = 100 * alreadyUploadedFraction;

    this.InitRemoteChunkWithSize(alreadyUploadedBytes);

    // Every progress report must stay at or above the already-uploaded mark.
    this.transmissionEvent.TransmissionStatus += delegate(object sender, TransmissionProgressEventArgs e) {
        this.AssertThatProgressFitsMinimumLimits(e, alreadyUploadedBytes, minimumPercent, alreadyUploadedBytes);
    };

    using (IFileUploader uploader = new ChunkedUploader(this.chunkSize)) {
        ContentTaskUtils.PrepareResume(alreadyUploadedBytes, this.localFileStream, this.hashAlg);
        uploader.UploadFile(this.mockedDocument.Object, this.localFileStream, this.transmissionEvent, this.hashAlg);
    }

    this.AssertThatLocalAndRemoteContentAreEqualToHash();
    Assert.AreEqual(1, this.lastChunk);
}
/// <summary>
/// When the local stream is longer than the already-transferred byte count,
/// PrepareResume must hash only the leading successfulLength bytes.
/// </summary>
public void PrepareResumeWithLongerLocalStream() {
    // Random prefix; the stream below will contain it twice, but only the
    // first copy may contribute to the hash.
    var prefix = new byte[this.successfulLength];
    using (RandomNumberGenerator rng = RandomNumberGenerator.Create()) {
        rng.GetBytes(prefix);
    }

    byte[] expectedHash = new SHA1Managed().ComputeHash(prefix);

    using (MemoryStream doubledStream = new MemoryStream())
    using (HashAlgorithm sha1 = new SHA1Managed()) {
        doubledStream.Write(prefix, 0, (int)this.successfulLength);
        doubledStream.Write(prefix, 0, (int)this.successfulLength);
        doubledStream.Seek(0, SeekOrigin.Begin);

        ContentTaskUtils.PrepareResume(this.successfulLength, doubledStream, sha1);
        // Finalize the incremental hash so sha1.Hash becomes readable.
        sha1.TransformFinalBlock(new byte[0], 0, 0);
        Assert.AreEqual(expectedHash, sha1.Hash);
    }
}
/// <summary>
/// Resuming an upload after 20% was already transferred must only upload the
/// remainder; transmission constraints enforce the progress lower bounds.
/// </summary>
public void ResumeUploadWithUtils() {
    double alreadyUploadedFraction = 0.2;
    int alreadyUploadedBytes = (int)(this.fileLength * alreadyUploadedFraction);
    double minimumPercent = 100 * alreadyUploadedFraction;

    this.InitRemoteChunkWithSize(alreadyUploadedBytes);

    // All reported lengths, percentages and positions must stay at or above
    // the already-uploaded mark.
    this.transmission.AddLengthConstraint(Is.GreaterThanOrEqualTo(alreadyUploadedBytes));
    this.transmission.AddPercentConstraint(Is.GreaterThanOrEqualTo(minimumPercent));
    this.transmission.AddPositionConstraint(Is.GreaterThanOrEqualTo(alreadyUploadedBytes));

    using (IFileUploader uploader = new ChunkedUploader(this.chunkSize)) {
        ContentTaskUtils.PrepareResume(alreadyUploadedBytes, this.localFileStream, this.hashAlg);
        uploader.UploadFile(this.mockedDocument.Object, this.localFileStream, this.transmission, this.hashAlg);
    }

    this.AssertThatLocalAndRemoteContentAreEqualToHash();
    Assert.AreEqual(1, this.lastChunk);
}
/// <summary>
/// A non-positive chunk size must fall back to the simple (non-chunked) downloader.
/// </summary>
public void CreateNewSimpleDownloaderByPassingNegativeChunkSize() {
    IFileDownloader downloader = ContentTaskUtils.CreateDownloader(-1);
    Assert.IsTrue(downloader is SimpleFileDownloader);
}
/// <summary>
/// Calling CreateDownloader without arguments must yield the simple (non-chunked) downloader.
/// </summary>
public void CreateNewSimpleDownloaderWithoutParam() {
    IFileDownloader downloader = ContentTaskUtils.CreateDownloader();
    Assert.IsTrue(downloader is SimpleFileDownloader);
}
/// <summary>
/// Solve the specified situation by using localFile and remote object:
/// creates the corresponding remote object for a locally added file or folder,
/// persists the mapping, and uploads file content if present.
/// </summary>
/// <param name="localFileSystemInfo">Local filesystem info instance.</param>
/// <param name="remoteId">Remote identifier or object.</param>
/// <param name="localContent">Hint if the local content has been changed.</param>
/// <param name="remoteContent">Information if the remote content has been changed.</param>
public override void Solve(
    IFileSystemInfo localFileSystemInfo,
    IObjectId remoteId,
    ContentChangeType localContent = ContentChangeType.NONE,
    ContentChangeType remoteContent = ContentChangeType.NONE)
{
    Stopwatch completewatch = new Stopwatch();
    completewatch.Start();
    Logger.Debug("Starting LocalObjectAdded");
    string parentId = this.GetParentId(localFileSystemInfo, this.Storage);
    // Tag the local object with a Uuid (where the filesystem supports it) so it can
    // be re-identified after renames/moves.
    Guid uuid = this.WriteOrUseUuidIfSupported(localFileSystemInfo);

    ICmisObject addedObject;
    try {
        addedObject = this.AddCmisObject(localFileSystemInfo, parentId, this.Session);
    } catch (CmisPermissionDeniedException e) {
        // Permission problems are not fatal for the sync run: log and give up on
        // this object without persisting any mapping.
        OperationsLogger.Warn(string.Format("Permission denied while trying to Create the locally added object {0} on the server ({1}).", localFileSystemInfo.FullName, e.Message));
        return;
    }

    OperationsLogger.Info(string.Format("Created remote {2} {0} for {1}", addedObject.Id, localFileSystemInfo.FullName, addedObject is IFolder ? "folder" : "document"));

    // Persist the mapping immediately, before any content upload, so a crash between
    // "remote object created" and "content uploaded" does not lose the association.
    MappedObject mapped = new MappedObject(
        localFileSystemInfo.Name,
        addedObject.Id,
        localFileSystemInfo is IDirectoryInfo ? MappedObjectType.Folder : MappedObjectType.File,
        parentId,
        addedObject.ChangeToken)
    {
        Guid = uuid,
        LastRemoteWriteTimeUtc = addedObject.LastModificationDate,
        // For a non-empty file the local write time is deliberately left null here;
        // it is filled in only after the content upload below succeeds.
        LastLocalWriteTimeUtc = localFileSystemInfo is IFileInfo && (localFileSystemInfo as IFileInfo).Length > 0 ? (DateTime?)null : (DateTime?)localFileSystemInfo.LastWriteTimeUtc,
        LastChangeToken = addedObject.ChangeToken,
        // Folders carry -1 as content size; a just-created document has no content yet.
        LastContentSize = localFileSystemInfo is IDirectoryInfo ? -1 : 0,
        ChecksumAlgorithmName = localFileSystemInfo is IDirectoryInfo ? null : "SHA-1",
        // SHA-1 of zero bytes matches the empty remote document created above.
        LastChecksum = localFileSystemInfo is IDirectoryInfo ? null : SHA1.Create().ComputeHash(new byte[0])
    };
    this.Storage.SaveMappedObject(mapped);

    var localFile = localFileSystemInfo as IFileInfo;
    if (localFile != null) {
        FileTransmissionEvent transmissionEvent = new FileTransmissionEvent(FileTransmissionType.UPLOAD_NEW_FILE, localFile.FullName);
        this.transmissionManager.AddTransmission(transmissionEvent);
        if (localFile.Length > 0) {
            Stopwatch watch = new Stopwatch();
            OperationsLogger.Debug(string.Format("Uploading file content of {0}", localFile.FullName));
            watch.Start();
            IFileUploader uploader = ContentTaskUtils.CreateUploader();
            using (SHA1 hashAlg = new SHA1Managed())
            using (var fileStream = localFile.Open(FileMode.Open, FileAccess.Read)) {
                try {
                    uploader.UploadFile(addedObject as IDocument, fileStream, transmissionEvent, hashAlg);
                } catch (Exception ex) {
                    // Report the failure on the transmission before propagating it.
                    transmissionEvent.ReportProgress(new TransmissionProgressEventArgs { FailedException = ex });
                    throw;
                }

                mapped.ChecksumAlgorithmName = "SHA-1";
                mapped.LastChecksum = hashAlg.Hash;
            }

            watch.Stop();

            if (this.ServerCanModifyDateTimes) {
                (addedObject as IDocument).UpdateLastWriteTimeUtc(localFile.LastWriteTimeUtc);
            }

            // Refresh the mapping with the post-upload state (size, change token,
            // write times) and persist it again.
            mapped.LastContentSize = localFile.Length;
            mapped.LastChangeToken = addedObject.ChangeToken;
            mapped.LastRemoteWriteTimeUtc = addedObject.LastModificationDate;
            mapped.LastLocalWriteTimeUtc = localFileSystemInfo.LastWriteTimeUtc;
            if (mapped.RemoteObjectId != addedObject.Id) {
                // NOTE(review): the remote id can apparently change during upload
                // (presumably a checkin/versioning round-trip on the server) — the old
                // mapping is removed and re-saved under the new id; confirm against
                // the uploader/server behavior.
                this.Storage.RemoveObject(mapped);
                mapped.RemoteObjectId = addedObject.Id;
            }

            this.Storage.SaveMappedObject(mapped);
            OperationsLogger.Info(string.Format("Uploaded file content of {0} in [{1} msec]", localFile.FullName, watch.ElapsedMilliseconds));
        }

        transmissionEvent.ReportProgress(new TransmissionProgressEventArgs { Completed = true });
    }

    completewatch.Stop();
    Logger.Debug(string.Format("Finished LocalObjectAdded after [{0} msec]", completewatch.ElapsedMilliseconds));
}
/// <summary>
/// Downloads the changed remote content into a cache file, atomically replaces the
/// local target with it, and either deletes the previous local content (if it matched
/// the last synced checksum) or preserves it as a conflict file.
/// </summary>
/// <param name="target">Local file to be updated.</param>
/// <param name="remoteDocument">Remote document whose content is downloaded.</param>
/// <param name="obj">Mapped object; its checksum algorithm name is updated to "SHA-1".</param>
/// <param name="fsFactory">Factory for cache/backup/conflict file infos.</param>
/// <param name="transmissonManager">Manager the download transmission is registered with.</param>
/// <param name="logger">Logger for non-fatal restore problems.</param>
/// <returns>The SHA-1 hash of the downloaded content.</returns>
protected static byte[] DownloadChanges(IFileInfo target, IDocument remoteDocument, IMappedObject obj, IFileSystemInfoFactory fsFactory, ActiveActivitiesManager transmissonManager, ILog logger) {
    // Download changes
    byte[] lastChecksum = obj.LastChecksum;
    byte[] hash = null;
    var cacheFile = fsFactory.CreateDownloadCacheFileInfo(target);
    var transmissionEvent = new FileTransmissionEvent(FileTransmissionType.DOWNLOAD_MODIFIED_FILE, target.FullName, cacheFile.FullName);
    transmissonManager.AddTransmission(transmissionEvent);
    try {
        // Download into the cache file first so the real target is never left
        // half-written; hash the content while it streams through.
        using (SHA1 hashAlg = new SHA1Managed()) {
            using (var filestream = cacheFile.Open(FileMode.Create, FileAccess.Write, FileShare.None))
            using (IFileDownloader download = ContentTaskUtils.CreateDownloader()) {
                download.DownloadFile(remoteDocument, filestream, transmissionEvent, hashAlg);
                obj.ChecksumAlgorithmName = "SHA-1";
                hash = hashAlg.Hash;
            }
        }

        // Swap the cache file into place; the previous content survives as a backup
        // until we know whether it was locally modified.
        var backupFile = fsFactory.CreateFileInfo(target.FullName + ".bak.sync");
        Guid? uuid = target.Uuid;
        cacheFile.Replace(target, backupFile, true);
        try {
            // Re-attach the sync Uuid to the new target content.
            target.Uuid = uuid;
        } catch (RestoreModificationDateException e) {
            logger.Debug("Failed to restore modification date of original file", e);
        }

        try {
            // The backup must not carry the Uuid, or it would shadow the real target.
            backupFile.Uuid = null;
        } catch (RestoreModificationDateException e) {
            logger.Debug("Failed to restore modification date of backup file", e);
        }

        // Compare the replaced local content against the last synced checksum to
        // decide between clean update and conflict.
        byte[] checksumOfOldFile = null;
        using (var oldFileStream = backupFile.Open(FileMode.Open, FileAccess.Read, FileShare.None)) {
            checksumOfOldFile = SHA1Managed.Create().ComputeHash(oldFileStream);
        }

        if (!lastChecksum.SequenceEqual(checksumOfOldFile)) {
            // Local file had unsynced edits: keep them as a conflict file.
            var conflictFile = fsFactory.CreateConflictFileInfo(target);
            backupFile.MoveTo(conflictFile.FullName);
            OperationsLogger.Info(string.Format("Updated local content of \"{0}\" with content of remote document {1} and created conflict file {2}", target.FullName, remoteDocument.Id, conflictFile.FullName));
        } else {
            // Old content was already synced; the backup is redundant.
            backupFile.Delete();
            OperationsLogger.Info(string.Format("Updated local content of \"{0}\" with content of remote document {1}", target.FullName, remoteDocument.Id));
        }
    } catch (Exception ex) {
        // Report the failure on the transmission before propagating it.
        transmissionEvent.ReportProgress(new TransmissionProgressEventArgs { FailedException = ex });
        throw;
    }

    transmissionEvent.ReportProgress(new TransmissionProgressEventArgs { Completed = true });
    return(hash);
}
/// <summary>
/// Solve the specified situation by using the session, storage, localFile and remoteId.
/// If a folder is affected, simply update the local change time of the corresponding local folder.
/// If it is a file and the changeToken is not equal to the saved, the new content is downloaded.
/// </summary>
/// <param name="localFile">Local file.</param>
/// <param name="remoteId">Remote identifier.</param>
/// <param name="localContent">Hint if the local content has been changed.</param>
/// <param name="remoteContent">Information if the remote content has been changed.</param>
public override void Solve(
    IFileSystemInfo localFile,
    IObjectId remoteId,
    ContentChangeType localContent = ContentChangeType.NONE,
    ContentChangeType remoteContent = ContentChangeType.NONE)
{
    IMappedObject obj = this.Storage.GetObjectByRemoteId(remoteId.Id);
    if (remoteId is IFolder) {
        // Folder change: only metadata (change token, modification date) is mirrored.
        var remoteFolder = remoteId as IFolder;
        DateTime? lastModified = remoteFolder.LastModificationDate;
        obj.LastChangeToken = remoteFolder.ChangeToken;
        if (lastModified != null) {
            try {
                localFile.LastWriteTimeUtc = (DateTime)lastModified;
            } catch (IOException e) {
                // Not fatal: some filesystems refuse to set the date; keep syncing.
                Logger.Debug("Couldn't set the server side modification date", e);
            }

            obj.LastLocalWriteTimeUtc = localFile.LastWriteTimeUtc;
        }
    } else if (remoteId is IDocument) {
        byte[] lastChecksum = obj.LastChecksum;
        var remoteDocument = remoteId as IDocument;
        DateTime? lastModified = remoteDocument.LastModificationDate;
        // Only act if the remote state actually moved past the stored mapping
        // (newer modification date or different change token).
        if ((lastModified != null && lastModified != obj.LastRemoteWriteTimeUtc) || obj.LastChangeToken != remoteDocument.ChangeToken) {
            if (remoteContent != ContentChangeType.NONE) {
                // This solver handles remote-only changes; a concurrent local edit
                // must be resolved by a different situation solver.
                if (obj.LastLocalWriteTimeUtc != localFile.LastWriteTimeUtc) {
                    throw new ArgumentException("The local file has been changed since last write => aborting update");
                }

                // Download changes
                // Stream the new content into a cache file first so the real target
                // is never left half-written.
                var file = localFile as IFileInfo;
                var cacheFile = this.fsFactory.CreateDownloadCacheFileInfo(file);
                var transmissionEvent = new FileTransmissionEvent(FileTransmissionType.DOWNLOAD_MODIFIED_FILE, localFile.FullName, cacheFile.FullName);
                this.queue.AddEvent(transmissionEvent);
                this.transmissonManager.AddTransmission(transmissionEvent);
                using (SHA1 hashAlg = new SHA1Managed())
                using (var filestream = cacheFile.Open(FileMode.Create, FileAccess.Write, FileShare.None))
                using (IFileDownloader download = ContentTaskUtils.CreateDownloader()) {
                    try {
                        download.DownloadFile(remoteDocument, filestream, transmissionEvent, hashAlg);
                    } catch (Exception ex) {
                        // Report the failure on the transmission before propagating it.
                        transmissionEvent.ReportProgress(new TransmissionProgressEventArgs { FailedException = ex });
                        throw;
                    }

                    obj.ChecksumAlgorithmName = "SHA-1";
                    obj.LastChecksum = hashAlg.Hash;
                }

                // Swap the cache file into place; the previous content survives as a
                // backup until we know whether it was locally modified.
                var backupFile = this.fsFactory.CreateFileInfo(file.FullName + ".bak.sync");
                Guid? uuid = file.Uuid;
                cacheFile.Replace(file, backupFile, true);
                try {
                    // Re-attach the sync Uuid to the new target content.
                    file.Uuid = uuid;
                } catch (RestoreModificationDateException e) {
                    Logger.Debug("Failed to restore modification date of original file", e);
                }

                try {
                    // The backup must not carry the Uuid, or it would shadow the real target.
                    backupFile.Uuid = null;
                } catch (RestoreModificationDateException e) {
                    Logger.Debug("Failed to restore modification date of backup file", e);
                }

                // Compare the replaced local content against the last synced checksum
                // to decide between clean update and conflict.
                byte[] checksumOfOldFile = null;
                using (var oldFileStream = backupFile.Open(FileMode.Open, FileAccess.Read, FileShare.None)) {
                    checksumOfOldFile = SHA1Managed.Create().ComputeHash(oldFileStream);
                }

                if (!lastChecksum.SequenceEqual(checksumOfOldFile)) {
                    // Local file had unsynced edits: keep them as a conflict file.
                    var conflictFile = this.fsFactory.CreateConflictFileInfo(file);
                    backupFile.MoveTo(conflictFile.FullName);
                    OperationsLogger.Info(string.Format("Updated local content of \"{0}\" with content of remote document {1} and created conflict file {2}", file.FullName, remoteId.Id, conflictFile.FullName));
                } else {
                    // Old content was already synced; the backup is redundant.
                    backupFile.Delete();
                    OperationsLogger.Info(string.Format("Updated local content of \"{0}\" with content of remote document {1}", file.FullName, remoteId.Id));
                }

                transmissionEvent.ReportProgress(new TransmissionProgressEventArgs { Completed = true });
            }

            // Mirror the remote modification date onto the local file and mapping.
            obj.LastRemoteWriteTimeUtc = remoteDocument.LastModificationDate;
            if (remoteDocument.LastModificationDate != null) {
                localFile.LastWriteTimeUtc = (DateTime)remoteDocument.LastModificationDate;
            }

            obj.LastLocalWriteTimeUtc = localFile.LastWriteTimeUtc;
            obj.LastContentSize = remoteDocument.ContentStreamLength ?? 0;
        }

        obj.LastChangeToken = remoteDocument.ChangeToken;
        obj.LastRemoteWriteTimeUtc = lastModified;
    }

    this.Storage.SaveMappedObject(obj);
}