/// <summary>
/// Uploads the file content to the remote document.
/// </summary>
/// <returns>The SHA-1 hash of the uploaded file content.</returns>
/// <param name="localFile">Local file.</param>
/// <param name="doc">Remote document.</param>
/// <param name="transmissionManager">Transmission manager.</param>
protected static byte[] UploadFile(IFileInfo localFile, IDocument doc, ActiveActivitiesManager transmissionManager) {
    byte[] hash = null;
    IFileUploader uploader = FileTransmission.ContentTaskUtils.CreateUploader();
    FileTransmissionEvent transmissionEvent = new FileTransmissionEvent(FileTransmissionType.UPLOAD_MODIFIED_FILE, localFile.FullName);
    transmissionManager.AddTransmission(transmissionEvent);
    transmissionEvent.ReportProgress(new TransmissionProgressEventArgs { Started = true });
    using (var hashAlg = new SHA1Managed()) {
        try {
            using (var file = localFile.Open(FileMode.Open, FileAccess.Read, FileShare.ReadWrite | FileShare.Delete)) {
                uploader.UploadFile(doc, file, transmissionEvent, hashAlg);
                hash = hashAlg.Hash;
            }
        } catch (Exception ex) {
            transmissionEvent.ReportProgress(new TransmissionProgressEventArgs { FailedException = ex });
            throw;
        }
    }

    transmissionEvent.ReportProgress(new TransmissionProgressEventArgs { Completed = true });
    return hash;
}
public void ReplaceFileContent() { string sourceFile = "source"; string targetFile = "target"; string backupFile = "source.bak"; IFileInfo sourceInfo = Factory.CreateFileInfo(Path.Combine(this.testFolder.FullName, sourceFile)); IFileInfo targetInfo = Factory.CreateFileInfo(Path.Combine(this.testFolder.FullName, targetFile)); IFileInfo backupInfo = Factory.CreateFileInfo(Path.Combine(this.testFolder.FullName, backupFile)); using (var stream = sourceInfo.Open(FileMode.CreateNew, FileAccess.Write)) { stream.Write(new byte[2], 0, 2); } sourceInfo.Refresh(); Assert.That(sourceInfo.Exists, Is.True); Assert.That(sourceInfo.Length, Is.EqualTo(2)); using (var stream = targetInfo.Open(FileMode.CreateNew, FileAccess.Write)) { stream.Write(new byte[5], 0, 5); } targetInfo.Refresh(); Assert.That(targetInfo.Exists, Is.True); Assert.That(targetInfo.Length, Is.EqualTo(5)); var newFileInfo = sourceInfo.Replace(targetInfo, backupInfo, true); sourceInfo.Refresh(); targetInfo.Refresh(); backupInfo.Refresh(); Assert.That(sourceInfo.Exists, Is.False); Assert.That(targetInfo.Length, Is.EqualTo(2)); Assert.That(backupInfo.Exists, Is.True); Assert.That(backupInfo.Length, Is.EqualTo(5)); Assert.That(newFileInfo.FullName, Is.EqualTo(targetInfo.FullName)); }
protected byte[] DownloadCacheFile(IFileInfo target, IDocument remoteDocument, Transmission transmission, IFileSystemInfoFactory fsFactory) { if (!this.LoadCacheFile(target, remoteDocument, fsFactory)) { if (target.Exists) { target.Delete(); } } using (var hashAlg = new SHA1Reuse()) { using (var filestream = target.Open(FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None)) using (var downloader = ContentTaskUtils.CreateDownloader()) { try { downloader.DownloadFile(remoteDocument, filestream, transmission, hashAlg, (byte[] checksumUpdate, long length) => this.SaveCacheFile(target, remoteDocument, checksumUpdate, length, transmission)); if (this.TransmissionStorage != null) { this.TransmissionStorage.RemoveObjectByRemoteObjectId(remoteDocument.Id); } } catch (Exception ex) { transmission.FailedException = ex; throw; } } target.Refresh(); return(hashAlg.Hash); } }
/// <summary>
/// Calculates the checksum of the given file using the named hash algorithm.
/// </summary>
/// <returns>The checksum.</returns>
/// <param name="hashAlgorithm">Name of the hash algorithm.</param>
/// <param name="file">File to be hashed.</param>
public static byte[] CalculateChecksum(string hashAlgorithm, IFileInfo file) {
    using (var stream = file.Open(FileMode.Open, FileAccess.Read, FileShare.Read)) {
        return CalculateChecksum(hashAlgorithm, stream);
    }
}
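A minimal call-site sketch for the helper above. The hosting class name (ChecksumUtils), the file path, and the exact algorithm-name string accepted by the stream overload are assumptions; "SHA-1" is used here because that is the ChecksumAlgorithmName stored elsewhere in this section.

// Hash a local file with the helper above and print the digest as hex.
// ChecksumUtils is a placeholder for whatever static class hosts CalculateChecksum;
// Factory is the IFileSystemInfoFactory-style factory used in the surrounding tests.
IFileInfo file = Factory.CreateFileInfo(@"C:\sync\report.pdf"); // path is illustrative
byte[] checksum = ChecksumUtils.CalculateChecksum("SHA-1", file);
Console.WriteLine(BitConverter.ToString(checksum).Replace("-", string.Empty));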
public void ReplaceFileContentAndExtendedAttributes() { this.SkipIfExtendedAttributesAreNotAvailable(); string sourceFile = "source"; string targetFile = "target"; string backupFile = "source.bak"; IFileInfo sourceInfo = Factory.CreateFileInfo(Path.Combine(this.testFolder.FullName, sourceFile)); IFileInfo targetInfo = Factory.CreateFileInfo(Path.Combine(this.testFolder.FullName, targetFile)); IFileInfo backupInfo = Factory.CreateFileInfo(Path.Combine(this.testFolder.FullName, backupFile)); using (var stream = sourceInfo.Open(FileMode.CreateNew, FileAccess.Write)) { stream.Write(new byte[2], 0, 2); } sourceInfo.SetExtendedAttribute("test", sourceFile, false); sourceInfo.Refresh(); using (var stream = targetInfo.Open(FileMode.CreateNew, FileAccess.Write)) { stream.Write(new byte[5], 0, 5); } targetInfo.SetExtendedAttribute("test", targetFile, false); targetInfo.Refresh(); var newFileInfo = sourceInfo.Replace(targetInfo, backupInfo, true); Assert.That(newFileInfo.GetExtendedAttribute("test"), Is.EqualTo(sourceFile)); backupInfo.Refresh(); Assert.That(backupInfo.Exists, Is.True); Assert.That(backupInfo.GetExtendedAttribute("test"), Is.EqualTo(targetFile)); }
public static StreamWriter AppendText([NotNull] this IFileInfo fileInfo) { Guard.NotNull(fileInfo, nameof(fileInfo)); IFileStream stream = fileInfo.Open(FileMode.Append, FileAccess.Write); return(new StreamWriter(stream.AsStream())); }
public static StreamReader OpenText([NotNull] this IFileInfo fileInfo) { Guard.NotNull(fileInfo, nameof(fileInfo)); IFileStream stream = fileInfo.Open(FileMode.Open, FileAccess.Read, FileShare.Read); return(new StreamReader(stream.AsStream())); }
private MailMessage CreateMessage(IFileInfo fileInfo) { return(_messageBuilder .Create() .SetSenderAddress(_settings.Mail.SenderEmailAddres) .SetDestinationAddresses(_settings.Mail.ToEmailAddreses) .SetSubject(_settings.Mail.Subject) .SetBody(_settings.Mail.Body) .AddAttachment(fileInfo.Open(FileMode.Open, FileAccess.Read, FileShare.Read), fileInfo.Name) .Build()); }
public void ReadFile_With_Open() { IFileInfo fileInfo = _factory.CreateFileInfo(_EXAMPLE_FILE_PATH); using (IFileStream stream = fileInfo.Open(FileMode.Open, FileAccess.Read, FileShare.Read)) using (IStreamReader reader = _factory.CreateReader(stream)) { string content = reader.ReadToEnd(); Console.WriteLine("[ViaFileInfo.ReadFile_With_Open] File content: " + content); } }
public static Catalog Read(IFileInfo fileInfo) { using (System.IO.Stream stream = fileInfo.Open(System.IO.FileMode.Open, System.IO.FileAccess.Read)) { XDocument xDocument = XDocument.Load(stream); string baseDirectoryPath = (string)xDocument.Element("Catalog").Element("BaseDirectoryPath"); DateTime catalogedOn = (DateTime)xDocument.Element("Catalog").Element("CatalogedOn"); DateTime updatedOn = (DateTime)xDocument.Element("Catalog").Element("UpdatedOn"); List <FileInstance> fileInstances = xDocument.Element("Catalog").Element("Files").Elements("f") .Select((element) => new FileInstance(element.Attribute("p").Value, (long)element.Attribute("l"), Hash256.Parse(element.Attribute("h").Value))) .ToList(); return(new Catalog(baseDirectoryPath, catalogedOn, updatedOn, fileInstances)); } }
public void DeleteFile() { string fileName = "toBeDeleted"; string fullPath = Path.Combine(this.testFolder.FullName, fileName); IFileInfo fileInfo = Factory.CreateFileInfo(fullPath); using (fileInfo.Open(FileMode.CreateNew)) { } Assert.That(fileInfo.Exists, Is.True); fileInfo.Delete(); fileInfo.Refresh(); Assert.That(fileInfo.Exists, Is.False); }
private void ExportFile(IFileInfo file) { using (var dialog = new SaveFileDialog { FileName = file.Name, Filter = string.Format("{0}|*{0}", file.Extension) }) { if (dialog.ShowDialog(this) == DialogResult.OK) { using (var input = file.Open(FileMode.Open, FileAccess.Read)) using (var output = File.OpenWrite(dialog.FileName)) { input.CopyTo(output); } } } }
private bool MergeExistingFileWithRemoteFile(IFileInfo file, IDocument remoteDoc, Guid guid, out byte[] localHash) { byte[] remoteHash = remoteDoc.ContentStreamHash(); localHash = null; if (file.Length.Equals(remoteDoc.ContentStreamLength)) { using (var f = file.Open(FileMode.Open, FileAccess.Read, FileShare.ReadWrite | FileShare.Delete)) { localHash = SHA1Managed.Create().ComputeHash(f); } if (remoteHash != null) { if (localHash != null && localHash.SequenceEqual(remoteHash)) { if (remoteDoc.LastModificationDate != null) { try { file.LastWriteTimeUtc = (DateTime)remoteDoc.LastModificationDate; } catch (IOException e) { Logger.Debug("Cannot set last modification date", e); } } file.Uuid = guid; MappedObject mappedObject = new MappedObject( file.Name, remoteDoc.Id, MappedObjectType.File, remoteDoc.Parents[0].Id, remoteDoc.ChangeToken, remoteDoc.ContentStreamLength ?? file.Length) { Guid = guid, LastLocalWriteTimeUtc = file.LastWriteTimeUtc, LastRemoteWriteTimeUtc = remoteDoc.LastModificationDate, LastChecksum = localHash, ChecksumAlgorithmName = "SHA-1" }; this.Storage.SaveMappedObject(mappedObject); return(true); } } } return(false); }
public void Write(IFileInfo fileInfo) { using (System.IO.Stream stream = fileInfo.Open(System.IO.FileMode.Create, System.IO.FileAccess.Write)) { new XDocument( new XElement("Catalog", new XElement("FileFormatVersion", Catalog.fileFormatVersion), new XElement("SoftwareVersion", Program.SoftwareVersion), new XElement("BaseDirectoryPath", this.BaseDirectoryPath), new XElement("CatalogedOn", this.CatalogedOn), new XElement("UpdatedOn", this.UpdatedOn), new XElement("Files", this.FileInstances.Select((fileInstance) => new XElement("f", new XAttribute("p", fileInstance.RelativePath), new XAttribute("h", fileInstance.FileContentsHash), new XAttribute("l", fileInstance.FileSize))) ) ) ).Save(stream); } }
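A round-trip sketch for the Read/Write pair above. The catalog instance, the working directory, and the Factory-style file info creation are assumptions taken from the surrounding snippets; only Read, Write, and BaseDirectoryPath are grounded in the code shown here.

// Serialize the catalog, then parse it back and sanity-check one field.
IFileInfo catalogFile = Factory.CreateFileInfo(Path.Combine(workingDir, "catalog.xml")); // path is illustrative
catalog.Write(catalogFile);
Catalog reloaded = Catalog.Read(catalogFile);
Console.WriteLine(reloaded.BaseDirectoryPath); // BaseDirectoryPath is round-tripped by Read()/Write()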
/// <summary>
/// Uploads the file content to the remote document.
/// </summary>
/// <returns>The SHA-1 hash of the uploaded file content.</returns>
/// <param name="localFile">Local file.</param>
/// <param name="doc">Remote document.</param>
/// <param name="transmission">Transmission status object.</param>
protected byte[] UploadFile(IFileInfo localFile, IDocument doc, Transmission transmission) {
    using (var file = localFile.Open(FileMode.Open, FileAccess.Read, FileShare.ReadWrite | FileShare.Delete)) {
        byte[] hash = null;
        IFileUploader uploader = FileTransmission.ContentTaskUtils.CreateUploader();
        using (var hashAlg = new SHA1Managed()) {
            try {
                uploader.UploadFile(doc, file, transmission, hashAlg);
                hash = hashAlg.Hash;
            } catch (Exception ex) {
                transmission.FailedException = ex;
                throw;
            }
        }

        transmission.Status = TransmissionStatus.FINISHED;
        return hash;
    }
}
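A hedged sketch of how a solver might consume the hash returned by the helper above. The exact call site is an assumption; the MappedObject field names mirror the MergeExistingFileWithRemoteFile snippet elsewhere in this section.

// After a successful upload, persist the new checksum and metadata
// (the surrounding solver instance, mappedObject, and transmission are assumed to exist).
byte[] hash = this.UploadFile(localFile, doc, transmission);
mappedObject.LastChecksum = hash;
mappedObject.ChecksumAlgorithmName = "SHA-1";
mappedObject.LastLocalWriteTimeUtc = localFile.LastWriteTimeUtc;
mappedObject.LastContentSize = localFile.Length;
this.Storage.SaveMappedObject(mappedObject);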
/// <summary>
/// Determines whether the file content differs from the content recorded in the specified object.
/// </summary>
/// <returns><c>true</c> if the file content differs from the specified object; otherwise, <c>false</c>.</returns>
/// <param name="file">File instance.</param>
/// <param name="obj">Object to check the file content against.</param>
/// <param name="actualHash">Contains the hash of the local file if it was scanned, or null if it was not.</param>
/// <param name="scanOnlyIfModificationDateDiffers">If set to <c>true</c>, the content scan runs only if the modification date differs from the stored one.</param>
public static bool IsContentChangedTo(this IFileInfo file, IMappedObject obj, out byte[] actualHash, bool scanOnlyIfModificationDateDiffers = false) {
    actualHash = null;
    if (obj == null) {
        throw new ArgumentNullException("obj");
    }

    if (obj.LastContentSize < 0) {
        throw new ArgumentOutOfRangeException(string.Format("Given LastContentSize {0} is invalid for files", obj.LastContentSize.ToString()));
    }

    if (!file.Exists) {
        throw new FileNotFoundException(string.Format("File {0} does not exist", file.FullName));
    }

    if (obj.LastChecksum == null) {
        return true;
    }

    if (file.Length == obj.LastContentSize) {
        if (scanOnlyIfModificationDateDiffers && obj.LastLocalWriteTimeUtc == file.LastWriteTimeUtc) {
            return false;
        } else {
            using (var f = file.Open(FileMode.Open, FileAccess.Read, FileShare.ReadWrite | FileShare.Delete)) {
                byte[] fileHash = SHA1Managed.Create().ComputeHash(f);
                actualHash = fileHash;
                return !fileHash.SequenceEqual(obj.LastChecksum);
            }
        }
    } else {
        return true;
    }
}
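An illustrative call site for the extension above. Here mappedObject is assumed to come from the sync metadata storage, and Upload stands in for whatever transfer step follows; both names are placeholders.

// Decide whether an upload is needed; actualHash is populated only when the file was actually scanned.
byte[] actualHash;
if (localFile.IsContentChangedTo(mappedObject, out actualHash, scanOnlyIfModificationDateDiffers: true)) {
    Upload(localFile, actualHash); // hypothetical transfer step
} else {
    // size, checksum and (optionally) modification date match: nothing to transfer
}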
public async System.Threading.Tasks.Task GetBlogHeadTest(IFileInfo sample, IEnumerable <string> tags, string title, bool valid) { IFormatter formatter = new Formatter(); formatter.DisableSummary(); using var fileReader = new StreamReader(sample.Open(FileMode.Open)); if (!valid) { await Assert.ThrowsAsync <FormatException>(() => formatter.GetBlogHeadAsync(sample, fileReader)).ConfigureAwait(false); } else { var entry = await formatter.GetBlogHeadAsync(sample, fileReader).ConfigureAwait(false); var entryTags = entry.Tags; entry.Title.Should().Be(title); entry.Url.Should().Be(Path.Combine(RoutePathInfo.ContentPath, Path.GetFileNameWithoutExtension(sample.Name)).Replace('\\', '/')); entryTags.Should().BeEquivalentTo(tags); } }
private async Task BuildFileAsync(string hashDir, HashAlgorithm crypto, IFileInfo file) {
    Stream fileStream = file.Open(FileMode.Open, FileAccess.Read);
    Stream hashStream = null;
    byte[] hash;
    try {
        string fileName = file.GetRelativeName(_filePath, _fileType.DirectorySearchOption);
        if (_hashFileMap.TryGetValue(fileName, out IFileInfo hashFile)) {
            hashStream = hashFile.Open(FileMode.OpenOrCreate);
            hash = await _changeDetector.DetectAsync(fileStream, hashStream).ConfigureAwait(false);
            _hashFileMap.TryRemove(fileName, out _);
        } else {
            // new hash
            _fileSystem.Directory.CreateDirectoriesIfNotExist(hashDir, fileName);
            hashStream = _fileSystem.File.Create($"{Path.Combine(hashDir, fileName)}.hashfile");
            hash = crypto.ComputeHash(fileStream);
        }

        if (hash != null) {
            await _fileType.SaveAsync(file, fileStream).ConfigureAwait(false);
            await _changeDetector.WriteHashAsync(hash, hashStream).ConfigureAwait(false);
        }
    } finally {
        fileStream.Close();
        hashStream?.Close();
    }
}
private Guid GetModelItemIdFromFile(IFileInfo file) { using (var stream = file.Open(FileMode.Open, FileAccess.Read)) { return GetModelItemIdFromStream(stream); } }
private Stream GetFileStream (IFileInfo fileInfo) { bool onErrorRetry; do { try { return fileInfo.Open (FileMode.Open, FileAccess.Read, FileShare.Read); } catch (FileNotFoundException ex) { onErrorRetry = OnFileOpenError (this, new FileOpenExceptionEventArgs (fileInfo.FullName, ex)); } catch (DirectoryNotFoundException ex) { onErrorRetry = OnFileOpenError (this, new FileOpenExceptionEventArgs (fileInfo.FullName, ex)); } catch (IOException ex) { onErrorRetry = OnFileOpenError (this, new FileOpenExceptionEventArgs (fileInfo.FullName, ex)); } catch (UnauthorizedAccessException ex) { onErrorRetry = OnFileOpenError (this, new FileOpenExceptionEventArgs (fileInfo.FullName, ex)); } } while (onErrorRetry); return null; }
public static IFileStream OpenRead([NotNull] this IFileInfo fileInfo) { Guard.NotNull(fileInfo, nameof(fileInfo)); return(fileInfo.Open(FileMode.Open, FileAccess.Read, FileShare.Read)); }
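A short usage sketch for the OpenRead/OpenText/AppendText style extensions above. The factory variable and path are illustrative, and combining a factory-created IFileInfo with these extensions assumes they belong to the same abstraction; AsStream() is taken from the AppendText/OpenText snippets.

// Read the first line of a file through the abstraction, then append a note to it.
IFileInfo log = factory.CreateFileInfo(@"C:\temp\app.log"); // factory and path are assumptions
using (IFileStream stream = log.OpenRead())
using (var reader = new StreamReader(stream.AsStream())) {
    Console.WriteLine(reader.ReadLine());
}

using (StreamWriter writer = log.AppendText()) {
    writer.WriteLine("processed");
}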
/// <summary>
/// Uploads the file content to the remote document by using the private working copy (PWC).
/// </summary>
/// <returns>The SHA-1 hash of the uploaded file content.</returns>
/// <param name="localFile">Local file.</param>
/// <param name="doc">Remote document.</param>
/// <param name="transmission">Transmission status object.</param>
/// <param name="mappedObject">Mapped object saved in <c>Storage</c>.</param>
protected byte[] UploadFileWithPWC(IFileInfo localFile, ref IDocument doc, Transmission transmission, IMappedObject mappedObject = null) {
    byte[] checksum = null;
    var docPWC = this.LoadRemotePWCDocument(doc, ref checksum);
    using (var file = localFile.Open(FileMode.Open, FileAccess.Read, FileShare.ReadWrite | FileShare.Delete)) {
        if (checksum != null) {
            // Verify the checksum of the content already uploaded to the PWC; drop it if it does not match.
            using (var hashAlg = new SHA1Managed()) {
                int bufsize = 8 * 1024;
                byte[] buffer = new byte[bufsize];
                for (long offset = 0; offset < docPWC.ContentStreamLength.GetValueOrDefault();) {
                    int readsize = bufsize;
                    if (readsize + offset > docPWC.ContentStreamLength.GetValueOrDefault()) {
                        readsize = (int)(docPWC.ContentStreamLength.GetValueOrDefault() - offset);
                    }

                    readsize = file.Read(buffer, 0, readsize);
                    hashAlg.TransformBlock(buffer, 0, readsize, buffer, 0);
                    offset += readsize;
                    if (readsize == 0) {
                        break;
                    }
                }

                hashAlg.TransformFinalBlock(new byte[0], 0, 0);
                if (!hashAlg.Hash.SequenceEqual(checksum)) {
                    docPWC.DeleteContentStream();
                }

                file.Seek(0, SeekOrigin.Begin);
            }
        }

        byte[] hash = null;
        var uploader = FileTransmission.ContentTaskUtils.CreateUploader(this.TransmissionStorage.ChunkSize);
        using (var hashAlg = new SHA1Reuse()) {
            try {
                using (var hashstream = new NonClosingHashStream(file, hashAlg, CryptoStreamMode.Read)) {
                    int bufsize = 8 * 1024;
                    byte[] buffer = new byte[bufsize];
                    for (long offset = 0; offset < docPWC.ContentStreamLength.GetValueOrDefault();) {
                        int readsize = bufsize;
                        if (readsize + offset > docPWC.ContentStreamLength.GetValueOrDefault()) {
                            readsize = (int)(docPWC.ContentStreamLength.GetValueOrDefault() - offset);
                        }

                        readsize = hashstream.Read(buffer, 0, readsize);
                        offset += readsize;
                        if (readsize == 0) {
                            break;
                        }
                    }
                }

                var document = doc;
                uploader.UploadFile(docPWC, file, transmission, hashAlg, false, (byte[] checksumUpdate, long length) => this.SaveRemotePWCDocument(localFile, document, docPWC, checksumUpdate, transmission));
                hash = hashAlg.Hash;
            } catch (Exception ex) {
                transmission.FailedException = ex;
                throw;
            }
        }

        this.TransmissionStorage.RemoveObjectByRemoteObjectId(doc.Id);

        var properties = new Dictionary<string, object>();
        properties.Add(PropertyIds.LastModificationDate, localFile.LastWriteTimeUtc);

        doc = this.Session.GetObject(docPWC.CheckIn(true, properties, null, string.Empty)) as IDocument;

        // Refresh is required; otherwise DotCMIS keeps using the cached object only.
        doc.Refresh();

        transmission.Status = TransmissionStatus.FINISHED;
        return hash;
    }
}
private static Dictionary<string, object> GetAllResources(IFileInfo resxFile) { using (var stream = resxFile.Open(FileMode.Open, FileAccess.Read)) { return ReadAllResxEntries(stream); } }
// TODO: Create a domain service
/// <summary>
/// Handles the specified request.
/// </summary>
/// <param name="request">The request.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The unique identifier assigned to the uploaded file.</returns>
/// <exception cref="NotFoundException">No file profile found with Id {request.FileProfileId}</exception>
/// <exception cref="System.IO.FileNotFoundException">File {file.FullName} not found</exception>
/// <exception cref="System.IO.DirectoryNotFoundException">Directory {fileProfile.ListeningDirectory} not found</exception>
public async Task<Guid> Handle(UploadFileRequest request, CancellationToken cancellationToken) {
    DateTime importLogDateTime = request.FileUploadedDateTime;

    // This will now create the import log and add an event for the file being uploaded
    Guid importLogId = Helpers.CalculateFileImportLogAggregateId(importLogDateTime.Date, request.EstateId);

    // Get the import log
    FileImportLogAggregate fileImportLogAggregate = await this.FileImportLogAggregateRepository.GetLatestVersion(importLogId, cancellationToken);

    if (fileImportLogAggregate.IsCreated == false) {
        // First file of the day, so create the import log
        fileImportLogAggregate.CreateImportLog(request.EstateId, importLogDateTime);
    }

    // Look up the file profile before moving the file
    FileProfile fileProfile = await this.FileProcessorManager.GetFileProfile(request.FileProfileId, cancellationToken);

    if (fileProfile == null) {
        throw new NotFoundException($"No file profile found with Id {request.FileProfileId}");
    }

    // Copy the file from the temp location to the file processing listening directory
    IFileInfo file = this.FileSystem.FileInfo.FromFileName(request.FilePath);

    if (file.Exists == false) {
        throw new FileNotFoundException($"File {file.FullName} not found");
    }

    String originalName = file.Name;

    if (this.FileSystem.Directory.Exists(fileProfile.ListeningDirectory) == false) {
        throw new DirectoryNotFoundException($"Directory {fileProfile.ListeningDirectory} not found");
    }

    // Read the file data
    String fileContent = null;

    // Open the file for reading
    using (Stream fs = file.Open(FileMode.OpenOrCreate, FileAccess.Read, FileShare.Read)) {
        // Wrap the stream in a StreamReader so the content can be read as text
        using (StreamReader sr = new StreamReader(fs)) {
            // Read the whole file content
            fileContent = await sr.ReadToEndAsync();
        }
    }

    Guid fileId = this.CreateGuidFromFileData(fileContent);

    String fileDestination = $"{fileProfile.ListeningDirectory}//{request.EstateId:N}-{fileId:N}";
    file.MoveTo(fileDestination, overwrite: true);

    // Update the import log aggregate
    fileImportLogAggregate.AddImportedFile(fileId, request.MerchantId, request.UserId, request.FileProfileId, originalName, fileDestination, request.FileUploadedDateTime);

    // Save changes
    await this.FileImportLogAggregateRepository.SaveChanges(fileImportLogAggregate, cancellationToken);

    return fileId;
}
public static FileStream Open(this IFileInfo fileInfo, FileMode mode, FileAccess access) { Verify(fileInfo); return(fileInfo.Open(mode, access, FileShare.None)); }
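The wrapper above hard-codes FileShare.None, so the handle is exclusive for its lifetime. A hedged illustration of the consequence; the file and the second caller are assumptions, and the exception type is the usual sharing-violation behavior on Windows.

// While the first handle is open, a second open on the same file fails with IOException.
using (FileStream first = fileInfo.Open(FileMode.Open, FileAccess.Read)) {
    try {
        using (FileStream second = fileInfo.Open(FileMode.Open, FileAccess.Read)) { }
    } catch (IOException) {
        // expected: the file is locked by the first, non-sharing handle
    }
}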
/// <summary>
/// Solve the specified situation by using the storage, localFile and remoteId.
/// Uploads the file content if the content has been changed; otherwise it simply saves the
/// last modification date.
/// </summary>
/// <param name="localFileSystemInfo">Local filesystem info instance.</param>
/// <param name="remoteId">Remote identifier or object.</param>
/// <param name="localContent">Hint if the local content has been changed.</param>
/// <param name="remoteContent">Information if the remote content has been changed.</param>
public override void Solve(
    IFileSystemInfo localFileSystemInfo,
    IObjectId remoteId,
    ContentChangeType localContent = ContentChangeType.NONE,
    ContentChangeType remoteContent = ContentChangeType.NONE)
{
    if (!localFileSystemInfo.Exists) {
        throw new ArgumentException("Given local path does not exist: " + localFileSystemInfo.FullName);
    }

    // Match local changes to remote changes and update them remotely
    IMappedObject mappedObject = null;
    try {
        string ea = localFileSystemInfo.GetExtendedAttribute(MappedObject.ExtendedAttributeKey);
        Guid guid;
        if (Guid.TryParse(ea, out guid)) {
            mappedObject = this.Storage.GetObjectByGuid(guid);
        }
    } catch (Exception) {
    }

    if (mappedObject == null) {
        mappedObject = this.Storage.GetObjectByLocalPath(localFileSystemInfo);
    }

    if (mappedObject == null) {
        throw new ArgumentException(string.Format("Could not find db entry for {0} => invoke crawl sync", localFileSystemInfo.FullName));
    }

    IFileInfo localFile = localFileSystemInfo as IFileInfo;
    if (localFile != null && localFile.IsContentChangedTo(mappedObject, scanOnlyIfModificationDateDiffers: true)) {
        Logger.Debug(string.Format("\"{0}\" is different from {1}", localFile.FullName, mappedObject.ToString()));
        OperationsLogger.Debug(string.Format("Local file \"{0}\" has been changed", localFile.FullName));
        IFileUploader uploader = FileTransmission.ContentTaskUtils.CreateUploader();
        var doc = remoteId as IDocument;
        FileTransmissionEvent transmissionEvent = new FileTransmissionEvent(FileTransmissionType.UPLOAD_MODIFIED_FILE, localFile.FullName);
        this.transmissionManager.AddTransmission(transmissionEvent);
        transmissionEvent.ReportProgress(new TransmissionProgressEventArgs { Started = true });
        using (var hashAlg = new SHA1Managed())
        using (var file = localFile.Open(FileMode.Open, FileAccess.Read, FileShare.ReadWrite | FileShare.Delete)) {
            try {
                uploader.UploadFile(doc, file, transmissionEvent, hashAlg);
            } catch (Exception ex) {
                transmissionEvent.ReportProgress(new TransmissionProgressEventArgs { FailedException = ex });
                if (ex.InnerException is CmisPermissionDeniedException) {
                    OperationsLogger.Warn(string.Format("Local changed file \"{0}\" has not been uploaded: PermissionDenied", localFile.FullName));
                    return;
                }

                throw;
            }

            mappedObject.LastChecksum = hashAlg.Hash;
        }

        mappedObject.LastChangeToken = doc.ChangeToken;
        mappedObject.LastRemoteWriteTimeUtc = doc.LastModificationDate;
        mappedObject.LastLocalWriteTimeUtc = localFile.LastWriteTimeUtc;
        mappedObject.LastContentSize = localFile.Length;

        OperationsLogger.Info(string.Format("Local changed file \"{0}\" has been uploaded", localFile.FullName));
        transmissionEvent.ReportProgress(new TransmissionProgressEventArgs { Completed = true });
    }

    if (this.ServerCanModifyDateTimes) {
        try {
            if (remoteId is IDocument) {
                (remoteId as IDocument).UpdateLastWriteTimeUtc(localFileSystemInfo.LastWriteTimeUtc);
                mappedObject.LastRemoteWriteTimeUtc = localFileSystemInfo.LastWriteTimeUtc;
            } else if (remoteId is IFolder) {
                (remoteId as IFolder).UpdateLastWriteTimeUtc(localFileSystemInfo.LastWriteTimeUtc);
                mappedObject.LastRemoteWriteTimeUtc = localFileSystemInfo.LastWriteTimeUtc;
            }
        } catch (CmisPermissionDeniedException) {
            Logger.Debug(string.Format("Locally changed modification date \"{0}\" is not uploaded to the server: PermissionDenied", localFileSystemInfo.LastWriteTimeUtc));
        }
    }

    mappedObject.LastLocalWriteTimeUtc = localFileSystemInfo.LastWriteTimeUtc;
    this.Storage.SaveMappedObject(mappedObject);
}
public static void ToDotFile <T>(this IObjectTree <T> tree, IFileInfo file) { using (var stream = file.Open(FileMode.CreateNew)) { tree.ToDotStream(stream); } }
internal virtual byte[] CalculateHashCode(IFileInfo file) { using (Stream stream = file.Open(FileMode.Open, FileAccess.Read)) { var hashProvider = SHA1.Create(); return hashProvider.ComputeHash(stream); } }
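A minimal comparison sketch built on the hash helper above; detector stands in for whatever class hosts CalculateHashCode, and the two IFileInfo instances are assumed to exist.

// Two files have identical content exactly when their SHA-1 digests match.
byte[] left = detector.CalculateHashCode(fileA);
byte[] right = detector.CalculateHashCode(fileB);
bool identical = left.SequenceEqual(right); // requires System.Linq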
private async Task<Unit> ProcessFile(Guid fileId, Guid fileProfileId, String fileName, CancellationToken cancellationToken) {
    IFileInfo inProgressFile = null;
    FileProfile fileProfile = null;

    try {
        fileProfile = await this.FileProcessorManager.GetFileProfile(fileProfileId, cancellationToken);

        if (fileProfile == null) {
            throw new NotFoundException($"No file profile found with Id {fileProfileId}");
        }

        // Check that the processed/failed directories exist
        if (this.FileSystem.Directory.Exists(fileProfile.ProcessedDirectory) == false) {
            Logger.LogWarning($"Creating Directory {fileProfile.ProcessedDirectory} as not found");
            this.FileSystem.Directory.CreateDirectory(fileProfile.ProcessedDirectory);
        }

        if (this.FileSystem.Directory.Exists(fileProfile.FailedDirectory) == false) {
            Logger.LogWarning($"Creating Directory {fileProfile.FailedDirectory} as not found");
            this.FileSystem.Directory.CreateDirectory(fileProfile.FailedDirectory);
        }

        inProgressFile = this.FileSystem.FileInfo.FromFileName(fileName);

        if (inProgressFile.Exists == false) {
            throw new FileNotFoundException($"File {inProgressFile.FullName} not found");
        }

        FileAggregate fileAggregate = await this.FileAggregateRepository.GetLatestVersion(fileId, cancellationToken);

        if (fileAggregate.IsCreated == false) {
            throw new InvalidOperationException($"File with Id {fileId} not created");
        }

        String fileContent = null;

        // Open the file for reading
        using (Stream fs = inProgressFile.Open(FileMode.OpenOrCreate, FileAccess.Read, FileShare.Read)) {
            // Wrap the stream in a StreamReader so the content can be read as text
            using (StreamReader sr = new StreamReader(fs)) {
                // Read the whole file content
                fileContent = await sr.ReadToEndAsync();
            }
        }

        if (String.IsNullOrEmpty(fileContent) == false) {
            String[] fileLines = fileContent.Split(fileProfile.LineTerminator);

            foreach (String fileLine in fileLines) {
                fileAggregate.AddFileLine(fileLine);
            }

            await this.FileAggregateRepository.SaveChanges(fileAggregate, cancellationToken);
        }

        Logger.LogInformation($"About to move file {inProgressFile.Name} to [{fileProfile.ProcessedDirectory}]");

        // Move the processed file out of the in-progress location
        inProgressFile.MoveTo($"{fileProfile.ProcessedDirectory}/{inProgressFile.Name}");

        return new Unit();
    } catch (Exception e) {
        if (inProgressFile != null && fileProfile != null) {
            inProgressFile.MoveTo($"{fileProfile.FailedDirectory}/{inProgressFile.Name}");
        }

        Logger.LogError(e);
        throw;
    }
}
public static void MergeChangesIntoResx(ResxDifferences changes, IFileInfo targetResxFile) { Dictionary<string, object> currentEntries; using (var stream = targetResxFile.Open(FileMode.Open, FileAccess.Read)) { currentEntries = ReadAllResxEntries(stream); } using (var newStream = targetResxFile.Open(FileMode.Truncate, FileAccess.Write)) { MergeChangesIntoResx(changes, currentEntries, newStream); } }
private bool LoadCacheFile(IFileInfo target, IDocument remoteDocument, IFileSystemInfoFactory fsFactory) { if (this.TransmissionStorage == null) { return(false); } IFileTransmissionObject obj = this.TransmissionStorage.GetObjectByRemoteObjectId(remoteDocument.Id); if (obj == null) { return(false); } IFileInfo localFile = fsFactory.CreateFileInfo(obj.LocalPath); if (!localFile.Exists) { return(false); } if (obj.LastChangeToken != remoteDocument.ChangeToken || localFile.Length != obj.LastContentSize) { localFile.Delete(); return(false); } try { byte[] localHash; using (var f = localFile.Open(FileMode.Open, FileAccess.Read, FileShare.None)) { localHash = SHA1Managed.Create().ComputeHash(f); } if (!localHash.SequenceEqual(obj.LastChecksum)) { localFile.Delete(); return(false); } if (target.FullName != obj.LocalPath) { if (target.Exists) { Guid?uuid = target.Uuid; if (uuid != null) { localFile.Uuid = uuid; } target.Delete(); } localFile.MoveTo(target.FullName); target.Refresh(); } return(true); } catch (Exception) { localFile.Delete(); return(false); } }
public static IFileStream OpenWrite([NotNull] this IFileInfo fileInfo) { Guard.NotNull(fileInfo, nameof(fileInfo)); return(fileInfo.Open(FileMode.OpenOrCreate, FileAccess.Write)); }
private byte[] GetBytesFromFile (IFileInfo fileInfo) { using (var stream = fileInfo.Open (FileMode.Open, FileAccess.Read, FileShare.None)) { using (BinaryReader binaryReader = new BinaryReader (stream)) { return binaryReader.ReadBytes ((int) fileInfo.Length); } } }