/// <summary>
/// Inserts the given metadata record, replacing any existing records with the same full path.
/// </summary>
/// <param name="meta">Metadata record to store.</param>
public void Upsert(FileMeta meta)
{
    var collection = GetCollection();

    // Remove any stale records for this path before inserting the fresh one.
    collection.DeleteMany(existing => existing.FullPath == meta.FullPath);
    collection.Insert(meta);
}
/// <summary>
/// Reads the metadata header from the beginning of a file.
/// </summary>
/// <param name="stream">File stream used for reading.</param>
/// <returns>Number of bytes consumed and the deserialized <see cref="FileMeta"/> header.</returns>
/// <exception cref="IncompatibleSerializationVersionException">
/// Thrown when the file's serialization version does not match the expected one.
/// </exception>
private static (int readed, FileMeta data) ReadMetaHeader(FileStream stream)
{
    var position = 0;

    // The first byte holds the serialization version; reject incompatible files early.
    byte version = ReadBytes(stream, sizeof(byte), position)[0];
    position += sizeof(byte);

    if (version != _serializationVersion)
    {
        throw new IncompatibleSerializationVersionException("Incompatible serialization version!");
    }

    // The next four bytes encode the length of the serialized header that follows.
    byte[] headerLengthBytes = ReadBytes(stream, sizeof(int), position);
    position += headerLengthBytes.Length;
    int headerLength = BitConverter.ToInt32(headerLengthBytes, 0);

    // Read and deserialize the header payload itself.
    byte[] headerBytes = ReadBytes(stream, headerLength, position);
    position += headerBytes.Length;
    FileMeta header = BytesToObject<FileMeta>(headerBytes);

    return (position, header);
}
/// <summary>
/// Returns cached metadata for <paramref name="filePath"/>, computing and persisting it on a cache miss.
/// </summary>
/// <param name="filePath">Full path of the file to describe.</param>
/// <returns>The file's metadata, or <c>null</c> when the file cannot be accessed.</returns>
private FileMeta? GetFileMeta(string filePath)
{
    var meta = _fileMetaRepository.Find(filePath);
    if (meta is not null)
    {
        return meta;
    }

    try
    {
        var fileInfo = new FileInfo(filePath);

        // FIX: open read-only with shared read access. FileMode.Open alone defaults to
        // FileAccess.ReadWrite, which throws UnauthorizedAccessException for read-only
        // files even though we only hash them, and blocks concurrent readers.
        using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
        using var hash = SHA256.Create();
        var sha256HashValue = hash.ComputeHash(stream);

        meta = new FileMeta()
        {
            FullPath = filePath,
            LastWriteTime = fileInfo.LastWriteTimeUtc,
            Sha256HashValue = sha256HashValue
        };
    }
    catch (UnauthorizedAccessException)
    {
        // Inaccessible file: report "no metadata" rather than failing the caller.
        return null;
    }

    _fileMetaRepository.Upsert(meta);
    return meta;
}
// HELPER PRIVATE METHODS
//---------------------------------------------------------------------
#region Class helper methods

// Matches one detail line of an FTP directory listing, capturing the month, day,
// year-or-time column, and file name. Built once: constructing a
// RegexOptions.Compiled regex on every call recompiles it each time and defeats
// the purpose of the Compiled flag.
private static readonly Regex FtpListDirectoryDetailsRegex = new Regex(
    @".*(?<month>(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))\s*(?<day>[0-9]*)\s*(?<yearTime>([0-9]|:)*)\s*(?<fileName>.*)",
    RegexOptions.Compiled | RegexOptions.IgnoreCase);

/// <summary>
/// Parses FTP directory-listing detail lines into file metadata records.
/// </summary>
/// <param name="metadataArray">Raw detail lines as returned by the FTP server.</param>
/// <returns>One <see cref="FileMeta"/> per input line.</returns>
private List<FileMeta> ParseFtpFolderMetadata(IEnumerable<string> metadataArray)
{
    var filesMeta = new List<FileMeta>();

    foreach (var item in metadataArray)
    {
        Match match = FtpListDirectoryDetailsRegex.Match(item);
        string fileName = match.Groups["fileName"].Value;

        // Reassemble the date columns into a single "day-month yearOrTime" string.
        var dateBuilder = new StringBuilder();
        dateBuilder.AppendFormat(
            "{0}-{1} {2}",
            match.Groups["day"].Value,
            match.Groups["month"].Value,
            match.Groups["yearTime"].Value);

        filesMeta.Add(new FileMeta(fileName, dateBuilder.ToString()));
    }

    return filesMeta;
}
/// <inheritdoc />
public async Task AddOrReplaceFileMetaToTransactionAsync(FileMeta fileMeta, string transactionId, string fileId)
{
    if (fileMeta == null)
    {
        // FIX: use nameof instead of the hard-coded "fileMeta" string so the
        // exception's ParamName stays correct under renames (matches the other guards).
        throw new ArgumentNullException(nameof(fileMeta));
    }

    if (transactionId == null)
    {
        throw new ArgumentNullException(nameof(transactionId));
    }

    if (string.IsNullOrWhiteSpace(transactionId))
    {
        throw new ArgumentException("Cannot be empty or contain only whitespaces.", nameof(transactionId));
    }

    if (fileId == null)
    {
        throw new ArgumentNullException(nameof(fileId));
    }

    if (string.IsNullOrWhiteSpace(fileId))
    {
        throw new ArgumentException("Cannot be empty or contain only whitespaces.", nameof(fileId));
    }

    // PUT the serialized meta to transaction/{transactionId}/file/{fileId} and
    // surface non-success responses as exceptions.
    await client.PutAsync(
        "transaction".JoinPaths(transactionId, "file", fileId),
        JsonContent.From(fileMeta))
        .EnsureSignhostSuccessStatusCodeAsync()
        .ConfigureAwait(false);
}
/// <summary>
/// Creates a transfer request for the given file, copying its metadata so the
/// request is independent of the caller's <see cref="FileMeta"/> instance.
/// </summary>
/// <param name="fileDetails">Metadata of the file to transfer.</param>
/// <param name="mFileStream">Stream backing the file data.</param>
/// <param name="mBufferSize">Chunk size used for the transfer.</param>
public FileTransReq(FileMeta fileDetails, Stream mFileStream, int mBufferSize)
{
    FileDetails = new FileMeta(fileDetails.FileName, fileDetails.FilePath, fileDetails.FileSize);
    bufferSize = mBufferSize;
    bytesProccessed = 0;
    fileDataStream = mFileStream;
}
/// <summary>
/// Handles an incoming file transfer request. For push transfers the user picks the
/// file to send (optionally reusing the previous transfer's metadata); otherwise the
/// transfer is accepted into a randomly named file in the temp directory.
/// </summary>
private async void Vsl_FileTransferRequested(object sender, FTEventArgs e)
{
    bool isPush = e.Mode == StreamMode.PushHeader || e.Mode == StreamMode.PushFile;

    if (isPush)
    {
        using (OpenFileDialog fd = new OpenFileDialog())
        {
            fd.InitialDirectory = Program.TempPath;

            if (fd.ShowDialog() != DialogResult.OK)
            {
                await vsl.FileTransfer.CancelAsync(e);
                return;
            }

            // Offer to reuse the metadata of the previous transfer; drop it on "No".
            if (lastMeta != null && MessageBox.Show("Sie können die Metadaten der letzen Dateiübertragung erneut verwenden", "Metadaten wiederverwenden?", MessageBoxButtons.YesNo) == DialogResult.No)
            {
                lastMeta = null;
            }

            await vsl.FileTransfer.AcceptAsync(e, fd.FileName, lastMeta);
        }
    }
    else
    {
        // Incoming transfer: listen for the metadata and receive into a random temp file.
        e.FileMetaReceived += Vsl_FTFileMetaReceived;
        await vsl.FileTransfer.AcceptAsync(e, Path.Combine(Program.TempPath, Path.GetRandomFileName()));
    }
}
/// <summary>
/// Stores the received file metadata, resumes the transfer, and detaches this handler.
/// </summary>
private async void Vsl_FTFileMetaReceived(object sender, EventArgs e)
{
    var args = (FTEventArgs)sender;
    lastMeta = args.FileMeta;

    await vsl.FileTransfer.ContinueAsync(args);

    // One-shot subscription: detach so the next transfer attaches a fresh handler.
    args.FileMetaReceived -= Vsl_FTFileMetaReceived;
}
/// <summary>
/// Parses the per-file metadata table of the archive and prints a summary of each entry.
/// Multi-byte fields are assembled from non-contiguous byte offsets within each
/// fixed-size record (layout presumably reverse-engineered — confirm against the format spec).
/// </summary>
void parseFileMeta()
{
    Console.WriteLine("[FileParser] File count is {0} ", this.headerParser.fileCount);

    for (int i = 0; i < this.headerParser.fileCount; i++)
    {
        // Each metadata record is fileMetaLength bytes, starting at metaStartAddress.
        int startAddress = this.headerParser.metaStartAddress + (this.fileMetaLength * i);
        Console.WriteLine("start Address is : 0x{0:X}", startAddress);

        FileMeta newFileMeta = new FileMeta();

        newFileMeta.fileNameStartOffset = this.archiveBytesPointer[(startAddress + 0x2)];
        newFileMeta.fileNameStartAddress = newFileMeta.fileNameStartOffset
            + ((this.archiveBytesPointer[(startAddress + 0x09)] << 8)
            | this.archiveBytesPointer[(startAddress + 0x0A)]);
        newFileMeta.fileNameLength = this.archiveBytesPointer[(startAddress + 0x3)];
        newFileMeta.fileName = this.getFileNameFromBytes(newFileMeta.fileNameStartAddress, newFileMeta.fileNameLength);

        newFileMeta.compressedDataSize =
            (this.archiveBytesPointer[(startAddress + 0x14)] << 16)
            | (this.archiveBytesPointer[(startAddress + 0xD)] << 8)
            | (this.archiveBytesPointer[(startAddress + 0x06)]);

        newFileMeta.fileDataStartAddress =
            (this.archiveBytesPointer[(startAddress + 0x19)] << 24)
            | (this.archiveBytesPointer[(startAddress + 0x12)] << 16)
            | (this.archiveBytesPointer[(startAddress + 0xB)] << 8)
            | (this.archiveBytesPointer[(startAddress + 0x4)]);

        newFileMeta.fileDataSize =
            (this.archiveBytesPointer[(startAddress + 0x13)] << 16)
            | (this.archiveBytesPointer[(startAddress + 0x0C)] << 8)
            | (this.archiveBytesPointer[(startAddress + 0x05)]);

        this.fileMetaList.Add(newFileMeta);
    }

    Console.WriteLine("---------------------------------------\n");

    foreach (FileMeta fit in this.fileMetaList)
    {
        Console.WriteLine("File NAME: {0}", fit.fileName);
        Console.WriteLine("File NAME start address: {0:X} \n", fit.fileNameStartAddress);
        // BUG FIX: the following lines used C printf specifiers (%d, 0x%4x, 0x%04x),
        // which Console.WriteLine never substitutes — the arguments were silently
        // ignored and the literal "%d"/"%4x" text was printed. Converted to .NET
        // composite format items ({0}, {0:X4}).
        Console.WriteLine("File NAME length: {0} character (byte) \n", fit.fileNameLength);
        Console.WriteLine("<<<<<<<<<<<<<<<<<<>>>>>>>>>>>>>>>>>>\n");
        Console.WriteLine("File DATA start Address: 0x{0:X4} \n", fit.fileDataStartAddress);
        Console.WriteLine("File DATA size: {0} byte \n", fit.fileDataSize);
        Console.WriteLine("File Data compressed size: 0x{0:X4} byte \n", fit.compressedDataSize);
        Console.WriteLine("---------------------------------------\n");
    }
}
/// <summary>
/// Unity import hook: assets whose path matches the sprite-sheet folder marker are
/// imported as multi-sprite textures.
/// </summary>
public void OnPreprocessTexture()
{
    if (!assetPath.Contains(SPRITE_SHEET_FOLDER_CONTAINS))
    {
        return;
    }

    currentFileMeta = new FileMeta(assetPath);

    var textureImporter = (TextureImporter)assetImporter;
    textureImporter.textureType = TextureImporterType.Sprite;
    textureImporter.spriteImportMode = SpriteImportMode.Multiple;
}
/// <summary>
/// Diffs the watched directory against the last known snapshot, recording new,
/// changed and deleted files, then swaps the snapshot dictionaries for the next pass.
/// </summary>
private void CheckForChangedFiles()
{
    _changes.Clear();

    ForeachEntityInDirectory(_watchedDirectory, f =>
    {
        var fullFilePath = f.FullName;

        // FIX: single TryGetValue lookup instead of ContainsKey + indexer (double hash lookup).
        if (!_knownEntities.TryGetValue(fullFilePath, out var fileMeta))
        {
            // New file
            RecordChange(f);
        }
        else
        {
            try
            {
                if (fileMeta.FileInfo.LastWriteTime != f.LastWriteTime)
                {
                    // File changed
                    RecordChange(f);
                }

                _knownEntities[fullFilePath] = new FileMeta(fileMeta.FileInfo, true);
            }
            catch (FileNotFoundException)
            {
                // Presumably thrown when the file vanishes between enumeration and the
                // timestamp read — mark it as not found again. TODO confirm which call throws.
                _knownEntities[fullFilePath] = new FileMeta(fileMeta.FileInfo, false);
            }
        }

        // FIX: reuse the cached fullFilePath (was a second f.FullName property read).
        _tempDictionary.Add(fullFilePath, new FileMeta(f));
    });

    foreach (var file in _knownEntities)
    {
        if (!file.Value.FoundAgain)
        {
            // File deleted
            RecordChange(file.Value.FileInfo);
        }
    }

    NotifyChanges();

    // Swap the two dictionaries so the snapshot just built becomes the known state.
    var swap = _knownEntities;
    _knownEntities = _tempDictionary;
    _tempDictionary = swap;
    _tempDictionary.Clear();
}
/// <summary>
/// Updates a file's metadata on behalf of the current user.
/// </summary>
/// <param name="id">Identifier of the file to update.</param>
/// <param name="fileMeta">New metadata values.</param>
/// <returns>200 with the service result on success; 400 with the result otherwise.</returns>
public async Task<IActionResult> Update(string id, [FromBody] FileMeta fileMeta)
{
    var result = await _fileService.Update(CurrentUser.TenantId, CurrentUser.Id, id, fileMeta);

    // Non-positive result codes indicate the update failed.
    return result.Code <= 0 ? BadRequest(result) : Ok(result);
}
/// <summary>
/// Stamps creation/modification timestamps and stores the record in the in-memory list.
/// </summary>
/// <param name="fileMeta">Record to add; mutated with UTC timestamps.</param>
/// <returns>The same record, with both timestamps set to the same instant.</returns>
public Task<FileMeta> Add(FileMeta fileMeta)
{
    // FIX: the original wrapped this trivial, CPU-only work in Task.Run, which just
    // burns a thread-pool thread; Task.FromResult keeps the Task-returning signature
    // without the overhead. Also read UtcNow once so DateCreated == DateModified
    // (the two separate reads could differ by ticks).
    var now = DateTime.UtcNow;
    fileMeta.DateCreated = now;
    fileMeta.DateModified = now;
    _records.Add(fileMeta);
    return Task.FromResult(fileMeta);
}
/// <summary>
/// Generates (and caches) a thumbnail for a picture file.
/// </summary>
/// <param name="filePath">Path of the picture inside the file-system abstraction.</param>
/// <param name="options">Thumbnail size/format options.</param>
/// <param name="cancellationToken">Token to cancel the operation.</param>
/// <returns>Succeeded with the thumbnail contents, or Failed for unsupported or unreadable files.</returns>
private async ValueTask<ThumbnailGeneratorGetThumbnailResult> GetPictureThumbnailAsync(NestedPath filePath, ThumbnailGeneratorGetThumbnailOptions options, CancellationToken cancellationToken = default)
{
    // FIX: ToLowerInvariant — ToLower() is culture-sensitive and mis-folds
    // e.g. "I" under Turkish locales, so ".JPG" could fail the extension check.
    var ext = filePath.GetExtension().ToLowerInvariant();
    if (!_pictureTypeExtensionList.Contains(ext))
    {
        return new ThumbnailGeneratorGetThumbnailResult(ThumbnailGeneratorGetThumbnailResultStatus.Failed);
    }

    try
    {
        var fileLength = await _fileSystem.GetFileSizeAsync(filePath, cancellationToken);
        var fileLastWriteTime = await _fileSystem.GetFileLastWriteTimeAsync(filePath, cancellationToken);

        using (var inStream = await _fileSystem.GetFileStreamAsync(filePath, cancellationToken))
        using (var outStream = new RecyclableMemoryStream(_bytesPool))
        {
            // Convert to the requested size/format, then rewind so the whole
            // converted image is captured below.
            this.ConvertImage(inStream, outStream, options.Width, options.Height, options.ResizeType, options.FormatType);
            outStream.Seek(0, SeekOrigin.Begin);

            var image = outStream.ToMemoryOwner();

            var fileMeta = new FileMeta(filePath, (ulong)fileLength, Timestamp.FromDateTime(fileLastWriteTime));
            var thumbnailMeta = new ThumbnailMeta(options.ResizeType, options.FormatType, (uint)options.Width, (uint)options.Height);
            var content = new ThumbnailContent(image);
            var cache = new ThumbnailCache(fileMeta, thumbnailMeta, new[] { content });

            await _thumbnailGeneratorRepository.ThumbnailCaches.InsertAsync(cache);

            return new ThumbnailGeneratorGetThumbnailResult(ThumbnailGeneratorGetThumbnailResultStatus.Succeeded, cache.Contents);
        }
    }
    catch (NotSupportedException e)
    {
        // Unsupported image: fall through and report failure.
        _logger.Warn(e);
    }
    catch (OperationCanceledException e)
    {
        _logger.Debug(e);
    }
    catch (Exception e)
    {
        _logger.Error(e);
        throw;
    }

    return new ThumbnailGeneratorGetThumbnailResult(ThumbnailGeneratorGetThumbnailResultStatus.Failed);
}
/// <inheritdoc />
public async Task AddOrReplaceFileMetaToTransactionAsync(
    FileMeta fileMeta,
    string transactionId,
    string fileId,
    CancellationToken cancellationToken = default)
{
    // Delegate to the inner client, wrapped in the retry policy so the caller's
    // cancellation token flows through to each attempt.
    Task Attempt(CancellationToken ct) =>
        client.AddOrReplaceFileMetaToTransactionAsync(fileMeta, transactionId, fileId, ct);

    await retryPolicy.ExecuteAsync(Attempt, cancellationToken);
}
// NOTE(review): the method name says "ShouldReturnEmptyCollection", but every assertion
// below expects a NON-empty, fully parsed file — the name looks stale; confirm intent.
public void TestGetFilesForFileReceivedFromProviderShouldReturnEmptyCollection()
{
    // Arrange: a single header/data/footer file handed back by the mocked provider.
    var date = new DateTime(2016, 10, 22);
    var fileMeta = new FileMeta
    {
        FileName = "name",
        FilePath = "path",
        FileSize = 1234,
        // NOTE(review): ToShortDateString is culture-dependent — presumably the header
        // parser round-trips it with the same current culture; verify on CI locales.
        Lines = new[] { $"H|{date.ToShortDateString()}|Employee Status", "D|John Walsh|456RT4|True", "F|1" }
    };
    provider.Setup(x => x.GetFiles()).Returns(new[] { fileMeta });

    // Act: parse using the typed header/data/footer line definitions.
    var parsedfiles = engine.GetFiles <HeaderLine, DataLine, FooterLine>();

    // Assert: file metadata is passed through unchanged.
    Assert.IsNotEmpty(parsedfiles);
    Assert.That(parsedfiles[0].FileMeta.FileName, Is.EqualTo(fileMeta.FileName));
    Assert.That(parsedfiles[0].FileMeta.FilePath, Is.EqualTo(fileMeta.FilePath));
    Assert.That(parsedfiles[0].FileMeta.FileSize, Is.EqualTo(fileMeta.FileSize));
    Assert.That(parsedfiles[0].FileMeta.Lines, Is.EqualTo(fileMeta.Lines));

    // Header line is parsed into its typed fields without errors.
    Assert.IsAssignableFrom <HeaderLine>(parsedfiles[0].Header);
    Assert.That(parsedfiles[0].Header.Index, Is.EqualTo(0));
    Assert.That(parsedfiles[0].Header.Type, Is.EqualTo(LineType.Header));
    Assert.IsEmpty(parsedfiles[0].Header.Errors);
    Assert.That(parsedfiles[0].Header.Date, Is.EqualTo(date));
    Assert.That(parsedfiles[0].Header.Name, Is.EqualTo("Employee Status"));

    // The single data line is parsed into its typed fields without errors.
    Assert.IsAssignableFrom <DataLine>(parsedfiles[0].Data[0]);
    Assert.That(parsedfiles[0].Data[0].Index, Is.EqualTo(0));
    Assert.That(parsedfiles[0].Data[0].Type, Is.EqualTo(LineType.Data));
    Assert.IsEmpty(parsedfiles[0].Data[0].Errors);
    Assert.That(parsedfiles[0].Data[0].Employee, Is.EqualTo("John Walsh"));
    Assert.That(parsedfiles[0].Data[0].Reference, Is.EqualTo("456RT4"));
    Assert.That(parsedfiles[0].Data[0].InService, Is.EqualTo(true));

    // Footer line reports the expected record count.
    Assert.IsAssignableFrom <FooterLine>(parsedfiles[0].Footer);
    Assert.That(parsedfiles[0].Footer.Index, Is.EqualTo(0));
    Assert.That(parsedfiles[0].Footer.Type, Is.EqualTo(LineType.Footer));
    Assert.IsEmpty(parsedfiles[0].Footer.Errors);
    Assert.That(parsedfiles[0].Footer.TotalRecords, Is.EqualTo(1));
}
// Sends a single file to the server: first a FileMeta packet (name, length, MD5),
// then — if the server reports a name collision, after the user's version-control
// decision is sent back — the raw file bytes, and finally waits for the result ACK.
private void SendFile(string fileName)
{
    FileInfo f = new FileInfo(fileName);
    string md5 = PUtility.CalculateMD5(fileName);

    // Announce the upload with name, length and checksum.
    FileMeta fileMeta = new FileMeta(f.Length, fileName, md5);
    fileMeta.Type = (int)PacketType.FileMeta;
    Packet.Serialize(fileMeta).CopyTo(this.sendBuf, 0);
    this.Send();

    Recv();
    ACK ack = (ACK)Packet.Deserialize(this.recvBuf);
    if (ack.isOK == false)
    {
        // same name file
        // The server reported a name collision: ask the user whether to proceed
        // with version control, and report the decision back to the server.
        ack = new ACK(false);
        if (MessageBox.Show("A same name already exists.\r\n\r\n" + "Do you want version control?", "FM", MessageBoxButtons.YesNo) == DialogResult.Yes)
        {
            ack.isOK = true;
        }
        Packet.Serialize(ack).CopyTo(this.sendBuf, 0);
        this.Send();
    }

    // Stream the file content in one shot; empty files send no payload.
    // NOTE(review): File.ReadAllBytes loads the whole file into memory — fine for
    // small files, problematic for large ones.
    if (f.Length != 0)
    {
        byte[] file = File.ReadAllBytes(fileName);
        this.stream.Write(file, 0, file.Length);
        this.stream.Flush();
    }

    // Wait for the final upload result from the server.
    Recv();
    ack = (ACK)Packet.Deserialize(this.recvBuf);
    if (ack.isOK)
    {
        // Success is silent; the commented-out message box said "Upload succeeded!" (Korean).
        //MessageBox.Show("성공적으로 업로드하였습니다!");
    }
    else
    {
        // Message text means "File upload failed" (Korean).
        MessageBox.Show("파일 업로드 실패");
    }
}
/// <summary>
/// Looks up the in-progress transfer matching the given file's name and path.
/// </summary>
/// <param name="mFileMeta">Metadata identifying the file.</param>
/// <returns>The matching transfer request, or null when none is tracked.</returns>
public FileTransReq GetFileTransReqFromFileMeta(FileMeta mFileMeta)
{
    string fileName = mFileMeta.FileName;
    string filePath = mFileMeta.FilePath;

    foreach (FileTransReq candidate in fileTransReqs)
    {
        bool sameFile = candidate.FileDetails.FileName == fileName
            && candidate.FileDetails.FilePath == filePath;

        if (sameFile)
        {
            return candidate;
        }
    }

    // No transfer is tracked for this file.
    return null;
}
/// <summary>
/// Verifies that AddOrReplaceFileMetaToTransactionAsync issues a PUT whose body
/// contains the serialized file meta for the expected transaction/file route.
/// </summary>
// BUG FIX: was 'async void' — exceptions in async void tests are unobservable and
// the runner cannot await completion; async Task is required for reliable results.
public async Task when_AddOrReplaceFileMetaToTransaction_is_called_then_the_request_body_should_contain_the_serialized_file_meta()
{
    // Arrange: expect exactly one PUT with the known serialized body.
    var mockHttp = new MockHttpMessageHandler();
    mockHttp.Expect(HttpMethod.Put, "http://localhost/api/transaction/transactionId/file/fileId")
        .WithContent(RequestBodies.AddOrReplaceFileMetaToTransaction)
        .Respond(HttpStatusCode.OK);

    using (var httpClient = mockHttp.ToHttpClient())
    {
        var signhostApiClient = new SignHostApiClient(settings, httpClient);

        var fileSignerMeta = new FileSignerMeta
        {
            FormSets = new string[] { "SampleFormSet" }
        };

        var field = new Field
        {
            Type = "Check",
            Value = "I agree",
            Location = new Location
            {
                Search = "test"
            }
        };

        FileMeta fileMeta = new FileMeta
        {
            Signers = new Dictionary<string, FileSignerMeta>
            {
                { "someSignerId", fileSignerMeta }
            },
            FormSets = new Dictionary<string, IDictionary<string, Field>>
            {
                { "SampleFormSet", new Dictionary<string, Field> { { "SampleCheck", field } } }
            }
        };

        // Act.
        await signhostApiClient.AddOrReplaceFileMetaToTransactionAsync(fileMeta, "transactionId", "fileId");
    }

    // Assert: the expected request was actually made.
    mockHttp.VerifyNoOutstandingExpectation();
}
/// <summary>
/// Find block with valid range for finding value
/// </summary>
/// <param name="stream">File stream used for read</param>
/// <param name="metadata">File metadata</param>
/// <param name="keyHash">Searching hash value</param>
/// <param name="headerLength">File header size (offset for first block)</param>
/// <param name="method">Searching method</param>
/// <returns>Data array of appropriate block, or (-1, null) when no block contains the key</returns>
private (int blockID, DataItem[] blockData) FindBlockData(FileStream stream, FileMeta metadata, int keyHash, int headerLength, SearchMethod method)
{
    int left = 0;
    int right = (metadata.BuildValuesCount - 1);
    int mid;
    int maxVal = metadata.BuildMax;
    int minVal = metadata.BuildMin;

    // Search while the interval is non-empty and the key lies inside the built value range.
    while (left <= right && keyHash >= minVal && keyHash <= maxVal)
    {
        // Pick a probe index according to the configured search method.
        mid = CalculateMiddle(left, right, minVal, maxVal, keyHash, method);

        // Get blockID with item with middle index
        int blockId = mid / _itemsForBlock;

        // BUG FIX: the original guard used '&&' ("mid < 0 && mid > count"), which can
        // never be true, so out-of-range probe indexes were silently accepted; '||'
        // restores the intended bounds check.
        if (mid < 0 || mid > metadata.ActualValuesCount)
        {
            throw new ApplicationException("Invalid operation! Out of range!");
        }

        // Read block header
        BlockMeta block = ReadBlockHeader(stream, blockId, headerLength, metadata.BlockSize);

        if (block.Min <= keyHash && keyHash <= block.Max)
        {
            // The key falls inside this block's range: read and return its data.
            DataItem[] data = ReadBlock(stream, blockId, headerLength, metadata.BlockSize).blockdata;
            return (blockId, data);
        }

        if (block.Max < keyHash)
        {
            // Key is above this block: continue in the upper part.
            left = mid + 1;
        }
        else
        {
            // Key is below this block: continue in the lower part.
            right = mid - 1;
        }
    }

    // Valid block not found
    return (-1, null);
}
// Serves a file download request: echoes a FileMeta packet (with the file's size)
// back to the requester, streams the raw file bytes, then logs the outcome the
// peer reports via its final ACK.
private void HandleReqFile()
{
    FileMeta reqFile = (FileMeta)Packet.Deserialize(this.recvBuf);

    // UI updates must be marshalled onto the form thread via Invoke.
    this.Invoke((MethodInvoker)(() => { txtLog.AppendText(pathCur + reqFile.fileName + " start\r\n"); }));

    FileInfo f = new FileInfo(pathCur + reqFile.fileName);

    // Reply with the file's metadata (no checksum) so the peer knows how much data to expect.
    FileMeta fileMeta = new FileMeta(f.Length, reqFile.fileName, "");
    fileMeta.Type = (int)PacketType.FileMeta;
    Packet.Serialize(fileMeta).CopyTo(this.sendBuf, 0);
    this.Send();

    // Stream the file content in one shot; empty files send no payload.
    // NOTE(review): File.ReadAllBytes loads the whole file into memory — fine for
    // small files, problematic for large ones.
    if (f.Length != 0)
    {
        byte[] file = File.ReadAllBytes(pathCur + reqFile.fileName);
        this.stream.Write(file, 0, file.Length);
        this.stream.Flush();
    }

    // Wait for the peer to confirm the download and log the outcome.
    Recv();
    ACK ack = (ACK)Packet.Deserialize(this.recvBuf);
    if (ack.isOK)
    {
        this.Invoke((MethodInvoker)(() => { txtLog.AppendText("download " + reqFile.fileName + " success\r\n"); }));
    }
    else
    {
        this.Invoke((MethodInvoker)(() => { txtLog.AppendText("download " + reqFile.fileName + " failure\r\n"); }));
    }
}
/// <summary>
/// Registers this node with the balancer, reporting which block indexes of which
/// files it holds, and stores the block size the balancer returns.
/// </summary>
private async Task ConnectNode()
{
    // NOTE(review): consider reusing a shared HttpClient/IHttpClientFactory — a new
    // instance per call can exhaust sockets under load.
    HttpClient httpClient = new HttpClient();
    string url = $"http://{_configuration.BalancerHostName}:{_configuration.BalancerHostPort}/{_apiPrefix}/RegNode";

    Dictionary<string, FileMeta> fileMeta = new Dictionary<string, FileMeta>();
    foreach (var fileInfo in Files)
    {
        int fileSize = fileInfo.Value.Select(b => b.Index).Distinct().Max();

        // BUG FIX: the original passed the freshly built meta as the 'out' argument of
        // TryGetValue, so on a miss it was overwritten with the default value before
        // being added to the dictionary — meta.Indexes below then dereferenced a
        // default/null instance. Build the meta only when the key is not yet present.
        if (!fileMeta.TryGetValue(fileInfo.Key, out FileMeta meta))
        {
            meta = new FileMeta(fileSize)
            {
                FileName = fileInfo.Key,
                TotalBlockCount = fileInfo.Value.FirstOrDefault().TotalBlockCount
            };
            fileMeta.Add(fileInfo.Key, meta);
        }

        foreach (var info in fileInfo.Value)
        {
            meta.Indexes.Add(info.Index);
        }
    }

    NodeInfo nodeInfo = new NodeInfo
    {
        NodeUrl = _hostConfig.Url,
        PartialFiles = fileMeta.Values.ToList()
    };

    JsonContent content = new JsonContent(nodeInfo);
    var response = await httpClient.PostAsync(url, content);

    if (response.StatusCode == System.Net.HttpStatusCode.OK)
    {
        // The balancer answers the registration with the block size to use.
        string json = await response.Content.ReadAsStringAsync();
        BlockSize = JsonConvert.DeserializeObject<int>(json);
    }
}
/// <summary>
/// Uploads an attachment's content through the file gateway and returns the stored file's reference id.
/// </summary>
/// <param name="_a">Attachment whose content stream is uploaded.</param>
/// <param name="service">Service whose gateway carries the request.</param>
/// <param name="SolnId">Solution id the upload belongs to.</param>
/// <param name="isMq">True to send via the MQ request type, false for the internal request type.</param>
/// <returns>Reference id of the uploaded file.</returns>
public int UploadAttachment(Attachment _a, Service service, string SolnId, bool isMq)
{
    FileUploadResponse resp;

    _a.ContentStream.Seek(0, SeekOrigin.Begin);

    // BUG FIX: a single Stream.Read call may return fewer bytes than requested, which
    // would silently truncate the upload; CopyTo drains the entire stream.
    byte[] myFileContent;
    using (var buffer = new MemoryStream())
    {
        _a.ContentStream.CopyTo(buffer);
        myFileContent = buffer.ToArray();
    }

    FileMeta meta = new FileMeta
    {
        FileName = _a.Name,
        // Text after the last dot; falls back to the whole name when there is no dot.
        FileType = _a.Name.Split('.').Last(),
        Length = myFileContent.Length,
        FileCategory = Enums.EbFileCategory.File,
        MetaDataDictionary = new Dictionary<String, List<string>>(),
    };

    if (isMq)
    {
        FileUploadRequest request = new FileUploadRequest
        {
            FileByte = myFileContent,
            FileDetails = meta
        };
        request.SolnId = SolnId;
        resp = service.Gateway.Send<FileUploadResponse>(request);
    }
    else
    {
        FileUploadInternalRequest request = new FileUploadInternalRequest
        {
            FileByte = myFileContent,
            FileDetails = meta
        };
        request.SolnId = SolnId;
        resp = service.Gateway.Send<FileUploadResponse>(request);
    }

    return resp.FileRefId;
}
/// <summary>
/// Uploads a stream to the configured GCS bucket under the given file name and
/// returns the stored object's metadata.
/// </summary>
/// <param name="fileId">Identifier of the file (not currently part of the object name).</param>
/// <param name="fileName">Object name used in the bucket (flat layout).</param>
/// <param name="contentType">MIME type of the content.</param>
/// <param name="stream">Content to upload.</param>
/// <returns>Metadata (id, name, link) of the stored object.</returns>
public async Task<FileMeta> UploadAsync(Guid fileId, string fileName, string contentType, Stream stream)
{
    // Objects are stored flat, keyed by file name. (An alternative layout would
    // prefix the object name with fileId to give each file a unique folder.)
    var uploaded = await _storageClient.UploadObjectAsync(_gcpStorageConfig.BucketName, fileName, contentType, stream);

    var fileMeta = new FileMeta
    {
        Id = uploaded.Id,
        Name = uploaded.Name,
        Link = uploaded.SelfLink
    };

    _logger.LogInformation("uploaded to {bucket}|{@message}", _gcpStorageConfig.BucketName, fileMeta);

    return fileMeta;
}
/// <summary>
/// Reads the next chunk of the file being transferred.
/// </summary>
/// <param name="mCurFileTrans">Transfer whose next part is requested.</param>
/// <returns>The next file part, or null once all bytes have been processed.</returns>
public async Task<FilePartObj> GetNextFilePart(FileTransReq mCurFileTrans)
{
    // Clamp the chunk size to whatever remains of the file.
    var remaining = mCurFileTrans.FileDetails.FileSize - mCurFileTrans.BytesProcessed;
    int bufferLen = (int)Math.Min(remaining, this.bufferSize);

    if (bufferLen <= 0)
    {
        // Nothing more to be sent.
        return null;
    }

    FileMeta fileMetadata = mCurFileTrans.FileDetails;
    byte[] fileData = await mCurFileTrans.ReadBytes(bufferLen);

    return new FilePartObj(fileMetadata, fileData, mCurFileTrans.curFilePartNum, mCurFileTrans.TotalPartNum);
}
/// <summary>
/// Downloads the selected server entry: files are received directly; directories are
/// fetched as a temporary zip archive, extracted locally, and the archive deleted.
/// </summary>
private void btnDownload_Click(object sender, EventArgs e)
{
    string tag = listServer.SelectedItems[0].Tag.ToString();

    if (tag == "F")
    {
        RecvFile(servFile);
    }
    else if (tag == "D")
    {
        // Ask the server to send the directory as a zip, then receive and unpack it.
        FileMeta finfo = new FileMeta(0, servFile, "");
        finfo.Type = (int)PacketType.ReqDir;
        Packet.Serialize(finfo).CopyTo(this.sendBuf, 0);
        this.Send();

        string zipName = servFile + ".tmp.zip";
        RecvFile(zipName);

        string localZipPath = dirPath + "\\" + Path.GetFileName(zipName);
        PUtility.ExtractDirectoryTmp(localZipPath);
        File.Delete(localZipPath);
    }

    // Re-select the current tree node to force the client view to refresh.
    var item = viewClient.SelectedNode;
    viewClient.SelectedNode = null;
    viewClient.SelectedNode = item;
}
/// <summary>
/// Uploads the selected client entry: files are sent directly; directories are
/// compressed into a temporary zip archive, sent, and the archive deleted.
/// </summary>
private void btnUpload_Click(object sender, EventArgs e)
{
    string tag = listClient.SelectedItems[0].Tag.ToString();

    if (tag == "F")
    {
        SendFile(filePath);
    }
    else if (tag == "D")
    {
        string zipPath = fdirPath + ".tmp.zip";

        // Announce the directory transfer, then compress and send the zip.
        FileMeta finfo = new FileMeta(0, zipPath, "");
        finfo.Type = (int)PacketType.SendDir;
        Packet.Serialize(finfo).CopyTo(this.sendBuf, 0);
        this.Send();

        PUtility.CompressDirectoryTmp(fdirPath);
        SendFile(zipPath);
        File.Delete(zipPath);
    }

    // Re-select the current tree node to force the server view to refresh.
    var item = viewServer.SelectedNode;
    viewServer.SelectedNode = null;
    viewServer.SelectedNode = item;
}
/// <summary>
/// Generates (and caches) a multi-frame thumbnail for a movie file.
/// </summary>
/// <param name="filePath">Path of the movie inside the file-system abstraction.</param>
/// <param name="options">Thumbnail size/format/sampling options.</param>
/// <param name="cancellationToken">Token to cancel the operation.</param>
/// <returns>Succeeded with the thumbnail contents, or Failed for unsupported or unreadable files.</returns>
private async ValueTask<ThumbnailGeneratorGetThumbnailResult> GetMovieThumbnailAsync(NestedPath filePath, ThumbnailGeneratorGetThumbnailOptions options, CancellationToken cancellationToken = default)
{
    // FIX: ToLowerInvariant — ToLower() is culture-sensitive and mis-folds
    // e.g. "I" under Turkish locales, so ".AVI" could fail the extension check.
    if (!_movieTypeExtensionList.Contains(filePath.GetExtension().ToLowerInvariant()))
    {
        return new ThumbnailGeneratorGetThumbnailResult(ThumbnailGeneratorGetThumbnailResultStatus.Failed);
    }

    try
    {
        var fileLength = await _fileSystem.GetFileSizeAsync(filePath, cancellationToken);
        var fileLastWriteTime = await _fileSystem.GetFileLastWriteTimeAsync(filePath, cancellationToken);

        // Sample frames across the movie and convert each to the requested size/format.
        var images = await this.GetMovieImagesAsync(filePath, options.MinInterval, options.MaxImageCount, options.Width, options.Height, options.ResizeType, options.FormatType, cancellationToken).ConfigureAwait(false);

        var fileMeta = new FileMeta(filePath, (ulong)fileLength, Timestamp.FromDateTime(fileLastWriteTime));
        var thumbnailMeta = new ThumbnailMeta(options.ResizeType, options.FormatType, (uint)options.Width, (uint)options.Height);
        var contents = images.Select(n => new ThumbnailContent(n)).ToArray();
        var cache = new ThumbnailCache(fileMeta, thumbnailMeta, contents);

        await _thumbnailGeneratorRepository.ThumbnailCaches.InsertAsync(cache);

        return new ThumbnailGeneratorGetThumbnailResult(ThumbnailGeneratorGetThumbnailResultStatus.Succeeded, cache.Contents);
    }
    catch (NotSupportedException e)
    {
        // Unsupported codec/container: fall through and report failure.
        _logger.Warn(e);
    }
    catch (OperationCanceledException e)
    {
        _logger.Debug(e);
    }
    catch (Exception e)
    {
        _logger.Error(e);
        throw;
    }

    return new ThumbnailGeneratorGetThumbnailResult(ThumbnailGeneratorGetThumbnailResultStatus.Failed);
}
/// <summary>
/// Prepares an incoming transfer: ensures the default storage folder exists,
/// creates an empty file for the payload, and wraps it in a transfer request.
/// </summary>
/// <param name="fileDetails">Metadata of the incoming file.</param>
/// <param name="bufferSize">Chunk size used for the transfer.</param>
/// <returns>A transfer request backed by a writable stream for the new file.</returns>
private async Task<FileTransReq> SetupTransmitionForNewFile(FileMeta fileDetails, int bufferSize)
{
    IFolder root = await fileSystem.GetFolderFromPathAsync("./");

    // NOTE(review): check-then-create is racy if two transfers start at once;
    // CreationCollisionOption.FailIfExists would then throw for the loser — confirm
    // whether concurrent setup is possible here.
    if (await root.CheckExistsAsync(DefaultFilePath) == ExistenceCheckResult.NotFound)
    {
        await root.CreateFolderAsync(DefaultFilePath, CreationCollisionOption.FailIfExists);
    }

    IFolder tempFolder = await fileSystem.GetFolderFromPathAsync(DefaultFilePath);

    // Any previous file with the same name is replaced.
    IFile newFile = await tempFolder.CreateFileAsync(fileDetails.FileName, CreationCollisionOption.ReplaceExisting);
    Stream fileStream = await newFile.OpenAsync(PCLStorage.FileAccess.ReadAndWrite);

    return new FileTransReq(fileDetails, fileStream, bufferSize);
}
/// <summary>
/// Verifies that a data-only file from the provider is parsed into two typed,
/// error-free SingleLine records with its metadata passed through unchanged.
/// </summary>
// NOTE(review): the method name says "ShouldReturnEmptyCollection", but every assertion
// expects a NON-empty parsed result — the name looks stale; renaming would change the
// test's reported identity, so it is only flagged here.
public void TestGetFilesForFileReceivedFromProviderShouldReturnEmptyCollection()
{
    // Arrange: a file with two "name|isMember" lines.
    var fileMeta = new FileMeta
    {
        FileName = "name",
        FilePath = "path",
        FileSize = 1234,
        Lines = new[] { "Jack Marias|false", "Samuel Dias|true" }
    };
    provider.Setup(x => x.GetFiles()).Returns(new[] { fileMeta });

    // Act.
    var parsedfiles = engine.GetFiles <SingleLine>();

    // Assert: file metadata is passed through unchanged.
    Assert.IsNotEmpty(parsedfiles);
    Assert.That(parsedfiles[0].FileMeta.FileName, Is.EqualTo(fileMeta.FileName));
    Assert.That(parsedfiles[0].FileMeta.FilePath, Is.EqualTo(fileMeta.FilePath));
    Assert.That(parsedfiles[0].FileMeta.FileSize, Is.EqualTo(fileMeta.FileSize));
    Assert.That(parsedfiles[0].FileMeta.Lines, Is.EqualTo(fileMeta.Lines));

    Assert.IsAssignableFrom <SingleLine>(parsedfiles[0].Data[0]);
    Assert.IsAssignableFrom <SingleLine>(parsedfiles[0].Data[1]);

    // First data line.
    Assert.That(parsedfiles[0].Data[0].Index, Is.EqualTo(0));
    Assert.That(parsedfiles[0].Data[0].Type, Is.EqualTo(LineType.Data));
    Assert.IsEmpty(parsedfiles[0].Data[0].Errors);
    Assert.That(parsedfiles[0].Data[0].Name, Is.EqualTo("Jack Marias"));
    Assert.That(parsedfiles[0].Data[0].IsMember, Is.EqualTo(false));

    // Second data line.
    Assert.That(parsedfiles[0].Data[1].Index, Is.EqualTo(1));
    Assert.That(parsedfiles[0].Data[1].Type, Is.EqualTo(LineType.Data));
    // BUG FIX: this assertion re-checked Data[0].Errors (copy-paste); it now
    // checks the second line's errors as the surrounding assertions intend.
    Assert.IsEmpty(parsedfiles[0].Data[1].Errors);
    Assert.That(parsedfiles[0].Data[1].Name, Is.EqualTo("Samuel Dias"));
    Assert.That(parsedfiles[0].Data[1].IsMember, Is.EqualTo(true));
}
/// <summary>
/// Diffs the watched directory against the last known snapshot, recording new,
/// changed and deleted files, then swaps the snapshot dictionaries for the next pass.
/// </summary>
private void CheckForChangedFiles()
{
    _changes.Clear();

    ForeachEntityInDirectory(_watchedDirectory, f =>
    {
        var fullFilePath = f.FullName;

        // FIX: single TryGetValue lookup instead of ContainsKey + indexer (double hash lookup).
        if (!_knownEntities.TryGetValue(fullFilePath, out var fileMeta))
        {
            // New file
            RecordChange(f);
        }
        else
        {
            try
            {
                if (fileMeta.FileInfo.LastWriteTime != f.LastWriteTime)
                {
                    // File changed
                    RecordChange(f);
                }

                _knownEntities[fullFilePath] = new FileMeta(fileMeta.FileInfo, true);
            }
            catch (FileNotFoundException)
            {
                // Presumably thrown when the file vanishes between enumeration and the
                // timestamp read — mark it as not found again. TODO confirm which call throws.
                _knownEntities[fullFilePath] = new FileMeta(fileMeta.FileInfo, false);
            }
        }

        // FIX: reuse the cached fullFilePath (was a second f.FullName property read).
        _tempDictionary.Add(fullFilePath, new FileMeta(f));
    });

    foreach (var file in _knownEntities)
    {
        if (!file.Value.FoundAgain)
        {
            // File deleted
            RecordChange(file.Value.FileInfo);
        }
    }

    NotifyChanges();

    // Swap the two dictionaries so the snapshot just built becomes the known state.
    var swap = _knownEntities;
    _knownEntities = _tempDictionary;
    _tempDictionary = swap;
    _tempDictionary.Clear();
}