/// <summary>
/// Finalizes the specified identifier.
/// </summary>
/// <param name="id">The identifier.</param>
/// <param name="exportPath">The export path.</param>
/// <returns><c>true</c> if an archive with the given identifier was found and saved; <c>false</c> otherwise.</returns>
public bool Finalize(long id, string exportPath)
{
    void saveProgress(object sender, SaveProgressEventArgs e)
    {
        switch (e.EventType)
        {
            case ZipProgressEventType.Saving_AfterWriteEntry:
                // Raise ProcessedFile once per entry written to the archive.
                ProcessedFile?.Invoke(this, EventArgs.Empty);
                break;
            default:
                break;
        }
    }

    if (queue.TryRemove(id, out var value))
    {
        value.SaveProgress += saveProgress;
        value.Save(exportPath);
        value.SaveProgress -= saveProgress;
        value.Dispose();
        return true;
    }

    return false;
}
/// <summary>
/// Returns <c>true</c> if the file was modified since it was last processed, or if it is new.
/// </summary>
/// <param name="m_filePath">Full file path, e.g. C:\\folder\\file.xls</param>
/// <returns><c>true</c> if the file is new or was modified; <c>false</c> otherwise.</returns>
public static bool FileModified(string m_filePath)
{
    Support.CreateFile(ProcessedFilesLog); // if the file already exists, nothing is done
    string[] ProcessedFiles = Support.getFileLines(ProcessedFilesLog);

    foreach (string ProcessedFile in ProcessedFiles)
    {
        string[] fields = ProcessedFile.Split(';');
        if (fields[0] == m_filePath && fields.Length > 1)
        {
            try
            {
                FileInfo FileInfo = new FileInfo(m_filePath);
                //LogFile.write_LogFile("FileModified?: " + FileInfo.LastWriteTime.ToString() + "==" + fields[1] + " and " + BuildMd5Checksum(m_filePath) + "==" + fields[2]);
                if (FileInfo.Name.Contains("~$"))
                {
                    return false; // file is a temporary file
                }
                if (FileInfo.LastWriteTime.ToString() == fields[1]) // && BuildMd5Checksum(m_filePath) == fields[2]
                {
                    return false; // file wasn't modified
                }
                return true; // file was modified
            }
            catch (Exception e)
            {
                LogFile.write_LogFile("Error verifying file modification for: " + m_filePath + " with message: " + e.Message);
                return false;
            }
        }
    }

    // File is new
    return true;
}
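A minimal sketch (not from the original source) of how an entry could be appended to the processed-files log so that FileModified can match it later; the "path;LastWriteTime;MD5" line format is inferred from the Split(';') indices above, and the helper name MarkFileAsProcessed is hypothetical.

// Hypothetical helper; BuildMd5Checksum and ProcessedFilesLog are taken from the code above.
public static void MarkFileAsProcessed(string filePath)
{
    var info = new FileInfo(filePath);
    string line = filePath + ";" + info.LastWriteTime.ToString() + ";" + BuildMd5Checksum(filePath);
    File.AppendAllText(ProcessedFilesLog, line + Environment.NewLine);
}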
public static List<ILine> GetOutline(this List<ILine> lines, ProcessedFile pf)
{
    var returnObj = new List<ILine>();
    foreach (var line in lines)
    {
        GetOutline(line, returnObj, pf);
    }
    return returnObj;
}
//Not completed
public void ProcessCsvFiles_Successfully()
{
    //Arrange
    var testStream = new MemoryStream(
        Encoding.UTF8.GetBytes("Account,Description,CurrencyCode,Amount" + '\n' + "Tesco,supermarket,GBP,34.5"));

    _mockTransactionRepository.Setup(
        x => x.AddTransaction(new Transaction
        {
            Account = "Tesco",
            Description = "supermarket",
            CurrencyCode = "GBP",
            Amount = Convert.ToDecimal(34.5)
        })).Returns(-1);

    _mockValidationService.Setup(x => x.IsValid(new FileUpload.BLL.Models.Transaction
    {
        Account = "Tesco",
        Description = "supermarket",
        CurrencyCode = "GBP",
        Amount = "34.5"
    })).Returns(new ConcurrentBag<string>());

    var expectedResult = new ProcessedFile
    {
        ErrorsList = new ConcurrentBag<string>(),
        TotalTransactionsProcessed = 1
    };

    _mockFileManager.Setup(x => x.ProcessCsvFilesAsync(testStream, It.IsAny<string>()))
        .ReturnsAsync(expectedResult);

    //Act
    var result = _fileManager.ProcessCsvFilesAsync(testStream, "Test.csv");

    //Assert
    // Not completed: assertions against expectedResult are still missing.
}
/// <summary>
/// Finalizes the specified identifier.
/// </summary>
/// <param name="id">The identifier.</param>
/// <param name="exportPath">The export path.</param>
/// <returns><c>true</c> if an archive with the given identifier was found and exported; <c>false</c> otherwise.</returns>
public bool Finalize(long id, string exportPath)
{
    if (queue.TryRemove(id, out var value))
    {
        // Need to do this manually as SharpCompress doesn't raise events
        using var stream = new System.IO.FileInfo(exportPath).Open(FileMode.Create, FileAccess.Write);
        using var writer = new ZipWriter(stream, new ZipWriterOptions(CompressionType.Deflate));

        foreach (var item in value.Entries.Where(p => !p.IsDirectory))
        {
            using var entryStream = item.OpenEntryStream();
            writer.Write(item.Key, entryStream, item.LastModifiedTime);

            // Raise ProcessedFile once per entry written.
            ProcessedFile?.Invoke(this, EventArgs.Empty);
        }

        value.Dispose();
        return true;
    }

    return false;
}
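A usage sketch for Finalize, assuming an instance named exporter of the containing class (its name is not shown here), a previously queued id, and that ProcessedFile is a plain EventHandler raised once per written entry as in the code above.

int written = 0;
exporter.ProcessedFile += (s, e) => written++; // count entries as they are written

if (exporter.Finalize(id, @"C:\exports\archive.zip"))
{
    Console.WriteLine($"Exported {written} entries for id {id}.");
}
else
{
    Console.WriteLine($"No pending archive found for id {id}.");
}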
private static void ProcessFile(ProcessedFiles files, string FilePath)
{
    if (files.ContainsKey(FilePath))
    {
        return;
    }

    var fileInfo = new FileInfo(FilePath);
    if (!fileInfo.Exists)
    {
        throw new Exception(FilePath + " not found.");
    }

    var pf = new ProcessedFile
    {
        FilePath = FilePath,
    };
    files[FilePath] = pf;

    List<ILine> list = null;
    using (var sr = new StreamReader(FilePath))
    {
        string content = sr.ReadToEnd();
        list = Parse(content);
    }

    var outline = list.GetOutline(pf);
    pf.Lines = outline;

    foreach (var referenceLine in pf.References)
    {
        string newPath = FilePath.NavigateTo(referenceLine.Value.ClientSideReference);
        ProcessFile(files, newPath);
    }
}
private static ProcessedFile ProcessFile(string fileName, LanguageProfile profile)
{
    var stats = new SourceStats();
    var inMultilineComment = false;

    // Wrap the stream in a using block so it is disposed even if reading throws.
    using (var fileStream = new FileStream(fileName, FileMode.Open))
    {
        foreach (var line in fileStream.ReadLines(Encoding.UTF8))
        {
            stats.TotalLines++;
            switch (GetLineType(line, profile, inMultilineComment))
            {
                case LineType.Empty:
                    stats.EmptyLines++;
                    break;
                case LineType.Source:
                    break;
                case LineType.Preprocessor:
                    stats.PreprocessorLines++;
                    break;
                case LineType.MultilineCommentBegin:
                    stats.CommentedLines++;
                    inMultilineComment = true;
                    break;
                case LineType.MultilineCommentEnd:
                case LineType.MultilineCommentInterspersedEnd:
                    stats.CommentedLines++;
                    inMultilineComment = false;
                    break;
                case LineType.MultilineCommentInterspersedBegin:
                    stats.CodeLines++;
                    inMultilineComment = true;
                    break;
                case LineType.MultilineCommentInterspersedEndContinued:
                    stats.CodeLines++;
                    inMultilineComment = false;
                    break;
                case LineType.Comment:
                case LineType.MultilineCommentMiddle:
                case LineType.MultilineCommentInline:
                    stats.CommentedLines++;
                    break;
            }
        }
    }

    // Derive code lines from the totals counted above.
    stats.CodeLines = stats.TotalLines - stats.CommentedLines - stats.EmptyLines;

    var file = new ProcessedFile(new FileInfo(fileName), profile, stats);
    return file;
}
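A usage sketch for the line-counting ProcessFile above, called from within the same class since the method is private. The LanguageProfile.CSharp accessor and the report.Stats property are assumptions, not shown in the original code; the counter names come from the switch above.

var profile = LanguageProfile.CSharp;                       // assumed way to obtain a profile
ProcessedFile report = ProcessFile(@"src\Program.cs", profile);
Console.WriteLine($"Total: {report.Stats.TotalLines}, Code: {report.Stats.CodeLines}, " +
                  $"Comments: {report.Stats.CommentedLines}, Empty: {report.Stats.EmptyLines}");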
private static void GetOutline(ILine line, List<ILine> OutputLines, ProcessedFile pf)
{
    var statement = line as IOpenBraceStatement;
    if (statement == null)
    {
        if (line.IsReference())
        {
            var refStatement = new ReferenceStatement(
                Line: line,
                ClientSideReference: line.GetReferencePath()
            );
            //var refStatement = line.ToReferenceStatement();
            pf.References[refStatement.ClientSideReference] = refStatement;
            OutputLines.Add(refStatement);
        }
        else
        {
            OutputLines.Add(line);
        }
        return;
    }

    bool recurse = true;
    if (statement.IsInterface())
    {
        //var interfaceStatement = statement.ToInterface();
        var interfaceStatement = new Interface(
            OpenBraceStatementBase: statement,
            LiveStatementBase: statement.LiveStatementBase,
            Line: statement.Line,
            Name: statement.GetInterfaceName()
        );
        pf.Interfaces[interfaceStatement.Name] = interfaceStatement;
        interfaceStatement.Process();
        statement = interfaceStatement as IOpenBraceStatement;
        recurse = false;
    }
    else if (statement.IsFunction())
    {
        //var functionStatement = statement.ToFunction();
        var functionStatement = new StaticFunction(
            OpenBraceStatementBase: statement,
            LiveStatementBase: statement.LiveStatementBase,
            Line: statement.Line,
            Name: statement.GetFunctionName()
        );
        string functionName = functionStatement.GetFullName();
        pf.Functions[functionName] = functionStatement;
    }
    else if (statement.IsClass())
    {
        var classStatement = new Class(
            OpenBraceStatementBase: statement,
            LiveStatementBase: statement.LiveStatementBase,
            Line: statement.Line,
            Name: statement.GetClassName()
        );
        pf.Classes[classStatement.Name] = classStatement;
        statement = classStatement;
    }
    else if (statement.IsModule())
    {
        //var moduleStatement = statement.ToModule();
        var moduleStatement = new Module(
            OpenBraceStatementBase: statement,
            LiveStatementBase: statement.LiveStatementBase,
            Line: statement.Line,
            FullName: statement.FrontTrimmedLiveStatement.Trim('{').Trim()
        );
        pf.Modules[moduleStatement.FullName] = moduleStatement;
        statement = moduleStatement;
    }

    OutputLines.Add(statement);

    if (statement.Children != null && recurse)
    {
        var newc = new List<ILine>();
        foreach (var childLine in statement.Children)
        {
            GetOutline(childLine, newc, pf);
        }
        statement.Children = newc;
    }
}
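A sketch tying the outline extension to ProcessedFile, assuming it runs within the same class so the private Parse method is accessible; the dictionary properties read here (Classes, Interfaces, Functions, References) are the ones populated in GetOutline above.

var pf = new ProcessedFile { FilePath = "app.ts" };
List<ILine> parsed = Parse(File.ReadAllText(pf.FilePath));
pf.Lines = parsed.GetOutline(pf);
Console.WriteLine($"{pf.Classes.Count} classes, {pf.Interfaces.Count} interfaces, " +
                  $"{pf.Functions.Count} functions, {pf.References.Count} references");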
public bool WasFileUpdated(ProcessedFile currentFile)
{
    return System.IO.File.GetLastWriteTime(currentFile.BackupPath) > currentFile.DateModified;
}
public ConvertApiFileParam(ProcessedFile processedFile) : this("File", processedFile)
{
}
public ConvertApiFileParam(string name, ProcessedFile processedFile) : base(name, processedFile.Url)
{
}
public static Task<FileInfo> AsFileAsync(this ProcessedFile processedFile, string fileName)
{
    return AsFileAsync(processedFile.Url, fileName);
}
public void UpdateEntry(FileContext db, ProcessedFile currentFile)
{
    currentFile.DateModified = System.IO.File.GetLastWriteTime(currentFile.FilePath);
    db.Files.Attach(currentFile);
    db.Entry(currentFile).State = Microsoft.EntityFrameworkCore.EntityState.Modified;
}
public async Task<ProcessedFile> ProcessCsvFilesAsync(Stream fileContents, string fileName)
{
    var processedFile = new ProcessedFile
    {
        TotalTransactionsProcessed = 0,
        ErrorsList = new ConcurrentBag<string>()
    };

    try
    {
        // TODO: switch to reader.ReadLineAsync in a while loop; it is better for memory usage than reading the whole file at once.
        if (fileContents != null)
        {
            string content;
            var index = 0;
            using (var reader = new StreamReader(fileContents))
            {
                content = await reader.ReadToEndAsync();
            }

            // Splitting on the header column names strips the header row along with the line breaks.
            var lines = content.Split(
                new[] { "\n", "\r\n", "Account,", "Description,", "CurrencyCode,", "Amount" },
                StringSplitOptions.RemoveEmptyEntries);

            Parallel.For((long)0, lines.Length, line =>
            {
                var fields = lines[line].Split(',');
                var account = fields[0];
                var description = fields[1];
                var currencyCode = fields[2];
                var amount = fields[3];

                var errors = _validationService.IsValid(new Transaction
                {
                    Account = account,
                    Description = description,
                    CurrencyCode = currencyCode,
                    Amount = amount
                });

                if (errors.Any())
                {
                    foreach (var item in errors)
                    {
                        processedFile.ErrorsList.Add(item);
                    }
                    return;
                }

                var transaction = new Transaction
                {
                    Account = account,
                    Amount = amount,
                    CurrencyCode = currencyCode,
                    Description = description
                };

                if (_transactionService.AddTransaction(transaction) == -1)
                {
                    processedFile.TotalTransactionsProcessed = Interlocked.Increment(ref index);
                }
            });
        }
    }
    catch (AggregateException ex)
    {
        _logger.Log(
            $"[FileManager::ProcessedCsvFiles] An AggregateException has occurred, the error message is {ex.Message}",
            LogLevel.Error, ex);
        ex.Handle(exc => true);
    }
    catch (Exception exception)
    {
        _logger.Log(
            $"[FileManager::ProcessedCsvFiles] A system error has occurred, the error message is {exception.Message}",
            LogLevel.Error, exception);
    }

    return processedFile;
}
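A usage sketch for ProcessCsvFilesAsync, assuming _fileManager is an instance of the class that defines it; the CSV layout mirrors the test data in ProcessCsvFiles_Successfully above, and the properties read here (TotalTransactionsProcessed, ErrorsList) come from the ProcessedFile result shown in the method.

var csv = "Account,Description,CurrencyCode,Amount\nTesco,supermarket,GBP,34.5";
using var stream = new MemoryStream(Encoding.UTF8.GetBytes(csv));

ProcessedFile result = await _fileManager.ProcessCsvFilesAsync(stream, "Test.csv");
Console.WriteLine($"Processed {result.TotalTransactionsProcessed} transaction(s), " +
                  $"{result.ErrorsList.Count} validation error(s).");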
public static async Task<FileInfo> SaveFileAsync(this ProcessedFile processedFile, string fileName)
{
    return await SaveFileAsync(processedFile.Url, fileName);
}
public static async Task<Stream> FileStreamAsync(this ProcessedFile processedFile)
{
    return await AsStreamAsync(processedFile.Url);
}
public void UpdateEntry(FileContext db, ProcessedFile currentFile)
{
    currentFile.FileHash = HashTools.HashFile(currentFile.FilePath);
    db.Files.Attach(currentFile);
    db.Entry(currentFile).State = Microsoft.EntityFrameworkCore.EntityState.Modified;
}
public bool WasFileUpdated(ProcessedFile currentFile)
{
    return !HashTools.CompareHashes(HashTools.HashFile(currentFile.FilePath), currentFile.FileHash);
}
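A sketch of how the hash-based pair above might be used together. Assumptions: checker is an instance of the containing class, trackedFile is a ProcessedFile loaded earlier, and FileContext derives from EF Core's DbContext so SaveChanges is available.

using (var db = new FileContext())
{
    if (checker.WasFileUpdated(trackedFile))
    {
        checker.UpdateEntry(db, trackedFile); // recompute and store the new hash
        db.SaveChanges();
    }
}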