public void Execute_ValidLogFile_ProcessesFileAndRegistersJob()
{
    // Arrange: create a log file model and write the sample IIS log asset to disk.
    LogFileModel logFile = DataHelper.CreateLogFileModel();
    string fileName = Path.GetRandomFileName() + ".log";
    string filePath = Path.Combine(AppContext.BaseDirectory, fileName);

    // Save the text file. The output stream is now wrapped in a using so it is
    // disposed even if the copy throws (the original leaked it on failure).
    using (Stream stream = TestAsset.ReadTextStream(TestAsset.LogFile))
    using (FileStream fileStream = File.Create(filePath))
    {
        stream.Seek(0, SeekOrigin.Begin);
        stream.CopyTo(fileStream);
    }

    _logFileRepo.GetById(logFile.Id).Returns(logFile);

    // execute
    _processLogFileCommand.Execute(logFile.Id, filePath);

    // assert: batch created, record persisted, reset job registered, file deleted
    _createRequestBatchCommand.Received(1).Execute(logFile.Id, Arg.Any<IEnumerable<W3CEvent>>());
    _dbContext.Received(1).ExecuteNonQuery(Arg.Any<string>(), Arg.Any<object>());
    _jobRegistrationService.Received(1).RegisterResetProcessedLogFileJob(logFile.Id);
    _fileWrap.Received(1).Delete(filePath);
}
public void Execute_IntegrationTest_SQLite()
{
    // End-to-end check against SQLite: inserting a batch of parsed W3C events
    // should write one request row per event.
    string dbPath = Path.Combine(AppContext.BaseDirectory, Path.GetRandomFileName() + ".dbtest");

    List<W3CEvent> parsedEvents;
    using (StreamReader reader = new StreamReader(TestAsset.ReadTextStream(TestAsset.LogFile)))
    {
        parsedEvents = W3CEnumerable.FromStream(reader).ToList();
    }

    using (SQLiteDbContext dbContext = new SQLiteDbContext(dbPath))
    {
        dbContext.Initialise();
        dbContext.BeginTransaction();

        // create the project first so we have one
        ProjectModel project = DataHelper.CreateProjectModel();
        DataHelper.InsertProjectModel(dbContext, project);

        // create the log file
        LogFileModel logFile = DataHelper.CreateLogFileModel(project.Id);
        DataHelper.InsertLogFileModel(dbContext, logFile);

        // create the request batch and verify the row count matches the events
        ICreateRequestBatchCommand command = new CreateRequestBatchCommand(dbContext, new RequestValidator());
        command.Execute(logFile.Id, parsedEvents);

        int requestCount = dbContext.ExecuteScalar<int>("SELECT COUNT(*) FROM Requests");
        Assert.AreEqual(parsedEvents.Count, requestCount);
    }
}
public dynamic getTotalPages(string path, int pageSize, string date = "")
{
    // Returns a dynamic object with Pages (total page count) and Items (total
    // item count) for the log file at 'path'. On any failure a 404
    // HttpResponseException is thrown carrying the error message.
    dynamic logInfo = new ExpandoObject();
    try
    {
        // Fall back to the current time when the supplied date string does not parse.
        DateTime dt;
        LogFileModel lfm = new LogFileModel(DateTime.TryParse(date, out dt) ? dt : DateTime.Now, path, pageSize);

        int itemAmount;
        logInfo.Pages = lfm.GetTotalPages(out itemAmount);
        logInfo.Items = itemAmount;
    }
    catch (Exception ex)
    {
        // The original also set logInfo.hasError/errorMessage here, but those
        // assignments were dead code: the throw below means logInfo is never
        // returned to the caller on the error path.
        var resp = new HttpResponseMessage(HttpStatusCode.NotFound) { ReasonPhrase = ex.Message };
        throw new HttpResponseException(resp);
    }
    return logInfo;
}
public void Execute_ValidationSucceeds_RecordInsertedAndJobRegistered()
{
    // Arrange: a random project id and a stubbed file-hash result.
    int projectId = new Random().Next(1, 1000);
    string filePath = Path.Combine(AppContext.BaseDirectory, "test.log");

    FileDetail fileDetail = new FileDetail
    {
        Length = new Random().Next(1000, 10000),
        Hash = Guid.NewGuid().ToString(),
        Name = Guid.NewGuid().ToString()
    };
    _fileUtils.GetFileHash(filePath).Returns(fileDetail);
    _logFileValidator.Validate(Arg.Any<LogFileModel>()).Returns(new ValidationResult());

    // execute
    LogFileModel result = _createLogFileCommand.Execute(projectId, filePath);

    // assert: record inserted, id read back, processing job registered
    _dbContext.Received(1).ExecuteNonQuery(Arg.Any<string>(), Arg.Any<object>());
    _dbContext.Received(1).ExecuteScalar<int>(Arg.Any<string>());
    _jobRegistrationService.Received(1).RegisterProcessLogFileJob(result.Id, filePath);

    // the returned model should reflect the file details and initial state
    Assert.AreEqual(projectId, result.ProjectId);
    Assert.AreEqual(fileDetail.Hash, result.FileHash);
    Assert.AreEqual(fileDetail.Length, result.FileLength);
    Assert.AreEqual(fileDetail.Name, result.FileName);
    Assert.AreEqual(-1, result.RecordCount);
    Assert.AreEqual(LogFileStatus.Processing, result.Status);
}
public void Execute(int logFileId)
{
    // Re-applies the project's configured URI-stem aggregates to every request
    // of the given log file, then marks the log file Complete.

    // Materialise the repository result once: the original enumerated the
    // IEnumerable twice (Any() + foreach), which can re-run the query.
    List<RequestModel> requests = _requestRepo.GetByLogFile(logFileId).ToList();

    // only apply aggregates if we have requests!
    if (requests.Count > 0)
    {
        // load all the aggregates for the project
        LogFileModel logFile = _logFileRepo.GetById(logFileId);
        IEnumerable<ProjectRequestAggregateModel> requestAggregates = _projectRequestAggregateRepo.GetByProject(logFile.ProjectId);

        // run through the requests and apply the configured aggregates - if the
        // value changes then update in the database
        const string usql = "UPDATE Requests SET UriStemAggregate = @UriStemAggregate WHERE Id = @RequestId";
        foreach (RequestModel req in requests)
        {
            string uriStemAggregate = _requestAggregationService.GetAggregatedUriStem(req.UriStem, requestAggregates);
            if (uriStemAggregate != req.UriStemAggregate)
            {
                _dbContext.ExecuteNonQuery(usql, new { UriStemAggregate = uriStemAggregate, RequestId = req.Id });
            }
        }
    }

    // mark the log file as processed
    string sql = "UPDATE LogFiles SET Status = @Status WHERE Id = @LogFileId";
    _dbContext.ExecuteNonQuery(sql, new { LogFileId = logFileId, Status = LogFileStatus.Complete });
    _logger.Info("Marked LogFile {0} as Complete", logFileId);
}
public static LogFileModel ParseLogFile(string fileName, string groupName)
{
    // Parses the given log file into a LogFileModel: logger name, creation
    // time, and one LogLine per parseable line (tagged with groupName).
    List<string> logFileLines = ReadLogFile(fileName);

    // The model1/result variable pair in the original was a decompiler
    // artifact; a single variable is equivalent.
    LogFileModel result = new LogFileModel();
    result.LogLines = new List<LogLine>();
    result.Logger = GetLoggerName(logFileLines);
    result.CreatedAt = GetFileCreationTime(logFileLines);

    foreach (string line in logFileLines)
    {
        // Lines containing "MOTD" are skipped (presumably server
        // message-of-the-day banners - TODO confirm).
        if (line.Contains("MOTD"))
        {
            continue;
        }

        try
        {
            LogLine item = new LogLine(line);
            item.Group = groupName;
            result.LogLines.Add(item);
        }
        catch (Exception)
        {
            // Deliberate best-effort: unparseable lines are skipped silently,
            // matching the original behaviour. NOTE(review): consider logging.
        }
    }

    return result;
}
public void Execute_AggregatesSameAsUriStem_RequestNotUpdated()
{
    int logFileId = new Random().Next(1, 1000);

    // setup: one request whose aggregated stem equals its original stem
    RequestModel request = DataHelper.CreateRequestModel(logFileId);
    request.UriStem = "TEST";
    _requestRepo.GetByLogFile(logFileId).Returns(new[] { request });

    LogFileModel logFile = DataHelper.CreateLogFileModel();
    logFile.Id = logFileId;
    _logFileRepo.GetById(logFileId).Returns(logFile);

    _requestAggregationService
        .GetAggregatedUriStem(Arg.Any<string>(), Arg.Any<IEnumerable<ProjectRequestAggregateModel>>())
        .Returns(request.UriStem);

    // execute
    _resetRequestAggregateCommand.Execute(logFileId);

    // assert: aggregation computed once; no extra per-request UPDATE issued
    _requestAggregationService.Received(1).GetAggregatedUriStem(Arg.Any<string>(), Arg.Any<IEnumerable<ProjectRequestAggregateModel>>());
    _dbContext.Received(2).ExecuteNonQuery(Arg.Any<string>(), Arg.Any<object>());
}
public void Execute_ValidationSucceeds_JobsRegisteredForLogFiles()
{
    // Arrange: an aggregate whose project owns three log files.
    int id = new Random().Next(1, 1000);
    ProjectRequestAggregateModel model = DataHelper.CreateProjectRequestAggregateModel();
    _projectRequestAggregateRepo.GetById(id).Returns(model);

    LogFileModel[] logFiles = new LogFileModel[3];
    for (int i = 0; i < logFiles.Length; i++)
    {
        logFiles[i] = DataHelper.CreateLogFileModel(model.ProjectId);
        logFiles[i].Id = i + 1;
    }
    _logFileRepo.GetByProject(model.ProjectId).Returns(logFiles);

    // execute
    _deleteProjectRequestAggregateCommand.Execute(id);

    // assert: each of the project's log files is flagged for reprocessing
    _logFileRepo.Received(1).GetByProject(model.ProjectId);
    foreach (LogFileModel logFile in logFiles)
    {
        _setLogFileUnprocessedCommand.Received(1).Execute(logFile.Id);
    }
}
public void Execute_ValidationSucceeds_JobsRegisteredForLogFiles()
{
    // Arrange: a valid aggregate whose project owns three log files.
    ProjectRequestAggregateModel model = DataHelper.CreateProjectRequestAggregateModel();
    _projectRequestAggregateValidator.Validate(model).Returns(new ValidationResult());

    LogFileModel[] logFiles = new LogFileModel[3];
    for (int i = 0; i < logFiles.Length; i++)
    {
        logFiles[i] = DataHelper.CreateLogFileModel(model.ProjectId);
        logFiles[i].Id = i + 1;
    }
    _logFileRepo.GetByProject(model.ProjectId).Returns(logFiles);

    // execute
    _createProjectRequestAggregateCommand.Execute(model);

    // assert: each of the project's log files is flagged for reprocessing
    _logFileRepo.Received(1).GetByProject(model.ProjectId);
    foreach (LogFileModel logFile in logFiles)
    {
        _setLogFileUnprocessedCommand.Received(1).Execute(logFile.Id);
    }
}
public void Execute_AggregatesDifferentToUriStem_RequestUpdated()
{
    int logFileId = new Random().Next(1, 1000);

    // setup: two requests whose aggregated stem will differ from the original
    RequestModel[] requests =
    {
        DataHelper.CreateRequestModel(logFileId),
        DataHelper.CreateRequestModel(logFileId)
    };
    _requestRepo.GetByLogFile(logFileId).Returns(requests);

    LogFileModel logFile = DataHelper.CreateLogFileModel();
    logFile.Id = logFileId;
    _logFileRepo.GetById(logFileId).Returns(logFile);

    _requestAggregationService
        .GetAggregatedUriStem(Arg.Any<string>(), Arg.Any<IEnumerable<ProjectRequestAggregateModel>>())
        .Returns(Guid.NewGuid().ToString());

    // execute
    _resetRequestAggregateCommand.Execute(logFileId);

    // assert: both requests were aggregated and written back
    _requestRepo.Received(1).GetByLogFile(logFileId);
    _logFileRepo.Received(1).GetById(logFileId);
    _projectRequestAggregateRepo.Received(1).GetByProject(logFile.ProjectId);
    _requestAggregationService.Received(2).GetAggregatedUriStem(Arg.Any<string>(), Arg.Any<IEnumerable<ProjectRequestAggregateModel>>());
    _dbContext.Received(3).ExecuteNonQuery(Arg.Any<string>(), Arg.Any<object>());
}
public static void InsertLogFileModel(IDbContext dbContext, LogFileModel logFile)
{
    // Insert the record, then read back the generated key so the caller's
    // model reflects the database identity.
    const string insertSql = @"INSERT INTO LogFiles (ProjectId, FileName, FileHash, CreateDate, FileLength, RecordCount, Status) VALUES (@ProjectId, @FileName, @FileHash, @CreateDate, @FileLength, @RecordCount, @Status)";
    dbContext.ExecuteNonQuery(insertSql, logFile);
    logFile.Id = dbContext.ExecuteScalar<int>("select last_insert_rowid()");
}
public void Execute_FileWithHashAlreadyExists_ThrowsException()
{
    // Arrange: the repository already contains a log file with the same hash.
    int projectId = new Random().Next(1, 1000);
    string filePath = Path.Combine(AppContext.BaseDirectory, "test.log");

    FileDetail fileDetail = new FileDetail { Hash = Guid.NewGuid().ToString() };
    _fileUtils.GetFileHash(filePath).Returns(fileDetail);

    LogFileModel existing = DataHelper.CreateLogFileModel();
    _logFileRepo.GetByHash(projectId, fileDetail.Hash).Returns(existing);

    // execute
    TestDelegate del = () => _createLogFileCommand.Execute(projectId, filePath);

    // assert: the duplicate hash must be rejected
    Assert.Throws<ValidationException>(del);
    _fileUtils.Received(1).GetFileHash(filePath);
    _logFileRepo.Received(1).GetByHash(projectId, fileDetail.Hash);

    // we shouldn't have even tried to validate or do the insert
    _logFileValidator.DidNotReceive().Validate(Arg.Any<LogFileModel>());
    _dbContext.DidNotReceive().ExecuteNonQuery(Arg.Any<string>(), Arg.Any<object>());
}
public void Execute_InvalidFileFormat_MarksFileAsError()
{
    // Arrange: write a file that is not in W3C/IIS format.
    // (Normalized Arg.Any<String> to Arg.Any<string> for consistency with the
    // sibling tests - same type, same matcher behaviour.)
    LogFileModel logFile = DataHelper.CreateLogFileModel();
    LogFileModel savedLogFile = null;
    string fileName = Path.GetRandomFileName() + ".log";
    string filePath = Path.Combine(AppContext.BaseDirectory, fileName);
    File.WriteAllText(filePath, "This is not a valid IIS file");

    _logFileRepo.GetById(logFile.Id).Returns(logFile);

    // Capture the model passed to the update so the saved status can be inspected.
    _dbContext.When(x => x.ExecuteNonQuery(Arg.Any<string>(), Arg.Any<LogFileModel>()))
        .Do((c) => { savedLogFile = c.ArgAt<LogFileModel>(1); });

    // execute
    _processLogFileCommand.Execute(logFile.Id, filePath);

    // assert: no batch created, no job registered, file marked as Error
    _createRequestBatchCommand.DidNotReceive().Execute(Arg.Any<int>(), Arg.Any<IEnumerable<W3CEvent>>());
    _jobRegistrationService.DidNotReceive().RegisterProcessLogFileJob(Arg.Any<int>(), Arg.Any<string>());
    _dbContext.Received(1).ExecuteNonQuery(Arg.Any<string>(), Arg.Any<LogFileModel>());
    Assert.IsNotNull(savedLogFile);
    Assert.AreEqual(LogFileStatus.Error, savedLogFile.Status);
    Assert.IsTrue(savedLogFile.ErrorMsg.Contains("File is not a valid IIS"));
}
public void GetById_Integration_ReturnsData()
{
    // Integration check: a log file inserted into SQLite can be read back by id.
    string dbPath = Path.Combine(AppContext.BaseDirectory, Path.GetRandomFileName() + ".dbtest");
    using (SQLiteDbContext dbContext = new SQLiteDbContext(dbPath))
    {
        dbContext.Initialise();
        ILogFileRepository logFileRepo = new LogFileRepository(dbContext);

        // create the project
        ProjectModel project = DataHelper.CreateProjectModel();
        DataHelper.InsertProjectModel(dbContext, project);

        // create the log file record and fetch its generated id
        LogFileModel inserted = DataHelper.CreateLogFileModel(project.Id);
        DataHelper.InsertLogFileModel(dbContext, inserted);
        int logFileId = dbContext.ExecuteScalar<int>("select last_insert_rowid()");

        LogFileModel fetched = logFileRepo.GetById(logFileId);
        Assert.IsNotNull(fetched);
        Assert.AreEqual(inserted.FileHash, fetched.FileHash);
    }
}
public void GetByHash_Integration_ReturnsData()
{
    // Integration check: GetByHash matches on both the project id and the hash.
    string dbPath = Path.Combine(AppContext.BaseDirectory, Path.GetRandomFileName() + ".dbtest");
    string fileHash = Guid.NewGuid().ToString();
    using (SQLiteDbContext dbContext = new SQLiteDbContext(dbPath))
    {
        dbContext.Initialise();
        ILogFileRepository logFileRepo = new LogFileRepository(dbContext);

        // create the project
        ProjectModel project = DataHelper.CreateProjectModel();
        DataHelper.InsertProjectModel(dbContext, project);

        // create the log file record with a known hash
        LogFileModel logFile = DataHelper.CreateLogFileModel();
        logFile.ProjectId = project.Id;
        logFile.FileHash = fileHash;
        DataHelper.InsertLogFileModel(dbContext, logFile);

        // the right project/hash combination finds the record...
        LogFileModel found = logFileRepo.GetByHash(project.Id, fileHash);
        Assert.IsNotNull(found);
        Assert.AreEqual(logFile.FileName, found.FileName);

        // ...but the same hash under a different project does not
        found = logFileRepo.GetByHash(0, fileHash);
        Assert.IsNull(found);
    }
}
public void Validate_AllFieldsValid_ReturnsSuccess()
{
    // A fully-populated model should pass validation with no messages.
    LogFileModel model = DataHelper.CreateLogFileModel();

    ValidationResult result = _logFileValidator.Validate(model);

    Assert.IsTrue(result.Success);
    Assert.AreEqual(0, result.Messages.Count);
}
private void ExecuteWriteFileOperation(string _FullFilenameWithPath, string _Text, bool _Append)
{
    // Build the UTF-8 write job and run it synchronously on the calling thread.
    // A commented-out background-thread variant was removed as dead code;
    // restore from source history if asynchronous writing is ever needed.
    LogFileModel fm = new LogFileModel(_FullFilenameWithPath, _Append, Encoding.UTF8, _Text);
    PrintJob(fm);
}
public void Validate_InvalidProjectId_ReturnsFailure(int projectId)
{
    // An invalid project id should yield exactly one failure message.
    LogFileModel model = DataHelper.CreateLogFileModel();
    model.ProjectId = projectId;

    ValidationResult result = _logFileValidator.Validate(model);

    Assert.IsFalse(result.Success);
    Assert.AreEqual(1, result.Messages.Count);
    Assert.IsTrue(result.Messages[0].Contains("Project id"));
}
public void Validate_InvalidFileLength_ReturnsFailure(int fileLength)
{
    // An invalid file length should yield exactly one failure message.
    LogFileModel model = DataHelper.CreateLogFileModel();
    model.FileLength = fileLength;

    ValidationResult result = _logFileValidator.Validate(model);

    Assert.IsFalse(result.Success);
    Assert.AreEqual(1, result.Messages.Count);
    Assert.IsTrue(result.Messages[0].Contains("File length"));
}
public void Validate_InvalidFileName_ReturnsFailure(string logFileName)
{
    // An invalid file name should yield exactly one failure message.
    LogFileModel model = DataHelper.CreateLogFileModel();
    model.FileName = logFileName;

    ValidationResult result = _logFileValidator.Validate(model);

    Assert.IsFalse(result.Success);
    Assert.AreEqual(1, result.Messages.Count);
    Assert.IsTrue(result.Messages[0].Contains("File name"));
}
public static void GetLogFileNames(List<SSO.IntelMap.Models.GroupChannelName> data)
{
    // For each group/channel, find the newest matching log file, record a
    // read-from time per group, and join the hub group for every group that
    // has at least one log file.
    var catcher = new List<SSO.IntelMap.Models.GroupChannelName>();
    var dInfo = new DirectoryInfo(Path);

    data.ForEach(group =>
    {
        group.Channels.ForEach(channel =>
        {
            // Pick the most recently written log file for this channel, if any.
            var fileInfo = dInfo.EnumerateFiles(string.Concat(channel, "*"))
                .OrderByDescending(o => o.LastWriteTimeUtc)
                .FirstOrDefault();
            if (fileInfo == null)
            {
                return;
            }

            Console.WriteLine("Listening to channel " + channel);
            catcher.Add(group);

            // Use the timestamp of the last parsed line when available,
            // otherwise fall back to the file creation time.
            LogFileModel model = LogFileHelper.ParseLogFile(fileInfo.FullName, group.Group);
            DateTime createdAt = model.CreatedAt;
            if (model.LogLines.Count > 0)
            {
                createdAt = model.LogLines.Last().LogDateTime;
            }

            // Refresh the stored read-from time when this log is newer.
            // BUG FIX: the original compared against ReadFromTimes.FirstOrDefault().Value,
            // i.e. an arbitrary entry, instead of this group's own stored time.
            if (ReadFromTimes.TryGetValue(group.Group, out DateTime existing) && existing < createdAt)
            {
                ReadFromTimes.Remove(group.Group);
            }
            if (!ReadFromTimes.ContainsKey(group.Group))
            {
                ReadFromTimes.Add(group.Group, createdAt);
            }
        });
    });

    catcher = catcher.Distinct().ToList();
    catcher.ForEach(group => { HubProxy.Invoke("joinGroup", group.Group); });
    Console.WriteLine();
    LogFileNames = catcher;
}
public void GetByUriStemAggregate_Integration_ReturnsData()
{
    // Integration check: GetByUriStemAggregate returns only the matching
    // requests that belong to the requested project.
    string dbPath = Path.Combine(AppContext.BaseDirectory, Path.GetRandomFileName() + ".dbtest");
    string uriStemAggregate = Guid.NewGuid().ToString();
    int expectedCount = new Random().Next(3, 7);

    List<W3CEvent> logEvents;
    using (StreamReader logStream = new StreamReader(TestAsset.ReadTextStream(TestAsset.LogFile)))
    {
        logEvents = W3CEnumerable.FromStream(logStream).ToList();
    }

    // stamp the aggregate stem onto the first N events
    for (int i = 0; i < expectedCount; i++)
    {
        logEvents[i].cs_uri_stem = uriStemAggregate;
    }

    using (SQLiteDbContext dbContext = new SQLiteDbContext(dbPath))
    {
        dbContext.Initialise();
        dbContext.BeginTransaction();
        IRequestRepository requestRepo = new RequestRepository(dbContext);
        ICreateRequestBatchCommand createRequestBatchCommand = new CreateRequestBatchCommand(dbContext, new RequestValidator());

        // create two projects
        ProjectModel project = DataHelper.CreateProjectModel();
        DataHelper.InsertProjectModel(dbContext, project);
        ProjectModel project2 = DataHelper.CreateProjectModel();
        DataHelper.InsertProjectModel(dbContext, project2);

        // create a log file plus request records for each project
        LogFileModel logFile = DataHelper.CreateLogFileModel(project.Id);
        DataHelper.InsertLogFileModel(dbContext, logFile);
        createRequestBatchCommand.Execute(logFile.Id, logEvents);
        LogFileModel logFile2 = DataHelper.CreateLogFileModel(project2.Id);
        DataHelper.InsertLogFileModel(dbContext, logFile2);
        createRequestBatchCommand.Execute(logFile2.Id, logEvents);

        IEnumerable<RequestModel> result = requestRepo.GetByUriStemAggregate(project.Id, uriStemAggregate);
        Assert.IsNotNull(result);
        Assert.AreEqual(expectedCount, result.Count());
        foreach (RequestModel rm in result)
        {
            // every hit must come from the first project's log file
            Assert.AreEqual(logFile.Id, rm.LogFileId);
        }
    }
}
public void GetPageLoadTimes_Integration_ReturnsData()
{
    // Integration check: page load times are grouped by UriStemAggregate with
    // the correct request counts and average time taken.
    string dbPath = Path.Combine(AppContext.BaseDirectory, Path.GetRandomFileName() + ".dbtest");

    // PageA averages 20ms over 5 hits; PageB averages 2000ms over 2 hits.
    List<W3CEvent> logEvents = new List<W3CEvent>
    {
        CreateW3CEvent("PageA", 17),
        CreateW3CEvent("PageA", 13),
        CreateW3CEvent("PageA", 21),
        CreateW3CEvent("PageA", 9),
        CreateW3CEvent("PageA", 40),
        CreateW3CEvent("PageB", 1000),
        CreateW3CEvent("PageB", 3000)
    };

    using (SQLiteDbContext dbContext = new SQLiteDbContext(dbPath))
    {
        dbContext.Initialise();
        dbContext.BeginTransaction();
        ICreateRequestBatchCommand createRequestBatchCommand = new CreateRequestBatchCommand(dbContext, new RequestValidator());
        IRequestRepository requestRepo = new RequestRepository(dbContext);

        // create the project
        ProjectModel project = DataHelper.CreateProjectModel();
        DataHelper.InsertProjectModel(dbContext, project);

        // create the log file record
        LogFileModel logFile = DataHelper.CreateLogFileModel(project.Id);
        DataHelper.InsertLogFileModel(dbContext, logFile);

        // create the requests
        createRequestBatchCommand.Execute(logFile.Id, logEvents);

        // update all requests so the aggregate is different to the stem
        string sql = "UPDATE Requests SET UriStemAggregate = UriStem || '__'";
        dbContext.ExecuteNonQuery(sql);

        IEnumerable<RequestPageLoadTimeModel> result = requestRepo.GetPageLoadTimes(project.Id);
        Assert.AreEqual(2, result.Count());

        RequestPageLoadTimeModel pageAResult = result.Where(x => x.UriStemAggregate == "PageA__").SingleOrDefault();
        Assert.IsNotNull(pageAResult);
        Assert.AreEqual(5, pageAResult.RequestCount);
        Assert.AreEqual(20, pageAResult.AvgTimeTakenMilliseconds);

        RequestPageLoadTimeModel pageBResult = result.Where(x => x.UriStemAggregate == "PageB__").SingleOrDefault();
        Assert.IsNotNull(pageBResult);
        Assert.AreEqual(2, pageBResult.RequestCount);
        Assert.AreEqual(2000, pageBResult.AvgTimeTakenMilliseconds);
    }
}
public static LogFileModel CreateLogFileModel(int projectId = 0)
{
    // Builds a log file model populated with random test values; a
    // non-positive projectId is replaced with a random one.
    Random rnd = new Random();
    LogFileModel model = new LogFileModel();
    model.Id = rnd.Next(1, 1000);
    model.ProjectId = projectId <= 0 ? rnd.Next(1, 1000) : projectId;
    model.FileName = Path.GetRandomFileName();
    model.FileHash = Path.GetRandomFileName();
    model.FileLength = rnd.Next(1, 1000);
    model.RecordCount = rnd.Next(1, 1000);
    return model;
}
public void Execute_PasswordNotSupplied_ValidationErrorThrown(string password)
{
    // (Removed an unused LogFileModel local that was created but never read.)

    // execute: a missing/empty password must fail validation
    TestDelegate del = () => _updateUserPasswordCommand.Execute("test", password);

    // assert
    Assert.Throws<ValidationException>(del);

    // we shouldn't have even tried to do the insert
    _dbContext.DidNotReceive().ExecuteNonQuery(Arg.Any<string>(), Arg.Any<object>());
}
public void Execute_IntegrationTest_SQLite()
{
    // Integration check: deleting a log file removes its requests (and its
    // LogFiles row) without touching a sibling log file's data.
    string filePath = Path.Combine(AppContext.BaseDirectory, Path.GetRandomFileName() + ".dbtest");

    List<W3CEvent> logEvents = null;
    using (StreamReader logStream = new StreamReader(TestAsset.ReadTextStream(TestAsset.LogFile)))
    {
        logEvents = W3CEnumerable.FromStream(logStream).ToList();
    }

    using (SQLiteDbContext dbContext = new SQLiteDbContext(filePath))
    {
        dbContext.Initialise();
        dbContext.BeginTransaction();

        // (Removed an unused ICreateProjectCommand local from the original.)
        ICreateRequestBatchCommand createRequestBatchCommand = new CreateRequestBatchCommand(dbContext, new RequestValidator());
        IDeleteLogFileCommand deleteLogFileCommand = new DeleteLogFileCommand(dbContext);

        // create the project first so we have one
        // NOTE(review): the project row is never inserted - the log files
        // reference project.Id without a Projects row; confirm foreign keys
        // are not enforced in this schema.
        ProjectModel project = DataHelper.CreateProjectModel();

        // create the 2 log files
        LogFileModel logFile1 = DataHelper.CreateLogFileModel(project.Id);
        DataHelper.InsertLogFileModel(dbContext, logFile1);
        LogFileModel logFile2 = DataHelper.CreateLogFileModel(project.Id);
        DataHelper.InsertLogFileModel(dbContext, logFile2);

        // create the request batches
        createRequestBatchCommand.Execute(logFile1.Id, logEvents);
        createRequestBatchCommand.Execute(logFile2.Id, logEvents);

        int rowCount = dbContext.ExecuteScalar<int>("SELECT COUNT(*) FROM Requests WHERE LogFileId = @LogFileId", new { LogFileId = logFile1.Id });
        Assert.AreEqual(logEvents.Count, rowCount);

        // run the delete command
        deleteLogFileCommand.Execute(logFile1.Id);

        // there should be no requests for logFile1, but requests for logFile2 should still exist
        rowCount = dbContext.ExecuteScalar<int>("SELECT COUNT(*) FROM Requests WHERE LogFileId = @LogFileId", new { LogFileId = logFile1.Id });
        Assert.AreEqual(0, rowCount);
        rowCount = dbContext.ExecuteScalar<int>("SELECT COUNT(*) FROM Requests WHERE LogFileId = @LogFileId", new { LogFileId = logFile2.Id });
        Assert.AreEqual(logEvents.Count, rowCount);
        rowCount = dbContext.ExecuteScalar<int>("SELECT COUNT(*) FROM LogFiles");
        Assert.AreEqual(1, rowCount);
    }
}
public void PrintJob(object _Data)
{
    // Serialise all file writes through a single lock so concurrent callers
    // cannot interleave output in the same file.
    if (_Data == null)
    {
        return;
    }

    lock (objLock)
    {
        LogFileModel fm = (LogFileModel)_Data;
        using (StreamWriter writer = new StreamWriter(path: fm.FilePath, append: fm.IsAppend, encoding: fm.EncodingType))
        {
            // Skip the write only when the text is null or empty.
            string text = Convert.ToString(fm.TextString);
            if (!string.IsNullOrEmpty(text))
            {
                writer.WriteLine(fm.TextString);
            }
        }
    }
}
public void GetByLogFile_Integration_ReturnsData()
{
    // Integration check: GetByLogFile returns only the requests of the
    // requested log file, even when several log files share identical events.
    string dbPath = Path.Combine(AppContext.BaseDirectory, Path.GetRandomFileName() + ".dbtest");
    int targetLogFileId = 0;

    List<W3CEvent> logEvents;
    using (StreamReader logStream = new StreamReader(TestAsset.ReadTextStream(TestAsset.LogFile)))
    {
        logEvents = W3CEnumerable.FromStream(logStream).ToList().GetRange(0, 10);
    }

    using (SQLiteDbContext dbContext = new SQLiteDbContext(dbPath))
    {
        dbContext.Initialise();
        dbContext.BeginTransaction();
        IRequestRepository requestRepo = new RequestRepository(dbContext);
        ICreateRequestBatchCommand createRequestBatchCommand = new CreateRequestBatchCommand(dbContext, new RequestValidator());

        // create the project
        ProjectModel project = DataHelper.CreateProjectModel();
        DataHelper.InsertProjectModel(dbContext, project);

        // create multiple log file records, remembering the first one's id
        for (int i = 0; i < 3; i++)
        {
            LogFileModel logFile = DataHelper.CreateLogFileModel(project.Id);
            DataHelper.InsertLogFileModel(dbContext, logFile);
            createRequestBatchCommand.Execute(logFile.Id, logEvents);
            if (targetLogFileId == 0)
            {
                targetLogFileId = logFile.Id;
            }
        }

        IEnumerable<RequestModel> result = requestRepo.GetByLogFile(targetLogFileId);
        Assert.IsNotNull(result);
        Assert.AreEqual(logEvents.Count, result.Count());
    }
}
public void GetUnprocessedLogFileCount_Integration_ReturnsValidCount()
{
    // Integration check: only Processing-status log files belonging to the
    // requested project count as unprocessed.
    // (Removed an unused 'processedCount' local that was assigned a random
    // value and never read.)
    string filePath = Path.Combine(AppContext.BaseDirectory, Path.GetRandomFileName() + ".dbtest");
    using (SQLiteDbContext dbContext = new SQLiteDbContext(filePath))
    {
        dbContext.Initialise();
        IProjectRepository projectRepo = new ProjectRepository(dbContext);

        ProjectModel project = DataHelper.CreateProjectModel();
        DataHelper.InsertProjectModel(dbContext, project);
        ProjectModel project2 = DataHelper.CreateProjectModel();
        DataHelper.InsertProjectModel(dbContext, project2);

        // create records
        const string sql = @"INSERT INTO LogFiles (ProjectId, FileName, FileHash, CreateDate, FileLength, RecordCount, Status) VALUES (@ProjectId, @FileName, @FileHash, @CreateDate, @FileLength, @RecordCount, @Status)";

        // first processing record - this should be included
        LogFileModel logFile = DataHelper.CreateLogFileModel(project.Id);
        logFile.Status = LogFileStatus.Processing;
        dbContext.ExecuteNonQuery(sql, logFile);

        // second processing record - this should be included
        logFile = DataHelper.CreateLogFileModel(project.Id);
        logFile.Status = LogFileStatus.Processing;
        dbContext.ExecuteNonQuery(sql, logFile);

        // an error record - this should not be included
        logFile = DataHelper.CreateLogFileModel(project.Id);
        logFile.Status = LogFileStatus.Error;
        dbContext.ExecuteNonQuery(sql, logFile);

        // a processing record for another project - also not included
        logFile = DataHelper.CreateLogFileModel(project2.Id);
        logFile.Status = LogFileStatus.Processing;
        dbContext.ExecuteNonQuery(sql, logFile);

        int result = projectRepo.GetUnprocessedLogFileCount(project.Id);
        Assert.AreEqual(2, result);
    }
}
public void Execute_IntegrationTest_SQLite()
{
    // Integration check: SetLogFileUnprocessedCommand flips exactly one
    // Complete log file back to Processing.
    // (Removed the original's logEvents1/logEvents2 setup: both lists were
    // parsed from the asset stream but never used afterwards.)
    string filePath = Path.Combine(AppContext.BaseDirectory, Path.GetRandomFileName() + ".dbtest");

    using (SQLiteDbContext dbContext = new SQLiteDbContext(filePath))
    {
        dbContext.Initialise();
        ISetLogFileUnprocessedCommand setLogFileUnprocessedCommand = new SetLogFileUnprocessedCommand(dbContext, _jobRegistrationService);

        // create the project first so we have one
        ProjectModel project = DataHelper.CreateProjectModel();
        DataHelper.InsertProjectModel(dbContext, project);

        // create two Complete log files
        LogFileModel logFile1 = DataHelper.CreateLogFileModel(project.Id);
        logFile1.Status = LogFileStatus.Complete;
        DataHelper.InsertLogFileModel(dbContext, logFile1);
        LogFileModel logFile2 = DataHelper.CreateLogFileModel(project.Id);
        logFile2.Status = LogFileStatus.Complete;
        DataHelper.InsertLogFileModel(dbContext, logFile2);

        // sanity-check that both log files are marked Complete
        int processedCount = dbContext.ExecuteScalar<int>(
            "SELECT COUNT(*) FROM LogFiles WHERE ProjectId = @ProjectId AND Status = @Status",
            new { ProjectId = project.Id, Status = LogFileStatus.Complete });
        Assert.AreEqual(2, processedCount);

        // execute for a single log file
        setLogFileUnprocessedCommand.Execute(logFile1.Id);

        processedCount = dbContext.ExecuteScalar<int>(
            "SELECT COUNT(*) FROM LogFiles WHERE ProjectId = @ProjectId AND Status = @Status",
            new { ProjectId = project.Id, Status = LogFileStatus.Processing });
        Assert.AreEqual(1, processedCount);
    }
}