/// <summary>
/// Creates a SQLiteBatchInserter instance against this database and tracks it for future
/// calls to FlushAllBatchInserters() and ReinitializeAllBatchInserters()
/// </summary>
/// <returns>The newly created and tracked SQLiteBatchInserter.</returns>
public SQLiteBatchInserter CreateBatchInserter(int commitFrequency = 10000)
{
    SQLiteBatchInserter result = new SQLiteBatchInserter(this, commitFrequency);
    batchInserters.Add(result);
    return result;
}
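// Illustrative usage sketch for the batch inserter lifecycle (hypothetical caller code;
// 'db', 'SomeTable', and 'items' are assumptions, not part of this class). The same
// Begin/Insert/Complete pattern appears in SortIds() and LoadLogFile() below.
//
//   SQLiteBatchInserter inserter = db.CreateBatchInserter(commitFrequency: 5000);
//   inserter.Begin("INSERT INTO SomeTable (ID, Value) VALUES (@id, @value)");
//   foreach (var item in items)
//   {
//       inserter["@id"] = item.Id;
//       inserter["@value"] = item.Value;
//       inserter.Insert();    // commitFrequency presumably controls how often pending rows are committed
//   }
//   inserter.Complete();                 // flush any remaining pending inserts
//   db.ReleaseBatchInserter(inserter);   // stop tracking it once finished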
/// <summary>
/// Sorts a list of IDs using database information to order them in TimeStamp and LineNumber order
/// </summary>
/// <param name="unsortedIds">The log entry IDs to sort.</param>
/// <returns>The same IDs ordered by TimeStamp, then LineNumber.</returns>
public IList<long> SortIds(IList<long> unsortedIds)
{
    ExecuteNonQuery("CREATE TABLE IDList ( ID INTEGER );");

    SQLiteBatchInserter inserter = new SQLiteBatchInserter(this, 100000);
    inserter.Begin("INSERT INTO IDList (ID) VALUES (@id)");
    foreach (var id in unsortedIds)
    {
        inserter["@id"] = id;
        inserter.Insert();
    }
    inserter.Complete();

    ExecuteNonQuery("CREATE INDEX IDX_TempIDList ON IDList (ID);");

    string retrievalQuery = "SELECT ID FROM LogEntry WHERE ID IN (SELECT ID FROM IDList) ORDER BY TimeStamp ASC, LineNumber ASC";
    long[] sortedIds = ExecuteReader<long>(retrievalQuery, (reader) =>
    {
        return (long)reader["ID"];
    }).ToArray();

    ExecuteNonQuery("DROP TABLE IDList;");

    return sortedIds;
}
/// <summary>
/// This should only be called by the public method LoadLogFiles since that method takes care of index drop and rebuild.
/// </summary>
/// <param name="logFile">Path to a log file to load.</param>
/// <param name="startingRecordCount">Running record count carried over from previously loaded files.</param>
/// <param name="pb">ProgressBroadcaster which will receive progress updates, can be null.</param>
/// <returns>The updated record count once this file has been loaded.</returns>
private long LoadLogFile(string logFile, long startingRecordCount, ProgressBroadcaster pb = null)
{
    FileInfo logFileInfo = new FileInfo(logFile);
    if (logFileInfo.Length < 1)
    {
        // Skip 0 length files
        return startingRecordCount;
    }

    int indexingConcurrency = 4;

    pb?.BroadcastStatus("Loading from " + logFile);

    // Can be tricky to do batch insert and get each new record's ID, so instead we query the database for the
    // current highest ID value and increment and assign IDs here rather than letting DB auto increment do the job.
    long nextId = Database.GetHighestLogEntryID();

    NuixLogReader reader = new NuixLogReader(logFile);

    SQLiteBatchInserter batchInserter = Database.CreateBatchInserter(1000);
    batchInserter.Begin(Database.GetEmbeddedSQL("NuixLogReviewer.LogRepository.InsertLogEntry.sqlite"));

    // Used for progress updates
    object locker = new object();
    long recordCount = startingRecordCount;

    List<IEntryClassifier> classifiers = getAllClassifiers();

    BlockingCollection<NuixLogEntry> toInsert = new BlockingCollection<NuixLogEntry>();
    BlockingCollection<NuixLogEntry> toClassify = new BlockingCollection<NuixLogEntry>();
    BlockingCollection<NuixLogEntry> toIndex = new BlockingCollection<NuixLogEntry>();

    // ==== Task Dedicated to Pulling Entries from Source ====
    Task readerConsumer = new Task(new Action(() =>
    {
        foreach (var entry in reader)
        {
            toClassify.Add(entry);
        }
        // Signal that was the last one
        toClassify.Add(null);
    }), TaskCreationOptions.LongRunning);

    // ==== Classify Log Entries ====
    Task classificationTask = new Task(new Action(() =>
    {
        while (true)
        {
            NuixLogEntry entry = toClassify.Take();
            if (entry == null) { break; }

            // Give each classifier a chance to look at this entry and provide flag
            // values to be assigned to the entry.
            HashSet<string> flags = new HashSet<string>();
            foreach (var classifier in classifiers)
            {
                var calculatedFlags = classifier.Classify(entry);
                if (calculatedFlags != null)
                {
                    foreach (var calculatedFlag in calculatedFlags)
                    {
                        flags.Add(calculatedFlag.ToLower());
                    }
                }
            }
            entry.Flags = flags;
            toInsert.Add(entry);
        }
        // Signal that was the last one
        toInsert.Add(null);
    }), TaskCreationOptions.LongRunning);

    // ==== Task Dedicated to Inserting to SQLite Database ====
    Task dbConsumer = new Task(new Action(() =>
    {
        DateTime lastProgress = DateTime.Now;
        while (true)
        {
            NuixLogEntry entry = toInsert.Take();
            if (entry == null) { break; }

            nextId++;

            // Push to SQLite database
            entry.ID = nextId;
            batchInserter["@id"] = entry.ID;
            batchInserter["@linenumber"] = entry.LineNumber;
            batchInserter["@filename"] = Database.GetFilenameID(entry.FilePath);
            batchInserter["@timestamp"] = entry.TimeStamp.ToFileTime();
            batchInserter["@channel"] = Database.GetChannelID(entry.Channel);
            batchInserter["@elapsed"] = entry.Elapsed.TotalMilliseconds;
            batchInserter["@level"] = Database.GetLevelID(entry.Level);
            batchInserter["@source"] = Database.GetSourceID(entry.Source);
            batchInserter["@content"] = entry.Content;
            batchInserter["@flags"] = String.Join(" ", entry.Flags);
            batchInserter.Insert();
            recordCount++;

            // Periodically report progress
            if ((DateTime.Now - lastProgress).TotalMilliseconds >= 1000)
            {
                lock (locker)
                {
                    pb?.BroadcastProgress(recordCount);
                }
                lastProgress = DateTime.Now;
            }

            toIndex.Add(entry);
        }

        // Let each indexing task know there are no more to index
        for (int i = 0; i < indexingConcurrency; i++)
        {
            toIndex.Add(null);
        }
    }), TaskCreationOptions.LongRunning);

    // ==== Series of Tasks Dedicated to Adding Entries to Lucene Index ====
    Task[] indexers = new Task[indexingConcurrency];
    for (int i = 0; i < indexingConcurrency; i++)
    {
        Task indexConsumer = new Task(new Action(() =>
        {
            while (true)
            {
                NuixLogEntry entry = toIndex.Take();
                if (entry == null) { break; }

                // Push to Lucene
                SearchIndex.IndexLogEntry(entry);
            }
            pb?.BroadcastProgress(recordCount);
        }), TaskCreationOptions.LongRunning);
        indexers[i] = indexConsumer;
        indexConsumer.Start();
    }

    readerConsumer.Start();
    classificationTask.Start();
    dbConsumer.Start();

    // Wait for them all to finish up
    Task.WaitAll(readerConsumer, classificationTask, dbConsumer);
    Task.WaitAll(indexers);

    // Report final progress
    pb?.BroadcastProgress(recordCount);

    // Make sure batch inserter flushes any pending inserts
    batchInserter.Complete();
    Database.ReleaseBatchInserter(batchInserter);

    toClassify.Dispose();
    toInsert.Dispose();
    toIndex.Dispose();

    return recordCount;
}
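// Minimal sketch of the BlockingCollection hand-off pattern used in LoadLogFile above
// (generic illustration, not project code): a producer adds items to a BlockingCollection
// and a null sentinel marks the end of input so each consumer knows when to stop.
//
//   BlockingCollection<string> queue = new BlockingCollection<string>();
//   Task producer = Task.Run(() =>
//   {
//       foreach (string line in File.ReadLines("some.log")) { queue.Add(line); }
//       queue.Add(null); // sentinel: no more items
//   });
//   Task consumer = Task.Run(() =>
//   {
//       while (true)
//       {
//           string line = queue.Take();
//           if (line == null) { break; }
//           Console.WriteLine(line);
//       }
//   });
//   Task.WaitAll(producer, consumer);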
/// <summary>
/// Causes this instance to drop its reference to the given SQLiteBatchInserter so it can be
/// garbage collected and is no longer included in FlushAllBatchInserters() and
/// ReinitializeAllBatchInserters() calls.
/// </summary>
/// <param name="batchInserter">The SQLiteBatchInserter to stop tracking.</param>
public void ReleaseBatchInserter(SQLiteBatchInserter batchInserter)
{
    batchInserters.Remove(batchInserter);
}