/// <summary>
/// Folds the branch index into the trunk by re-writing the branch's documents
/// through a new upsert transaction, then deletes the branch's files.
/// </summary>
/// <param name="srcIxFileName">Path of the branch .ix file to merge in.</param>
/// <returns>The version id produced by the merge write.</returns>
private long Merge(string srcIxFileName)
{
    Log.InfoFormat("merging branch {0} with trunk {1}", _ixFilesToProcess[1], _ixFilesToProcess[0]);

    var branchIx = IxInfo.Load(srcIxFileName);
    var rdocFileName = Path.Combine(_directory, branchIx.VersionId + ".rdoc");
    long newVersion;

    using (var docStream = new RDocStream(rdocFileName, branchIx.PrimaryKeyFieldName))
    {
        using (var tx = new UpsertTransaction(
            _directory,
            _analyzer,
            branchIx.Compression,
            docStream))
        {
            newVersion = tx.Write();
            tx.Commit();
        }

        Log.InfoFormat("{0} merged with {1} creating a segmented index", srcIxFileName, _ixFilesToProcess[0]);
    }

    // The branch's documents now live in the new version; drop the old files.
    Util.RemoveAll(srcIxFileName);

    return newVersion;
}
/// <summary>
/// Rewrites a segmented index into a single fresh version by streaming its
/// documents through a new upsert transaction under the directory write lock,
/// then removes the source index files.
/// </summary>
/// <param name="srcIxFileName">Path of the segmented .ix file to truncate.</param>
/// <returns>The version id produced by the rewrite.</returns>
private long Truncate(string srcIxFileName)
{
    Log.InfoFormat("truncating {0}", srcIxFileName);

    var srcIx = IxInfo.Load(srcIxFileName);
    var documentFileName = Path.Combine(_directory, srcIx.VersionId + ".rdoc");
    long version;

    using (var documentStream = new RDocStream(documentFileName, srcIx.PrimaryKeyFieldName))
    {
        // NOTE(review): the acquire result is ignored here, as it was before —
        // presumably a failed acquire is acceptable in this pipeline; confirm.
        Util.TryAquireWriteLock(_directory);

        try
        {
            using (var upsert = new UpsertTransaction(
                _directory,
                _analyzer,
                srcIx.Compression,
                documentStream))
            {
                version = upsert.Write();
                upsert.Commit();
            }
        }
        finally
        {
            // Release in a finally so a failed write cannot leave the
            // directory permanently locked.
            Util.ReleaseFileLock(_directory);
        }

        Log.InfoFormat("ix {0} fully truncated", _ixFilesToProcess[0]);
    }

    Util.RemoveAll(srcIxFileName);

    return version;
}
/// <summary>
/// Creates a searcher over the index stored in <paramref name="directory"/>.
/// </summary>
/// <param name="directory">Directory holding the index files.</param>
/// <param name="parser">Parser used to interpret incoming queries.</param>
/// <param name="scorer">Scoring scheme applied to hits.</param>
public Searcher(string directory, QueryParser parser, IScoringScheme scorer)
{
    _parser = parser;
    _scorer = scorer;
    _directory = directory;

    _trieFiles = new ConcurrentDictionary<string, LazyTrie>();
    _postingContainers = new ConcurrentDictionary<string, PostingsContainer>();
    _docContainers = new ConcurrentDictionary<string, DocContainer>();

    // NOTE(review): assumes "0.ix" is always the main index descriptor for a
    // directory — confirm against the writer side.
    _ix = IxInfo.Load(Path.Combine(directory, "0.ix"));
}
/// <summary>
/// Finalizes the pending index work: with a single index file, collapses its
/// segments (if any); with two, merges the branch into the trunk.
/// </summary>
/// <returns>The new version id, or -1 when there was nothing to do.</returns>
public long Commit()
{
    if (_ixFilesToProcess.Length == 1)
    {
        // Single index file: only a segmented index needs work.
        // (The original also loaded the IxInfo here but never used it —
        // that dead read has been removed.)
        if (Util.IsSegmented(_ixFilesToProcess[0]))
        {
            return Truncate(_ixFilesToProcess[0]);
        }

        return -1;
    }

    // Two index files: merge the branch (index 1) into the trunk (index 0).
    return Merge(_ixFilesToProcess[1]);
}
/// <summary>
/// Starts a write transaction against the index in <paramref name="directory"/>.
/// If no index exists yet, or another writer holds (or wins) the directory's
/// write lock, the transaction writes a brand new version; otherwise it takes
/// the write lock and continues the current main index version.
/// </summary>
/// <param name="directory">Index directory.</param>
/// <param name="analyzer">Analyzer used to tokenize documents.</param>
/// <param name="compression">Compression scheme for the document store.</param>
/// <param name="documents">Stream of documents to upsert.</param>
/// <param name="storeWriter">Optional store writer; a default <c>DocumentStoreWriter</c> is created when null.</param>
public UpsertTransaction(
    string directory,
    IAnalyzer analyzer,
    Compression compression,
    DocumentStream documents,
    IDocumentStoreWriter storeWriter = null)
{
    _directory = directory;
    _analyzer = analyzer;
    _compression = compression;
    _documents = documents;

    // The newest existing index file, or null when the directory is empty.
    var mainIndexVersion = Util.GetIndexFileNamesInChronologicalOrder(_directory)
        .FirstOrDefault();

    if (mainIndexVersion == null)
    {
        // No index yet: this transaction creates the first version.
        _indexVersionId = Util.GetNextChronologicalFileId();
    }
    else
    {
        if (Util.WriteLockExists(_directory) || !Util.TryAquireWriteLock(_directory))
        {
            // Another writer owns (or won the race for) the write lock:
            // write a new branch version instead of touching the main index.
            _indexVersionId = Util.GetNextChronologicalFileId();
        }
        else
        {
            // We acquired the write lock: reuse the main index's version id
            // and continue from its current document count.
            // NOTE(review): the lock taken here is presumably released by
            // Commit/Dispose elsewhere in this class — confirm.
            _indexVersionId = long.Parse(Path.GetFileNameWithoutExtension(mainIndexVersion));
            var ix = IxInfo.Load(mainIndexVersion);
            _count = ix.DocumentCount;
        }
    }

    _storeWriter = storeWriter ?? new DocumentStoreWriter(directory, _indexVersionId, _compression);
}
/// <summary>
/// Opens the on-disk artifacts of a single index version (.rdoc, .da, .pk,
/// .kix, .ix — all siblings of <paramref name="fileName"/>) for streaming reads.
/// </summary>
/// <param name="fileName">Path to any of the version's files; its base name is the version id.</param>
/// <param name="primaryKeyFieldName">Optional primary-key field name, passed to the base stream.</param>
/// <param name="skip">Number of documents to skip.</param>
/// <param name="take">Maximum number of documents to read.</param>
public RDocStream(string fileName, string primaryKeyFieldName = null, int skip = 0, int take = int.MaxValue)
    : base(primaryKeyFieldName)
{
    var versionId = Path.GetFileNameWithoutExtension(fileName);
    var directory = Path.GetDirectoryName(fileName);
    var docFileName = Path.Combine(directory, versionId + ".rdoc");
    var docAddressFn = Path.Combine(directory, versionId + ".da");
    var docHashesFileName = Path.Combine(directory, string.Format("{0}.{1}", versionId, "pk"));
    var keyIndexFileName = Path.Combine(directory, versionId + ".kix");

    var keyIndex = Util.GetKeyIndex(keyIndexFileName);

    _ix = IxInfo.Load(Path.Combine(directory, versionId + ".ix"));

    try
    {
        _hashReader = new DocHashReader(docHashesFileName);

        _addressReader = new DocumentAddressReader(
            new FileStream(docAddressFn, FileMode.Open, FileAccess.Read));

        _documentReader = new DocumentReader(
            new FileStream(docFileName, FileMode.Open, FileAccess.Read),
            _ix.Compression,
            keyIndex);
    }
    catch
    {
        // If a later reader fails to open, the object is never constructed and
        // Dispose will not run — close the readers already created so their
        // underlying file handles are not leaked.
        if (_hashReader != null) _hashReader.Dispose();
        if (_addressReader != null) _addressReader.Dispose();
        throw;
    }

    _skip = skip;
    _take = take;
    _directory = directory;
}