/// <summary>
/// When the IndexOutput is closed we ensure that the file is flushed and written locally
/// and also persisted to master storage.
/// </summary>
protected override void Dispose(bool disposing)
{
    _fileMutex.WaitOne();
    try
    {
        var fileName = _name;

        // make sure it's all written out
        // we only check for null here in case Close is called multiple times
        if (_cacheDirIndexOutput != null)
        {
            _cacheDirIndexOutput.Flush();
            _cacheDirIndexOutput.Dispose();

            IndexInput cacheInput = null;
            try
            {
                cacheInput = CacheDirectory.OpenInput(fileName);
            }
            catch (IOException)
            {
                // This occurs if the file doesn't exist. We previously threw in that case,
                // so we keep doing that for now; it is also quicker than first checking
                // for existence and then opening.
                throw;
            }

            if (cacheInput != null)
            {
                IndexOutput masterOutput = null;
                try
                {
                    masterOutput = MasterDirectory.CreateOutput(fileName);
                    cacheInput.CopyTo(masterOutput, fileName);
                }
                finally
                {
                    masterOutput?.Dispose();
                    cacheInput?.Dispose();
                }
            }

#if FULLDEBUG
            Trace.WriteLine($"CLOSED WRITESTREAM {_name}");
#endif
            // clean up
            _cacheDirIndexOutput = null;
        }

        GC.SuppressFinalize(this);
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
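// Hypothetical caller-side sketch of the contract described above: writes go to the
// cache-backed IndexOutput, and Dispose() is what persists the file to master storage.
// The directory, file name, and the 3.x-style single-argument CreateOutput are
// illustrative assumptions, not taken from the source.
static void WriteAndPersist(Directory syncDirectory, byte[] data)
{
    IndexOutput output = syncDirectory.CreateOutput("_0.cfs"); // hypothetical file name
    try
    {
        output.WriteBytes(data, data.Length);
    }
    finally
    {
        // Dispose flushes the local cache file, then copies it to master storage
        output.Dispose();
    }
}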
private static void UnidirectionalSync(AzureDirectory sourceDirectory, Directory destinationDirectory)
{
    var sourceFiles = sourceDirectory.ListAll();
    var fileNameFilter = IndexFileNameFilter.Filter;
    byte[] buffer = new byte[16384];

    foreach (string sourceFile in sourceFiles)
    {
        // only copy file if it is accepted by Lucene's default filter
        // and it does not already exist (except for segment map files, we always want those)
        if (fileNameFilter.Accept(null, sourceFile) && (!destinationDirectory.FileExists(sourceFile) || sourceFile.StartsWith("segment")))
        {
            IndexOutput indexOutput = null;
            IndexInput indexInput = null;
            try
            {
                indexOutput = destinationDirectory.CreateOutput(sourceFile);
                indexInput = sourceDirectory.OpenInput(sourceFile);

                long length = indexInput.Length();
                long position = 0;
                while (position < length)
                {
                    int bytesToRead = position + 16384L > length ? (int)(length - position) : 16384;
                    indexInput.ReadBytes(buffer, 0, bytesToRead);
                    indexOutput.WriteBytes(buffer, bytesToRead);
                    position += bytesToRead;
                }
            }
            finally
            {
                try
                {
                    indexOutput?.Dispose();
                }
                finally
                {
                    indexInput?.Dispose();
                }
            }
        }
    }

    // we'll remove old files from both AzureDirectory's cache directory and our destination
    // directory (only when older than 45 minutes - old files may still have active searches
    // on them, so we need a margin)
    var referenceTimestamp = LuceneTimestampFromDateTime(DateTime.UtcNow.AddMinutes(-45));

    // remove old files from AzureDirectory cache directory
    RemoveOldFiles(sourceDirectory.CacheDirectory, sourceFiles, referenceTimestamp);

    // remove old files from destination directory
    RemoveOldFiles(destinationDirectory, sourceFiles, referenceTimestamp);
}
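// The copy loop above moves data in 16 KB chunks because IndexInput/IndexOutput expose no
// stream interface. A minimal standalone sketch of the same idiom, using plain .NET streams
// (System.IO) in place of Lucene's IndexInput/IndexOutput:
static void CopyInChunks(Stream source, Stream destination)
{
    byte[] buffer = new byte[16384];
    int bytesRead;
    // Read returns 0 at end of stream, terminating the loop
    while ((bytesRead = source.Read(buffer, 0, buffer.Length)) > 0)
    {
        destination.Write(buffer, 0, bytesRead);
    }
}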
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        try
        {
            Sleep(closeDelayMillis + GetDelay(true));
        }
        finally
        {
            @delegate?.Dispose(); // LUCENENET specific - only call if non-null
        }
    }
}
private void Demo_FSIndexInputBug(Directory fsdir, string file)
{
    // Setup the test file - we need more than 1024 bytes
    IndexOutput os = fsdir.CreateOutput(file, IOContext.DEFAULT);
    for (int i = 0; i < 2000; i++)
    {
        os.WriteByte((byte)(sbyte)i);
    }
    os.Dispose();

    IndexInput @in = fsdir.OpenInput(file, IOContext.DEFAULT);

    // this read primes the buffer in IndexInput
    @in.ReadByte();

    // Close the file
    @in.Dispose();

    // ERROR: this call should fail, but succeeds because the buffer
    // is still filled
    @in.ReadByte();

    // ERROR: this call should fail, but succeeds for some reason as well
    @in.Seek(1099);

    try
    {
        // OK: this call correctly fails. We are now past the 1024 internal
        // buffer, so an actual IO is attempted, which fails
        @in.ReadByte();
        Assert.Fail("expected readByte() to throw exception");
    }
#pragma warning disable 168
    catch (IOException e)
#pragma warning restore 168
    {
        // expected exception
    }
}
public virtual void TestDoubleClose()
{
    Directory newDir = NewDirectory();
    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), true);

    IndexOutput @out = csw.CreateOutput("d.xyz", NewIOContext(Random()));
    @out.WriteInt(0);
    @out.Dispose();

    csw.Dispose();
    // close a second time - must have no effect according to IDisposable
    csw.Dispose();

    csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random()), false);
    IndexInput openInput = csw.OpenInput("d.xyz", NewIOContext(Random()));
    Assert.AreEqual(0, openInput.ReadInt());
    openInput.Dispose();

    csw.Dispose();
    // close a second time - must have no effect according to IDisposable
    csw.Dispose();

    newDir.Dispose();
}
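// The test above relies on the IDisposable contract that a second Dispose() call is a
// no-op. A minimal sketch of the guard that typically provides this behavior (not the
// actual CompoundFileDirectory implementation, just the pattern the test exercises):
public sealed class IdempotentResource : IDisposable
{
    private bool disposed; // set once the first Dispose() runs

    public void Dispose()
    {
        if (disposed) return; // second and later calls must have no effect
        disposed = true;
        // ... release underlying handles here ...
    }
}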
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        if (_output == null)
        {
            return;
        }
        try
        {
            Write(END);
            Newline();
            SimpleTextUtil.WriteChecksum(_output, _scratch);
        }
        finally
        {
            _output.Dispose();
            _output = null;
        }
    }
}
/// <summary>
/// Called to complete TermInfos creation. </summary>
public void Dispose()
{
    try
    {
        output.Seek(4); // write size after format
        output.WriteInt64(size);
    }
    finally
    {
        try
        {
            output.Dispose();
        }
        finally
        {
            if (!isIndex)
            {
                other.Dispose();
            }
        }
    }
}
/// <summary>
/// Called to complete TermInfos creation. </summary>
public void Dispose()
{
    try
    {
        Output.Seek(4); // write size after format
        Output.WriteInt64(Size);
    }
    finally
    {
        try
        {
            Output.Dispose();
        }
        finally
        {
            if (!IsIndex)
            {
                Other.Dispose();
            }
        }
    }
}
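// Both versions above chain two disposals through nested try/finally blocks so a failure
// in the first Dispose() cannot skip the second. The idiom in isolation, as a sketch with
// plain IDisposables: even if first.Dispose() throws, second.Dispose() still runs (and an
// exception thrown from the finally block replaces the first, per .NET semantics).
static void DisposeBoth(IDisposable first, IDisposable second)
{
    try
    {
        first.Dispose();
    }
    finally
    {
        second.Dispose();
    }
}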
public virtual void TestAppendTwice()
{
    Directory newDir = NewDirectory();
    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), true);
    CreateSequenceFile(newDir, "d1", (sbyte)0, 15);

    IndexOutput @out = csw.CreateOutput("d.xyz", NewIOContext(Random));
    @out.WriteInt32(0);
    @out.Dispose();

    Assert.AreEqual(1, csw.ListAll().Length);
    Assert.AreEqual("d.xyz", csw.ListAll()[0]);

    csw.Dispose();

    CompoundFileDirectory cfr = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), false);
    Assert.AreEqual(1, cfr.ListAll().Length);
    Assert.AreEqual("d.xyz", cfr.ListAll()[0]);
    cfr.Dispose();

    newDir.Dispose();
}
public virtual void TestLargeWrites()
{
    IndexOutput os = Dir.CreateOutput("testBufferStart.txt", NewIOContext(Random()));
    var largeBuf = new byte[2048];
    for (int i = 0; i < largeBuf.Length; i++)
    {
        // note: a freshly seeded Random(1) is created on every iteration, so
        // NextDouble() always returns the same value and the buffer is filled
        // with a single repeated byte
        largeBuf[i] = (byte)unchecked((sbyte)(new Random(1).NextDouble() * 256));
    }
    long currentPos = os.FilePointer;
    os.WriteBytes(largeBuf, largeBuf.Length);
    try
    {
        Assert.AreEqual(currentPos + largeBuf.Length, os.FilePointer);
    }
    finally
    {
        os.Dispose();
    }
}
public virtual void TestManySubFiles()
{
    Directory d = NewFSDirectory(CreateTempDir("CFSManySubFiles"));
    int FILE_COUNT = AtLeast(500);

    for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
    {
        IndexOutput @out = d.CreateOutput("file." + fileIdx, NewIOContext(Random()));
        @out.WriteByte((byte)(sbyte)fileIdx);
        @out.Dispose();
    }

    CompoundFileDirectory cfd = new CompoundFileDirectory(d, "c.cfs", NewIOContext(Random()), true);
    for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
    {
        string fileName = "file." + fileIdx;
        d.Copy(cfd, fileName, fileName, NewIOContext(Random()));
    }
    cfd.Dispose();

    IndexInput[] ins = new IndexInput[FILE_COUNT];
    CompoundFileDirectory cfr = new CompoundFileDirectory(d, "c.cfs", NewIOContext(Random()), false);
    for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
    {
        ins[fileIdx] = cfr.OpenInput("file." + fileIdx, NewIOContext(Random()));
    }

    for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
    {
        Assert.AreEqual((byte)fileIdx, ins[fileIdx].ReadByte());
    }

    for (int fileIdx = 0; fileIdx < FILE_COUNT; fileIdx++)
    {
        ins[fileIdx].Dispose();
    }
    cfr.Dispose();
    d.Dispose();
}
public override void Dispose()
{
    try
    {
        if (hitExcDuringWrite)
        {
            return;
        }

        // stuff 0s in until the "real" data is flushed:
        var stuffed = 0;
        while (upto > stuffed)
        {
            upto -= Add(0) - 1;
            Debug.Assert(upto >= 0);
            stuffed += 1;
        }
    }
    finally
    {
        output.Dispose();
    }
}
/// <summary>
/// Save a single segment's info. </summary>
public override void Write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext)
{
    string fileName = IndexFileNames.SegmentFileName(si.Name, "", Lucene40SegmentInfoFormat.SI_EXTENSION);
    si.AddFile(fileName);

    IndexOutput output = dir.CreateOutput(fileName, ioContext);
    bool success = false;
    try
    {
        CodecUtil.WriteHeader(output, Lucene40SegmentInfoFormat.CODEC_NAME, Lucene40SegmentInfoFormat.VERSION_CURRENT);
        // Write the Lucene version that created this segment, since 3.1
        output.WriteString(si.Version);
        output.WriteInt32(si.DocCount);
        output.WriteByte((byte)(sbyte)(si.UseCompoundFile ? SegmentInfo.YES : SegmentInfo.NO));
        output.WriteStringStringMap(si.Diagnostics);
        output.WriteStringStringMap(Collections.EmptyMap<string, string>());
        output.WriteStringSet(si.GetFiles());
        success = true;
    }
    finally
    {
        if (!success)
        {
            IOUtils.DisposeWhileHandlingException(output);
            si.Dir.DeleteFile(fileName);
        }
        else
        {
            output.Dispose();
        }
    }
}
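// The success-flag pattern above keeps the original write failure visible: the happy path
// disposes the output normally, while on the error path cleanup must not throw and mask
// the first exception. A minimal sketch of the idiom with a plain IDisposable standing in
// for IndexOutput; the catch-all mirrors what IOUtils.DisposeWhileHandlingException does.
static void WriteThenDispose(IDisposable output, Action write)
{
    bool success = false;
    try
    {
        write();
        success = true;
    }
    finally
    {
        if (success)
        {
            output.Dispose();          // normal close; any exception propagates
        }
        else
        {
            try { output.Dispose(); }  // best-effort close
            catch { /* suppressed so the original exception wins */ }
        }
    }
}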
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        Flush();
        cacheOutput.Dispose();

        if (isWritten && isFlushed)
        {
            var fileInfo = new FileInfo(indexDirectory.GetFullPath(indexFileName));
            using (var fs = new FileStream(indexDirectory.GetFullPath(indexFileName), FileMode.Open, FileAccess.Read))
            {
                var fullName = indexDirectory.GetFullName(indexFileName);
                var options = new GridFSUploadOptions
                {
                    Metadata = new BsonDocument
                    {
                        ["WrittenTime"] = fileInfo.LastWriteTimeUtc
                    }
                };
                try
                {
                    indexDirectory.Bucket.UploadFromStream(fullName, indexFileName, fs, options);
                }
                catch (MongoBulkWriteException ex) when (ex.WriteErrors.Any(x => x.Code == 11000))
                {
                    // 11000 = MongoDB duplicate-key error: the file already exists, so replace it
                    indexDirectory.Bucket.Delete(fullName);
                    indexDirectory.Bucket.UploadFromStream(fullName, indexFileName, fs, options);
                }
            }
        }
    }
}
public virtual void TestReadNestedCFP()
{
    Directory newDir = NewDirectory();
    CompoundFileDirectory csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), true);
    CompoundFileDirectory nested = new CompoundFileDirectory(newDir, "b.cfs", NewIOContext(Random), true);

    IndexOutput @out = nested.CreateOutput("b.xyz", NewIOContext(Random));
    IndexOutput out1 = nested.CreateOutput("b_1.xyz", NewIOContext(Random));
    @out.WriteInt32(0);
    out1.WriteInt32(1);
    @out.Dispose();
    out1.Dispose();
    nested.Dispose();

    newDir.Copy(csw, "b.cfs", "b.cfs", NewIOContext(Random));
    newDir.Copy(csw, "b.cfe", "b.cfe", NewIOContext(Random));
    newDir.DeleteFile("b.cfs");
    newDir.DeleteFile("b.cfe");
    csw.Dispose();

    Assert.AreEqual(2, newDir.ListAll().Length);

    csw = new CompoundFileDirectory(newDir, "d.cfs", NewIOContext(Random), false);
    Assert.AreEqual(2, csw.ListAll().Length);

    nested = new CompoundFileDirectory(csw, "b.cfs", NewIOContext(Random), false);
    Assert.AreEqual(2, nested.ListAll().Length);

    IndexInput openInput = nested.OpenInput("b.xyz", NewIOContext(Random));
    Assert.AreEqual(0, openInput.ReadInt32());
    openInput.Dispose();

    openInput = nested.OpenInput("b_1.xyz", NewIOContext(Random));
    Assert.AreEqual(1, openInput.ReadInt32());
    openInput.Dispose();

    nested.Dispose();
    csw.Dispose();
    newDir.Dispose();
}
protected override void Dispose(bool disposing)
{
    _fileMutex.WaitOne();
    try
    {
        // make sure it's all written out
        _indexOutput.Flush();
        long originalLength = _indexOutput.Length;
        _indexOutput.Dispose();

        using (var blobStream = new StreamInput(CacheDirectory.OpenInput(_name, IOContext.DEFAULT)))
        {
            // push the blobStream up to the cloud
            _blob.UploadFromStream(blobStream);

            // set the metadata with the original index file properties
            _blob.SetMetadata();

            Debug.WriteLine(string.Format("PUT {1} bytes to {0} in cloud", _name, blobStream.Length));
        }

#if FULLDEBUG
        Debug.WriteLine(string.Format("CLOSED WRITESTREAM {0}", _name));
#endif
        // clean up
        _indexOutput = null;
        _blobContainer = null;
        _blob = null;
        GC.SuppressFinalize(this);
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
public virtual void TestLargeWrites()
{
    IndexOutput os = dir.CreateOutput("testBufferStart.txt", NewIOContext(Random));
    var largeBuf = new byte[2048];
    for (int i = 0; i < largeBuf.Length; i++)
    {
        largeBuf[i] = (byte)(new Random(1).NextDouble() * 256);
    }
    long currentPos = os.Position; // LUCENENET specific: Renamed from getFilePointer() to match FileStream
    os.WriteBytes(largeBuf, largeBuf.Length);
    try
    {
        Assert.AreEqual(currentPos + largeBuf.Length, os.Position); // LUCENENET specific: Renamed from getFilePointer() to match FileStream
    }
    finally
    {
        os.Dispose();
    }
}
public virtual void TestLargeWrites()
{
    IndexOutput os = dir.CreateOutput("testBufferStart.txt", NewIOContext(Random));
    var largeBuf = new byte[2048];
    for (int i = 0; i < largeBuf.Length; i++)
    {
        largeBuf[i] = (byte)(Random.NextDouble() * 256); // LUCENENET: Using Random, since Math.random() doesn't exist in .NET, and it seems to make sense to make this repeatable.
    }
    long currentPos = os.Position; // LUCENENET specific: Renamed from getFilePointer() to match FileStream
    os.WriteBytes(largeBuf, largeBuf.Length);
    try
    {
        Assert.AreEqual(currentPos + largeBuf.Length, os.Position); // LUCENENET specific: Renamed from getFilePointer() to match FileStream
    }
    finally
    {
        os.Dispose();
    }
}
private bool disposed = false; // LUCENENET specific

protected override void Dispose(bool disposing)
{
    if (disposing && !disposed)
    {
        disposed = true;
        _wrappedPostingsWriter.Dispose();
        _buffer.Dispose(); // LUCENENET specific

        if (_wrappedPostingsWriter is PulsingPostingsWriter || VERSION_CURRENT < VERSION_META_ARRAY)
        {
            return;
        }

        var summaryFileName = IndexFileNames.SegmentFileName(_segmentState.SegmentInfo.Name, _segmentState.SegmentSuffix, SUMMARY_EXTENSION);
        IndexOutput output = null;
        try
        {
            output = _segmentState.Directory.CreateOutput(summaryFileName, _segmentState.Context);
            CodecUtil.WriteHeader(output, CODEC, VERSION_CURRENT);
            output.WriteVInt32(_fields.Count);
            foreach (var field in _fields)
            {
                output.WriteVInt32(field.FieldNumber);
                output.WriteVInt32(field.Int64sSize);
            }
            output.Dispose();
        }
        finally
        {
            IOUtils.DisposeWhileHandlingException(output);
        }
    }
}
public override FieldsConsumer FieldsConsumer(SegmentWriteState state)
{
    int minSkipInterval;
    if (state.SegmentInfo.DocCount > 1000000)
    {
        // Test2BPostings can OOME otherwise:
        minSkipInterval = 3;
    }
    else
    {
        minSkipInterval = 2;
    }

    // we pull this before the seed intentionally: because it's not consumed at runtime
    // (the skipInterval is written into the postings header)
    int skipInterval = TestUtil.NextInt32(seedRandom, minSkipInterval, 10);

    if (LuceneTestCase.Verbose)
    {
        Console.WriteLine("MockRandomCodec: skipInterval=" + skipInterval);
    }

    long seed = seedRandom.NextInt64();

    if (LuceneTestCase.Verbose)
    {
        Console.WriteLine("MockRandomCodec: writing to seg=" + state.SegmentInfo.Name + " formatID=" + state.SegmentSuffix + " seed=" + seed);
    }

    string seedFileName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, SEED_EXT);
    IndexOutput @out = state.Directory.CreateOutput(seedFileName, state.Context);
    try
    {
        @out.WriteInt64(seed);
    }
    finally
    {
        @out.Dispose();
    }

    Random random = new J2N.Randomizer(seed);

    random.Next(); // consume a random for buffersize

    PostingsWriterBase postingsWriter;
    if (random.NextBoolean())
    {
        postingsWriter = new SepPostingsWriter(state, new MockInt32StreamFactory(random), skipInterval);
    }
    else
    {
        if (LuceneTestCase.Verbose)
        {
            Console.WriteLine("MockRandomCodec: writing Standard postings");
        }
        // TODO: randomize variables like acceptableOverhead?!
        postingsWriter = new Lucene41PostingsWriter(state, skipInterval);
    }

    if (random.NextBoolean())
    {
        int totTFCutoff = TestUtil.NextInt32(random, 1, 20);
        if (LuceneTestCase.Verbose)
        {
            Console.WriteLine("MockRandomCodec: writing pulsing postings with totTFCutoff=" + totTFCutoff);
        }
        postingsWriter = new PulsingPostingsWriter(state, totTFCutoff, postingsWriter);
    }

    FieldsConsumer fields;
    int t1 = random.Next(4);

    if (t1 == 0)
    {
        bool success = false;
        try
        {
            fields = new FSTTermsWriter(state, postingsWriter);
            success = true;
        }
        finally
        {
            if (!success)
            {
                postingsWriter.Dispose();
            }
        }
    }
    else if (t1 == 1)
    {
        bool success = false;
        try
        {
            fields = new FSTOrdTermsWriter(state, postingsWriter);
            success = true;
        }
        finally
        {
            if (!success)
            {
                postingsWriter.Dispose();
            }
        }
    }
    else if (t1 == 2)
    {
        // Use BlockTree terms dict
        if (LuceneTestCase.Verbose)
        {
            Console.WriteLine("MockRandomCodec: writing BlockTree terms dict");
        }

        // TODO: would be nice to allow 1 but this is very
        // slow to write
        int minTermsInBlock = TestUtil.NextInt32(random, 2, 100);
        int maxTermsInBlock = Math.Max(2, (minTermsInBlock - 1) * 2 + random.Next(100));

        bool success = false;
        try
        {
            fields = new BlockTreeTermsWriter(state, postingsWriter, minTermsInBlock, maxTermsInBlock);
            success = true;
        }
        finally
        {
            if (!success)
            {
                postingsWriter.Dispose();
            }
        }
    }
    else
    {
        if (LuceneTestCase.Verbose)
        {
            Console.WriteLine("MockRandomCodec: writing Block terms dict");
        }

        bool success = false;
        TermsIndexWriterBase indexWriter;
        try
        {
            if (random.NextBoolean())
            {
                state.TermIndexInterval = TestUtil.NextInt32(random, 1, 100);
                if (LuceneTestCase.Verbose)
                {
                    Console.WriteLine("MockRandomCodec: fixed-gap terms index (tii=" + state.TermIndexInterval + ")");
                }
                indexWriter = new FixedGapTermsIndexWriter(state);
            }
            else
            {
                VariableGapTermsIndexWriter.IndexTermSelector selector;
                int n2 = random.Next(3);
                if (n2 == 0)
                {
                    int tii = TestUtil.NextInt32(random, 1, 100);
                    selector = new VariableGapTermsIndexWriter.EveryNTermSelector(tii);
                    if (LuceneTestCase.Verbose)
                    {
                        Console.WriteLine("MockRandomCodec: variable-gap terms index (tii=" + tii + ")");
                    }
                }
                else if (n2 == 1)
                {
                    int docFreqThresh = TestUtil.NextInt32(random, 2, 100);
                    int tii = TestUtil.NextInt32(random, 1, 100);
                    selector = new VariableGapTermsIndexWriter.EveryNOrDocFreqTermSelector(docFreqThresh, tii);
                }
                else
                {
                    long seed2 = random.NextInt64();
                    int gap = TestUtil.NextInt32(random, 2, 40);
                    if (LuceneTestCase.Verbose)
                    {
                        Console.WriteLine("MockRandomCodec: random-gap terms index (max gap=" + gap + ")");
                    }
                    selector = new IndexTermSelectorAnonymousClass(seed2, gap);
                }
                indexWriter = new VariableGapTermsIndexWriter(state, selector);
            }
            success = true;
        }
        finally
        {
            if (!success)
            {
                postingsWriter.Dispose();
            }
        }

        success = false;
        try
        {
            fields = new BlockTermsWriter(indexWriter, state, postingsWriter);
            success = true;
        }
        finally
        {
            if (!success)
            {
                try
                {
                    postingsWriter.Dispose();
                }
                finally
                {
                    indexWriter.Dispose();
                }
            }
        }
    }

    return fields;
}
public override void Write(Directory directory, string segmentName, string segmentSuffix, FieldInfos infos, IOContext context)
{
    string fileName = IndexFileNames.SegmentFileName(segmentName, "", FIELD_INFOS_EXTENSION);
    IndexOutput output = directory.CreateOutput(fileName, context);
    bool success = false;
    try
    {
        output.WriteVInt32(FORMAT_PREFLEX_RW);
        output.WriteVInt32(infos.Count);
        foreach (FieldInfo fi in infos)
        {
            sbyte bits = 0x0;
            if (fi.HasVectors)
            {
                bits |= STORE_TERMVECTOR;
            }
            if (fi.OmitsNorms)
            {
                bits |= OMIT_NORMS;
            }
            if (fi.HasPayloads)
            {
                bits |= STORE_PAYLOADS;
            }
            if (fi.IsIndexed)
            {
                bits |= IS_INDEXED;
                if (Debugging.AssertsEnabled)
                {
                    Debugging.Assert(fi.IndexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads);
                }
                if (fi.IndexOptions == IndexOptions.DOCS_ONLY)
                {
                    bits |= OMIT_TERM_FREQ_AND_POSITIONS;
                }
                else if (fi.IndexOptions == IndexOptions.DOCS_AND_FREQS)
                {
                    bits |= OMIT_POSITIONS;
                }
            }
            output.WriteString(fi.Name);
            /*
             * we need to write the field number since IW tries
             * to stabilize the field numbers across segments so the
             * FI ordinal is not necessarily equivalent to the field number
             */
            output.WriteInt32(fi.Number);
            output.WriteByte((byte)bits);
            if (fi.IsIndexed && !fi.OmitsNorms)
            {
                // to allow null norm types we need to indicate if norms are written
                // only in RW case
                output.WriteByte((byte)(fi.NormType == Index.DocValuesType.NONE ? 0 : 1));
            }
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert(fi.Attributes is null); // not used or supported
            }
        }
        success = true;
    }
    finally
    {
        if (success)
        {
            output.Dispose();
        }
        else
        {
            IOUtils.DisposeWhileHandlingException(output);
        }
    }
}
public virtual void TestEncodeDecode()
{
    int iterations = RandomInts.RandomInt32Between(Random, 1, 1000);
    float AcceptableOverheadRatio = (float)Random.NextDouble();
    int[] values = new int[(iterations - 1) * Lucene41PostingsFormat.BLOCK_SIZE + ForUtil.MAX_DATA_SIZE];
    for (int i = 0; i < iterations; ++i)
    {
        int bpv = Random.Next(32);
        if (bpv == 0)
        {
            int value = RandomInts.RandomInt32Between(Random, 0, int.MaxValue);
            for (int j = 0; j < Lucene41PostingsFormat.BLOCK_SIZE; ++j)
            {
                values[i * Lucene41PostingsFormat.BLOCK_SIZE + j] = value;
            }
        }
        else
        {
            for (int j = 0; j < Lucene41PostingsFormat.BLOCK_SIZE; ++j)
            {
                values[i * Lucene41PostingsFormat.BLOCK_SIZE + j] = RandomInts.RandomInt32Between(Random, 0, (int)PackedInt32s.MaxValue(bpv));
            }
        }
    }

    Directory d = new RAMDirectory();
    long endPointer;

    { // encode
        IndexOutput @out = d.CreateOutput("test.bin", IOContext.DEFAULT);
        ForUtil forUtil = new ForUtil(AcceptableOverheadRatio, @out);
        for (int i = 0; i < iterations; ++i)
        {
            forUtil.WriteBlock(Arrays.CopyOfRange(values, i * Lucene41PostingsFormat.BLOCK_SIZE, values.Length), new byte[Lucene41.ForUtil.MAX_ENCODED_SIZE], @out);
        }
        endPointer = @out.Position; // LUCENENET specific: Renamed from getFilePointer() to match FileStream
        @out.Dispose();
    }

    { // decode
        IndexInput @in = d.OpenInput("test.bin", IOContext.READ_ONCE);
        ForUtil forUtil = new ForUtil(@in);
        for (int i = 0; i < iterations; ++i)
        {
            if (Random.NextBoolean())
            {
                forUtil.SkipBlock(@in);
                continue;
            }
            int[] restored = new int[Lucene41.ForUtil.MAX_DATA_SIZE];
            forUtil.ReadBlock(@in, new byte[Lucene41.ForUtil.MAX_ENCODED_SIZE], restored);
            Assert.AreEqual(Arrays.CopyOfRange(values, i * Lucene41PostingsFormat.BLOCK_SIZE, (i + 1) * Lucene41PostingsFormat.BLOCK_SIZE), Arrays.CopyOf(restored, Lucene41PostingsFormat.BLOCK_SIZE));
        }
        assertEquals(endPointer, @in.Position); // LUCENENET specific: Renamed from getFilePointer() to match FileStream
        @in.Dispose();
    }
}
public void Dispose()
{
    fieldsIndexOut.Dispose();
}
internal virtual FST<T> DoTest(int prune1, int prune2, bool allowRandomSuffixSharing)
{
    if (LuceneTestCase.VERBOSE)
    {
        Console.WriteLine("\nTEST: prune1=" + prune1 + " prune2=" + prune2);
    }

    bool willRewrite = Random.NextBoolean();

    Builder<T> builder = new Builder<T>(InputMode == 0 ? FST.INPUT_TYPE.BYTE1 : FST.INPUT_TYPE.BYTE4,
        prune1, prune2,
        prune1 == 0 && prune2 == 0,
        allowRandomSuffixSharing ? Random.NextBoolean() : true,
        allowRandomSuffixSharing ? TestUtil.NextInt(Random, 1, 10) : int.MaxValue,
        Outputs, null, willRewrite, PackedInts.DEFAULT, true, 15);

    if (LuceneTestCase.VERBOSE)
    {
        if (willRewrite)
        {
            Console.WriteLine("TEST: packed FST");
        }
        else
        {
            Console.WriteLine("TEST: non-packed FST");
        }
    }

    foreach (InputOutput<T> pair in Pairs)
    {
        if (pair.Output is IList)
        {
            IList<long> longValues = (IList<long>)pair.Output;
            Builder<object> builderObject = builder as Builder<object>;
            foreach (long value in longValues)
            {
                builderObject.Add(pair.Input, value);
            }
        }
        else
        {
            builder.Add(pair.Input, pair.Output);
        }
    }

    FST<T> fst = builder.Finish();

    if (Random.NextBoolean() && fst != null && !willRewrite)
    {
        IOContext context = LuceneTestCase.NewIOContext(Random);
        IndexOutput @out = Dir.CreateOutput("fst.bin", context);
        fst.Save(@out);
        @out.Dispose();

        IndexInput @in = Dir.OpenInput("fst.bin", context);
        try
        {
            fst = new FST<T>(@in, Outputs);
        }
        finally
        {
            @in.Dispose();
            Dir.DeleteFile("fst.bin");
        }
    }

    if (LuceneTestCase.VERBOSE && Pairs.Count <= 20 && fst != null)
    {
        // FileMode.Create (rather than Open) so the dot file is created if it doesn't exist yet
        TextWriter w = new StreamWriter(new FileStream("out.dot", FileMode.Create), IOUtils.CHARSET_UTF_8);
        Util.toDot(fst, w, false, false);
        w.Close();
        Console.WriteLine("SAVED out.dot");
    }

    if (LuceneTestCase.VERBOSE)
    {
        if (fst == null)
        {
            Console.WriteLine("  fst has 0 nodes (fully pruned)");
        }
        else
        {
            Console.WriteLine("  fst has " + fst.NodeCount + " nodes and " + fst.ArcCount + " arcs");
        }
    }

    if (prune1 == 0 && prune2 == 0)
    {
        VerifyUnPruned(InputMode, fst);
    }
    else
    {
        VerifyPruned(InputMode, fst, prune1, prune2);
    }

    return fst;
}
public override void Write(Directory directory, string segmentName, string segmentSuffix, FieldInfos infos, IOContext context)
{
    string fileName = IndexFileNames.SegmentFileName(segmentName, "", Lucene42FieldInfosFormat.EXTENSION);
    IndexOutput output = directory.CreateOutput(fileName, context);
    bool success = false;
    try
    {
        CodecUtil.WriteHeader(output, Lucene42FieldInfosFormat.CODEC_NAME, Lucene42FieldInfosFormat.FORMAT_CURRENT);
        output.WriteVInt(infos.Size());
        foreach (FieldInfo fi in infos)
        {
            FieldInfo.IndexOptions? indexOptions = fi.FieldIndexOptions;
            sbyte bits = 0x0;
            if (fi.HasVectors())
            {
                bits |= Lucene42FieldInfosFormat.STORE_TERMVECTOR;
            }
            if (fi.OmitsNorms())
            {
                bits |= Lucene42FieldInfosFormat.OMIT_NORMS;
            }
            if (fi.HasPayloads())
            {
                bits |= Lucene42FieldInfosFormat.STORE_PAYLOADS;
            }
            if (fi.Indexed)
            {
                bits |= Lucene42FieldInfosFormat.IS_INDEXED;
                Debug.Assert(indexOptions >= FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS || !fi.HasPayloads());
                if (indexOptions == FieldInfo.IndexOptions.DOCS_ONLY)
                {
                    bits |= Lucene42FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS;
                }
                else if (indexOptions == FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
                {
                    bits |= Lucene42FieldInfosFormat.STORE_OFFSETS_IN_POSTINGS;
                }
                else if (indexOptions == FieldInfo.IndexOptions.DOCS_AND_FREQS)
                {
                    bits |= Lucene42FieldInfosFormat.OMIT_POSITIONS;
                }
            }
            output.WriteString(fi.Name);
            output.WriteVInt(fi.Number);
            output.WriteByte(bits);

            // pack the DV types in one byte
            sbyte dv = DocValuesByte(fi.DocValuesType);
            sbyte nrm = DocValuesByte(fi.NormType);
            Debug.Assert((dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
            sbyte val = unchecked((sbyte)(0xff & ((nrm << 4) | dv)));
            output.WriteByte(val);
            output.WriteStringStringMap(fi.Attributes());
        }
        success = true;
    }
    finally
    {
        if (success)
        {
            output.Dispose();
        }
        else
        {
            IOUtils.CloseWhileHandlingException(output);
        }
    }
}
public virtual void TestDataInputOutput()
{
    Random random = Random();
    for (int iter = 0; iter < 5 * RANDOM_MULTIPLIER; iter++)
    {
        BaseDirectoryWrapper dir = NewFSDirectory(CreateTempDir("testOverflow"));
        if (dir is MockDirectoryWrapper)
        {
            ((MockDirectoryWrapper)dir).Throttling = MockDirectoryWrapper.Throttling_e.NEVER;
        }

        int blockBits = TestUtil.NextInt(random, 1, 20);
        int blockSize = 1 << blockBits;
        PagedBytes p = new PagedBytes(blockBits);
        IndexOutput @out = dir.CreateOutput("foo", IOContext.DEFAULT);
        int numBytes = TestUtil.NextInt(Random(), 2, 10000000);

        byte[] answer = new byte[numBytes];
        Random().NextBytes(answer);
        int written = 0;
        while (written < numBytes)
        {
            if (Random().Next(10) == 7)
            {
                @out.WriteByte(answer[written++]);
            }
            else
            {
                int chunk = Math.Min(Random().Next(1000), numBytes - written);
                @out.WriteBytes(answer, written, chunk);
                written += chunk;
            }
        }

        @out.Dispose();
        IndexInput input = dir.OpenInput("foo", IOContext.DEFAULT);
        DataInput @in = (DataInput)input.Clone();

        p.Copy(input, input.Length());
        PagedBytes.Reader reader = p.Freeze(random.NextBoolean());

        byte[] verify = new byte[numBytes];
        int read = 0;
        while (read < numBytes)
        {
            if (Random().Next(10) == 7)
            {
                verify[read++] = @in.ReadByte();
            }
            else
            {
                int chunk = Math.Min(Random().Next(1000), numBytes - read);
                @in.ReadBytes(verify, read, chunk);
                read += chunk;
            }
        }
        Assert.IsTrue(Arrays.Equals(answer, verify));

        BytesRef slice = new BytesRef();
        for (int iter2 = 0; iter2 < 100; iter2++)
        {
            int pos = random.Next(numBytes - 1);
            int len = random.Next(Math.Min(blockSize + 1, numBytes - pos));
            reader.FillSlice(slice, pos, len);
            for (int byteUpto = 0; byteUpto < len; byteUpto++)
            {
                Assert.AreEqual(answer[pos + byteUpto], (byte)slice.Bytes[slice.Offset + byteUpto]);
            }
        }
        input.Dispose();
        dir.Dispose();
    }
}
protected override void Dispose(bool disposing)
{
    _fileMutex.WaitOne();
    try
    {
        string fileName = _name;

        // make sure it's all written out
        _indexOutput.Flush();

        long originalLength = _indexOutput.Length;
        _indexOutput.Dispose();

        Stream blobStream;
#if COMPRESSBLOBS
        // optionally put a compressor around the blob stream
        if (_azureDirectory.ShouldCompressFile(_name))
        {
            // unfortunately, deflate stream doesn't allow seek, and we need a seekable stream
            // to pass to the blob storage stuff, so we compress into a memory stream
            MemoryStream compressedStream = new MemoryStream();

            try
            {
                IndexInput indexInput = CacheDirectory.OpenInput(fileName);
                using (DeflateStream compressor = new DeflateStream(compressedStream, CompressionMode.Compress, true))
                {
                    // compress to compressedOutputStream
                    byte[] bytes = new byte[indexInput.Length()];
                    indexInput.ReadBytes(bytes, 0, (int)bytes.Length);
                    compressor.Write(bytes, 0, (int)bytes.Length);
                }
                indexInput.Close();

                // seek back to beginning of compressed stream
                compressedStream.Seek(0, SeekOrigin.Begin);

                Debug.WriteLine(string.Format("COMPRESSED {0} -> {1} {2}% to {3}",
                    originalLength,
                    compressedStream.Length,
                    ((float)compressedStream.Length / (float)originalLength) * 100,
                    _name));
            }
            catch
            {
                // release the compressed stream resources if an error occurs
                compressedStream.Dispose();
                throw;
            }

            blobStream = compressedStream;
        }
        else
#endif
        {
            blobStream = new StreamInput(CacheDirectory.OpenInput(fileName));
        }

        try
        {
            // push the blobStream up to the cloud
            _blob.UploadFromStream(blobStream);

            // set the metadata with the original index file properties
            _blob.Metadata["CachedLength"] = originalLength.ToString();
            _blob.Metadata["CachedLastModified"] = CacheDirectory.FileModified(fileName).ToString();
            _blob.SetMetadata();

            Debug.WriteLine(string.Format("PUT {1} bytes to {0} in cloud", _name, blobStream.Length));
        }
        finally
        {
            blobStream.Dispose();
        }

#if FULLDEBUG
        Debug.WriteLine(string.Format("CLOSED WRITESTREAM {0}", _name));
#endif
        // clean up
        _indexOutput = null;
        _blobContainer = null;
        _blob = null;
        GC.SuppressFinalize(this);
    }
    finally
    {
        _fileMutex.ReleaseMutex();
    }
}
public virtual void TestRandom()
{
    int iters = AtLeast(10);
    for (int iter = 0; iter < iters; iter++)
    {
        int numBytes = TestUtil.NextInt32(Random, 1, 200000);
        byte[] expected = new byte[numBytes];
        int blockBits = TestUtil.NextInt32(Random, 8, 15);
        BytesStore bytes = new BytesStore(blockBits);
        if (VERBOSE)
        {
            Console.WriteLine("TEST: iter=" + iter + " numBytes=" + numBytes + " blockBits=" + blockBits);
        }

        int pos = 0;
        while (pos < numBytes)
        {
            int op = Random.Next(8);
            if (VERBOSE)
            {
                Console.WriteLine("  cycle pos=" + pos);
            }
            switch (op)
            {
                case 0:
                {
                    // write random byte
                    byte b = (byte)Random.Next(256);
                    if (VERBOSE)
                    {
                        Console.WriteLine("    writeByte b=" + b);
                    }
                    expected[pos++] = b;
                    bytes.WriteByte(b);
                }
                break;

                case 1:
                {
                    // write random byte[]
                    int len = Random.Next(Math.Min(numBytes - pos, 100));
                    byte[] temp = new byte[len];
                    Random.NextBytes(temp);
                    if (VERBOSE)
                    {
                        Console.WriteLine("    writeBytes len=" + len + " bytes=" + Arrays.ToString(temp));
                    }
                    Array.Copy(temp, 0, expected, pos, temp.Length);
                    bytes.WriteBytes(temp, 0, temp.Length);
                    pos += len;
                }
                break;

                case 2:
                {
                    // write int @ absolute pos
                    if (pos > 4)
                    {
                        int x = Random.Next();
                        int randomPos = Random.Next(pos - 4);
                        if (VERBOSE)
                        {
                            Console.WriteLine("    abs writeInt pos=" + randomPos + " x=" + x);
                        }
                        bytes.WriteInt32(randomPos, x);
                        expected[randomPos++] = (byte)(x >> 24);
                        expected[randomPos++] = (byte)(x >> 16);
                        expected[randomPos++] = (byte)(x >> 8);
                        expected[randomPos++] = (byte)x;
                    }
                }
                break;

                case 3:
                {
                    // reverse bytes
                    if (pos > 1)
                    {
                        int len = TestUtil.NextInt32(Random, 2, Math.Min(100, pos));
                        int start;
                        if (len == pos)
                        {
                            start = 0;
                        }
                        else
                        {
                            start = Random.Next(pos - len);
                        }
                        int end = start + len - 1;
                        if (VERBOSE)
                        {
                            Console.WriteLine("    reverse start=" + start + " end=" + end + " len=" + len + " pos=" + pos);
                        }
                        bytes.Reverse(start, end);

                        while (start <= end)
                        {
                            byte b = expected[end];
                            expected[end] = expected[start];
                            expected[start] = b;
                            start++;
                            end--;
                        }
                    }
                }
                break;

                case 4:
                {
                    // abs write random byte[]
                    if (pos > 2)
                    {
                        int randomPos = Random.Next(pos - 1);
                        int len = TestUtil.NextInt32(Random, 1, Math.Min(pos - randomPos - 1, 100));
                        byte[] temp = new byte[len];
                        Random.NextBytes(temp);
                        if (VERBOSE)
                        {
                            Console.WriteLine("    abs writeBytes pos=" + randomPos + " len=" + len + " bytes=" + Arrays.ToString(temp));
                        }
                        Array.Copy(temp, 0, expected, randomPos, temp.Length);
                        bytes.WriteBytes(randomPos, temp, 0, temp.Length);
                    }
                }
                break;

                case 5:
                {
                    // copyBytes
                    if (pos > 1)
                    {
                        int src = Random.Next(pos - 1);
                        int dest = TestUtil.NextInt32(Random, src + 1, pos - 1);
                        int len = TestUtil.NextInt32(Random, 1, Math.Min(300, pos - dest));
                        if (VERBOSE)
                        {
                            Console.WriteLine("    copyBytes src=" + src + " dest=" + dest + " len=" + len);
                        }
                        Array.Copy(expected, src, expected, dest, len);
                        bytes.CopyBytes(src, dest, len);
                    }
                }
                break;

                case 6:
                {
                    // skip
                    int len = Random.Next(Math.Min(100, numBytes - pos));
                    if (VERBOSE)
                    {
                        Console.WriteLine("    skip len=" + len);
                    }
                    pos += len;
                    bytes.SkipBytes(len);

                    // NOTE: must fill in zeros in case truncate was
                    // used, else we get false fails:
                    if (len > 0)
                    {
                        byte[] zeros = new byte[len];
                        bytes.WriteBytes(pos - len, zeros, 0, len);
                    }
                }
                break;

                case 7:
                {
                    // absWriteByte
                    if (pos > 0)
                    {
                        int dest = Random.Next(pos);
                        byte b = (byte)Random.Next(256);
                        expected[dest] = b;
                        bytes.WriteByte(dest, b);
                    }
                    break;
                }
            }

            Assert.AreEqual(pos, bytes.Position);

            if (pos > 0 && Random.Next(50) == 17)
            {
                // truncate
                int len = TestUtil.NextInt32(Random, 1, Math.Min(pos, 100));
                bytes.Truncate(pos - len);
                pos -= len;
                Arrays.Fill(expected, pos, pos + len, (byte)0);
                if (VERBOSE)
                {
                    Console.WriteLine("    truncate len=" + len + " newPos=" + pos);
                }
            }

            if ((pos > 0 && Random.Next(200) == 17))
            {
                Verify(bytes, expected, pos);
            }
        }

        BytesStore bytesToVerify;

        if (Random.NextBoolean())
        {
            if (VERBOSE)
            {
                Console.WriteLine("TEST: save/load final bytes");
            }
            Directory dir = NewDirectory();
            IndexOutput @out = dir.CreateOutput("bytes", IOContext.DEFAULT);
            bytes.WriteTo(@out);
            @out.Dispose();
            IndexInput @in = dir.OpenInput("bytes", IOContext.DEFAULT);
            bytesToVerify = new BytesStore(@in, numBytes, TestUtil.NextInt32(Random, 256, int.MaxValue));
            @in.Dispose();
            dir.Dispose();
        }
        else
        {
            bytesToVerify = bytes;
        }

        Verify(bytesToVerify, expected, numBytes);
    }
}
public override void Write(Directory directory, string segmentName, string segmentSuffix, FieldInfos infos, IOContext context)
{
    string fileName = IndexFileNames.SegmentFileName(segmentName, "", Lucene40FieldInfosFormat.FIELD_INFOS_EXTENSION);
    IndexOutput output = directory.CreateOutput(fileName, context);
    bool success = false;
    try
    {
        CodecUtil.WriteHeader(output, Lucene40FieldInfosFormat.CODEC_NAME, Lucene40FieldInfosFormat.FORMAT_CURRENT);
        output.WriteVInt32(infos.Count);
        foreach (FieldInfo fi in infos)
        {
            IndexOptions indexOptions = fi.IndexOptions;
            sbyte bits = 0x0;
            if (fi.HasVectors)
            {
                bits |= Lucene40FieldInfosFormat.STORE_TERMVECTOR;
            }
            if (fi.OmitsNorms)
            {
                bits |= Lucene40FieldInfosFormat.OMIT_NORMS;
            }
            if (fi.HasPayloads)
            {
                bits |= Lucene40FieldInfosFormat.STORE_PAYLOADS;
            }
            if (fi.IsIndexed)
            {
                bits |= Lucene40FieldInfosFormat.IS_INDEXED;
                // LUCENENET specific - to avoid boxing, changed from CompareTo() to IndexOptionsComparer.Compare()
                if (Debugging.AssertsEnabled)
                {
                    Debugging.Assert(IndexOptionsComparer.Default.Compare(indexOptions, IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0 || !fi.HasPayloads);
                }
                if (indexOptions == IndexOptions.DOCS_ONLY)
                {
                    bits |= Lucene40FieldInfosFormat.OMIT_TERM_FREQ_AND_POSITIONS;
                }
                else if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
                {
                    bits |= Lucene40FieldInfosFormat.STORE_OFFSETS_IN_POSTINGS;
                }
                else if (indexOptions == IndexOptions.DOCS_AND_FREQS)
                {
                    bits |= Lucene40FieldInfosFormat.OMIT_POSITIONS;
                }
            }
            output.WriteString(fi.Name);
            output.WriteVInt32(fi.Number);
            output.WriteByte((byte)bits);

            // pack the DV types in one byte
            byte dv = DocValuesByte(fi.DocValuesType, fi.GetAttribute(Lucene40FieldInfosReader.LEGACY_DV_TYPE_KEY));
            byte nrm = DocValuesByte(fi.NormType, fi.GetAttribute(Lucene40FieldInfosReader.LEGACY_NORM_TYPE_KEY));
            if (Debugging.AssertsEnabled)
            {
                Debugging.Assert((dv & (~0xF)) == 0 && (nrm & (~0x0F)) == 0);
            }
            var val = (byte)(0xff & ((nrm << 4) | (byte)dv));
            output.WriteByte(val);
            output.WriteStringStringMap(fi.Attributes);
        }
        success = true;
    }
    finally
    {
        if (success)
        {
            output.Dispose();
        }
        else
        {
            IOUtils.DisposeWhileHandlingException(output);
        }
    }
}
public virtual void Test()
{
    int[] ints = new int[7];
    Int32sRef input = new Int32sRef(ints, 0, ints.Length);
    int seed = Random.Next();

    Directory dir = new MMapDirectory(CreateTempDir("2BFST"));

    for (int doPackIter = 0; doPackIter < 2; doPackIter++)
    {
        bool doPack = doPackIter == 1;

        // Build FST w/ NoOutputs and stop when nodeCount > 2.2B
        if (!doPack)
        {
            Console.WriteLine("\nTEST: 3B nodes; doPack=false output=NO_OUTPUTS");
            Outputs<object> outputs = NoOutputs.Singleton;
            object NO_OUTPUT = outputs.NoOutput;
            Builder<object> b = new Builder<object>(FST.INPUT_TYPE.BYTE1, 0, 0, true, true, int.MaxValue, outputs, null, doPack, PackedInt32s.COMPACT, true, 15);

            int count = 0;
            Random r = new Random(seed);
            int[] ints2 = new int[200];
            Int32sRef input2 = new Int32sRef(ints2, 0, ints2.Length);
            while (true)
            {
                //System.out.println("add: " + input + " -> " + output);
                for (int i = 10; i < ints2.Length; i++)
                {
                    ints2[i] = r.Next(256);
                }
                b.Add(input2, NO_OUTPUT);
                count++;
                if (count % 100000 == 0)
                {
                    Console.WriteLine(count + ": " + b.GetFstSizeInBytes() + " bytes; " + b.TotStateCount + " nodes");
                }
                if (b.TotStateCount > int.MaxValue + 100L * 1024 * 1024)
                {
                    break;
                }
                NextInput(r, ints2);
            }

            FST<object> fst = b.Finish();

            for (int verify = 0; verify < 2; verify++)
            {
                Console.WriteLine("\nTEST: now verify [fst size=" + fst.GetSizeInBytes() + "; nodeCount=" + fst.NodeCount + "; arcCount=" + fst.ArcCount + "]");

                Arrays.Fill(ints2, 0);
                r = new Random(seed);

                for (int i = 0; i < count; i++)
                {
                    if (i % 1000000 == 0)
                    {
                        Console.WriteLine(i + "...: ");
                    }
                    for (int j = 10; j < ints2.Length; j++)
                    {
                        ints2[j] = r.Next(256);
                    }
                    Assert.AreEqual(NO_OUTPUT, Util.Get(fst, input2));
                    NextInput(r, ints2);
                }

                Console.WriteLine("\nTEST: enum all input/outputs");
                Int32sRefFSTEnum<object> fstEnum = new Int32sRefFSTEnum<object>(fst);

                Arrays.Fill(ints2, 0);
                r = new Random(seed);
                int upto = 0;
                while (true)
                {
                    Int32sRefFSTEnum.InputOutput<object> pair = fstEnum.Next();
                    if (pair == null)
                    {
                        break;
                    }
                    for (int j = 10; j < ints2.Length; j++)
                    {
                        ints2[j] = r.Next(256);
                    }
                    Assert.AreEqual(input2, pair.Input);
                    Assert.AreEqual(NO_OUTPUT, pair.Output);
                    upto++;
                    NextInput(r, ints2);
                }
                Assert.AreEqual(count, upto);

                if (verify == 0)
                {
                    Console.WriteLine("\nTEST: save/load FST and re-verify");
                    IndexOutput @out = dir.CreateOutput("fst", IOContext.DEFAULT);
                    fst.Save(@out);
                    @out.Dispose();
                    IndexInput @in = dir.OpenInput("fst", IOContext.DEFAULT);
                    fst = new FST<object>(@in, outputs);
                    @in.Dispose();
                }
                else
                {
                    dir.DeleteFile("fst");
                }
            }
        }

        // Build FST w/ ByteSequenceOutputs and stop when FST
        // size = 3GB
        {
            Console.WriteLine("\nTEST: 3 GB size; doPack=" + doPack + " outputs=bytes");
            Outputs<BytesRef> outputs = ByteSequenceOutputs.Singleton;
            Builder<BytesRef> b = new Builder<BytesRef>(FST.INPUT_TYPE.BYTE1, 0, 0, true, true, int.MaxValue, outputs, null, doPack, PackedInt32s.COMPACT, true, 15);

            var outputBytes = new byte[20];
            BytesRef output = new BytesRef(outputBytes);
            Arrays.Fill(ints, 0);
            int count = 0;
            Random r = new Random(seed);
            while (true)
            {
                r.NextBytes(outputBytes);
                //System.out.println("add: " + input + " -> " + output);
                b.Add(input, BytesRef.DeepCopyOf(output));
                count++;
                if (count % 1000000 == 0)
                {
                    Console.WriteLine(count + "...: " + b.GetFstSizeInBytes() + " bytes");
                }
                if (b.GetFstSizeInBytes() > LIMIT)
                {
                    break;
                }
                NextInput(r, ints);
            }

            FST<BytesRef> fst = b.Finish();

            for (int verify = 0; verify < 2; verify++)
            {
                Console.WriteLine("\nTEST: now verify [fst size=" + fst.GetSizeInBytes() + "; nodeCount=" + fst.NodeCount + "; arcCount=" + fst.ArcCount + "]");

                r = new Random(seed);
                Arrays.Fill(ints, 0);

                for (int i = 0; i < count; i++)
                {
                    if (i % 1000000 == 0)
                    {
                        Console.WriteLine(i + "...: ");
                    }
                    r.NextBytes(outputBytes);
                    Assert.AreEqual(output, Util.Get(fst, input));
                    NextInput(r, ints);
                }

                Console.WriteLine("\nTEST: enum all input/outputs");
                Int32sRefFSTEnum<BytesRef> fstEnum = new Int32sRefFSTEnum<BytesRef>(fst);

                Arrays.Fill(ints, 0);
                r = new Random(seed);
                int upto = 0;
                while (true)
                {
                    Int32sRefFSTEnum.InputOutput<BytesRef> pair = fstEnum.Next();
                    if (pair == null)
                    {
                        break;
                    }
                    Assert.AreEqual(input, pair.Input);
                    r.NextBytes(outputBytes);
                    Assert.AreEqual(output, pair.Output);
                    upto++;
                    NextInput(r, ints);
                }
                Assert.AreEqual(count, upto);

                if (verify == 0)
                {
                    Console.WriteLine("\nTEST: save/load FST and re-verify");
                    IndexOutput @out = dir.CreateOutput("fst", IOContext.DEFAULT);
                    fst.Save(@out);
                    @out.Dispose();
                    IndexInput @in = dir.OpenInput("fst", IOContext.DEFAULT);
                    fst = new FST<BytesRef>(@in, outputs);
                    @in.Dispose();
                }
                else
                {
                    dir.DeleteFile("fst");
                }
            }
        }

        // Build FST w/ PositiveIntOutputs and stop when FST
        // size = 3GB
        {
            Console.WriteLine("\nTEST: 3 GB size; doPack=" + doPack + " outputs=long");
            Outputs<long?> outputs = PositiveInt32Outputs.Singleton;
            Builder<long?> b = new Builder<long?>(FST.INPUT_TYPE.BYTE1, 0, 0, true, true, int.MaxValue, outputs, null, doPack, PackedInt32s.COMPACT, true, 15);

            long output = 1;

            Arrays.Fill(ints, 0);
            int count = 0;
            Random r = new Random(seed);
            while (true)
            {
                //System.out.println("add: " + input + " -> " + output);
                b.Add(input, output);
                output += 1 + r.Next(10);
                count++;
                if (count % 1000000 == 0)
                {
                    Console.WriteLine(count + "...: " + b.GetFstSizeInBytes() + " bytes");
                }
                if (b.GetFstSizeInBytes() > LIMIT)
                {
                    break;
                }
                NextInput(r, ints);
            }

            FST<long?> fst = b.Finish();

            for (int verify = 0; verify < 2; verify++)
            {
                Console.WriteLine("\nTEST: now verify [fst size=" + fst.GetSizeInBytes() + "; nodeCount=" + fst.NodeCount + "; arcCount=" + fst.ArcCount + "]");

                Arrays.Fill(ints, 0);

                output = 1;
                r = new Random(seed);
                for (int i = 0; i < count; i++)
                {
                    if (i % 1000000 == 0)
                    {
                        Console.WriteLine(i + "...: ");
                    }

                    // forward lookup:
                    Assert.AreEqual(output, (long)Util.Get(fst, input));
                    // reverse lookup:
                    Assert.AreEqual(input, Util.GetByOutput(fst, output));
                    output += 1 + r.Next(10);
                    NextInput(r, ints);
                }

                Console.WriteLine("\nTEST: enum all input/outputs");
                Int32sRefFSTEnum<long?> fstEnum = new Int32sRefFSTEnum<long?>(fst);

                Arrays.Fill(ints, 0);
                r = new Random(seed);
                int upto = 0;
                output = 1;
                while (true)
                {
                    Int32sRefFSTEnum.InputOutput<long?> pair = fstEnum.Next();
                    if (pair == null)
                    {
                        break;
                    }
                    Assert.AreEqual(input, pair.Input);
                    Assert.AreEqual(output, pair.Output.Value);
                    output += 1 + r.Next(10);
                    upto++;
                    NextInput(r, ints);
                }
                Assert.AreEqual(count, upto);

                if (verify == 0)
                {
                    Console.WriteLine("\nTEST: save/load FST and re-verify");
                    IndexOutput @out = dir.CreateOutput("fst", IOContext.DEFAULT);
                    fst.Save(@out);
                    @out.Dispose();
                    IndexInput @in = dir.OpenInput("fst", IOContext.DEFAULT);
                    fst = new FST<long?>(@in, outputs);
                    @in.Dispose();
                }
                else
                {
                    dir.DeleteFile("fst");
                }
            }
        }
    }
    dir.Dispose();
}