Example no. 1
			internal void OpenDocStores(SegmentInfo si)
			{
				lock (this)
				{
					
					System.Diagnostics.Debug.Assert(si.name.Equals(segment));
					
					if (fieldsReaderOrig == null)
					{
						Directory storeDir;
						if (si.GetDocStoreOffset() != -1)
						{
							if (si.GetDocStoreIsCompoundFile())
							{
								System.Diagnostics.Debug.Assert(storeCFSReader == null);
								storeCFSReader = new CompoundFileReader(dir, si.GetDocStoreSegment() + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION, readBufferSize);
								storeDir = storeCFSReader;
								System.Diagnostics.Debug.Assert(storeDir != null);
							}
							else
							{
								storeDir = dir;
								System.Diagnostics.Debug.Assert(storeDir != null);
							}
						}
						else if (si.GetUseCompoundFile())
						{
							// In some cases, we were originally opened when CFS
							// was not used, but then we are asked to open doc
							// stores after the segment has switched to CFS
							if (cfsReader == null)
							{
								cfsReader = new CompoundFileReader(dir, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
							}
							storeDir = cfsReader;
							System.Diagnostics.Debug.Assert(storeDir != null);
						}
						else
						{
							storeDir = dir;
							System.Diagnostics.Debug.Assert(storeDir != null);
						}
						
						System.String storesSegment;
						if (si.GetDocStoreOffset() != -1)
						{
							storesSegment = si.GetDocStoreSegment();
						}
						else
						{
							storesSegment = segment;
						}
						
						fieldsReaderOrig = new FieldsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.GetDocStoreOffset(), si.docCount);
						
						// Verify two sources of "maxDoc" agree:
						if (si.GetDocStoreOffset() == -1 && fieldsReaderOrig.Size() != si.docCount)
						{
							throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + fieldsReaderOrig.Size() + " but segmentInfo shows " + si.docCount);
						}
						
						if (fieldInfos.HasVectors())
						{
							// open term vector files only as needed
							termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.GetDocStoreOffset(), si.docCount);
						}
					}
				}
			}
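To summarize the branching above: OpenDocStores reads stored fields and term vectors from the shared doc store's own compound file when the segment shares a CFS-packed doc store, from the segment's compound file when the segment itself uses CFS, and from the plain index directory otherwise. The following is only an illustrative recap of that decision, assuming the same fields (dir, segment, readBufferSize) are in scope; it is not code from the library and it omits the caching into storeCFSReader/cfsReader done above.

// Condensed, illustrative sketch of the storeDir decision above (not library code).
Directory ResolveDocStoreDir(SegmentInfo si)
{
	if (si.GetDocStoreOffset() != -1)
	{
		// Segment shares a doc store with other segments.
		return si.GetDocStoreIsCompoundFile()
			? (Directory) new CompoundFileReader(dir, si.GetDocStoreSegment() + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION, readBufferSize)
			: dir;
	}
	if (si.GetUseCompoundFile())
	{
		// The segment's own files are packed into a compound file.
		return new CompoundFileReader(dir, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
	}
	return dir; // plain per-file segment
}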
Example no. 2
			internal CoreReaders(SegmentReader origInstance, Directory dir, SegmentInfo si, int readBufferSize, int termsIndexDivisor)
			{
				segment = si.name;
				this.readBufferSize = readBufferSize;
				this.dir = dir;
				
				bool success = false;
				
				try
				{
					Directory dir0 = dir;
					if (si.GetUseCompoundFile())
					{
						cfsReader = new CompoundFileReader(dir, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
						dir0 = cfsReader;
					}
					cfsDir = dir0;
					
					fieldInfos = new FieldInfos(cfsDir, segment + "." + IndexFileNames.FIELD_INFOS_EXTENSION);
					
					this.termsIndexDivisor = termsIndexDivisor;
					TermInfosReader reader = new TermInfosReader(cfsDir, segment, fieldInfos, readBufferSize, termsIndexDivisor);
					if (termsIndexDivisor == -1)
					{
						tisNoIndex = reader;
					}
					else
					{
						tis = reader;
						tisNoIndex = null;
					}
					
					// make sure that all index files have been read or are kept open
					// so that if an index update removes them we'll still have them
					freqStream = cfsDir.OpenInput(segment + "." + IndexFileNames.FREQ_EXTENSION, readBufferSize);
					
					if (fieldInfos.HasProx())
					{
						proxStream = cfsDir.OpenInput(segment + "." + IndexFileNames.PROX_EXTENSION, readBufferSize);
					}
					else
					{
						proxStream = null;
					}
					success = true;
				}
				finally
				{
					if (!success)
					{
						DecRef();
					}
				}


				// Must assign this at the end: if we hit an
				// exception above, core is not assigned yet and we
				// don't want to attempt to purge the FieldCache
				// (that would throw a NullReferenceException).
				this.origInstance = origInstance;
			}
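The constructor relies on a common resource-safety idiom: success is set to true only after the last file has been opened, so if any open throws, the finally block calls DecRef() to release everything acquired so far. A minimal, self-contained sketch of the same idiom follows; ResourceHolder and the file paths are hypothetical placeholders, not Lucene.Net APIs.

// Generic sketch of the "success flag + cleanup in finally" idiom used above.
// ResourceHolder and DecRef are hypothetical placeholders, not Lucene.Net code.
class ResourceHolder
{
	private System.IO.Stream first;
	private System.IO.Stream second;

	public ResourceHolder(string pathA, string pathB)
	{
		bool success = false;
		try
		{
			first = System.IO.File.OpenRead(pathA);   // may throw
			second = System.IO.File.OpenRead(pathB);  // may throw
			success = true;                           // reached only if every open succeeded
		}
		finally
		{
			if (!success)
			{
				DecRef(); // release whatever was opened before the failure
			}
		}
	}

	private void DecRef()
	{
		if (first != null) first.Close();
		if (second != null) second.Close();
	}
}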
Example no. 3
			// NOTE: only called from IndexWriter when a near
			// real-time reader is opened, or applyDeletes is run,
			// sharing a segment that's still being merged.  This
			// method is not fully thread safe, and relies on the
			// synchronization in IndexWriter
			internal void LoadTermsIndex(SegmentInfo si, int termsIndexDivisor)
			{
				lock (this)
				{
					if (tis == null)
					{
						Directory dir0;
						if (si.GetUseCompoundFile())
						{
							// In some cases, we were originally opened when CFS
							// was not used, but we are then asked to open the
							// terms reader with its index after the segment has
							// switched to CFS
							if (cfsReader == null)
							{
								cfsReader = new CompoundFileReader(dir, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
							}
							dir0 = cfsReader;
						}
						else
						{
							dir0 = dir;
						}
						
						tis = new TermInfosReader(dir0, segment, fieldInfos, readBufferSize, termsIndexDivisor);
					}
				}
			}
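LoadTermsIndex lazily builds the TermInfosReader under the object's monitor, so a core that was opened without a terms index can be upgraded later without reopening the segment. A minimal sketch of the same lock-guarded lazy initialization is shown below; LazyHolder and BuildReader are hypothetical names, not Lucene.Net APIs.

// Lock-guarded lazy initialization, the same shape as LoadTermsIndex above.
class LazyHolder
{
	private object reader;

	internal void EnsureReader()
	{
		lock (this)
		{
			if (reader == null)
			{
				reader = BuildReader(); // built only on first use, under the monitor
			}
		}
	}

	private object BuildReader()
	{
		return new object(); // stand-in for the expensive reader construction
	}
}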
Example no. 4
		public static void Main(System.String[] args)
		{
			System.String filename = null;
			bool extract = false;
			
			for (int i = 0; i < args.Length; ++i)
			{
				if (args[i].Equals("-extract"))
				{
					extract = true;
				}
				else if (filename == null)
				{
					filename = args[i];
				}
			}
			
			if (filename == null)
			{
				System.Console.Out.WriteLine("Usage: Mono.Lucene.Net.Index.IndexReader [-extract] <cfsfile>");
				return;
			}
			
			Directory dir = null;
			CompoundFileReader cfr = null;
			
			try
			{
				System.IO.FileInfo file = new System.IO.FileInfo(filename);
				System.String dirname = new System.IO.FileInfo(file.FullName).DirectoryName;
				filename = file.Name;
				dir = FSDirectory.Open(new System.IO.FileInfo(dirname));
				cfr = new CompoundFileReader(dir, filename);
				
				System.String[] files = cfr.List();
				System.Array.Sort(files); // sort the array of filenames so that the output is more readable
				
				for (int i = 0; i < files.Length; ++i)
				{
					long len = cfr.FileLength(files[i]);
					
					if (extract)
					{
						System.Console.Out.WriteLine("extract " + files[i] + " with " + len + " bytes to local directory...");
						IndexInput ii = cfr.OpenInput(files[i]);
						
						System.IO.FileStream f = new System.IO.FileStream(files[i], System.IO.FileMode.Create);
						
						// read and write with a small buffer, which is more effective than reading byte by byte
						byte[] buffer = new byte[1024];
						int chunk = buffer.Length;
						while (len > 0)
						{
							int bufLen = (int) System.Math.Min(chunk, len);
							ii.ReadBytes(buffer, 0, bufLen);
							f.Write(buffer, 0, bufLen);
							len -= bufLen;
						}
						
						f.Close();
						ii.Close();
					}
					else
						System.Console.Out.WriteLine(files[i] + ": " + len + " bytes");
				}
			}
			catch (System.IO.IOException ioe)
			{
				System.Console.Error.WriteLine(ioe.StackTrace);
			}
			finally
			{
				try
				{
					if (dir != null)
						dir.Close();
					if (cfr != null)
						cfr.Close();
				}
				catch (System.IO.IOException ioe)
				{
					System.Console.Error.WriteLine(ioe.StackTrace);
				}
			}
		}
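Per the usage string printed above, the tool takes an optional -extract flag followed by the path to a compound (.cfs) file. Because Main is public and static it can also be driven directly; the snippet below assumes the method lives on Mono.Lucene.Net.Index.IndexReader, as the usage string suggests, and "_1.cfs" is a placeholder file name.

// Hedged driver for the entry point above; "_1.cfs" is a placeholder path.
Mono.Lucene.Net.Index.IndexReader.Main(new System.String[] { "_1.cfs" });              // list packed files and their sizes
Mono.Lucene.Net.Index.IndexReader.Main(new System.String[] { "-extract", "_1.cfs" });  // extract files into the working directory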