Example #1
        public virtual void  TestTwoFiles()
        {
            CreateSequenceFile(dir, "d1", (byte)0, 15);
            CreateSequenceFile(dir, "d2", (byte)0, 114);

            CompoundFileWriter csw = new CompoundFileWriter(dir, "d.csf");

            csw.AddFile("d1");
            csw.AddFile("d2");
            csw.Close();

            CompoundFileReader csr      = new CompoundFileReader(dir, "d.csf");
            IndexInput         expected = dir.OpenInput("d1");
            IndexInput         actual   = csr.OpenInput("d1");

            AssertSameStreams("d1", expected, actual);
            AssertSameSeekBehavior("d1", expected, actual);
            expected.Close();
            actual.Close();

            expected = dir.OpenInput("d2");
            actual   = csr.OpenInput("d2");
            AssertSameStreams("d2", expected, actual);
            AssertSameSeekBehavior("d2", expected, actual);
            expected.Close();
            actual.Close();
            csr.Close();
        }
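The AssertSameStreams and AssertSameSeekBehavior helpers called throughout these tests are defined elsewhere in the test class and are not shown on this page. As a rough sketch of what the stream comparison could amount to, assuming NUnit's Assert (used by these tests) and only the IndexInput members the examples already exercise, it might look like the following; the name AssertSameStreamsSketch is invented for illustration:

        // Hypothetical sketch, not the actual helper: compares two freshly
        // opened IndexInputs byte for byte after checking their lengths.
        private static void AssertSameStreamsSketch(System.String msg, IndexInput expected, IndexInput actual)
        {
            Assert.AreEqual(expected.Length(), actual.Length(), msg + ": lengths differ");
            for (long pos = 0; pos < expected.Length(); pos++)
            {
                Assert.AreEqual(expected.ReadByte(), actual.ReadByte(), msg + ": byte at position " + pos);
            }
        }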
Example #2
        /// <summary>Set up a larger compound file with a number of components, each of
        /// which is a sequential file (so that we can easily tell that we are
        /// reading the right byte). The method sets up 20 files, f0 to f19;
        /// each file is 2000 bytes in size.
        /// </summary>
        private void  SetUp_2()
        {
            CompoundFileWriter cw = new CompoundFileWriter(dir, "f.comp");

            for (int i = 0; i < 20; i++)
            {
                CreateSequenceFile(dir, "f" + i, (byte)0, 2000);
                cw.AddFile("f" + i);
            }
            cw.Close();
        }
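SetUp_2 only writes the compound file; no verification is shown at this point. A read-back check in the spirit of the other tests on this page might look like the sketch below. The VerifySetUp_2 name is invented, and the sketch assumes the fixture's dir field plus the sequential-byte layout that the summary above ascribes to CreateSequenceFile (starting at 0, so the byte at position p is (byte)p):

        // Hypothetical sketch: open each member of "f.comp" through the
        // CompoundFileReader and confirm its length and sequential contents.
        private void VerifySetUp_2()
        {
            CompoundFileReader cr = new CompoundFileReader(dir, "f.comp");
            for (int i = 0; i < 20; i++)
            {
                IndexInput input = cr.OpenInput("f" + i);
                Assert.AreEqual(2000L, input.Length(), "f" + i + ": unexpected length");
                for (long pos = 0; pos < input.Length(); pos++)
                {
                    Assert.AreEqual((byte)pos, input.ReadByte(), "f" + i + ": byte at position " + pos);
                }
                input.Close();
            }
            cr.Close();
        }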
Example #3
        public /*internal*/ System.Collections.IList CreateCompoundFile(System.String fileName)
        {
            CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, fileName, checkAbort);

            System.Collections.IList files = new System.Collections.ArrayList(IndexFileNames.COMPOUND_EXTENSIONS.Length + 1);

            // Basic files
            for (int i = 0; i < IndexFileNames.COMPOUND_EXTENSIONS.Length; i++)
            {
                System.String ext = IndexFileNames.COMPOUND_EXTENSIONS[i];

                if (ext.Equals(IndexFileNames.PROX_EXTENSION) && !HasProx())
                {
                    continue;
                }

                if (mergeDocStores || (!ext.Equals(IndexFileNames.FIELDS_EXTENSION) && !ext.Equals(IndexFileNames.FIELDS_INDEX_EXTENSION)))
                {
                    files.Add(segment + "." + ext);
                }
            }

            // Fieldable norm files
            for (int i = 0; i < fieldInfos.Size(); i++)
            {
                FieldInfo fi = fieldInfos.FieldInfo(i);
                if (fi.isIndexed && !fi.omitNorms)
                {
                    files.Add(segment + "." + IndexFileNames.NORMS_EXTENSION);
                    break;
                }
            }

            // Vector files
            if (fieldInfos.HasVectors() && mergeDocStores)
            {
                for (int i = 0; i < IndexFileNames.VECTOR_EXTENSIONS.Length; i++)
                {
                    files.Add(segment + "." + IndexFileNames.VECTOR_EXTENSIONS[i]);
                }
            }

            // Now merge all added files
            System.Collections.IEnumerator it = files.GetEnumerator();
            while (it.MoveNext())
            {
                cfsWriter.AddFile((System.String)it.Current);
            }

            // Perform the merge
            cfsWriter.Close();

            return(files);
        }
Example #4
        public /*internal*/ ICollection <string> CreateCompoundFile(System.String fileName)
        {
            ICollection <string> files     = GetMergedFiles();
            CompoundFileWriter   cfsWriter = new CompoundFileWriter(directory, fileName, checkAbort);

            // Now merge all added files
            foreach (var file in files)
            {
                cfsWriter.AddFile(file);
            }

            // Perform the merge
            cfsWriter.Close();

            return(files);
        }
Example #5
        public /*internal*/ System.Collections.Generic.ICollection <string> CreateCompoundFile(System.String fileName)
        {
            System.Collections.Generic.ICollection <string> files = GetMergedFiles();
            CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, fileName, checkAbort);

            // Now merge all added files
            System.Collections.IEnumerator it = files.GetEnumerator();
            while (it.MoveNext())
            {
                cfsWriter.AddFile((System.String)it.Current);
            }

            // Perform the merge
            cfsWriter.Close();

            return(files);
        }
Example #6
        public virtual void  TestRandomFiles()
        {
            // Setup the test segment
            System.String segment = "test";
            int           chunk   = 1024; // internal buffer size used by the stream

            CreateRandomFile(dir, segment + ".zero", 0);
            CreateRandomFile(dir, segment + ".one", 1);
            CreateRandomFile(dir, segment + ".ten", 10);
            CreateRandomFile(dir, segment + ".hundred", 100);
            CreateRandomFile(dir, segment + ".big1", chunk);
            CreateRandomFile(dir, segment + ".big2", chunk - 1);
            CreateRandomFile(dir, segment + ".big3", chunk + 1);
            CreateRandomFile(dir, segment + ".big4", 3 * chunk);
            CreateRandomFile(dir, segment + ".big5", 3 * chunk - 1);
            CreateRandomFile(dir, segment + ".big6", 3 * chunk + 1);
            CreateRandomFile(dir, segment + ".big7", 1000 * chunk);

            // Setup extraneous files
            CreateRandomFile(dir, "onetwothree", 100);
            CreateRandomFile(dir, segment + ".notIn", 50);
            CreateRandomFile(dir, segment + ".notIn2", 51);

            // Now test
            CompoundFileWriter csw = new CompoundFileWriter(dir, "test.cfs");

            System.String[] data = new System.String[] { ".zero", ".one", ".ten", ".hundred", ".big1", ".big2", ".big3", ".big4", ".big5", ".big6", ".big7" };
            for (int i = 0; i < data.Length; i++)
            {
                csw.AddFile(segment + data[i]);
            }
            csw.Close();

            CompoundFileReader csr = new CompoundFileReader(dir, "test.cfs");

            for (int i = 0; i < data.Length; i++)
            {
                IndexInput check = dir.OpenInput(segment + data[i]);
                IndexInput test  = csr.OpenInput(segment + data[i]);
                AssertSameStreams(data[i], check, test);
                AssertSameSeekBehavior(data[i], check, test);
                test.Close();
                check.Close();
            }
            csr.Close();
        }
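CreateRandomFile is another fixture helper that does not appear on this page. A plausible stand-in, assuming it simply fills a new file in the directory with the requested number of random bytes (the real helper may seed or structure the data differently):

        // Hypothetical sketch of the CreateRandomFile helper referenced above:
        // write `size` random bytes to a newly created file in `dir`.
        private void CreateRandomFileSketch(Directory dir, System.String name, int size)
        {
            System.Random rnd = new System.Random();
            IndexOutput os = dir.CreateOutput(name);
            for (int i = 0; i < size; i++)
            {
                os.WriteByte((byte)rnd.Next(256));
            }
            os.Close();
        }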
Example #7
        public System.Collections.ArrayList CreateCompoundFile(System.String fileName)
        {
            CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, fileName);

            System.Collections.ArrayList files = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(IndexFileNames.COMPOUND_EXTENSIONS.Length + 1));

            // Basic files
            for (int i = 0; i < IndexFileNames.COMPOUND_EXTENSIONS.Length; i++)
            {
                files.Add(segment + "." + IndexFileNames.COMPOUND_EXTENSIONS[i]);
            }

            // Fieldable norm files
            for (int i = 0; i < fieldInfos.Size(); i++)
            {
                FieldInfo fi = fieldInfos.FieldInfo(i);
                if (fi.isIndexed && !fi.omitNorms)
                {
                    files.Add(segment + "." + IndexFileNames.NORMS_EXTENSION);
                    break;
                }
            }

            // Vector files
            if (fieldInfos.HasVectors())
            {
                for (int i = 0; i < IndexFileNames.VECTOR_EXTENSIONS.Length; i++)
                {
                    files.Add(segment + "." + IndexFileNames.VECTOR_EXTENSIONS[i]);
                }
            }

            // Now merge all added files
            System.Collections.IEnumerator it = files.GetEnumerator();
            while (it.MoveNext())
            {
                cfsWriter.AddFile((System.String)it.Current);
            }

            // Perform the merge
            cfsWriter.Close();

            return(files);
        }
Example #8
        public virtual void  TestSingleFile()
        {
            int[] data = new int[] { 0, 1, 10, 100 };
            for (int i = 0; i < data.Length; i++)
            {
                System.String name = "t" + data[i];
                CreateSequenceFile(dir, name, (byte)0, data[i]);
                CompoundFileWriter csw = new CompoundFileWriter(dir, name + ".cfs");
                csw.AddFile(name);
                csw.Close();

                CompoundFileReader csr      = new CompoundFileReader(dir, name + ".cfs");
                IndexInput         expected = dir.OpenInput(name);
                IndexInput         actual   = csr.OpenInput(name);
                AssertSameStreams(name, expected, actual);
                AssertSameSeekBehavior(name, expected, actual);
                expected.Close();
                actual.Close();
                csr.Close();
            }
        }
Example #9
 public virtual void  TestTwoFiles()
 {
     CreateSequenceFile(dir, "d1", (byte) 0, 15);
     CreateSequenceFile(dir, "d2", (byte) 0, 114);
     
     CompoundFileWriter csw = new CompoundFileWriter(dir, "d.csf");
     csw.AddFile("d1");
     csw.AddFile("d2");
     csw.Close();
     
     CompoundFileReader csr = new CompoundFileReader(dir, "d.csf");
     IndexInput expected = dir.OpenInput("d1");
     IndexInput actual = csr.OpenInput("d1");
     AssertSameStreams("d1", expected, actual);
     AssertSameSeekBehavior("d1", expected, actual);
     expected.Close();
     actual.Close();
     
     expected = dir.OpenInput("d2");
     actual = csr.OpenInput("d2");
     AssertSameStreams("d2", expected, actual);
     AssertSameSeekBehavior("d2", expected, actual);
     expected.Close();
     actual.Close();
     csr.Close();
 }
Example #10
 public virtual void  TestSingleFile()
 {
     int[] data = new int[]{0, 1, 10, 100};
     for (int i = 0; i < data.Length; i++)
     {
         System.String name = "t" + data[i];
         CreateSequenceFile(dir, name, (byte) 0, data[i]);
         CompoundFileWriter csw = new CompoundFileWriter(dir, name + ".cfs");
         csw.AddFile(name);
         csw.Close();
         
         CompoundFileReader csr = new CompoundFileReader(dir, name + ".cfs");
         IndexInput expected = dir.OpenInput(name);
         IndexInput actual = csr.OpenInput(name);
         AssertSameStreams(name, expected, actual);
         AssertSameSeekBehavior(name, expected, actual);
         expected.Close();
         actual.Close();
         csr.Close();
     }
 }
Example #11
		public /*internal*/ System.Collections.IList CreateCompoundFile(System.String fileName)
		{
			CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, fileName, checkAbort);
			
			System.Collections.IList files = new System.Collections.ArrayList(IndexFileNames.COMPOUND_EXTENSIONS.Length + 1);
			
			// Basic files
			for (int i = 0; i < IndexFileNames.COMPOUND_EXTENSIONS.Length; i++)
			{
				System.String ext = IndexFileNames.COMPOUND_EXTENSIONS[i];
				
				if (ext.Equals(IndexFileNames.PROX_EXTENSION) && !HasProx())
					continue;
				
				if (mergeDocStores || (!ext.Equals(IndexFileNames.FIELDS_EXTENSION) && !ext.Equals(IndexFileNames.FIELDS_INDEX_EXTENSION)))
					files.Add(segment + "." + ext);
			}
			
			// Fieldable norm files
			for (int i = 0; i < fieldInfos.Size(); i++)
			{
				FieldInfo fi = fieldInfos.FieldInfo(i);
				if (fi.isIndexed && !fi.omitNorms)
				{
					files.Add(segment + "." + IndexFileNames.NORMS_EXTENSION);
					break;
				}
			}
			
			// Vector files
			if (fieldInfos.HasVectors() && mergeDocStores)
			{
				for (int i = 0; i < IndexFileNames.VECTOR_EXTENSIONS.Length; i++)
				{
					files.Add(segment + "." + IndexFileNames.VECTOR_EXTENSIONS[i]);
				}
			}
			
			// Now merge all added files
			System.Collections.IEnumerator it = files.GetEnumerator();
			while (it.MoveNext())
			{
				cfsWriter.AddFile((System.String) it.Current);
			}
			
			// Perform the merge
			cfsWriter.Close();
			
			return files;
		}
Example #12
        public /*internal*/ ICollection<string> CreateCompoundFile(System.String fileName)
        {
            ICollection<string> files = GetMergedFiles();
            CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, fileName, checkAbort);

			// Now merge all added files
			foreach(var file in files)
			{
				cfsWriter.AddFile(file);
			}
			
			// Perform the merge
			cfsWriter.Close();

            return files;
		}
Example #13
		/// <summary>Build compound file for the segment we just flushed </summary>
		internal void  CreateCompoundFile(System.String segment)
		{
			CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION);
			int size = newFiles.Count;
			for (int i = 0; i < size; i++)
				cfsWriter.AddFile((System.String) newFiles[i]);
			
			// Perform the merge
			cfsWriter.Close();
		}
Example #14
		/// <summary>Tells the docWriter to close its currently open shared
		/// doc stores (stored fields &amp; vectors files).
		/// Return value specifies whether new doc store files are compound or not.
		/// </summary>
		private bool FlushDocStores()
		{
			lock (this)
			{
				
				System.Collections.IList files = docWriter.Files();
				
				bool useCompoundDocStore = false;
				
				if (files.Count > 0)
				{
					System.String docStoreSegment;
					
					bool success = false;
					try
					{
						docStoreSegment = docWriter.CloseDocStore();
						success = true;
					}
					finally
					{
						if (!success)
						{
							if (infoStream != null)
								Message("hit exception closing doc store segment");
							docWriter.Abort(null);
						}
					}
					
					useCompoundDocStore = mergePolicy.UseCompoundDocStore(segmentInfos);
					
					if (useCompoundDocStore && docStoreSegment != null)
					{
						// Now build compound doc store file
						
						success = false;
						
						int numSegments = segmentInfos.Count;
						System.String compoundFileName = docStoreSegment + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION;
						
						try
						{
							CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, compoundFileName);
							int size = files.Count;
							for (int i = 0; i < size; i++)
								cfsWriter.AddFile((System.String) files[i]);
							
							// Perform the merge
							cfsWriter.Close();
							
							for (int i = 0; i < numSegments; i++)
							{
								SegmentInfo si = segmentInfos.Info(i);
								if (si.GetDocStoreOffset() != - 1 && si.GetDocStoreSegment().Equals(docStoreSegment))
									si.SetDocStoreIsCompoundFile(true);
							}
							Checkpoint();
							success = true;
						}
						finally
						{
							if (!success)
							{
								
								if (infoStream != null)
									Message("hit exception building compound file doc store for segment " + docStoreSegment);
								
								// Rollback to no compound file
								for (int i = 0; i < numSegments; i++)
								{
									SegmentInfo si = segmentInfos.Info(i);
									if (si.GetDocStoreOffset() != - 1 && si.GetDocStoreSegment().Equals(docStoreSegment))
										si.SetDocStoreIsCompoundFile(false);
								}
								deleter.DeleteFile(compoundFileName);
								DeletePartialSegmentsFile();
							}
						}
						
						deleter.Checkpoint(segmentInfos, false);
					}
				}
				
				return useCompoundDocStore;
			}
		}
Example #15
 public virtual void  TestRandomFiles()
 {
     // Setup the test segment
     System.String segment = "test";
     int chunk = 1024; // internal buffer size used by the stream
     CreateRandomFile(dir, segment + ".zero", 0);
     CreateRandomFile(dir, segment + ".one", 1);
     CreateRandomFile(dir, segment + ".ten", 10);
     CreateRandomFile(dir, segment + ".hundred", 100);
     CreateRandomFile(dir, segment + ".big1", chunk);
     CreateRandomFile(dir, segment + ".big2", chunk - 1);
     CreateRandomFile(dir, segment + ".big3", chunk + 1);
     CreateRandomFile(dir, segment + ".big4", 3 * chunk);
     CreateRandomFile(dir, segment + ".big5", 3 * chunk - 1);
     CreateRandomFile(dir, segment + ".big6", 3 * chunk + 1);
     CreateRandomFile(dir, segment + ".big7", 1000 * chunk);
     
     // Setup extraneous files
     CreateRandomFile(dir, "onetwothree", 100);
     CreateRandomFile(dir, segment + ".notIn", 50);
     CreateRandomFile(dir, segment + ".notIn2", 51);
     
     // Now test
     CompoundFileWriter csw = new CompoundFileWriter(dir, "test.cfs");
     System.String[] data = new System.String[]{".zero", ".one", ".ten", ".hundred", ".big1", ".big2", ".big3", ".big4", ".big5", ".big6", ".big7"};
     for (int i = 0; i < data.Length; i++)
     {
         csw.AddFile(segment + data[i]);
     }
     csw.Close();
     
     CompoundFileReader csr = new CompoundFileReader(dir, "test.cfs");
     for (int i = 0; i < data.Length; i++)
     {
         IndexInput check = dir.OpenInput(segment + data[i]);
         IndexInput test = csr.OpenInput(segment + data[i]);
         AssertSameStreams(data[i], check, test);
         AssertSameSeekBehavior(data[i], check, test);
         test.Close();
         check.Close();
     }
     csr.Close();
 }
Example #16
		/// <summary>Tells the docWriter to close its currently open shared
		/// doc stores (stored fields &amp; vectors files).
		/// Return value specifies whether new doc store files are compound or not.
		/// </summary>
		private bool FlushDocStores()
		{
			lock (this)
			{
                if (infoStream != null)
                {
                    Message("flushDocStores segment=" + docWriter.GetDocStoreSegment());
                }

				bool useCompoundDocStore = false;
                if (infoStream != null)
                {
                    Message("closeDocStores segment=" + docWriter.GetDocStoreSegment());
                }

				System.String docStoreSegment;
				
				bool success = false;
				try
				{
					docStoreSegment = docWriter.CloseDocStore();
					success = true;
				}
				finally
				{
					if (!success && infoStream != null)
					{
						Message("hit exception closing doc store segment");
					}
				}

                if (infoStream != null)
                {
                    Message("flushDocStores files=" + docWriter.ClosedFiles());
                }

				useCompoundDocStore = mergePolicy.UseCompoundDocStore(segmentInfos);
				
				if (useCompoundDocStore && docStoreSegment != null && docWriter.ClosedFiles().Count != 0)
				{
					// Now build compound doc store file
					
					if (infoStream != null)
					{
						Message("create compound file " + docStoreSegment + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION);
					}
					
					success = false;
					
					int numSegments = segmentInfos.Count;
					System.String compoundFileName = docStoreSegment + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION;
					
					try
					{
						CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, compoundFileName);
						System.Collections.IEnumerator it = docWriter.ClosedFiles().GetEnumerator();
						while (it.MoveNext())
						{
							cfsWriter.AddFile((System.String) it.Current);
						}
						
						// Perform the merge
						cfsWriter.Close();
						success = true;
					}
					finally
					{
						if (!success)
						{
							if (infoStream != null)
								Message("hit exception building compound file doc store for segment " + docStoreSegment);
							deleter.DeleteFile(compoundFileName);
						}
					}
					
					for (int i = 0; i < numSegments; i++)
					{
						SegmentInfo si = segmentInfos.Info(i);
						if (si.GetDocStoreOffset() != - 1 && si.GetDocStoreSegment().Equals(docStoreSegment))
							si.SetDocStoreIsCompoundFile(true);
					}
					
					Checkpoint();
					
					// In case the files we just merged into a CFS were
					// not previously checkpointed:
					deleter.DeleteNewFiles(docWriter.ClosedFiles());
				}
				
				return useCompoundDocStore;
			}
		}
Example #17
        /** Build compound file for the segment we just flushed */
        internal void CreateCompoundFile(string segment)
        {
            CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION);
            IEnumerator<string> it = flushState.flushedFiles.Keys.GetEnumerator();
            while (it.MoveNext())
                cfsWriter.AddFile(it.Current);

            // Perform the merge
            cfsWriter.Close();
        }
Example #18
		/// <summary>Build compound file for the segment we just flushed </summary>
		internal void  CreateCompoundFile(System.String segment)
		{
			
			CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION);
            foreach(string s in flushState.flushedFiles)
			{
				cfsWriter.AddFile(s);
			}
			
			// Perform the merge
			cfsWriter.Close();
		}
Example #19
        public System.Collections.ArrayList CreateCompoundFile(System.String fileName)
        {
            CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, fileName);

            System.Collections.ArrayList files = System.Collections.ArrayList.Synchronized(new System.Collections.ArrayList(IndexFileNames.COMPOUND_EXTENSIONS.Length + fieldInfos.Size()));

            // Basic files
            for (int i = 0; i < IndexFileNames.COMPOUND_EXTENSIONS.Length; i++)
            {
                files.Add(segment + "." + IndexFileNames.COMPOUND_EXTENSIONS[i]);
            }

            // Field norm files
            for (int i = 0; i < fieldInfos.Size(); i++)
            {
                FieldInfo fi = fieldInfos.FieldInfo(i);
                if (fi.isIndexed && !fi.omitNorms)
                {
                    files.Add(segment + ".f" + i);
                }
            }

            // Vector files
            if (fieldInfos.HasVectors())
            {
                for (int i = 0; i < IndexFileNames.VECTOR_EXTENSIONS.Length; i++)
                {
                    files.Add(segment + "." + IndexFileNames.VECTOR_EXTENSIONS[i]);
                }
            }

            // Now merge all added files
            System.Collections.IEnumerator it = files.GetEnumerator();
            while (it.MoveNext())
            {
                cfsWriter.AddFile((System.String) it.Current);
            }

            // Perform the merge
            cfsWriter.Close();

            return files;
        }
Example #20
        public /*internal*/ System.Collections.Generic.ICollection<string> CreateCompoundFile(System.String fileName)
        {
            System.Collections.Generic.ICollection<string> files = GetMergedFiles();
            CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, fileName, checkAbort);

			// Now merge all added files
			System.Collections.IEnumerator it = files.GetEnumerator();
			while (it.MoveNext())
			{
				cfsWriter.AddFile((System.String) it.Current);
			}
			
			// Perform the merge
			cfsWriter.Close();

            return files;
		}
Example #21
 /// <summary>Set up a larger compound file with a number of components, each of
 /// which is a sequential file (so that we can easily tell that we are
 /// reading the right byte). The method sets up 20 files, f0 to f19;
 /// each file is 2000 bytes in size.
 /// </summary>
 private void  SetUp_2()
 {
     CompoundFileWriter cw = new CompoundFileWriter(dir, "f.comp");
     for (int i = 0; i < 20; i++)
     {
         CreateSequenceFile(dir, "f" + i, (byte) 0, 2000);
         cw.AddFile("f" + i);
     }
     cw.Close();
 }
Example #22
		/// <summary>Build compound file for the segment we just flushed </summary>
		internal void  CreateCompoundFile(System.String segment)
		{
			
			CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION);
			System.Collections.IEnumerator it = flushState.flushedFiles.GetEnumerator();
			while (it.MoveNext())
			{
				cfsWriter.AddFile((System.String) ((System.Collections.DictionaryEntry) it.Current).Key);
			}
			
			// Perform the merge
			cfsWriter.Close();
		}