/// <summary>Only called when term vectors are enabled.
				/// Called the first time we see a given term in a
				/// document, to allocate (or reuse) the PostingVector
				/// instance that records the data needed to write this
				/// term's posting vectors.
				/// </summary>
				private PostingVector AddNewVector()
				{
					
					// Grow the shared PostingVector array by ~1.5x when full.
					PostingVector[] vectors = Enclosing_Instance.postingsVectors;
					if (postingsVectorsUpto == vectors.Length)
					{
						int newSize = vectors.Length < 2 ? 2 : (int) (1.5 * vectors.Length);
						PostingVector[] grown = new PostingVector[newSize];
						Array.Copy(vectors, 0, grown, 0, vectors.Length);
						Enclosing_Instance.postingsVectors = grown;
						vectors = grown;
					}
					
					// Reuse a previously allocated PostingVector slot if one
					// exists; otherwise allocate a new one and cache it.
					PostingVector v = vectors[postingsVectorsUpto];
					if (v == null)
						v = vectors[postingsVectorsUpto] = new PostingVector();
					Enclosing_Instance.p.vector = v;
					
					postingsVectorsUpto++;
					
					// Back-link the vector to its posting.
					v.p = Enclosing_Instance.p;
					
					int firstSize = Lucene.Net.Index.DocumentsWriter.levelSizeArray[0];
					
					// Carve the initial byte slices out of the shared pool for
					// positions and/or offsets, recording absolute start offsets.
					if (doVectorPositions)
					{
						int upto = Enclosing_Instance.vectorsPool.NewSlice(firstSize);
						v.posStart = v.posUpto = Enclosing_Instance.vectorsPool.byteOffset + upto;
					}
					
					if (doVectorOffsets)
					{
						int upto = Enclosing_Instance.vectorsPool.NewSlice(firstSize);
						v.offsetStart = v.offsetUpto = Enclosing_Instance.vectorsPool.byteOffset + upto;
					}
					
					return v;
				}
			/// <summary>Recursive in-place quicksort over postings[lo..hi]
			/// (inclusive bounds), ordering PostingVectors by their
			/// underlying Posting via ComparePostings.  Uses a
			/// median-of-three pivot to avoid worst-case behavior on
			/// sorted input. </summary>
			internal void  QuickSort(PostingVector[] postings, int lo, int hi)
			{
				// Zero- or one-element range: already sorted.
				if (lo >= hi)
					return ;
				
				// Unsigned shift avoids overflow for large lo+hi.
				int mid = SupportClass.Number.URShift((lo + hi), 1);
				
				// Median-of-three: sort postings[lo], postings[mid],
				// postings[hi] among themselves so postings[mid] is the median.
				if (ComparePostings(postings[lo].p, postings[mid].p) > 0)
				{
					PostingVector tmp = postings[lo];
					postings[lo] = postings[mid];
					postings[mid] = tmp;
				}
				
				if (ComparePostings(postings[mid].p, postings[hi].p) > 0)
				{
					PostingVector tmp = postings[mid];
					postings[mid] = postings[hi];
					postings[hi] = tmp;
					
					if (ComparePostings(postings[lo].p, postings[mid].p) > 0)
					{
						PostingVector tmp2 = postings[lo];
						postings[lo] = postings[mid];
						postings[mid] = tmp2;
					}
				}
				
				int left = lo + 1;
				int right = hi - 1;
				
				// With <= 3 elements the median-of-three step above
				// already fully sorted the range.
				if (left >= right)
					return ;
				
				PostingVector partition = postings[mid];
				
				// Partition: move elements <= pivot to the left side,
				// elements > pivot to the right side.
				for (; ; )
				{
					while (ComparePostings(postings[right].p, partition.p) > 0)
						--right;
					
					while (left < right && ComparePostings(postings[left].p, partition.p) <= 0)
						++left;
					
					if (left < right)
					{
						PostingVector tmp = postings[left];
						postings[left] = postings[right];
						postings[right] = tmp;
						--right;
					}
					else
					{
						break;
					}
				}
				
				// Recurse on both halves; postings[left] is in final position.
				QuickSort(postings, lo, left);
				QuickSort(postings, left + 1, hi);
			}
			/// <summary>If there are fields we've seen but did not see again
			/// in the last run, then free them up.  Also reduce
			/// postings hash size. 
			/// </summary>
			internal void  TrimFields()
			{
				
				// Compact allFieldDataArray in place: upto counts the
				// surviving (recently seen) fields.
				int upto = 0;
				for (int i = 0; i < numAllFieldData; i++)
				{
					FieldData fp = allFieldDataArray[i];
					if (fp.lastGen == - 1)
					{
						// This field was not seen since the previous
						// flush, so, free up its resources now
						
						// Unhash
						int hashPos = fp.fieldInfo.name.GetHashCode() & fieldDataHashMask;
						FieldData last = null;
						FieldData fp0 = fieldDataHash[hashPos];
						// Walk the chain at hashPos until we find fp.
						// NOTE(review): if fp were ever absent from the chain
						// this loop would NRE before reaching the assert below;
						// the assert documents the invariant rather than
						// guarding it.
						while (fp0 != fp)
						{
							last = fp0;
							fp0 = fp0.next;
						}
						System.Diagnostics.Debug.Assert(fp0 != null);
						
						// Unlink fp: either splice around it or replace the
						// bucket head.
						if (last == null)
							fieldDataHash[hashPos] = fp.next;
						else
							last.next = fp.next;
						
						if (Enclosing_Instance.infoStream != null)
							Enclosing_Instance.infoStream.WriteLine("  remove field=" + fp.fieldInfo.name);
					}
					else
					{
						// Reset
						fp.lastGen = - 1;
						allFieldDataArray[upto++] = fp;
						
						// If the postings hash is less than 20% full, shrink it.
						if (fp.numPostings > 0 && ((float) fp.numPostings) / fp.postingsHashSize < 0.2)
						{
							int hashSize = fp.postingsHashSize;
							
							// Reduce hash so it's between 25-50% full
							while (fp.numPostings < (hashSize >> 1) && hashSize >= 2)
								hashSize >>= 1;
							hashSize <<= 1;
							
							if (hashSize != fp.postingsHash.Length)
								fp.RehashPostings(hashSize);
						}
					}
				}
				
				// If we didn't see any norms for this field since
				// last flush, free it
				for (int i = 0; i < Enclosing_Instance.norms.Length; i++)
				{
					BufferedNorms n = Enclosing_Instance.norms[i];
					if (n != null && n.upto == 0)
						Enclosing_Instance.norms[i] = null;
				}
				
				numAllFieldData = upto;
				
				// Also pare back PostingsVectors if it's excessively
				// large
				if (maxPostingsVectors * 1.5 < postingsVectors.Length)
				{
					int newSize;
					if (0 == maxPostingsVectors)
						newSize = 1;
					else
					{
						newSize = (int) (1.5 * maxPostingsVectors);
					}
					// newSize <= postingsVectors.Length here, so copying
					// newSize elements is safe.
					PostingVector[] newArray = new PostingVector[newSize];
					Array.Copy(postingsVectors, 0, newArray, 0, newSize);
					postingsVectors = newArray;
				}
			}
			/// <summary>Sorts the first <c>numPosting</c> entries of
			/// <c>postings</c> in place (see QuickSort). </summary>
			internal void  DoVectorSort(PostingVector[] postings, int numPosting)
			{
				int last = numPosting - 1;
				QuickSort(postings, 0, last);
			}