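/// <summary>
/// Doubles the size of the internal power-of-two <c>fieldHash</c> table and
/// re-links each chained <see cref="DocFieldProcessorPerField"/> into its new
/// bucket. Because the table length stays a power of two, the bucket index can
/// be computed with a bit mask (<c>hash &amp; (size - 1)</c>) instead of a
/// modulo operation.
/// </summary>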
private void Rehash()
{
    int newHashSize = fieldHash.Length * 2;
    Debug.Assert(newHashSize > fieldHash.Length);

    DocFieldProcessorPerField[] newHashArray = new DocFieldProcessorPerField[newHashSize];

    // Rehash
    int newHashMask = newHashSize - 1;
    for (int j = 0; j < fieldHash.Length; j++)
    {
        DocFieldProcessorPerField fp0 = fieldHash[j];
        while (fp0 != null)
        {
            int hashPos2 = fp0.fieldInfo.Name.GetHashCode() & newHashMask;
            DocFieldProcessorPerField nextFP0 = fp0.next;
            fp0.next = newHashArray[hashPos2];
            newHashArray[hashPos2] = fp0;
            fp0 = nextFP0;
        }
    }

    fieldHash = newHashArray;
    hashMask = newHashMask;
}
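/// <summary>
/// Returns the set of all <see cref="DocFieldConsumerPerField"/> instances
/// currently registered in the field hash, walking every bucket and its
/// collision chain. Asserts that the number of consumers found matches
/// <c>totalFieldCount</c>.
/// </summary>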
public ICollection<DocFieldConsumerPerField> Fields()
{
    ICollection<DocFieldConsumerPerField> fields = new JCG.HashSet<DocFieldConsumerPerField>();
    for (int i = 0; i < fieldHash.Length; i++)
    {
        DocFieldProcessorPerField field = fieldHash[i];
        while (field != null)
        {
            fields.Add(field.consumer);
            field = field.next;
        }
    }
    Debug.Assert(fields.Count == totalFieldCount);
    return fields;
}
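/// <summary>
/// Processes all fields of the current document in two passes: first, each
/// <see cref="IIndexableField"/> is hashed to (or inserted into) its per-field
/// slot and handed to the stored-fields consumer; then the fields seen in this
/// document are sorted by name and forwarded to the indexing chain, so term
/// vectors are written in sorted field order.
/// </summary>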
public override void ProcessDocument(FieldInfos.Builder fieldInfos)
{
    consumer.StartDocument();
    storedConsumer.StartDocument();

    fieldCount = 0;

    int thisFieldGen = fieldGen++;

    // Absorb any new fields first seen in this document.
    // Also absorb any changes to fields we had already
    // seen before (eg suddenly turning on norms or
    // vectors, etc.):

    foreach (IIndexableField field in docState.doc)
    {
        string fieldName = field.Name;

        // Make sure we have a PerField allocated
        int hashPos = fieldName.GetHashCode() & hashMask;
        DocFieldProcessorPerField fp = fieldHash[hashPos];
        while (fp != null && !fp.fieldInfo.Name.Equals(fieldName, StringComparison.Ordinal))
        {
            fp = fp.next;
        }

        if (fp == null)
        {
            // TODO FI: we need to genericize the "flags" that a
            // field holds, and, how these flags are merged; it
            // needs to be more "pluggable" such that if I want
            // to have a new "thing" my Fields can do, I can
            // easily add it
            FieldInfo fi = fieldInfos.AddOrUpdate(fieldName, field.IndexableFieldType);

            fp = new DocFieldProcessorPerField(this, fi);
            fp.next = fieldHash[hashPos];
            fieldHash[hashPos] = fp;
            totalFieldCount++;

            if (totalFieldCount >= fieldHash.Length / 2)
            {
                Rehash();
            }
        }
        else
        {
            // need to addOrUpdate so that FieldInfos can update globalFieldNumbers
            // with the correct DocValue type (LUCENE-5192)
            FieldInfo fi = fieldInfos.AddOrUpdate(fieldName, field.IndexableFieldType);
            Debug.Assert(fi == fp.fieldInfo, "should only have updated an existing FieldInfo instance");
        }

        if (thisFieldGen != fp.lastGen)
        {
            // First time we're seeing this field for this doc
            fp.fieldCount = 0;

            if (fieldCount == fields.Length)
            {
                int newSize = fields.Length * 2;
                DocFieldProcessorPerField[] newArray = new DocFieldProcessorPerField[newSize];
                Array.Copy(fields, 0, newArray, 0, fieldCount);
                fields = newArray;
            }

            fields[fieldCount++] = fp;
            fp.lastGen = thisFieldGen;
        }

        fp.AddField(field);
        storedConsumer.AddField(docState.docID, field, fp.fieldInfo);
    }

    // If we are writing vectors then we must visit
    // fields in sorted order so they are written in
    // sorted order.  TODO: we actually only need to
    // sort the subset of fields that have vectors
    // enabled; we could save [small amount of] CPU
    // here.
    ArrayUtil.IntroSort(fields, 0, fieldCount, fieldsComp);
    for (int i = 0; i < fieldCount; i++)
    {
        DocFieldProcessorPerField perField = fields[i];
        perField.consumer.ProcessFields(perField.fields, perField.fieldCount);
    }
}
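/// <summary>
/// Aborts every per-field processor, the stored-fields consumer, and the
/// indexing-chain consumer. Exceptions are caught so that each component
/// still gets a chance to clean up; only the first exception encountered is
/// recorded, and it is rethrown once all aborts have run.
/// </summary>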
public override void Abort()
{
    Exception th = null;
    foreach (DocFieldProcessorPerField field in fieldHash)
    {
        DocFieldProcessorPerField fieldNext = field;
        while (fieldNext != null)
        {
            DocFieldProcessorPerField next = fieldNext.next;
            try
            {
                fieldNext.Abort();
            }
            catch (Exception t) when (t.IsThrowable())
            {
                if (th is null)
                {
                    th = t;
                }
            }
            fieldNext = next;
        }
    }

    try
    {
        storedConsumer.Abort();
    }
    catch (Exception t) when (t.IsThrowable())
    {
        if (th is null)
        {
            th = t;
        }
    }

    try
    {
        consumer.Abort();
    }
    catch (Exception t) when (t.IsThrowable())
    {
        if (th is null)
        {
            th = t;
        }
    }

    // If any errors occurred, throw the first one.
    if (th != null)
    {
        if (th.IsRuntimeException() || th.IsError())
        {
            ExceptionDispatchInfo.Capture(th).Throw(); // LUCENENET: Rethrow to preserve stack details from the original throw
        }
        // defensive code - we should not hit unchecked exceptions
        throw RuntimeException.Create(th);
    }
}