// NOTE: a large block of dead, commented-out Java-style anonymous iterator classes
// (IterableAnonymousInnerClassHelper2 / IteratorAnonymousInnerClassHelper2) was removed
// here; the equivalent logic lives in the GetMerge*Enumerable iterator methods.

/// <summary>
/// Merges the sorted docvalues from <paramref name="toMerge"/>.
/// <para>
/// The default implementation calls <see cref="AddSortedField"/>, passing
/// an enumerable that merges ordinals and values and filters deleted documents.
/// </para>
/// </summary>
/// <param name="fieldInfo">field being merged</param>
/// <param name="mergeState">state of the in-progress merge (readers, live docs)</param>
/// <param name="toMerge">per-segment sorted doc values to merge, one per reader</param>
public virtual void MergeSortedField(FieldInfo fieldInfo, MergeState mergeState, IList<SortedDocValues> toMerge)
{
    AtomicReader[] readers = mergeState.Readers.ToArray();
    SortedDocValues[] dvs = toMerge.ToArray();

    // step 1: iterate thru each sub and mark terms still in use
    TermsEnum[] liveTerms = new TermsEnum[dvs.Length];
    for (int sub = 0; sub < liveTerms.Length; sub++)
    {
        AtomicReader reader = readers[sub];
        SortedDocValues dv = dvs[sub];
        Bits liveDocs = reader.LiveDocs;
        if (liveDocs == null)
        {
            // segment has no deletions: every term is still live
            liveTerms[sub] = dv.TermsEnum();
        }
        else
        {
            // mark only the ordinals referenced by at least one live document
            LongBitSet bitset = new LongBitSet(dv.ValueCount);
            for (int i = 0; i < reader.MaxDoc; i++)
            {
                if (liveDocs.Get(i))
                {
                    int ord = dv.GetOrd(i);
                    if (ord >= 0) // -1 means the document has no value for this field
                    {
                        bitset.Set(ord);
                    }
                }
            }
            liveTerms[sub] = new BitsFilteredTermsEnum(dv.TermsEnum(), bitset);
        }
    }

    // step 2: create ordinal map (this conceptually does the "merging")
    OrdinalMap map = new OrdinalMap(this, liveTerms);

    // step 3: add field
    AddSortedField(fieldInfo,
        GetMergeSortValuesEnumerable(map, dvs),
        // doc -> ord
        GetMergeSortedFieldDocToOrdEnumerable(readers, dvs, map));
}
/// <summary>
/// Returns sorted doc values for <paramref name="field"/>, caching the field's
/// <see cref="OrdinalMap"/> so repeated calls can rebuild the multi-segment view cheaply.
/// Returns null when the field has no SORTED doc values.
/// </summary>
public override SortedDocValues GetSortedDocValues(string field)
{
    EnsureOpen();
    OrdinalMap map = null;
    // cachedOrdMaps guards the per-field ordinal-map cache
    lock (cachedOrdMaps)
    {
        if (!cachedOrdMaps.TryGetValue(field, out map))
        {
            // uncached, or not a multi dv
            SortedDocValues dv = MultiDocValues.GetSortedValues(@in, field);
            MultiSortedDocValues docValues = dv as MultiSortedDocValues;
            if (docValues != null)
            {
                map = docValues.Mapping;
                // only cache the map when it was built for this reader's core
                // (otherwise it could outlive the data it indexes)
                if (map.owner == CoreCacheKey)
                {
                    cachedOrdMaps[field] = map;
                }
            }
            return (dv);
        }
    }
    // cached ordinal map: rebuild the per-leaf values around it.
    // Bail out if the field is not SORTED doc values (stale/mismatched cache entry).
    if (FieldInfos.FieldInfo(field).DocValuesType != DocValuesType.SORTED)
    {
        return (null);
    }
    int size = @in.Leaves.Count;
    SortedDocValues[] values = new SortedDocValues[size];
    int[] starts = new int[size + 1];
    for (int i = 0; i < size; i++)
    {
        AtomicReaderContext context = @in.Leaves[i];
        // substitute an empty instance for leaves that lack the field
        SortedDocValues v = context.AtomicReader.GetSortedDocValues(field) ?? DocValues.EMPTY_SORTED;
        values[i] = v;
        starts[i] = context.DocBase;
    }
    starts[size] = MaxDoc; // trailing sentinel: one past the last docBase
    return (new MultiSortedDocValues(values, starts, map));
}
/// <summary>
/// Adds a small source taxonomy into a brand-new (empty) destination taxonomy
/// and verifies the resulting ordinal mapping.
/// </summary>
public virtual void TestAddToEmpty()
{
    Directory destDir = NewDirectory();
    Directory srcDir = NewDirectory();

    // build a tiny source taxonomy with two categories
    var sourceWriter = new DirectoryTaxonomyWriter(srcDir);
    sourceWriter.AddCategory(new FacetLabel("Author", "Rob Pike"));
    sourceWriter.AddCategory(new FacetLabel("Aardvarks", "Bob"));
    sourceWriter.Dispose();

    // import it into an empty destination taxonomy
    var destWriter = new DirectoryTaxonomyWriter(destDir);
    OrdinalMap ordinalMap = randomOrdinalMap();
    destWriter.AddTaxonomy(srcDir, ordinalMap);
    destWriter.Dispose();

    validate(destDir, srcDir, ordinalMap);
    IOUtils.Close(destDir, srcDir);
}
/// <summary>
/// Concurrently populates two taxonomies with up to <paramref name="ncats"/> random
/// categories (ordinals drawn from <paramref name="range"/>), then merges the second
/// into the first and validates the resulting ordinal map.
/// </summary>
private void Dotest(int ncats, int range)
{
    AtomicInteger numCats = new AtomicInteger(ncats);
    var taxoDirs = new Directory[2];

    // build each taxonomy with 4 concurrent writer threads
    for (int dirIdx = 0; dirIdx < taxoDirs.Length; dirIdx++)
    {
        taxoDirs[dirIdx] = NewDirectory();
        var writer = new DirectoryTaxonomyWriter(taxoDirs[dirIdx]);

        var workers = new ThreadClass[4];
        for (int w = 0; w < workers.Length; w++)
        {
            workers[w] = new ThreadAnonymousInnerClassHelper(this, range, numCats, writer);
        }
        foreach (ThreadClass worker in workers)
        {
            worker.Start();
        }
        foreach (ThreadClass worker in workers)
        {
            worker.Join();
        }
        writer.Dispose();
    }

    // merge the second taxonomy into the first and check the mapping
    var destWriter = new DirectoryTaxonomyWriter(taxoDirs[0]);
    OrdinalMap map = randomOrdinalMap();
    destWriter.AddTaxonomy(taxoDirs[1], map);
    destWriter.Dispose();

    validate(taxoDirs[0], taxoDirs[1], map);
    IOUtils.Close(taxoDirs);
}
/// <summary>
/// Verifies that every category of the source taxonomy exists in the destination
/// taxonomy, and that <paramref name="ordMap"/> maps each source ordinal to the
/// category's ordinal in the destination.
/// </summary>
private void validate(Directory dest, Directory src, OrdinalMap ordMap)
{
    var destReader = new DirectoryTaxonomyReader(dest);
    try
    {
        int destSize = destReader.Size;
        var srcReader = new DirectoryTaxonomyReader(src);
        try
        {
            var ordinals = ordMap.Map;

            // validate taxo sizes
            int srcSize = srcReader.Size;
            Assert.True(destSize >= srcSize, "destination taxonomy expected to be larger than source; dest=" + destSize + " src=" + srcSize);

            // every source category (skipping the root at ordinal 0) must exist
            // in the destination under the ordinal recorded by the map
            for (int srcOrd = 1; srcOrd < srcSize; srcOrd++)
            {
                FacetLabel cp = srcReader.GetPath(srcOrd);
                int destOrdinal = destReader.GetOrdinal(cp);
                Assert.True(destOrdinal > 0, cp + " not found in destination");
                Assert.AreEqual(destOrdinal, ordinals[srcOrd]);
            }
        }
        finally
        {
            ((TaxonomyReader)srcReader).Dispose(true);
        }
    }
    finally
    {
        ((TaxonomyReader)destReader).Dispose(true);
    }
}
/// <summary>
/// Enumerates, in docID order across all segments, the global ordinal of each
/// live document's sorted value. Deleted documents are skipped; documents with
/// no value yield -1.
/// </summary>
/// <param name="readers">per-segment readers being merged</param>
/// <param name="dvs">per-segment sorted doc values, parallel to <paramref name="readers"/></param>
/// <param name="map">ordinal map translating (segment, segment-ord) to global ord</param>
private IEnumerable<long?> GetMergeSortedFieldDocToOrdEnumerable(AtomicReader[] readers, SortedDocValues[] dvs, OrdinalMap map)
{
    int readerUpTo = -1;        // index of the segment currently being walked
    int docIDUpTo = 0;          // next docID to visit within the current segment
    AtomicReader currentReader = null;
    Bits currentLiveDocs = null;

    while (true)
    {
        if (readerUpTo == readers.Length)
        {
            // all segments exhausted
            yield break;
        }

        if (currentReader == null || docIDUpTo == currentReader.MaxDoc)
        {
            // advance to the next segment
            readerUpTo++;
            if (readerUpTo < readers.Length)
            {
                currentReader = readers[readerUpTo];
                currentLiveDocs = currentReader.LiveDocs;
            }
            docIDUpTo = 0;
            continue;
        }

        if (currentLiveDocs == null || currentLiveDocs.Get(docIDUpTo))
        {
            int segOrd = dvs[readerUpTo].GetOrd(docIDUpTo);
            docIDUpTo++;
            // -1 means "no value for this document" and is passed through unmapped
            yield return segOrd == -1 ? -1 : map.GetGlobalOrd(readerUpTo, segOrd);
            continue;
        }

        // deleted document: skip
        docIDUpTo++;
    }
}
/// <summary>
/// Enumerates the merged, global ordinals of every live document, in docID order
/// across all segments. For each live document the document's ordinals are buffered,
/// remapped through <paramref name="map"/>, and yielded one by one; deleted
/// documents are skipped.
/// </summary>
/// <param name="readers">per-segment readers being merged</param>
/// <param name="dvs">per-segment sorted-set doc values, parallel to <paramref name="readers"/></param>
/// <param name="map">ordinal map translating (segment, segment-ord) to global ord</param>
private IEnumerable<long?> GetMergeSortedSetOrdsEnumerable(AtomicReader[] readers, SortedSetDocValues[] dvs, OrdinalMap map)
{
    int readerUpto = -1;
    int docIDUpto = 0;
    AtomicReader currentReader = null;
    Bits currentLiveDocs = null;
    long[] ords = new long[8];   // buffer of the current document's global ords
    int ordUpto = 0;
    int ordLength = 0;

    while (true)
    {
        if (readerUpto == readers.Length)
        {
            yield break;
        }

        if (ordUpto < ordLength)
        {
            // BUGFIX: read the current slot BEFORE advancing. The previous code
            // incremented ordUpto first and then yielded ords[ordUpto], which
            // skipped ords[0] and read one slot past the last valid ordinal of
            // each document (cf. the correct sibling implementation).
            long value = ords[ordUpto];
            ordUpto++;
            yield return value;
            continue;
        }

        if (currentReader == null || docIDUpto == currentReader.MaxDoc)
        {
            // advance to the next segment
            readerUpto++;
            if (readerUpto < readers.Length)
            {
                currentReader = readers[readerUpto];
                currentLiveDocs = currentReader.LiveDocs;
            }
            docIDUpto = 0;
            continue;
        }

        if (currentLiveDocs == null || currentLiveDocs.Get(docIDUpto))
        {
            Debug.Assert(docIDUpto < currentReader.MaxDoc);
            SortedSetDocValues dv = dvs[readerUpto];
            dv.Document = docIDUpto;
            ordUpto = ordLength = 0;
            long ord;
            // buffer this document's ordinals, remapped to the global ord space
            while ((ord = dv.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
            {
                if (ordLength == ords.Length)
                {
                    ords = ArrayUtil.Grow(ords, ordLength + 1);
                }
                ords[ordLength] = map.GetGlobalOrd(readerUpto, ord);
                ordLength++;
            }
            docIDUpto++;
            continue;
        }

        // deleted document: skip
        docIDUpto++;
    }
}
/// <summary>
/// Enumerates, in docID order across all segments, the global ordinal of each
/// live document's sorted value. Deleted documents are skipped; documents with
/// no value yield -1.
/// </summary>
/// <param name="readers">per-segment readers being merged</param>
/// <param name="dvs">per-segment sorted doc values, parallel to <paramref name="readers"/></param>
/// <param name="map">ordinal map translating (segment, segment-ord) to global ord</param>
private IEnumerable<long?> GetMergeSortedFieldDocToOrdEnumerable(AtomicReader[] readers, SortedDocValues[] dvs, OrdinalMap map)
{
    int readerUpTo = -1;
    int docIDUpTo = 0;
    AtomicReader currentReader = null;
    Bits currentLiveDocs = null;

    while (true)
    {
        if (readerUpTo == readers.Length)
        {
            yield break;
        }

        if (currentReader == null || docIDUpTo == currentReader.MaxDoc)
        {
            // advance to the next segment
            readerUpTo++;
            if (readerUpTo < readers.Length)
            {
                currentReader = readers[readerUpTo];
                currentLiveDocs = currentReader.LiveDocs;
            }
            docIDUpTo = 0;
            continue;
        }

        if (currentLiveDocs == null || currentLiveDocs.Get(docIDUpTo))
        {
            int segOrd = dvs[readerUpTo].GetOrd(docIDUpTo);
            docIDUpTo++;
            // BUGFIX: GetOrd returns -1 for documents with no value; -1 must be
            // passed through as-is rather than fed to GetGlobalOrd (the sibling
            // implementation and the original reference code both guard this).
            yield return segOrd == -1 ? -1 : map.GetGlobalOrd(readerUpTo, segOrd);
            continue;
        }

        // deleted document: skip
        docIDUpTo++;
    }
}
/// <summary>
/// Takes the categories from the given taxonomy directory, and adds the
/// missing ones to this taxonomy. Additionally, it fills the given
/// <see cref="OrdinalMap"/> with a mapping from the original ordinal to the new
/// ordinal.
/// </summary>
/// <param name="taxoDir">directory holding the source taxonomy index</param>
/// <param name="map">receives source-ordinal → destination-ordinal mappings</param>
public virtual void AddTaxonomy(Directory taxoDir, OrdinalMap map)
{
    EnsureOpen();
    DirectoryReader r = DirectoryReader.Open(taxoDir);
    try
    {
        int size = r.NumDocs;
        OrdinalMap ordinalMap = map;
        ordinalMap.Size = size;
        int @base = 0; // docBase of the current leaf within the source reader
        // te/docs are reused across leaves (passed back into Iterator/Docs)
        TermsEnum te = null;
        DocsEnum docs = null;
        foreach (AtomicReaderContext ctx in r.Leaves)
        {
            AtomicReader ar = ctx.AtomicReader;
            Terms terms = ar.Terms(Consts.FULL);
            te = terms.Iterator(te);
            while (te.Next() != null)
            {
                // each term in the FULL field encodes one category path
                FacetLabel cp = new FacetLabel(FacetsConfig.StringToPath(te.Term().Utf8ToString()));
                // adds the category if missing, returns its ordinal either way
                int ordinal = AddCategory(cp);
                docs = te.Docs(null, docs, DocsEnum.FLAG_NONE);
                // the term's single posting is the category's source ordinal (leaf-relative)
                ordinalMap.AddMapping(docs.NextDoc() + @base, ordinal);
            }
            @base += ar.MaxDoc; // no deletions, so we're ok
        }
        ordinalMap.AddDone();
    }
    finally
    {
        r.Dispose();
    }
}
/// <summary>
/// Creates a new <see cref="MultiSortedSetDocValues"/> over <paramref name="values"/>.
/// </summary>
/// <param name="values">per-segment sorted-set doc values, one per leaf</param>
/// <param name="docStarts">docBase of each segment plus a trailing maxDoc sentinel</param>
/// <param name="mapping">ordinal map translating per-segment ords to global ords</param>
internal MultiSortedSetDocValues(SortedSetDocValues[] values, int[] docStarts, OrdinalMap mapping)
{
    // the mapping must have been built over exactly these segments
    Debug.Assert(values.Length == mapping.ordDeltas.Length);
    // docStarts carries one extra sentinel entry, hence +1
    Debug.Assert(docStarts.Length == values.Length + 1);
    this.values = values;
    this.docStarts = docStarts;
    this.mapping = mapping;
}
/// <summary>
/// Enumerates the merged, global ordinals of every live document, in docID order
/// across all segments. For each live document the document's ordinals are buffered,
/// remapped through <paramref name="map"/>, and yielded one by one; deleted
/// documents are skipped.
/// </summary>
/// <param name="readers">per-segment readers being merged</param>
/// <param name="dvs">per-segment sorted-set doc values, parallel to <paramref name="readers"/></param>
/// <param name="map">ordinal map translating (segment, segment-ord) to global ord</param>
private IEnumerable<long?> GetMergeSortedSetOrdsEnumerable(AtomicReader[] readers, SortedSetDocValues[] dvs, OrdinalMap map)
{
    int readerUpto = -1;         // index of the segment currently being walked
    int docIDUpto = 0;           // next docID to visit within the current segment
    AtomicReader currentReader = null;
    Bits currentLiveDocs = null;
    var ords = new long[8];      // buffer of the current document's global ords
    int ordUpto = 0;
    int ordLength = 0;

    while (true)
    {
        if (readerUpto == readers.Length)
        {
            // all segments exhausted
            yield break;
        }

        if (ordUpto < ordLength)
        {
            // drain the buffered ordinals of the current document
            var value = ords[ordUpto];
            ordUpto++;
            yield return value;
            continue;
        }

        if (currentReader == null || docIDUpto == currentReader.MaxDoc)
        {
            // advance to the next segment
            readerUpto++;
            if (readerUpto < readers.Length)
            {
                currentReader = readers[readerUpto];
                currentLiveDocs = currentReader.LiveDocs;
            }
            docIDUpto = 0;
            continue;
        }

        if (currentLiveDocs == null || currentLiveDocs.Get(docIDUpto))
        {
            Debug.Assert(docIDUpto < currentReader.MaxDoc);
            SortedSetDocValues dv = dvs[readerUpto];
            dv.Document = docIDUpto;
            ordUpto = ordLength = 0;
            long ord;
            // buffer this document's ordinals, remapped to the global ord space
            while ((ord = dv.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
            {
                if (ordLength == ords.Length)
                {
                    ords = ArrayUtil.Grow(ords, ordLength + 1);
                }
                ords[ordLength] = map.GetGlobalOrd(readerUpto, ord);
                ordLength++;
            }
            docIDUpto++;
            continue;
        }

        // deleted document: skip
        docIDUpto++;
    }
}
/// <summary>
/// Enumerates the merged terms in global-ordinal order: for each global ordinal,
/// looks the term up in the first segment that contains it and yields a freshly
/// allocated <see cref="BytesRef"/> holding its bytes.
/// </summary>
private IEnumerable<BytesRef> GetMergeSortedSetValuesEnumerable(OrdinalMap map, SortedSetDocValues[] dvs)
{
    for (long globalOrd = 0; globalOrd < map.ValueCount; globalOrd++)
    {
        // resolve the global ordinal back to (segment, segment-ordinal)
        int segment = map.GetFirstSegmentNumber(globalOrd);
        long segmentOrd = map.GetFirstSegmentOrd(globalOrd);

        // a new BytesRef per term, since consumers may hold on to it
        var term = new BytesRef();
        dvs[segment].LookupOrd(segmentOrd, term);
        yield return term;
    }
}
// NOTE: a large block of dead, commented-out Java-style anonymous iterator classes
// (IterableAnonymousInnerClassHelper3/4 and their enumerators) was removed here;
// the equivalent logic lives in the GetMergeSortedSet*Enumerable iterator methods.

/// <summary>
/// Merges the sortedset docvalues from <paramref name="toMerge"/>.
/// <para>
/// The default implementation calls <see cref="AddSortedSetField"/>, passing
/// enumerables that merge ordinals and values and filter deleted documents.
/// </para>
/// </summary>
/// <param name="fieldInfo">field being merged</param>
/// <param name="mergeState">state of the in-progress merge (readers, live docs)</param>
/// <param name="toMerge">per-segment sorted-set doc values to merge, one per reader</param>
public virtual void MergeSortedSetField(FieldInfo fieldInfo, MergeState mergeState, IList<SortedSetDocValues> toMerge)
{
    var readers = mergeState.Readers.ToArray();
    var dvs = toMerge.ToArray();

    // step 1: iterate thru each sub and mark terms still in use
    var liveTerms = new TermsEnum[dvs.Length];
    for (int sub = 0; sub < liveTerms.Length; sub++)
    {
        var reader = readers[sub];
        var dv = dvs[sub];
        var liveDocs = reader.LiveDocs;
        if (liveDocs == null)
        {
            // segment has no deletions: every term is still live
            liveTerms[sub] = dv.TermsEnum();
        }
        else
        {
            // mark every ordinal referenced by at least one live document
            var bitset = new LongBitSet(dv.ValueCount);
            for (int i = 0; i < reader.MaxDoc; i++)
            {
                if (liveDocs.Get(i))
                {
                    dv.Document = i;
                    long ord;
                    while ((ord = dv.NextOrd()) != SortedSetDocValues.NO_MORE_ORDS)
                    {
                        bitset.Set(ord);
                    }
                }
            }
            liveTerms[sub] = new BitsFilteredTermsEnum(dv.TermsEnum(), bitset);
        }
    }

    // step 2: create ordinal map (this conceptually does the "merging")
    var map = new OrdinalMap(this, liveTerms);

    // step 3: add field
    AddSortedSetField(fieldInfo,
        GetMergeSortedSetValuesEnumerable(map, dvs),
        // doc -> ord count
        GetMergeSortedSetDocToOrdCountEnumerable(readers, dvs),
        // ords
        GetMergeSortedSetOrdsEnumerable(readers, dvs, map));
}
/// <summary>
/// Merges the sorted docvalues from <paramref name="toMerge"/>.
/// <para>
/// The default implementation calls <see cref="AddSortedField"/>, passing an
/// enumerable that merges ordinals and values and filters deleted documents.</para>
/// </summary>
public virtual void MergeSortedField(FieldInfo fieldInfo, MergeState mergeState, IList<SortedDocValues> toMerge)
{
    var segmentReaders = mergeState.Readers.ToArray();
    var segmentValues = toMerge.ToArray();

    // step 1: walk every segment and record which ordinals are still referenced
    // by at least one live (non-deleted) document
    var liveTerms = new TermsEnum[segmentValues.Length];
    for (int seg = 0; seg < liveTerms.Length; seg++)
    {
        var dv = segmentValues[seg];
        var liveDocs = segmentReaders[seg].LiveDocs;
        if (liveDocs == null)
        {
            // segment has no deletions: every term survives
            liveTerms[seg] = dv.TermsEnum();
        }
        else
        {
            var usedOrds = new LongBitSet(dv.ValueCount);
            for (int doc = 0; doc < segmentReaders[seg].MaxDoc; doc++)
            {
                if (!liveDocs.Get(doc))
                {
                    continue; // deleted
                }
                int ord = dv.GetOrd(doc);
                if (ord >= 0) // -1 marks a document without a value
                {
                    usedOrds.Set(ord);
                }
            }
            liveTerms[seg] = new BitsFilteredTermsEnum(dv.TermsEnum(), usedOrds);
        }
    }

    // step 2: create ordinal map (this conceptually does the "merging")
    var map = new OrdinalMap(this, liveTerms);

    // step 3: add field
    AddSortedField(fieldInfo,
        GetMergeSortValuesEnumerable(map, segmentValues),
        // doc -> ord
        GetMergeSortedFieldDocToOrdEnumerable(segmentReaders, segmentValues, map));
}
/// <summary>
/// Creates a new MultiSortedSetDocValues over <code>values</code>.
/// </summary>
/// <param name="values">per-segment sorted-set doc values, one per leaf</param>
/// <param name="docStarts">docBase of each segment plus a trailing maxDoc sentinel</param>
/// <param name="mapping">ordinal map translating per-segment ords to global ords</param>
internal MultiSortedSetDocValues(SortedSetDocValues[] values, int[] docStarts, OrdinalMap mapping)
{
    // the mapping must have been built over exactly these segments
    Debug.Assert(values.Length == mapping.OrdDeltas.Length);
    // docStarts carries one extra sentinel entry, hence +1
    Debug.Assert(docStarts.Length == values.Length + 1);
    this.Values = values;
    this.DocStarts = docStarts;
    this.Mapping = mapping;
}
/// <summary>
/// Returns a <see cref="SortedSetDocValues"/> for a reader's docvalues
/// (potentially doing extremely slow things).
/// <para>
/// This is an extremely slow way to access sorted-set values. Instead, access them
/// per-segment with <see cref="AtomicReader.GetSortedSetDocValues(string)"/>.
/// </para>
/// </summary>
public static SortedSetDocValues GetSortedSetValues(IndexReader r, string field)
{
    var contexts = r.Leaves;
    int leafCount = contexts.Count;

    // trivial cases: no leaves at all, or a single leaf we can return directly
    if (leafCount == 0)
    {
        return null;
    }
    if (leafCount == 1)
    {
        return contexts[0].AtomicReader.GetSortedSetDocValues(field);
    }

    var perLeaf = new SortedSetDocValues[leafCount];
    var docBases = new int[leafCount + 1];
    bool foundAny = false;
    for (int leaf = 0; leaf < leafCount; leaf++)
    {
        var ctx = contexts[leaf];
        var dv = ctx.AtomicReader.GetSortedSetDocValues(field);
        if (dv != null)
        {
            foundAny = true;
        }
        // substitute an empty instance for leaves that lack the field
        perLeaf[leaf] = dv ?? DocValues.EMPTY_SORTED_SET;
        docBases[leaf] = ctx.DocBase;
    }
    docBases[leafCount] = r.MaxDoc; // trailing sentinel

    // no leaf actually had the field
    if (!foundAny)
    {
        return null;
    }

    var enums = new TermsEnum[perLeaf.Length];
    for (int leaf = 0; leaf < perLeaf.Length; leaf++)
    {
        enums[leaf] = perLeaf[leaf].TermsEnum();
    }
    var mapping = new OrdinalMap(r.CoreCacheKey, enums);
    return new MultiSortedSetDocValues(perLeaf, docBases, mapping);
}
/// <summary>
/// Emits the body of the generated <c>MapInternal(IDataReader)</c> method onto
/// <paramref name="type"/>: creates a new T via the loader delegate, sets each
/// mapped property from the data reader (optionally through a conversor method),
/// sets each constant-mapped property, and returns the instance.
/// </summary>
/// <param name="type">dynamic type being built</param>
/// <param name="result">mapping metadata (loader field, ordinal mappings, constants)</param>
/// <exception cref="ArgumentException">
/// when an ordinal-mapped property has no set method, or its conversor method is
/// not public static
/// </exception>
void EmitMapMethod(TypeBuilder type, MappingResult result)
{
    MethodBuilder builder = type
        .DefineMethod("MapInternal",
            MethodAttributes.Public | MethodAttributes.HideBySig | MethodAttributes.Virtual,
            typeof(T), new Type[] { typeof(IDataReader) });

    // define that the method is allowed to access non-public members.
    /*Type permission = typeof(ReflectionPermissionAttribute);
     * ConstructorInfo ctor =
     *   permission
     *     .GetConstructor(new[] { typeof(SecurityAction) });
     * PropertyInfo access = permission.GetProperty("Flags");
     * var reflection_permission_attribute =
     *   new CustomAttributeBuilder(ctor, new object[] { SecurityAction.Demand },
     *     new[] { access },
     *     new object[] {
     *       ReflectionPermissionFlag.MemberAccess |
     *       ReflectionPermissionFlag.RestrictedMemberAccess
     *     });
     *
     * builder.SetCustomAttribute(reflection_permission_attribute);*/

    ILGenerator il = builder.GetILGenerator();

    // Create a new instance of the T using the associated class loader and
    // stores in a local variable (local slot 0).
    il.DeclareLocal(type_t_);
    MethodInfo callable = typeof(CallableDelegate<T>).GetMethod("Invoke");
    il.Emit(OpCodes.Ldarg_0);
    il.Emit(OpCodes.Ldfld, result.LoaderField);
    il.Emit(OpCodes.Callvirt, callable);
    il.Emit(OpCodes.Stloc_0);

    // Set the value of the properties of the newly created T object.
    OrdinalMap[] fields = result.OrdinalsMapping;
    for (int i = 0, j = fields.Length; i < j; i++)
    {
        OrdinalMap field = fields[i];
        int ordinal = field.Key;
        PropertyInfo property = field.Value;

        // pick the IDataReader "get" method matching the property's (raw) type
        MethodInfo get_x_method =
            Dynamics_.GetDataReaderMethod(
                Dynamics_.GetDataReaderMethodName(field.RawType ?? property.PropertyType),
                data_reader_type_);

        // Get the set method of the current property. Ordinal-mapped properties
        // MUST be settable; a missing setter is a configuration error.
        MethodInfo set_x_property = property.GetSetMethod(true);
        if (set_x_property == null)
        {
            throw new ArgumentException(
                "The property {0} does not have a set method.".Fmt(property.Name));
        }

        // loaded the "data transfer object"
        il.Emit(OpCodes.Ldloc_0);

        // if the conversor method is defined we need to load the
        // "this" pointer onto the stack before the data reader, so we can
        // chain the conversion method call after the value is retrieved
        // from the data reader.
        MethodInfo conversor = null;
        if (field.Conversor != null)
        {
            conversor = (field.Conversor.Body as MethodCallExpression).Method;
            if (!conversor.IsStatic || !conversor.IsPublic)
            {
                throw new ArgumentException(
                    "The \"conversor\" method of the property {0} is not static or public"
                        .Fmt(property.Name));
            }
        }

        // loads the data reader
        il.Emit(OpCodes.Ldarg_1);

        // load the ordinals_ array
        il.Emit(OpCodes.Ldarg_0);
        il.Emit(OpCodes.Ldfld, result.OrdinalsField);

        // load the element of the array at |ordinal| position
        EmitLoad(il, ordinal);
        il.Emit(OpCodes.Ldelem_I4);

        // call the "get...(int i)" method of the datareader
        //   -> i will be equals to the element loaded from the
        //      array at positiom "ordinal"
        il.Emit(OpCodes.Callvirt, get_x_method);

        // the stack now contains the returned value of "get...(int i)"
        // method.

        // convert the result of get method and...
        if (conversor != null)
        {
            il.Emit(OpCodes.Call, conversor);
        }

        // store it on the loaded field.
        il.Emit(OpCodes.Callvirt, set_x_property);
    }

    // Constant mappings: assign fixed values to properties (no reader access).
    ConstantMap[] constant_maps = result.ConstantMappings;
    for (int i = 0, j = constant_maps.Length; i < j; i++)
    {
        ITypeMap map = constant_maps[i].Key;
        PropertyInfo property = constant_maps[i].Value;
        if (map.MapType != TypeMapType.Ignore)
        {
            // Get the set method of the current property. If the property does
            // not have a set method ignores it.
            MethodInfo set_x_property = property.GetSetMethod(true);
            if (set_x_property == null)
            {
                continue;
            }
            il.Emit(OpCodes.Ldloc_0);
            EmitLoad(il, map);
            il.Emit(OpCodes.Callvirt, set_x_property);
        }
    }

    // load the local T and return.
    il.Emit(OpCodes.Ldloc_0);
    il.Emit(OpCodes.Ret);
}