Example 1
        public override void WriteEndVersion(Process process, AbstractConnection input, Entity entity, bool force = false) {
            if (entity.Updates + entity.Inserts <= 0 && !force)
                return;

            var versionType = entity.Version == null ? "string" : entity.Version.SimpleType;
            var end = entity.End ?? new DefaultFactory(Logger).Convert(entity.End, versionType);

            using (var dir = LuceneDirectoryFactory.Create(this, TflBatchEntity(entity.ProcessName))) {
                using (var writer = new IndexWriter(dir, new KeywordAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED)) {
                    var doc = new Document();
                    doc.fields.Add(new NumericField("id", Libs.Lucene.Net.Document.Field.Store.YES, true).SetIntValue(entity.TflBatchId));
                    doc.fields.Add(new Libs.Lucene.Net.Document.Field("process", entity.ProcessName, Libs.Lucene.Net.Document.Field.Store.YES, Libs.Lucene.Net.Document.Field.Index.NOT_ANALYZED_NO_NORMS));
                    doc.fields.Add(new Libs.Lucene.Net.Document.Field("connection", input.Name, Libs.Lucene.Net.Document.Field.Store.YES, Libs.Lucene.Net.Document.Field.Index.NOT_ANALYZED_NO_NORMS));
                    doc.fields.Add(new Libs.Lucene.Net.Document.Field("entity", entity.Alias, Libs.Lucene.Net.Document.Field.Store.YES, Libs.Lucene.Net.Document.Field.Index.NOT_ANALYZED_NO_NORMS));
                    doc.fields.Add(new NumericField("updates", Libs.Lucene.Net.Document.Field.Store.YES, true).SetLongValue(entity.Updates));
                    doc.fields.Add(new NumericField("inserts", Libs.Lucene.Net.Document.Field.Store.YES, true).SetLongValue(entity.Inserts));
                    doc.fields.Add(new NumericField("deletes", Libs.Lucene.Net.Document.Field.Store.YES, true).SetLongValue(entity.Deletes));
                    doc.fields.Add(LuceneWriter.CreateField("version", versionType, new SearchType { Analyzer = "keyword" }, end));
                    doc.fields.Add(new Libs.Lucene.Net.Document.Field("version_type", versionType, Libs.Lucene.Net.Document.Field.Store.YES, Libs.Lucene.Net.Document.Field.Index.NOT_ANALYZED_NO_NORMS));
                    doc.fields.Add(new NumericField("tflupdate", Libs.Lucene.Net.Document.Field.Store.YES, true).SetLongValue(DateTime.UtcNow.Ticks));
                    writer.AddDocument(doc);
                    writer.Commit();
                    writer.Optimize();
                }
            }
        }
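A reader-side counterpart is not part of the example, but the batch document written above can be queried back with the ordinary Lucene.Net 3.0.x search API. The sketch below is illustrative only: it uses the stock Lucene.Net namespaces rather than the embedded Libs.* copy, and BatchVersionReader / ReadLastVersion are hypothetical names.

using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Store;

public static class BatchVersionReader {
    // Returns the stored "version" of the most recent batch document for an entity.
    public static string ReadLastVersion(Directory dir, string entityAlias) {
        using (var reader = IndexReader.Open(dir, true))
        using (var searcher = new IndexSearcher(reader)) {
            // "entity" was indexed NOT_ANALYZED_NO_NORMS above, so an exact TermQuery matches it.
            var query = new TermQuery(new Term("entity", entityAlias));
            // Sort on the numeric "id" batch field, newest first.
            var sort = new Sort(new SortField("id", SortField.INT, true));
            var hits = searcher.Search(query, null, 1, sort);
            if (hits.TotalHits == 0)
                return null;
            var doc = searcher.Doc(hits.ScoreDocs[0].Doc);
            return doc.Get("version"); // value written by LuceneWriter.CreateField above
        }
    }
}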
Example 2
		internal void  AddDocument(Document.Document doc)
		{
			indexStream.WriteLong(fieldsStream.FilePointer);

			System.Collections.Generic.IList<IFieldable> fields = doc.GetFields();
			int storedCount = fields.Count(field => field.IsStored);
			fieldsStream.WriteVInt(storedCount);
			
			foreach(IFieldable field in fields)
			{
				if (field.IsStored)
					WriteField(fieldInfos.FieldInfo(field.Name), field);
			}
		}
Example 3
		/// <summary>Adds field info for a Document. </summary>
		public void  Add(Document.Document doc)
		{
			lock (this)
			{
				System.Collections.Generic.IList<IFieldable> fields = doc.GetFields();
                foreach(IFieldable field in fields)
                {
                    Add(field.Name, field.IsIndexed, field.IsTermVectorStored,
                        field.IsStorePositionWithTermVector, field.IsStoreOffsetWithTermVector, field.OmitNorms,
                        false, field.OmitTermFreqAndPositions);
                }
			}
		}
Example 4
			// Java-to-C# port pattern: the anonymous enumeration's initializer is hoisted into
			// InitBlock, which captures the enclosing Document and starts enumerating its fields.
			private void  InitBlock(Document enclosingInstance)
			{
				this.enclosingInstance = enclosingInstance;
				iter = Enclosing_Instance.fields.GetEnumerator();
			}
Example 5
			// The ported anonymous-class enumerator delegates its construction to InitBlock above.
			public AnonymousClassEnumeration(Document enclosingInstance)
			{
				InitBlock(enclosingInstance);
			}
Example 6
		// Add the size of field as a byte[] containing the 4 bytes of the integer byte size (high order byte first; char = 2 bytes)
		// Read just the size -- caller must skip the field content to continue reading fields
		// Return the size in bytes or chars, depending on field type
		private int AddFieldSize(Document.Document doc, FieldInfo fi, bool binary, bool compressed)
		{
			int size = fieldsStream.ReadVInt();
			int bytesize = binary || compressed ? size : 2 * size;
			var sizebytes = new byte[4];
			sizebytes[0] = (byte) (Number.URShift(bytesize, 24));
			sizebytes[1] = (byte) (Number.URShift(bytesize, 16));
			sizebytes[2] = (byte) (Number.URShift(bytesize, 8));
			sizebytes[3] = (byte) bytesize;
			doc.Add(new Field(fi.name, sizebytes, Field.Store.YES));
			return size;
		}
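The comments above describe the layout written when only a field's size is requested: a four-byte big-endian integer holding the byte size, with char fields counting two bytes per char. Below is a stand-alone sketch of that encoding; FieldSizeEncoding, Encode, and Decode are illustrative names, not Lucene.Net methods.

public static class FieldSizeEncoding {
    // Mirrors AddFieldSize: the byte size (size for binary/compressed fields,
    // 2 * size for char fields) is laid out as four bytes, high order byte first.
    public static byte[] Encode(int size, bool binaryOrCompressed) {
        int byteSize = binaryOrCompressed ? size : 2 * size;
        return new[] {
            (byte)(byteSize >> 24),
            (byte)(byteSize >> 16),
            (byte)(byteSize >> 8),
            (byte)byteSize
        };
    }

    // Reassembles the big-endian int that Encode (and AddFieldSize) produces.
    public static int Decode(byte[] b) {
        return (b[0] << 24) | (b[1] << 16) | (b[2] << 8) | b[3];
    }
}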
Example 7
		private void AddField(Document.Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
		{
			//we have a binary stored field, and it may be compressed
			if (binary)
			{
				int toRead = fieldsStream.ReadVInt();
				var b = new byte[toRead];
				fieldsStream.ReadBytes(b, 0, b.Length);
				doc.Add(compressed ? new Field(fi.name, Uncompress(b), Field.Store.YES) : new Field(fi.name, b, Field.Store.YES));
			}
			else
			{
				const Field.Store store = Field.Store.YES;
				Field.Index index = FieldExtensions.ToIndex(fi.isIndexed, tokenize);
				Field.TermVector termVector = FieldExtensions.ToTermVector(fi.storeTermVector, fi.storeOffsetWithTermVector, fi.storePositionWithTermVector);
				
				AbstractField f;
				if (compressed)
				{
					int toRead = fieldsStream.ReadVInt();
					
					var b = new byte[toRead];
					fieldsStream.ReadBytes(b, 0, b.Length);
					f = new Field(fi.name, false, System.Text.Encoding.GetEncoding("UTF-8").GetString(Uncompress(b)), store, index,
					              termVector) {OmitTermFreqAndPositions = fi.omitTermFreqAndPositions, OmitNorms = fi.omitNorms};
				}
				else
				{
					f = new Field(fi.name, false, fieldsStream.ReadString(), store, index, termVector)
					    	{OmitTermFreqAndPositions = fi.omitTermFreqAndPositions, OmitNorms = fi.omitNorms};
				}

				doc.Add(f);
			}
		}
Example 8
		private void  AddFieldLazy(Document.Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
		{
			if (binary)
			{
				int toRead = fieldsStream.ReadVInt();
				long pointer = fieldsStream.FilePointer;
				//was: doc.add(new Fieldable(fi.name, b, Fieldable.Store.YES));
				doc.Add(new LazyField(this, fi.name, Field.Store.YES, toRead, pointer, binary, compressed));

				//Need to move the pointer ahead by toRead positions
				fieldsStream.Seek(pointer + toRead);
			}
			else
			{
				const Field.Store store = Field.Store.YES;
				Field.Index index = FieldExtensions.ToIndex(fi.isIndexed, tokenize);
				Field.TermVector termVector = FieldExtensions.ToTermVector(fi.storeTermVector, fi.storeOffsetWithTermVector, fi.storePositionWithTermVector);
				
				AbstractField f;
				if (compressed)
				{
					int toRead = fieldsStream.ReadVInt();
					long pointer = fieldsStream.FilePointer;
					f = new LazyField(this, fi.name, store, toRead, pointer, binary, compressed);
					//skip over the part that we aren't loading
					fieldsStream.Seek(pointer + toRead);
					f.OmitNorms = fi.omitNorms;
					f.OmitTermFreqAndPositions = fi.omitTermFreqAndPositions;
				}
				else
				{
					int length = fieldsStream.ReadVInt();
					long pointer = fieldsStream.FilePointer;
					//Skip ahead of where we are by the length of what is stored
                    if (format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
                    {
                        fieldsStream.Seek(pointer + length);
                    }
                    else
                    {
                        fieldsStream.SkipChars(length);
                    }
					f = new LazyField(this, fi.name, store, index, termVector, length, pointer, binary, compressed)
					    	{OmitNorms = fi.omitNorms, OmitTermFreqAndPositions = fi.omitTermFreqAndPositions};
				}

				doc.Add(f);
			}
		}
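Which of AddField, AddFieldLazy, and AddFieldSize runs is decided by the FieldSelector the caller passes when loading a stored document: LOAD takes the eager path, LAZY_LOAD takes the lazy path above, and SIZE / SIZE_AND_BREAK take the size-only path from Example 6. A minimal caller-side sketch follows, assuming Lucene.Net 3.0.x; LazyBodySelector, the "body" field, and ReadBody are illustrative, not part of the library.

using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;

// Loads every stored field eagerly except "body", which is deferred.
public class LazyBodySelector : FieldSelector {
    public FieldSelectorResult Accept(string fieldName) {
        return fieldName == "body" ? FieldSelectorResult.LAZY_LOAD : FieldSelectorResult.LOAD;
    }
}

public static class LazyLoadExample {
    public static string ReadBody(Directory dir, int docId) {
        using (var reader = IndexReader.Open(dir, true)) {
            Document doc = reader.Document(docId, new LazyBodySelector());
            var body = doc.GetFieldable("body"); // a LazyField: only length and pointer so far
            return body.StringValue;             // the stored bytes are read at this point
        }
    }
}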
Example 9
        public static Document CreateDocument(Dictionary<string, KeyValuePair<Field, SearchType>> fieldMap, Row row) {
            var doc = new Document();
            foreach (var pair in fieldMap) {
                var name = pair.Key;
                var field = pair.Value.Key;
                var searchType = pair.Value.Value;

                if (!searchType.Store && !searchType.Index)
                    continue;

                doc.Add(CreateField(name, field.SimpleType, searchType, row[field.Alias]));
            }
            return doc;
        }
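A sketch of how the Document built here would typically reach an index. It assumes CreateDocument sits on the same LuceneWriter helper that Example 1 uses for CreateField, that the Transformalize types (Field, SearchType, Row) are in scope, and that RowIndexer / IndexRows are illustrative names.

using System.Collections.Generic;
using Lucene.Net.Analysis;
using Lucene.Net.Index;
using Lucene.Net.Store;

public static class RowIndexer {
    public static void IndexRows(Directory dir,
                                 Dictionary<string, KeyValuePair<Field, SearchType>> fieldMap,
                                 IEnumerable<Row> rows) {
        // KeywordAnalyzer matches the analyzer used for the batch index in Example 1.
        using (var writer = new IndexWriter(dir, new KeywordAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED)) {
            foreach (var row in rows)
                writer.AddDocument(LuceneWriter.CreateDocument(fieldMap, row));
            writer.Commit();
        }
    }
}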