private JArray PerformBulkOperation(string index, IndexQuery indexQuery, bool allowStale, Func<string, TransactionInformation, object> batchOperation)
{
    var array = new JArray();
    database.TransactionalStorage.Batch(actions =>
    {
        var bulkIndexQuery = new IndexQuery
        {
            Query = indexQuery.Query,
            Start = indexQuery.Start,
            Cutoff = indexQuery.Cutoff,
            PageSize = int.MaxValue,
            FieldsToFetch = new[] { "__document_id" },
            SortedFields = indexQuery.SortedFields
        };

        bool stale;
        var queryResults = database.QueryDocumentIds(index, bulkIndexQuery, out stale);

        if (stale && allowStale == false)
        {
            throw new InvalidOperationException(
                "Bulk operation cancelled because the index is stale and allowStale is false");
        }

        foreach (var documentId in queryResults)
        {
            var result = batchOperation(documentId, transactionInformation);
            array.Add(JObject.FromObject(result, new JsonSerializer
            {
                Converters = { new JsonEnumConverter() }
            }));
        }
    });
    return array;
}
public static IndexQuery GetIndexQueryFromHttpContext(this IHttpContext context, int maxPageSize)
{
    var query = new IndexQuery
    {
        Query = context.Request.QueryString["query"] ?? "",
        Start = context.GetStart(),
        Cutoff = context.GetCutOff(),
        PageSize = context.GetPageSize(maxPageSize),
        FieldsToFetch = context.Request.QueryString.GetValues("fetch"),
        SortedFields = context.Request.QueryString.GetValues("sort")
            .EmptyIfNull()
            .Select(x => new SortedField(x))
            .ToArray()
    };

    double lat = context.GetLat(), lng = context.GetLng(), radius = context.GetRadius();
    if (lat != 0 || lng != 0 || radius != 0)
    {
        return new SpatialIndexQuery(query)
        {
            Latitude = lat,
            Longitude = lng,
            Radius = radius,
            SortByDistance = context.SortByDistance()
        };
    }
    return query;
}
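// Usage sketch (illustrative, not taken from the surrounding code): wire the extension
// above into an HTTP endpoint and run the parsed query against the database. The
// endpoint shape, the "index" query-string parameter, the Database field and the
// WriteJson helper are assumptions made for this example only; the only calls taken
// from this section are GetIndexQueryFromHttpContext and DocumentDatabase.Query.
public void Respond(IHttpContext context)
{
    var indexName = context.Request.QueryString["index"];                    // hypothetical parameter
    var indexQuery = context.GetIndexQueryFromHttpContext(maxPageSize: 1024);
    var queryResult = Database.Query(indexName, indexQuery);                 // DocumentDatabase.Query, shown later in this section
    context.WriteJson(queryResult);                                          // hypothetical serialization helper
}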
private QueryResult ExecuteActualQuery(IndexQuery query, DynamicQueryMapping map, Tuple<string, bool> touchTemporaryIndexResult, string realQuery)
{
    // Run the query; when a temporary index is being used, retry while the results are
    // stale and smaller than a page, giving up after 15 seconds
    QueryResult result;
    var sp = Stopwatch.StartNew();
    while (true)
    {
        result = documentDatabase.Query(map.IndexName,
            new IndexQuery
            {
                Cutoff = query.Cutoff,
                PageSize = query.PageSize,
                Query = realQuery,
                Start = query.Start,
                FieldsToFetch = query.FieldsToFetch,
                GroupBy = query.GroupBy,
                AggregationOperation = query.AggregationOperation,
                SortedFields = query.SortedFields,
            });

        if (!touchTemporaryIndexResult.Item2 ||
            !result.IsStale ||
            result.Results.Count >= query.PageSize ||
            sp.Elapsed.TotalSeconds > 15)
        {
            return result;
        }

        Thread.Sleep(100);
    }
}
public JArray DeleteByIndex(string indexName, IndexQuery queryToDelete, bool allowStale)
{
    return PerformBulkOperation(indexName, queryToDelete, allowStale, (docId, tx) =>
    {
        database.Delete(docId, null, tx);
        return new { Document = docId, Deleted = true };
    });
}
public JArray UpdateByIndex(string indexName, IndexQuery queryToUpdate, PatchRequest[] patchRequests, bool allowStale)
{
    return PerformBulkOperation(indexName, queryToUpdate, allowStale, (docId, tx) =>
    {
        var patchResult = database.ApplyPatch(docId, null, patchRequests, tx);
        return new { Document = docId, Result = patchResult };
    });
}
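// Usage sketch (illustrative): delete every document matched by a query against a named
// index, refusing to proceed if the index is stale. The bulkOperations variable, the
// index name and the query text are assumptions made for this example only.
var deleted = bulkOperations.DeleteByIndex(
    "DocumentsByTag",
    new IndexQuery { Query = "Tag:Obsolete" },
    allowStale: false);
// Each element of the returned JArray describes one processed document,
// e.g. {"Document":"docs/1","Deleted":true}.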
/// <summary>
/// Initializes a new instance of the <see cref="SpatialIndexQuery"/> class.
/// </summary>
/// <param name="query">The query.</param>
public SpatialIndexQuery(IndexQuery query)
{
    Query = query.Query;
    Start = query.Start;
    Cutoff = query.Cutoff;
    PageSize = query.PageSize;
    FieldsToFetch = query.FieldsToFetch;
    SortedFields = query.SortedFields;
}
public void can_encode_and_decode_IndexQuery_CutOff()
{
    var expected = DateTime.UtcNow;
    var indexQuery = new IndexQuery();
    indexQuery.Cutoff = expected;

    IndexQuery result = EncodeAndDecodeIndexQuery(indexQuery);

    Assert.Equal(expected, result.Cutoff);
}
public void can_encode_and_decode_IndexQuery_Start()
{
    int expected = Some.Integer();
    var indexQuery = new IndexQuery();
    indexQuery.Start = expected;

    IndexQuery result = EncodeAndDecodeIndexQuery(indexQuery);

    Assert.Equal(expected, result.Start);
}
public void can_encode_and_decode_IndexQuery_PageSize()
{
    var expected = Some.Integer();
    var indexQuery = new IndexQuery();
    indexQuery.PageSize = expected;

    IndexQuery result = EncodeAndDecodeIndexQuery(indexQuery);

    Assert.Equal(expected, result.PageSize);
}
public void can_encode_and_decode_IndexQuery_Query_pound()
{
    var expected = Some.String() + '#' + Some.String();
    var indexQuery = new IndexQuery();
    indexQuery.Query = expected;

    IndexQuery result = EncodeAndDecodeIndexQuery(indexQuery);

    Assert.Equal(expected, result.Query);
}
public void can_encode_and_decode_IndexQuery_FieldsToFetch()
{
    var firstField = Some.String();
    var secondField = Some.String();
    var indexQuery = new IndexQuery();
    indexQuery.FieldsToFetch = new string[] { firstField, secondField };

    IndexQuery result = EncodeAndDecodeIndexQuery(indexQuery);

    Assert.Equal(2, result.FieldsToFetch.Length);
    Assert.Equal(firstField, result.FieldsToFetch[0]);
    Assert.Equal(secondField, result.FieldsToFetch[1]);
}
public QueryResult ExecuteDynamicQuery(string entityName, IndexQuery query)
{
    // Create the map
    var map = DynamicQueryMapping.Create(documentDatabase, query, entityName);
    var touchTemporaryIndexResult = GetAppropriateIndexToQuery(entityName, query, map);
    map.IndexName = touchTemporaryIndexResult.Item1;

    // Re-write the query
    string realQuery = map.Items.Aggregate(query.Query,
        (current, mapItem) => current.Replace(mapItem.QueryFrom, mapItem.To));

    return ExecuteActualQuery(query, map, touchTemporaryIndexResult, realQuery);
}
private Tuple<string, bool> GetAppropriateIndexToQuery(string entityName, IndexQuery query, DynamicQueryMapping map)
{
    var appropriateIndex = new DynamicQueryOptimizer(documentDatabase).SelectAppropriateIndex(entityName, query);
    if (appropriateIndex != null)
    {
        if (appropriateIndex.StartsWith("Temp/")) // temporary index, we need to increase its usage
        {
            return TouchTemporaryIndex(appropriateIndex, "Auto/" + appropriateIndex.Substring(5),
                () => documentDatabase.IndexDefinitionStorage.GetIndexDefinition(appropriateIndex));
        }
        return Tuple.Create(appropriateIndex, false);
    }
    return TouchTemporaryIndex(map.TemporaryIndexName, map.PermanentIndexName,
        map.CreateIndexDefinition);
}
private static TopDocs ExecuteQuery(IndexSearcher searcher, IndexQuery indexQuery, Query luceneQuery)
{
    if (indexQuery.PageSize == int.MaxValue) // we want all docs
    {
        var gatherAllCollector = new GatherAllCollector();
        searcher.Search(luceneQuery, gatherAllCollector);
        return gatherAllCollector.ToTopDocs();
    }

    // NOTE: We ask for Start + PageSize results back so we have something to page on
    if (indexQuery.SortedFields != null && indexQuery.SortedFields.Length > 0)
    {
        var sort = new Sort(indexQuery.SortedFields.Select(x => x.ToLuceneSortField()).ToArray());
        return searcher.Search(luceneQuery, null, indexQuery.PageSize + indexQuery.Start, sort);
    }

    return searcher.Search(luceneQuery, null, indexQuery.PageSize + indexQuery.Start);
}
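// Worked example (illustrative numbers): for Start = 20 and PageSize = 10, the calls above
// ask Lucene for the top 30 hits; the caller then reads hits 20..29 of that result to
// produce the third page. Requesting Start + PageSize hits is what makes the skip possible.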
public IEnumerable<IndexQueryResult> Query(IndexQuery indexQuery)
{
    using (searcher.Use())
    {
        var search = ExecuteQuery(searcher.Searcher, indexQuery, GetLuceneQuery(indexQuery));
        indexQuery.TotalSize.Value = search.totalHits;
        var previousDocuments = new HashSet<string>();
        for (var i = indexQuery.Start; i < search.totalHits && (i - indexQuery.Start) < indexQuery.PageSize; i++)
        {
            var document = searcher.Searcher.Doc(search.scoreDocs[i].doc);
            if (IsDuplicateDocument(document, indexQuery.FieldsToFetch, previousDocuments))
                continue;
            yield return RetrieveDocument(document, indexQuery.FieldsToFetch);
        }
    }
}
private JArray PerformBulkOperation(string index, IndexQuery indexQuery, bool allowStale, Func<string, TransactionInformation, object> batchOperation)
{
    var array = new JArray();
    var bulkIndexQuery = new IndexQuery
    {
        Query = indexQuery.Query,
        Start = indexQuery.Start,
        Cutoff = indexQuery.Cutoff,
        PageSize = int.MaxValue,
        FieldsToFetch = new[] { "__document_id" },
        SortedFields = indexQuery.SortedFields
    };

    bool stale;
    var queryResults = database.QueryDocumentIds(index, bulkIndexQuery, out stale);

    if (stale && allowStale == false)
    {
        throw new InvalidOperationException(
            "Bulk operation cancelled because the index is stale and allowStale is false");
    }

    // Process the matching documents in batches of 1024, each in its own storage
    // transaction, so a large result set is not handled inside one huge transaction
    var enumerator = queryResults.GetEnumerator();
    const int batchSize = 1024;
    while (true)
    {
        var batchCount = 0;
        database.TransactionalStorage.Batch(actions =>
        {
            while (batchCount < batchSize && enumerator.MoveNext())
            {
                batchCount++;
                var result = batchOperation(enumerator.Current, transactionInformation);
                array.Add(JObject.FromObject(result, new JsonSerializer
                {
                    Converters = { new JsonEnumConverter() }
                }));
            }
        });
        if (batchCount < batchSize) // a short batch means the enumerator is exhausted
            break;
    }
    return array;
}
public void can_encode_and_decode_IndexQuery_Query()
{
    // Fails when at least '&' is in the Query; not sure if that is acceptable.
    // Fails because the value has not been url decoded. I couldn't find code doing the url decode
    // after GetIndexQueryFromHttpContext(), so there may be another bug.
    //var expected = new string(Enumerable.Range(0, 255).Select(i => (char)i)
    //    .Where(c => !Char.IsControl(c)).ToArray());
    var expected = Some.String();
    var indexQuery = new IndexQuery();
    indexQuery.Query = expected;

    IndexQuery result = EncodeAndDecodeIndexQuery(indexQuery);

    Assert.Equal(expected, result.Query);
}
public IEnumerable<IndexQueryResult> Query(IndexQuery indexQuery, Func<IndexQueryResult, bool> shouldIncludeInResults)
{
    AssertQueryDoesNotContainFieldsThatAreNotIndexes(indexQuery.Query, indexQuery.SortedFields);
    IndexSearcher indexSearcher;
    using (searcher.Use(out indexSearcher))
    {
        var luceneQuery = GetLuceneQuery(indexQuery);
        var start = indexQuery.Start;
        var pageSize = indexQuery.PageSize;
        var returnedResults = 0;
        var skippedResultsInCurrentLoop = 0;
        do
        {
            if (skippedResultsInCurrentLoop > 0)
            {
                start = start + pageSize;
                // trying to guesstimate how many results we will need to read from the index
                // to get enough unique documents to match the page size
                pageSize = skippedResultsInCurrentLoop * indexQuery.PageSize;
                skippedResultsInCurrentLoop = 0;
            }
            var search = ExecuteQuery(indexSearcher, luceneQuery, start, pageSize, indexQuery);
            indexQuery.TotalSize.Value = search.totalHits;

            for (var i = start; i < search.totalHits && (i - start) < pageSize; i++)
            {
                var document = indexSearcher.Doc(search.scoreDocs[i].doc);
                var indexQueryResult = RetrieveDocument(document, indexQuery.FieldsToFetch);
                if (shouldIncludeInResults(indexQueryResult) == false)
                {
                    indexQuery.SkippedResults.Value++;
                    skippedResultsInCurrentLoop++;
                    continue;
                }

                returnedResults++;
                yield return indexQueryResult;
                if (returnedResults == indexQuery.PageSize)
                    yield break;
            }
        } while (skippedResultsInCurrentLoop > 0 && returnedResults < indexQuery.PageSize);
    }
}
public QueryResult ExecuteDynamicQuery(string entityName, IndexQuery query)
{
    // Create the map
    var map = DynamicQueryMapping.Create(query.Query, entityName);

    // Get the index name
    string indexName = FindDynamicIndexName(map);

    // Re-write the query
    string realQuery = map.Items.Aggregate(query.Query,
        (current, mapItem) => current.Replace(mapItem.From, mapItem.To));

    // Run the query, retrying while the results are stale and smaller than a page,
    // giving up after 15 seconds
    QueryResult result;
    var sp = Stopwatch.StartNew();
    while (true)
    {
        result = documentDatabase.Query(indexName,
            new IndexQuery
            {
                Cutoff = query.Cutoff,
                PageSize = query.PageSize,
                Query = realQuery,
                Start = query.Start,
                FieldsToFetch = query.FieldsToFetch,
                SortedFields = query.SortedFields,
            });

        if (!result.IsStale ||
            result.Results.Count >= query.PageSize ||
            sp.Elapsed.TotalSeconds > 15)
        {
            return result;
        }

        Thread.Sleep(100);
    }
}
public IEnumerable<IndexQueryResult> Query(IndexQuery indexQuery)
{
    IndexSearcher indexSearcher;
    using (searcher.Use(out indexSearcher))
    {
        var previousDocuments = new HashSet<string>();
        var luceneQuery = GetLuceneQuery(indexQuery);
        var start = indexQuery.Start;
        var pageSize = indexQuery.PageSize;
        var skippedDocs = 0;
        var returnedResults = 0;
        do
        {
            if (skippedDocs > 0)
            {
                start = start + pageSize;
                // trying to guesstimate how many results we will need to read from the index
                // to get enough unique documents to match the page size
                pageSize = skippedDocs * indexQuery.PageSize;
                skippedDocs = 0;
            }
            var search = ExecuteQuery(indexSearcher, luceneQuery, start, pageSize, indexQuery.SortedFields);
            indexQuery.TotalSize.Value = search.totalHits;

            for (var i = start; i < search.totalHits && (i - start) < pageSize; i++)
            {
                var document = indexSearcher.Doc(search.scoreDocs[i].doc);
                if (IsDuplicateDocument(document, indexQuery.FieldsToFetch, previousDocuments))
                {
                    skippedDocs++;
                    continue;
                }
                returnedResults++;
                yield return RetrieveDocument(document, indexQuery.FieldsToFetch);
            }
        } while (skippedDocs > 0 && returnedResults < indexQuery.PageSize);
    }
}
Task<DocumentViewModel[]> BuildQuery(int start, int pageSize)
{
    using (var session = server.OpenSession())
    {
        var indexName = CurrentIndex;
        var query = new IndexQuery
        {
            Start = start,
            PageSize = pageSize,
            Query = QueryTerms
        };

        return session.Advanced.AsyncDatabaseCommands
            .QueryAsync(indexName, query, null)
            .ContinueWith(x =>
            {
                QueryResults.GetTotalResults = () => x.Result.TotalResults;
                QueryResultsStatus = DetermineResultsStatus(x.Result);

                return x.Result.Results
                    .Select(obj => new DocumentViewModel(obj.ToJsonDocument()))
                    .ToArray();
            });
    }
}
public void Query(string index, IndexQuery query, string[] includes, CallbackFunction.Load<QueryResult> callback)
{
    this.Client.Query(index, query, includes, callback);
}
public ExecutingQueryInfo(DateTime startTime, IndexQuery queryInfo)
{
    StartTime = startTime;
    QueryInfo = queryInfo;
    stopwatch = Stopwatch.StartNew();
}
private void FindIndexName(DocumentDatabase database, DynamicQueryMapping map, IndexQuery query)
{
    var targetName = map.ForEntityName ?? "AllDocs";

    var combinedFields = String.Join("And",
        map.Items
            .OrderBy(x => x.To)
            .Select(x => x.To));
    var indexName = combinedFields;

    if (map.SortDescriptors != null && map.SortDescriptors.Length > 0)
    {
        indexName = string.Format("{0}SortBy{1}", indexName,
            String.Join("",
                map.SortDescriptors
                    .Select(x => x.Field)
                    .OrderBy(x => x)));
    }
    if (map.HighlightedFields != null && map.HighlightedFields.Length > 0)
    {
        indexName = string.Format("{0}Highlight{1}", indexName,
            string.Join("", map.HighlightedFields.OrderBy(x => x)));
    }

    string groupBy = null;

    if (database.Configuration.RunInUnreliableYetFastModeThatIsNotSuitableForProduction == false &&
        database.Configuration.RunInMemory == false)
    {
        indexName = IndexingUtil.FixupIndexName(indexName, database.Configuration.DataDirectory);
    }

    var permanentIndexName = indexName.Length == 0
        ? string.Format("Auto/{0}{1}", targetName, groupBy)
        : string.Format("Auto/{0}/By{1}{2}", targetName, indexName, groupBy);

    map.IndexName = permanentIndexName;
}
private static TopDocs ExecuteQuery(IndexSearcher searcher, IndexQuery indexQuery, Query luceneQuery)
{
    // NOTE: We ask for Start + PageSize results back so we have something to page on
    TopDocs search;
    if (indexQuery.SortedFields != null && indexQuery.SortedFields.Length > 0)
    {
        var sort = new Sort(indexQuery.SortedFields.Select(x => x.ToLuceneSortField()).ToArray());
        search = searcher.Search(luceneQuery, null, indexQuery.PageSize + indexQuery.Start, sort);
    }
    else
    {
        search = searcher.Search(luceneQuery, null, indexQuery.PageSize + indexQuery.Start);
    }
    return search;
}
public void can_encode_and_decode_IndexQuery_SortedFields()
{
    SortedField sf1 = new SortedField(Some.String()) { Field = "sf1", Descending = true };
    SortedField sf2 = new SortedField(Some.String()) { Field = "sf2", Descending = false };
    SortedField[] expected = new[] { sf1, sf2 };
    var indexQuery = new IndexQuery();
    indexQuery.SortedFields = expected;

    IndexQuery result = EncodeAndDecodeIndexQuery(indexQuery);

    Assert.Equal(2, result.SortedFields.Length);
    Assert.Equal("sf1", result.SortedFields[0].Field);
    Assert.Equal(true, result.SortedFields[0].Descending);
    Assert.Equal("sf2", result.SortedFields[1].Field);
    Assert.Equal(false, result.SortedFields[1].Descending);
}
public QueryResult Query(string index, IndexQuery query)
{
    var list = new List<JObject>();
    var stale = false;
    TransactionalStorage.Batch(
        actions =>
        {
            stale = actions.DoesTasksExistsForIndex(index);
            var indexFailureInformation = actions.GetFailureRate(index);
            if (indexFailureInformation.IsInvalidIndex)
            {
                throw new IndexDisabledException(indexFailureInformation);
            }
            var loadedIds = new HashSet<string>();
            var collection = from queryResult in IndexStorage.Query(index, query)
                             select RetrieveDocument(actions, queryResult, loadedIds)
                             into doc
                             where doc != null
                             select doc.ToJson();
            list.AddRange(collection);
            actions.Commit();
        });
    return new QueryResult
    {
        Results = list.ToArray(),
        IsStale = stale,
        TotalResults = query.TotalSize.Value
    };
}
/// <summary>
/// Begins the async query.
/// </summary>
/// <param name="index">The index.</param>
/// <param name="query">The query.</param>
/// <returns></returns>
public Task<QueryResult> QueryAsync(string index, IndexQuery query)
{
    EnsureIsNotNullOrEmpty(index, "index");
    var path = query.GetIndexQueryUrl(url, index, "indexes");
    var request = HttpJsonRequest.CreateHttpJsonRequest(this, path, "GET", credentials);

    return Task.Factory.FromAsync<string>(request.BeginReadResponseString, request.EndReadResponseString, null)
        .ContinueWith(task =>
        {
            JToken json;
            using (var reader = new JsonTextReader(new StringReader(task.Result)))
                json = (JToken)convention.CreateSerializer().Deserialize(reader);

            return new QueryResult
            {
                IsStale = Convert.ToBoolean(json["IsStale"].ToString()),
                IndexTimestamp = json.Value<DateTime>("IndexTimestamp"),
                Results = json["Results"].Children().Cast<JObject>().ToList(),
                TotalResults = Convert.ToInt32(json["TotalResults"].ToString()),
                SkippedResults = Convert.ToInt32(json["SkippedResults"].ToString())
            };
        });
}
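// Usage sketch (illustrative): issue the query asynchronously and inspect the result.
// The commands variable, the index name and the query text are assumptions made for
// this example only; the QueryAsync signature and the QueryResult fields used here
// come from the method above.
commands.QueryAsync("DocumentsByTag", new IndexQuery { Query = "Tag:Obsolete", PageSize = 25 })
    .ContinueWith(task =>
    {
        var queryResult = task.Result;
        if (queryResult.IsStale)
            Console.WriteLine("Results may be out of date");
        Console.WriteLine("{0} of {1} results", queryResult.Results.Count, queryResult.TotalResults);
    });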
public QueryResult Query(string index, IndexQuery query)
{
    var list = new List<JObject>();
    var stale = false;
    TransactionalStorage.Batch(
        actions =>
        {
            stale = actions.Tasks.DoesTasksExistsForIndex(index, query.Cutoff);
            var indexFailureInformation = actions.Indexing.GetFailureRate(index);
            if (indexFailureInformation.IsInvalidIndex)
            {
                throw new IndexDisabledException(indexFailureInformation);
            }
            var loadedIds = new HashSet<string>();
            var collection = from queryResult in IndexStorage.Query(index, query)
                             select RetrieveDocument(actions, queryResult, loadedIds)
                             into doc
                             let processedDoc = ExecuteReadTriggersOnRead(ProcessReadVetoes(doc, null, ReadOperation.Query), null, ReadOperation.Query)
                             where processedDoc != null
                             select processedDoc.ToJson();
            list.AddRange(collection);
        });
    return new QueryResult
    {
        Results = list.ToArray(),
        IsStale = stale,
        TotalResults = query.TotalSize.Value
    };
}
public IEnumerable<string> QueryDocumentIds(string index, IndexQuery query, out bool stale)
{
    bool isStale = false;
    HashSet<string> loadedIds = null;
    TransactionalStorage.Batch(
        actions =>
        {
            isStale = actions.Tasks.DoesTasksExistsForIndex(index, query.Cutoff);
            var indexFailureInformation = actions.Indexing.GetFailureRate(index);
            if (indexFailureInformation.IsInvalidIndex)
            {
                throw new IndexDisabledException(indexFailureInformation);
            }
            loadedIds = new HashSet<string>(from queryResult in IndexStorage.Query(index, query)
                                            select queryResult.Key);
        });
    stale = isStale;
    return loadedIds;
}
private void FindIndexName(DocumentDatabase database, DynamicQueryMapping map, IndexQuery query)
{
    var targetName = map.ForEntityName ?? "AllDocs";

    var combinedFields = String.Join("And",
        map.Items
            .OrderBy(x => x.To)
            .Select(x => x.To));
    var indexName = combinedFields;

    if (map.SortDescriptors != null && map.SortDescriptors.Length > 0)
    {
        indexName = string.Format("{0}SortBy{1}", indexName,
            String.Join("",
                map.SortDescriptors
                    .Select(x => x.Field)
                    .OrderBy(x => x)));
    }
    if (map.HighlightedFields != null && map.HighlightedFields.Length > 0)
    {
        indexName = string.Format("{0}Highlight{1}", indexName,
            string.Join("", map.HighlightedFields.OrderBy(x => x)));
    }

    string groupBy = null;

    if (AggregationOperation != AggregationOperation.None)
    {
        if (query.GroupBy != null && query.GroupBy.Length > 0)
        {
            groupBy += "/" + AggregationOperation + "By" + string.Join("And", query.GroupBy);
        }
        else
        {
            groupBy += "/" + AggregationOperation;
        }
        if (DynamicAggregation)
        {
            groupBy += "Dynamically";
        }
    }

    if (database.Configuration.RunInUnreliableYetFastModeThatIsNotSuitableForProduction == false &&
        database.Configuration.RunInMemory == false)
    {
        // Hash the name if it's too long (as a path)
        if ((database.Configuration.DataDirectory.Length + indexName.Length) > 230)
        {
            using (var sha256 = SHA256.Create())
            {
                var bytes = sha256.ComputeHash(Encoding.UTF8.GetBytes(indexName));
                indexName = Convert.ToBase64String(bytes);
            }
        }
    }

    var permanentIndexName = indexName.Length == 0
        ? string.Format("Auto/{0}{1}", targetName, groupBy)
        : string.Format("Auto/{0}/By{1}{2}", targetName, indexName, groupBy);

    var temporaryIndexName = indexName.Length == 0
        ? string.Format("Temp/{0}{1}", targetName, groupBy)
        : string.Format("Temp/{0}/By{1}{2}", targetName, indexName, groupBy);

    map.IndexName = permanentIndexName;
}
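// Worked example (illustrative values, not from the code above): for a dynamic query over
// the "Users" entity whose mapping items target the fields Age and Name, with a sort on Age
// and no aggregation, the formatting above yields:
//   permanentIndexName == "Auto/Users/ByAgeAndNameSortByAge"
//   temporaryIndexName == "Temp/Users/ByAgeAndNameSortByAge"
// Fields are joined alphabetically by their target name, hence "AgeAndName"; a Count
// aggregation grouped by Region would further append "/CountByRegion".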
public void AddExistingIndexDefinition(IndexDefinition indexDefinition, DocumentDatabase database, IndexQuery query)
{
    var abstractViewGenerator = database.IndexDefinitionStorage.GetViewGenerator(indexDefinition.Name);
    if (abstractViewGenerator == null)
    {
        return; // No biggie, it just means we'll have two small indexes and we'll do this again later
    }

    this.Items = this.Items.Union(
        abstractViewGenerator.Fields
            .Where(field => this.Items.All(item => item.To != field) && !field.StartsWith("__"))
            .Select(field => new DynamicQueryMappingItem()
            {
                From = field,
                To = ReplaceInvalidCharactersForFields(field),
                QueryFrom = EscapeParentheses(field)
            })
    ).ToArray();

    this.SortDescriptors = this.SortDescriptors.Union(
        indexDefinition.SortOptions
            .Where(option => this.SortDescriptors.All(desc => desc.Field != option.Key))
            .Select(option => new DynamicSortInfo()
            {
                Field = option.Key,
                FieldType = option.Value
            })
    ).ToArray();

    foreach (var fieldStorage in abstractViewGenerator.Stores)
    {
        KeyValuePair<string, FieldStorage> storage = fieldStorage;
        extraActionsToPerform.Add(def => def.Stores[storage.Key] = storage.Value);
    }

    foreach (var fieldIndex in abstractViewGenerator.Indexes)
    {
        KeyValuePair<string, FieldIndexing> index = fieldIndex;
        extraActionsToPerform.Add(def => def.Indexes[index.Key] = index.Value);
    }

    foreach (var fieldTermVector in abstractViewGenerator.TermVectors)
    {
        KeyValuePair<string, FieldTermVector> vector = fieldTermVector;
        extraActionsToPerform.Add(def => def.TermVectors[vector.Key] = vector.Value);
    }

    this.FindIndexName(database, this, query);
}
private static IndexQuery EncodeAndDecodeIndexQuery(IndexQuery query)
{
    string indexQueryUrl = query.GetIndexQueryUrl(Some.String(), Some.String(), Some.String());

    // indexQueryUrl is in the form "/path?querystring#anchor"
    string indexQueryQuerystring = indexQueryUrl.Substring(indexQueryUrl.IndexOf("?") + 1);
    int indexOfHash = indexQueryQuerystring.IndexOf('#');
    if (indexOfHash != -1)
    {
        indexQueryQuerystring = indexQueryQuerystring.Substring(0, indexOfHash);
    }

    IHttpRequest request = MockRepository.GenerateStub<IHttpRequest>();
    IHttpContext context = MockRepository.GenerateMock<IHttpContext>();
    context.Stub(c => c.Request).Return(request);
    request.Stub(r => r.QueryString).Return(HttpUtility.ParseQueryString(indexQueryQuerystring));

    return context.GetIndexQueryFromHttpContext(1024);
}
private Query GetLuceneQuery(IndexQuery indexQuery)
{
    var query = indexQuery.Query;
    Query luceneQuery;
    if (string.IsNullOrEmpty(query))
    {
        log.DebugFormat("Issuing query on index {0} for all documents", name);
        luceneQuery = new MatchAllDocsQuery();
    }
    else
    {
        log.DebugFormat("Issuing query on index {0} for: {1}", name, query);
        luceneQuery = QueryBuilder.BuildQuery(query);
    }
    return luceneQuery;
}