public override int DoHighlight(IndexReader reader, int doc, string field, Document document, Analyzer analyzer, string text)
{
    TokenStream ts = TokenSources.GetAnyTokenStream(reader, doc, field, document, analyzer);

    TextFragment[] frag = highlighter.GetBestTextFragments(ts, text, outerInstance.m_mergeContiguous, outerInstance.m_maxFrags);
    return frag != null ? frag.Length : 0;
}
Example #2
        // Gets highlighted fragments for a single search hit.

        public static string[] TextHighlighter(Query query, string text, StandardAnalyzer analys, IndexSearcher searcher, ScoreDoc doc)
        {
            QueryScorer         scorer      = new QueryScorer(query);
            SimpleHTMLFormatter formatter   = new SimpleHTMLFormatter("<b>", "</b>");
            Highlighter         highlighter = new Highlighter(formatter, scorer);
            TokenStream         tokenStream = TokenSources.GetAnyTokenStream(searcher.IndexReader, doc.Doc, "Contents", analys);

            string[] frags = highlighter.GetBestFragments(tokenStream, text, 3);
            return frags;
        }
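A minimal calling sketch for the helper above, assuming Lucene.Net 4.8, an already opened IndexSearcher named searcher, and an index that stores its text in the "Contents" field used by the method; the query string is illustrative only:

        // Sketch: run a query, then highlight each hit with the helper above.
        var analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48);
        var parser   = new QueryParser(LuceneVersion.LUCENE_48, "Contents", analyzer);
        Query query  = parser.Parse("token stream");

        TopDocs hits = searcher.Search(query, 10);
        foreach (ScoreDoc scoreDoc in hits.ScoreDocs)
        {
            string   text  = searcher.Doc(scoreDoc.Doc).Get("Contents");
            string[] frags = TextHighlighter(query, text, analyzer, searcher, scoreDoc);
            // frags holds up to three fragments with matches wrapped in <b>...</b>.
        }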
Example #3
        private static string HighlightText(int id, string field, Highlighter highlighter, IndexSearcher searcher, Analyzer analyzer)
        {
            Lucene.Net.Documents.Document doc = searcher.Doc(id);
            var text = doc.Get(field);

            if (text == null)
            {
                return string.Empty;
            }
            TokenStream tokenStream = TokenSources.GetAnyTokenStream(searcher.IndexReader, id, field, analyzer);

            return highlighter.GetBestFragments(tokenStream, text, 2, "...");
        }
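A sketch of how the Highlighter argument for the method above might be assembled; the <em> tags, the 100-character fragment size, and the "Contents" field name are assumptions, and query, scoreDoc, searcher, and analyzer are expected to come from the surrounding search code:

        QueryScorer scorer      = new QueryScorer(query);
        Highlighter highlighter = new Highlighter(new SimpleHTMLFormatter("<em>", "</em>"), scorer)
        {
            // SimpleSpanFragmenter cuts fragments of roughly 100 characters around the matching spans.
            TextFragmenter = new SimpleSpanFragmenter(scorer, 100)
        };
        string snippet = HighlightText(scoreDoc.Doc, "Contents", highlighter, searcher, analyzer);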
Example #4
        private string Highlight(int numId, string pattern, string html)
        {
            if (!string.IsNullOrWhiteSpace(pattern))
            {
                using (Analyzer analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48))
                    using (Lucene.Net.Store.Directory index = new SimpleFSDirectory(Path.ChangeExtension(_bookFile.FullName,
                                                                                                         Convert.ToInt32(LuceneVersion.LUCENE_48).ToString())))
                        using (IndexReader reader = DirectoryReader.Open(index))
                        {
                            Lucene.Net.Search.IndexSearcher searcher = new Lucene.Net.Search.IndexSearcher(reader);
                            Lucene.Net.Search.TopDocs       docs     = searcher.Search(
                                Lucene.Net.Search.NumericRangeQuery.NewInt32Range(nameof(TabHtmlText.NumId), numId, numId, true,
                                                                                  true), 1);

                            int docId = docs.ScoreDocs.First().Doc;

                            QueryScorer scorer =
                                new QueryScorer(new QueryParser(LuceneVersion.LUCENE_48, nameof(TabHtmlText.Html), analyzer)
                                                .Parse(pattern));
                            Highlighter highlighter =
                                new Highlighter(new SimpleHTMLFormatter("<span style=\"background-color: yellow\">", "</span>"),
                                                scorer)
                            {
                                TextFragmenter = new NullFragmenter()
                            };

                            using (TokenStream stream =
                                       TokenSources.GetAnyTokenStream(reader, docId, nameof(TabHtmlText.Html), analyzer))
                            {
                                return highlighter.GetBestFragment(stream, html);
                            }
                        }
            }

            return html;
        }
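For context, a hypothetical caller of the method above; GetChapterHtml and the NumId value 42 are assumptions. Because the highlighter uses NullFragmenter, the whole HTML document comes back with every match wrapped in the highlight span rather than as a short excerpt:

        string rawHtml     = GetChapterHtml(42);                // assumed helper that loads the stored HTML
        string highlighted = Highlight(42, "harpoon", rawHtml); // every hit on "harpoon" is wrapped in the yellow span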
Example #5
        public ReadOnlyHitCollection Search(string searchExpression, Model.Filter filter = null, string sortField = null, int page = 1, int hitsPerPage = 10)
        {
            if (string.IsNullOrEmpty(searchExpression))
            {
                throw new SearchException("Must have searchExpression");
            }
            string defaultFieldName = Fields.Content;
            var    analyzer         = AnalyzerBuilder.CreateAnalyzer();
            Query  query;

            try
            {
                query = new QueryParser(Version.LUCENE_30, defaultFieldName, analyzer).Parse(searchExpression.ToLower());
            }
            catch (ParseException ex)
            {
                throw new SearchException(string.Format("Sorry, '{0}' isn't something we can search for so far.", searchExpression), ex);
            }

            var indexDirectory = new SimpleFSDirectory(new DirectoryInfo(_configuration.IndexPath));

            List<Hit> onePageOfHits;
            int       totalHits;

            using (var reader = IndexReader.Open(indexDirectory, true))
            {
                //  Get one page of hits
                var hits     = new List<Hit>();
                var searcher = new IndexSearcher(reader);

                var termsFilter = filter != null && !string.IsNullOrEmpty(filter.Field)
                    ? new FieldCacheTermsFilter(filter.Field, filter.Terms.ToArray())
                    : null;
                var sort = !string.IsNullOrEmpty(sortField)
                    ? new Sort(new SortField(sortField, SortField.STRING))
                    : Sort.RELEVANCE;

                ScoreDoc[] scoreDocs = searcher.Search(query, termsFilter, MaxNumberOfHits, sort).ScoreDocs;
                totalHits = scoreDocs.Length;

                foreach (var scoreDoc in scoreDocs)
                {
                    int    docId    = scoreDoc.Doc;
                    string filePath = searcher.Doc(docId).Get(Fields.Path);
                    string language = searcher.Doc(docId).Get(Fields.Language);
                    var    hit      = new Hit(docId, _configuration.ContentRootPath, filePath, scoreDoc.Score, language);
                    hits.Add(hit);
                }

                onePageOfHits = hits.GetPage(page, hitsPerPage).ToList();

                // Get offsets and highlights for the page we are going to return
                foreach (var hit in onePageOfHits)
                {
                    var primitiveQuery = query.Rewrite(reader);
                    var terms          = new HashSet<Term>();
                    primitiveQuery.ExtractTerms(terms);
                    string searchField = string.Empty;
                    if (terms.Count == 0)
                    {
                        // There can be all kinds of queries (e.g. prefix queries yield no extracted terms)
                        var prefixQuery = query as PrefixQuery;
                        if (prefixQuery != null)
                        {
                            searchField    = prefixQuery.Prefix.Field;
                            primitiveQuery = prefixQuery;
                        }
                    }
                    else
                    {
                        // TODO: There can be multiple term fields, like code: and method:
                        searchField = terms.First().Field;
                    }

                    var termFreqVector     = reader.GetTermFreqVector(hit.DocId, Fields.Content);
                    var termPositionVector = termFreqVector as TermPositionVector;
                    if (termFreqVector == null || termPositionVector == null)
                    {
                        throw new ArgumentException("Must have term frequency and position vectors");
                    }

                    // No offsets for prefix and other non-term based queries
                    const int maxOffsetNumber = 10;
                    foreach (var term in terms)
                    {
                        int termIndex = termFreqVector.IndexOf(term.Text); // Index of the term in the vector; -1 if this document does not contain it
                        if (termIndex != -1)
                        {
                            foreach (var offset in termPositionVector.GetOffsets(termIndex))
                            {
                                if (hit.Offsets.Count < maxOffsetNumber)
                                {
                                    hit.Offsets.Add(new Offset
                                    {
                                        StartOffset = offset.StartOffset,
                                        EndOffset   = offset.EndOffset
                                    });
                                }
                            }
                        }
                    }

                    // Highlighter from contrib package
                    var tokenStream = TokenSources.GetTokenStream(termPositionVector);
                    var scorer      = new QueryScorer(primitiveQuery, searchField);
                    var fragmenter  = new SimpleSpanFragmenter(scorer);
                    var formatter   = new SimpleHTMLFormatter("<kbd>", "</kbd>");
                    var highlighter = new Highlighter(formatter, scorer)
                    {
                        TextFragmenter = fragmenter
                    };

                    string text;
                    using (var sr = new StreamReader(hit.FilePath))
                    {
                        text = sr.ReadToEnd();
                    }
                    string bestFragment = highlighter.GetBestFragment(tokenStream, text);
                    if (!string.IsNullOrEmpty(bestFragment))
                    {
                        hit.BestFragment = EscapeHtmlMarkup(bestFragment);
                    }
                }
            }

            return new ReadOnlyHitCollection(onePageOfHits, totalHits);
        }
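A hedged usage sketch for the Search method above; the searchService variable and the way Model.Filter is constructed are assumptions inferred from how the method reads its arguments, and ReadOnlyHitCollection is assumed to be enumerable:

        var results = searchService.Search(
            "tokenstream",
            filter: new Model.Filter { Field = Fields.Language, Terms = new[] { "csharp" } },
            sortField: Fields.Path,
            page: 1,
            hitsPerPage: 10);

        foreach (var hit in results)
        {
            Console.WriteLine($"{hit.FilePath}: {hit.BestFragment}");
        }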
Example #6
        public async Task ExecuteTask(Guid taskId, CancellationToken cancellationToken)
        {
            // Prepare record
            _logger.LogInformation("Retrieving task {TaskId}", taskId);
            var task = await _rekeyingTasks.GetOne(taskId, cancellationToken);

            task.RekeyingInProgress = true;

            // Create task to perform regular updates to UI (every 15s)
            _logger.LogInformation("Starting log update task");
            var logUpdateCancellationTokenSource = new CancellationTokenSource();
            var logUpdateTask = Task.Run(async () =>
            {
                while (task.RekeyingInProgress)
                {
                    await Task.Delay(15 * 1000);
                    await _rekeyingTasks.Update(task, cancellationToken);
                }
            }, logUpdateCancellationTokenSource.Token);

            // Retrieve the secret configuration and its resources
            var secret = await _managedSecrets.GetOne(task.ManagedSecretId, cancellationToken);

            _logger.LogInformation("Retrieving resources for secret {SecretId}", secret.ObjectId);
            var resources = await _resources.Get(r => secret.ResourceIds.Contains(r.ObjectId), cancellationToken);

            var workflowCollection = await _authJanitorService.ExecuteAsync(
                secret.ValidPeriod,
                async (pwac) =>
            {
                if (!task.Attempts.Any(a => a.StartedExecution == pwac.StartedExecution))
                {
                    task.Attempts.Add(pwac);
                    await _rekeyingTasks.Update(task, cancellationToken);
                }
            },
                resources.Select(r =>
            {
                // Note: this TokenSources is AuthJanitor's token-source enum, not Lucene's TokenSources helper class.
                TokenSources tokenSource = TokenSources.Unknown;
                string tokenParameter = string.Empty;
                switch (secret.TaskConfirmationStrategies)
                {
                case TaskConfirmationStrategies.AdminSignsOffJustInTime:
                    tokenSource = TokenSources.OBO;
                    break;

                case TaskConfirmationStrategies.AdminCachesSignOff:
                    tokenSource = TokenSources.Persisted;
                    tokenParameter = task.PersistedCredentialId.ToString();
                    break;

                case TaskConfirmationStrategies.AutomaticRekeyingAsNeeded:
                case TaskConfirmationStrategies.AutomaticRekeyingScheduled:
                case TaskConfirmationStrategies.ExternalSignal:
                    tokenSource = TokenSources.ServicePrincipal;
                    break;
                }
                return new ProviderExecutionParameters()
                {
                    ProviderType = r.ProviderType,
                    ProviderConfiguration = r.ProviderConfiguration,
                    AgentId = secret.ExecutingAgentId,
                    TokenSource = tokenSource,
                    TokenParameter = tokenParameter
                };
            }).ToArray());

            // Update Task record
            _logger.LogInformation("Completing task record");
            task.RekeyingInProgress = false;
            task.RekeyingCompleted  = (workflowCollection?.HasBeenExecutedSuccessfully).GetValueOrDefault();
            await _rekeyingTasks.Update(task, cancellationToken);

            if (workflowCollection.HasBeenExecutedSuccessfully)
            {
                if (task.ConfirmationType.UsesOBOTokens())
                {
                    await _eventDispatcherService.DispatchEvent(AuthJanitorSystemEvents.RotationTaskCompletedManually, nameof(TaskExecutionMetaService.ExecuteTask), task);
                }
                else
                {
                    await _eventDispatcherService.DispatchEvent(AuthJanitorSystemEvents.RotationTaskCompletedAutomatically, nameof(TaskExecutionMetaService.ExecuteTask), task);
                }
            }
            else
            {
                await _eventDispatcherService.DispatchEvent(AuthJanitorSystemEvents.RotationTaskAttemptFailed, nameof(TaskExecutionMetaService.ExecuteTask), task);
            }
        }