private void MetaMenuItem_Click(object sender, RoutedEventArgs e)
{
    System.Windows.Controls.MenuItem tv = sender as System.Windows.Controls.MenuItem;
    FileViewModel fileTopic = tv.DataContext as FileViewModel;
    var results = new Dictionary<string, object>();

    if (fileTopic.LogFile != null)
    {
        string tmpStr = string.Empty;

        foreach (Type analyzerType in PluginFactory.FindAnalyzers(fileTopic.LogFile.GetType()))
        {
            IAnalyzer analyzer = PluginFactory.CreateAnalyzer(analyzerType);

            foreach (var result in analyzer.Analyze(fileTopic.LogFile))
            {
                results.Add(result.Key, result.Value);
            }
        }

        foreach (var item in results)
        {
            tmpStr += item.Key + ": " + item.Value.ToString() + Environment.NewLine;
        }

        MessageBox.Show(tmpStr, Properties.Resources.computedDataCaption);
    }
}
/// <summary>
/// Reads and reflects the given VB Classic text file into a usable form.
/// </summary>
/// <param name="partitionedFile">An instance of <see cref="VbPartitionedFile"/> representing the VB Classic module to reflect.</param>
/// <returns>The reflected module.</returns>
/// <exception cref="ArgumentNullException"><paramref name="partitionedFile"/> was null.</exception>
/// <exception cref="InvalidOperationException">No analyzer was considered fitting for the underlying file.</exception>
public static IVbModule GetReflectedModule(VbPartitionedFile partitionedFile)
{
    if (partitionedFile == null)
    {
        throw new ArgumentNullException("partitionedFile");
    }

    if (Tokenizer == null)
    {
        throw new InvalidOperationException("No tokenizer defined for analyzing the file!");
    }

    IReadOnlyList<IToken> tokens = Tokenizer.GetTokens(partitionedFile.GetMergedContent());
    TokenStreamReader reader = new TokenStreamReader(tokens);

    IAnalyzer analyzer;
    if (!AnalyzerFactory.TryGetAnalyzerForFile(reader, out analyzer))
    {
        // TODO: Dedicated exception for this.
        throw new InvalidOperationException("Could not analyze the given file!");
    }

    reader.Rewind();
    return analyzer.Analyze(reader);
}
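A minimal call-site sketch for GetReflectedModule. How a VbPartitionedFile is constructed is not shown above, so the FromFile factory, the VbReflector host class, and the Name property below are assumptions:

// Hypothetical call site; VbReflector, VbPartitionedFile.FromFile and module.Name are
// assumed names -- only GetReflectedModule itself comes from the code above.
public static void PrintModuleName(string path)
{
    VbPartitionedFile partitioned = VbPartitionedFile.FromFile(path); // assumed factory
    IVbModule module = VbReflector.GetReflectedModule(partitioned);
    Console.WriteLine(module.Name); // assumed property
}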
private async Task UploadLoop()
{
    while (await processingQueue.OutputAvailableAsync())
    {
        try
        {
            var file = await processingQueue.TakeAsync();
            file.UploadStatus = UploadStatus.InProgress;

            // test if replay is eligible for upload (not AI, PTR, Custom, etc)
            var replay = _analyzer.Analyze(file);
            if (file.UploadStatus == UploadStatus.InProgress)
            {
                // if it is, upload it
                await _uploader.Upload(replay, file);
            }

            SaveReplayList();

            if (ShouldDelete(file, replay))
            {
                DeleteReplay(file);
            }
        }
        catch (Exception ex)
        {
            _log.Error(ex, "Error in upload loop");
        }
    }
}
public async Task<IActionResult> QueueAnalyzes(
    Guid mrRecordId,
    [FromBody] QueueAnalyzesRequest request,
    [FromServices] IAnalyzer analyzer)
{
    var targetRecord = await dbContext.MrRecords.SingleOrDefaultAsync(r => r.Id == mrRecordId);
    if (targetRecord == null)
    {
        return NotFound();
    }

    var algs = await dbContext.MrAlgorithms
        .Where(a => request.Algorithms.Contains(a.Id))
        .ToListAsync();
    if (algs.Count != request.Algorithms.Count)
    {
        return NotFound();
    }

    var analyzes = algs.Select(a => new MrAnalyze
    {
        MrAlgorithm = a,
        MrRecord = targetRecord,
        Status = MrAnalyzeStatus.InQueue
    }).ToList();

    dbContext.AddRange(analyzes);
    await dbContext.SaveChangesAsync();

    await Task.WhenAll(analyzes.Select(a => analyzer.Analyze(a)));
    return Ok();
}
private void DoRemove(string docId)
{
    foreach (var field in _ix.Fields.Keys)
    {
        if (!_ix.Fields[field].ContainsKey(docId))
        {
            continue;
        }

        _ix.Fields[field].Remove(docId);

        var containerId = docId.ToDocContainerId();
        var containerFileName = Path.Combine(_directory, containerId + ".dc");

        DocContainer container;
        if (!_docContainers.TryGetValue(containerId, out container))
        {
            container = new DocContainer(_directory, containerId);
            _docContainers[containerId] = container;
        }

        if (File.Exists(containerFileName))
        {
            Document doc;
            if (container.TryGet(docId, out doc))
            {
                container.Remove(docId);

                // Fields prefixed with '_' are stored verbatim; everything else was analyzed.
                IEnumerable<string> tokens;
                if (field[0] == '_')
                {
                    tokens = new[] { doc.Fields[field] };
                }
                else
                {
                    tokens = _analyzer.Analyze(doc.Fields[field]);
                }

                foreach (var token in tokens)
                {
                    var fieldTokenId = string.Format("{0}.{1}", field, token);
                    var postingsFile = GetPostingsFile(field, token);
                    postingsFile.Postings.Remove(docId);

                    if (postingsFile.NumDocs() == 0)
                    {
                        var pbucketId = field.ToPostingsContainerId();
                        var pContainer = _postingsContainers[pbucketId];
                        pContainer.Remove(token);
                        _postingsFiles.Remove(fieldTokenId);

                        var trie = GetTrie(field);
                        trie.Remove(token);
                    }
                    else
                    {
                        _postingsWorker.Enqueue(postingsFile);
                    }
                }
            }
        }
    }
}
public IEnumerable<Term> Parse(string query)
{
    var termCount = 0;

    foreach (var term in query.Split(' '))
    {
        var segments = term.Split(':');
        var field = segments[0];
        var value = segments[1];

        var and = false;
        var not = false;
        var prefix = false;
        var fuzzy = false;

        if (0 == termCount++)
        {
            and = true;
        }

        if (field[0] == '+')
        {
            field = new string(field.Skip(1).ToArray());
            and = true;
        }
        else if (field[0] == '-')
        {
            field = new string(field.Skip(1).ToArray());
            not = true;
        }

        if (value[value.Length - 1] == '*')
        {
            value = new string(value.Take(value.Length - 1).ToArray());
            prefix = true;
        }
        else if (value[value.Length - 1] == '~')
        {
            value = new string(value.Take(value.Length - 1).ToArray());
            fuzzy = true;
        }

        foreach (var token in _analyzer.Analyze(value))
        {
            yield return new Term
            {
                Field = field,
                Token = token,
                And = and,
                Not = not,
                Prefix = prefix,
                Fuzzy = fuzzy
            };
        }
    }
}
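A sketch of the grammar the parser above implies: '+'/'-' field prefixes set And/Not, '*'/'~' value suffixes set Prefix/Fuzzy, and the first term is always inclusive. The QueryParser host name and SimpleAnalyzer are assumptions:

// Hypothetical call site for Parse above; QueryParser and SimpleAnalyzer are assumed names.
var parser = new QueryParser(new SimpleAnalyzer());
foreach (Term t in parser.Parse("title:rambo +year:1982* -genre:drama~"))
{
    Console.WriteLine("{0}:{1} and={2} not={3} prefix={4} fuzzy={5}",
        t.Field, t.Token, t.And, t.Not, t.Prefix, t.Fuzzy);
}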
private IEnumerable<string> GetTokens(Field field)
{
    if (!field.Flags.HasFlag(FieldFlags.Analyzed))
    {
        return new[] { ReadFieldValue(field) };
    }

    return _analyzer.Analyze(field.OpenReader);
}

IEnumerable<long> IQueryRunner.Search(string fieldName, string value)
{
    var result = new HashSet<long>();

    foreach (var token in AnalyzeQuery(value))
    {
        var postings = _store.GetPostings(fieldName, token);
        result.UnionWith(postings);
    }

    return result;
}
private QueryContext CreateTerm(string field, IEnumerable<string> words, int position)
{
    var analyze = field[0] != '_';
    QueryContext qc = null;

    foreach (var word in words)
    {
        if (analyze)
        {
            var tokenOperator = word.Last();
            var analyzable = word;

            if (tokenOperator == '~' || tokenOperator == '*')
            {
                analyzable = word.Substring(0, word.Length - 1);
            }

            var analyzed = _analyzer.Analyze(analyzable).ToArray();

            foreach (string token in analyzed)
            {
                if (qc == null)
                {
                    qc = Parse(field, token, tokenOperator, position);
                }
                else
                {
                    var q = Parse(field, token, tokenOperator, position);
                    q.And = false;
                    q.Not = false;
                    qc.Children.Add(q);
                }
            }
        }
        else
        {
            if (qc == null)
            {
                qc = Parse(field, word);
            }
            else
            {
                var q = Parse(field, word);
                q.And = false;
                q.Not = false;
                qc.Children.Add(q);
            }
        }
    }

    return qc;
}
public void Write(Document doc)
{
    foreach (var field in doc.Fields)
    {
        int fieldId;
        if (!_fieldIndex.TryGetValue(field.Key, out fieldId))
        {
            fieldId = GetNextFreeFieldId();
            _fieldIndex.Add(field.Key, fieldId);
        }

        FieldFile ff;
        if (!_fieldFiles.TryGetValue(fieldId, out ff))
        {
            var fileName = Path.Combine(_directory, fieldId + ".fld");
            ff = new FieldFile(fileName);
            _fieldFiles.Add(fieldId, ff);
        }

        var docTokensAndTheirPositions = new Dictionary<string, List<int>>();

        foreach (var value in field.Value)
        {
            _docFile.Write(doc.Id, field.Key, value);

            var tokens = _analyzer.Analyze(value);

            for (int position = 0; position < tokens.Length; position++)
            {
                var token = tokens[position];

                List<int> positions;
                if (!docTokensAndTheirPositions.TryGetValue(token, out positions))
                {
                    positions = new List<int>();
                    docTokensAndTheirPositions.Add(token, positions);
                }
                positions.Add(position);
            }
        }

        foreach (var tokenAndItsPositions in docTokensAndTheirPositions)
        {
            foreach (var position in tokenAndItsPositions.Value)
            {
                ff.Write(doc.Id, tokenAndItsPositions.Key, position);
            }
        }
    }
}
public void Eval(string field, string value, IAnalyzer analyzer, Dictionary<string, object> postingData)
{
    // Fields prefixed with '_' are indexed verbatim; everything else is analyzed.
    var analyze = field[0] != '_';
    var tokens = analyze ? analyzer.Analyze(value) : new[] { value };

    foreach (var token in tokens)
    {
        if (postingData.ContainsKey(token))
        {
            postingData[token] = (int)postingData[token] + 1;
        }
        else
        {
            postingData.Add(token, 1);
        }
    }
}
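Eval above accumulates raw term frequencies into postingData. A sketch of the result, assuming an IAnalyzer named analyzer is in scope and lowercases and splits on whitespace, and assuming the hypothetical TermCount host type:

// Hypothetical call site; TermCount and the analyzer behavior are assumptions.
var postingData = new Dictionary<string, object>();
new TermCount().Eval("body", "to be or not to be", analyzer, postingData);
// Under the assumed analyzer, postingData now holds
// { "to" = 2, "be" = 2, "or" = 1, "not" = 1 }, each count boxed as an int.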
private void AnalyzeThread()
{
    string path = null;
    txtPath.Dispatcher.Invoke(new Action(() => { path = txtPath.Text; }));

    StringBuilder sb = new StringBuilder();
    DirectoryInfo dir = new DirectoryInfo(path);

    if (dir.Exists)
    {
        Dispatcher.BeginInvoke(new Action(() =>
        {
            this.Cursor = Cursors.Wait;
            lblStatus.Content = "Analyzing folders...";
            btnAnalyze.IsEnabled = false;
            btnGo.IsEnabled = false;
        }));

        _analyzer = new DefaultAnalyzer(dir);
        _analyzer.FileAnalyzed += analyzer_FileAnalyzed;
        _result = _analyzer.Analyze();

        sb.AppendLine("Analysis of folder \"" + dir.FullName + "\" has returned the following results:");
        sb.AppendLine();
        sb.AppendLine("Subfolders found: " + _result.FoldersAffected.ToString());
        sb.AppendLine("Files found in folder and subfolders: " + _result.FilesAffected.ToString());
        sb.AppendLine("Duplicated files in folder and subfolders: " + _result.DuplicatedFiles.Length.ToString());
    }
    else
    {
        MessageBox.Show("The chosen folder does not exist.", "Pay attention...", MessageBoxButton.OK, MessageBoxImage.Exclamation);
    }

    Dispatcher.BeginInvoke(new Action(() =>
    {
        txbResult.Text = sb.ToString();
        this.Cursor = null;
        lblStatus.Content = "Ready";
        progBar.Value = 0.0d;
        btnAnalyze.IsEnabled = true;
        btnGo.IsEnabled = true;
    }));
}
protected IEnumerable<AnalyzerTreeNode> FetchChildren(CancellationToken ct)
{
    if (symbol is IEntity)
    {
        var context = new AnalyzerContext()
        {
            CancellationToken = ct,
            Language = Language,
            AssemblyList = MainWindow.Instance.CurrentAssemblyList
        };

        foreach (var result in analyzer.Analyze(symbol, context))
        {
            yield return SymbolTreeNodeFactory(result);
        }
    }
    else
    {
        throw new NotSupportedException("Currently symbols that are not entities are not supported!");
    }
}
private void Worker()
{
    while (!_unload)
    {
        TAnalyzedType workItem;
        lock (_queueLock)
        {
            workItem = GetNextItem();
        }

        _isAnalyzing = true;

        if (workItem != null)
        {
            _analyzer.Analyze(workItem);
            _isAnalyzing = false;
        }
        else
        {
            // Queue is empty: block until a producer signals new work.
            _event.WaitOne();
        }
    }
}
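The worker above blocks on _event whenever GetNextItem returns null, so a producer must signal that handle after queuing work. A sketch of the producer side; the _queue field is an assumption inferred from GetNextItem:

// Hypothetical producer counterpart for the worker loop above; _queue is an assumed
// Queue<TAnalyzedType> sitting behind GetNextItem.
public void Enqueue(TAnalyzedType item)
{
    lock (_queueLock)
    {
        _queue.Enqueue(item);
    }
    _event.Set(); // wake the worker if it is waiting
}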
private QueryContext CreateTerm(string field, string word, int positionInQuery)
{
    var analyze = field[0] != '_' && field.Length > 1 && field[1] != '_';
    QueryContext root = null;

    if (analyze)
    {
        var tokenOperator = word.Trim().Last();
        var analyzable = word.Trim();

        if (tokenOperator == '~' || tokenOperator == '*')
        {
            analyzable = analyzable.Substring(0, analyzable.Length - 1);
        }

        var analyzed = _analyzer.Analyze(analyzable).ToArray();
        var operatorLessField = field.Replace("+", "");

        foreach (string token in analyzed)
        {
            if (root == null)
            {
                root = Parse(field, token, tokenOperator, positionInQuery);
            }
            else
            {
                var t = Parse(operatorLessField, token, tokenOperator, positionInQuery + 1);
                root.Add(t);
            }
        }
    }
    else
    {
        root = Parse(field, word);
    }

    return root;
}
static void Main(string[] args)
{
    try
    {
        Setup(args);

        if (_help)
        {
            PrintHelp();
        }
        else if (_install)
        {
            Install();
        }
        else
        {
            Validate();

            IList<IEntry> entries;
            using (var log = new LogStreamReader(_logFile))
            {
                entries = _processor.GetEntries(log);
            }

            var report = _analyzer.Analyze(entries);
            Console.WriteLine(_reporter.PrintReport(report));
        }
    }
    catch (Exception e)
    {
        PrintException(e);
    }

    if (_waitKeyPressed)
    {
        Console.WriteLine("Press any key to exit...");
        Console.ReadKey();
    }
}
protected IEnumerable<AnalyzerTreeNode> FetchChildren(CancellationToken ct)
{
    if (symbol is IEntity)
    {
        var context = new AnalyzerContext()
        {
            CancellationToken = ct,
            Language = Language,
            AssemblyList = MainWindow.Instance.CurrentAssemblyList
        };

        var results = analyzer.Analyze(symbol, context).Select(SymbolTreeNodeFactory);
        if (context.SortResults)
        {
            results = results.OrderBy(tn => tn.Text?.ToString(), NaturalStringComparer.Instance);
        }

        return results;
    }
    else
    {
        throw new NotSupportedException("Currently symbols that are not entities are not supported!");
    }
}
private QueryContext CreateTerm(string field, string word, int positionInQuery)
{
    var analyze = field[0] != '_' && field.Length > 1 && field[1] != '_';
    QueryContext query = null;

    if (analyze)
    {
        var tokenOperator = word.Trim().Last();
        var analyzable = word.Trim();

        if (tokenOperator == '~' || tokenOperator == '*')
        {
            analyzable = analyzable.Substring(0, analyzable.Length - 1);
        }

        var analyzed = _analyzer.Analyze(analyzable).ToArray();

        foreach (string token in analyzed)
        {
            if (query == null)
            {
                query = Parse(field, token, tokenOperator, positionInQuery);
            }
            else
            {
                var child = Parse(field, token, tokenOperator, positionInQuery + 1);
                child.And = false;
                child.Not = false;
                ((List<QueryContext>)query.Children).Add(child);
            }
        }
    }
    else
    {
        query = Parse(field, word);
    }

    return query;
}
static void Main(string[] args)
{
    try
    {
        UnityContainer container = new UnityContainer();
        container.RegisterType<ILog, LogConsole>();
        container.RegisterType<IRawDataQueue, RdqMemoryThreadSafe>();
        container.RegisterType<ISocialMediaProvider, SmpTwitterSample>();
        container.RegisterType<IDataStore, DsMemoryThreadSafe>();
        container.RegisterType<IAnalyzer, AnalyzerLocal>();
        container.RegisterType<IReporter, ReporterConsole>();

        IRawDataQueue rdq = container.Resolve<IRawDataQueue>();
        ISocialMediaProvider smp = container.Resolve<ISocialMediaProvider>();
        IDataStore ds = container.Resolve<IDataStore>();
        IAnalyzer analyzer = container.Resolve<IAnalyzer>();
        IReporter reporter = container.Resolve<IReporter>();

        // Set up a cancellation token to end the collection when appropriate
        using (CancellationTokenSource cancellationTokenSource = new CancellationTokenSource())
        {
            CancellationToken cancel = cancellationTokenSource.Token;

            smp.Retrieve(rdq, cancel);
            analyzer.Analyze(rdq, ds, cancel);
            reporter.Send(ds, cancel);

            Console.WriteLine("Press 'Enter' to stop.");
            Console.ReadLine();
            cancellationTokenSource.Cancel();
        }

        Console.WriteLine("Cancellation request received. Press 'Enter' to exit program.");
        Console.ReadLine();
    }
    catch (Exception ex)
    {
        Console.WriteLine("An exception occurred. See details below. Press 'Enter' to close program.\n{0}", ex.Message);
        Console.ReadLine();
    }
}
/// <summary>
/// Analyzes the current expression.
/// </summary>
/// <typeparam name="TResult">The type of the result.</typeparam>
/// <param name="analyzer">The analyzer.</param>
/// <returns>
/// The analysis result.
/// </returns>
public override TResult Analyze<TResult>(IAnalyzer<TResult> analyzer)
{
    return analyzer.Analyze(this);
}
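Analyze above is the accept half of a visitor-style double dispatch: each concrete expression node hands itself to the analyzer, and overload resolution picks the matching Analyze overload for that node type. A sketch of the visitor side; the node types are hypothetical, and the real interface likely declares more overloads:

// Sketch of the assumed visitor half; NumberExpression and BinaryExpression are
// hypothetical node types -- only the IAnalyzer<TResult>/Analyze(this) shape comes
// from the code above.
public interface IAnalyzer<TResult>
{
    TResult Analyze(NumberExpression expression);
    TResult Analyze(BinaryExpression expression);
}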
public AnalyzerModel(IAnalyzer analyzer)
{
    Analyzer = analyzer;
    OverrideFactor = analyzer.DefaultFactor;
    _latestStats = analyzer.Analyze(null, null, Enumerable.Empty<Move>());
}
public int i_AnalyzeText(string str_FilePath)
{
    if (str_FilePath.Length <= 0)
    {
        return -2;
    }

    int i_wf = 0;
    string str_line = "";
    string str_NewFilePath = Regex.Replace(str_FilePath, "\\.[^\\.]*$", "-processed.xhtml");

    StreamReader sr_in;
    StreamWriter sw_out;
    try
    {
        sr_in = new StreamReader(str_FilePath, Encoding.Default);
        sw_out = new StreamWriter(str_NewFilePath, false, Encoding.UTF8);
    }
    catch (Exception ex)
    {
        MessageBox.Show("An error occurred when opening the source file: " + ex.Message);
        return -1;
    }

    bool b_HasParagraphs = false;
    sw_out.WriteLine("<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<html>\n<body>");

    while (!sr_in.EndOfStream)
    {
        Application.DoEvents(); // to be replaced by introducing a separate thread

        str_line = sr_in.ReadLine();

        if (Regex.IsMatch(str_line, "^\\s*(<[^>]*>)*\\s*$")) // e.g. <meta ...></meta> or an empty line
        {
            sw_out.WriteLine(str_line);
            continue;
        }

        if (b_HasParagraphs)
        {
            sw_out.WriteLine("</p><p>");
        }
        else
        {
            sw_out.WriteLine("<p>");
            b_HasParagraphs = true;
        }

        MatchCollection coll_Matches = Regex.Matches(str_line, "\\s*([^\\w\\d]*)([\\w\\d]*)([^\\w\\d]*$|[\\.,\\?\\!\\)\\]\\}»]+|[:;\\-—\\\"]|[^\\w\\d\\s]*\\s)");
        foreach (Match m in coll_Matches)
        {
            string str_left = m.Groups[1].Value;
            string str_wf = m.Groups[2].Value;
            string str_wf_lc = str_wf.ToLower();
            string str_right = m.Groups[3].Value;

            // Digits are tagged as ciphers, Latin letters as non-lexical tokens.
            if (Regex.IsMatch(str_wf, "[1234567890]"))
            {
                sw_out.WriteLine(str_left + "<w><ana lex=\"" + str_wf + "\" gr=\"NUM,ciph\"></ana>" + str_wf + "</w>" + str_right);
                ++i_wf;
                continue;
            }

            if (Regex.IsMatch(str_wf, "[a-zA-Z]"))
            {
                sw_out.WriteLine(str_left + "<w><ana lex=\"" + str_wf + "\" gr=\"NONLEX\"></ana>" + str_wf + "</w>" + str_right);
                ++i_wf;
                continue;
            }

            if (str_wf.Length <= 0)
            {
                if (str_left.Length > 0 || str_right.Length > 0)
                {
                    sw_out.WriteLine(str_left + str_right);
                }
                continue;
            }

            if (DictFrequentWordforms.ContainsKey(str_wf_lc))
            {
                sw_out.WriteLine(str_left + DictFrequentWordforms[str_wf_lc] + str_wf + "</w>" + str_right);
                ++i_wf;
                continue;
            }

            try
            {
                Analyzer.Analyze(str_wf_lc);
            }
            catch (Exception ex)
            {
                Console.WriteLine("Error while analyzing: " + str_wf + " (" + ex.Message + ")");
                sw_out.WriteLine(m.Groups[0].Value);
                continue;
            }

            if (Analyzer.Count <= 0)
            {
                sw_out.WriteLine(m.Groups[0].Value);
                continue;
            }

            StringBuilder strb_ana = new StringBuilder();
            strb_ana.Append(str_left);
            strb_ana.Append("<w>");
            foreach (IWordForm wf in Analyzer)
            {
                strb_ana.Append(str_BuildAnalysisString(wf));
            }
            strb_ana.Append(str_wf);
            strb_ana.Append("</w>");
            strb_ana.Append(str_right);

            sw_out.WriteLine(strb_ana.ToString());
            ++i_wf;
        }
    }

    if (b_HasParagraphs)
    {
        sw_out.WriteLine("</p>");
    }

    sw_out.WriteLine("</body>\n</html>");

    sr_in.Close();
    sw_out.Close();

    return i_wf;
}
public IList<QueryContext> Parse(string query)
{
    if (string.IsNullOrWhiteSpace(query))
    {
        throw new ArgumentException("query");
    }
    if (query[0] == ' ' || query[0] == '-')
    {
        throw new ArgumentException("first query must be inclusive (and)");
    }

    int state = 0;
    var queries = new List<QueryContext>();

    // -----------
    // read states
    // -----------
    //
    // state 0: read key until key/value delimiter
    // state 1: read value until either suffix operator or term delimiter
    // state 2: read term delimiter
    // state 3: yield term
    //
    // allowed state transitions:
    //
    // 0->1
    // 1->2
    // 2->3
    // 3->0
    //
    // state transitions given the following query:
    //
    // title:'first'~+title:'blood'
    // 0.....1......230.....1.......23
    //
    // phrases are enclosed with double-quotes, terms with single quotes
    // and dates with backslashes. numbers are not enclosed:
    //
    // title:"john rambo"+genre:'action'+created<\2000-01-01\+rating>3

    var segment = new QuerySegment();
    var not = false;
    var or = false;
    var prevNot = false;
    var prevOr = false;
    bool isPhrase = false;
    bool isTerm = false;
    bool isDate = false;

    Action appendQuery = () =>
    {
        Query q = null;
        var key = new string(segment.Buf0.ToArray());
        var value = new string(segment.Buf1.ToArray());

        if (segment.IsTerm)
        {
            var values = _analyzer.Analyze(value);
            if (values.Count == 1)
            {
                q = new TermQuery(key, values[0]);
            }
            else
            {
                q = new PhraseQuery(key, values);
            }
        }
        else if (segment.IsPhrase)
        {
            var values = _analyzer.Analyze(value);
            q = new PhraseQuery(key, values);
        }
        else if (segment.IsDate)
        {
            q = new TermQuery(key, DateTime.Parse(value));
        }
        else
        {
            q = new TermQuery(key, long.Parse(value));
        }

        q.GreaterThan = segment.Gt;
        q.LessThan = segment.Lt;
        q.Not = prevNot;
        q.Or = prevOr;
        q.Fuzzy = segment.Fz;
        q.Prefix = segment.Px;

        if (segment.Fz)
        {
            q.Similarity = _fuzzySimilarity;
        }

        var qc = new QueryContext();
        qc.Query = q;

        Append(queries, qc);

        segment = new QuerySegment();
        prevNot = not;
        prevOr = or;
        state = 0;
    };

    for (int index = 0; index < query.Length; index++)
    {
        var c = query[index];

        if (state == 3)
        {
            appendQuery();
        }

        if (state == 0)
        {
            if (a0.Contains(c))
            {
                if (c == '<')
                {
                    segment.Lt = true;
                }
                else if (c == '>')
                {
                    segment.Gt = true;
                }
                state = 1;
            }
            else if (c != '+')
            {
                segment.Buf0.Add(c);
            }
        }
        else if (state == 1)
        {
            if (!isPhrase && !isTerm && !isDate && a1.Contains(c))
            {
                if (c == '-')
                {
                    not = true;
                    state = 3;
                }
                else if (c == '+')
                {
                    state = 3;
                }
                else if (c == ' ')
                {
                    or = true;
                    state = 3;
                }
                else if (c == '*')
                {
                    segment.Px = true;
                    state = 2;
                }
                else if (c == '~')
                {
                    segment.Fz = true;
                    state = 2;
                }
                else if (c == '"')
                {
                    isPhrase = true;
                    segment.IsPhrase = true;
                }
                else if (c == '\'')
                {
                    isTerm = true;
                    segment.IsTerm = true;
                }
                else if (c == '\\')
                {
                    isDate = true;
                    segment.IsDate = true;
                }
            }
            else if (isPhrase && c == '"')
            {
                isPhrase = false;
            }
            else if (isTerm && c == '\'')
            {
                isTerm = false;
            }
            else if (isDate && c == '\\')
            {
                isDate = false;
            }
            else
            {
                segment.Buf1.Add(c);
            }
        }
        else // state == 2
        {
            if (a2.Contains(c))
            {
                if (c == '-')
                {
                    not = true;
                }
                else if (c == ' ')
                {
                    or = true;
                }
                state = 3;
            }
            else
            {
                segment.Buf1.Add(c);
            }
        }
    }

    if (state > 0)
    {
        appendQuery();
    }

    return queries;
}
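A call-site sketch for the state machine above, using the grammar its own comments document (terms in single quotes, phrases in double quotes, dates in backslashes); the QueryParser host name and an in-scope analyzer are assumptions:

// Hypothetical call site for Parse above; QueryParser and its constructor are assumed.
var parser = new QueryParser(analyzer);
IList<QueryContext> parsed = parser.Parse("title:'first'~+title:'blood'");
// Yields two term queries: the first marked fuzzy by the '~' suffix,
// the second ANDed to it by the '+' delimiter.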