/// <summary>
/// Test fixture setup: substitutes the transponder receiver, builds the real
/// utility/decoder/analyser chain, and counts every event the analyser raises.
/// </summary>
public void SetUp()
{
    // Only the external receiver is faked; everything else is the real implementation.
    _fakeReceiver = Substitute.For<ITransponderReceiver>();

    _realUtility = new Utility();
    _realDecoder = new Decoder(_fakeReceiver, _realUtility);
    _realAnalyser = new Analyser(_realUtility, _realDecoder);

    // Each handler simply tallies how many times its event fired.
    _realAnalyser.SeparationEvent += (sender, eventArgs) => { _nSeparationEventsRaised++; };
    _realAnalyser.AnalysedDataReadyEvent += (sender, eventArgs) => { _nAnalysedDataReadyEventsRaised++; };
    _realAnalyser.TrackEnteredAirSpaceEvent += (sender, eventArgs) => { _nTrackEnteredAirspaceEventsRaised++; };
    _realAnalyser.TrackLeftAirSpaceEvent += (sender, eventArgs) => { _nTrackLeftAirspaceEventsRaised++; };
}
/// <summary>
/// Creates a runner that analyses files under <paramref name="path"/> matching
/// the given extensions with the supplied analyser.
/// </summary>
/// <param name="fileSystem">File-system abstraction used by the runner.</param>
/// <param name="path">Root folder to scan; must exist.</param>
/// <param name="extensions">File extensions to include; must contain at least one entry.</param>
/// <param name="analyser">Analyser applied to each file.</param>
/// <param name="outputFilePath">Optional output path; empty means no file output.</param>
/// <exception cref="ArgumentNullException">A required argument is null or an empty string.</exception>
/// <exception cref="ArgumentException">The extensions array is empty, or the folder does not exist.</exception>
public Runner(IFileSystem fileSystem, string path, string[] extensions, IAnalyser analyser, string outputFilePath = "")
{
    if (fileSystem == null)
    {
        throw new ArgumentNullException(nameof(fileSystem));
    }
    if (string.IsNullOrEmpty(path))
    {
        throw new ArgumentNullException(nameof(path));
    }
    if (extensions == null)
    {
        throw new ArgumentNullException(nameof(extensions));
    }
    if (!extensions.Any())
    {
        // A non-null but empty array is an invalid argument, not a null one;
        // the original threw ArgumentNullException here, which misreports the problem.
        // (ArgumentNullException derives from ArgumentException, so broad catches still work.)
        throw new ArgumentException("At least one extension is required", nameof(extensions));
    }
    if (analyser == null)
    {
        throw new ArgumentNullException(nameof(analyser));
    }
    // NOTE(review): this uses the static Directory.Exists rather than the injected
    // IFileSystem abstraction, which defeats file-system mocking in tests — confirm intent.
    if (!Directory.Exists(path))
    {
        throw new ArgumentException("Folder does not exist", nameof(path));
    }

    _fileSystem = fileSystem;
    _path = path;
    _extensions = extensions;
    _analyser = analyser;
    _outputFilePath = outputFilePath;
    _filesAnalysed = new List<FileAnalysed>();
    _container = Container.GetInstance();
}
/// <summary>
/// Registers an analyser with this instance; null analysers are rejected.
/// </summary>
/// <param name="analyser">The analyser to add.</param>
/// <exception cref="ArgumentNullException">The analyser is null.</exception>
public void AddAnalyser(IAnalyser analyser)
{
    if (analyser is null)
    {
        throw new ArgumentNullException(nameof(analyser));
    }

    _analysers.Add(analyser);
}
/// <summary>
/// Wires the button command to <c>Run</c> and creates the analyser,
/// routing its text output through <c>LogLine</c>.
/// </summary>
public ViewModel()
{
    ButtonCommand = new Command(() => Run());
    analyser = new Analyser(s => LogLine(s));
}
/// <summary>
/// Serialises <paramref name="result"/> as indented JSON and writes it to a
/// timestamped file named after the analyser in the current working directory.
/// </summary>
/// <param name="analyser">Analyser whose <c>Name</c> becomes part of the file name.</param>
/// <param name="result">The object to serialise.</param>
public static void SaveAnalyserResult(IAnalyser<object> analyser, object result)
{
    var json = JsonConvert.SerializeObject(result, Formatting.Indented);

    // "HHmmsss" was a typo (three 's' pads seconds to three digits); "HHmmss"
    // gives the intended two-digit seconds.
    // NOTE(review): analyser.Name is assumed to contain only filename-safe
    // characters — confirm against the implementations.
    File.WriteAllText(
        $"{DateTime.Now:yyyy-MMM-dd HHmmss} - {analyser.Name}.json", json);
}
/// <summary>
/// Baseline analysis result: zero analysed duration and a zero driver rating.
/// </summary>
/// <param name="analyser">Extension receiver; not otherwise used.</param>
public static HistoryAnalysis DefaultAnalysis(this IAnalyser analyser)
{
    var emptyAnalysis = new HistoryAnalysis
    {
        AnalysedDuration = TimeSpan.Zero,
        DriverRating = 0m,
    };
    return emptyAnalysis;
}
/// <summary>
/// Builds the CvBankas scraping pipeline: analyser, scraper and an HTTP client
/// named after the portal, wrapped in a <c>ScrapeClient</c>.
/// </summary>
public CvBankasDataService(IHttpClientFactory httpClientFactory, IScraperFactory scraperFactory, IUnitOfWork unitOfWork)
{
    _unitOfWork = unitOfWork;

    var portal = JobPortals.CvBankas;
    _analyser = scraperFactory.BuildAnalyser(portal);
    _scraper = scraperFactory.BuildScraper(portal);
    _httpClient = httpClientFactory.CreateClient(portal.GetDescription());

    // The scrape client wraps the HTTP client and scraper built above.
    _scrapeClient = new ScrapeClient(_httpClient, _scraper);
}
/// <summary>
/// Hooks a console output handler onto each of the analyser's four events.
/// </summary>
/// <param name="analyser">The analyser whose events are rendered to the console.</param>
public ConsoleOutPutter(IAnalyser analyser)
{
    _Analyser = analyser;
    // One dedicated handler per event type.
    _Analyser.AnalysedDataReadyEvent += OutPutAircraftDataEventHandler;
    _Analyser.SeparationEvent += OutputSeparationTasks;
    _Analyser.TrackEnteredAirSpaceEvent += OutputTrackEnteredAirSpaceEventHandler;
    _Analyser.TrackLeftAirSpaceEvent += OutputTrackLeftAirSpaceEventHandler;
}
/// <summary>
/// Parses the command arguments: element 0 selects the analyser type and
/// element 1 names the file the history data is loaded from.
/// </summary>
/// <param name="arguments">Raw command arguments; at least two are required.</param>
public AnalyseHistoryCommand(IReadOnlyCollection<string> arguments)
{
    var analysisType = arguments.ElementAt(0);
    var dataFile = arguments.ElementAt(1);

    _source = dataFile;
    // Resolve the analyser implementation matching the requested type.
    _analyser = AnalyserLookup.GetAnalyser(analysisType);
}
/// <summary>
/// Parses the command arguments: [0] analyser type, [1] (optional) whether to
/// bypass the rating penalty, [2] (optional) path to the data source.
/// </summary>
/// <param name="arguments">Raw command arguments; at least one is required.</param>
public AnalyseHistoryCommand(IReadOnlyCollection<string> arguments)
{
    // Both branches of the original conditional reduce to "take the first
    // element" (First() and Single() throw the same way on an empty sequence).
    var analysisType = arguments.First();

    // Guard the optional flag: the original called ElementAt(1) unconditionally,
    // which throws ArgumentOutOfRangeException when only the analyser type is
    // supplied — defeating the "never mind exception" intent of TryParse.
    // TryParse(null) is simply false, so a missing flag means "do not bypass".
    var bypassArgument = arguments.Count > 1 ? arguments.ElementAt(1) : null;
    bool.TryParse(bypassArgument, out var bypass);
    BypassPenlty = bypass; // (sic) property declared elsewhere; cannot rename here.

    DataSourcePath = arguments.Count > 2 ? arguments.ElementAt(2) : string.Empty;

    _analyser = AnalyserLookup.GetAnalyser(analysisType);
}
/// <summary>
/// Fetches completion items from the analyser and prints each one as
/// "tag1, tag2 DisplayText", one per line; prints nothing when there are none.
/// </summary>
private static async Task Autocomplete(IAnalyser analyser)
{
    IEnumerable<CompletionItem> completions = await analyser.GetAutoCompleteAsync();

    var itemLines = completions.Select(item => string.Join(", ", item.Tags) + " " + item.DisplayText);
    var forDisplay = string.Join(Environment.NewLine, itemLines);

    if (forDisplay != string.Empty)
    {
        WriteLine(forDisplay);
    }
}
/// <summary>
/// Builds a <c>HistoryAnalysis</c> from per-period results, subtracting the
/// undocumented periods from the total duration and halving the rating when
/// any undocumented period exists.
/// </summary>
/// <param name="analyser">Extension receiver; not otherwise used in the computation.</param>
/// <param name="result">Per-period items exposing dynamic <c>Duration</c> (TimeSpan-like) and <c>Total</c> (decimal) members.</param>
/// <param name="undocumented">Periods with no documented data, each with <c>Start</c>/<c>End</c>.</param>
public static HistoryAnalysis ComputeHistoryAnalysis(this IAnalyser analyser, IEnumerable<dynamic> result, List<Period> undocumented)
{
    // Compute weighted average and duration
    return (new HistoryAnalysis
    {
        // If driver finishes ride earlier than the designated time, total duration
        // needs to be computed: documented ticks minus undocumented-period ticks.
        AnalysedDuration = TimeSpan.FromTicks(result.Sum(item => (long)item.Duration.Ticks) - undocumented.Sum(item => (item.End - item.Start).Ticks)),
        // Rating = total score divided by total documented ticks, then halved
        // (divide by 2) when undocumented time exists, otherwise unchanged.
        // NOTE(review): the duration subtraction above is NOT applied to the
        // rating divisor — confirm that is the intended penalty rule.
        DriverRating = decimal.Divide(decimal.Divide(result.Sum(item => (decimal)item.Total), result.Sum(item => (long)item.Duration.Ticks)), undocumented.Count() > 0 ? 2 : 1)
    });
}
/// <summary>
/// Subscribes to the analyser's separation events and initialises the log file
/// with a header, truncating any log left over from a previous run.
/// </summary>
/// <param name="analyser">The analyser whose separation events are logged.</param>
public Log(IAnalyser analyser)
{
    _analyser = analyser;
    _listOfTwoPlanesAlreadyInvolvedInSeparationEvents = new List<SeparationAircraftsData>();
    _analyser.SeparationEvent += LogOnSeparationEvent;

    // WriteAllText creates (or truncates) the file and writes the header in a
    // single call, replacing the original Create/Close/AppendAllText sequence
    // that opened the file twice. The header text is unchanged.
    File.WriteAllText(@"log.txt", "Seperation events log: " + Environment.NewLine + Environment.NewLine);
}
/// <summary>
/// Builds the CvOnline scraping pipeline: parser, analyser, scraper, URL filter
/// and a portal-named HTTP client, wrapped in a <c>ScrapeClient</c>.
/// </summary>
public CvOnlineDataService(IHttpClientFactory httpClientFactory, IScraperFactory scraperFactory, IUnitOfWork unitOfWork)
{
    _parser = new CvOnlineParser();
    _unitOfWork = unitOfWork;

    var portal = JobPortals.CvOnline;
    _analyser = scraperFactory.BuildAnalyser(portal);
    _scraper = scraperFactory.BuildScraper(portal);
    _filter = scraperFactory.BuildUrlFilter(portal);
    _httpClient = httpClientFactory.CreateClient(portal.GetDescription());

    // The scrape client wraps the HTTP client and scraper built above.
    _scrapeClient = new ScrapeClient(_httpClient, _scraper);
}
/// <summary>
/// Prints every token of the analysed source in start-position order,
/// rendering each through <c>WriteCodeBlock</c>.
/// </summary>
private static async Task Tokenize(IAnalyser analyser)
{
    var tokens = await analyser.GetTokensAsync();

    WriteLine("Tokenized source: ");

    var lineId = 0;
    var orderedTokens = tokens.OrderBy(token => token.Start);
    foreach (var token in orderedTokens)
    {
        // lineId is threaded through by reference so the renderer can track lines.
        WriteCodeBlock(token.Text, token.Type, ref lineId);
    }

    WriteLine();
}
/// <summary>
/// Parses the command arguments: [0] analyser type, [1] (optional) driving data
/// file path, [2] (optional) "true" to enable penalisation.
/// </summary>
/// <param name="arguments">Raw command arguments; at least one is required.</param>
public AnalyseHistoryCommand(IReadOnlyCollection<string> arguments)
{
    var analysisType = arguments.ElementAt(0);

    if (arguments.Count > 1 && arguments.ElementAt(1) != null)
    {
        _drivingDataFilePath = arguments.ElementAt(1);
    }

    _analyser = AnalyserLookup.GetAnalyser(analysisType);

    // Ordinal case-insensitive comparison replaces ToLower() == "true":
    // it avoids culture-sensitive casing (CA1304) and an extra string
    // allocation, and string.Equals handles null without a separate check.
    if (arguments.Count > 2 && string.Equals(arguments.ElementAt(2), "true", StringComparison.OrdinalIgnoreCase))
    {
        _penalise = true;
    }
}
/// <summary>
/// Test fixture setup: substitutes the transponder receiver and builds the
/// real utility/decoder/analyser chain plus the log under test.
/// </summary>
public void SetUp()
{
    /*
     * We don't create a mock of our class, as it is the class from which we test
     */
    // If we test multiple classes in conjunction, we add mocks here:

    // Only the external receiver is a fake, therefore we substitute it.
    _fakeTransponderReceiver = Substitute.For<ITransponderReceiver>();

    // We instantiate our 'real' variables (the actual implementations under test).
    _realUtility = new Utility();
    _realDecoder = new classes.Decoder(_fakeTransponderReceiver, _realUtility);
    _realAnalyser = new Analyser(_realUtility, _realDecoder);
    _realLog = new Log(_realAnalyser);
}
/// <summary>
/// Sets up the analyser so we can get straight to using it.
/// </summary>
/// <param name="analyser">The analyser to set up for use.</param>
/// <param name="azureConfig">(Optional) Azure configuration details; required by the Azure analyser.</param>
/// <exception cref="ArgumentOutOfRangeException">An unsupported analyser value was supplied.</exception>
public Analyse(Analyser analyser, AzureConfig azureConfig = null)
{
    try
    {
        _analyser = analyser switch
        {
            Analyser.Azure => new AzureTextAnalytics(azureConfig),
            Analyser.Vader => new Vader(),
            // Explicit arm: without it, any enum value added later would throw
            // an opaque SwitchExpressionException instead of naming the argument.
            _ => throw new ArgumentOutOfRangeException(nameof(analyser), analyser, "Unsupported analyser")
        };
    }
    catch (Exception e)
    {
        // Log the failure for diagnostics, then rethrow preserving the stack trace.
        Console.WriteLine(e);
        throw;
    }
}
/// <summary>
/// Loads a BT file, parses it with <c>CommonAnalyser</c>, and rebuilds the tree
/// view from the parsed node graph. On a parse failure the error tab is
/// selected and the method returns without touching the tree.
/// </summary>
/// <param name="filename">Path of the BT file to analyse.</param>
private void AnalysisBt(string filename)
{
    SetLogger(BTToolLogger.Start('f', filename));

    // Clean up state left over from the previous analysis.
    treeView.Nodes.Clear();
    _btAnalyser = new CommonAnalyser();
    rootNode = null;

    // Read the whole BT file into memory.
    // NOTE(review): Stream.Read may return fewer bytes than requested; the
    // return value is ignored here — confirm inputs are always read fully.
    byte[] buffer = null;
    using (FileStream stream = new FileStream(filename, FileMode.Open))
    {
        buffer = new byte[stream.Length];
        stream.Read(buffer, 0, (int)stream.Length);
    }

    // Parse the raw bytes into the node graph.
    IBNode rootBNode = null;
    try
    {
        rootBNode = this._btAnalyser.Analysis(buffer);
        rootNode = rootBNode;
    }
    catch
    {
        // Parse failed: log the error, switch to the error tab, and bail out.
        SetLogger(BTToolLogger.Start('e', filename));
        tabControl.SelectedIndex = 1;
        filename = null;
        return;
    }
    SetLogger(BTToolLogger.End('f'));

    SetLogger(BTToolLogger.Start('s'));
    // Build the WinForms tree from the parsed node graph.
    TreeNode rootTNode = new TreeNode();
    ConstructTree(rootTNode, rootBNode);
    rootTNode.Expand();
    treeView.Nodes.Add(rootTNode);
    SetLogger(BTToolLogger.End('s'));
}
/// <summary>
/// Runs the underlying analyser and, when the penalty is bypassed, doubles the
/// driver rating if any undocumented period falls inside the given window.
/// </summary>
/// <param name="analyser">Analyser whose base result is adjusted.</param>
/// <param name="history">Documented driving periods.</param>
/// <param name="bypassPenalty">True to compensate the penalty baked into the base analysis.</param>
/// <param name="start">Window start; TimeSpan.Zero when omitted.</param>
/// <param name="end">Window end; TimeSpan.Zero when omitted.</param>
public static HistoryAnalysis Analyse(this IAnalyser analyser, IReadOnlyCollection<Period> history, bool bypassPenalty, TimeSpan? start = null, TimeSpan? end = null)
{
    // Existing analyser implementations must not be amended, so the
    // adjustment is layered on top of their result after the fact.
    var result = analyser.Analyse(history);

    if (!bypassPenalty)
    {
        return result;
    }

    // Default window when no start/end time was supplied.
    List<Period> undocumented;
    history.Filter(start ?? TimeSpan.Zero, end ?? TimeSpan.Zero, out undocumented);

    // Double the rating only when undocumented time exists.
    var factor = undocumented.Count() > 0 ? 2 : 1;
    result.DriverRating = decimal.Multiply(result.DriverRating, factor);

    return result;
}
public static async Task <T> CompileAndRun <T>(this IAnalyser analyser, string type, string methodName, BindingFlags methodFlags, object instance = null, params object[] parameters) { if (analyser is null) { throw new ArgumentNullException(nameof(analyser)); } if (type is null) { throw new ArgumentNullException(nameof(type)); } if (methodName is null) { throw new ArgumentNullException(nameof(methodName)); } CompilerResult compiled = await analyser.CompileAsync(); if (!compiled.Success) { return(default);
/// <summary>
/// Prints all diagnostics for the analysed source, most severe first, colour
/// coded by severity; prints a green "No diagnostics" when there are none.
/// </summary>
private static async Task Diagnostics(IAnalyser analyser)
{
    IReadOnlyCollection<Diagnostic> diagnosticsResult = await analyser.GetDiagnosticsAsync();

    if (diagnosticsResult.Count == 0)
    {
        WriteLineInColor("No diagnostics", ConsoleColor.Green);
        return;
    }

    WriteLineInColor("Diagnostics: ", ConsoleColor.White);
    foreach (Diagnostic diagnostic in diagnosticsResult.OrderByDescending(d => d.Severity))
    {
        var start = diagnostic.Location.SourceSpan.Start;
        var length = diagnostic.Location.SourceSpan.Length;

        // Map severity to a console colour; Hidden and anything unknown fall
        // back to white, exactly as the original switch statement did.
        var colorOfDiagnostic = diagnostic.Severity switch
        {
            DiagnosticSeverity.Info => ConsoleColor.Cyan,
            DiagnosticSeverity.Warning => ConsoleColor.Yellow,
            DiagnosticSeverity.Error => ConsoleColor.Red,
            _ => ConsoleColor.White
        };

        WriteLineInColor($"From {start} to {start + length}: {diagnostic}", colorOfDiagnostic);
    }
}
/// <summary>
/// Reads a BT file and parses it into the node graph without building the
/// display tree. Parse errors propagate unchanged to the caller.
/// </summary>
/// <param name="filename">Path of the BT file.</param>
public void OpenFile(string filename)
{
    _btAnalyser = new CommonAnalyser();
    _tRootNode = null;

    // File.ReadAllBytes replaces the manual FileStream + single Read call,
    // whose ignored return value could silently leave the buffer partially
    // filled for large files.
    byte[] buffer = File.ReadAllBytes(filename);

    // The original try { ... } catch { throw; } added nothing, so the parser
    // is now called directly; its exceptions reach the caller as before.
    _bRootNode = this._btAnalyser.Analyse(buffer);

    _tRootNode = null; // Reset flag: the display tree has not been built yet.
}
/// <summary>
/// Scrape step for page URLs; keeps its own reference to the scrape client in
/// addition to the one passed to the base class.
/// </summary>
public ScrapePageUrls(IUnitOfWork unitOfWork, IAnalyser analyser, ScrapeClient scrapeClient) : base(unitOfWork, analyser, scrapeClient)
{
    _scrapeClient = scrapeClient;
}
// Monotonic source for composite ids, shared across all instances.
private static int counter = 0;

/// <summary>
/// Aggregates several analysers under a single generated "Composite-N" id.
/// </summary>
/// <param name="analysers">The analysers this composite holds.</param>
public CompositeAnalyser(IAnalyser[] analysers)
{
    this.analysers = analysers;

    // Interlocked.Increment makes id generation safe under concurrent
    // construction; the original unsynchronised ++counter (marked TODO)
    // could hand out duplicate ids. Fully qualified to avoid a new using.
    Id = "Composite-" + System.Threading.Interlocked.Increment(ref counter);
}
/// <summary>
/// Stores the analysers this composite is built from.
/// </summary>
/// <param name="analysers">The analysers to aggregate.</param>
public CompositeAnalyser(IAnalyser[] analysers)
{
    this.analysers = analysers;
}
/// <summary>
/// xUnit-style constructor setup: each test gets a fresh analyser under test.
/// </summary>
public CvLtAnalyserTests()
{
    _analyser = new CvLtAnalyser();
}
/// <summary>
/// Expects exactly one argument — the analyser type — and resolves the
/// matching analyser; <c>Single()</c> throws for zero or multiple arguments.
/// </summary>
/// <param name="arguments">Raw command arguments; exactly one is required.</param>
public AnalyseHistoryCommand(IReadOnlyCollection<string> arguments)
{
    _analyser = AnalyserLookup.GetAnalyser(arguments.Single());
}
/// <summary>
/// Convenience overload of <c>CompileAndRun</c> that performs the method
/// lookup with <c>BindingFlags.Default</c>.
/// </summary>
public static Task<T> CompileAndRun<T>(this IAnalyser analyser, string type, string methodName, object instance = null, params object[] parameters) =>
    CompileAndRun<T>(analyser, type, methodName, BindingFlags.Default, instance, parameters);
/// <summary>
/// Test setup: creates a fresh Vader analyser before each test.
/// </summary>
public void Setup()
{
    _analyser = new Vader();
}
/// <summary>
/// Scrape step for page infos; all dependencies are handed straight to the
/// base class, no extra state is kept here.
/// </summary>
public ScrapePageInfos(IUnitOfWork unitOfWork, IAnalyser analyser, ScrapeClient scrapeClient) : base(unitOfWork, analyser, scrapeClient)
{
}
/// <summary>
/// Non-generic overload: compiles and runs the named method, discarding the
/// result by delegating to the <c>object</c>-typed generic overload.
/// </summary>
public static async Task CompileAndRun(this IAnalyser analyser, string type, string methodName, BindingFlags methodFlags, object instance = null, params object[] parameters)
{
    await CompileAndRun <object>(analyser, type, methodName, methodFlags, instance, parameters);
}