/// <summary>
/// Groups the heuristics by language, runs each group against the data,
/// and yields one scored <see cref="AnalysisResult"/> per language.
/// </summary>
public IEnumerable<AnalysisResult> GetResults(AnalysisData data)
{
    foreach (var group in this.heuristics.GroupBy(h => h.Language))
    {
        var matches = Run(group, data);
        yield return new AnalysisResult(
            group.Key,
            matches,
            CalculateScore(this.heuristics.Count, group.Count(), matches.Count()));
    }
}
/// <summary>
/// Builds the report table of analysis results, either for a single analysis
/// (analysisId != 0) or for every dated analysis of a patient (analysisId == 0).
/// </summary>
/// <param name="factory">Factory used to open the database connection.</param>
/// <param name="config">Configuration used to resolve handbook groups for result text.</param>
/// <param name="patientId">Patient whose history is fetched when analysisId is 0.</param>
/// <param name="analysisId">Specific analysis id, or 0 for the whole patient history.</param>
public static ReportsDataSet.AnalysisResultDataTable GetAnalysisResultTable(ConnectionFactory factory, Config config, int patientId, int analysisId)
{
    DataTable dataTable = new DataTable();
    string cmdText = @" select Code, AnalysisTypes.Name as AnalysisTypeName, ExecutionDate, AnalysisData, HandbookGroupId from Analyses left join AnalysisTypes on AnalysisTypes.Id=AnalysisTypeId where ExecutionDate is not null and ";
    // analysisId == 0 selects the whole patient history, ordered chronologically.
    cmdText += analysisId == 0 ? "PatientId=@PatientId order by ExecutionDate" : "AnalysisId=@AnalysisId";
    using (GmConnection conn = factory.CreateConnection())
    {
        GmCommand cmd = new GmCommand(conn, cmdText);
        // Both parameters are always bound; the SQL above references only one of them.
        cmd.AddInt("PatientId", patientId);
        cmd.AddInt("AnalysisId", analysisId);
        cmd.Fill(dataTable);
    }
    ReportsDataSet.AnalysisResultDataTable dtAnalysisResult = new ReportsDataSet.AnalysisResultDataTable();
    foreach (DataRow dr in dataTable.Rows)
    {
        // AnalysisData is stored serialized as a string; deserialize to render the result text.
        AnalysisData ad = AnalysisData.DeserializeString(dr["AnalysisData"] as string);
        ReportsDataSet.AnalysisResultRow row = dtAnalysisResult.NewAnalysisResultRow();
        row.AnalysisCode = (string)dr["Code"];
        row.AnalysisName = (string)dr["AnalysisTypeName"];
        // Dates are rendered as dd.MM.yy for the report layout.
        row.ExecutionDate = ((DateTime)dr["ExecutionDate"]).ToString("dd.MM.yy");
        HandbookGroup hg = config.GetHandbookGroup((string)dr["HandbookGroupId"]);
        row.Result = ad.GetText(hg);
        dtAnalysisResult.AddAnalysisResultRow(row);
    }
    return(dtAnalysisResult);
}
/// <summary>
/// Initialize the analysis with existing data.
/// </summary>
/// <param name="data">Previously computed analysis data to copy from.</param>
public void Init(AnalysisData data)
{
    // Drop any state left over from a previous run.
    onsetIndices.Clear();
    _magnitude.Clear();
    _flux.Clear();
    _magnitudeSmooth.Clear();
    _magnitudeAvg.Clear();
    _onsets.Clear();

    // Copy each per-frame series and release surplus capacity right away.
    _magnitude.AddRange(data.magnitude);
    _magnitude.TrimExcess();
    _flux.AddRange(data.flux);
    _flux.TrimExcess();
    _magnitudeSmooth.AddRange(data.magnitudeSmooth);
    _magnitudeSmooth.TrimExcess();
    _magnitudeAvg.AddRange(data.magnitudeAvg);
    _magnitudeAvg.TrimExcess();

    foreach (var pair in data.onsets)
    {
        _onsets.Add(pair.Key, pair.Value);
    }
}
/// <summary>
/// Builds a matrix of "last business weight" values: one row per stock code and one
/// column per strategy. A cell holds double.NaN when the strategy produced no trade
/// points for that stock.
/// </summary>
/// <param name="dataParm">Data parameters used to load each stock's analysis data.</param>
/// <param name="stockCodeList">Stock codes — one matrix row each.</param>
/// <param name="strategyList">Strategy codes — one matrix column each.</param>
/// <returns>List of rows; each row is an array indexed by strategy.</returns>
public static List<double[]> Estimate_Matrix_LastBizWeight(commonClass.DataParams dataParm, StringCollection stockCodeList, StringCollection strategyList)
{
    // NOTE: dead commented-out stopwatch/debug-logging code removed.
    List<double[]> retList = new List<double[]>();
    for (int rowId = 0; rowId < stockCodeList.Count; rowId++)
    {
        // Cached series belong to the previous stock; drop them before loading the next one.
        Data.ClearCache();
        AnalysisData analysisData = new AnalysisData(stockCodeList[rowId], dataParm);
        double[] rowRetList = new double[strategyList.Count];
        for (int colId = 0; colId < strategyList.Count; colId++)
        {
            Data.TradePoints tradePoints = Analysis(analysisData, strategyList[colId]);
            if (tradePoints != null && tradePoints.Count > 0)
            {
                // Weight of the most recent trade point for this stock/strategy pair.
                rowRetList[colId] = (tradePoints[tradePoints.Count - 1] as TradePointInfo).BusinessInfo.Weight;
            }
            else
            {
                rowRetList[colId] = double.NaN;
            }
        }
        retList.Add(rowRetList);
    }
    return retList;
}
/// <summary>
/// Calculates primary strategy data. If a strategy has several outputs, one is called
/// "primary data" and the others are called "extra data". Results are cached per
/// (stock, time range, time scale, strategy) combination.
/// </summary>
/// <param name="myData">Data used to calculate strategy data.</param>
/// <param name="meta">Strategy meta data.</param>
/// <returns>Null if error.</returns>
public static Data.TradePoints Analysis(AnalysisData myData, Meta meta)
{
    // Cache key uniquely identifies the stock/range/scale/strategy combination.
    string cacheName = "data-" + myData.DataStockCode + "-" + myData.DataTimeRange.ToString() + "-" + myData.DataTimeScale.Code + "-" + meta.ClassType.Name;
    object[] processParas = new object[2];
    processParas[0] = myData;
    processParas[1] = meta.Parameters;
    //First , find in cache
    Data.TradePoints tradePoints = (Data.TradePoints)Data.FindInCache(cacheName);
    if (tradePoints != null)
    {
        return(tradePoints);
    }
    //Then, Call Execute() method to get trading points.
    object strategyInstance = GetStrategyInstance(meta.ClassType);
    if (strategyInstance == null)
    {
        return(null);
    }
    // Execute(myData, parameters) is invoked via reflection on the strategy class.
    tradePoints = (Data.TradePoints)meta.ClassType.InvokeMember("Execute", BindingFlags.InvokeMethod | BindingFlags.Instance | BindingFlags.Public, null, strategyInstance, processParas);
    Data.AddToCache(cacheName, tradePoints);
    return(tradePoints);
}
/// <summary>
/// Visits each root operation with the given analysis data, collecting pending writes
/// per assignment operation. All per-visit instance state is reset, and pooled
/// resources freed, when the visit completes (even on cancellation).
/// </summary>
private void Visit(IEnumerable<IOperation> operations, AnalysisData analysisData, CancellationToken cancellationToken)
{
    // Re-entrancy guard: a previous visit must have fully cleaned up.
    Debug.Assert(_currentAnalysisData == null);
    Debug.Assert(_currentRootOperation == null);
    Debug.Assert(_pendingWritesMap == null);
    _pendingWritesMap = PooledDictionary<IAssignmentOperation, PooledHashSet<(ISymbol, IOperation)>>.GetInstance();
    try
    {
        _currentAnalysisData = analysisData;
        _cancellationToken = cancellationToken;
        foreach (var operation in operations)
        {
            cancellationToken.ThrowIfCancellationRequested();
            _currentRootOperation = operation;
            Visit(operation);
        }
    }
    finally
    {
        // Always restore pre-visit state and return pooled collections to their pools.
        _currentAnalysisData = null;
        _currentRootOperation = null;
        _cancellationToken = default;
        foreach (var pendingWrites in _pendingWritesMap.Values)
        {
            pendingWrites.Free();
        }
        _pendingWritesMap.Free();
        _pendingWritesMap = null;
    }
}
/// <summary>
/// Initialize the analysis with existing data, recording the total frame count first.
/// </summary>
/// <param name="data">Previously computed analysis data to copy from.</param>
public void Init(AnalysisData data)
{
    totalFrames = data.magnitude.Count;
    // Reset any state left over from a previous run.
    onsetIndices.Clear();
    _magnitude.Clear();
    _flux.Clear();
    _magnitudeSmooth.Clear();
    _magnitudeAvg.Clear();
    _onsets.Clear();
    // Copy the per-frame series from the supplied data.
    _magnitude.AddRange(data.magnitude);
    _flux.AddRange(data.flux);
    _magnitudeSmooth.AddRange(data.magnitudeSmooth);
    _magnitudeAvg.AddRange(data.magnitudeAvg);
    // Release surplus list capacity after the bulk copies.
    _magnitude.TrimExcess();
    _flux.TrimExcess();
    _magnitudeSmooth.TrimExcess();
    _magnitudeAvg.TrimExcess();
    foreach (KeyValuePair<int, Onset> item in data.onsets)
    {
        _onsets.Add(item.Key, item.Value);
    }
}
/// <summary>
/// Builds a matrix of "last business weight" values, one row per stock and one column
/// per strategy; a cell is double.NaN when a strategy yields no trade points.
/// </summary>
public static List<double[]> Estimate_Matrix_LastBizWeight(AppTypes.TimeRanges timeRange, AppTypes.TimeScale timeScale, StringCollection stockCodeList, StringCollection strategyList)
{
    var matrix = new List<double[]>();
    for (int row = 0; row < stockCodeList.Count; row++)
    {
        // Cached series belong to the previous stock; clear before loading the next.
        Data.ClearCache();
        var stockData = new AnalysisData(timeRange, timeScale, stockCodeList[row], DataAccessMode.Local);
        var weights = new double[strategyList.Count];
        for (int col = 0; col < strategyList.Count; col++)
        {
            Data.TradePoints points = Analysis(stockData, strategyList[col]);
            weights[col] = (points != null && points.Count > 0)
                ? (points[points.Count - 1] as TradePointInfo).BusinessInfo.Weight
                : double.NaN;
        }
        matrix.Add(weights);
    }
    return matrix;
}
/// <summary>
/// Builds a profit matrix: one row per stock and one column per strategy.
/// A cell is 0 when the strategy produces no advice for the stock.
/// </summary>
public static List<decimal[]> Estimate_Matrix_Profit(AppTypes.TimeRanges timeRange, AppTypes.TimeScale timeScale, StringCollection stockCodeList, StringCollection strategyList, EstimateOptions option)
{
    var matrix = new List<decimal[]>();
    for (int row = 0; row < stockCodeList.Count; row++)
    {
        // Cached series belong to the previous stock; clear before loading the next.
        Data.ClearCache();
        var stockData = new AnalysisData(timeRange, timeScale, stockCodeList[row], DataAccessMode.Local);
        var profits = new decimal[strategyList.Count];
        for (int col = 0; col < strategyList.Count; col++)
        {
            Data.TradePoints advices = Analysis(stockData, strategyList[col]);
            profits[col] = advices == null
                ? 0
                : EstimateTrading_Profit(stockData, ToTradePointInfo(advices), option);
        }
        matrix.Add(profits);
    }
    return matrix;
}
/// <summary>
/// Exports to a DBC file at <paramref name="targetPath"/>: writes the detail rows,
/// then the detail group, then the CAN channel information.
/// </summary>
private static void ExportDBC(string targetPath, RadGridView gridView, GridViewData gridData, XcpData xcpData, AnalysisData analysisData)
{
    DbcDetailData(targetPath, gridView, gridData);
    AddDBCDetailGroup(targetPath);
    AddCanChInfo(targetPath, xcpData, analysisData, ProtocolType.DBC);
}
/// <summary>
/// Creates an analysis over a frequency-bin range of the FFT spectrum.
/// The stored range is clamped into [0, spectrumSize]; an out-of-range request
/// only logs a warning.
/// </summary>
/// <param name="start">First spectrum bin (inclusive).</param>
/// <param name="end">Last spectrum bin (inclusive).</param>
/// <param name="name">Display name of this analysis.</param>
public Analysis(int start, int end, string name)
{
    int spectrumSize = (RhythmTool.fftWindowSize / 2);
    this.name = name;
    // Clamp end first so start can then be clamped against the sanitized end.
    this.end = Mathf.Clamp(end, 0, spectrumSize);
    this.start = Mathf.Clamp(start, 0, this.end);
    // The warning checks the ORIGINAL arguments; the stored values above are already clamped.
    if (end < start || start < 0 || end < 0 || start >= spectrumSize || end > spectrumSize)
    {
        Debug.LogWarning("Invalid range for analysis " + name + ". Range must be within " + spectrumSize + " and start cannot come after end.");
    }
    _magnitude = new List<float>();
    _flux = new List<float>();
    _magnitudeSmooth = new List<float>();
    _magnitudeAvg = new List<float>();
    // 1000 is an initial capacity hint for the onset collections.
    _onsets = new Dictionary<int, Onset>(1000);
    // analysisData wraps the live backing collections, so it reflects later updates.
    analysisData = new AnalysisData(name, _magnitude, _flux, _magnitudeSmooth, _magnitudeAvg, _onsets);
    onsetIndices = new List<int>(1000);
    points = new List<int>();
}
/// <summary>
/// Runs dataflow analysis over the control flow graph of a local function or lambda and
/// returns the analysis data at its exit block, cloned so it survives analyzer disposal.
/// Falls back to the caller's current block data when the exit block is unreachable.
/// </summary>
private static BasicBlockAnalysisData AnalyzeLocalFunctionOrLambdaInvocation(
    IMethodSymbol localFunctionOrLambda,
    ControlFlowGraph cfg,
    AnalysisData parentAnalysisData,
    CancellationToken cancellationToken)
{
    Debug.Assert(localFunctionOrLambda.IsLocalFunction() || localFunctionOrLambda.IsAnonymousFunction());

    cancellationToken.ThrowIfCancellationRequested();
    using var analyzer = new DataFlowAnalyzer(cfg, localFunctionOrLambda, (FlowGraphAnalysisData)parentAnalysisData);
    var resultBlockAnalysisData = CustomDataFlowAnalysis<BasicBlockAnalysisData>.Run(cfg, analyzer, cancellationToken);
    if (resultBlockAnalysisData == null)
    {
        // Unreachable exit block from lambda/local.
        // So use our current analysis data.
        return(parentAnalysisData.CurrentBlockAnalysisData);
    }

    // We need to return a cloned basic block analysis data as disposing the DataFlowAnalyzer
    // created above will dispose all basic block analysis data instances allocated by it.
    var clonedBasicBlockData = parentAnalysisData.CreateBlockAnalysisData();
    clonedBasicBlockData.SetAnalysisDataFrom(resultBlockAnalysisData);
    return(clonedBasicBlockData);
}
/// <summary>
/// Get estimated profit from adviced trade points.
/// </summary>
/// <param name="data">Analysis data the trade points were derived from.</param>
/// <param name="tradePoints">Adviced trade points to evaluate.</param>
/// <param name="options">Estimation options.</param>
/// <returns>Total estimated profit.</returns>
public static decimal EstimateTrading_Profit(AnalysisData data, TradePointInfo[] tradePoints, EstimateOptions options)
{
    // Accumulate profit via the estimation callback; only the running total is used.
    var sum = new EstimateSum();
    EstimateTrading(data, tradePoints, options, sum, null, AfterEstimation_GetProfit);
    return sum.total;
}
/// <summary>
/// Constructor with all data.
/// </summary>
/// <param name="d">Analysis data the strategy will operate on.</param>
public GenericStrategy(AnalysisData d)
{
    data = d;
    adviceInfo = new StrategyData.TradePoints();

    // No position held yet; trailing stop starts disabled (-1).
    last_position = 0;
    trailing_stop = -1;
}
/// <summary>
/// Produces one scored <see cref="AnalysisResult"/> per language found among the heuristics.
/// </summary>
public IEnumerable<AnalysisResult> GetResults(AnalysisData data)
{
    return
        from grp in this.heuristics.GroupBy(h => h.Language)
        let matches = Run(grp, data)
        select new AnalysisResult(
            grp.Key,
            matches,
            CalculateScore(this.heuristics.Count, grp.Count(), matches.Count()));
}
/// <summary>
/// Runs a fast, non-precise operation tree based analysis to compute symbol usage results
/// for symbol read/writes.
/// </summary>
/// <param name="rootOperation">Root of the operation tree to analyze.</param>
/// <param name="owningSymbol">Symbol that owns the operation tree.</param>
public static SymbolUsageResult Run(IOperation rootOperation, ISymbol owningSymbol, CancellationToken cancellationToken)
{
    AnalysisData analysisData = null;
    using (analysisData = OperationTreeAnalysisData.Create(owningSymbol, AnalyzeLocalFunction))
    {
        var operations = SpecializedCollections.SingletonEnumerable(rootOperation);
        Walker.AnalyzeOperationsAndUpdateData(operations, analysisData, cancellationToken);
        return(analysisData.ToResult());
    }

    // Local functions.
    BasicBlockAnalysisData AnalyzeLocalFunction(IMethodSymbol localFunction)
    {
        // Find the local function's operation within the analyzed tree;
        // analysisData is captured from the enclosing scope above.
        var localFunctionOperation = rootOperation.Descendants()
            .FirstOrDefault(o => Equals((o as ILocalFunctionOperation)?.Symbol, localFunction));

        // Can likely be null for broken code.
        if (localFunctionOperation != null)
        {
            var operations = SpecializedCollections.SingletonEnumerable(localFunctionOperation);
            Walker.AnalyzeOperationsAndUpdateData(operations, analysisData, cancellationToken);
        }

        return(analysisData.CurrentBlockAnalysisData);
    }
}
// Unity lifecycle hook: subscribe to the song-loaded event, cache the low-frequency
// band, then hand the clip to rhythmTool.
void Start()
{
    rhythmTool.SongLoaded += OnSongLoaded;
    low = rhythmTool.low;
    // NOTE(review): assigning audioClip last — presumably this triggers loading/analysis,
    // which is why the SongLoaded handler is attached first. TODO confirm.
    rhythmTool.audioClip = audioClip;
}
/// <summary>
/// Returns area monitor data ("VOCsItem") serialized as JSON for the company and date
/// range supplied in the request query string.
/// </summary>
/// <param name="data">NOTE(review): unused — every input is read from Request.QueryString.</param>
public string getDataInfo(RequestData data)
{
    // NOTE(review): DateTime.Parse uses the current culture; confirm the expected
    // format of the startTime/endTime query parameters.
    var list = AnalysisData.GetAreaMonitorData(Request.QueryString["Companyid"], DateTime.Parse(Request.QueryString["startTime"]), DateTime.Parse(Request.QueryString["endTime"]), Request.QueryString["selectData"], "VOCsItem");
    string d = new JavaScriptSerializer().Serialize(list);
    return(d);
}
/// <summary>
/// Flushes the token currently being built into the token list, if it is non-empty,
/// and resets the builder for the next token.
/// </summary>
private static void TokenComplete(ref AnalysisData data)
{
    if (String.IsNullOrEmpty(data.curToken))
    {
        return;
    }

    data.tokens.Add(data.curToken);
    data.curToken = String.Empty;
}
private float lastFrec; // the last frequency that triggered block creation

// Unity lifecycle hook: cache the low-frequency band, configure RhythmTool,
// and pre-allocate the block pool.
void Start()
{
    lowFrec = rhythmTool.low;
    SetUpRhythmTool();
    CreatPoolBlocks();
}
/// <summary>
/// Flattens the analysis rows into parallel X/Y arrays and persists them as a new
/// AnalysisData entity.
/// </summary>
private void SaveAnalysisData(IAnalysisData<IAnalysisDataRow> ad, AnalysisInformation ai)
{
    var xs = ad.Data.Select(row => row.X).ToArray();
    var ys = ad.Data.Select(row => row.Y).ToArray();

    var entity = new AnalysisData(ad.XMeaning, xs, ad.YMeaning, ys, ai, this.controller);
    entity.Save();
}
/// <summary>
/// Returns true when cached analysis data exists and its dependent semantic version
/// can be reused for the project.
/// </summary>
private static bool CheckSemanticVersions(Project project, AnalysisData existingData, VersionArgument versions)
{
    // No cached data means nothing can be reused.
    return existingData != null
        && project.CanReusePersistedDependentSemanticVersion(versions.ProjectVersion, versions.DataVersion, existingData.DataVersion);
}
/// <summary>
/// Creates a segmenter over the given analysis data, exposing read-only views of the
/// detected changes and their frame indices.
/// </summary>
public Segmenter(AnalysisData analysis)
{
    this.analysis = analysis;
    _changes = new Dictionary<int, float>();
    _changeIndices = new List<int>();
    // Read-only wrappers track the backing collections as they are filled later.
    changes = new ReadOnlyDictionary<int, float>(_changes);
    changeIndices = _changeIndices.AsReadOnly();
}
/// <summary>
/// Returns the cached diagnostic items, or an empty array when no cached data exists.
/// </summary>
private static ImmutableArray<DiagnosticData> GetExistingItems(AnalysisData existingData)
    => existingData?.Items ?? ImmutableArray<DiagnosticData>.Empty;
/// <summary>
/// Converts the raw forecast (3-hourly points) into hourly AnalysisData entries by
/// pairing each forecast point with its metronorm reference row and also filling the
/// neighbouring hour(s) with the same temperature/cloud values.
/// </summary>
private static List<AnalysisData> ConvertToAnalyisWeatherData(Rootobject weatherData)
{
    var metronormDataList = GetMetronormData(); //month/day //01/01 1:00
    List<AnalysisData> weatherDataList = new List<AnalysisData>();
    foreach (var item in weatherData.list)
    {
        //2018-08-12 03:00:00
        // Normalize to "yyyy/MM/dd HH:mm": dashes become slashes, seconds are stripped.
        string dateTime = item.dt_txt.Replace('-', '/').Substring(0, item.dt_txt.Length - 3);
        string[] dateTimeComp = dateTime.Split(' ');
        string[] time = dateTimeComp[1].Split(':');
        int hour = int.Parse(time[0]);
        int previousHour = hour - 1;
        int nextHour = hour + 1;
        string previousHourString = previousHour.ToString();
        string nextHourString = nextHour.ToString();
        // Zero-pad single-digit hours so they match the metronorm key format.
        if (previousHour < 10)
        {
            previousHourString = "0" + previousHourString;
        }
        if (nextHour < 10)
        {
            nextHourString = "0" + nextHourString;
        }
        string previousHourDateTime = dateTimeComp[0] + " " + previousHourString + ":" + time[1];
        string nextHourDateTime = dateTimeComp[0] + " " + nextHourString.ToString() + ":" + time[1];
        float temperature = item.main.temp;
        int cloads = item.clouds.all;
        // Metronorm rows are keyed by "MM/dd HH:mm": Remove(0, 5) strips the "yyyy/" prefix.
        AnalysisData metronormDataThisHour = metronormDataList.Where(j => j.SiteDateTime == dateTime.Remove(0, 5)).First();
        if (hour == 0)
        {
            // First hour of the day: emit this hour and the next one.
            AnalysisData metronormDataNextHour = metronormDataList.Where(j => j.SiteDateTime == nextHourDateTime.Remove(0, 5)).First();
            weatherDataList.Add(GenerateAnalysisData(dateTime, temperature, cloads, metronormDataThisHour));
            weatherDataList.Add(GenerateAnalysisData(nextHourDateTime, temperature, cloads, metronormDataNextHour));
        }
        else if (hour == 23)
        {
            // Last hour of the day: emit the previous hour and this one.
            AnalysisData metronormDataPreviousHour = metronormDataList.Where(j => j.SiteDateTime == previousHourDateTime.Remove(0, 5)).First();
            weatherDataList.Add(GenerateAnalysisData(previousHourDateTime, temperature, cloads, metronormDataPreviousHour));
            weatherDataList.Add(GenerateAnalysisData(dateTime, temperature, cloads, metronormDataThisHour));
        }
        else
        {
            // Interior hour: emit previous, current and next hours.
            AnalysisData metronormDataNextHour = metronormDataList.Where(j => j.SiteDateTime == nextHourDateTime.Remove(0, 5)).First();
            AnalysisData metronormDataPreviousHour = metronormDataList.Where(j => j.SiteDateTime == previousHourDateTime.Remove(0, 5)).First();
            weatherDataList.Add(GenerateAnalysisData(previousHourDateTime, temperature, cloads, metronormDataPreviousHour));
            weatherDataList.Add(GenerateAnalysisData(dateTime, temperature, cloads, metronormDataThisHour));
            weatherDataList.Add(GenerateAnalysisData(nextHourDateTime, temperature, cloads, metronormDataNextHour));
        }
    }
    return(weatherDataList);
}
/// <summary>
/// Resolves a strategy by its code and runs the analysis, or returns null when the
/// code is unknown.
/// </summary>
public static Data.TradePoints Analysis(AnalysisData myData, string strategyCode)
{
    Meta meta = FindMetaByCode(strategyCode);
    return meta == null ? null : Analysis(myData, meta);
}
/// <summary>
/// GetDataFromStringObject should round-trip the X and Y series that were supplied
/// to the constructor.
/// </summary>
public void AnalysisDataGetDataStringFromObjectShouldReturnArrayFromStringObject()
{
    // Arrange
    var sut = new AnalysisData("Test", demoX, "Test", demoY, this.analysisInfoMock);

    // Act
    var xValues = sut.GetDataFromStringObject(DataType.X);
    var yValues = sut.GetDataFromStringObject(DataType.Y);

    // Assert
    Assert.That(demoX.SequenceEqual(xValues));
    Assert.That(demoY.SequenceEqual(yValues));
}
/// <summary>
/// Creates a view model for one analysis line. Whether the first move belongs to White
/// is derived from who opened the game and the parity of the first move's index.
/// </summary>
public AnalysisLineViewModel(SideColor gameStartedBy, AnalysisData analysisData, IList<MoveData> moves)
{
    this.gameStartedBy = gameStartedBy;
    this.analysisData = analysisData;
    this.Moves = moves;

    // White's moves sit at even indices when White opened the game, at odd ones when Black did.
    bool firstIndexIsEven = moves.First().Index % 2 == 0;
    this.isWhiteFirstMove =
        (gameStartedBy == SideColor.White && firstIndexIsEven) ||
        (gameStartedBy == SideColor.Black && !firstIndexIsEven);
}
/// <summary>
/// Returns true when cached document data exists and was produced from exactly this
/// text/data version pair.
/// </summary>
private bool CanUseDocumentState(AnalysisData existingData, VersionStamp textVersion, VersionStamp dataVersion)
{
    if (existingData == null)
    {
        return false;
    }

    // make sure data stored in the cache is one from its previous text update
    bool sameData = existingData.DataVersion.Equals(dataVersion);
    bool sameText = existingData.TextVersion.Equals(textVersion);
    return sameData && sameText;
}
/// <summary>
/// Appends diagnostics whose id is in DiagnosticIds (a null set means "keep all"),
/// additionally filtered by <paramref name="predicateOpt"/> when supplied.
/// </summary>
protected override void FilterDiagnostics(AnalysisData analysisData, Func<DiagnosticData, bool> predicateOpt)
{
    var filtered = analysisData.Items.Where(d => this.DiagnosticIds == null || this.DiagnosticIds.Contains(d.Id));
    if (predicateOpt != null)
    {
        filtered = filtered.Where(predicateOpt);
    }

    AppendDiagnostics(filtered);
}
/// <summary>
/// Returns true when cached data exists and both its text version and syntax tree
/// version can be reused for the document.
/// </summary>
private static bool CheckSyntaxVersions(Document document, AnalysisData existingData, VersionArgument versions)
{
    // Short-circuits: no cached data, or stale text, means nothing is reusable.
    return existingData != null
        && document.CanReusePersistedTextVersion(versions.TextVersion, existingData.TextVersion)
        && document.CanReusePersistedSyntaxTreeVersion(versions.DataVersion, existingData.DataVersion);
}
/// <summary>
/// Intentionally a no-op: this implementation does not inspect filtered diagnostics.
/// </summary>
protected override void FilterDiagnostics(AnalysisData analysisData)
{
}
/// <summary>
/// Returns the cached diagnostic items, or an empty array when no cached data exists.
/// </summary>
private static ImmutableArray<DiagnosticData> GetExistingDiagnostics(AnalysisData analysisData)
    => analysisData == null ? ImmutableArray<DiagnosticData>.Empty : analysisData.Items;
/// <summary>
/// Returns true when cached data exists, its persisted text version is reusable, and
/// the project's dependent semantic version can be reused.
/// </summary>
private static bool CheckSemanticVersions(Project project, AnalysisData existingData, VersionArgument versions)
{
    // Short-circuits left to right: missing data or a stale text version fails fast.
    return existingData != null
        && VersionStamp.CanReusePersistedVersion(versions.TextVersion, existingData.TextVersion)
        && project.CanReusePersistedDependentSemanticVersion(versions.ProjectVersion, versions.DataVersion, existingData.DataVersion);
}
/// <summary>
/// Returns up to <paramref name="numTests"/> tests for the patient (0 means "all"),
/// each with its analysis components copied into DTOs.
/// </summary>
/// <param name="numTests">Maximum number of tests to return; 0 returns every test.</param>
/// <param name="patient">Patient whose tests are fetched.</param>
/// <exception cref="UnauthorizedAccessException">Thrown when the caller is not authenticated.</exception>
/// <remarks>
/// Fixes over the original: (1) a single Test and AnalysisData instance were reused and
/// re-added each iteration, so every list entry aliased the same mutated object; (2) no
/// branch handled numTests == testList.Count, silently returning an empty list; (3) the
/// isNormal flag was populated in only one branch. Each entry is now an independent
/// object and isNormal is always copied.
/// </remarks>
public List<Test> getTests(Int32 numTests, PatientObject patient)
{
    if (!Authenticated_AorC())
    {
        throw new UnauthorizedAccessException("You are not authorized to perform this action!");
    }

    try
    {
        Movement.Database.Patient p1 = new Movement.Database.Patient(patient.ID);
        ReadOnlyCollection<Movement.Database.Test> testList = p1.GetAllTests();

        // 0 means "all tests"; otherwise cap at however many are available.
        int count = numTests == 0 ? testList.Count : Math.Min(numTests, testList.Count);

        List<Test> tests = new List<Test>();
        for (int i = 0; i < count; i++)
        {
            tests.Add(BuildTestDto(testList[i]));
        }
        return tests;
    }
    catch (Exception e)
    {
        // Preserve original best-effort behavior: log and return an empty result.
        Log(e);
        return new List<Test>();
    }
}

// Copies one database test (including its analysis components) into a fresh DTO.
private static Test BuildTestDto(Movement.Database.Test source)
{
    Test test = new Test();
    test.anal = new List<AnalysisData>();
    test.hand = source.Hand;
    test.mode = source.Mode;
    test.ID = source.TestID;
    test.script.scriptID = source.TestScript.ScriptID;
    test.timestamp = source.Timestamp;
    test.rotation = source.Rotation;
    test.script.type = source.TestScript.ScriptType.Name;
    test.isNormal = source.AnalysisIsNormal;

    foreach (KeyValuePair<Movement.Analysis.AnalysisMetric, Movement.Database.TestAnalysisComponent> data in source.Analysis.Components)
    {
        // A fresh AnalysisData per component — entries must not share state.
        AnalysisData d = new AnalysisData();
        d.metric = data.Key;
        d.max = data.Value.Max;
        d.min = data.Value.Min;
        d.mean = data.Value.Mean;
        d.stdDev = data.Value.StdDev;
        test.anal.Add(d);
    }

    return test;
}
/// <summary>
/// Replaces all persisted diagnostics for the project: clears existing project- and
/// document-level entries, then persists the new items grouped by document
/// (a null DocumentId group holds project-scoped diagnostics).
/// </summary>
private static async Task PersistProjectData(Project project, DiagnosticState state, AnalysisData data)
{
    // TODO: Cancellation is not allowed here to prevent data inconsistency. But there is still a possibility of data inconsistency due to
    // things like exception. For now, I am letting it go and let v2 engine take care of it properly. If v2 doesnt come online soon enough
    // more refactoring is required on project state.

    // clear all existing data
    state.Remove(project.Id);
    foreach (var document in project.Documents)
    {
        state.Remove(document.Id);
    }

    // quick bail out
    if (data.Items.Length == 0)
    {
        return;
    }

    // save new data
    var group = data.Items.GroupBy(d => d.DocumentId);
    foreach (var kv in group)
    {
        if (kv.Key == null)
        {
            // save project scope diagnostics
            await state.PersistAsync(project, new AnalysisData(data.TextVersion, data.DataVersion, kv.ToImmutableArrayOrEmpty()), CancellationToken.None).ConfigureAwait(false);
            continue;
        }

        // save document scope diagnostics
        var document = project.GetDocument(kv.Key);
        if (document == null)
        {
            // the document may no longer exist in the project; skip its diagnostics
            continue;
        }

        await state.PersistAsync(document, new AnalysisData(data.TextVersion, data.DataVersion, kv.ToImmutableArrayOrEmpty()), CancellationToken.None).ConfigureAwait(false);
    }
}
/// <summary>
/// Returns true when cached data exists and both its persisted text version and its
/// syntax tree version are reusable for the document.
/// </summary>
private static bool CheckSyntaxVersions(Document document, AnalysisData existingData, VersionArgument versions)
{
    if (existingData == null)
    {
        return false;
    }

    if (!document.CanReusePersistedTextVersion(versions.TextVersion, existingData.TextVersion))
    {
        return false;
    }

    return document.CanReusePersistedSyntaxTreeVersion(versions.DataVersion, existingData.DataVersion);
}
/// <summary>
/// Appends diagnostics whose id is in DiagnosticIds; a null set means no filtering.
/// </summary>
protected override void FilterDiagnostics(AnalysisData analysisData)
{
    bool Matches(DiagnosticData d) => this.DiagnosticIds == null || this.DiagnosticIds.Contains(d.Id);

    AppendDiagnostics(analysisData.Items.Where(Matches));
}
/// <summary>
/// Returns the names of the heuristics that match the given data.
/// (Redundant "== true" comparison on the boolean result removed.)
/// </summary>
private static IEnumerable<string> Run(IEnumerable<IHeuristic> heuristics, AnalysisData data) =>
    heuristics
        .Where(x => x.GetResult(data))
        .Select(x => x.Name);
/// <summary>
/// Returns true when cached document data exists and was produced from exactly this
/// text/data version pair (i.e. the cache matches the previous text update).
/// </summary>
private bool CanUseDocumentState(AnalysisData existingData, VersionStamp textVersion, VersionStamp dataVersion)
{
    return existingData != null
        && existingData.DataVersion.Equals(dataVersion)
        && existingData.TextVersion.Equals(textVersion);
}
/// <summary>
/// Applies derived-type-specific filtering to the diagnostics in
/// <paramref name="analysisData"/>.
/// </summary>
protected abstract void FilterDiagnostics(AnalysisData analysisData);
/// <summary>
/// Merges freshly computed diagnostics for one member into the existing document
/// diagnostics: items before the member are kept as-is, the member's old items are
/// replaced by <paramref name="memberDiagnostics"/>, and items after the member are
/// shifted by the member's size delta.
/// </summary>
private ImmutableArray<DiagnosticData> UpdateDocumentDiagnostics(
    AnalysisData existingData, ImmutableArray<TextSpan> range, ImmutableArray<DiagnosticData> memberDiagnostics,
    SyntaxTree tree, SyntaxNode member, int memberId)
{
    // get old span
    var oldSpan = range[memberId];

    // get old diagnostics
    var diagnostics = existingData.Items;

    // check quick exit cases
    if (diagnostics.Length == 0 && memberDiagnostics.Length == 0)
    {
        return diagnostics;
    }

    // simple case
    if (diagnostics.Length == 0 && memberDiagnostics.Length > 0)
    {
        return memberDiagnostics;
    }

    // regular case
    var result = new List<DiagnosticData>();

    // update member location
    Contract.Requires(member.FullSpan.Start == oldSpan.Start);
    // how far text after the member moved as a result of the edit
    var delta = member.FullSpan.End - oldSpan.End;

    var replaced = false;
    foreach (var diagnostic in diagnostics)
    {
        // diagnostics strictly before the member are unaffected
        if (diagnostic.TextSpan.Start < oldSpan.Start)
        {
            result.Add(diagnostic);
            continue;
        }

        // splice in the new member diagnostics at the first position at/after the member
        if (!replaced)
        {
            result.AddRange(memberDiagnostics);
            replaced = true;
        }

        // diagnostics after the member keep their identity but shift by delta;
        // diagnostics inside the old member span are dropped (superseded above)
        if (oldSpan.End <= diagnostic.TextSpan.Start)
        {
            result.Add(UpdatePosition(diagnostic, tree, delta));
            continue;
        }
    }

    // if it haven't replaced, replace it now
    if (!replaced)
    {
        result.AddRange(memberDiagnostics);
        replaced = true; // (not read afterwards; kept for symmetry with the loop above)
    }

    return result.ToImmutableArray();
}
/// <summary>
/// Evaluates this heuristic's predicate against the given data.
/// </summary>
public bool GetResult(AnalysisData data)
{
    return this.analysisFunc(data);
}
/// <summary>
/// Applies derived-type-specific filtering to the diagnostics, optionally restricted
/// further by <paramref name="predicateOpt"/>.
/// </summary>
protected abstract void FilterDiagnostics(AnalysisData analysisData, Func<DiagnosticData, bool> predicateOpt = null);
/// <summary>
/// Appends diagnostics whose id is in DiagnosticIds (null means "keep all"),
/// additionally filtered by <paramref name="predicateOpt"/> when one is supplied.
/// </summary>
protected override void FilterDiagnostics(AnalysisData analysisData, Func<DiagnosticData, bool> predicateOpt)
{
    bool MatchesId(DiagnosticData d) => this.DiagnosticIds == null || this.DiagnosticIds.Contains(d.Id);

    var byId = analysisData.Items.Where(MatchesId);
    AppendDiagnostics(predicateOpt == null ? byId : byId.Where(predicateOpt));
}
/// <summary>
/// Intentionally empty: this implementation ignores filtered diagnostics entirely.
/// </summary>
protected override void FilterDiagnostics(AnalysisData analysisData, Func<DiagnosticData, bool> predicateOpt = null)
{
}