/// <summary>
/// Save signals for a specific container in a queue. This is done in scenario where the container
/// is not yet ready to accept signals.
/// </summary>
/// <param name="analysis">Container the signal is destined for.</param>
/// <param name="data">Signal to park until the container can accept it.</param>
/// <returns>Task tracking the persist of the updated queue.</returns>
/// <exception cref="SignalAlreadyPresentException">The signal is already queued for this container.</exception>
private Task SaveToQueueAsync(AnalysisContainer analysis, ScenarioData data)
{
    Assert.IsNotNull(data, "data");
    var key = analysis.GetUniqueIdentity();

    // BUG FIX: the old TryGetValue-then-GetOrAdd shape had a race where GetOrAdd
    // could return a queue created by another thread and 'data' was never enqueued
    // (the old code acknowledged this in a comment). Getting the queue up front and
    // always enqueuing into whichever queue is actually in the map closes that window.
    var existingQueue = this.signalWaitingToBeProcessedMap.GetOrAdd(key, _ => new ConcurrentQueue<ScenarioData>());

    if (existingQueue.Contains(data))
    {
        throw new SignalAlreadyPresentException(
            string.Format(CultureInfo.InvariantCulture, "Save Queue. Analysis: {0}, Data: {1}", analysis, data));
    }

    existingQueue.Enqueue(data);

    // Persist it.
    return this.signalWaitingToBeProcessedStoreInstance.SetEntityAsync(
        data.GetUniqueIdentity(),
        HandyUtil.Serialize(existingQueue),
        this.CancelToken);
}
/// <summary>
/// Check if a container is in position to accept updates.
/// </summary>
/// <remarks>
/// Updates can only be accepted once main analysis is finished. This routine checks that.
/// </remarks>
/// <param name="analysis">Container whose readiness is being checked.</param>
/// <returns>true when the container can accept update events.</returns>
private async Task<bool> IsAnalysisContainerInPositionToAcceptEventsAsync(AnalysisContainer analysis)
{
    Assert.IsNotNull(analysis, "analysis");

    var metadata = await this.analysisMetadataObjectStore
        .GetTypedObjectAsync(analysis.GetUniqueIdentity())
        .ConfigureAwait(false);

    var scheduling = metadata.SchedulingInfo;

    // Only a suspended analysis that is parked waiting for interest can take updates.
    return scheduling.CurrentStatus == AnalysisStatus.Suspended
        && scheduling.GetContinuationType() == ContinuationType.WaitForInterest;
}
/// <summary>
/// Entry point for an Analysis container to Begin. This is true for the main analysis as well
/// as the update to an existing analysis container.
/// </summary>
/// <param name="analysis">Container to schedule.</param>
/// <returns>Task tracking the scheduling operation.</returns>
/// <exception cref="InvalidOperationException">The dataflow pipeline declined the container.</exception>
private async Task ScheduleAnalysisAsync(AnalysisContainer analysis)
{
    Assert.IsNotNull(analysis, "analysis");
    var key = analysis.GetUniqueIdentity();
    this.Logger.LogMessage("ScheduleAnalysisAsync : Key : {0}", key);

    // Get the Associated Analysis Metadata.
    var analysisMetadata = await this.analysisMetadataObjectStore.GetOrAddTypedObjectAsync(key, new AnalysisMetadata(key)).ConfigureAwait(false);

    if (analysis.InterestFilter != null)
    {
        analysisMetadata.HasRegisteredInterest = true;
        await this.analysisMetadataObjectStore.PersistTypedObjectAsync(analysisMetadata).ConfigureAwait(false);
    }

    this.AssertIfComputePipelineInFaultedState();

    // Push the signal into our dataflow pipeline.
    var result = await this.signalConsumeDataFlow.SendAsync(analysis, this.CancelToken).ConfigureAwait(false);
    if (!result)
    {
        // Build the message once; the old code formatted the identical string twice.
        var message = string.Format(CultureInfo.InvariantCulture, "Failed to Process Analysis: '{0}'", analysis);
        this.Logger.LogWarning(message);

        // InvalidOperationException is more specific than the bare Exception previously
        // thrown, and is still observed by any existing catch (Exception) handler.
        throw new InvalidOperationException(message);
    }
}
/// <summary>
/// Resume a previously suspended analysis: mark it queued in its scheduling metadata,
/// persist that state, then push the container back through the normal scheduling path.
/// </summary>
/// <param name="key">Unique identity of the analysis being resumed.</param>
/// <param name="analysis">The analysis container to resume.</param>
private async Task ResumeAnalysisAsync(Guid key, AnalysisContainer analysis)
{
    // Fixed log typo ("ProgressMarkder" -> "ProgressMarker").
    this.Logger.LogMessage("ResumeAnalysisAsync:: Resuming Analysis Key:{0}, Value: {1}, ProgressMarker: {2}", key, analysis, analysis.GetProgressedTill());

    var metadata = await this.analysisMetadataObjectStore.GetTypedObjectAsync(key).ConfigureAwait(false);
    metadata.SchedulingInfo.MarkQueued();

    // Persist the updated metadata. ConfigureAwait(false) added for consistency
    // with the rest of this class (library code, no sync-context dependence).
    await this.analysisMetadataObjectStore.PersistTypedObjectAsync(metadata).ConfigureAwait(false);

    // Schedule the analysis.
    await this.ScheduleAnalysisAsync(analysis).ConfigureAwait(false);
}
/// <summary>
/// Unity lifecycle hook: resolves the sentence provider if needed, fetches the
/// analyzed sentence container and logs every category with its match count.
/// </summary>
private void Start()
{
    // Lazily resolve the provider the first time this component starts.
    if (iGetSentence == null)
    {
        iGetSentence = objSentence.GetComponent<IGetSentence>();
    }

    analysisContainer = iGetSentence.GetSentence();

    // Dump each category name and its count to the console.
    foreach (var category in analysisContainer.categoryRetList)
    {
        Debug.Log("名前:" + category.categoryName + " 数 :" + category.thisNameNum);
    }
}
/// <summary>
/// Analysis method: for each category, counts how many times any of that
/// category's dictionary words occur across the supplied strings.
/// </summary>
/// <param name="str">Lines of text to scan.</param>
/// <returns>A container holding one (category name, match count) entry per category.</returns>
public AnalysisContainer Analysis(string[] str)
{
    AnalysisContainer testContainer = new AnalysisContainer();

    for (int index = 0; index < categoryData.Length; index++)
    {
        // Fetch this category's dictionary of words.
        dictionaly = categoryData[index].ReturnDictionary();

        int count = 0;
        for (int i = 0; i < dictionaly.Length; i++)
        {
            for (int j = 0; j < str.Length; j++)
            {
                // Count every occurrence of the dictionary word in this line.
                // (MatchCollection.Count replaces the old foreach that incremented one-by-one.)
                count += Regex.Matches(str[j], dictionaly[i]).Count;
            }
        }

        Debug.Log(categoryData[index] + " は" + count + "個ありました");

        // BUG FIX: the result object used to be created once outside the loop, so if
        // CategoryRet is a reference type every list entry aliased the same instance
        // and ended up with the last category's values. Create a fresh one per category.
        AnalysisContainer.CategoryRet categoryRet = new AnalysisContainer.CategoryRet();
        // Store the matched category name.
        categoryRet.categoryName = categoryData[index].getCategoryName;
        // Store how many times this category's words matched.
        categoryRet.thisNameNum = count;

        // Add the entry to the result container.
        testContainer.categoryRetList.Add(categoryRet);
    }

    return testContainer;
}
/// <summary>
/// Book-keeping performed just before an analysis container is handed to its agent:
/// marks the scheduling state as started and records the invocation time.
/// </summary>
/// <param name="analysis">Container about to be analyzed.</param>
private async Task PreAnalysisActionsAsync(AnalysisContainer analysis)
{
    var key = analysis.GetUniqueIdentity();

    // ConfigureAwait(false) added for consistency with the rest of this class.
    var analysisMetadata = await this.analysisMetadataObjectStore.GetTypedObjectAsync(key).ConfigureAwait(false);
    analysisMetadata.SchedulingInfo.MarkStarted();

    // Note: we deliberately do not increment the analysis-attempted count when this
    // invocation is merely an update (ContinuationType.WaitForInterest).
    analysisMetadata.LastInvokedTime = DateTimeOffset.UtcNow;

    await this.analysisMetadataObjectStore.PersistTypedObjectAsync(analysisMetadata).ConfigureAwait(false);
}
/// <inheritdoc />
public override Task<Continuation> DoAnalysisAsync(AnalysisContainer reconfigAnalysis)
{
    Assert.IsNotNull(reconfigAnalysis, "We expect Analysis of Type ReconfigInstanceAnalysisDetails");
    this.Logger.LogMessage("Current State {0}", reconfigAnalysis.GetProgressedTill());

    // Single-stage analysis: populate the duration once, then finish.
    if (reconfigAnalysis.GetProgressedTill() == ProgressTracker.NotStarted)
    {
        this.Logger.LogMessage("DoAnalysisAsync:: Populating Duration");
        this.PopulateStartEndTimes((ReconfigurationAnalysisEvent)reconfigAnalysis.AnalysisEvent);
        reconfigAnalysis.SetProgressedTill(ProgressTracker.Finished);
    }

    if (reconfigAnalysis.GetProgressedTill() == ProgressTracker.Finished)
    {
        return Task.FromResult(Continuation.Done);
    }

    // InvalidOperationException is more specific than the bare Exception previously thrown
    // and is still caught by any existing catch (Exception) handler.
    throw new InvalidOperationException(
        string.Format(CultureInfo.InvariantCulture, "Progress Stage {0} not Valid", reconfigAnalysis.GetProgressedTill()));
}
/// <summary>
/// Runs the registered analyzers against the file and returns the result of the
/// first analyzer that understands the format.
/// </summary>
/// <param name="fileName">Path of the file to analyze.</param>
/// <returns>Container describing the analysis; Scannable stays false when no analyzer matched.</returns>
/// <exception cref="ArgumentNullException">fileName is null or empty.</exception>
/// <exception cref="FileNotFoundException">fileName does not exist.</exception>
public AnalysisContainer Analyze(string fileName)
{
    if (string.IsNullOrEmpty(fileName))
    {
        throw new ArgumentNullException(nameof(fileName), "FileName argument was null");
    }

    if (!File.Exists(fileName))
    {
        throw new FileNotFoundException($"File {fileName} was not found");
    }

    using var stream = File.OpenRead(fileName);
    var container = new AnalysisContainer
    {
        Analysis = new Dictionary<string, List<string>>(),
        FileType = null,
        Scannable = false
    };

    foreach (var analyzer in _analyzers)
    {
        // Each analyzer expects to read from the start; a previously failed
        // analyzer may have left the stream part-way through.
        stream.Position = 0;

        try
        {
            // BUG FIX: run the analyzer first, then record success. The old code set
            // Scannable/FileType *before* calling Analyze, so a throwing analyzer left
            // the container claiming success with a stale FileType and no Analysis.
            var analysis = analyzer.Analyze(stream);
            container.Scannable = true;
            container.FileType = analyzer.Name;
            container.Analysis = analysis;
            break;
        }
        catch (Exception)
        {
            // Assume it just wasn't the proper type
        }
    }

    return container;
}
/// <summary>
/// Runs a single analysis pass: performs pre-analysis book-keeping, dispatches the
/// container to its agent (main analysis or update depending on continuation type),
/// then performs post-analysis actions carrying any captured exception.
/// </summary>
/// <param name="analysis">Container to analyze.</param>
private async Task KickOffAnalysisAsync(AnalysisContainer analysis)
{
    await this.PreAnalysisActionsAsync(analysis).ConfigureAwait(false);

    this.CancelToken.ThrowIfCancellationRequested();

    var key = analysis.GetUniqueIdentity();

    // ConfigureAwait(false) added; this await was the only one in the method without it.
    var schedulingInfo = (await this.analysisMetadataObjectStore.GetTypedObjectAsync(key).ConfigureAwait(false)).SchedulingInfo;

    Continuation continuation = null;
    ExceptionDispatchInfo dispatchInfo = null;
    try
    {
        Assert.IsNotNull(AgentDirectory.SingleInstance, "AgentDirectory.SingleInstance != null");
        var agent = AgentDirectory.SingleInstance.GetOrCreateAgentInstance(analysis.Agent);

        if (schedulingInfo.GetContinuationType() == ContinuationType.WaitForInterest)
        {
            this.Logger.LogMessage("KickOffAnalysisAsync:: Launching Update. Key : {0}", key);
            continuation = await agent.UpdateAnalysisAsync(analysis).ConfigureAwait(false);
        }
        else
        {
            this.Logger.LogMessage("KickOffAnalysisAsync:: Calling Main Analysis. Key : {0}", key);
            continuation = await agent.DoAnalysisAsync(analysis).ConfigureAwait(false);
        }

        this.Logger.LogMessage("KickOffAnalysisAsync:: Continuation : {0} Analysis {1}, key: {2}", continuation, analysis, key);
    }
    catch (Exception exp)
    {
        // Capture rather than rethrow so post-analysis actions always run; the
        // exception is re-thrown later with its original stack trace preserved.
        this.Logger.LogMessage("KickOffAnalysisAsync:: Exception {0} Encountered while Analysing Container: {1}", exp, analysis);
        dispatchInfo = ExceptionDispatchInfo.Capture(exp);
    }

    await this.PostAnalysisActionsAsync(analysis, continuation, dispatchInfo).ConfigureAwait(false);
}
} // end function

/// <summary>
/// Writes all analysis frames of the given analysis to a delimited text file
/// (tab-separated, despite the _Excel suffix). If the file already exists the
/// user is prompted (MessageBox) before it is overwritten.
/// </summary>
/// <param name="filename">Destination path; must not be null.</param>
/// <param name="analysis">Analysis whose frames are exported (AnalysisID is used for the DB query).</param>
/// <returns>true on success; on failure m_lastErrorString holds the reason.</returns>
public bool WriteExperimentFile_Excel(string filename, AnalysisContainer analysis)
{
    if (filename == null)
    {
        m_lastErrorString = "Filename == null";
        return(false);
    }

    bool success = true;

    // Existing file: ask the user whether to overwrite; No aborts with an error string set.
    if (File.Exists(filename))
    {
        MessageBoxResult result = MessageBox.Show("Files for: " + filename + " already exists! Do you want to over write it?",
            "File Already Exists", MessageBoxButton.YesNo, MessageBoxImage.Exclamation);
        switch (result)
        {
            case MessageBoxResult.Yes:
                File.Delete(filename);
                break;

            case MessageBoxResult.No:
                success = false;
                m_lastErrorString = "File already exists";
                break;
        }
    }

    if (success)
    {
        try
        {
            string delimiter = "\t"; // \t = tab

            // CreateNew is safe here: the pre-existing file (if any) was deleted above.
            using (FileStream fs = new FileStream(filename, FileMode.CreateNew, FileAccess.Write))
            {
                using (StreamWriter sw = new StreamWriter(fs))
                {
                    // START write column headers
                    sw.Write("Time" + delimiter);
                    StringBuilder builder = new StringBuilder();
                    for (int r = 0; r < m_plateType.Rows; r++)
                    {
                        for (int c = 0; c < m_plateType.Cols; c++)
                        {
                            // Well label: row letter ('A' == 65 + 0) followed by 1-based column number.
                            builder.Append((char)(65 + r)).Append(c + 1).Append(delimiter);
                        }
                    }
                    builder.Remove(builder.Length - delimiter.Length, delimiter.Length); // remove last delimiter
                    sw.WriteLine(builder.ToString());
                    // END write column headers

                    // START writing data frames
                    success = m_wgDB.GetAllAnalysisFramesForAnalysis(analysis.AnalysisID);
                    if (success)
                    {
                        foreach (AnalysisFrameContainer aFrame in m_wgDB.m_analysisFrameList)
                        {
                            string timeString = aFrame.SequenceNumber.ToString();
                            sw.Write(timeString + delimiter);

                            // NOTE(review): each value is followed by the delimiter, so data rows end
                            // with a trailing tab while the header row does not — confirm downstream
                            // consumers tolerate this asymmetry before changing it.
                            string[] values = aFrame.ValueString.Split(',');
                            foreach (string val in values)
                            {
                                sw.Write(val + delimiter);
                            }
                            sw.WriteLine("");
                        }
                    }
                    // END writing data frames
                } // END using StreamWriter
            } // END using FileStream
        } // end try
        catch (Exception e)
        {
            // Any IO/DB failure is reported through the error string, not rethrown.
            success = false;
            m_lastErrorString = e.Message;
        }
    }

    return(success);
} // end function
/// <inheritdoc />
public override async Task<Continuation> DoAnalysisAsync(AnalysisContainer analysis)
{
    // Every stage operates on the same event instance; cast once up front. A direct
    // cast (instead of the previous repeated 'as' + unchecked dereference) fails fast
    // with InvalidCastException rather than a later NullReferenceException.
    var moveEvent = (PrimaryMoveAnalysisEvent)analysis.AnalysisEvent;

    switch (analysis.GetProgressedTill())
    {
        case ProgressTracker.NotStarted:
        {
            moveEvent.Reason = PrimaryMoveReason.Unknown;
            var reconfigRecord = moveEvent.TriggerReconfigurationCompletedTraceRecord;

            moveEvent.PreviousPrimaryContext = await this.primaryReplicaContextStore.GetPrimaryReplicaContextAsync(reconfigRecord.PartitionId).ConfigureAwait(false);
            if (moveEvent.PreviousPrimaryContext == null)
            {
                this.Logger.LogWarning("PreviousPrimaryContext is null, cannot perform PrimaryMoveAnalysis.");
                analysis.SetProgressedTill(ProgressTracker.Finished);
                return Continuation.Done;
            }

            // Note: the old code null-checked CurrentPrimaryContext immediately after
            // the 'new' below; that branch was unreachable and has been removed.
            moveEvent.CurrentPrimaryContext = new PrimaryReplicaContext(reconfigRecord.PartitionId, reconfigRecord.NodeName, reconfigRecord.NodeInstanceId, reconfigRecord.TimeStamp.Ticks);

            // CurrentPrimaryContext becomes the PreviousPrimaryContext for the next analysis
            await this.primaryReplicaContextStore.SavePrimaryReplicaContextAsync(moveEvent.CurrentPrimaryContext).ConfigureAwait(false);

            analysis.SetProgressedTill(ProgressTracker.Checkpoint1);
            return Continuation.ResumeImmediately;
        }

        case ProgressTracker.Checkpoint1:
        {
            var reconfigType = moveEvent.TriggerReconfigurationCompletedTraceRecord.ReconfigType;
            if (reconfigType == ReconfigurationType.Failover)
            {
                moveEvent.Reason = PrimaryMoveReason.Failover;
                analysis.SetProgressedTill(ProgressTracker.Checkpoint2);
                return Continuation.ResumeImmediately;
            }

            if (reconfigType == ReconfigurationType.SwapPrimary)
            {
                moveEvent.Reason = PrimaryMoveReason.SwapPrimary;
                analysis.SetProgressedTill(ProgressTracker.Checkpoint3);
                return Continuation.ResumeImmediately;
            }

            // Any other reconfiguration type falls through to the throw below
            // (same behavior as the original if/else chain).
            break;
        }

        case ProgressTracker.Checkpoint2:
        {
            bool dueToNodeDown = await this.AnalyzeNodeDownAsync(moveEvent).ConfigureAwait(false);
            if (!dueToNodeDown)
            {
                analysis.SetProgressedTill(ProgressTracker.Checkpoint4);
                return Continuation.ResumeImmediately;
            }

            analysis.SetProgressedTill(ProgressTracker.Finished);
            moveEvent.AnalysisEndTimeStamp = DateTime.UtcNow;
            return Continuation.Done;
        }

        case ProgressTracker.Checkpoint3:
        {
            await this.AnalyzeCRMOperationAsync(moveEvent).ConfigureAwait(false);
            analysis.SetProgressedTill(ProgressTracker.Finished);
            moveEvent.AnalysisEndTimeStamp = DateTime.UtcNow;
            return Continuation.Done;
        }

        case ProgressTracker.Checkpoint4:
        {
            var stateChangeRecords = await this.primaryMoveAnalysisQueryStoreReader.GetReplicaStateChangeTraceRecordsAsync(moveEvent).ConfigureAwait(false);
            if (stateChangeRecords == null || !stateChangeRecords.Any())
            {
                this.Logger.LogWarning("No replica closing traces found with duration {0}, cannot perform further analysis.", moveEvent.GetDuration());
                analysis.SetProgressedTill(ProgressTracker.Finished);
                return Continuation.Done;
            }

            // Hoist First(): the old code evaluated it four separate times.
            var firstRecord = stateChangeRecords.First();
            moveEvent.ReasonActivityId = firstRecord.ReasonActivityId;
            moveEvent.ReasonActivityType = firstRecord.ReasonActivityType;
            moveEvent.AddCorrelatedTraceRecordRange(stateChangeRecords);

            if (firstRecord.ReasonActivityType == ActivityType.ServicePackageEvent)
            {
                moveEvent.Reason = PrimaryMoveReason.ApplicationHostDown;
                analysis.SetProgressedTill(ProgressTracker.Checkpoint5);
                return Continuation.ResumeImmediately;
            }

            if (firstRecord.ReasonActivityType == ActivityType.ClientReportFaultEvent || firstRecord.ReasonActivityType == ActivityType.ServiceReportFaultEvent)
            {
                // TODO: Break report fault analysis into two separate analyses because ReplicaStateChange already shows which one of the two happened
                moveEvent.Reason = PrimaryMoveReason.ClientApiReportFault;
                analysis.SetProgressedTill(ProgressTracker.Checkpoint6);
                return Continuation.ResumeImmediately;
            }

            // Unrecognized activity type falls through to the throw below (as before).
            break;
        }

        case ProgressTracker.Checkpoint5:
        {
            await this.AnalyzeAppHostDownAsync(moveEvent).ConfigureAwait(false);
            analysis.SetProgressedTill(ProgressTracker.Finished);
            moveEvent.AnalysisEndTimeStamp = DateTime.UtcNow;
            return Continuation.Done;
        }

        case ProgressTracker.Checkpoint6:
        {
            await this.AnalyzeReportFaultAsync(moveEvent).ConfigureAwait(false);
            analysis.SetProgressedTill(ProgressTracker.Finished);
            moveEvent.AnalysisEndTimeStamp = DateTime.UtcNow;
            return Continuation.Done;
        }
    }

    // More specific than the bare Exception previously thrown; still caught by catch (Exception).
    throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, "Progress Stage {0} not Valid", analysis.GetProgressedTill()));
}
/// <summary>
/// Update an Analysis. This routine will be called whenever we see new data
/// that an analysis has expressed interest in. The routine is only called once
/// the main analysis is finished.
/// </summary>
/// <param name="analysis">Container being updated with the newly seen data.</param>
/// <returns>
/// The continuation decision. The base implementation is a no-op that always
/// reports Done; derived agents override this to perform real update work.
/// </returns>
public virtual Task <Continuation> UpdateAnalysisAsync(AnalysisContainer analysis)
{
    return(Task.FromResult(Continuation.Done));
}
/// <summary>
/// Do Analysis. Implemented by each agent to perform its main analysis pass
/// over the supplied container.
/// </summary>
/// <param name="analysis">Container describing the analysis to perform.</param>
/// <returns>Continuation indicating whether/when the scheduler should resume this analysis.</returns>
public abstract Task <Continuation> DoAnalysisAsync(AnalysisContainer analysis);
/// <summary>
/// Book-keeping after an agent has run an analysis stage: records the outcome on the
/// container, persists it, updates or deletes the scheduling metadata, notifies the
/// interested consumers, and finally re-throws any exception captured during analysis.
/// </summary>
/// <param name="analysis">Container that was just analyzed.</param>
/// <param name="continuation">Continuation the agent returned (null when it threw).</param>
/// <param name="expDispatchInfo">Captured exception from the analysis, if any.</param>
private async Task PostAnalysisActionsAsync(AnalysisContainer analysis, Continuation continuation, ExceptionDispatchInfo expDispatchInfo)
{
    var key = analysis.GetUniqueIdentity();
    var analysisMetadata = await this.analysisMetadataObjectStore.GetTypedObjectAsync(key).ConfigureAwait(false);
    analysisMetadata.SchedulingInfo.StartUse();

    // if we have encountered an exception while processing this container, mark as failed.
    if (expDispatchInfo != null)
    {
        analysis.AddExceptionSeen(expDispatchInfo.SourceException);
        analysisMetadata.SchedulingInfo.MarkFailed();
    }
    else
    {
        analysisMetadata.SchedulingInfo.SetContinuation(continuation);
    }

    analysis.SetAnalysisStatus(analysisMetadata.SchedulingInfo.CurrentStatus);
    await this.analysisContainerObjectStore.PersistTypedObjectAsync(analysis).ConfigureAwait(false);

    analysisMetadata.SchedulingInfo.StopUse();

    // We delete the object from persisted store once the analysis is finished (Completed or Failed).
    if (analysisMetadata.SchedulingInfo.CurrentStatus == AnalysisStatus.Completed || analysisMetadata.SchedulingInfo.CurrentStatus == AnalysisStatus.Failed)
    {
        await this.analysisMetadataObjectStore.DeletedTypedObjectAsync(analysisMetadata).ConfigureAwait(false);
    }
    else
    {
        // Persist it. ConfigureAwait(false) added; this await was missing it.
        await this.analysisMetadataObjectStore.PersistTypedObjectAsync(analysisMetadata).ConfigureAwait(false);
    }

    // TryGetValue avoids the ContainsKey + indexer double lookup of the old code.
    if (this.consumerMap.TryGetValue(analysis.Agent, out var consumers))
    {
        // TODO: Today we expect analysisConsumer to be very light weight. In future, we can potentially
        // post them in a new Task.
        foreach (var oneConsumer in consumers)
        {
            // Successful finish: notify consumers registered for Finished or OnlySuccessfullyFinished.
            if (analysisMetadata.SchedulingInfo.CurrentStatus == AnalysisStatus.Completed &&
                (oneConsumer.Value == ConsumeOptions.Finished || oneConsumer.Value == ConsumeOptions.OnlySuccessfullyFinished))
            {
                await oneConsumer.Key.ConsumeAsync(analysis.AnalysisEvent, this.CancelToken).ConfigureAwait(false);
                continue;
            }

            // Failure: notify consumers registered for Finished or OnlyFailed.
            if (analysisMetadata.SchedulingInfo.CurrentStatus == AnalysisStatus.Failed &&
                (oneConsumer.Value == ConsumeOptions.Finished || oneConsumer.Value == ConsumeOptions.OnlyFailed))
            {
                await oneConsumer.Key.ConsumeAsync(analysis.AnalysisEvent, this.CancelToken).ConfigureAwait(false);
            }
        }
    }

    // Re-throw the exception with its original stack trace preserved.
    if (expDispatchInfo != null)
    {
        expDispatchInfo.Throw();
    }
}