protected IDictionary<ResultKey, TResult> MakeChromInfoResultsMap<TChromInfo, TResult>(
    Results<TChromInfo> results, Func<ResultFile, TResult> newResultFunc) where TChromInfo : ChromInfo
{
    var resultMap = new Dictionary<ResultKey, TResult>();
    if (results == null)
    {
        return resultMap;
    }
    for (int replicateIndex = 0; replicateIndex < results.Count; replicateIndex++)
    {
        var replicate = new Replicate(DataSchema, replicateIndex);
        var files = results[replicateIndex];
        if (null == files)
        {
            continue;
        }
        for (int fileIndex = 0; fileIndex < files.Count; fileIndex++)
        {
            var chromInfo = files[fileIndex];
            if (null == chromInfo)
            {
                continue;
            }
            var key = new ResultKey(replicate, fileIndex);
            var resultFile = new ResultFile(replicate, chromInfo.FileId, ResultFile.GetOptStep(chromInfo));
            resultMap.Add(key, newResultFunc(resultFile));
        }
    }
    return resultMap;
}
public Results<TItem> Update(Results<TItem> results, IDictionary<ResultKey, TResult> resultObjects)
{
    if (results == null)
    {
        return null;
    }
    var newChromInfos = new List<IList<TItem>>();
    for (int replicateIndex = 0; replicateIndex < results.Count; replicateIndex++)
    {
        var replicate = new Replicate(SkylineDataSchema, replicateIndex);
        var list = new List<TItem>();
        for (int fileIndex = 0; fileIndex < results[replicateIndex].Count; fileIndex++)
        {
            var chromInfo = results[replicateIndex][fileIndex];
            if (chromInfo != null)
            {
                var resultKey = new ResultKey(replicate, fileIndex);
                if (resultObjects.TryGetValue(resultKey, out TResult resultObject))
                {
                    var newAnnotations = AnnotationUpdater.UpdateAnnotations(GetAnnotations(chromInfo), resultObject);
                    chromInfo = ChangeAnnotations(chromInfo, newAnnotations);
                }
            }
            list.Add(chromInfo);
        }
        newChromInfos.Add(list);
    }
    return Results<TItem>.Merge(results, newChromInfos);
}
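Taken together, these two methods form a round trip: MakeChromInfoResultsMap projects each ChromInfo into a caller-supplied result object keyed by (replicate, file index), and Update writes the (possibly edited) annotations from those objects back into a new Results list. A minimal sketch of that round trip, in which precursorResults, RowValue, and EditRows are hypothetical stand-ins for the caller's types, not Skyline API:

// Hypothetical sketch; RowValue, EditRows, and precursorResults are illustrative only.
IDictionary<ResultKey, RowValue> rows =
    MakeChromInfoResultsMap(precursorResults, resultFile => new RowValue(resultFile));
EditRows(rows);                                // mutate annotation values on the row objects
var updated = Update(precursorResults, rows);  // merge the new annotations back in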
public void Create(out IResultKey @object, String assemblyName, RuntimeInfo targetRuntime,
    ProcessorArchitecture targetArchitecture, RuntimeInfo executionRuntime,
    ProcessorArchitecture executionArchitecture, String fileName, String methodName) =>
    @object = new ResultKey(assemblyName, targetRuntime, targetArchitecture,
        executionRuntime, executionArchitecture, fileName, methodName);
private VerificationResult CreateAnomalyChecksAndRunEverything(
    DataFrame data,
    IMetricsRepository repository,
    Check otherCheck,
    IEnumerable<IAnalyzer<IMetric>> additionalRequiredAnalyzers)
{
    // We only want to use historic data with the EU tag for the anomaly checks, since the new
    // data point is from the EU marketplace
    var filterEU = new Dictionary<string, string> { { "marketplace", "EU" } };

    // We only want to use data points before the date time associated with the current
    // data point, and only ones that are from 2018
    var afterDateTime = CreateDate(2018, 1, 1);
    var beforeDateTime = CreateDate(2018, 8, 1);

    // Config for the size anomaly check
    var sizeAnomalyCheckConfig = new AnomalyCheckConfig(CheckLevel.Error, "Size only increases",
        filterEU, afterDateTime, beforeDateTime);
    var sizeAnomalyDetectionStrategy = new AbsoluteChangeStrategy(0);

    // Config for the mean sales anomaly check
    var meanSalesAnomalyCheckConfig = new AnomalyCheckConfig(
        CheckLevel.Warning,
        "Sales mean within 2 standard deviations",
        filterEU,
        afterDateTime,
        beforeDateTime
    );
    var meanSalesAnomalyDetectionStrategy = new OnlineNormalStrategy(
        upperDeviationFactor: 2,
        lowerDeviationFactor: Option<double>.None,
        ignoreAnomalies: false);

    // ResultKey to be used when saving the results of this run
    var currentRunResultKey = new ResultKey(CreateDate(2018, 8, 1),
        new Dictionary<string, string> { { "marketplace", "EU" } });

    return new VerificationSuite()
        .OnData(data)
        .AddCheck(otherCheck)
        .AddRequiredAnalyzers(additionalRequiredAnalyzers)
        .UseRepository(repository)
        // Add the size anomaly check
        .AddAnomalyCheck(sizeAnomalyDetectionStrategy, Initializers.Size(), sizeAnomalyCheckConfig)
        // Add the mean sales anomaly check
        .AddAnomalyCheck(meanSalesAnomalyDetectionStrategy, Initializers.Mean("sales"),
            meanSalesAnomalyCheckConfig)
        // Save the new data point in the repository after we calculated everything
        .SaveOrAppendResult(currentRunResultKey)
        .Run();
}
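A call site for this helper might look as follows; salesData and the empty analyzer list are placeholders, not part of the original example:

// Hypothetical invocation; all arguments are placeholders.
VerificationResult result = CreateAnomalyChecksAndRunEverything(
    salesData,
    new InMemoryMetricsRepository(),
    otherCheck,
    Enumerable.Empty<IAnalyzer<IMetric>>());
// Since the size anomaly check runs at CheckLevel.Error, a detected size anomaly
// should surface as CheckStatus.Error in result.Status.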
public override AnalysisResult Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
{
    JsonDocument.TryParseValue(ref reader, out JsonDocument document);
    JsonElement resultKey = document.RootElement.GetProperty(SerdeExt.RESULT_KEY_FIELD);
    JsonElement analyzer = document.RootElement.GetProperty(SerdeExt.ANALYZER_CONTEXT_FIELD);
    ResultKey resultKeyDe = JsonSerializer.Deserialize<ResultKey>(resultKey.GetRawText(), options);
    AnalyzerContext analyzerContextDe = JsonSerializer.Deserialize<AnalyzerContext>(analyzer.GetRawText(), options);
    return new AnalysisResult(resultKeyDe, analyzerContextDe);
}
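A converter like this only takes effect once it is registered with the serializer options. A minimal sketch, assuming the class declaring this Read override is named AnalysisResultConverter:

using System.Text.Json;

// AnalysisResultConverter is an assumed name for the converter class declaring Read above.
var options = new JsonSerializerOptions();
options.Converters.Add(new AnalysisResultConverter());

// System.Text.Json now routes AnalysisResult deserialization through the Read override.
AnalysisResult result = JsonSerializer.Deserialize<AnalysisResult>(json, options);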
public static void GenerateKeyPair(ResultKey ret)
{
    SM2 sm2 = SM2.Instance;
    AsymmetricCipherKeyPair key = sm2.ecc_key_pair_generator.GenerateKeyPair();
    ECPrivateKeyParameters ecpriv = (ECPrivateKeyParameters)key.Private;
    ECPublicKeyParameters ecpub = (ECPublicKeyParameters)key.Public;
    BigInteger privateKey = ecpriv.D;
    ECPoint publicKey = ecpub.Q;

    System.Console.Out.WriteLine("Public key: " + Hex.ToHexString(publicKey.GetEncoded()).ToUpper());
    System.Console.Out.WriteLine("Private key: " + Hex.ToHexString(privateKey.ToByteArray()).ToUpper());

    ret.bytePubkey = publicKey.GetEncoded();
    ret.bytePrikey = privateKey.ToByteArray();
    ret.base64StrPubkey = Convert.ToBase64String(ret.bytePubkey);
    ret.base64StrPrikey = Convert.ToBase64String(ret.bytePrikey);
}
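Note that ResultKey in this snippet is a plain holder for the generated key material rather than a repository or results key. A usage sketch, assuming it has a parameterless constructor (the method only assigns its fields):

// Hypothetical usage of the generator above.
var keys = new ResultKey();
GenerateKeyPair(keys);
Console.WriteLine(keys.base64StrPubkey); // Base64-encoded SM2 public key
Console.WriteLine(keys.base64StrPrikey); // Base64-encoded SM2 private key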
public void save_and_retrieve_AnalyzerContexts() =>
    Evaluate(_session, (context, repository) =>
    {
        ResultKey resultKey = new ResultKey(DATE_ONE, new Dictionary<string, string>(REGION_EU));
        repository.Save(resultKey, context);

        AnalyzerContext loadResults = repository.LoadByKey(resultKey).Value;

        DataFrame loadedResultsAsDataFrame =
            loadResults.SuccessMetricsAsDataFrame(_session, Enumerable.Empty<IAnalyzer<IMetric>>());
        DataFrame resultAsDataFrame =
            context.SuccessMetricsAsDataFrame(_session, Enumerable.Empty<IAnalyzer<IMetric>>());

        AssertSameRows(loadedResultsAsDataFrame, resultAsDataFrame);

        loadResults
            .SuccessMetricsAsJson(Enumerable.Empty<IAnalyzer<IMetric>>())
            .ShouldBe(context
                .SuccessMetricsAsJson(Enumerable.Empty<IAnalyzer<IMetric>>()));
    });
public void should_execute_anomaly_detection_example()
{
    // Anomaly detection operates on metrics stored in a metrics repository, so let's create one
    InMemoryMetricsRepository metricsRepository = new InMemoryMetricsRepository();

    // This is the key which we use to store the metrics for the dataset from yesterday
    // (24 hours ago, expressed in ticks)
    ResultKey yesterdayKeys = new ResultKey(DateTime.Now.Ticks - TimeSpan.FromHours(24).Ticks);

    /* In this simple example, we assume that we compute metrics on a dataset every day and we want
     * to ensure that they don't change drastically. For the sake of simplicity, we just look at the
     * size of the data */

    /* Yesterday, the data had only two rows */
    var yesterdaysDataset = LoadAnomalyDetectionData(new List<object[]>
    {
        new object[] { 1, "Thingy A", "awesome thing.", "high", 0 },
        new object[] { 2, "Thingy B", "available at http://thingb.com", null, 0 }
    });

    /* We test for anomalies in the size of the data; it should not increase by more than 2x. Note
     * that we store the resulting metrics in our repository */
    new VerificationSuite()
        .OnData(yesterdaysDataset)
        .UseRepository(metricsRepository)
        .SaveOrAppendResult(yesterdayKeys)
        .AddAnomalyCheck(
            new RelativeRateOfChangeStrategy(maxRateIncrease: 2.0),
            Size()
        )
        .Run()
        .Debug(_helper.WriteLine);

    /* Today's data has five rows, so the data size more than doubled and our anomaly check should
     * catch this */
    var todaysDataset = LoadAnomalyDetectionData(new List<object[]>
    {
        new object[] { 1, "Thingy A", "awesome thing.", "high", 0 },
        new object[] { 2, "Thingy B", "available at http://thingb.com", null, 0 },
        new object[] { 3, null, null, "low", 5 },
        new object[] { 4, "Thingy D", "checkout https://thingd.ca", "low", 10 },
        new object[] { 5, "Thingy W", null, "high", 12 }
    });

    /* The key for today's result */
    var todaysKey = new ResultKey(DateTime.Now.Ticks);

    /* Repeat the anomaly check for today's data */
    var verificationResult = new VerificationSuite()
        .OnData(todaysDataset)
        .UseRepository(metricsRepository)
        .SaveOrAppendResult(todaysKey)
        .AddAnomalyCheck(
            new RelativeRateOfChangeStrategy(maxRateIncrease: 2.0),
            Size()
        )
        .Run();

    verificationResult.Status.ShouldBe(CheckStatus.Warning);
    _helper.WriteLine("Anomaly detected in the Size() metric!");

    /* Let's have a look at the actual metrics. */
    metricsRepository
        .Load()
        .ForAnalyzers(new[] { Size() })
        .GetSuccessMetricsAsDataFrame(_session)
        .Show();
}
public AnalysisResult(ResultKey resultKey, AnalyzerContext analyzerContext)
{
    ResultKey = resultKey;
    AnalyzerContext = analyzerContext;
}
/// <summary>
/// This method demonstrates simple use of the OpenAPI: initialization of the environment,
/// creation of the model, linear calculation, and reading of results.
/// </summary>
private static void RunSCIAOpenAPI_simple()
{
    // Initialization of the OpenAPI environment: path to the location of your installation
    // and a temp path for logs
    using (SCIA.OpenAPI.Environment env = new SCIA.OpenAPI.Environment(SciaEngineerFullPath, AppLogPath, "1.0.0.0"))
    {
        // Run the SCIA Engineer application
        bool openedSE = env.RunSCIAEngineer(SCIA.OpenAPI.Environment.GuiMode.ShowWindowShow);
        if (!openedSE)
        {
            throw new InvalidOperationException("Cannot run SCIA Engineer");
        }

        // Open the project
        SCIA.OpenAPI.EsaProject proj = env.OpenProject(SciaEngineerProjecTemplate);
        if (proj == null)
        {
            throw new InvalidOperationException("Cannot open project");
        }

        // Method which creates the model
        CreateModel(proj.Model);

        #region ---------- Calculation ----------

        #region Send Model to SCIA Engineer
        // Refresh the model in SCIA Engineer from the local ADM
        proj.Model.RefreshModel_ToSCIAEngineer();
        #endregion

        #region Calculate
        // Run the calculation
        proj.RunCalculation();
        #endregion

        #endregion

        #region ---------- Results -------------

        // Initialize the Results API
        using (ResultsAPI rapi = proj.Model.InitializeResultsAPI())
        {
            if (rapi != null)
            {
                // Results key for internal forces on beam 1 under load case Lc1
                ResultKey keyIntFor1Db1 = new ResultKey
                {
                    CaseType = eDsElementType.eDsElementType_LoadCase,
                    CaseId = Lc1Id,
                    EntityType = eDsElementType.eDsElementType_Beam,
                    EntityName = beamName,
                    Dimension = eDimension.eDim_1D,
                    ResultType = eResultType.eFemBeamInnerForces,
                    CoordSystem = eCoordSystem.eCoordSys_Local
                };

                // Load 1D results based on the results key
                Result IntFor1Db1 = rapi.LoadResult(keyIntFor1Db1);
                if (IntFor1Db1 != null)
                {
                    Console.WriteLine(IntFor1Db1.GetTextOutput());
                }

                // Results key for internal forces on beam 1 under combination C1
                ResultKey keyIntFor1Db1Combi = new ResultKey
                {
                    EntityType = eDsElementType.eDsElementType_Beam,
                    EntityName = beamName,
                    CaseType = eDsElementType.eDsElementType_Combination,
                    CaseId = C1Id,
                    Dimension = eDimension.eDim_1D,
                    ResultType = eResultType.eFemBeamInnerForces,
                    CoordSystem = eCoordSystem.eCoordSys_Local
                };

                // Load 1D results based on the results key
                Result IntFor1Db1Combi = rapi.LoadResult(keyIntFor1Db1Combi);
                if (IntFor1Db1Combi != null)
                {
                    Console.WriteLine(IntFor1Db1Combi.GetTextOutput());
                }

                // Results key for nodal reactions in support n1
                ResultKey keyReactionsSu1 = new ResultKey
                {
                    CaseType = eDsElementType.eDsElementType_LoadCase,
                    CaseId = Lc1Id,
                    EntityType = eDsElementType.eDsElementType_Node,
                    EntityName = "n1",
                    Dimension = eDimension.eDim_reactionsPoint,
                    ResultType = eResultType.eReactionsNodes,
                    CoordSystem = eCoordSystem.eCoordSys_Global
                };

                Result reactionsSu1 = rapi.LoadResult(keyReactionsSu1);
                if (reactionsSu1 != null)
                {
                    Console.WriteLine(reactionsSu1.GetTextOutput());
                }

                // Results key for deformations on the slab
                ResultKey keySlab = new ResultKey
                {
                    EntityType = eDsElementType.eDsElementType_Slab,
                    EntityName = SlabName,
                    CaseType = eDsElementType.eDsElementType_LoadCase,
                    CaseId = Lc1Id,
                    Dimension = eDimension.eDim_2D,
                    ResultType = eResultType.eFemDeformations,
                    CoordSystem = eCoordSystem.eCoordSys_Local
                };

                Result Def2Ds1 = rapi.LoadResult(keySlab);
                if (Def2Ds1 != null)
                {
                    Console.WriteLine(Def2Ds1.GetTextOutput());

                    // Find the deformation with the largest absolute value across the mesh
                    double maxvalue = 0;
                    for (int i = 0; i < Def2Ds1.GetMeshElementCount(); i++)
                    {
                        double pivot = Def2Ds1.GetValue(2, i);
                        if (System.Math.Abs(pivot) > System.Math.Abs(maxvalue))
                        {
                            maxvalue = pivot;
                        }
                    }
                    Console.WriteLine("Maximum deformation on slab:");
                    Console.WriteLine(maxvalue);
                }
            }
            else
            {
                throw new Exception("No results accessible");
            }

            Console.WriteLine("Press key to exit");
            Console.ReadKey();
        }

        #endregion

        proj.CloseProject(SCIA.OpenAPI.SaveMode.SaveChangesNo);
    }
}
public static string ToMessage(this ResultKey key)
{
    return ResultMessage.Instance.GetString(key.ToString());
}
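A usage sketch, assuming ResultKey here is an enum whose member names match entries in the ResultMessage resource table; InvalidInput is an assumed member:

// Hypothetical usage; ResultKey.InvalidInput is an assumed enum member.
ResultKey key = ResultKey.InvalidInput;
string message = key.ToMessage(); // looks up "InvalidInput" in ResultMessage.Instance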
public void Create(out IResultKey @object, ITestScenario scenario, String fileName, String methodName) => @object = new ResultKey(scenario, fileName, methodName);
public void Create(out IResultKey @object, ITestScenario scenario, MethodInfo methodInfo) => @object = new ResultKey(scenario, methodInfo);
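Each of these factory overloads fills its out parameter rather than returning the key. A usage sketch, where keyFactory, scenario, and MyFixture are assumed to come from the surrounding test infrastructure:

// Hypothetical usage; keyFactory, scenario, and MyFixture are assumptions.
IResultKey key;
keyFactory.Create(out key, scenario, typeof(MyFixture).GetMethod(nameof(MyFixture.MyTest)));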