/// <summary>
/// Constructs the address book form; if the current SuiteCRM session is not
/// logged in, prompts the user with the settings form so credentials can be supplied.
/// </summary>
public frmAddressBook()
{
    this.InitializeComponent();

    // The address book is useless without a live session: offer the settings
    // dialog (which handles login) when we detect we're not logged in.
    var session = Globals.ThisAddIn.SuiteCRMUserSession;
    if (session.NotLoggedIn)
    {
        Robustness.DoOrLogError(
            Globals.ThisAddIn.Log,
            () => Globals.ThisAddIn.ShowSettingsForm());
    }
}
/// <summary>
/// Main loop of this repeating process: run startup once, then perform one
/// iteration per cycle until <see cref="IsActive"/> becomes false, finally
/// marking the process stopped.
/// </summary>
/// <remarks>
/// NOTE(review): this is an <c>async void</c> method, so exceptions escaping it are
/// unobservable by callers; all work is therefore wrapped in Robustness.DoOrLogError.
/// The read of <c>this.state</c> before the delay is done outside <c>processLock</c> —
/// presumably acceptable here, but confirm against the other writers of state.
/// </remarks>
private async void PerformRepeatedly()
{
    // One-time startup, logged (not thrown) on failure.
    Robustness.DoOrLogError(
        this.Log,
        () => this.PerformStartup(),
        $"{this.Name} PerformStartup");

    do
    {
        // Log when we find ourselves on an unnamed (pool) thread, for diagnostics.
        var fred = Thread.CurrentThread;
        if (fred.Name == null)
        {
            Log.Debug($"Anonymous thread {fred.ManagedThreadId} running as '{this.Name}'.");
        }

        lock (processLock)
        {
            this.state = RunState.Running;
        }

        // One iteration of the repeated work; failures are logged, not rethrown,
        // so a bad iteration does not kill the loop.
        Robustness.DoOrLogError(
            this.Log,
            () => this.PerformIteration(),
            $"{this.Name} PerformIteration");

        /* deal with any pending Windows messages, which we don't need to know about */
        System.Windows.Forms.Application.DoEvents();

        // Track both the previous and latest completion timestamps (UTC).
        this.previousIterationCompleted = this.lastIterationCompleted;
        this.lastIterationCompleted = DateTime.UtcNow;

        // Only sleep if nothing has externally changed our state (e.g. a stop request).
        if (this.state == RunState.Running)
        {
            try
            {
                lock (processLock)
                {
                    this.state = RunState.Waiting;
                }
                // Interruptible wait: the interrupter token lets a caller cut the
                // delay short to trigger an immediate iteration or shutdown.
                await Task.Delay(this.Interval, interrupter.Token);
            }
            catch (TaskCanceledException)
            {
                // Expected when the delay is deliberately interrupted; not an error.
            }
        }
    }
    while (this.IsActive);

    // Loop exited: record the stop and release the process reference.
    lock (processLock)
    {
        Log.Debug($"Stopping thread {this.Name} immediately.");
        this.state = RunState.Stopped;
        this.process = null;
    }
}
/// <summary>
/// Set the transmission state of this SyncState object to pending, but only
/// when the underlying item has genuinely changed.
/// </summary>
internal void SetPendingIfReallyChanged()
{
    // No real change, nothing to transmit.
    if (!this.ReallyChanged())
    {
        return;
    }

    Robustness.DoOrLogError(
        Globals.ThisAddIn.Log,
        () => this.SetPending(),
        $"{this.GetType().Name}.SetPendingIfReallyChanged");
}
/// <summary>
/// Perform one iteration: dequeue at most one pending <see cref="DaemonAction"/>
/// and execute it, logging (rather than throwing) any failure.
/// </summary>
/// <remarks>
/// Fix: the <c>DoOrLogError</c> call previously omitted the context message that
/// every other caller in this codebase supplies, so a failure was logged without
/// identifying which task failed. We now pass <c>task.Description</c>.
/// </remarks>
internal override void PerformIteration()
{
    DaemonAction task;
    if (tasks.TryDequeue(out task))
    {
        Log.Info($"About to perform {task.Description}.");
        Robustness.DoOrLogError(
            this.Log,
            () =>
            {
                task.Perform();
                Log.Info($"{task.Description} completed.");
            },
            task.Description);
    }
}
/// <summary>
/// Repeatedly perform one iteration of this process's work, pausing for
/// <see cref="SyncPeriod"/> between iterations, until <see cref="Running"/>
/// becomes false; then record the stopped state.
/// </summary>
private async void PerformRepeatedly()
{
    for (;;)
    {
        // Execute one iteration; any failure is logged rather than thrown,
        // so the loop survives a bad cycle.
        Robustness.DoOrLogError(
            this.Log,
            () => this.PerformIteration(),
            $"{this.Name} PerformIteration");

        // Stamp the completion time in UTC.
        this.lastIterationCompleted = DateTime.UtcNow;

        // Sleep between cycles without blocking the thread.
        await Task.Delay(this.SyncPeriod);

        // Loop runs at least once, matching do/while semantics.
        if (!this.Running)
        {
            break;
        }
    }

    // Publish the stopped state and drop the process reference under the lock.
    lock (processLock)
    {
        this.state = RunState.Stopped;
        this.process = null;
    }
}
/// <summary>
/// Main loop of this repeating process: perform one iteration per cycle,
/// sleeping <see cref="SyncPeriod"/> between cycles, until <see cref="Running"/>
/// becomes false; then mark the process stopped.
/// </summary>
/// <remarks>
/// NOTE(review): <c>async void</c> — exceptions escaping this method are unobservable;
/// all work is wrapped in Robustness.DoOrLogError for that reason. The read of
/// <c>this.state</c> before the delay happens outside <c>processLock</c>; presumably
/// intentional, but confirm against the other writers of state.
/// </remarks>
private async void PerformRepeatedly()
{
    do
    {
        // Warn when running on an unnamed (pool) thread, for diagnostics.
        var fred = Thread.CurrentThread;
        if (fred.Name == null)
        {
            Log.Warn($"Anonymous thread {fred.ManagedThreadId} running as '{this.Name}'.");
        }

        lock (processLock)
        {
            this.state = RunState.Running;
        }

        // One iteration of the repeated work; failures are logged, not rethrown.
        Robustness.DoOrLogError(
            this.Log,
            () => this.PerformIteration(),
            $"{this.Name} PerformIteration");

        /* deal with any pending Windows messages, which we don't need to know about */
        System.Windows.Forms.Application.DoEvents();

        this.lastIterationCompleted = DateTime.UtcNow;

        // Only sleep if nothing has externally changed our state (e.g. a stop request).
        if (this.state == RunState.Running)
        {
            lock (processLock)
            {
                this.state = RunState.Waiting;
            }
            // Uninterruptible delay: unlike the sibling variant, no cancellation
            // token is used here, so a stop request waits out the full period.
            await Task.Delay(this.SyncPeriod);
        }
    }
    while (this.Running);

    // Loop exited: publish the stopped state and drop the process reference.
    lock (processLock)
    {
        this.state = RunState.Stopped;
        this.process = null;
    }
}
/// <summary>
/// Sets a robustness-related creation hint for the next window to be created.
/// NOTE(review): presumably a GLFW P/Invoke entry point — the [DllImport]
/// attribute appears to be outside this view; confirm it is present.
/// </summary>
/// <param name="hint">The robustness window hint to set.</param>
/// <param name="value">The robustness strategy value to assign to the hint.</param>
public static extern void glfwWindowHint(WindowHintRobustness hint, Robustness value);
/// <inheritdoc />
// Abstract overload taking robustness-specific hint/value types; implemented
// by the concrete binding (see the extern glfwWindowHint declaration).
public abstract void WindowHint(WindowHintRobustness hint, Robustness value);
/// <summary>
/// Entry point for the MNIST robustness tool: parses command-line options,
/// loads a neural network and the MNIST data/labels, then either reports
/// accuracy/loss, filters misclassified examples, or synthesizes adversarial
/// counterexamples and writes them to the registry.
/// </summary>
/// <param name="args">Command-line arguments; see the OptionSet entries below.</param>
static void Main(string[] args)
{
    string MNISTFile = null;
    string MNISTData = null;
    string MNISTLabels = null;
    var p = new OptionSet();
    bool just_accuracy = false;
    bool just_loss = false;

    // Required inputs: network file, image data file, label file.
    p.Add("nnet=", "MNIST neural network file name", x => MNISTFile = x);
    p.Add("datafile=", "MNIST data file name", x => MNISTData = x);
    p.Add("labelfile=", "MNIST label file name", x => MNISTLabels = x);

    // Search/optimization parameters, written straight into the global RobustnessOptions.
    p.Add<bool>("optimization=", "Do optimization (Default: true)", (x => RobustnessOptions.DoOptimization = x));
    p.Add<double>("bound=", "Linfinity-ball to search", (x => RobustnessOptions.Epsilon = x));
    p.Add<double>("sub=", "Subsample from 'live' constraints (0.0-1.0)", (x => RobustnessOptions.LiveConstraintSamplingRatio = x));
    p.Add<string>("registry=", "Unique name to store output examples and statistics", (x => RobustnessOptions.Registry = x));
    p.Add<bool>("cegar=", "Do CEGAR (default: true)", (x => RobustnessOptions.CEGAR = x));

    // Flag-style options: presence of the flag (x != null) toggles the behavior.
    p.Add<string>("only-accuracy", "Only evaluate accuracy", (x => just_accuracy = (x != null)));
    p.Add<string>("only-loss", "Only evaluate loss", (x => just_loss = (x != null)));
    p.Add<string>("no-quant-safety", "Quantization integrality safety off", (x => RobustnessOptions.QuantizationSafety = (x == null)));
    p.Add<string>("max-conf", "Use max-conf objective", (x => { if (x != null) { RobustnessOptions.ObjectiveKind = LPSObjectiveKind.MaxConf; } }));
    p.Add<double>("winner-diff=", "Winning label should be that much different than second best", (x => RobustnessOptions.LabelConfidenceDiff = x));
    p.Add<string>("log-png", "Log png files", (x => RobustnessOptions.SavePNGCounterexamples = (x != null)));

    bool only_misclass = false;
    p.Add("only-filter-misclass", "Only keep the misclassifications", (x => only_misclass = (x != null)));

    Cmd.RunOptionSet(p, args);

    // All three input files are mandatory; bail out with a non-zero exit code otherwise.
    if (MNISTFile == null || MNISTData == null || MNISTLabels == null)
    {
        Console.WriteLine("Invalid arguments, use --help");
        Environment.Exit(1);
    }

    RobustnessOptions.Dump();

    Options.InitializeNNAnalysis();

    NeuralNet nn = MNIST.GetNN(MNISTFile);
    ImageDataset data = MNIST.ReadData(MNISTLabels, MNISTData, MNIST.ALL_IMAGES, 0);

    // Short-circuit modes: report a single metric and exit.
    if (just_accuracy)
    {
        NNAccuracy.GetAccuracy(nn, data.Dataset);
        return;
    }
    if (just_loss)
    {
        NNAccuracy.GetLoss(nn, data.Dataset);
        return;
    }

    // Filtering mode: keep only misclassified examples and write them out.
    if (only_misclass)
    {
        string filtered = RobustnessOptions.Registry + "-misclass";
        Console.WriteLine("Orig {0} data", data.Dataset.Count());
        var ds = NNAccuracy.KeepMisclass(nn, data.Dataset);
        Console.WriteLine("Kept {0} data", ds.Count());
        ImageDataset ret = new ImageDataset(ds,
            MNIST.InputCoordinates.ChannelCount,
            MNIST.InputCoordinates.RowCount,
            MNIST.InputCoordinates.ColumnCount, true);
        MNIST.WriteData(filtered + "-labels", filtered + "-images", ret);
        return;
    }

    // NB: No snapshotting for MNIST since it never crashes ...
    // The no-op lambda means intermediate results are never checkpointed.
    ImageDataset synth = Robustness.SynthesizeCounterexamplesAndStore(nn, data, x => { return; });
    MNIST.WriteData(RobustnessOptions.Registry + "-synth-labels",
                    RobustnessOptions.Registry + "-synth-images",
                    synth);
}
/// <summary>
/// For every feature in the selected layer, computes per-class membership
/// probabilities (normal distribution around the feature's value with its
/// uncertainty field as standard deviation, integrated between the class
/// breaks in <paramref name="cb"/>), derives either normalized entropy or the
/// target-class robustness, writes the result into a field on the feature
/// class, and optionally adds a hatched-renderer layer visualizing it.
/// </summary>
/// <param name="cb">Class break values; cb.Length - 1 is the class count.</param>
private void EvalRobustness(double[] cb)
{
    // Resolve the source feature layer from the combo-box selection.
    int intLIndex = m_pSnippet.GetIndexNumberFromLayerName(m_pActiveView, cboSourceLayer.Text);
    ILayer pLayer = mForm.axMapControl1.get_Layer(intLIndex);
    m_pFLayer = pLayer as IFeatureLayer;
    IFeatureClass pFClass = m_pFLayer.FeatureClass;

    string strValueFld = m_pRenderedLayer.strValueFldName;

    // Prefer the uncertainty field recorded on the rendered layer; fall back
    // to the combo-box choice. No field at all means nothing to evaluate.
    string strUncernfld = string.Empty;
    if (m_pRenderedLayer.strUncernFldName == string.Empty)
    {
        strUncernfld = cboUncernFld.Text;
    }
    else
    {
        strUncernfld = m_pRenderedLayer.strUncernFldName;
    }
    if (strUncernfld == string.Empty)
    {
        return;
    }

    int intValueIdx = pFClass.FindField(strValueFld);
    int intUncernIdx = pFClass.FindField(strUncernfld);
    int intClassCount = cb.Length - 1;

    // Output field: create it (double) if it doesn't already exist.
    string strSavefldnm = txtFldName.Text;
    if (pFClass.FindField(strSavefldnm) == -1)
    {
        m_pSnippet.AddField(pFClass, strSavefldnm, esriFieldType.esriFieldTypeDouble);
    }
    int intSavefldIdx = pFClass.FindField(strSavefldnm);

    int intFCounts = pFClass.FeatureCount(null);

    // Chart is used only for its DataManipulator.Statistics.NormalDistribution
    // (standard normal CDF) helper.
    Chart pChart = new Chart();

    IFeature pFeat = null;
    IFeatureCursor pFCursor = null;
    pFCursor = pFClass.Update(null, false);
    pFeat = pFCursor.NextFeature();

    Robustness[] pRobustness = new Robustness[intFCounts];
    double[] arrValue = new double[intFCounts];
    int i = 0;

    while (pFeat != null)
    {
        pRobustness[i] = new Robustness();
        pRobustness[i].Robustnesses = new double[intClassCount];

        double dblValue = Convert.ToDouble(pFeat.get_Value(intValueIdx));
        double dblStd = Convert.ToDouble(pFeat.get_Value(intUncernIdx));
        double dblEntropy = 0;

        for (int j = 0; j < intClassCount; j++)
        {
            // P(cb[j] < X <= cb[j+1]) for X ~ Normal(dblValue, dblStd),
            // via the difference of two standard-normal CDF values.
            double dblUpperZvalue = (cb[j + 1] - dblValue) / dblStd;
            double dblUpperConfLev = pChart.DataManipulator.Statistics.NormalDistribution(dblUpperZvalue);
            double dblLowerZvalue = (cb[j] - dblValue) / dblStd;
            double dblLowerConfLev = pChart.DataManipulator.Statistics.NormalDistribution(dblLowerZvalue);
            double dblProb = dblUpperConfLev - dblLowerConfLev;

            pRobustness[i].Robustnesses[j] = dblProb; //Probability of an observation to each class

            // Accumulate p*log2(p) terms (skip zero to avoid log(0)).
            if (dblProb != 0)
            {
                dblEntropy += dblProb * Math.Log(dblProb, 2);
            }

            // Identify the class interval containing the observed value;
            // the first interval is closed on both ends, later ones half-open.
            if (j == 0)
            {
                if (dblValue >= cb[j] && dblValue <= cb[j + 1])
                {
                    pRobustness[i].TargetClass = j;
                }
            }
            else
            {
                if (dblValue > cb[j] && dblValue <= cb[j + 1])
                {
                    pRobustness[i].TargetClass = j;
                }
            }
        }

        // Normalized Shannon entropy: -sum(p*log2 p) / log2(classCount), in [0, 1].
        pRobustness[i].Entropy = ((double)(-1) * dblEntropy) / (Math.Log(intClassCount, 2));

        // Persist the chosen measure (entropy or target-class robustness).
        if (cboMeasure.Text == "Entropy")
        {
            arrValue[i] = pRobustness[i].Entropy;
            pFeat.set_Value(intSavefldIdx, pRobustness[i].Entropy);
        }
        else
        {
            pFeat.set_Value(intSavefldIdx, pRobustness[i].Robustnesses[pRobustness[i].TargetClass]);
            arrValue[i] = pRobustness[i].Robustnesses[pRobustness[i].TargetClass];
        }
        pFCursor.UpdateFeature(pFeat);
        pFeat = pFCursor.NextFeature();
        i++;
    }

    // (Removed a large block of commented-out legacy visualization/classification code.)

    // Optionally visualize the computed measure on the map.
    if (chkAddMap.Checked)
    {
        IClassifyGEN pClassifyGEN = new NaturalBreaksClass();
        IFeatureLayer pflUncern = new FeatureLayerClass();
        pflUncern.FeatureClass = pFClass;
        pflUncern.Name = cboSourceLayer.Text + " " + cboMeasure.Text;
        pflUncern.Visible = true;
        IGeoFeatureLayer pGeofeatureLayer = (IGeoFeatureLayer)pflUncern;

        // Build a histogram of the saved field and classify it into 4 classes.
        ITableHistogram pTableHistogram2 = new BasicTableHistogramClass();
        ITable pTable = (ITable)pFClass;
        pTableHistogram2.Field = strSavefldnm;
        pTableHistogram2.Table = pTable;
        IBasicHistogram pHistogram = (IBasicHistogram)pTableHistogram2;
        object xVals, frqs;
        pHistogram.GetHistogram(out xVals, out frqs);
        pClassifyGEN.Classify(xVals, frqs, 4); //Fixed Class count
        double[] cb_uncern = (double[])pClassifyGEN.ClassBreaks;

        // NOTE(review): both branches assign identical break overrides, which makes
        // the if/else (and the natural-breaks classification above) redundant —
        // possibly a latent bug where different breaks were intended per measure.
        if (cboMeasure.Text == "Entropy")
        {
            cb_uncern[1] = 0.4;
            cb_uncern[2] = 0.6;
            cb_uncern[3] = 0.75;
        }
        else
        {
            cb_uncern[1] = 0.4;
            cb_uncern[2] = 0.6;
            cb_uncern[3] = 0.75;
        }

        // Configure the hatched (spacing-breaks) renderer; hatch spacing direction
        // is inverted between entropy (higher = denser) and robustness.
        ISpacingBreaksRenderer pSpacingBrksRenderers = new SpacingBreaksRendererClass();
        pSpacingBrksRenderers.arrClassBrks = cb_uncern;
        pSpacingBrksRenderers.arrValue = arrValue;
        if (cboMeasure.Text == "Entropy")
        {
            pSpacingBrksRenderers.dblFromSep = Convert.ToDouble(1);
            pSpacingBrksRenderers.dblToSep = Convert.ToDouble(20);
        }
        else
        {
            pSpacingBrksRenderers.dblFromSep = Convert.ToDouble(20);
            pSpacingBrksRenderers.dblToSep = Convert.ToDouble(1);
        }
        pSpacingBrksRenderers.dblLineAngle = Convert.ToDouble(45);
        pSpacingBrksRenderers.dblLineWidth = Convert.ToDouble(0.1);
        pSpacingBrksRenderers.m_pLineRgb = m_pSnippet.getRGB(0, 0, 0);
        if (pSpacingBrksRenderers.m_pLineRgb == null)
        {
            return;
        }
        pSpacingBrksRenderers.strHeading = cboMeasure.Text;
        pSpacingBrksRenderers.intRoundingDigits = 2;
        pSpacingBrksRenderers.CreateLegend();
        pGeofeatureLayer.Renderer = (IFeatureRenderer)pSpacingBrksRenderers;

        // Add the new layer and refresh the map and table of contents.
        m_pActiveView.FocusMap.AddLayer(pGeofeatureLayer);
        m_pActiveView.Refresh();
        mForm.axTOCControl1.Update();
    }
}
/// <summary>
/// Entry point for the CIFAR robustness tool: parses command-line options,
/// loads a neural network and a CIFAR dataset (binary batch or raw directory),
/// then either reports accuracy/loss, filters high-confidence examples, or
/// synthesizes adversarial counterexamples (with periodic snapshotting) and
/// writes them to the registry.
/// </summary>
/// <param name="args">Command-line arguments; see the OptionSet entries below.</param>
static void Main(string[] args)
{
    string CifarNNFile = null;
    string CifarDataBatch = null;
    bool just_accuracy = false;
    bool just_loss = false;
    bool raw_directory = false;
    var p = new OptionSet();

    // Required inputs: network file and dataset (file or raw directory).
    p.Add<string>("nnet=", "CIFAR neural network file name", (x => CifarNNFile = x));
    p.Add<string>("dataset=", "CIFAR dataset file name", (x => CifarDataBatch = x));
    p.Add<string>("rawdir", "If set then --dataset value should be a directory in raw directory format", (x => raw_directory = (x != null)));

    // Search/optimization parameters, written straight into the global RobustnessOptions.
    p.Add<bool>("optimization=", "Do optimization (Default: true)", (x => RobustnessOptions.DoOptimization = x));
    p.Add<double>("sub=", "Subsample from 'live' constraints (0.0-1.0)", (x => RobustnessOptions.LiveConstraintSamplingRatio = x));
    p.Add<string>("registry=", "Unique name to store output examples and statistics", (x => RobustnessOptions.Registry = x));
    p.Add<bool>("cegar=", "Do CEGAR (default: true)", (x => RobustnessOptions.CEGAR = x));
    p.Add<string>("only-accuracy", "Only evaluate accuracy", (x => just_accuracy = (x != null)));
    p.Add<string>("only-loss", "Only evaluate loss", (x => just_loss = (x != null)));
    p.Add<double>("bound=", "Linfinity-ball to search", (x => RobustnessOptions.Epsilon = x));
    p.Add<double>("minval=", "Minimum value of each entry", (x => RobustnessOptions.MinValue = x));
    p.Add<double>("maxval=", "Maximum value of each entry", (x => RobustnessOptions.MaxValue = x));
    p.Add<string>("no-quant-safety", "Quantization integrality safety off", (x => RobustnessOptions.QuantizationSafety = (x == null)));
    p.Add<double>("scale-preprocessed=", "If image data is preprocessed, scale before dumping to registry", (x => RobustnessOptions.ScalePreProcessed = x));
    p.Add<double>("offset-preprocessed=", "If image data is preprocessed, offset scaled before dumping to registry", (x => RobustnessOptions.OffsetPreProcessed = x));
    p.Add<string>("max-conf", "Use max-conf objective", (x => { if (x != null) { RobustnessOptions.ObjectiveKind = LPSObjectiveKind.MaxConf; } }));
    p.Add<double>("winner-diff=", "Winning label should be that much different than second best", (x => RobustnessOptions.LabelConfidenceDiff = x));
    p.Add<string>("log-png", "Log png files", (x => RobustnessOptions.SavePNGCounterexamples = (x != null)));

    bool only_filter = false;
    double filter_conf = 0.98;
    p.Add("only-filter", "Only filter by confidence", (x => only_filter = (x != null)));
    p.Add<double>("filter-conf=", "Filter confidence", (x => filter_conf = x));

    Cmd.RunOptionSet(p, args);

    // Both inputs are mandatory; bail out with a non-zero exit code otherwise.
    if (CifarNNFile == null || CifarDataBatch == null)
    {
        Console.WriteLine("Invalid arguments, use --help");
        Environment.Exit(1);
    }

    /* Initialize parameters */
    Options.InitializeNNAnalysis();

    NeuralNet nn = CIFAR.GetNN(CifarNNFile);

    ImageDataset data;
    if (raw_directory)
    {
        // our raw data format (see lmdb2raw.py)
        data = CIFAR.ReadDirectoryData(CifarDataBatch);
    }
    else
    {
        // Plain old CIFAR binary format
        data = CIFAR.ReadData(CifarDataBatch, CIFAR.ALL_IMAGES, 0);
    }

    // Short-circuit modes: report a single metric and exit.
    if (just_accuracy)
    {
        NNAccuracy.GetAccuracy(nn, data.Dataset);
        return;
    }
    if (just_loss)
    {
        NNAccuracy.GetLoss(nn, data.Dataset);
        return;
    }

    // Filtering mode: keep only examples classified above the confidence
    // threshold and write them out, then exit.
    if (only_filter)
    {
        string filtered = RobustnessOptions.Registry + "-filtered-" + filter_conf.ToString();
        Console.WriteLine("Orig {0} data", data.Dataset.Count());
        var ds = NNAccuracy.KeepAboveConfidenceThreshold(nn, data.Dataset, filter_conf);
        Console.WriteLine("Kept {0} data", ds.Count());
        ImageDataset ret = new ImageDataset(ds,
            CIFAR.InputCoordinates.ChannelCount,
            CIFAR.InputCoordinates.RowCount,
            CIFAR.InputCoordinates.ColumnCount, true);
        CIFAR.WriteData(filtered, ret);
        return;
    }

    RobustnessOptions.Dump();

    string synthImagesName = RobustnessOptions.Registry + "-synth";

    int labelcount = data.Dataset.LabelCount();
    ImageDataset acc = new ImageDataset(new Dataset(labelcount),
        CIFAR.InputCoordinates.ChannelCount,
        CIFAR.InputCoordinates.RowCount,
        CIFAR.InputCoordinates.ColumnCount, true);

    // Snapshot callback: accumulate each synthesized counterexample and flush
    // the accumulator to disk every 4 items (crash resilience during long runs).
    int state = 0;
    Action<LabelWithConfidence> snapshot = x =>
    {
        acc.Dataset.Data.Add(new MemAccessor<double[]>(x.datum));
        acc.Dataset.Labels.Add(new MemAccessor<int>(x.actualLabel));
        state++;
        if (state >= 4)
        {
            CIFAR.WriteData(synthImagesName, acc);
            state = 0;
        }
    };

    ImageDataset synth = Robustness.SynthesizeCounterexamplesAndStore(nn, data, snapshot);

    if (synth.Dataset.Count() == 0)
    {
        Console.WriteLine("Did not synthesize any counterexamples, nothing to dump ...");
        return;
    }

    if (raw_directory)
    {
        throw new NotImplementedException("Output to raw directory format not yet implemented!");
    }
    else
    {
        CIFAR.WriteData(RobustnessOptions.Registry + "-synth", synth);
    }
}