static void Main(string[] args)
{
    string MNISTData = null;
    string MNISTLabels = null;
    int split_size = 0;

    var p = new OptionSet();
    p.Add("datafile=", "MNIST data file name", x => MNISTData = x);
    p.Add("labelfile=", "MNIST label file name", x => MNISTLabels = x);
    p.Add<int>("split-size=", "Number of images per split", (x => split_size = x));

    Cmd.RunOptionSet(p, args);

    if (MNISTData == null || MNISTLabels == null || split_size <= 0)
    {
        Console.WriteLine("Invalid arguments, use --help");
        Environment.Exit(1);
    }

    /* Initialize parameters */
    Options.InitializeNNAnalysis();

    ImageDataset data = MNIST.ReadData(MNISTLabels, MNISTData, MNIST.ALL_IMAGES, 0);

    // Shuffle and split into batches of split_size images, writing each batch to
    // <labelfile>.split_<i> / <datafile>.split_<i>.
    var splits = data.ShuffleSplitMany(split_size);
    int count = 0;
    foreach (var s in splits)
    {
        MNIST.WriteData(MNISTLabels + ".split_" + count.ToString(),
                        MNISTData + ".split_" + count.ToString(), s);
        count++;
    }
}
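// A hypothetical invocation of the split tool above (the executable name and the MNIST
// file names are assumptions; the flags are the ones registered in its OptionSet):
//
//   SplitMNIST.exe --datafile=train-images-idx3-ubyte --labelfile=train-labels-idx1-ubyte --split-size=10000
//
// With the standard 60,000-image MNIST training set this would write six batches,
// train-images-idx3-ubyte.split_0 .. split_5, plus the matching .split_<i> label files.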
static void Main(string[] args)
{
    string MNISTData = null;
    string MNISTLabels = null;
    string[] split_data_files = null;
    string[] split_label_files = null;

    var p = new OptionSet();
    p.Add("datafile=", "MNIST data file name to store result", x => MNISTData = x);
    p.Add("labelfile=", "MNIST label file name to store result", x => MNISTLabels = x);
    p.Add("datafiles=", "MNIST data batches to join", x => split_data_files = x.Split());
    p.Add("labelfiles=", "MNIST label batches to join (corresponding 1-1 with datafiles)", x => split_label_files = x.Split());

    Cmd.RunOptionSet(p, args);

    if (MNISTData == null || MNISTLabels == null ||
        split_data_files == null || split_label_files == null ||
        split_data_files.Length != split_label_files.Length)
    {
        Console.WriteLine("Invalid arguments, use --help");
        Environment.Exit(1);
    }

    // Read every batch, take their union, and write out the joined dataset.
    List<ImageDataset> dss = new List<ImageDataset>();
    Console.WriteLine("Joining files ...");
    for (int i = 0; i < split_data_files.Length; i++)
    {
        Console.WriteLine(split_data_files[i] + " / " + split_label_files[i]);
        var datum = MNIST.ReadData(split_label_files[i], split_data_files[i], MNIST.ALL_IMAGES, 0);
        dss.Add(datum);
    }
    var data = Data.UnionMany(dss);

    Console.WriteLine("Output file ...");
    Console.WriteLine(MNISTData + " / " + MNISTLabels);
    MNIST.WriteData(MNISTLabels, MNISTData, data);
}
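// A hypothetical invocation of the join tool above (executable name is an assumption;
// the quoted, space-separated lists match the x.Split() parsing of --datafiles/--labelfiles;
// shown on multiple lines only for readability):
//
//   JoinMNIST.exe --datafile=train-images-joined --labelfile=train-labels-joined
//                 --datafiles="train-images-idx3-ubyte.split_0 train-images-idx3-ubyte.split_1"
//                 --labelfiles="train-labels-idx1-ubyte.split_0 train-labels-idx1-ubyte.split_1"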
static void Main(string[] args)
{
    string MNISTFile = null;
    string MNISTData = null;
    string MNISTLabels = null;
    bool just_accuracy = false;
    bool just_loss = false;

    var p = new OptionSet();
    p.Add("nnet=", "MNIST neural network file name", x => MNISTFile = x);
    p.Add("datafile=", "MNIST data file name", x => MNISTData = x);
    p.Add("labelfile=", "MNIST label file name", x => MNISTLabels = x);
    p.Add<bool>("optimization=", "Do optimization (Default: true)", (x => RobustnessOptions.DoOptimization = x));
    p.Add<double>("bound=", "Linfinity-ball to search", (x => RobustnessOptions.Epsilon = x));
    p.Add<double>("sub=", "Subsample from 'live' constraints (0.0-1.0)", (x => RobustnessOptions.LiveConstraintSamplingRatio = x));
    p.Add<string>("registry=", "Unique name to store output examples and statistics", (x => RobustnessOptions.Registry = x));
    p.Add<bool>("cegar=", "Do CEGAR (default: true)", (x => RobustnessOptions.CEGAR = x));
    p.Add<string>("only-accuracy", "Only evaluate accuracy", (x => just_accuracy = (x != null)));
    p.Add<string>("only-loss", "Only evaluate loss", (x => just_loss = (x != null)));
    p.Add<string>("no-quant-safety", "Quantization integrality safety off", (x => RobustnessOptions.QuantizationSafety = (x == null)));
    p.Add<string>("max-conf", "Use max-conf objective", (x =>
    {
        if (x != null) { RobustnessOptions.ObjectiveKind = LPSObjectiveKind.MaxConf; }
    }));
    p.Add<double>("winner-diff=", "Winning label should be that much different than second best", (x => RobustnessOptions.LabelConfidenceDiff = x));
    p.Add<string>("log-png", "Log png files", (x => RobustnessOptions.SavePNGCounterexamples = (x != null)));
    bool only_misclass = false;
    p.Add("only-filter-misclass", "Only keep the misclassifications", (x => only_misclass = (x != null)));

    Cmd.RunOptionSet(p, args);

    if (MNISTFile == null || MNISTData == null || MNISTLabels == null)
    {
        Console.WriteLine("Invalid arguments, use --help");
        Environment.Exit(1);
    }

    RobustnessOptions.Dump();
    Options.InitializeNNAnalysis();

    NeuralNet nn = MNIST.GetNN(MNISTFile);
    ImageDataset data = MNIST.ReadData(MNISTLabels, MNISTData, MNIST.ALL_IMAGES, 0);

    if (just_accuracy)
    {
        NNAccuracy.GetAccuracy(nn, data.Dataset);
        return;
    }
    if (just_loss)
    {
        NNAccuracy.GetLoss(nn, data.Dataset);
        return;
    }

    if (only_misclass)
    {
        string filtered = RobustnessOptions.Registry + "-misclass";
        Console.WriteLine("Orig {0} data", data.Dataset.Count());
        var ds = NNAccuracy.KeepMisclass(nn, data.Dataset);
        Console.WriteLine("Kept {0} data", ds.Count());
        ImageDataset ret = new ImageDataset(ds,
            MNIST.InputCoordinates.ChannelCount,
            MNIST.InputCoordinates.RowCount,
            MNIST.InputCoordinates.ColumnCount, true);
        MNIST.WriteData(filtered + "-labels", filtered + "-images", ret);
        return;
    }

    // NB: No snapshotting for MNIST since it never crashes ...
    ImageDataset synth = Robustness.SynthesizeCounterexamplesAndStore(nn, data, x => { return; });
    MNIST.WriteData(RobustnessOptions.Registry + "-synth-labels",
                    RobustnessOptions.Registry + "-synth-images", synth);
}
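// A hypothetical invocation of the robustness tool above (executable name, network file name,
// and registry name are assumptions; the flags are the ones registered in its OptionSet).
// This searches an L-infinity ball of radius 0.05 around each test image and stores any
// counterexamples and statistics under the "mnist-run" registry, also logging PNGs:
//
//   VerifyMNIST.exe --nnet=mnist.nnet --datafile=t10k-images-idx3-ubyte
//                   --labelfile=t10k-labels-idx1-ubyte --bound=0.05 --registry=mnist-run --log-png
//
// Passing --only-accuracy or --only-loss instead just evaluates the network on the dataset and exits.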
static void Main(string[] args)
{
    string MNISTData = null;
    string MNISTLabels = null;
    int how_many = 1;
    RANDTYPE randomness = RANDTYPE.UNIFORM;

    var p = new OptionSet();
    p.Add("datafile=", "MNIST data file name", x => MNISTData = x);
    p.Add("labelfile=", "MNIST label file name", x => MNISTLabels = x);
    p.Add<int>("how-many=", "Number of new images per image", (x => how_many = x));
    p.Add<string>("randomness=", "Gaussian|Uniform", (x => randomness = (x.Equals("Gaussian") ? RANDTYPE.GAUSSIAN : RANDTYPE.UNIFORM)));

    int xoffset = 0;
    int yoffset = 0;
    bool geometric = false;
    p.Add("geometric", "Use geometric transform", (x => geometric = (x != null)));
    p.Add<int>("xoffset=", "x-offset for geometric transform", (x => xoffset = x));
    p.Add<int>("yoffset=", "y-offset for geometric transform", (x => yoffset = x));

    bool random = false;
    double epsilon = 0.0;
    p.Add("random", "Use random perturbation", (x => random = (x != null)));
    p.Add<double>("epsilon=", "Distance (for uniform) or standard deviation (for gaussian) random perturbation", (x => epsilon = x));

    bool brightness = false;
    double brightness_offset = 0.0;
    p.Add("brightness", "Use brightness perturbation", (x => brightness = (x != null)));
    p.Add<double>("brightness-offset=", "Brightness offset (<= RobustnessOptions.MaxValue - RobustnessOptions.MinValue)", (x => brightness_offset = x));

    bool contrast = false;
    double contrast_min_factor = 1.0;
    double contrast_max_factor = 1.0;
    p.Add("contrast", "Use contrast perturbation", (x => contrast = (x != null)));
    p.Add<double>("contrast-min-factor=", "Contrast min factor (0.0-1.0)", (x => contrast_min_factor = x));
    p.Add<double>("contrast-max-factor=", "Contrast max factor (0.0-1.0)", (x => contrast_max_factor = x));

    Cmd.RunOptionSet(p, args);

    if (MNISTData == null || MNISTLabels == null)
    {
        Console.WriteLine("Invalid arguments, use --help");
        Environment.Exit(1);
    }

    /* Initialize parameters */
    Options.InitializeNNAnalysis();

    ImageDataset data = MNIST.ReadData(MNISTLabels, MNISTData, MNIST.ALL_IMAGES, 0);

    // Pick the augmentor for the requested transform; if several flags are given,
    // the first match in this order wins.
    IAugmentor augmentor = null;
    if (geometric)
    {
        augmentor = new AugmentGeometric(MNIST.InputCoordinates, randomness, how_many, xoffset, yoffset);
    }
    else if (random)
    {
        augmentor = new AugmentRandom(MNIST.InputCoordinates, randomness, how_many, epsilon);
    }
    else if (brightness)
    {
        augmentor = new AugmentBrightness(MNIST.InputCoordinates, randomness, how_many, brightness_offset);
    }
    else if (contrast)
    {
        augmentor = new AugmentContrast(MNIST.InputCoordinates, how_many, contrast_min_factor, contrast_max_factor);
    }
    if (augmentor == null)
    {
        Console.WriteLine("No transform selected (--geometric, --random, --brightness or --contrast), use --help");
        Environment.Exit(1);
    }

    // Generate augmented variants of every image, keeping the original label.
    int count = data.Dataset.Count();
    for (int i = 0; i < count; i++)
    {
        double[] datum = data.Dataset.GetDatum(i);
        int label = data.Dataset.GetLabel(i);
        var augmented = augmentor.Augment(datum);
        data.Update(augmented, label);
    }

    MNIST.WriteData(MNISTLabels + ".augmented", MNISTData + ".augmented", data);
}
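// A hypothetical invocation of the augmentation tool above (executable name is an assumption,
// and the brightness offset of 0.3 assumes pixel intensities normalised to [0,1]).
// This generates two brightness-perturbed variants per training image and writes the result to
// train-images-idx3-ubyte.augmented / train-labels-idx1-ubyte.augmented:
//
//   AugmentMNIST.exe --datafile=train-images-idx3-ubyte --labelfile=train-labels-idx1-ubyte
//                    --brightness --brightness-offset=0.3 --how-many=2 --randomness=Uniform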