Example #1
        static void Main(string[] args)
        {
            string CifarDataBatch = args[0];

            /* Initialize parameters */
            Options.InitializeNNAnalysis();

            // Plain old CIFAR binary format
            ImageDataset data = CIFAR.ReadData(CifarDataBatch, CIFAR.ALL_IMAGES, 0);

            Trace.Assert(!data.Dataset.HasUninitialized());


            for (int SplitSize = 100; SplitSize < 10000; SplitSize += 115)
            {
                Console.WriteLine("Splitting, SplitSize = {0}", SplitSize);

                // Split
                var splits = data.ShuffleSplitMany(SplitSize);

                Console.WriteLine("Shuffle-split to {0} splits", splits.Count());

                foreach (var s in splits)
                {
                    Trace.Assert(!s.Dataset.HasUninitialized());
                }

                Console.WriteLine("Joining back");
                // Union
                data = Data.UnionMany(splits);

                Trace.Assert(!data.Dataset.HasUninitialized());
            }
        }
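The asserts in this example only check that no slot was left uninitialized. A hypothetical extra assertion (not in the original) could also verify that the shuffle-split/union round trip preserves the image count; a sketch of the loop body with that check:

            // Sketch: loop body from above, extended with a count-preservation check.
            int before = data.Dataset.Count();
            var splits = data.ShuffleSplitMany(SplitSize);
            data = Data.UnionMany(splits);
            Trace.Assert(!data.Dataset.HasUninitialized());
            Trace.Assert(data.Dataset.Count() == before);   // hypothetical extra check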
    static void Main(string[] args)
    {
        string CifarDataBatch = null;

        int split_size = 0;

        var p = new OptionSet();

        p.Add <string>("dataset=", "CIFAR dataset file name", (x => CifarDataBatch = x));
        p.Add <int>("split-size=", "Number of images per split", (x => split_size = x));

        Cmd.RunOptionSet(p, args);

        if (CifarDataBatch == null || split_size <= 0)
        {
            Console.WriteLine("Invalid arguments, use --help");
            Environment.Exit(1);
        }

        /* Initialize parameters */
        Options.InitializeNNAnalysis();

        // Plain old CIFAR binary format
        ImageDataset data = CIFAR.ReadData(CifarDataBatch, CIFAR.ALL_IMAGES, 0);

        // Split
        var splits = data.ShuffleSplitMany(split_size);
        int count  = 0;

        foreach (var s in splits)
        {
            CIFAR.WriteData(CifarDataBatch + ".split_" + count.ToString(), s);
            count++;
        }
    }
Example #3
    static void Main(string[] args)
    {
        string MNISTData   = null;
        string MNISTLabels = null;
        int    split_size  = 0;

        var p = new OptionSet();

        p.Add("datafile=", "MNIST data file name", x => MNISTData     = x);
        p.Add("labelfile=", "MNIST label file name", x => MNISTLabels = x);
        p.Add <int>("split-size=", "Number of images per split", (x => split_size = x));

        Cmd.RunOptionSet(p, args);

        if (MNISTData == null || MNISTLabels == null || split_size <= 0)
        {
            Console.WriteLine("Invalid arguments, use --help");
            Environment.Exit(1);
        }

        /* Initialize parameters */
        Options.InitializeNNAnalysis();

        ImageDataset data = MNIST.ReadData(MNISTLabels, MNISTData, MNIST.ALL_IMAGES, 0);

        // Split
        var splits = data.ShuffleSplitMany(split_size);
        int count  = 0;

        foreach (var s in splits)
        {
            MNIST.WriteData(MNISTLabels + ".split_" + count.ToString(), MNISTData + ".split_" + count.ToString(), s);
            count++;
        }
    }
            public static void WriteData(string labelFile, string imageFile, ImageDataset data)
            {
                UTF8Encoding encoding = new UTF8Encoding(true);

                Console.WriteLine("Writing labels to: " + labelFile);
                Console.WriteLine("Writing images to: " + imageFile);
                using (FileStream fsL = File.Open(labelFile, FileMode.Create))
                    using (FileStream fsI = File.Open(imageFile, FileMode.Create))
                        WriteData(fsL, fsI, data);
            }
            private static void WriteData(FileStream fsL, FileStream fsI, ImageDataset data)
            {
                {
                    int numPixels = data.RowCount * data.ColumnCount;
                    Console.WriteLine("Writing " + data.Dataset.Count() + " images with " + numPixels + " pixels each");

                    // Step 0: Write the label file header
                    byte[] labelHeader = new byte[8];
                    Array.Copy(BitConverter.GetBytes(2049), 0, labelHeader, 0, 4);
                    Array.Copy(BitConverter.GetBytes(data.Dataset.Count()), 0, labelHeader, 4, 4);
                    if (BitConverter.IsLittleEndian)
                    {
                        Array.Reverse(labelHeader, 0, 4);
                        Array.Reverse(labelHeader, 4, 4);
                    }
                    fsL.Write(labelHeader, 0, 8);

                    // Step 1: Write the image file header
                    byte[] imageHeader = new byte[16];
                    Array.Copy(BitConverter.GetBytes(2051), 0, imageHeader, 0, 4);
                    Array.Copy(BitConverter.GetBytes(data.Dataset.Count()), 0, imageHeader, 4, 4);
                    Array.Copy(BitConverter.GetBytes(data.RowCount), 0, imageHeader, 8, 4);
                    Array.Copy(BitConverter.GetBytes(data.ColumnCount), 0, imageHeader, 12, 4);
                    if (BitConverter.IsLittleEndian)
                    {
                        Array.Reverse(imageHeader, 0, 4);
                        Array.Reverse(imageHeader, 4, 4);
                        Array.Reverse(imageHeader, 8, 4);
                        Array.Reverse(imageHeader, 12, 4);
                    }
                    fsI.Write(imageHeader, 0, imageHeader.Length);

                    // Step 2: Write the labels
                    byte[] labels = new byte[data.Dataset.Count()];
                    for (int i = 0; i < data.Dataset.Count(); i++)
                    {
                        labels[i] = Convert.ToByte(data.Dataset.GetLabel(i));
                    }
                    fsL.Write(labels, 0, labels.Length);

                    // Step 3: Write the images one by one
                    for (int i = 0; i < labels.Length; i++)
                    {
                        fsI.Write(Utils.UArray.ToByteArray(data.Dataset.GetDatum(i)), 0, data.Dataset.GetDatum(i).Length);
                    }
                    fsL.Flush();
                    fsI.Flush();
                    Console.WriteLine("Done writing");
                }
            }
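The writer above emits the standard MNIST/IDX layout: a big-endian magic number (2049 for the label file, 2051 for the image file), big-endian counts and dimensions, then the raw bytes. A minimal reader sketch for the two headers, assuming files produced by the method above (the helper names are illustrative and not part of the library):

            // Sketch (hypothetical helpers): read back and check the IDX headers written
            // by WriteData above. Magic numbers: 2049 = label file, 2051 = image file.
            private static int ReadBigEndianInt32(FileStream fs)
            {
                byte[] b = new byte[4];
                fs.Read(b, 0, 4);
                if (BitConverter.IsLittleEndian)
                {
                    Array.Reverse(b);
                }
                return BitConverter.ToInt32(b, 0);
            }

            private static void CheckIdxHeaders(string labelFile, string imageFile)
            {
                using (FileStream fsL = File.OpenRead(labelFile))
                using (FileStream fsI = File.OpenRead(imageFile))
                {
                    if (ReadBigEndianInt32(fsL) != 2049 || ReadBigEndianInt32(fsI) != 2051)
                    {
                        throw new InvalidDataException("Not an MNIST/IDX label+image pair");
                    }
                    int labelCount = ReadBigEndianInt32(fsL);
                    int imageCount = ReadBigEndianInt32(fsI);
                    int rows       = ReadBigEndianInt32(fsI);
                    int cols       = ReadBigEndianInt32(fsI);
                    Console.WriteLine("{0} labels, {1} images of {2}x{3} pixels", labelCount, imageCount, rows, cols);
                }
            }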
            public static void WriteData(string file, ImageDataset images)
            {
                UTF8Encoding encoding = new UTF8Encoding(true);

                Console.WriteLine("Writing data to: " + file);
                using (FileStream fw = File.Open(file, FileMode.Create))
                {
                    // Write in the images one by one
                    for (int i = 0; i < images.Dataset.Count(); i++)
                    {
                        // Step 4a: Write the label
                        fw.WriteByte(Convert.ToByte(images.Dataset.GetLabel(i)));
                        // Step 4b: Write the image
                        fw.Write(Utils.UArray.ToByteArray(images.Dataset.GetDatum(i)), 0, images.Dataset.GetDatum(i).Length);
                    }
                    fw.Flush();
                    Console.WriteLine("Done writing");
                }
            }
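The single-file writer above produces one record per image: a label byte followed by the raw pixel bytes, which matches the CIFAR-10 binary batch layout. A reader sketch for one record, assuming CIFAR-10 geometry (3 channels of 32x32, i.e. 3072 pixel bytes per image); the helper name is illustrative:

            // Sketch (hypothetical helper): read the record at 'index' from a file written by
            // WriteData above. The record size assumes CIFAR-10: 1 label byte + 3*32*32 pixel bytes.
            private static void ReadRecord(string file, int index, out int label, out byte[] pixels)
            {
                const int numPixels  = 3 * 32 * 32;
                const int recordSize = 1 + numPixels;

                using (FileStream fr = File.OpenRead(file))
                {
                    fr.Seek((long)index * recordSize, SeekOrigin.Begin);
                    label  = fr.ReadByte();
                    pixels = new byte[numPixels];
                    fr.Read(pixels, 0, numPixels);
                }
            }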
Example #7
        private void ImageSetCollection(ImageDataset dataSet)
        {
            ObservableCollection <PatientImage> imageCollection = new ObservableCollection <PatientImage>();

            dataSet.relatedImages = imageCollection;

            SqlCommand cmd = new SqlCommand("spr_SubTreeView_v001", conn);

            cmd.CommandType = CommandType.StoredProcedure;
            cmd.Parameters.Add("@seriesID", SqlDbType.Int).Value = dataSet.seriesID;
            SqlDataReader data = cmd.ExecuteReader();

            while (data.Read())
            {
                PatientImage image = new PatientImage(data.GetInt32(data.GetOrdinal("fileID")));
                imageCollection.Add(image);
            }
            data.Close();
        }
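SqlCommand and SqlDataReader are both IDisposable; a sketch of the same query with using blocks (keeping the stored procedure and parameter from the example) releases them even if Read throws:

        // Sketch: same query as above, with deterministic disposal of the command and reader.
        private void ImageSetCollectionUsing(ImageDataset dataSet)
        {
            var imageCollection = new ObservableCollection<PatientImage>();
            dataSet.relatedImages = imageCollection;

            using (SqlCommand cmd = new SqlCommand("spr_SubTreeView_v001", conn))
            {
                cmd.CommandType = CommandType.StoredProcedure;
                cmd.Parameters.Add("@seriesID", SqlDbType.Int).Value = dataSet.seriesID;

                using (SqlDataReader reader = cmd.ExecuteReader())
                {
                    while (reader.Read())
                    {
                        imageCollection.Add(new PatientImage(reader.GetInt32(reader.GetOrdinal("fileID"))));
                    }
                }
            }
        }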
Example #8
        private void PatientCollection(ObservableCollection <Patient> patientCollection, SqlDataReader data, Boolean showName)
        {
            Encryption encryptedName = new Encryption();
            Patient    patient       = null;
            ObservableCollection <ImageDataset> dataSetCollection = null;
            Boolean patientExist  = false;
            string  patientName   = data.GetString(data.GetOrdinal("Patient Name"));
            string  currentID     = data.GetString(data.GetOrdinal("Patient ID"));
            string  currentSeries = data.GetString(data.GetOrdinal("Series"));
            int     seriesID      = data.GetInt32(data.GetOrdinal("Series ID"));

            foreach (Patient p in patientCollection)
            {
                if (p.patientID.Equals(currentID))
                {
                    dataSetCollection = p.dataSet;
                    patientExist      = true;
                }
            }

            if (patientExist == false)
            {
                dataSetCollection = new ObservableCollection <ImageDataset>();
                int tableID = data.GetInt32(data.GetOrdinal("Table ID"));
                if (showName == true)
                {
                    patient = new Patient(currentID, encryptedName.Decrypt(patientName, tableID), dataSetCollection);
                }
                else
                {
                    patient = new Patient(currentID, currentID, dataSetCollection);
                }
                patientCollection.Add(patient);
            }

            ImageDataset dataSet = new ImageDataset();

            dataSet.series   = currentSeries;
            dataSet.seriesID = seriesID;
            dataSetCollection.Add(dataSet);
        }
Example #9
        public async Task RunAsync(string datasetPath, string outputPath)
        {
            var argument = GetInputArgument();
            var dataset = new ImageDataset(datasetPath, new[] { argument.Config.InputChannels, argument.Config.InputHeight, argument.Config.InputWidth }, 1, PreprocessMethods.None, PostprocessMethods.None);

#if NET471
                await dataset.GetFixedBatchesAsync().ForEachAsync(async batch =>
#else
            await foreach (var batch in dataset.GetFixedBatchesAsync())
#endif
            {
                Run(batch.tensor, argument);
                var outputFile = Path.Combine(outputPath, Path.GetFileNameWithoutExtension(batch.filename[0]) + ".bin");
                using (var bw = new BinaryWriter(File.Open(outputFile, FileMode.Create, FileAccess.Write)))
                {
                    foreach (var outputNode in _outputAddresses)
                    {
                        var buffer = new byte[outputNode.Size];
                        Buffer.BlockCopy(_mainMemoryBuffer, (int)outputNode.Address, buffer, 0, buffer.Length);
                        bw.Write(buffer);
                    }
                }
            }
#if NET471
            );
#endif
        }
            /// <summary>
            /// Generate and return a list of counterexamples by iterating over the training set
            /// </summary>
            /// <param name="datasetname"></param>
            /// <param name="options"></param>
            /// <param name="nn"></param>
            /// <param name="ds"></param>
            /// <returns></returns>
            public static List<LabelWithConfidence> SynthesizeCounterexamples(
                NeuralNet nn,
                ImageDataset ds, 
                Action<LabelWithConfidence> snapshot)
            {

                /* Initialization stuff */
                List<LabelWithConfidence> counterexamples = new List<LabelWithConfidence>();
                SynthRegistry reg = new SynthRegistry(RobustnessOptions.Registry + ".csv", RobustnessOptions.Registry);

                /* How many training points to do */
                int trainingPointsToDo = (int)Math.Round(ds.Dataset.Count() * RobustnessOptions.DataSetPercentage);
                int completed = 0;

                /* The symbolic variables: NB we use the dimension PostCrop to avoid generating lots of useless variables */
                Tuple<LPSTerm[],LPSTerm> inputs = GenSymbolicInputs(nn.InputDimensionPostCrop);

                // Alternatively (the code is thread-safe already):
                // Parallel.For(0, ds.Dataset.Count(), RobustnessOptions.ParallelOptions, i =>
                for (int i = 0; i < ds.Dataset.Count(); i++)
                {
                        if (completed < trainingPointsToDo)
                        {
                            Console.WriteLine("Image count = {0}", i);
                            NNInstrumentation instr = new NNInstrumentation();
                            LabelWithConfidence imageLab = ULabel.LabelWithConfidence(nn, instr, ds.Dataset.GetDatum(i), true);

                            Nullable<LabelWithConfidence> synthLab = null;

                            try
                            {
                                var stopwatch = new Stopwatch();
                                stopwatch.Start();

                                synthLab = SynthesizeCounterexample(
                                    nn,
                                    inputs.Item1,
                                    inputs.Item2,
                                    imageLab,
                                    instr,
                                    ds.Dataset.GetLabel(i),
                                    ds.RowCount,
                                    ds.ColumnCount,
                                    ds.IsColor);

                                stopwatch.Stop();
                                Console.WriteLine("Processed image in {0} milliseconds", stopwatch.ElapsedMilliseconds);
                                GC.Collect();

                            }
                            catch
                            {
                                lock(lockObj)
                                {
                                    completed++;
                                }
                                continue;
                            }


                            lock (lockObj)
                            {
                                completed++;
                                if (synthLab.HasValue)
                                {
                                    // VERY IMPORTANTLY: Change the label of the counterexample
                                    // to be the label of the original point! This was a horrible bug.
                                    var forRetraining = synthLab.Value;
                                    forRetraining.actualLabel = imageLab.actualLabel;
                                    counterexamples.Add(forRetraining);
                                    snapshot(forRetraining);

                                    Console.WriteLine("forRetraining.label  = {0}", forRetraining.actualLabel);
                                    Console.WriteLine("synthLab.Value.label = {0}", synthLab.Value.actualLabel);

                                    reg.RecordAtomically(
                                        RobustnessOptions.Registry,
                                        i,
                                        imageLab,
                                        synthLab.Value,
                                        RobustnessOptions.ScalePreProcessed,
                                        RobustnessOptions.OffsetPreProcessed,
                                        ds.RowCount,
                                        ds.ColumnCount,
                                        ds.IsColor);
                                }
                                Console.WriteLine("Counterexamples/Processed-so-far: " + counterexamples.Count() + "/" + completed);
                            }
                        }
                }
                return counterexamples;
            }
    static void Main(string[] args)
    {
        string MNISTData   = null;
        string MNISTLabels = null;

        int      how_many   = 1;
        RANDTYPE randomness = RANDTYPE.UNIFORM;

        var p = new OptionSet();

        p.Add("datafile=", "MNIST data file name", x => MNISTData     = x);
        p.Add("labelfile=", "MNIST label file name", x => MNISTLabels = x);

        p.Add <int>("how-many=", "Number of new images per image", (x => how_many = x));
        p.Add <string>("randomness=", "Gaussian|Uniform", (x => randomness = (x.Equals("Gaussian") ? RANDTYPE.GAUSSIAN : RANDTYPE.UNIFORM)));

        int  xoffset   = 0;
        int  yoffset   = 0;
        bool geometric = false;

        p.Add("geometric", "Use geometric transform", (x => geometric = (x != null)));
        p.Add <int>("xoffset=", "x-offset for geometric transform", (x => xoffset = x));
        p.Add <int>("yoffset=", "y-offset for geometric transform", (x => yoffset = x));

        bool   random  = false;
        double epsilon = 0.0;

        p.Add("random", "Use random perturbation", (x => random = (x != null)));
        p.Add <double>("epsilon=", "Distance (for uniform) or standard deviation (for gaussian) random perturbation", (x => epsilon = x));

        bool   brightness        = false;
        double brightness_offset = 0.0;

        p.Add("brightness", "Use brightness perturbation", (x => brightness = (x != null)));
        p.Add <double>("brightness-offset=", "Brightness offset (<= RobustnessOptions.MaxValue - RobustnessOptions.MinValue)", (x => brightness_offset = x));

        bool   contrast            = false;
        double contrast_min_factor = 1.0;
        double contrast_max_factor = 1.0;

        p.Add("contrast", "Use contrast perturbation", (x => contrast = (x != null)));
        p.Add <double>("contrast-min-factor=", "Contrast min factor (0.0-1.0)", (x => contrast_min_factor = x));
        p.Add <double>("contrast-max-factor=", "Contrast max factor (0.0-1.0)", (x => contrast_max_factor = x));


        Cmd.RunOptionSet(p, args);

        if (MNISTData == null || MNISTLabels == null)
        {
            Console.WriteLine("Invalid arguments, use --help");
            Environment.Exit(1);
        }

        /* Initialize parameters */
        Options.InitializeNNAnalysis();

        ImageDataset data = MNIST.ReadData(MNISTLabels, MNISTData, MNIST.ALL_IMAGES, 0);

        IAugmentor augmentor = null; // TODO

        if (geometric)
        {
            augmentor = new AugmentGeometric(MNIST.InputCoordinates, randomness, how_many, xoffset, yoffset);
            goto KONT;
        }
        if (random)
        {
            augmentor = new AugmentRandom(MNIST.InputCoordinates, randomness, how_many, epsilon);
            goto KONT;
        }
        if (brightness)
        {
            augmentor = new AugmentBrightness(MNIST.InputCoordinates, randomness, how_many, brightness_offset);
            goto KONT;
        }
        if (contrast)
        {
            augmentor = new AugmentContrast(MNIST.InputCoordinates, how_many, contrast_min_factor, contrast_max_factor);
            goto KONT;
        }

KONT:

        int count = data.Dataset.Count();

        for (int i = 0; i < count; i++)
        {
            double[] datum     = data.Dataset.GetDatum(i);
            int      label     = data.Dataset.GetLabel(i);
            var      augmented = augmentor.Augment(datum);
            data.Update(augmented, label);
        }

        MNIST.WriteData(MNISTLabels + ".augmented", MNISTData + ".augmented", data);
    }
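Note that if none of the transform flags (geometric, random, brightness, contrast) is given, augmentor stays null and the loop above throws a NullReferenceException. A small guard, placed right after the KONT label, would catch this (a sketch, not in the original):

        // Sketch: guard against running with no augmentation selected.
        if (augmentor == null)
        {
            Console.WriteLine("No augmentation selected, use --help");
            Environment.Exit(1);
        }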
    static void Main(string[] args)
    {
        string CifarDataBatch = null;

        int      how_many   = 1;
        RANDTYPE randomness = RANDTYPE.UNIFORM;

        var p = new OptionSet();

        p.Add <string>("dataset=", "CIFAR dataset file name", (x => CifarDataBatch = x));
        p.Add <int>("how-many=", "Number of new images per image", (x => how_many = x));
        p.Add <string>("randomness=", "Gaussian|Uniform", (x => randomness = (x.Equals("Gaussian") ? RANDTYPE.GAUSSIAN : RANDTYPE.UNIFORM)));

        int  xoffset   = 0;
        int  yoffset   = 0;
        bool geometric = false;

        p.Add("geometric", "Use geometric transform", (x => geometric = (x != null)));
        p.Add <int>("xoffset=", "x-offset for geometric transform", (x => xoffset = x));
        p.Add <int>("yoffset=", "y-offset for geometric transform", (x => yoffset = x));

        bool   random  = false;
        double epsilon = 0.0;

        p.Add("random", "Use random perturbation", (x => random = (x != null)));
        p.Add <double>("epsilon=", "Distance (for uniform) or standard deviation (for gaussian) random perturbation", (x => epsilon = x));

        bool   brightness        = false;
        double brightness_offset = 0.0;

        p.Add("brightness", "Use brightness perturbation", (x => brightness = (x != null)));
        p.Add <double>("brightness-offset=", "Brightness offset (<= RobustnessOptions.MaxValue - RobustnessOptions.MinValue)", (x => brightness_offset = x));

        bool   contrast            = false;
        double contrast_min_factor = 1.0;
        double contrast_max_factor = 1.0;

        p.Add("contrast", "Use contrast perturbation", (x => contrast = (x != null)));
        p.Add <double>("contrast-min-factor=", "Contrast min factor (0.0-1.0)", (x => contrast_min_factor = x));
        p.Add <double>("contrast-max-factor=", "Contrast max factor (0.0-1.0)", (x => contrast_max_factor = x));


        bool lossy_jpeg   = false;
        int  photoquality = 90;

        p.Add("lossy-jpeg", "Use lossy jpeg perturbation (default photoquality = 90)", (x => lossy_jpeg = (x != null)));
        p.Add <int>("jpeg-photoquality=", "Lossy jpeg photoquality", (x => photoquality = x));


        bool  rotate = false;
        float angle  = 45.0F;

        p.Add("rotation", "Rotation transformation (default angle = 45.0)", (x => rotate = (x != null)));
        p.Add <double>("rotation-angle=", (x => angle = (float)x));


        bool perturbe_only = false;

        p.Add("perturbe-only", "Only perturbe (not augment)", (x => perturbe_only = (x != null)));

        Cmd.RunOptionSet(p, args);

        if (CifarDataBatch == null)
        {
            Console.WriteLine("Invalid arguments, use --help");
            Environment.Exit(1);
        }

        /* Initialize parameters */
        Options.InitializeNNAnalysis();

        // Plain old CIFAR binary format
        ImageDataset data = CIFAR.ReadData(CifarDataBatch, CIFAR.ALL_IMAGES, 0);

        IAugmentor augmentor = null; // TODO

        if (geometric)
        {
            augmentor = new AugmentGeometric(CIFAR.InputCoordinates, randomness, how_many, xoffset, yoffset);
            goto KONT;
        }
        if (random)
        {
            augmentor = new AugmentRandom(CIFAR.InputCoordinates, randomness, how_many, epsilon);
            goto KONT;
        }
        if (brightness)
        {
            augmentor = new AugmentBrightness(CIFAR.InputCoordinates, randomness, how_many, brightness_offset);
            goto KONT;
        }
        if (contrast)
        {
            augmentor = new AugmentContrast(CIFAR.InputCoordinates, how_many, contrast_min_factor, contrast_max_factor);
            goto KONT;
        }
        if (lossy_jpeg)
        {
            augmentor = new AugmentLossyJpeg(CIFAR.InputCoordinates, how_many, photoquality);
            goto KONT;
        }
        if (rotate)
        {
            augmentor = new AugmentRotation(CIFAR.InputCoordinates, how_many, angle);
            goto KONT;
        }


KONT:

        int count = data.Dataset.Count();

        ImageDataset initial = null;

        if (perturbe_only)
        {
            initial = new ImageDataset(new Dataset(10), CIFAR.InputCoordinates.ChannelCount,
                                       CIFAR.InputCoordinates.RowCount,
                                       CIFAR.InputCoordinates.ColumnCount, true);
        }
        else
        {
            initial = data;
        }

        for (int i = 0; i < count; i++)
        {
            double[] datum     = data.Dataset.GetDatum(i);
            int      label     = data.Dataset.GetLabel(i);
            var      augmented = augmentor.Augment(datum);
            initial.Update(augmented, label);
        }

        if (perturbe_only)
        {
            CIFAR.WriteData(CifarDataBatch + ".perturbed", initial);
        }
        else
        {
            CIFAR.WriteData(CifarDataBatch + ".augmented", initial);
        }
    }
Example #13
    static void Main(string[] args)
    {
        string CifarNNFile    = null;
        string CifarDataBatch = null;

        bool just_accuracy = false;
        bool just_loss     = false;

        bool raw_directory = false;

        var p = new OptionSet();

        p.Add <string>("nnet=", "CIFAR neural network file name", (x => CifarNNFile = x));
        p.Add <string>("dataset=", "CIFAR dataset file name", (x => CifarDataBatch = x));
        p.Add <string>("rawdir", "If set then --dataset value should be a directory in raw directory format", (x => raw_directory = (x != null)));
        p.Add <bool>  ("optimization=", "Do optimization (Default: true)", (x => RobustnessOptions.DoOptimization = x));
        p.Add <double>("sub=", "Subsample from 'live' constraints (0.0-1.0)", (x => RobustnessOptions.LiveConstraintSamplingRatio = x));
        p.Add <string>("registry=", "Unique name to store output examples and statistics", (x => RobustnessOptions.Registry = x));
        p.Add <bool>  ("cegar=", "Do CEGAR (default: true)", (x => RobustnessOptions.CEGAR = x));
        p.Add <string>("only-accuracy", "Only evaluate accuracy", (x => just_accuracy = (x != null)));
        p.Add <string>("only-loss", "Only evaluate loss", (x => just_loss = (x != null)));

        p.Add <double>("bound=", "Linfinity-ball to search", (x => RobustnessOptions.Epsilon = x));
        p.Add <double>("minval=", "Minimum value of each entry", (x => RobustnessOptions.MinValue = x));
        p.Add <double>("maxval=", "Maximum value of each entry", (x => RobustnessOptions.MaxValue = x));
        p.Add <string>("no-quant-safety", "Quantization integrality safety off", (x => RobustnessOptions.QuantizationSafety = (x == null)));

        p.Add <double>("scale-preprocessed=", "If image data is preprocessed, scale before dumping to registry", (x => RobustnessOptions.ScalePreProcessed = x));
        p.Add <double>("offset-preprocessed=", "If image data is preprocessed, offset scaled before dumping to registry", (x => RobustnessOptions.OffsetPreProcessed = x));

        p.Add <string>("max-conf", "Use max-conf objective", (x =>
        {
            if (x != null)
            {
                RobustnessOptions.ObjectiveKind = LPSObjectiveKind.MaxConf;
            }
        }));

        p.Add <double>("winner-diff=", "Winning label should be that much different than second best", (x => RobustnessOptions.LabelConfidenceDiff = x));
        p.Add <string>("log-png", "Log png files", (x => RobustnessOptions.SavePNGCounterexamples = (x != null)));

        bool   only_filter = false;
        double filter_conf = 0.98;

        p.Add("only-filter", "Only filter by confidence", (x => only_filter = (x != null)));
        p.Add <double>("filter-conf=", "Filter confidence", (x => filter_conf = x));



        Cmd.RunOptionSet(p, args);

        if (CifarNNFile == null || CifarDataBatch == null)
        {
            Console.WriteLine("Invalid arguments, use --help");
            Environment.Exit(1);
        }

        /* Initialize parameters */
        Options.InitializeNNAnalysis();
        NeuralNet nn = CIFAR.GetNN(CifarNNFile);


        ImageDataset data;

        if (raw_directory)
        {
            // our raw data format (see lmdb2raw.py)
            data = CIFAR.ReadDirectoryData(CifarDataBatch);
        }
        else
        {
            // Plain old CIFAR binary format
            data = CIFAR.ReadData(CifarDataBatch, CIFAR.ALL_IMAGES, 0);
        }

        if (just_accuracy)
        {
            NNAccuracy.GetAccuracy(nn, data.Dataset);
            return;
        }

        if (just_loss)
        {
            NNAccuracy.GetLoss(nn, data.Dataset);
            return;
        }


        if (only_filter)
        {
            string filtered = RobustnessOptions.Registry + "-filtered-" + filter_conf.ToString();

            Console.WriteLine("Orig {0} data", data.Dataset.Count());

            var ds = NNAccuracy.KeepAboveConfidenceThreshold(nn, data.Dataset, filter_conf);

            Console.WriteLine("Kept {0} data", ds.Count());

            ImageDataset ret = new ImageDataset(ds,
                                                CIFAR.InputCoordinates.ChannelCount,
                                                CIFAR.InputCoordinates.RowCount,
                                                CIFAR.InputCoordinates.ColumnCount, true);

            CIFAR.WriteData(filtered, ret);
            return;
        }



        RobustnessOptions.Dump();


        string synthImagesName = RobustnessOptions.Registry + "-synth";

        int labelcount = data.Dataset.LabelCount();

        ImageDataset acc = new ImageDataset(new Dataset(labelcount),
                                            CIFAR.InputCoordinates.ChannelCount,
                                            CIFAR.InputCoordinates.RowCount,
                                            CIFAR.InputCoordinates.ColumnCount, true);
        int state = 0;

        Action <LabelWithConfidence> snapshot = x =>
        {
            acc.Dataset.Data.Add(new MemAccessor <double[]>(x.datum));
            acc.Dataset.Labels.Add(new MemAccessor <int>(x.actualLabel));
            state++;
            if (state >= 4)
            {
                CIFAR.WriteData(synthImagesName, acc);
                state = 0;
            }
        };

        ImageDataset synth = Robustness.SynthesizeCounterexamplesAndStore(nn, data, snapshot);

        if (synth.Dataset.Count() == 0)
        {
            Console.WriteLine("Did not synthesize any counterexamples, nothing to dump ...");
            return;
        }

        if (raw_directory)
        {
            throw new NotImplementedException("Output to raw directory format not yet implemented!");
        }
        else
        {
            CIFAR.WriteData(RobustnessOptions.Registry + "-synth", synth);
        }
    }
Example #14
        static async Task Main(string[] args)
        {
            AppDomain.CurrentDomain.UnhandledException += (s, e) =>
            {
                if (e.ExceptionObject is Exception ex)
                {
                    Console.WriteLine("Fatal: " + ex.Message);

                    Console.WriteLine(ex.ToString());
                }
                else
                {
                    Console.WriteLine("Fatal: Unexpected error occurred.");
                }
                Environment.Exit(-1);
            };

            Options options = null;

            Parser.Default.ParseArguments <Options>(args)
            .WithParsed(o => options = o);
            if (options == null)
            {
                return;
            }

            Graph graph;

            switch (options.InputFormat.ToLowerInvariant())
            {
            case "caffe":
            {
                var file  = File.ReadAllBytes(options.Input);
                var model = Caffe.NetParameter.Parser.ParseFrom(file);
                var tfc   = new CaffeToGraphConverter(model);
                tfc.Convert();
                graph = tfc.Graph;
                break;
            }

            case "paddle":
            {
                var tfc = new PaddleToGraphConverter(options.Input);
                tfc.Convert(0);
                graph = tfc.Graph;
                break;
            }

            case "tflite":
            {
                var file  = File.ReadAllBytes(options.Input);
                var model = tflite.Model.GetRootAsModel(new FlatBuffers.ByteBuffer(file));
                var tfc   = new TfLiteToGraphConverter(model, model.Subgraphs(0).Value);
                tfc.Convert();
                graph = tfc.Graph;
                break;
            }

            case "k210model":
                graph = null;
                break;

            default:
                throw new ArgumentException("input-format");
            }

            var outputFormat = options.OutputFormat.ToLowerInvariant();

            switch (outputFormat)
            {
            case "tf":
            {
                var ctx = new GraphPlanContext();
                graph.Plan(ctx);

                using (var f = File.Open(options.Output, FileMode.Create, FileAccess.Write))
                    await ctx.SaveAsync(f);
                break;
            }

            case "addpad":
            {
                Transform.Process(graph, new Transform[] {
                        new Conv2dAddSpaceToBatchNdTransform()
                    });

                var ctx = new GraphPlanContext();
                graph.Plan(ctx);

                using (var f = File.Open(options.Output, FileMode.Create, FileAccess.Write))
                    await ctx.SaveAsync(f);
                break;
            }

            case "tflite":
            {
                await ConvertToTFLite(graph, options.Output);

                break;
            }

            case "k210model":
            case "k210pb":
            {
                float? mean = null, std = null;
                PostprocessMethods pm = PostprocessMethods.Normalize0To1;
                if (options.Postprocess == "n1to1")
                {
                    pm = PostprocessMethods.NormalizeMinus1To1;
                }
                else if (!string.IsNullOrWhiteSpace(options.Postprocess))
                {
                    var match = Regex.Match(options.Postprocess, @"mean:(?<mean>(-?\d+)(\.\d+)?),std:(?<std>(-?\d+)(\.\d+)?)");
                    if (match.Success)
                    {
                        mean = float.Parse(match.Groups["mean"].Value);
                        std  = float.Parse(match.Groups["std"].Value);
                    }
                    else
                    {
                        throw new ArgumentOutOfRangeException("Invalid postprocess method");
                    }
                }

                if (options.InputFormat.ToLowerInvariant() != "tflite")
                {
                    var tmpTflite = Path.GetTempFileName();
                    await ConvertToTFLite(graph, tmpTflite);

                    var file = File.ReadAllBytes(tmpTflite);
                    File.Delete(tmpTflite);
                    var model = tflite.Model.GetRootAsModel(new FlatBuffers.ByteBuffer(file));
                    var tfc   = new TfLiteToGraphConverter(model, model.Subgraphs(0).Value);
                    tfc.Convert();
                    graph = tfc.Graph;
                }

                Transform.Process(graph, new Transform[] {
                        new EliminateReshapeTransform(),
                        new EliminateTwoReshapeTransform(),
                        new EliminateTensorflowReshapeTransform(),
                        new TensorflowReshapeToFlattenTransform(),
                        new K210SeparableConv2dTransform(),
                        new K210SpaceToBatchNdAndValidConv2dTransform(),
                        new K210SameConv2dTransform(),
                        new K210Stride2Conv2dTransform(),
                        new GlobalAveragePoolTransform(),
                        options.FloatFc ? (Transform) new DummyTransform() : new K210FullyConnectedTransform(),
                        new LeakyReluTransform(),
                        new K210Conv2dWithNonTrivialActTransform(),
                        new K210Conv2dWithMaxAvgPoolTransform(),
                        new Conv2d1x1ToFullyConnectedTransform(),
                        new K210EliminateAddRemovePaddingTransform(),
                        new QuantizedAddTransform(),
                        new QuantizedMaxPool2dTransform(),
                        new QuantizedResizeNearestNeighborTransform(),
                        new ExclusiveConcatenationTransform(),
                        new QuantizedExclusiveConcatenationTransform(),
                        new QuantizedConcatenationTransform(),
                        new EliminateQuantizeDequantizeTransform(),
                        new EliminateInputQuantizeTransform(),
                        new K210EliminateInputUploadTransform(),
                        new K210EliminateConv2dUploadTransform(),
                        new K210EliminateUploadAddPaddingTransform(),
                        new K210EliminateConv2dRequantizeTransform(),
                        options.ChannelwiseOutput ? (Transform) new K210Conv2dToChannelwiseTransform() : new DummyTransform(),
                        //new EliminateDequantizeOutputTransform()
                    });

                {
                    var ctx = new GraphPlanContext();
                    graph.Plan(ctx);
                    if (outputFormat == "k210model")
                    {
                        var dim = graph.Inputs.First().Output.Dimensions.ToArray();

                        Dataset dataset;
                        if (options.DatasetFormat == "image")
                        {
                            dataset = new ImageDataset(
                                options.Dataset,
                                dim.Skip(1).ToArray(),
                                1,
                                PreprocessMethods.None,
                                pm,
                                mean,
                                std);
                        }
                        else if (options.DatasetFormat == "raw")
                        {
                            dataset = new RawDataset(
                                options.Dataset,
                                dim.Skip(1).ToArray(),
                                1,
                                PreprocessMethods.None,
                                pm,
                                mean,
                                std);
                        }
                        else
                        {
                            throw new ArgumentException("Invalid dataset format");
                        }

                        var k210c = new GraphToK210Converter(graph, options.WeightsBits);
                        await k210c.ConvertAsync(
                            dataset,
                            ctx,
                            Path.GetDirectoryName(options.Output),
                            Path.GetFileNameWithoutExtension(options.Output),
                            options.ChannelwiseOutput);
                    }
                    else
                    {
                        using (var f = File.Open(options.Output, FileMode.Create, FileAccess.Write))
                            await ctx.SaveAsync(f);
                    }
                }
                break;
            }

            case "k210script":
            {
                {
                    var dim   = graph.Inputs.First().Output.Dimensions.ToArray();
                    var k210c = new GraphToScriptConverter(graph);
                    await k210c.ConvertAsync(
                        Path.GetDirectoryName(options.Output),
                        Path.GetFileNameWithoutExtension(options.Output));
                }
                break;
            }

            case "inference":
            {
                if (options.InputFormat.ToLowerInvariant() != "k210model")
                {
                    throw new ArithmeticException("Inference mode only supports k210model input.");
                }

                var emulator = new NnCase.Converter.K210.Emulator.K210Emulator(
                    File.ReadAllBytes(options.Input));
                await emulator.RunAsync(options.Dataset, options.Output);

                break;
            }

            default:
                throw new ArgumentException("output-format");
            }
        }
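The options.Postprocess value handled above is either the literal n1to1 or a mean:<float>,std:<float> pair matched by the regular expression in the k210model branch. A small self-contained check of which strings that pattern accepts (the sample values are made up for illustration):

        // Sketch: exercise the postprocess pattern used above with a few sample strings.
        static void CheckPostprocessFormat()
        {
            var pattern = @"mean:(?<mean>(-?\d+)(\.\d+)?),std:(?<std>(-?\d+)(\.\d+)?)";
            foreach (var s in new[] { "mean:127.5,std:127.5", "mean:0,std:1", "std:1,mean:0" })
            {
                var match = System.Text.RegularExpressions.Regex.Match(s, pattern);
                Console.WriteLine(match.Success
                    ? string.Format("{0} -> mean={1}, std={2}", s, match.Groups["mean"].Value, match.Groups["std"].Value)
                    : string.Format("{0} -> no match", s));
            }
        }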
    static void Main(string[] args)
    {
        string MNISTFile   = null;
        string MNISTData   = null;
        string MNISTLabels = null;
        var    p           = new OptionSet();


        bool just_accuracy = false;
        bool just_loss     = false;

        p.Add("nnet=", "MNIST neural network file name", x => MNISTFile = x);
        p.Add("datafile=", "MNIST data file name", x => MNISTData       = x);
        p.Add("labelfile=", "MNIST label file name", x => MNISTLabels   = x);
        p.Add <bool>("optimization=", "Do optimization (Default: true)", (x => RobustnessOptions.DoOptimization = x));
        p.Add <double>("bound=", "Linfinity-ball to search", (x => RobustnessOptions.Epsilon = x));
        p.Add <double>("sub=", "Subsample from 'live' constraints (0.0-1.0)", (x => RobustnessOptions.LiveConstraintSamplingRatio = x));
        p.Add <string>("registry=", "Unique name to store output examples and statistics", (x => RobustnessOptions.Registry = x));
        p.Add <bool>("cegar=", "Do CEGAR (default: true)", (x => RobustnessOptions.CEGAR = x));
        p.Add <string>("only-accuracy", "Only evaluate accuracy", (x => just_accuracy = (x != null)));
        p.Add <string>("only-loss", "Only evaluate loss", (x => just_loss = (x != null)));

        p.Add <string>("no-quant-safety", "Quantization integrality safety off", (x => RobustnessOptions.QuantizationSafety = (x == null)));


        p.Add <string>("max-conf", "Use max-conf objective", (x => {
            if (x != null)
            {
                RobustnessOptions.ObjectiveKind = LPSObjectiveKind.MaxConf;
            }
        }));

        p.Add <double>("winner-diff=", "Winning label should be that much different than second best", (x => RobustnessOptions.LabelConfidenceDiff = x));


        p.Add <string>("log-png", "Log png files", (x => RobustnessOptions.SavePNGCounterexamples = (x != null)));

        bool only_misclass = false;

        p.Add("only-filter-misclass", "Only keep the misclassifications", (x => only_misclass = (x != null)));



        Cmd.RunOptionSet(p, args);

        if (MNISTFile == null || MNISTData == null || MNISTLabels == null)
        {
            Console.WriteLine("Invalid arguments, use --help");
            Environment.Exit(1);
        }

        RobustnessOptions.Dump();

        Options.InitializeNNAnalysis();

        NeuralNet    nn   = MNIST.GetNN(MNISTFile);
        ImageDataset data = MNIST.ReadData(MNISTLabels, MNISTData, MNIST.ALL_IMAGES, 0);


        if (just_accuracy)
        {
            NNAccuracy.GetAccuracy(nn, data.Dataset);
            return;
        }

        if (just_loss)
        {
            NNAccuracy.GetLoss(nn, data.Dataset);
            return;
        }


        if (only_misclass)
        {
            string filtered = RobustnessOptions.Registry + "-misclass";

            Console.WriteLine("Orig {0} data", data.Dataset.Count());

            var ds = NNAccuracy.KeepMisclass(nn, data.Dataset);

            Console.WriteLine("Kept {0} data", ds.Count());

            ImageDataset ret = new ImageDataset(ds,
                                                MNIST.InputCoordinates.ChannelCount,
                                                MNIST.InputCoordinates.RowCount,
                                                MNIST.InputCoordinates.ColumnCount, true);

            MNIST.WriteData(filtered + "-labels", filtered + "-images", ret);
            return;
        }

        // NB: No snapshotting for MNIST since it never crashes ...
        ImageDataset synth = Robustness.SynthesizeCounterexamplesAndStore(nn, data, x => { return; });

        MNIST.WriteData(RobustnessOptions.Registry + "-synth-labels",
                        RobustnessOptions.Registry + "-synth-images", synth);
    }
            public static ImageDataset SynthesizeCounterexamplesAndStore(NeuralNet nn, ImageDataset ds, Action<LabelWithConfidence> snapshot)
            {
                List<double[]> data = new List<double[]>();
                List<int> labs = new List<int>();

                List<LabelWithConfidence> results = SynthesizeCounterexamples(nn, ds, snapshot);

                for (int i = 0; i < results.Count(); i++)
                {
                    data.Add(results[i].datum);
                    labs.Add(results[i].actualLabel);
                }

                Dataset newdata = new Dataset(data, labs, ds.Dataset.LabelCount());
                return new ImageDataset(newdata, ds.ChannelCount, ds.RowCount, ds.ColumnCount, ds.IsColor);
            }
 public TreeViewImageDatasetViewModel(ImageDataset imgDataset, TreeViewPatientViewModel parentRegion) : base(null, true)
 {
     _imgDataset = imgDataset;
 }