Example #1
 public LPSState(NNInstrumentation instrumentation, double[] origin)
 {
     deferredConstraints_ = new LPSConstraints();
     currentConstraints_  = new LPSConstraints();
     instrumentation_     = instrumentation;
     origin_ = origin;
 }
Example #2
            public static LabelWithConfidence LabelWithConfidence(NeuralNet model, NNInstrumentation instr, double[] datum, bool crop)
            {
                Vector <double> datum_v = DenseVector.OfArray(datum);

                if (crop)
                {
                    datum_v = model.CropMaybe(datum_v);
                }

                double[] outs = model.EvaluateNNConcretePostCrop(datum_v, instr);

//                Console.WriteLine("Outs = {0}", DenseVector.OfArray(outs));


                Tuple <double, int> max    = UMath.Max(outs);
                Tuple <double, int> secmax = UMath.MaxExcluding(max.Item2, outs);

                UMath.SoftMax(outs);

                var result = new LabelWithConfidence
                {
                    datum              = datum,
                    actualLabel        = max.Item2,
                    secBestLabel       = secmax.Item2,
                    softMaxValue       = outs[max.Item2],
                    diffFromSecondBest = Math.Abs(outs[max.Item2] - outs[secmax.Item2])
                };

                return(result);
            }
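For context, a hedged usage sketch of the method above, mirroring the call site in Example #9 below; `model` and `ds` are assumed to be a NeuralNet and an ImageDataset as in that example:

    // Usage sketch: classify a raw (uncropped) datum while recording per-layer
    // instrumentation for a later symbolic pass. Names `model` and `ds` are assumed.
    var instr = new NNInstrumentation();
    double[] pixels = ds.Dataset.GetDatum(0);
    LabelWithConfidence lab = ULabel.LabelWithConfidence(model, instr, pixels, true);
    Console.WriteLine("Label = {0}, confidence = {1}", lab.actualLabel, lab.softMaxValue);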
Example #3
        public double[] EvaluateNNConcretePostCrop(Vector <double> input, NNInstrumentation instr)
        {
            Vector <double> v = input;

            for (int i = 0; i < LayerCount; i++)
            {
                Layer curr = Layers[i];
                var   w    = curr.EvaluateConcrete(v);
                if (instr != null)
                {
                    curr.Instrument(instr, v, w);
                }
                v = w;
            }
            return(v.ToArray());
        }
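A minimal sketch of the two ways this evaluator is typically invoked (the loop above only calls Instrument when instr is non-null); `model` and `datum_v` are assumed as in Example #2:

    // Plain concrete forward pass: passing null skips all Instrument calls.
    double[] plain = model.EvaluateNNConcretePostCrop(datum_v, null);

    // Instrumented pass: each layer records its choices into instr[layer.Index],
    // as shown in Examples #4-#7 below.
    var instr = new NNInstrumentation();
    double[] outs = model.EvaluateNNConcretePostCrop(datum_v, instr);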
Example #4
        public override double ApplyKernelConcrete(NNInstrumentation instr, Vector <double> input, int outIndex, int channel, int row, int column)
        {
            int    argMax = InputCoordinates.GetIndex(channel, row, column);
            double max    = input[argMax];

            for (int i = 0; i < KernelDimension; i++)
            {
                for (int j = 0; j < KernelDimension; j++)
                {
                    if (i == 0 && j == 0)
                    {
                        continue;
                    }

                    int x = row - Padding + i;
                    int y = column - Padding + j;
                    if (x >= InputCoordinates.RowCount || y >= InputCoordinates.ColumnCount)
                    {
                        continue;
                    }

                    int index = InputCoordinates.GetIndex(channel, x, y);
                    if (index < 0 || index >= input.Count)
                    {
                        continue;
                    }

                    if (max < input[index])
                    {
                        argMax = index;
                        max    = input[index];
                    }
                }
            }

            if (instr != null)
            {
                instr[Index].Selections[outIndex] = argMax;
            }

            return(max);
        }
Example #5
 public override void Instrument(NNInstrumentation instr, Vector <double> input, Vector <double> output)
 {
     instr[Index] = Instrumentation.NoInstrumentation();
 }
Example #6
        public override void Instrument(NNInstrumentation instr, Vector <double> input, Vector <double> output)
        {
            var disjunctionChoices = CreateDisjunctionChoices(input);

            instr[Index] = Instrumentation.ReLUInstrumentation(disjunctionChoices);
        }
Example #7
 public override void Instrument(NNInstrumentation instrumentation, Vector <double> input, Vector <double> output)
 {
     instrumentation[Index] = Instrumentation.MaxPoolingInstrumentation(new int[OutputDimension]);
     base.ApplyKernels <NumInstDouble, double, Vector <double>, NNInstrumentation>(instrumentation, ApplyKernelConcrete, input);
 }
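Examples #5-#7 are the per-layer Instrument overrides that populate instr[Index]. Assuming `instr` is the NNInstrumentation filled by a concrete pass (Example #3) and the indexer/field shown in Example #4 (instr[Index].Selections), a hedged read-back sketch:

    // Hypothetical read-back after an instrumented concrete pass: which input index
    // each max-pooling output selected. `maxPoolLayer` is an assumed reference to a
    // max-pooling layer of the net.
    int[] selections = instr[maxPoolLayer.Index].Selections;
    Console.WriteLine("Pooling output 0 came from input index {0}", selections[0]);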
Example #8
            /// <summary>
            /// Synthesize a counterexample from an existing labelled image.
            /// </summary>
            /// <param name="options"></param>
            /// <param name="nn">The model.</param>
            /// <param name="imageLab">The image and labeling information from the network.</param>
            /// <param name="instr"></param>
            /// <param name="realLabel">The label of the image from the training set.</param>
            /// <param name="rowSize"></param>
            /// <param name="colSize"></param>
            /// <param name="isColor"></param>
            /// <returns>NULL if we were not able to synthesize a counterexample, otherwise some information about it.</returns>
            public static Nullable<LabelWithConfidence> SynthesizeCounterexample
                ( NeuralNet nn
                , LPSTerm[] inputs             // Symbolic inputs (cropped)
                , LPSTerm epsilon              // Epsilon variable
                , LabelWithConfidence imageLab // Original image classification info (uncropped)
                , NNInstrumentation instr
                , int realLabel                // Ground truth for this image (from training set)
                , int rowSize                  // Original (uncropped) row size
                , int colSize                  // Original (uncropped) col size
                , bool isColor)
            {
                int origLabel = imageLab.actualLabel;
                int targetLabel = imageLab.secBestLabel;

                int input_dimension_pre_crop  = nn.InputDimensionPreCrop;
                int input_dimension_post_crop = nn.InputDimensionPostCrop;


                double[] orig_image = imageLab.datum;
                double[] orig_image_crop = nn.CropMaybe(DenseVector.OfArray(orig_image)).ToArray();


                if (realLabel != origLabel)
                {
                    Console.WriteLine("This image is misclassifed already! Skipping.");
                    return null;
                }
                if (RobustnessOptions.IgnoreLowConfidence && imageLab.softMaxValue < RobustnessOptions.LowConfidenceThreshold)
                {
                    Console.WriteLine("This image is misclassifed with low confidence! Skipping.");
                    return null;
                }

                // Fast path:
                // DiffInfo diff_info;
                /* *********************
                 * DV: Commenting out the fast path for now (but we are still keeping the Dictionary, for debugging)
                 * *********************
                if (diffDict.TryGetValue(new Tuple<int,int>(origLabel,targetLabel),out diff_info))
                {
                    Console.WriteLine("Got a hit in the difference cache!");
                    Vector<double> diff_counterexample = diff_info.diff;

                    Vector<double> cand = DenseVector.OfArray(orig_image) + diff_counterexample;


                    Console.WriteLine("oooooooooooooooo Checking with the fast path!");

                    double[] cand_arr_crop = nn.CropMaybe(cand).ToArray();

                    if (RobustnessOptions.QuantizationSafety)
                    {
                        Utils.UArray.InPlaceRoundDoubleArray(cand_arr_crop);
                    }

                    LabelWithConfidence candLab = Utils.ULabel.LabelWithConfidence(nn, cand_arr_crop,false); // Already  cropped, don't crop!

                    if (candLab.actualLabel != origLabel)
                    {

                        Console.WriteLine("=> Real counterexample (from fast path)!");
                        diff_info.number++;
                        return candLab;
                    }

                    Console.WriteLine("xxxx Fast path failed, continuing with symbolic interpreter ...");
                    // otherwise continue with the slow path ...
                }
                ***********************/

                var state = new LPSState(instr, orig_image_crop);
                int nomodelcount = 0;

                double[] newImageUnrounded;

            NOMODELLOOP:
                if (nomodelcount++ > 0) return null;

                state.ClearConstraints();

                LPSTerm[] output = nn.EvaluateNNSymbolicPostCrop(state, inputs);

                // Just some tracing ...
                // ReportSparsity(output);

                LPSConstraints currentCts = state.CurrentCts;
                LPSConstraints deferredCts = state.DeferredCts;

                // Conjoin the label formula
                currentCts.And(NNetFormulas.LabelFormula(output, targetLabel, RobustnessOptions.LabelConfidenceDiff));

                // If we are just looking for bounds, then the variables themselves will contain "origin" bounds
                if (RobustnessOptions.DoOptimization)
                {
                    NNETObjectives.AddEpsilonBounds(currentCts, inputs, epsilon, orig_image_crop);
                }

                // Ensure that at least *one* entry is different by at least 1.0
                if (RobustnessOptions.QuantizationSafety)
                {
                    NNETObjectives.AddQuantizationSafety(currentCts, inputs, orig_image_crop);
                }

                // Create objective
                Nullable<LPSObjective> objective = null;
                if (RobustnessOptions.DoOptimization)
                {
                    switch (RobustnessOptions.ObjectiveKind)
                    {
                        case LPSObjectiveKind.MinLinf:
                            objective = NNETObjectives.MinLInf(currentCts, inputs, epsilon, orig_image_crop);
                            break;
                        case LPSObjectiveKind.MaxConf:
                            objective = NNETObjectives.MaxConf(output, origLabel, targetLabel);
                            break;
                        default:
                            break;
                    }
                }
                if (!RobustnessOptions.CEGAR)
                {
                    currentCts.And(deferredCts);
                    deferredCts = new LPSConstraints();
                }

                // CEGAR loop header
                LabelWithConfidence newLab;

                Console.WriteLine(
                    "Current constraints: {0}, deferred: {1}", 
                    currentCts.Count, 
                    deferredCts.Count);

                LPSolver lps = new LPSolver(
                    input_dimension_post_crop, 
                    currentCts.Count + deferredCts.Count, 
                    orig_image_crop, 
                    RobustnessOptions.Epsilon);

                lps.AddConstraints(currentCts, objective);

                int cegar_iterations = 0;

                while (true)
                {
                    if (cegar_iterations++ > RobustnessOptions.CEGARGiveUpIterations)
                    {
                        Console.WriteLine("xxxxxxxxxxxxxxxx Giving up CEGAR, could not find model!");
                        goto NOMODELLOOP;
                    }

                    var newImage = lps.SolveLowLevelLP();

                    currentCts = new LPSConstraints();
                    if (newImage == null)
                    {
                        Console.WriteLine("xxxxxxxxxxxxxxxx No model!");
                        goto NOMODELLOOP;
                    }

                    Console.WriteLine("oooooooooooooooo Found model!");
                    newImageUnrounded = new double[newImage.Length];
                    Array.Copy(newImage, newImageUnrounded, newImage.Length);

                    if (RobustnessOptions.QuantizationSafety)
                    {
                        Utils.UArray.InPlaceRoundDoubleArray(newImage);
                    }
                    int samcount = Utils.UArray.ComputeRoundIdenticals(orig_image_crop, newImage);
                    Console.WriteLine("Synthesized image has {0} identical inputs (after rounding) to original (cropped)", samcount);
                    // Now, try to label the new example
                    newLab = Utils.ULabel.LabelWithConfidence(nn, newImage, false); // Already cropped, don't crop!
                    if (newLab.actualLabel != targetLabel)
                    {
                        if (newLab.actualLabel == realLabel)
                        {
                            // Here the synthesized image is not really a counterexample.
                            // This could be due to either (a) quantization errors or (b) CEGAR
                            // underapproximation. But the only thing we can do here is
                            // add more constraints and re-solve.

                            if (RobustnessOptions.CEGAR)
                                Console.WriteLine("Not really a counterexample, going round CEGAR loop.");

                            int added = 0;
                            
                            // new_image_plus_eps = newImage : 0.0 
                            // so that the length matches the coefficients of each constraint ... 
                            double[] newimage_plus_eps = new double[newImage.Length+1];
                            Array.Copy(newImageUnrounded,newimage_plus_eps,newImage.Length);
                            newimage_plus_eps[newImage.Length] = 0.0;
                            Vector<double> newImageVec_eps = DenseVector.OfArray(newimage_plus_eps);

                            var denumerator = deferredCts.GetEnumerator();

                            Parallel.For(0, deferredCts.Count, i =>
                            {
                                LPSConstraint curr_deferred;
                                if (added > 699) return;

                                lock (lockObj)
                                {

                                    denumerator.MoveNext();
                                    curr_deferred = (LPSConstraint)denumerator.Current;

                                    if (curr_deferred.Added == true) return;

                                }

                                bool sat = Satisfiable(curr_deferred, newImageVec_eps);

                                lock (lockObj)
                                {
                                    if (!sat)
                                    {
                                        lps.AddConstraint(curr_deferred);
                                        // currentCts.And(curr_deferred.Term, curr_deferred.Inequality);
                                        curr_deferred.Added = true;
                                        added++;
                                    }
                                }
                            });



                            Console.WriteLine();
                            Console.WriteLine("Added {0} constraints for CEGAR", added);
                            if (added == 0)
                            {
                                Console.WriteLine("=> CEGAR cannot improve things.");
                                goto NOMODELLOOP;
                                // return null;
                            }
                            // lps.AddConstraints(currentCts, null);
                            continue;
                        }
                        else
                        {
                            Console.WriteLine("=> Real counterexample! (Although with different label than expected)");
                            break;
                        }
                    }
                    else
                    {
                        Console.WriteLine("=> Real counterexample! (New image has second-best label");
                        break;
                    }
                }

                if (RobustnessOptions.DisplaySynthesizedImagesAndPause)
                {
                    Utils.UDraw.DisplayImageAndPause(Utils.UArray.ToIntArray(imageLab.datum), rowSize, colSize, isColor);
                    Utils.UDraw.DisplayImageAndPause(Utils.UArray.ToIntArray(newLab.datum), rowSize, colSize, isColor);
                }

                /* NB: Uncrop the image in newLab */
                newLab.datum = nn.UnCropMaybe(DenseVector.OfArray(orig_image), DenseVector.OfArray(newLab.datum)).ToArray();


                double[] tmp = nn.UnCropMaybe(DenseVector.OfArray(orig_image), DenseVector.OfArray(newImageUnrounded)).ToArray();
                Vector<double> diff_val = DenseVector.OfArray(tmp) - DenseVector.OfArray(orig_image);

                var key = new Tuple<int, int>(origLabel, newLab.actualLabel);
                DiffInfo dinfo;
                if (diffDict.TryGetValue(key, out dinfo))
                {
                    dinfo.number++;
                }
                else
                {
                    dinfo = new DiffInfo();
                    dinfo.diff = diff_val;
                    dinfo.number = 1;
                    diffDict.Add(new Tuple<int, int>(origLabel, newLab.actualLabel), dinfo);
                }

                return newLab;
            }
Example #9
            /// <summary>
            /// Generate and return a list of counterexamples by iterating over the training set
            /// </summary>
            /// <param name="datasetname"></param>
            /// <param name="options"></param>
            /// <param name="nn"></param>
            /// <param name="ds"></param>
            /// <returns></returns>
            public static List<LabelWithConfidence> SynthesizeCounterexamples(
                NeuralNet nn,
                ImageDataset ds, 
                Action<LabelWithConfidence> snapshot)
            {

                /* Initialization stuff */
                List<LabelWithConfidence> counterexamples = new List<LabelWithConfidence>();
                SynthRegistry reg = new SynthRegistry(RobustnessOptions.Registry + ".csv", RobustnessOptions.Registry);

                /* How many training points to do */
                int trainingPointsToDo = (int)Math.Round(ds.Dataset.Count() * RobustnessOptions.DataSetPercentage);
                int completed = 0;

                /* The symbolic variables: NB we use the dimension PostCrop to avoid generating lots of useless variables */
                Tuple<LPSTerm[],LPSTerm> inputs = GenSymbolicInputs(nn.InputDimensionPostCrop);

                // Alternatively (the code is thread-safe already):
                // Parallel.For(0, ds.Dataset.Count(), RobustnessOptions.ParallelOptions, i =>
                for (int i = 0; i < ds.Dataset.Count(); i++)
                {
                        if (completed < trainingPointsToDo)
                        {
                            Console.WriteLine("Image count = {0}", i);
                            NNInstrumentation instr = new NNInstrumentation();
                            LabelWithConfidence imageLab = ULabel.LabelWithConfidence(nn, instr, ds.Dataset.GetDatum(i), true);

                            Nullable<LabelWithConfidence> synthLab = null;

                            try
                            {
                                var stopwatch = new Stopwatch();
                                stopwatch.Start();

                                synthLab = SynthesizeCounterexample(
                                    nn,
                                    inputs.Item1,
                                    inputs.Item2,
                                    imageLab,
                                    instr,
                                    ds.Dataset.GetLabel(i),
                                    ds.RowCount,
                                    ds.ColumnCount,
                                    ds.IsColor);

                                stopwatch.Stop();
                                Console.WriteLine("Processed image in {0} milliseconds", stopwatch.ElapsedMilliseconds);
                                GC.Collect();

                            }
                            catch
                            {
                                lock(lockObj)
                                {
                                    completed++;
                                }
                                continue;
                            }


                            lock (lockObj)
                            {
                                completed++;
                                if (synthLab.HasValue)
                                {
                                    // VERY IMPORTANTLY: Change the label of the counterexample
                                    // to be the label of the original point! This was a horrible bug.
                                    var forRetraining = synthLab.Value;
                                    forRetraining.actualLabel = imageLab.actualLabel;
                                    counterexamples.Add(forRetraining);
                                    snapshot(forRetraining);

                                    Console.WriteLine("forRetraining.label  = {0}", forRetraining.actualLabel);
                                    Console.WriteLine("synthLab.Value.label = {0}", synthLab.Value.actualLabel);

                                    reg.RecordAtomically(
                                        RobustnessOptions.Registry,
                                        i,
                                        imageLab,
                                        synthLab.Value,
                                        RobustnessOptions.ScalePreProcessed,
                                        RobustnessOptions.OffsetPreProcessed,
                                        ds.RowCount,
                                        ds.ColumnCount,
                                        ds.IsColor);
                                }
                                Console.WriteLine("Counterexamples/Processed-so-far: " + counterexamples.Count() + "/" + completed);
                            }
                        }
                }
                return counterexamples;
            }
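A hedged sketch of a driver for the method above; the snapshot callback is simply invoked once per synthesized counterexample. `nn` and `ds` are assumed to be loaded elsewhere, and the enclosing class qualifier is omitted:

    // Hypothetical driver: collect counterexamples over the dataset and log each one.
    List<LabelWithConfidence> cexs = SynthesizeCounterexamples(
        nn,
        ds,
        cex => Console.WriteLine("Counterexample synthesized, label = {0}", cex.actualLabel));
    Console.WriteLine("Total counterexamples: {0}", cexs.Count);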
Example #10
 public override double ApplyKernelConcrete(NNInstrumentation instr, Vector <double> input, int outIndex, int channel, int row, int column)
 {
     return(ApplyKernel <NumInstDouble, double, Vector <double> >(input, channel, row, column));
 }
Example #11
 public abstract double ApplyKernelConcrete(NNInstrumentation instr, Vector <double> input, int outIndex, int channel, int row, int column);
Example #12
 public abstract void Instrument(NNInstrumentation instrumentation, Vector <double> input, Vector <double> output);
Example #13
 public double[] EvaluateNNConcretePostCrop(double[] input, NNInstrumentation instr = null)
 {
     return(EvaluateNNConcretePostCrop(DenseVector.OfArray(input), instr));
 }
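A short sketch of the convenience overload above; omitting the second argument leaves instr at its default of null, so the concrete pass runs without recording any instrumentation. `croppedPixels` is a hypothetical, already-cropped input array:

    // Equivalent to EvaluateNNConcretePostCrop(DenseVector.OfArray(croppedPixels), null).
    double[] outputs = nn.EvaluateNNConcretePostCrop(croppedPixels);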