public static Tuple<INetwork, IFactory> SmallLoLa(string FileName, bool Encrypt)
{
    Console.WriteLine("Small LoLa mode");
    Console.Write("Generating keys in ");
    var start = DateTime.Now;
    // Use an encrypted SEAL-BFV factory when Encrypt is set, otherwise run in plaintext (raw) mode.
    var Factory = Encrypt
        ? (IFactory)new EncryptedSealBfvFactory(new ulong[] { 2277377, 2424833 }, 8192, DecompositionBitCount: 40, GaloisDecompositionBitCount: 40, SmallModulusCount: 3)
        : new RawFactory(8192);
    var end = DateTime.Now;
    Console.WriteLine("{0} seconds", (end - start).TotalSeconds);

    int weightscale = 64; // with a weightscale of 64 the accuracy is 96.92% and the maximal value is 534491448976

    var readerLayer = new LLConvReader()
    {
        FileName = FileName,
        SparseFormat = true,
        NormalizationFactor = 1.0 / 256.0,
        Scale = 16.0,
        InputShape = new int[] { 28, 28 },
        KernelShape = new int[] { 5, 5 },
        Upperpadding = new int[] { 1, 1 },
        Stride = new int[] { 2, 2 },
    };
    var encryptLayer = new EncryptLayer() { Source = readerLayer, Factory = Factory };
    var ConvLayer1 = new LLPoolLayer()
    {
        Source = encryptLayer,
        InputShape = new int[] { 28, 28 },
        KernelShape = new int[] { 5, 5 },
        Upperpadding = new int[] { 1, 1 },
        Stride = new int[] { 2, 2 },
        MapCount = new int[] { 5, 1 },
        WeightsScale = weightscale,
        Weights = SmallModel.Weights_0
    };
    var VectorizeLayer2 = new LLVectorizeLayer() { Source = ConvLayer1 };
    var ActivationLayer3 = new SquareActivation() { Source = VectorizeLayer2 };
    var DenseLayer4 = new LLDenseLayer()
    {
        Source = ActivationLayer3,
        Bias = SmallModel.Biases_1,
        Weights = SmallModel.Weights_1,
        WeightsScale = weightscale,
        InputFormat = EVectorFormat.dense
    };
    return new Tuple<INetwork, IFactory>(DenseLayer4, Factory);
}
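For reference, a minimal sketch (not part of the original sources) of how the Tuple<INetwork, IFactory> returned by SmallLoLa might be consumed, mirroring the PrepareNetwork/GetNext/Decrypt pattern used by the Main methods further below. The file name "mnist-test.tsv" and the assumption that these members are reachable through INetwork are illustrative only.

// Hypothetical driver for the network built by SmallLoLa (assumes PrepareNetwork,
// GetNext, Decrypt and DisposeNetwork are available through INetwork, as in the
// Main methods below; "mnist-test.tsv" is a placeholder file name).
var pair = SmallLoLa("mnist-test.tsv", Encrypt: true);
INetwork network = pair.Item1;
IFactory factory = pair.Item2;
network.PrepareNetwork();
using (var m = network.GetNext())
{
    Utils.ProcessInEnv(env =>
    {
        var dec = m.Decrypt(env);
        // argmax over the class scores in column 0
        int pred = 0;
        for (int j = 1; j < dec.RowCount; j++)
        {
            if (dec[j, 0] > dec[pred, 0]) pred = j;
        }
        Console.WriteLine("prediction {0}", pred);
    }, factory);
}
network.DisposeNetwork();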
public void CalPrediction()
{
    var FileName = "cal_deep_test.tsv";
    var ini = new IniReader(@"cal.model.ini", 4096, 102);
    double weightscale = 1e+6;
    var readerLayer = new LLSingleLineReader()
    {
        FileName = FileName,
        SparseFormat = true,
        NormalizationFactor = 1.0,
        Scale = 1E+10,
    };
    var debugLayer = new DebugLayer() { Source = readerLayer };
    var encryptLayer = new EncryptLayer() { Source = debugLayer };
    var denseLayer = new LLDenseLayer()
    {
        Source = encryptLayer,
        Weights = ini.Weights,
        Bias = ini.Bias,
        WeightsScale = weightscale,
        InputFormat = EVectorFormat.dense
    };
    var network = denseLayer;
    network.PrepareNetwork();
    var pred = network.GetNext().GetColumn(0).Decrypt(null);
    Assert.AreEqual(102, pred.Count);
    // Check the first 10 decrypted outputs against the values recorded by the debug layer.
    for (int i = 0; i < 10; i++)
    {
        Assert.AreEqual(debugLayer.scores[i], pred[i], 1e-3);
    }
}
public void PoolLayerAsSparseToDense()
{
    var Factory = Defaults.RawFactory;
    var v = Vector<double>.Build.DenseOfArray(new double[] { 1, 2, 3 });
    var vec = Factory.GetEncryptedVector(v, EVectorFormat.sparse, 1);
    var m = Factory.GetMatrix(new IVector[] { vec }, EMatrixFormat.ColumnMajor);
    // 6x3 weight matrix made of two stacked 3x3 identity blocks, so the 3-entry input
    // should appear twice in the 6-entry dense output.
    var poolLayer = new LLDenseLayer()
    {
        Factory = Factory,
        Weights = new double[] { 1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1 },
        Bias = new double[] { 0, 0, 0, 0, 0, 0 },
        WeightsScale = 1,
        InputFormat = EVectorFormat.sparse,
        Source = new FakeLayer()
    };
    poolLayer.Prepare();
    var res = poolLayer.Apply(m);
    Utils.ProcessInEnv(env =>
    {
        var dec = res.Decrypt(env);
        Assert.AreEqual(1, dec.ColumnCount);
        Assert.AreEqual(6, dec.RowCount);
        for (int i = 0; i < 6; i++)
        {
            Assert.AreEqual(1 + (i % 3), dec[i, 0]);
        }
    }, Factory);
}
public static Tuple<INetwork, IFactory> LargeLoLa(string FileName, bool Encrypt)
{
    Console.WriteLine("Large LoLa mode");
    WeightsReader wr = new WeightsReader("MnistLargeWeight.csv", "MnistLargeBias.csv");
    Console.Write("Generating keys in ");
    var start = DateTime.Now;
    var Factory = Encrypt
        ? (IFactory)new EncryptedSealBfvFactory(new ulong[] { 2148728833, 2148794369, 2149810177 }, 16384, DecompositionBitCount: 60, GaloisDecompositionBitCount: 60, SmallModulusCount: 7)
        : new RawFactory(16384);
    var end = DateTime.Now;
    Console.WriteLine("{0} seconds", (end - start).TotalSeconds);

    var readerLayer = new LLConvReader
    {
        FileName = FileName,
        SparseFormat = true,
        InputShape = new int[] { 1, 28, 28 },
        KernelShape = new int[] { 1, 8, 8 },
        Upperpadding = new int[] { 0, 1, 1 },
        Lowerpadding = new int[] { 0, 1, 1 },
        Stride = new int[] { 1000, 2, 2 },
        NormalizationFactor = 1.0,
        Scale = 16.0
    };
    var encryptLayer = new EncryptLayer() { Source = readerLayer, Factory = Factory };
    var convLayer1 = new LLPoolLayer()
    {
        Source = encryptLayer,
        InputShape = new int[] { 1, 28, 28 },
        KernelShape = new int[] { 1, 8, 8 },
        Upperpadding = new int[] { 0, 1, 1 },
        Lowerpadding = new int[] { 0, 1, 1 },
        Stride = new int[] { 1000, 2, 2 },
        MapCount = new int[] { 83, 1, 1 },
        WeightsScale = 4096,
        Weights = ((double[])wr.Weights[0]).Select(x => x / 256).ToArray(),
        Bias = (double[])wr.Biases[0]
    };
    var VectorizeLayer2 = new LLVectorizeLayer() { Source = convLayer1 };
    // Alternative activations are kept for reference; the approximated ReLU is the one in use.
    //var activationLayer3 = new SquareActivation() { Source = VectorizeLayer2 };
    var activationLayer3 = new AppxReLUActivation() { Source = VectorizeLayer2 };
    //var activationLayer3 = new LeakyReLUActivation() { Source = VectorizeLayer2 };
    //var activationLayer3 = new ReLUActivation() { Source = VectorizeLayer2 };
    // The convolution engine expresses the second convolution as an equivalent dense layer.
    var convEngine = new ConvolutionEngine()
    {
        InputShape = new int[] { 83, 12, 12 },
        KernelShape = new int[] { 83, 6, 6 },
        Padding = new bool[] { false, false, false },
        Stride = new int[] { 83, 2, 2 },
        MapCount = new int[] { 163, 1, 1 }
    };
    var denseLayer4 = new LLDenseLayer
    {
        Source = activationLayer3,
        WeightsScale = 64,
        Weights = convEngine.GetDenseWeights((double[])wr.Weights[1]),
        Bias = convEngine.GetDenseBias((double[])wr.Biases[1]),
        InputFormat = EVectorFormat.dense,
        ForceDenseFormat = true
    };
    //var activationLayer5 = new SquareActivation() { Source = denseLayer4 };
    var activationLayer5 = new AppxReLUActivation() { Source = denseLayer4 };
    //var activationLayer5 = new LeakyReLUActivation() { Source = denseLayer4 };
    //var activationLayer5 = new ReLUActivation() { Source = denseLayer4 };
    var denseLayer6 = new LLDenseLayer()
    {
        Source = activationLayer5,
        Weights = (double[])wr.Weights[2],
        Bias = (double[])wr.Biases[2],
        WeightsScale = 512,
        InputFormat = EVectorFormat.dense
    };
    return new Tuple<INetwork, IFactory>(denseLayer6, Factory);
}
public static void Main(string[] args)
{
    WeightsReader wr = new WeightsReader("CifarWeight.csv", "CifarBias.csv");
    Console.WriteLine("Generating encryption keys {0}", DateTime.Now);
    var factory = new EncryptedSealBfvFactory(new ulong[] { 2148728833, 2148794369, 2149810177 }, 16384, DecompositionBitCount: 60, GaloisDecompositionBitCount: 60, SmallModulusCount: 7);
    Console.WriteLine("Encryption keys ready {0}", DateTime.Now);
    string fileName = "cifar-test.tsv";
    var readerLayer = new LLConvReader
    {
        FileName = fileName,
        SparseFormat = false,
        InputShape = new int[] { 3, 32, 32 },
        KernelShape = new int[] { 3, 8, 8 },
        Upperpadding = new int[] { 0, 1, 1 },
        Lowerpadding = new int[] { 0, 1, 1 },
        Stride = new int[] { 1000, 2, 2 },
        NormalizationFactor = 1.0,
        Scale = 128.0
    };
    var encryptLayer = new EncryptLayer() { Source = readerLayer, Factory = factory };
    var convLayer1 = new LLPoolLayer()
    {
        Source = encryptLayer,
        InputShape = new int[] { 3, 32, 32 },
        KernelShape = new int[] { 3, 8, 8 },
        Upperpadding = new int[] { 0, 1, 1 },
        Lowerpadding = new int[] { 0, 1, 1 },
        Stride = new int[] { 1000, 2, 2 },
        MapCount = new int[] { 83, 1, 1 },
        WeightsScale = 256.0,
        Weights = (double[])wr.Weights[0],
        Bias = (double[])wr.Biases[0]
    };
    var VectorizeLayer2 = new LLVectorizeLayer() { Source = convLayer1 };
    var activationLayer3 = new SquareActivation() { Source = VectorizeLayer2 };
    var convEngine = new ConvolutionEngine()
    {
        InputShape = new int[] { 83, 14, 14 },
        KernelShape = new int[] { 83, 6, 6 },
        Padding = new bool[] { false, false, false },
        Stride = new int[] { 83, 2, 2 },
        MapCount = new int[] { 163, 1, 1 }
    };
    var denseLayer4 = new LLDenseLayer
    {
        Source = activationLayer3,
        WeightsScale = 512.0,
        Weights = convEngine.GetDenseWeights((double[])wr.Weights[1]),
        Bias = convEngine.GetDenseBias((double[])wr.Biases[1]),
        InputFormat = EVectorFormat.dense,
        ForceDenseFormat = true
    };
    var activationLayer5 = new SquareActivation() { Source = denseLayer4 };
    var denseLayer6 = new LLDenseLayer()
    {
        Source = activationLayer5,
        Weights = (double[])wr.Weights[2],
        Bias = (double[])wr.Biases[2],
        WeightsScale = 1024.0,
        InputFormat = EVectorFormat.dense
    };
    var network = denseLayer6;
    Console.WriteLine("Preparing");
    // Walk the layer chain and enable verbose output so the construction of each layer can be followed.
    for (var p = (INetwork)network; p != null; p = p.Source)
    {
        if (p is BaseLayer b)
        {
            b.Verbose = true;
        }
    }
    network.PrepareNetwork();
    var m = network.GetNext();
    Utils.Show(m, factory, readerLayer.Labels);
    Console.WriteLine("Max computed value {0} ({1})", RawMatrix.Max, Math.Log(RawMatrix.Max) / Math.Log(2));
}
public static void Main(string[] args)
{
    var options = new Options();
    var parsed = Parser.Default.ParseArguments<Options>(args).WithParsed(x => options = x);
    if (parsed.Tag == ParserResultType.NotParsed)
    {
        Environment.Exit(-2);
    }
    WeightsReader wr = new WeightsReader("CifarWeight.csv", "CifarBias.csv");
    // The model has an accuracy of 76.5%.
    // The current parameters (scale) provide an accuracy of 76.31% and use 78.55 + 1 bits of message length.
    // Latency is 740 seconds on the reference machine (Azure B8ms server at rest).
    Console.WriteLine("Generating encryption keys {0}", DateTime.Now);
    IFactory factory = null;
    if (options.Encrypt)
    {
        factory = new EncryptedSealBfvFactory(new ulong[] { 957181001729, 957181034497 }, 16384, DecompositionBitCount: 60, GaloisDecompositionBitCount: 60, SmallModulusCount: 8);
    }
    else
    {
        factory = new RawFactory(16 * 1024);
    }
    Console.WriteLine("Encryption keys ready {0}", DateTime.Now);
    int numberOfRecords = 10000;
    bool verbose = options.Verbose;
    string fileName = "cifar-test.tsv";
    var readerLayer = new LLConvReader
    {
        FileName = fileName,
        SparseFormat = false,
        InputShape = new int[] { 3, 32, 32 },
        KernelShape = new int[] { 3, 8, 8 },
        Upperpadding = new int[] { 0, 1, 1 },
        Lowerpadding = new int[] { 0, 1, 1 },
        Stride = new int[] { 1000, 2, 2 },
        NormalizationFactor = 1.0 / 256.0,
        Scale = 8,
        Verbose = verbose
    };
    var EncryptLayer = new EncryptLayer() { Source = readerLayer, Factory = factory };
    var StartTimingLayer = new TimingLayer() { Source = EncryptLayer, StartCounters = new string[] { "Inference-Time" } };
    var ConvLayer1 = new LLPoolLayer()
    {
        Source = StartTimingLayer,
        InputShape = new int[] { 3, 32, 32 },
        KernelShape = new int[] { 3, 8, 8 },
        Upperpadding = new int[] { 0, 1, 1 },
        Lowerpadding = new int[] { 0, 1, 1 },
        Stride = new int[] { 1000, 2, 2 },
        MapCount = new int[] { 83, 1, 1 },
        WeightsScale = 256.0,
        Weights = (double[])wr.Weights[0],
        Bias = (double[])wr.Biases[0],
        Verbose = verbose
    };
    var VectorizeLayer2 = new LLVectorizeLayer() { Source = ConvLayer1, Verbose = verbose };
    var ActivationLayer3 = new SquareActivation() { Source = VectorizeLayer2, Verbose = verbose };
    var ConvEngine = new ConvolutionEngine()
    {
        InputShape = new int[] { 83, 14, 14 },
        KernelShape = new int[] { 83, 10, 10 },
        Upperpadding = new int[] { 0, 4, 4 },
        Lowerpadding = new int[] { 0, 4, 4 },
        Stride = new int[] { 83, 2, 2 },
        MapCount = new int[] { 112, 1, 1 }
    };
    var DenseLayer4 = new LLDenseLayer
    {
        Source = ActivationLayer3,
        WeightsScale = 512.0,
        Weights = ConvEngine.GetDenseWeights((double[])wr.Weights[1]),
        Bias = ConvEngine.GetDenseBias((double[])wr.Biases[1]),
        InputFormat = EVectorFormat.dense,
        ForceDenseFormat = true,
        Verbose = verbose
    };
    var ActivationLayer5 = new SquareActivation() { Source = DenseLayer4, Verbose = verbose };
    var DenseLayer6 = new LLDenseLayer()
    {
        Source = ActivationLayer5,
        Weights = (double[])wr.Weights[2],
        Bias = (double[])wr.Biases[2],
        WeightsScale = 512.0,
        InputFormat = EVectorFormat.dense,
        Verbose = verbose
    };
    var StopTimingLayer = new TimingLayer() { Source = DenseLayer6, StopCounters = new string[] { "Inference-Time" } };
    var network = StopTimingLayer;
    Console.WriteLine("Preparing");
    network.PrepareNetwork();

    int count = 0;
    int errs = 0;
    int batchSize = 1;
    while (count < numberOfRecords)
    {
        using (var m = network.GetNext())
        {
            Utils.ProcessInEnv(env =>
            {
                var decrypted = m.Decrypt(env);
                // argmax over the class scores in column 0
                int pred = 0;
                for (int j = 1; j < decrypted.RowCount; j++)
                {
                    if (decrypted[j, 0] > decrypted[pred, 0])
                    {
                        pred = j;
                    }
                }
                if (pred != readerLayer.Labels[0])
                {
                    errs++;
                }
                count++;
                if (count % batchSize == 0)
                {
                    Console.Write("errs {0}/{1} accuracy {2:0.000}% prediction {3} label {4} {5}ms", errs, count, 100 - (100.0 * errs / count), pred, readerLayer.Labels[0], TimingLayer.GetStats());
                    if (options.Encrypt)
                    {
                        Console.WriteLine();
                    }
                    else
                    {
                        Console.WriteLine(" 2^{0} largest-value", Math.Log(RawMatrix.Max) / Math.Log(2));
                    }
                }
            }, factory);
        }
    }
    Console.WriteLine("errs {0}/{1} accuracy {2:0.000}%", errs, count, 100 - (100.0 * errs / count));
    network.DisposeNetwork();
    if (!options.Encrypt)
    {
        Console.WriteLine("Max computed value 2^{0}", Math.Log(RawMatrix.Max) / Math.Log(2));
    }
}
static void Main(string[] args)
{
    var ini = new IniReader(@"cal.model.ini", 4096, 102);
    ini.Normalize(@"cal.AffineNormalizer.txt");
    var start = DateTime.Now;
    var Factory = new EncryptedSealBfvFactory(new ulong[] { 4300801 }, 4096, DecompositionBitCount: 60, GaloisDecompositionBitCount: 60, SmallModulusCount: 2);
    double weightscale = 256;
    string FileName = "cal_deep_test.tsv";
    if (!File.Exists(FileName))
    {
        Console.WriteLine("ERROR: Can't find data file {0}", FileName);
        Console.WriteLine("Please use DataPreprocess to obtain the Caltech-101 dataset");
        return;
    }
    var readerLayer = new LLSingleLineReader()
    {
        FileName = FileName,
        SparseFormat = true,
        NormalizationFactor = 1.0,
        Scale = 256,
    };
    var encryptLayer = new EncryptLayer() { Source = readerLayer, Factory = Factory };
    var denseLayer = new LLDenseLayer()
    {
        Source = encryptLayer,
        Weights = ini.Weights,
        Bias = ini.Bias,
        WeightsScale = weightscale,
        InputFormat = EVectorFormat.dense
    };
    var network = denseLayer;
    network.PrepareNetwork();

    int errs = 0;
    var N = 1020;
    IMatrix m = null;
    Utils.ProcessInEnv(env =>
    {
        for (int i = 0; i < N; i++)
        {
            Utils.Time("Prediction+Encryption", () => m = network.GetNext());
            var dec = m.Decrypt(env);
            m.Dispose();
            var l = readerLayer.Labels[0];
            int pred = 0;
            for (int j = 0; j < 101; j++)
            {
                if (dec[j, 0] > dec[pred, 0])
                {
                    pred = j;
                }
            }
            if (pred != l)
            {
                errs++;
            }
            Console.WriteLine("errs {0}/{1} accuracy {2:0.000}% {3} prediction {4} label {5}", errs, i + 1, 100 - (100.0 * errs / (i + 1)), TimingLayer.GetStats(), pred, l);
        }
    }, Factory);
    network.DisposeNetwork();
}