public void FVKeyGenerationNET()
{
    // Keys produced by a KeyGenerator must carry the hash block of the encryption
    // parameters they were generated for, and the size of the stored evaluation-key
    // ciphertexts must track the requested decomposition bit count (the asserts
    // below pin the exact sizes observed for each dbc choice).
    var encParms = new EncryptionParameters();
    {
        // Small parameter set: n = 64 with a single 60-bit coefficient modulus.
        encParms.NoiseStandardDeviation = 3.19;
        encParms.PolyModulus = "1x^64 + 1";
        encParms.CoeffModulus = new List<SmallModulus> { DefaultParams.SmallMods60Bit(0) };
        encParms.PlainModulus = 1 << 6;

        var ctx = new SEALContext(encParms);
        var generator = new KeyGenerator(ctx);

        // Fresh public/secret keys are tagged with the parameter hash.
        Assert.IsTrue(generator.PublicKey.HashBlock.Equals(encParms.HashBlock));
        Assert.IsTrue(generator.SecretKey.HashBlock.Equals(encParms.HashBlock));

        var evalKeys = new EvaluationKeys();

        // dbc = 60: the key for size-2 relinearization holds 2 polynomials.
        generator.GenerateEvaluationKeys(60, evalKeys);
        Assert.AreEqual(evalKeys.HashBlock, encParms.HashBlock);
        Assert.AreEqual(2, evalKeys.Key(2)[0].Size);

        // dbc = 30: key size doubles to 4.
        generator.GenerateEvaluationKeys(30, 1, evalKeys);
        Assert.AreEqual(evalKeys.HashBlock, encParms.HashBlock);
        Assert.AreEqual(4, evalKeys.Key(2)[0].Size);

        // dbc = 2: key size grows to 60.
        generator.GenerateEvaluationKeys(2, 2, evalKeys);
        Assert.AreEqual(evalKeys.HashBlock, encParms.HashBlock);
        Assert.AreEqual(60, evalKeys.Key(2)[0].Size);
    }
    {
        // Larger parameter set: n = 256 with a three-prime coefficient modulus.
        encParms.NoiseStandardDeviation = 3.19;
        encParms.PolyModulus = "1x^256 + 1";
        encParms.CoeffModulus = new List<SmallModulus>
        {
            DefaultParams.SmallMods60Bit(0),
            DefaultParams.SmallMods30Bit(0),
            DefaultParams.SmallMods30Bit(1)
        };
        encParms.PlainModulus = 1 << 6;

        var ctx = new SEALContext(encParms);
        var generator = new KeyGenerator(ctx);

        Assert.AreEqual(generator.PublicKey.HashBlock, encParms.HashBlock);
        Assert.AreEqual(generator.SecretKey.HashBlock, encParms.HashBlock);

        var evalKeys = new EvaluationKeys();

        generator.GenerateEvaluationKeys(60, 2, evalKeys);
        Assert.AreEqual(evalKeys.HashBlock, encParms.HashBlock);
        Assert.AreEqual(2, evalKeys.Key(2)[0].Size);

        generator.GenerateEvaluationKeys(30, 2, evalKeys);
        Assert.AreEqual(evalKeys.HashBlock, encParms.HashBlock);
        Assert.AreEqual(4, evalKeys.Key(2)[0].Size);

        generator.GenerateEvaluationKeys(4, 1, evalKeys);
        Assert.AreEqual(evalKeys.HashBlock, encParms.HashBlock);
        Assert.AreEqual(30, evalKeys.Key(2)[0].Size);
    }
}
public static void ExampleRelinearizationPart1()
{
    // Demonstrates how the timing of relinearization affects the remaining noise
    // budget when four ciphertexts are multiplied together: Path 1 multiplies
    // without relinearizing; Path 2 relinearizes the intermediate products first.
    Console.WriteLine("Example 1: Performing relinearization too early can increase noise in the final result.");

    // Set up encryption parameters
    var parms = new EncryptionParameters();
    parms.SetPolyModulus("1x^4096 + 1");
    parms.SetCoeffModulus(ChooserEvaluator.DefaultParameterOptions[4096]);
    parms.SetPlainModulus(1 << 8);

    /*
     * The choice of decomposition_bit_count (dbc) can affect the performance of relinearization
     * noticeably. A reasonable choice for it is between 1/10 and 1/2 of the significant bit count
     * of the coefficient modulus. Sometimes when the dbc needs to be very small (due to noise growth),
     * it might make more sense to move up to a larger PolyModulus and CoeffModulus, and set dbc to
     * be as large as possible.
     *
     * A smaller dbc will make relinearization slower. A larger dbc will increase noise growth
     * while not making relinearization any faster. Here, the CoeffModulus has 116 significant
     * bits, so we choose dbc to be half of this. We can expect to see extreme differences in
     * noise growth between the relinearizing and non-relinearizing cases due to the decomposition
     * bit count being so large.
     */
    parms.SetDecompositionBitCount(58);

    // Validate the parameters
    parms.Validate();

    /*
     * By default, KeyGenerator.Generate() will generate no evaluation keys. This means that we
     * cannot perform any relinearization. However, this is sufficient for performing all other
     * homomorphic evaluation operations as they do not use evaluation keys, and is enough for
     * now as we start by demonstrating the computation without relinearization.
     */
    Console.WriteLine("Generating keys ...");
    var generator = new KeyGenerator(parms);
    generator.Generate();
    Console.WriteLine("... key generation complete");
    var publicKey = generator.PublicKey;
    var secretKey = generator.SecretKey;

    /*
     * Suppose we want to homomorphically multiply four ciphertexts together. Does it make sense
     * to relinearize at an intermediate stage of the computation?
     */

    // Encrypt plaintexts to generate the four fresh ciphertexts
    var plain1 = new Plaintext("5");
    var plain2 = new Plaintext("6");
    var plain3 = new Plaintext("7");
    var plain4 = new Plaintext("8");
    Console.WriteLine("Encrypting values { 5, 6, 7, 8 } as { encrypted1, encrypted2, encrypted3, encrypted4 }");
    var encryptor = new Encryptor(parms, publicKey);
    var encrypted1 = encryptor.Encrypt(plain1);
    var encrypted2 = encryptor.Encrypt(plain2);
    var encrypted3 = encryptor.Encrypt(plain3);
    var encrypted4 = encryptor.Encrypt(plain4);

    // We need a Decryptor to be able to measure the inherent noise
    var decryptor = new Decryptor(parms, secretKey);

    // What are the noise budgets in the four ciphertexts?
    Console.WriteLine("Noise budgets in the four ciphertexts: {0} bits, {1} bits, {2} bits, {3} bits",
        decryptor.InvariantNoiseBudget(encrypted1),
        decryptor.InvariantNoiseBudget(encrypted2),
        decryptor.InvariantNoiseBudget(encrypted3),
        decryptor.InvariantNoiseBudget(encrypted4));

    // Construct an Evaluator
    var evaluator = new Evaluator(parms);

    // Perform first part of computation
    Console.WriteLine("Computing encProd1 as encrypted1*encrypted2 ...");
    var encProd1 = evaluator.Multiply(encrypted1, encrypted2);
    Console.WriteLine("Computing encProd2 as encrypted3*encrypted4 ...");
    var encProd2 = evaluator.Multiply(encrypted3, encrypted4);

    Console.WriteLine();
    Console.WriteLine("Path 1: No relinearization.");

    // Compute product of all four
    Console.WriteLine("Computing encResult as encProd1*encProd2 ...");
    var encResult = evaluator.Multiply(encProd1, encProd2);

    // Now enc_result has size 5
    Console.WriteLine("Size of encResult: {0}", encResult.Size);

    // How much noise budget are we left with?
    var noiseBudgetNoRelin = decryptor.InvariantNoiseBudget(encResult);
    Console.WriteLine("Noise budget in encResult: {0} bits", noiseBudgetNoRelin);

    /*
     * We didn't create any evaluation keys, so we can't relinearize at all with the current
     * Evaluator. In general, relinearizing down from size K to any smaller size (but at least 2)
     * requires at least K-2 evaluation keys. In this case we wish to relinearize encProd1 and
     * encProd2, which both have size 3. Thus we need only one evaluation key.
     *
     * We can create this new evaluation key by calling KeyGenerator.GenerateEvaluationKeys(...).
     * Alternatively, we could have created it already in the beginning by calling
     * generator.Generate(1) instead of generator.Generate().
     *
     * We will also need a new Evaluator, as the previous one was constructed without enough
     * (indeed, any) evaluation keys. It is not possible to add new evaluation keys to a previously
     * created Evaluator.
     */
    generator.GenerateEvaluationKeys(1);
    var evaluationKeys = generator.EvaluationKeys;
    var evaluator2 = new Evaluator(parms, evaluationKeys);

    // Now with relinearization
    Console.WriteLine("");
    Console.WriteLine("Path 2: With relinearization");

    // What if we do intermediate relinearization of encProd1 and encProd2?
    Console.WriteLine("Relinearizing encProd1 and encProd2 to size 2 ...");
    var encRelinProd1 = evaluator2.Relinearize(encProd1);
    var encRelinProd2 = evaluator2.Relinearize(encProd2);

    // Now multiply the relinearized products together
    Console.WriteLine("Computing encResult as encRelinProd1*encRelinProd2");
    encResult = evaluator2.Multiply(encRelinProd1, encRelinProd2);

    // Now enc_result has size 3
    Console.WriteLine("Size of encResult: {0}", encResult.Size);

    // How much noise budget are we left with?
    var noiseBudgetRelin = decryptor.InvariantNoiseBudget(encResult);
    Console.WriteLine("Noise budget in encResult: {0} bits", noiseBudgetRelin);

    /*
     * While in this case the noise increased significantly due to relinearization, in other
     * computations the situation might be entirely different. Indeed, recall that larger
     * ciphertext sizes can have a huge adverse effect on noise growth in multiplication.
     * Also recall that homomorphic multiplication is much slower when the ciphertexts are
     * larger.
     */
}
public void BFVKeyGenerationNET()
{
    // Verifies BFV key generation: public/secret keys, evaluation keys, and Galois
    // keys must all carry the parameter hash block, and the stored key-ciphertext
    // sizes must track the requested decomposition bit count. The expected sizes
    // below (2/4/60, and Galois key counts 10 and 14) are the values this library
    // version produces for these parameter sets.
    var parms = new EncryptionParameters();
    {
        // First parameter set: n = 64 with a single 60-bit coefficient modulus.
        parms.NoiseStandardDeviation = 3.19;
        parms.PolyModulus = "1x^64 + 1";
        parms.CoeffModulus = new List<SmallModulus> { DefaultParams.SmallMods60Bit(0) };
        parms.PlainModulus = 1 << 6;
        var context = new SEALContext(parms);
        var keygen = new KeyGenerator(context);
        Assert.IsTrue(keygen.PublicKey.HashBlock.Equals(parms.HashBlock));
        Assert.IsTrue(keygen.SecretKey.HashBlock.Equals(parms.HashBlock));

        // Evaluation keys: smaller dbc -> larger key ciphertexts (2 -> 4 -> 60).
        var evk = new EvaluationKeys();
        keygen.GenerateEvaluationKeys(60, evk);
        Assert.AreEqual(evk.HashBlock, parms.HashBlock);
        Assert.AreEqual(2, evk.Key(2)[0].Size);
        keygen.GenerateEvaluationKeys(30, 1, evk);
        Assert.AreEqual(evk.HashBlock, parms.HashBlock);
        Assert.AreEqual(4, evk.Key(2)[0].Size);
        keygen.GenerateEvaluationKeys(2, 2, evk);
        Assert.AreEqual(evk.HashBlock, parms.HashBlock);
        Assert.AreEqual(60, evk.Key(2)[0].Size);

        // Default Galois keys: the key count (10 here) is fixed by the polynomial
        // degree; the per-key ciphertext size again tracks the dbc.
        var galks = new GaloisKeys();
        keygen.GenerateGaloisKeys(60, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.AreEqual(2, galks.Key(3)[0].Size);
        Assert.AreEqual(10, galks.Size);
        keygen.GenerateGaloisKeys(30, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.AreEqual(4, galks.Key(3)[0].Size);
        Assert.AreEqual(10, galks.Size);
        keygen.GenerateGaloisKeys(2, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.AreEqual(60, galks.Key(3)[0].Size);
        Assert.AreEqual(10, galks.Size);

        // Explicit Galois element lists: only the requested elements get keys.
        keygen.GenerateGaloisKeys(60, new List<UInt64> { 1, 3, 5, 7 }, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.IsTrue(galks.HasKey(1));
        Assert.IsTrue(galks.HasKey(3));
        Assert.IsTrue(galks.HasKey(5));
        Assert.IsTrue(galks.HasKey(7));
        Assert.IsFalse(galks.HasKey(9));
        Assert.IsFalse(galks.HasKey(127));
        Assert.AreEqual(2, galks.Key(1)[0].Size);
        Assert.AreEqual(2, galks.Key(3)[0].Size);
        Assert.AreEqual(2, galks.Key(5)[0].Size);
        Assert.AreEqual(2, galks.Key(7)[0].Size);
        Assert.AreEqual(4, galks.Size);
        keygen.GenerateGaloisKeys(30, new List<UInt64> { 1, 3, 5, 7 }, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.IsTrue(galks.HasKey(1));
        Assert.IsTrue(galks.HasKey(3));
        Assert.IsTrue(galks.HasKey(5));
        Assert.IsTrue(galks.HasKey(7));
        Assert.IsFalse(galks.HasKey(9));
        Assert.IsFalse(galks.HasKey(127));
        Assert.AreEqual(4, galks.Key(1)[0].Size);
        Assert.AreEqual(4, galks.Key(3)[0].Size);
        Assert.AreEqual(4, galks.Key(5)[0].Size);
        Assert.AreEqual(4, galks.Key(7)[0].Size);
        Assert.AreEqual(4, galks.Size);
        keygen.GenerateGaloisKeys(2, new List<UInt64> { 1, 3, 5, 7 }, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.IsTrue(galks.HasKey(1));
        Assert.IsTrue(galks.HasKey(3));
        Assert.IsTrue(galks.HasKey(5));
        Assert.IsTrue(galks.HasKey(7));
        Assert.IsFalse(galks.HasKey(9));
        Assert.IsFalse(galks.HasKey(127));
        Assert.AreEqual(60, galks.Key(1)[0].Size);
        Assert.AreEqual(60, galks.Key(3)[0].Size);
        Assert.AreEqual(60, galks.Key(5)[0].Size);
        Assert.AreEqual(60, galks.Key(7)[0].Size);
        Assert.AreEqual(4, galks.Size);

        // Single-element requests, including the largest odd element for n = 64 (127).
        keygen.GenerateGaloisKeys(30, new List<UInt64> { 1 }, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.IsTrue(galks.HasKey(1));
        Assert.IsFalse(galks.HasKey(3));
        Assert.IsFalse(galks.HasKey(127));
        Assert.AreEqual(4, galks.Key(1)[0].Size);
        Assert.AreEqual(1, galks.Size);
        keygen.GenerateGaloisKeys(30, new List<UInt64> { 127 }, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.IsFalse(galks.HasKey(1));
        Assert.IsTrue(galks.HasKey(127));
        Assert.AreEqual(4, galks.Key(127)[0].Size);
        Assert.AreEqual(1, galks.Size);
    }
    {
        // Second parameter set: n = 256 with a three-prime coefficient modulus.
        parms.NoiseStandardDeviation = 3.19;
        parms.PolyModulus = "1x^256 + 1";
        parms.CoeffModulus = new List<SmallModulus>
        {
            DefaultParams.SmallMods60Bit(0),
            DefaultParams.SmallMods30Bit(0),
            DefaultParams.SmallMods30Bit(1)
        };
        parms.PlainModulus = 1 << 6;
        var context = new SEALContext(parms);
        var keygen = new KeyGenerator(context);
        Assert.AreEqual(keygen.PublicKey.HashBlock, parms.HashBlock);
        Assert.AreEqual(keygen.SecretKey.HashBlock, parms.HashBlock);

        // Evaluation keys for the larger modulus chain (sizes 2 -> 4 -> 30).
        var evk = new EvaluationKeys();
        keygen.GenerateEvaluationKeys(60, 2, evk);
        Assert.AreEqual(evk.HashBlock, parms.HashBlock);
        Assert.AreEqual(2, evk.Key(2)[0].Size);
        keygen.GenerateEvaluationKeys(30, 2, evk);
        Assert.AreEqual(evk.HashBlock, parms.HashBlock);
        Assert.AreEqual(4, evk.Key(2)[0].Size);
        keygen.GenerateEvaluationKeys(4, 1, evk);
        Assert.AreEqual(evk.HashBlock, parms.HashBlock);
        Assert.AreEqual(30, evk.Key(2)[0].Size);

        // Default Galois keys: 14 keys for this larger polynomial degree.
        var galks = new GaloisKeys();
        keygen.GenerateGaloisKeys(60, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.AreEqual(2, galks.Key(3)[0].Size);
        Assert.AreEqual(14, galks.Size);
        keygen.GenerateGaloisKeys(30, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.AreEqual(4, galks.Key(3)[0].Size);
        Assert.AreEqual(14, galks.Size);
        keygen.GenerateGaloisKeys(2, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.AreEqual(60, galks.Key(3)[0].Size);
        Assert.AreEqual(14, galks.Size);

        // Explicit element lists; 511 is the largest odd element for n = 256.
        keygen.GenerateGaloisKeys(60, new List<UInt64> { 1, 3, 5, 7 }, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.IsTrue(galks.HasKey(1));
        Assert.IsTrue(galks.HasKey(3));
        Assert.IsTrue(galks.HasKey(5));
        Assert.IsTrue(galks.HasKey(7));
        Assert.IsFalse(galks.HasKey(9));
        Assert.IsFalse(galks.HasKey(511));
        Assert.AreEqual(2, galks.Key(1)[0].Size);
        Assert.AreEqual(2, galks.Key(3)[0].Size);
        Assert.AreEqual(2, galks.Key(5)[0].Size);
        Assert.AreEqual(2, galks.Key(7)[0].Size);
        Assert.AreEqual(4, galks.Size);
        keygen.GenerateGaloisKeys(30, new List<UInt64> { 1, 3, 5, 7 }, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.IsTrue(galks.HasKey(1));
        Assert.IsTrue(galks.HasKey(3));
        Assert.IsTrue(galks.HasKey(5));
        Assert.IsTrue(galks.HasKey(7));
        Assert.IsFalse(galks.HasKey(9));
        Assert.IsFalse(galks.HasKey(511));
        Assert.AreEqual(4, galks.Key(1)[0].Size);
        Assert.AreEqual(4, galks.Key(3)[0].Size);
        Assert.AreEqual(4, galks.Key(5)[0].Size);
        Assert.AreEqual(4, galks.Key(7)[0].Size);
        Assert.AreEqual(4, galks.Size);
        keygen.GenerateGaloisKeys(2, new List<UInt64> { 1, 3, 5, 7 }, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.IsTrue(galks.HasKey(1));
        Assert.IsTrue(galks.HasKey(3));
        Assert.IsTrue(galks.HasKey(5));
        Assert.IsTrue(galks.HasKey(7));
        Assert.IsFalse(galks.HasKey(9));
        Assert.IsFalse(galks.HasKey(511));
        Assert.AreEqual(60, galks.Key(1)[0].Size);
        Assert.AreEqual(60, galks.Key(3)[0].Size);
        Assert.AreEqual(60, galks.Key(5)[0].Size);
        Assert.AreEqual(60, galks.Key(7)[0].Size);
        Assert.AreEqual(4, galks.Size);
        keygen.GenerateGaloisKeys(30, new List<UInt64> { 1 }, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.IsTrue(galks.HasKey(1));
        Assert.IsFalse(galks.HasKey(3));
        Assert.IsFalse(galks.HasKey(511));
        Assert.AreEqual(4, galks.Key(1)[0].Size);
        Assert.AreEqual(1, galks.Size);
        keygen.GenerateGaloisKeys(30, new List<UInt64> { 511 }, galks);
        Assert.AreEqual(galks.HashBlock, parms.HashBlock);
        Assert.IsFalse(galks.HasKey(1));
        Assert.IsTrue(galks.HasKey(511));
        Assert.AreEqual(4, galks.Key(511)[0].Size);
        Assert.AreEqual(1, galks.Size);
    }
}
public void FVKeyGenerationNET()
{
    // Verifies FV key generation with hand-built parameters: generated keys are
    // non-trivial, re-generation refreshes all randomness, and requesting more
    // evaluation keys yields complete, non-zero key material for each key.
    var parms = new EncryptionParameters(MemoryPoolHandle.AcquireNew());
    parms.SetDecompositionBitCount(4);
    parms.SetNoiseStandardDeviation(3.19);
    parms.SetNoiseMaxDeviation(35.06);

    var coeffModulus = new BigUInt(48);
    coeffModulus.Set("FFFFFFFFC001");
    parms.SetCoeffModulus(coeffModulus);

    var plainModulus = new BigUInt(7);
    plainModulus.Set(1 << 6);
    parms.SetPlainModulus(plainModulus);

    // 1x^64 + 1 requires 65 coefficients: constant term and degree-64 term set to 1.
    var polyModulus = new BigPoly(65, 1);
    polyModulus[0].Set(1);
    polyModulus[64].Set(1);
    parms.SetPolyModulus(polyModulus);

    parms.Validate();

    var keygen = new KeyGenerator(parms, MemoryPoolHandle.AcquireNew());
    keygen.Generate(1);

    // Freshly generated keys must be non-zero.
    Assert.IsFalse(keygen.PublicKey[0].IsZero);
    Assert.IsFalse(keygen.PublicKey[1].IsZero);
    Assert.IsFalse(keygen.SecretKey.IsZero);

    // One evaluation key with 12 components per item (the count this parameter
    // set produces for dbc = 4), all non-zero.
    Assert.AreEqual(1, keygen.EvaluationKeys.Size);
    Assert.AreEqual(12, keygen.EvaluationKeys.Keys[0].Item1.Size);
    Assert.AreEqual(12, keygen.EvaluationKeys.Keys[0].Item2.Size);
    for (int i = 0; i < 12; ++i)
    {
        Assert.IsFalse(keygen.EvaluationKeys[0].Item1[i].IsZero);
        Assert.IsFalse(keygen.EvaluationKeys[0].Item2[i].IsZero);
    }

    // Re-generating must replace every key with fresh randomness.
    var publicKey = keygen.PublicKey;
    var secretKey = keygen.SecretKey;
    var evaluationKeys = keygen.EvaluationKeys;
    keygen.Generate(1);
    Assert.IsFalse(publicKey[0].Equals(keygen.PublicKey[0]));
    Assert.IsFalse(publicKey[1].Equals(keygen.PublicKey[1]));
    Assert.IsFalse(secretKey.Equals(keygen.SecretKey));
    for (int i = 0; i < 12; ++i)
    {
        Assert.IsFalse(keygen.EvaluationKeys[0].Item1[i].Equals(evaluationKeys[0].Item1[i]));
        Assert.IsFalse(keygen.EvaluationKeys[0].Item2[i].Equals(evaluationKeys[0].Item2[i]));
    }

    // Requesting three evaluation keys must produce three complete keys.
    keygen.GenerateEvaluationKeys(3);
    Assert.AreEqual(3, keygen.EvaluationKeys.Size);
    for (int k = 0; k < 3; ++k)
    {
        // Size checks hoisted out of the element loop (they were repeated 12x before).
        Assert.AreEqual(12, keygen.EvaluationKeys.Keys[k].Item1.Size);
        Assert.AreEqual(12, keygen.EvaluationKeys.Keys[k].Item2.Size);
        for (int i = 0; i < 12; ++i)
        {
            // Fix: the original asserted [1].Item1 and [2].Item2 twice each,
            // never checking [1].Item2 or [2].Item1. Cover both items of every key.
            Assert.IsFalse(keygen.EvaluationKeys[k].Item1[i].IsZero);
            Assert.IsFalse(keygen.EvaluationKeys[k].Item2[i].IsZero);
        }
    }
}
public void FVKeyGenerationNET()
{
    // Property-style variant of the FV key-generation test: keys are non-trivial,
    // re-generation refreshes all randomness, and multiple evaluation keys are
    // fully populated.
    var parms = new EncryptionParameters
    {
        DecompositionBitCount = 4,
        NoiseStandardDeviation = 3.19,
        NoiseMaxDeviation = 35.06
    };
    var coeffModulus = parms.CoeffModulus;
    coeffModulus.Resize(48);
    coeffModulus.Set("FFFFFFFFC001");
    var plainModulus = parms.PlainModulus;
    plainModulus.Resize(7);
    plainModulus.Set(1 << 6);

    // Fix: representing 1x^64 + 1 requires 65 coefficients. The previous
    // Resize(64, 1) + [63].Set(1) produced 1x^63 + 1, inconsistent with the
    // sibling test that builds new BigPoly(65, 1) and sets index 64.
    var polyModulus = parms.PolyModulus;
    polyModulus.Resize(65, 1);
    polyModulus[0].Set(1);
    polyModulus[64].Set(1);

    var keygen = new KeyGenerator(parms);
    keygen.Generate(1);

    // Freshly generated keys must be non-zero.
    Assert.IsFalse(keygen.PublicKey[0].IsZero);
    Assert.IsFalse(keygen.PublicKey[1].IsZero);
    Assert.IsFalse(keygen.SecretKey.IsZero);

    // One evaluation key with 12 components per item, all non-zero.
    Assert.AreEqual(1, keygen.EvaluationKeys.Size);
    Assert.AreEqual(12, keygen.EvaluationKeys.Keys[0].Item1.Size);
    Assert.AreEqual(12, keygen.EvaluationKeys.Keys[0].Item2.Size);
    for (int i = 0; i < 12; ++i)
    {
        Assert.IsFalse(keygen.EvaluationKeys[0].Item1[i].IsZero);
        Assert.IsFalse(keygen.EvaluationKeys[0].Item2[i].IsZero);
    }

    // Re-generating must replace every key with fresh randomness.
    var publicKey = keygen.PublicKey;
    var secretKey = keygen.SecretKey;
    var evaluationKeys = keygen.EvaluationKeys;
    keygen.Generate(1);
    Assert.IsFalse(publicKey[0].Equals(keygen.PublicKey[0]));
    Assert.IsFalse(publicKey[1].Equals(keygen.PublicKey[1]));
    Assert.IsFalse(secretKey.Equals(keygen.SecretKey));
    for (int i = 0; i < 12; ++i)
    {
        Assert.IsFalse(keygen.EvaluationKeys[0].Item1[i].Equals(evaluationKeys[0].Item1[i]));
        Assert.IsFalse(keygen.EvaluationKeys[0].Item2[i].Equals(evaluationKeys[0].Item2[i]));
    }

    // Requesting three evaluation keys must produce three complete keys.
    keygen.GenerateEvaluationKeys(3);
    Assert.AreEqual(3, keygen.EvaluationKeys.Size);
    for (int k = 0; k < 3; ++k)
    {
        // Size checks hoisted out of the element loop (they were repeated 12x before).
        Assert.AreEqual(12, keygen.EvaluationKeys.Keys[k].Item1.Size);
        Assert.AreEqual(12, keygen.EvaluationKeys.Keys[k].Item2.Size);
        for (int i = 0; i < 12; ++i)
        {
            // Fix: the original asserted [1].Item1 and [2].Item2 twice each,
            // never checking [1].Item2 or [2].Item1. Cover both items of every key.
            Assert.IsFalse(keygen.EvaluationKeys[k].Item1[i].IsZero);
            Assert.IsFalse(keygen.EvaluationKeys[k].Item2[i].IsZero);
        }
    }
}
// Round-trips `keys` through `stream` into `testKeys` and asserts that size,
// hash block, decomposition bit count, and every key polynomial's dimensions
// survive serialization. Extracted because the identical sequence appeared
// eight times inline in EvaluationKeysSaveLoadNET.
private static void AssertEvaluationKeysRoundTrip(MemoryStream stream, EvaluationKeys keys, EvaluationKeys testKeys)
{
    stream.Seek(0, SeekOrigin.Begin);
    keys.Save(stream);
    stream.Seek(0, SeekOrigin.Begin);
    testKeys.Load(stream);
    Assert.AreEqual(keys.Size, testKeys.Size);
    Assert.IsTrue(keys.HashBlock.Equals(testKeys.HashBlock));
    Assert.AreEqual(keys.DecompositionBitCount, testKeys.DecompositionBitCount);
    // Keys are indexed from ciphertext size 2 upward.
    for (int j = 0; j < testKeys.Size; j++)
    {
        for (int i = 0; i < testKeys.Key(j + 2).Count; i++)
        {
            Assert.AreEqual(keys.Key(j + 2)[i].Size, testKeys.Key(j + 2)[i].Size);
            Assert.AreEqual(keys.Key(j + 2)[i].UInt64Count, testKeys.Key(j + 2)[i].UInt64Count);
        }
    }
}

public void EvaluationKeysSaveLoadNET()
{
    // Verifies EvaluationKeys Save/Load round trips for empty key sets and for
    // keys generated with a range of decomposition bit counts, under two
    // different parameter sets.
    var stream = new MemoryStream();
    {
        // First parameter set: n = 64 with a single 60-bit coefficient modulus.
        var parms = new EncryptionParameters();
        parms.NoiseStandardDeviation = 3.19;
        parms.PolyModulus = "1x^64 + 1";
        parms.PlainModulus = 1 << 6;
        parms.CoeffModulus = new List<SmallModulus> { DefaultParams.SmallMods60Bit(0) };
        var context = new SEALContext(parms);
        var keygen = new KeyGenerator(context);

        // Empty key set round-trips too.
        var keys = new EvaluationKeys();
        Assert.AreEqual(keys.DecompositionBitCount, 0);
        var test_keys = new EvaluationKeys();
        AssertEvaluationKeysRoundTrip(stream, keys, test_keys);
        Assert.AreEqual(0, keys.Size);

        keygen.GenerateEvaluationKeys(1, 1, keys);
        Assert.AreEqual(keys.DecompositionBitCount, 1);
        AssertEvaluationKeysRoundTrip(stream, keys, test_keys);

        keygen.GenerateEvaluationKeys(2, 1, keys);
        Assert.AreEqual(keys.DecompositionBitCount, 2);
        AssertEvaluationKeysRoundTrip(stream, keys, test_keys);

        keygen.GenerateEvaluationKeys(59, 2, keys);
        Assert.AreEqual(keys.DecompositionBitCount, 59);
        AssertEvaluationKeysRoundTrip(stream, keys, test_keys);

        keygen.GenerateEvaluationKeys(60, 5, keys);
        Assert.AreEqual(keys.DecompositionBitCount, 60);
        AssertEvaluationKeysRoundTrip(stream, keys, test_keys);
    }
    {
        // Second parameter set: n = 256 with a two-prime coefficient modulus.
        var parms = new EncryptionParameters();
        parms.NoiseStandardDeviation = 3.19;
        parms.PolyModulus = "1x^256 + 1";
        parms.PlainModulus = 1 << 6;
        parms.CoeffModulus = new List<SmallModulus>
        {
            DefaultParams.SmallMods60Bit(0),
            DefaultParams.SmallMods50Bit(0)
        };
        var context = new SEALContext(parms);
        var keygen = new KeyGenerator(context);

        var keys = new EvaluationKeys();
        Assert.AreEqual(keys.DecompositionBitCount, 0);
        var test_keys = new EvaluationKeys();
        AssertEvaluationKeysRoundTrip(stream, keys, test_keys);
        Assert.AreEqual(0, keys.Size);

        keygen.GenerateEvaluationKeys(8, 1, keys);
        Assert.AreEqual(keys.DecompositionBitCount, 8);
        AssertEvaluationKeysRoundTrip(stream, keys, test_keys);

        keygen.GenerateEvaluationKeys(8, 2, keys);
        Assert.AreEqual(keys.DecompositionBitCount, 8);
        AssertEvaluationKeysRoundTrip(stream, keys, test_keys);

        keygen.GenerateEvaluationKeys(59, 2, keys);
        Assert.AreEqual(keys.DecompositionBitCount, 59);
        AssertEvaluationKeysRoundTrip(stream, keys, test_keys);

        keygen.GenerateEvaluationKeys(60, 5, keys);
        Assert.AreEqual(keys.DecompositionBitCount, 60);
        AssertEvaluationKeysRoundTrip(stream, keys, test_keys);
    }
}
public static void ExampleRelinearization()
{
    PrintExampleBanner("Example: Relinearization");

    /*
     * A valid ciphertext consists of at least two polynomials. To read the current size of a
     * ciphertext the user can use BigPolyArray.Size. A fresh ciphertext always has size 2, and
     * multiplying input ciphertexts of sizes M and N produces an output of size M+N-1, at a cost
     * of M*N polynomial multiplications. If ciphertexts are allowed to keep growing, later
     * multiplications become very costly, so the user may prefer to shrink them with a so-called
     * relinearization operation.
     *
     * Evaluator.Relinearize(...) can reduce an input ciphertext of size M to any size in
     * 2, 3, ..., M. Relinearization is itself not free: reducing a ciphertext of size K to size L
     * requires 2*(K-L)*[floor(log_2(CoeffModulus)/dbc)+1] polynomial multiplications, where dbc is
     * the DecompositionBitCount (see below), and it grows the inherent noise by an additive factor
     * proportional to 2^dbc, which can in some cases be very large. Using relinearization requires
     * the DecompositionBitCount to be specified in the encryption parameters, and enough
     * evaluation keys to be given to the constructor of Evaluator.
     *
     * The larger dbc is, the faster relinearization is, and the larger the additive noise growth
     * factor is. Because a fresh ciphertext carries very little noise, it almost never makes sense
     * to relinearize right after the first multiplication: any reasonably large dbc would increase
     * the noise by a significant amount. Once the noise has already grown past the additive
     * factor, relinearization has essentially no further effect on noise. For fairly low-degree
     * computations it may be best not to relinearize at all; at higher degrees an intermediate
     * relinearization can be beneficial. See below for choosing a good DecompositionBitCount.
     *
     * If the evaluating party wants to hide the structure of the performed computation, it may be
     * necessary to relinearize to conceal the number of multiplications a ciphertext has gone
     * through; after relinearizing to size 2 it can additionally be re-randomized by adding a
     * fresh encryption of 0.
     *
     * In this example we demonstrate Evaluator.Relinearize(...), show how it reduces ciphertext
     * sizes, and observe its effect on noise.
     */

    // Set up encryption parameters
    var parms = new EncryptionParameters();
    parms.PolyModulus.Set("1x^2048 + 1");
    parms.CoeffModulus.Set(ChooserEvaluator.DefaultParameterOptions[2048]);
    parms.PlainModulus.Set(1 << 16);

    /*
     * The choice of DecompositionBitCount (dbc) can affect the performance of relinearization
     * noticeably. A somewhat optimal choice is between 1/5 and 1/2 of the significant bit count
     * of the coefficient modulus (see table below). If noise growth forces dbc below one fifth of
     * that bit count, it is in fact better to move up to a larger PolyModulus and CoeffModulus,
     * and set dbc as large as possible.
     *
     * /--------------------------------------------------------\
     * | poly_modulus | coeff_modulus bound | dbc min | dbc max |
     * | -------------|---------------------|------------------ |
     * | 1x^1024 + 1  | 48 bits             | 10      | 24      |
     * | 1x^2048 + 1  | 96 bits             | 20      | 48      |
     * | 1x^4096 + 1  | 192 bits            | 39      | 96      |
     * | 1x^8192 + 1  | 384 bits            | 77      | 192     |
     * | 1x^16384 + 1 | 768 bits            | 154     | 384     |
     * \--------------------------------------------------------/
     *
     * A smaller DecompositionBitCount will make relinearization slower. A higher one will increase
     * noise growth while not making relinearization any faster. Here the CoeffModulus has 96
     * significant bits, so we choose DecompositionBitCount to be half of this.
     */
    parms.DecompositionBitCount = 48;

    Console.WriteLine("Encryption parameters specify {0} coefficients with {1} bits per coefficient",
        parms.PolyModulus.GetSignificantCoeffCount(), parms.CoeffModulus.GetSignificantBitCount());

    /*
     * Generate keys.
     *
     * By default, KeyGenerator.Generate() generates no evaluation keys, which means we cannot
     * relinearize yet; it is, however, sufficient for all other homomorphic evaluation operations,
     * as they do not use evaluation keys.
     */
    Console.WriteLine("Generating keys...");
    var generator = new KeyGenerator(parms);
    generator.Generate();
    Console.WriteLine("... key generation complete");
    BigPolyArray publicKey = generator.PublicKey;
    BigPoly secretKey = generator.SecretKey;

    /*
     * Suppose we want to homomorphically multiply four ciphertexts together. Does it make sense to
     * relinearize at an intermediate step of the computation? We demonstrate how relinearization
     * at different stages affects the results.
     */

    // Encrypt the plaintexts to generate the four fresh ciphertexts
    var plain1 = new BigPoly("4");
    var plain2 = new BigPoly("3x^1");
    var plain3 = new BigPoly("2x^2");
    var plain4 = new BigPoly("1x^3");

    Console.WriteLine("Encrypting values as { encrypted1, encrypted2, encrypted3, encrypted4 }");
    var encryptor = new Encryptor(parms, publicKey);
    var encrypted1 = encryptor.Encrypt(plain1);
    var encrypted2 = encryptor.Encrypt(plain2);
    var encrypted3 = encryptor.Encrypt(plain3);
    var encrypted4 = encryptor.Encrypt(plain4);

    // What are the noises in the four fresh ciphertexts?
    var maxNoiseBitCount = Utilities.InherentNoiseMax(parms).GetSignificantBitCount();
    Console.WriteLine("Noises in the four ciphertexts: {0}/{1} bits, {2}/{3} bits, {4}/{5} bits, {6}/{7} bits",
        Utilities.InherentNoise(encrypted1, parms, secretKey).GetSignificantBitCount(), maxNoiseBitCount,
        Utilities.InherentNoise(encrypted2, parms, secretKey).GetSignificantBitCount(), maxNoiseBitCount,
        Utilities.InherentNoise(encrypted3, parms, secretKey).GetSignificantBitCount(), maxNoiseBitCount,
        Utilities.InherentNoise(encrypted4, parms, secretKey).GetSignificantBitCount(), maxNoiseBitCount);

    // Construct an Evaluator; without evaluation keys it can multiply but not relinearize.
    var evaluator = new Evaluator(parms);

    // Perform first part of computation
    Console.WriteLine("Computing encProd1 as encrypted1*encrypted2");
    var encProd1 = evaluator.Multiply(encrypted1, encrypted2);
    Console.WriteLine("Computing encProd2 as encrypted3*encrypted4");
    var encProd2 = evaluator.Multiply(encrypted3, encrypted4);

    // Now encProd1 and encProd2 both have size 3 (= 2 + 2 - 1)
    Console.WriteLine($"Sizes of enc_prod1 and enc_prod2: {encProd1.Size}, {encProd2.Size}");

    // What are the noises in the products?
    Console.WriteLine("Noises in encProd1 and encProd2: {0}/{1} bits, {2}/{3} bits",
        Utilities.InherentNoise(encProd1, parms, secretKey).GetSignificantBitCount(), maxNoiseBitCount,
        Utilities.InherentNoise(encProd2, parms, secretKey).GetSignificantBitCount(), maxNoiseBitCount);

    // Compute product of all four
    Console.WriteLine("Computing encResult as encProd1*encProd2");
    var encResult = evaluator.Multiply(encProd1, encProd2);

    // Now encResult has size 5 (= 3 + 3 - 1)
    Console.WriteLine($"Size of enc_result: {encResult.Size}");

    // What is the noise in the result?
    Console.WriteLine("Noise in enc_result: {0}/{1} bits",
        Utilities.InherentNoise(encResult, parms, secretKey).GetSignificantBitCount(), maxNoiseBitCount);

    /*
     * We didn't create any evaluation keys, so we can't relinearize at all with the current
     * Evaluator. In general, relinearizing down from size K to any smaller size (but at least 2)
     * requires at least K-2 evaluation keys. The size of our final ciphertext encResult is 5, so
     * to relinearize it down to size 2 we will need 3 evaluation keys.
     *
     * We can create them now by calling KeyGenerator.GenerateEvaluationKeys(...). Alternatively,
     * we could have created them already at the beginning by calling generator.Generate(3)
     * instead of generator.Generate().
     *
     * We will also need a new Evaluator, as the previous one was constructed without enough
     * (indeed, any) evaluation keys. It is not possible to add new evaluation keys to a
     * previously created Evaluator.
     */
    generator.GenerateEvaluationKeys(3);
    var evaluationKeys = generator.EvaluationKeys;
    var evaluator2 = new Evaluator(parms, evaluationKeys);

    /*
     * We can relinearize encResult back to size 2 if we want to. In fact, we could also
     * relinearize it to size 3 or 4, or more generally to any size less than the current size but
     * at least 2, by calling Evaluator.Relinearize(encResult, destinationSize).
     */
    Console.WriteLine("Relinearizing encResult to size 2 (stored in encRelinResult)");
    var encRelinResult = evaluator2.Relinearize(encResult);

    /*
     * What did that do to size and noise? The noise remained essentially the same, because at
     * this point its size is already significantly larger than the additive term contributed by
     * the relinearization process. We still remain below the noise bound.
     */
    Console.WriteLine($"Size of enc_relin_result: {encRelinResult.Size}");
    Console.WriteLine("Noise in enc_relin_result: {0}/{1} bits",
        Utilities.InherentNoise(encRelinResult, parms, secretKey).GetSignificantBitCount(), maxNoiseBitCount);

    // What if we do intermediate relinearization of encProd1 and encProd2 instead?
    Console.WriteLine("Relinearizing encProd1 and encProd2 to size 2");
    var encRelinProd1 = evaluator2.Relinearize(encProd1);
    var encRelinProd2 = evaluator2.Relinearize(encProd2);

    // What happened to sizes and noises? Noises grew by a significant amount!
    Console.WriteLine($"Sizes of encRelinProd1 and encRelinProd2: {encRelinProd1.Size}, {encRelinProd2.Size}");
    Console.WriteLine("Noises in encRelinProd1 and encRelinProd2: {0}/{1} bits, {2}/{3} bits",
        Utilities.InherentNoise(encRelinProd1, parms, secretKey).GetSignificantBitCount(), maxNoiseBitCount,
        Utilities.InherentNoise(encRelinProd2, parms, secretKey).GetSignificantBitCount(), maxNoiseBitCount);

    // Now multiply the relinearized products together
    Console.WriteLine("Computing encIntermediateRelinResult as encRelinProd1*encRelinProd2");
    var encIntermediateRelinResult = evaluator2.Multiply(encRelinProd1, encRelinProd2);

    /*
     * What did that do to size and noise? We are above the noise bound in this case, so the
     * resulting ciphertext is corrupted. It is instructive to try and see how a smaller
     * DecompositionBitCount affects the results, e.g. try setting it to 24. Also, PlainModulus
     * was set to be quite large here to emphasize the effect.
     */
    Console.WriteLine($"Size of encIntermediateRelinResult: {encIntermediateRelinResult.Size}");
    Console.WriteLine("Noise in encIntermediateRelinResult: {0}/{1} bits",
        Utilities.InherentNoise(encIntermediateRelinResult, parms, secretKey).GetSignificantBitCount(), maxNoiseBitCount);
}