/// <summary>
/// Trains the weights of a non-linear CRF (first- or second-order, depending on
/// <c>flags.secondOrderNonLinear</c>) and stores the resulting layer weight
/// matrices into this object's fields.
/// </summary>
/// <param name="data">Feature indices per document/position/clique.</param>
/// <param name="labels">Gold label indices per document.</param>
/// <param name="evaluators">Evaluators invoked during optimization.</param>
/// <param name="pruneFeatureItr">Unused by this override (kept for interface compatibility).</param>
/// <param name="featureVals">Optional real-valued features; only used in the first-order branch.</param>
/// <returns>Always null: the trained weights are written to the layer-weight fields instead.</returns>
protected internal override double[] TrainWeights(int[][][][] data, int[][] labels, IEvaluator[] evaluators, int pruneFeatureItr, double[][][][] featureVals)
{
    if (flags.secondOrderNonLinear)
    {
        // Second-order model: four weight matrices (edge/node x input/output layers).
        CRFNonLinearSecondOrderLogConditionalObjectiveFunction func = new CRFNonLinearSecondOrderLogConditionalObjectiveFunction(data, labels, windowSize, classIndex, labelIndices, map, flags, nodeFeatureIndicesMap.Size(), edgeFeatureIndicesMap.Size());
        cliquePotentialFunctionHelper = func;
        double[] allWeights = TrainWeightsUsingNonLinearCRF(func, evaluators);
        // Unpack the flat optimizer output into the four weight matrices.
        Quadruple<double[][], double[][], double[][], double[][]> @params = func.SeparateWeights(allWeights);
        this.inputLayerWeights4Edge = @params.First();
        this.outputLayerWeights4Edge = @params.Second();
        this.inputLayerWeights = @params.Third();
        this.outputLayerWeights = @params.Fourth();
    }
    else
    {
        // First-order model: linear weights plus one hidden layer.
        CRFNonLinearLogConditionalObjectiveFunction func = new CRFNonLinearLogConditionalObjectiveFunction(data, labels, windowSize, classIndex, labelIndices, map, flags, nodeFeatureIndicesMap.Size(), edgeFeatureIndicesMap.Size(), featureVals);
        if (flags.useAdaGradFOBOS)
        {
            // AdaGrad/FOBOS consumes gradients only; skip computing the value.
            func.gradientsOnly = true;
        }
        cliquePotentialFunctionHelper = func;
        double[] allWeights = TrainWeightsUsingNonLinearCRF(func, evaluators);
        Triple<double[][], double[][], double[][]> @params = func.SeparateWeights(allWeights);
        this.linearWeights = @params.First();
        this.inputLayerWeights = @params.Second();
        this.outputLayerWeights = @params.Third();
    }
    return null;
}
/// <summary>
/// Estimates the probability that deleting the given object edge is truth-preserving,
/// using affinity statistics keyed on the governing verb and sibling dependents.
/// </summary>
/// <param name="edge">The object edge being considered for deletion.</param>
/// <param name="neighbors">Other edges sharing this edge's governor.</param>
/// <returns>A deletion probability in [0, 1].</returns>
public virtual double ObjDeletionProbability(SemanticGraphEdge edge, IEnumerable<SemanticGraphEdge> neighbors)
{
    // Get information about the neighbors
    // (in a totally not-creepy-stalker sort of way)
    Optional<string> subj = Optional.Empty();
    Optional<string> pp = Optional.Empty();
    foreach (SemanticGraphEdge neighbor in neighbors)
    {
        if (neighbor != edge)
        {
            string neighborRel = neighbor.GetRelation().ToString();
            if (neighborRel.Contains("subj"))
            {
                subj = Optional.Of(neighbor.GetDependent().OriginalText().ToLower());
            }
            if (neighborRel.Contains("prep"))
            {
                pp = Optional.Of(neighborRel);
            }
            if (neighborRel.Contains("obj"))
            {
                // allow deleting second object
                return 1.0;
            }
        }
    }
    string obj = edge.GetDependent().OriginalText().ToLower();
    string verb = edge.GetGovernor().OriginalText().ToLower();
    // Compute the most informative drop probability we can.
    // FIX: 'double' is a non-nullable value type in C#, so the original
    // 'double rawScore = null;' could not compile. Use a nullable double so
    // the "no statistics available" state is representable.
    // NOTE(review): this assumes the affinity map indexers yield null (not a
    // KeyNotFoundException) for missing keys, as the subsequent null checks
    // imply — confirm against the map implementation.
    double? rawScore = null;
    if (subj.IsPresent())
    {
        if (pp.IsPresent())
        {
            // Case: subj+obj
            rawScore = verbSubjPPObjAffinity[Quadruple.MakeQuadruple(verb, subj.Get(), pp.Get(), obj)];
        }
    }
    if (rawScore == null)
    {
        rawScore = verbObjAffinity[verb];
    }
    if (rawScore == null)
    {
        // No affinity data: fall back to the per-relation deletion probability.
        return DeletionProbability(edge.GetRelation().ToString());
    }
    else
    {
        return 1.0 - Math.Min(1.0, rawScore.Value / upperProbabilityCap);
    }
}
/// <summary>
/// Executes a unary operation on a remote quadruple-precision service: packs the
/// opcode and operand into a 64-byte datagram, sends it to localhost:16383, and
/// decodes the reply's leading bytes as the resulting <see cref="Quadruple"/>.
/// </summary>
private Quadruple UnaryOp(Operation op, Quadruple value)
{
    // Zero the whole request buffer, then overlay the opcode and the operand.
    Unsafe.As<byte, Int128>(ref sb[0]) = default;
    Unsafe.As<byte, Operation>(ref sb[0]) = op;
    Unsafe.As<byte, Quadruple>(ref sb[16]) = value;
    Unsafe.As<byte, Int128>(ref sb[32]) = default;
    Unsafe.As<byte, Int128>(ref sb[48]) = default;
    nc.Send(sb, 64, "localhost", 16383);
    IPEndPoint remoteEndPoint = null;
    byte[] reply = nc.Receive(ref remoteEndPoint);
    return Unsafe.As<byte, Quadruple>(ref reply[0]);
}
/// <summary>
/// Builds the second-order non-linear clique potential function from a flat
/// parameter vector by unpacking it into the four layer-weight matrices.
/// </summary>
/// <param name="x">The flat weight vector produced by the optimizer.</param>
public virtual ICliquePotentialFunction GetCliquePotentialFunction(double[] x)
{
    var weights = SeparateWeights(x);
    double[][] inputWeights4Edge = weights.First();    // inputLayerWeights4Edge
    double[][] outputWeights4Edge = weights.Second();  // outputLayerWeights4Edge
    double[][] inputWeights = weights.Third();         // inputLayerWeights
    double[][] outputWeights = weights.Fourth();       // outputLayerWeights
    return new NonLinearSecondOrderCliquePotentialFunction(inputWeights4Edge, outputWeights4Edge, inputWeights, outputWeights, flags);
}
/// <summary>
/// Converts each entry of the option array into a quadruple of single-byte
/// message buffers, one buffer per option slot.
/// </summary>
private Quadruple<byte[]>[] ToOptionMessages(BitQuadrupleArray options)
{
    var messages = new Quadruple<byte[]>[options.Length];
    for (int index = 0; index < options.Length; ++index)
    {
        messages[index] = new Quadruple<byte[]>(
            new[] { (byte)options[index][0] },
            new[] { (byte)options[index][1] },
            new[] { (byte)options[index][2] },
            new[] { (byte)options[index][3] }
        );
    }
    return messages;
}
/// <summary>
/// Reads one network message and decomposes it into the four option buffers of
/// every invocation: 4 x <paramref name="numberOfMessageBytes"/> bytes each.
/// </summary>
private async Task<Quadruple<byte[]>[]> ReadOptions(IMessageChannel channel, int numberOfInvocations, int numberOfMessageBytes)
{
    var message = new MessageDecomposer(await channel.ReadMessageAsync());
    var options = new Quadruple<byte[]>[numberOfInvocations];
    for (int invocation = 0; invocation < numberOfInvocations; ++invocation)
    {
        var option = new Quadruple<byte[]>();
        for (int slot = 0; slot < 4; ++slot)
        {
            option[slot] = message.ReadBuffer(numberOfMessageBytes);
        }
        options[invocation] = option;
    }
    return options;
}
/// <summary>
/// Runs one party of a 1-out-of-4 Naor–Pinkas oblivious transfer test session:
/// party 0 sends three option quadruples; party 1 receives one option per
/// invocation and verifies it matches the selected index.
/// </summary>
private void RunObliviousTransferParty()
{
    // FIX: the original first allocated a throwaway Quadruple<byte[]>[3] and then
    // immediately overwrote the reference with the initialized array; allocate once.
    Quadruple<byte[]>[] options = new Quadruple<byte[]>[]
    {
        new Quadruple<byte[]>(TestOptions.Select(s => Encoding.ASCII.GetBytes(s)).ToArray()),
        new Quadruple<byte[]>(TestOptions.Select(s => Encoding.ASCII.GetBytes(s.ToLower())).ToArray()),
        new Quadruple<byte[]>(TestOptions.Select(s => Encoding.ASCII.GetBytes(s.ToUpper())).ToArray()),
    };
    using (CryptoContext cryptoContext = CryptoContext.CreateDefault())
    {
        IGeneralizedObliviousTransfer obliviousTransfer = new NaorPinkasObliviousTransfer(
            SecurityParameters.CreateDefault768Bit(),
            cryptoContext
        );
        using (ITwoPartyNetworkSession session = TestNetworkSession.EstablishTwoParty())
        {
            if (session.LocalParty.Id == 0)
            {
                // Sender side: 3 invocations of 6 message bytes each.
                obliviousTransfer.SendAsync(session.Channel, options, 3, 6).Wait();
            }
            else
            {
                // Receiver side: pick option 0, 3 and 2 for the three invocations.
                QuadrupleIndexArray indices = new QuadrupleIndexArray(new[] { 0, 3, 2 });
                byte[][] results = obliviousTransfer.ReceiveAsync(session.Channel, indices, 3, 6).Result;
                Assert.IsNotNull(results, "Result is null.");
                Assert.AreEqual(3, results.Length, "Result does not match the correct number of invocations.");
                for (int j = 0; j < 3; ++j)
                {
                    CollectionAssert.AreEqual(
                        results[j],
                        options[j][indices[j]],
                        "Incorrect message content {0} (should be {1}).",
                        Encoding.ASCII.GetString(results[j]),
                        Encoding.ASCII.GetString(options[j][indices[j]])
                    );
                }
            }
        }
    }
}
/// <summary> /// Calculates the Mandlebrot "escape value" for a given complex number. Note that for a graph of the Mandlebrot /// set, the real and imaginary portions of the complex number are generally used as the X and Y values /// for the graph and the point graphed is shown as the escape value with each distinct value generally shown as a /// different colour. /// </summary> /// <param name="c">The Complex number for which the escape value is to be calculated.</param> /// <returns>The Mandlebrot escape value for the given complex number. This is an integer count of the number of /// recursive calculations completed prior to a breakout value being generated or until a maximum avlue is reached.</returns> //private int calcMandlebrotEscapeVal(Complex c) //{ // FloatType Z; // FloatType dblTmp; // Complex c1; // Complex lastpassval; // Z = c.Real * c.Real + c.Imaginary * c.Imaginary; // if (Z >= breakoutval) // // We've broken out in first pass so exit returning pass no. // return 0; // lastpassval = c; // // Create new point in form (x^2-y^2, 2xy) + c // dblTmp = c.Real * c.Imaginary; // c1 = new Complex(c.Real * c.Real - c.Imaginary * c.Imaginary, dblTmp + dblTmp) + c; // for (int i = 1; i <= maxColorIndex; i++) // { // if (lastpassval == c1) // // Value has not changed on this pass so it will never change again so escape and return max value. // return maxColorIndex; // Z = c1.Real * c1.Real + c1.Imaginary * c1.Imaginary; // if (Z >= breakoutval) // // We've broken out so exit returning pass no. // return i; // lastpassval = c1; // // Create new point in form (x^2-y^2, 2xy) + c // dblTmp = c1.Real * c1.Imaginary; // c1 = new Complex(c1.Real * c1.Real - c1.Imaginary * c1.Imaginary, dblTmp + dblTmp) + c; // } // // We didn't break out so return max value. // return maxColorIndex; //} /// <summary> /// Calculates the Mandlebrot "escape value" for a given complex number. 
/// Note that for a graph of the Mandelbrot set, the real and imaginary portions of the complex
/// number are generally used as the X and Y values for the graph, and the point graphed is shown
/// as the escape value, with each distinct value generally shown as a different colour.
/// </summary>
/// <param name="cr">The real part of the complex number for which the escape value is to be calculated.</param>
/// <param name="ci">The imaginary part of the complex number for which the escape value is to be calculated.</param>
/// <returns>The Mandelbrot escape value for the given complex number. This is an integer count of the number of
/// recursive calculations completed prior to a breakout value being generated or until a maximum value is reached.</returns>
private static int calcMandlebrotEscapeVal(Quadruple cr, Quadruple ci)
{
    Quadruple zr, zi;
    Quadruple zrsqr, zisqr;
    Quadruple lastpassZr, lastpassZi;
    zr = 0;
    zi = 0;
    zrsqr = 0;
    zisqr = 0;
    // Sentinels that cannot equal the first iteration's z = (0, 0).
    lastpassZi = 99999999;
    lastpassZr = 99999999;
    for (int i = 0; i <= maxColorIndex; i++)
    {
        if (zrsqr + zisqr > breakoutval)
        {
            // We've broken out so exit returning pass no.
            return (i);
        }
        if (lastpassZr == zr && lastpassZi == zi)
        {
            // Value has not changed on this pass so it will never change again so escape and return max value.
            return (maxColorIndex);
        }
        lastpassZi = zi;
        lastpassZr = zr;
        // Iterate z = z^2 + c using cached squares: new zi = 2*zr*zi + ci (zr still old here),
        // then new zr = zr^2 - zi^2 + cr from the previous pass's cached squares.
        zi = zr * zi;
        zi += zi;
        // Multiply by two
        zi += ci;
        zr = zrsqr - zisqr + cr;
        zrsqr = zr * zr;
        zisqr = zi * zi;
    }
    // We didn't break out so return max value.
    return (maxColorIndex);
}
// Invoked when potentialPieces is empty and nothing fits at the current point:
// undoes the most recent placement and resumes the search from there, recursing
// further back if the restored state still has no candidates.
void Backtrack()
//we reach this point when potentialPieces is empty and nothing fits
{
    Debug.Log("BACKTRACKING!");
    if (placedPieces.Count == 0)
    {
        // Nothing left to undo: the search space is exhausted.
        Debug.Log("THERE ARE NO SOLUTIONS TO THIS PUZZLE!");
    }
    else
    {
        // Pop the most recently placed piece and restore the search cursor
        // (row, column, current point) to where that piece had been placed.
        Quadruple pieceToRemove = placedPieces[placedPieces.Count - 1];
        currentRow = pieceToRemove.row;
        currentColumn = pieceToRemove.column;
        GameObject piece = pieceToRemove.piece;
        currentPoint = pieceToRemove.currentPoint;
        placedPieces.Remove(pieceToRemove);
        // The removed piece becomes available for placement again.
        pieces.Add(piece);
        Debug.Log("Removed " + piece.GetComponent<PieceInfo>().name);
        if (placedPieces.Count == 0)
        {
            // Back at the very first placement: restart from the corner anchor.
            nextPoint = upperRightCorner;
            theta = 90.0f;
            FindPotentialPieces();
        }
        else
        {
            // Continue the search from the new last-placed piece.
            activePiece = placedPieces[placedPieces.Count - 1].piece;
            findNextPoint();
            CalculateNextAngle();
            FindPotentialPieces();
            Debug.Log("upon backtracking " + potentialPieces.Count + " potential pieces were found");
            if (potentialPieces.Count == 0)
            {
                // Still stuck: recurse to undo one more placement.
                Backtrack();
            }
        }
    }
}
/// <summary>
/// Handles completion of service-control tasks: for every successfully completed
/// task result, parses its XML output for a QuadrupleList payload and shows an
/// error dialog when the first quadruple signals "ERROR". Failures while parsing
/// one result are logged and do not stop processing of the remaining results.
/// </summary>
/// <param name="results">Task results to inspect.</param>
/// <param name="lastUpdate">Unused by this handler.</param>
private void OnServiceControl(IList<TaskResult> results, bool lastUpdate)
{
    Dbg.Log($"Entering {MethodBase.GetCurrentMethod().Name}");
    foreach (TaskResult result in results)
    {
        try
        {
            if (result.Status == Microsoft.EnterpriseManagement.Runtime.TaskStatus.Succeeded || result.Status == Microsoft.EnterpriseManagement.Runtime.TaskStatus.CompletedWithInfo)
            {
                using (StringReader stringReader = new StringReader(result.Output))
                {
                    using (XmlReader xmlReader = XmlReader.Create(stringReader))
                    {
                        if (xmlReader.Read() && xmlReader.ReadToDescendant("QuadrupleList"))
                        {
                            QuadrupleListDataItem qlist = new QuadrupleListDataItem(xmlReader.ReadSubtree());
                            if (qlist.Data.List.Any())
                            {
                                // First quadruple carries the status: I1 is the code, I2 the message.
                                Quadruple response = qlist.Data.List[0];
                                if (response.I1 == "ERROR")
                                {
                                    MessageBox.Show(response.I2, "Operation Failed", MessageBoxButtons.OK, MessageBoxIcon.Error);
                                }
                                //if (response.I1 == "OK")
                                //    MessageBox.Show("Service Configuration task completed successfully.", "Operation Completed", MessageBoxButtons.OK, MessageBoxIcon.Information);
                            }
                        }
                    }
                }
            }
        }
        catch (Exception e)
        {
            // Best-effort per result: log and continue with the next one.
            Dbg.Log($"Exception {e.Message} in {MethodBase.GetCurrentMethod().Name}");
        }
    }
}
// Adds a value under all three keys as an all-or-nothing operation: if inserting
// into a later index throws (e.g. duplicate key), the inserts already made into
// the earlier indexes are rolled back before the exception is rethrown, so the
// three indexes always stay consistent with each other.
public void Add(TKey1 key1, TKey2 key2, TKey3 key3, TValue value)
{
    var quad = new Quadruple(key1, key2, key3, value);
    m_index1.Add(key1, quad);
    try
    {
        m_index2.Add(key2, quad);
        try
        {
            m_index3.Add(key3, quad);
        }
        catch
        {
            // Third insert failed: undo the second before propagating.
            m_index2.Remove(key2);
            throw;
        }
    }
    catch
    {
        // Second or third insert failed: undo the first before propagating.
        m_index1.Remove(key1);
        throw;
    }
}
/// <summary>
/// Loads the verb/argument affinity models used for scoring deletions: each model
/// is a gzipped TSV file whose leading columns form the lookup key (interned
/// strings) and whose last column is the affinity score.
/// </summary>
/// <param name="affinityModels">Directory containing the *.tab.gz model files.</param>
/// <param name="upperProbabilityCap">Cap used when converting raw scores to probabilities.</param>
/// <exception cref="System.IO.IOException"/>
public NaturalLogicWeights(string affinityModels, double upperProbabilityCap)
{
    this.upperProbabilityCap = upperProbabilityCap;
    string line;
    // Simple PP attachments
    using (BufferedReader ppReader = IOUtils.ReaderFromString(affinityModels + "/pp.tab.gz", "utf8"))
    {
        while ((line = ppReader.ReadLine()) != null)
        {
            string[] fields = line.Split("\t");
            Pair<string, string> key = Pair.MakePair(string.Intern(fields[0]), string.Intern(fields[1]));
            verbPPAffinity[key] = double.Parse(fields[2]);
        }
    }
    // Subj PP attachments
    using (BufferedReader subjPPReader = IOUtils.ReaderFromString(affinityModels + "/subj_pp.tab.gz", "utf8"))
    {
        while ((line = subjPPReader.ReadLine()) != null)
        {
            string[] fields = line.Split("\t");
            Triple<string, string, string> key = Triple.MakeTriple(string.Intern(fields[0]), string.Intern(fields[1]), string.Intern(fields[2]));
            verbSubjPPAffinity[key] = double.Parse(fields[3]);
        }
    }
    // Subj Obj PP attachments
    using (BufferedReader subjObjPPReader = IOUtils.ReaderFromString(affinityModels + "/subj_obj_pp.tab.gz", "utf8"))
    {
        while ((line = subjObjPPReader.ReadLine()) != null)
        {
            string[] fields = line.Split("\t");
            Quadruple<string, string, string, string> key = Quadruple.MakeQuadruple(string.Intern(fields[0]), string.Intern(fields[1]), string.Intern(fields[2]), string.Intern(fields[3]));
            verbSubjObjPPAffinity[key] = double.Parse(fields[4]);
        }
    }
    // Subj PP PP attachments
    using (BufferedReader subjPPPPReader = IOUtils.ReaderFromString(affinityModels + "/subj_pp_pp.tab.gz", "utf8"))
    {
        while ((line = subjPPPPReader.ReadLine()) != null)
        {
            string[] fields = line.Split("\t");
            Quadruple<string, string, string, string> key = Quadruple.MakeQuadruple(string.Intern(fields[0]), string.Intern(fields[1]), string.Intern(fields[2]), string.Intern(fields[3]));
            verbSubjPPPPAffinity[key] = double.Parse(fields[4]);
        }
    }
    // Subj PP Obj attachments (original comment wrongly said "Subj PP PP attachments")
    using (BufferedReader subjPPObjReader = IOUtils.ReaderFromString(affinityModels + "/subj_pp_obj.tab.gz", "utf8"))
    {
        while ((line = subjPPObjReader.ReadLine()) != null)
        {
            string[] fields = line.Split("\t");
            Quadruple<string, string, string, string> key = Quadruple.MakeQuadruple(string.Intern(fields[0]), string.Intern(fields[1]), string.Intern(fields[2]), string.Intern(fields[3]));
            verbSubjPPObjAffinity[key] = double.Parse(fields[4]);
        }
    }
    // Plain verb-object attachments (original comment wrongly said "Subj PP PP attachments")
    using (BufferedReader objReader = IOUtils.ReaderFromString(affinityModels + "/obj.tab.gz", "utf8"))
    {
        while ((line = objReader.ReadLine()) != null)
        {
            string[] fields = line.Split("\t");
            verbObjAffinity[fields[0]] = double.Parse(fields[1]);
        }
    }
}
/// <summary>Returns the sum of the two <see cref="Quadruple"/> operands.</summary>
public static Quadruple Invoke(Quadruple arg1, Quadruple arg2) => arg1 + arg2;
/// <summary>Returns the Euclidean length of this vector, sqrt(x*x + y*y).</summary>
public Quadruple len() => Quadruple.Sqrt(x * x + y * y);
/// <summary>Calculates both value and partial derivatives at the point x, and save them internally.</summary>
protected internal override void Calculate(double[] x)
{
    // the log prob of the sequence given the model, which is the negation of value at this point
    double prob = 0.0;
    // final double[][] weights = to2D(x);
    To2D(x, weights);
    SetWeights(weights);
    // the expectations over counts
    // first index is feature index, second index is of possible labeling
    // double[][] E = empty2D();
    Clear2D(E);
    Clear2D(dropoutPriorGradTotal);
    // Worker pool: each task yields (docIndex, logProb, partial E, partial dropout-prior gradient).
    MulticoreWrapper<Pair<int, bool>, Quadruple<int, double, IDictionary<int, double[]>, IDictionary<int, double[]>>> wrapper = new MulticoreWrapper<Pair<int, bool>, Quadruple<int, double, IDictionary<int, double[]>, IDictionary<int, double[]>>>(multiThreadGrad, dropoutPriorThreadProcessor);
    // supervised part
    for (int m = 0; m < totalData.Length; m++)
    {
        // Documents at or past unsupDropoutStartIndex are unsupervised and are
        // scaled by unsupDropoutScale; they contribute no E counts.
        bool submitIsUnsup = (m >= unsupDropoutStartIndex);
        wrapper.Put(new Pair<int, bool>(m, submitIsUnsup));
        // Drain results that are already available while still submitting work.
        while (wrapper.Peek())
        {
            Quadruple<int, double, IDictionary<int, double[]>, IDictionary<int, double[]>> result = wrapper.Poll();
            int docIndex = result.First();
            bool isUnsup = docIndex >= unsupDropoutStartIndex;
            if (isUnsup)
            {
                prob += unsupDropoutScale * result.Second();
            }
            else
            {
                prob += result.Second();
            }
            IDictionary<int, double[]> partialDropout = result.Fourth();
            if (partialDropout != null)
            {
                if (isUnsup)
                {
                    Combine2DArr(dropoutPriorGradTotal, partialDropout, unsupDropoutScale);
                }
                else
                {
                    Combine2DArr(dropoutPriorGradTotal, partialDropout);
                }
            }
            if (!isUnsup)
            {
                IDictionary<int, double[]> partialE = result.Third();
                if (partialE != null)
                {
                    Combine2DArr(E, partialE);
                }
            }
        }
    }
    wrapper.Join();
    // Drain the results that finished after the last submission.
    while (wrapper.Peek())
    {
        Quadruple<int, double, IDictionary<int, double[]>, IDictionary<int, double[]>> result = wrapper.Poll();
        int docIndex = result.First();
        bool isUnsup = docIndex >= unsupDropoutStartIndex;
        if (isUnsup)
        {
            prob += unsupDropoutScale * result.Second();
        }
        else
        {
            prob += result.Second();
        }
        IDictionary<int, double[]> partialDropout = result.Fourth();
        if (partialDropout != null)
        {
            if (isUnsup)
            {
                Combine2DArr(dropoutPriorGradTotal, partialDropout, unsupDropoutScale);
            }
            else
            {
                Combine2DArr(dropoutPriorGradTotal, partialDropout);
            }
        }
        if (!isUnsup)
        {
            IDictionary<int, double[]> partialE = result.Third();
            if (partialE != null)
            {
                Combine2DArr(E, partialE);
            }
        }
    }
    if (double.IsNaN(prob))
    {
        // shouldn't be the case
        throw new Exception("Got NaN for prob in CRFLogConditionalObjectiveFunctionWithDropout.calculate()" + " - this may well indicate numeric underflow due to overly long documents.");
    }
    // because we minimize -L(\theta)
    value = -prob;
    if (Verbose)
    {
        log.Info("value is " + System.Math.Exp(-value));
    }
    // compute the partial derivative for each feature by comparing expected counts to empirical counts
    int index = 0;
    for (int i = 0; i < E.Length; i++)
    {
        for (int j = 0; j < E[i].Length; j++)
        {
            // because we minimize -L(\theta)
            derivative[index] = (E[i][j] - Ehat[i][j]);
            derivative[index] += dropoutScale * dropoutPriorGradTotal[i][j];
            if (Verbose)
            {
                log.Info("deriv(" + i + ',' + j + ") = " + E[i][j] + " - " + Ehat[i][j] + " = " + derivative[index]);
            }
            index++;
        }
    }
}
/// <summary>Returns first + second by dispatching an Add request through the binary-op pipeline.</summary>
public Quadruple Add(Quadruple first, Quadruple second) => BinOp(Operation.Add, first, second);
/// <summary>
/// Removes the entry registered under <paramref name="key3"/> from all three indexes.
/// </summary>
/// <param name="key3">The third key of the entry to remove.</param>
/// <param name="removedValue">The removed entry, or the default value when no entry was found.</param>
/// <returns>true when an entry was found and removed; otherwise false.</returns>
public bool TryRemove(TKey3 key3, out Quadruple removedValue)
{
    // FIX: the original chained the three removals with short-circuiting '&&',
    // so if an earlier Remove returned false the remaining removals were
    // skipped, leaving the indexes inconsistent with each other. Once the entry
    // is found, remove it from all three indexes unconditionally.
    if (!m_index3.TryGetValue(key3, out removedValue))
    {
        return false;
    }
    m_index1.Remove(removedValue.Key1);
    m_index2.Remove(removedValue.Key2);
    m_index3.Remove(key3);
    return true;
}
// Ad-hoc experiment harness: only the first braced block executes; every later
// block is intentionally dead code (unreachable after 'return (0)') kept around
// so individual experiments can be re-enabled by moving them to the top.
private static int Main(string[] args)
{
    {
        // Scaling/rounding experiment around 2^54; a1..a4 are unused scratch locals.
        var a1 = 1;
        var a2 = 2;
        var a3 = 3;
        var a4 = 4;
        var b = (double)(1L << (52 + 2));
        for (var i = 0; i < 64; i++)
        {
            var bb = Math.ScaleB(b, i);
            for (var j = 0; j < 8; ++j)
            {
                Console.WriteLine($@"{UltimateOrb.Utilities.CilVerifiable.AddThenSubtractFirst(bb, j):R}");
            }
        }
        return (0);
    }
    {
        // Unboxing-by-reference experiment.
        var sdf = (object)(-4);
        Console.WriteLine(UltimateOrb.Utilities.CilVerifiable.UnboxRef<int>(sdf));
        return (0);
    }
    {
        // Comparison throughput experiment over 4e11 iterations.
        var aaa = Volatile.Read(ref UltimateOrb.Dummy<int>.Value);
        var bbb = Volatile.Read(ref UltimateOrb.Dummy<int>.Value);
        var a = 0L;
        for (var i = 0L; i < 400000000000L; i++)
        {
            a ^= UltimateOrb.Utilities.BooleanIntegerModule.GreaterThanOrEqual(aaa, bbb);
        }
        Console.WriteLine(a);
        return (0);
    }
    {
        // Boolean-as-integer comparison semantics with NaN operands.
        var sdffa = new int[] {
            UltimateOrb.Utilities.BooleanIntegerModule.GreaterThanOrEqual(0.3, Double.NaN),
            UltimateOrb.Utilities.BooleanIntegerModule.GreaterThanOrEqual(0.3, 0.2),
        };
        foreach (var item in sdffa)
        {
            Console.WriteLine(item);
        }
        return (0);
    }
    {
        // Round-to-nearest-even behavior near 2^52 when adding fractions.
        var a = (1UL << 52) + 1;
        var b = (1UL << 52);
        var c = (1UL << 52) - 1;
        var d = (1UL << 52) - 1;
        var p = a + 0.5;
        var q = b + 0.5;
        var r = c + 0.5;
        var s = d + 0.75;
        var sp = d + 0.76;
        var sm = d + 0.74;
        Console.WriteLine($@"{double.Epsilon:R}");
        Console.WriteLine($@"{double.Epsilon * d:R}");
        Console.WriteLine($@"{p:R}");
        Console.WriteLine($@"{q:R}");
        Console.WriteLine($@"{r:R}");
        Console.WriteLine($@"{s:R}");
        Console.WriteLine($@"{sp:R}");
        Console.WriteLine($@"{sm:R}");
        return (0);
    }
    {
        // LeadingZeroCount of a forced zero value.
        var sdfada = 0u;
        sdfada ^= sdfada;
        var sdaf = System.Numerics.BitOperations.LeadingZeroCount(sdfada);
        _ = sdaf.GetHashCode();
    }
    {
        // Sign-bit behavior when negating a NaN payload.
        var sfassss = BitConverter.Int64BitsToDouble(0x7FFF400000000000);
        var sfas = sfassss;
        var vdsa = 0.0 - sfas;
        var sfad = BitConverter.DoubleToInt64Bits(sfas);
        var asdsd = BitConverter.DoubleToInt64Bits(vdsa);
        _ = (sfad ^ asdsd).GetHashCode();
    }
    {
        // Quadruple comparison against negative infinity.
        var ccc = (Quadruple)0.0 < (Quadruple)Double.NegativeInfinity;
        Console.WriteLine(ccc);
    }
    {
        // Quadruple NaN detection and unary plus on MinValue.
        var ccc = Double.NaN != Double.NaN;
        var a = Quadruple.IsNaN(Quadruple.NaN);
        Console.WriteLine(a);
        var sdfa = +Quadruple.MinValue;
        Console.WriteLine(sdfa);
    }
    {
        // NodeId_A operator overload experiment (duplicated on purpose below).
        var sdfa = (NodeId_A)3;
        var dsafsd = 7 * sdfa;
        Console.WriteLine(dsafsd);
    }
    {
        var sdfa = (NodeId_A)3;
        var dsafsd = 7 * sdfa;
        Console.WriteLine(dsafsd);
    }
    return (0);
}
/// <summary>
/// Sender side of the Naor–Pinkas 1-out-of-4 oblivious transfer: generates the
/// group elements c, exchanges c/d with the receiver, and transmits each of the
/// four options per invocation masked such that the receiver can unmask only the
/// option matching its selection index.
/// </summary>
protected override async Task GeneralizedSendAsync(IMessageChannel channel, Quadruple<byte[]>[] options, int numberOfInvocations, int numberOfMessageBytes)
{
#if DEBUG
    Stopwatch stopwatch = Stopwatch.StartNew();
#endif
    Quadruple<BigInteger> listOfCs = new Quadruple<BigInteger>();
    Quadruple<BigInteger> listOfExponents = new Quadruple<BigInteger>();
    Parallel.For(0, 4, i =>
    {
        BigInteger exponent;
        listOfCs[i] = GenerateGroupElement(out exponent);
        listOfExponents[i] = exponent;
    });
    // alpha is the secret exponent of c[0]; the other exponents are discarded.
    BigInteger alpha = listOfExponents[0];
#if DEBUG
    stopwatch.Stop();
    Console.WriteLine("[Sender] Generating group elements took {0} ms.", stopwatch.ElapsedMilliseconds);
    stopwatch.Restart();
#endif
    Task writeCsTask = WriteGroupElements(channel, listOfCs);
    Task<BigInteger[]> readDsTask = ReadGroupElements(channel, numberOfInvocations);
    // Precompute c[i]^alpha for i = 1..3 while the network round-trip is in flight.
    Quadruple<BigInteger> listOfExponentiatedCs = new Quadruple<BigInteger>();
    Parallel.For(1, 4, i =>
    {
        listOfExponentiatedCs[i] = BigInteger.ModPow(listOfCs[i], alpha, _parameters.P);
    });
    await Task.WhenAll(writeCsTask, readDsTask);
    BigInteger[] listOfDs = readDsTask.Result;
#if DEBUG
    stopwatch.Stop();
    Console.WriteLine("[Sender] Precomputing exponentations, sending c and reading d took {0} ms.", stopwatch.ElapsedMilliseconds);
    stopwatch.Restart();
#endif
    Quadruple<byte[]>[] maskedOptions = new Quadruple<byte[]>[numberOfInvocations];
    Parallel.For(0, numberOfInvocations, j =>
    {
        maskedOptions[j] = new Quadruple<byte[]>();
        BigInteger exponentiatedD = BigInteger.ModPow(listOfDs[j], alpha, _parameters.P);
        BigInteger inverseExponentiatedD = Invert(exponentiatedD);
        Parallel.For(0, 4, i =>
        {
            // Masking key: e = d^alpha for option 0, e = c[i]^alpha / d^alpha otherwise.
            BigInteger e = exponentiatedD;
            if (i > 0)
            {
                e = (listOfExponentiatedCs[i] * inverseExponentiatedD) % _parameters.P;
            }
            // note(lumip): the protocol as proposed by Naor and Pinkas includes a random value
            // to be incorporated in the random oracle query to ensure that the same query does
            // not occur several times. This is partly because they envision several receivers
            // over which the same Cs are used. Since we are having separate sets of Cs for each
            // sender-receiver pair, the requirement of unique queries is satisfied just using
            // the index j of the OT invocation and we can save a bit of bandwidth.
            // todo: think about whether we want to use a static set of Cs for each sender for all
            // connection to reduce the required amount of computation per OT. Would require to
            // maintain state in this class and negate the points made in the note above.
            maskedOptions[j][i] = MaskOption(options[j][i], e, j, i);
        });
    });
#if DEBUG
    stopwatch.Stop();
    Console.WriteLine("[Sender] Computing masked options took {0} ms.", stopwatch.ElapsedMilliseconds);
    stopwatch.Restart();
#endif
    await WriteOptions(channel, maskedOptions, numberOfInvocations, numberOfMessageBytes);
#if DEBUG
    stopwatch.Stop();
    Console.WriteLine("[Sender] Sending masked options took {0} ms.", stopwatch.ElapsedMilliseconds);
#endif
}
/// <summary>Creates a 2-D vector with the given components.</summary>
public QVector2(Quadruple x, Quadruple y)
{
    this.y = y;
    this.x = x;
}
/// <summary>
/// Receiver side of the Naor–Pinkas 1-out-of-4 oblivious transfer: reads the
/// sender's c values, sends blinded d values that encode the selection indices,
/// and unmasks exactly the selected option of each invocation.
/// </summary>
protected override async Task<byte[][]> GeneralizedReceiveAsync(IMessageChannel channel, QuadrupleIndexArray selectionIndices, int numberOfInvocations, int numberOfMessageBytes)
{
#if DEBUG
    Stopwatch stopwatch = Stopwatch.StartNew();
#endif
    Quadruple<BigInteger> listOfCs = new Quadruple<BigInteger>(await ReadGroupElements(channel, 4));
#if DEBUG
    stopwatch.Stop();
    Console.WriteLine("[Receiver] Reading values for c took {0} ms.", stopwatch.ElapsedMilliseconds);
    stopwatch.Restart();
#endif
    BigInteger[] listOfBetas = new BigInteger[numberOfInvocations];
    BigInteger[] listOfDs = new BigInteger[numberOfInvocations];
    Parallel.For(0, numberOfInvocations, j =>
    {
        // d = g^beta; for non-zero selections, blind it as c[sigma] / d so the
        // sender cannot tell which option was chosen.
        listOfDs[j] = GenerateGroupElement(out listOfBetas[j]);
        if (selectionIndices[j] > 0)
        {
            listOfDs[j] = (listOfCs[selectionIndices[j]] * Invert(listOfDs[j])) % _parameters.P;
        }
    });
#if DEBUG
    stopwatch.Stop();
    Console.WriteLine("[Receiver] Generating and d took {0} ms.", stopwatch.ElapsedMilliseconds);
    stopwatch.Restart();
#endif
    Task writeDsTask = WriteGroupElements(channel, listOfDs);
    Task<Quadruple<byte[]>[]> readMaskedOptionsTask = ReadOptions(channel, numberOfInvocations, numberOfMessageBytes);
    BigInteger[] listOfEs = new BigInteger[numberOfInvocations];
    Parallel.For(0, numberOfInvocations, j =>
    {
        int i = selectionIndices[j];
        // NOTE(review): 'i' is unused here and e is always derived from c[0] —
        // confirm against the protocol description whether that is intentional.
        listOfEs[j] = BigInteger.ModPow(listOfCs[0], listOfBetas[j], _parameters.P);
    });
    await Task.WhenAll(writeDsTask, readMaskedOptionsTask);
    Quadruple<byte[]>[] maskedOptions = readMaskedOptionsTask.Result;
#if DEBUG
    stopwatch.Stop();
    Console.WriteLine("[Receiver] Computing e, sending d and reading masked options took {0} ms.", stopwatch.ElapsedMilliseconds);
    stopwatch.Restart();
#endif
    byte[][] selectedOptions = new byte[numberOfInvocations][];
    Parallel.For(0, numberOfInvocations, j =>
    {
        // Only the masked option at the selection index can be unmasked with e.
        int i = selectionIndices[j];
        BigInteger e = listOfEs[j];
        selectedOptions[j] = MaskOption(maskedOptions[j][i], e, j, i);
    });
#if DEBUG
    stopwatch.Stop();
    Console.WriteLine("[Receiver] Unmasking result took {0} ms.", stopwatch.ElapsedMilliseconds);
#endif
    return (selectedOptions);
}
/// <summary>Creates a polar coordinate from a radius and an angle in radians.</summary>
public QPolarCoordinate(Quadruple Radius, Quadruple Theta_rad)
{
    this.Radius = Radius;
    this.Theta_rad = Theta_rad;
}
/// <summary>Computes the square root of <paramref name="value"/> under the given rounding mode.</summary>
public Quadruple Sqrt(Quadruple value, CFloatingPointRounding rounding) => UnaryOp(Operation.Sqrt, value, rounding);
// todo [cdm]: Below data[m] --> docData
/// <summary>Calculates both value and partial derivatives at the point x, and save them internally.</summary>
protected internal override void Calculate(double[] x)
{
    // the log prob of the sequence given the model, which is the negation of value at this point
    double prob = 0.0;
    Quadruple<double[][], double[][], double[][], double[][]> allParams = SeparateWeights(x);
    double[][] W4Edge = allParams.First();   // inputLayerWeights4Edge
    double[][] U4Edge = allParams.Second();  // outputLayerWeights4Edge
    double[][] W = allParams.Third();        // inputLayerWeights
    double[][] U = allParams.Fourth();       // outputLayerWeights
    double[][] Y4Edge = null;
    double[][] Y = null;
    if (flags.softmaxOutputLayer)
    {
        Y4Edge = new double[U4Edge.Length][];
        for (int i = 0; i < U4Edge.Length; i++)
        {
            Y4Edge[i] = ArrayMath.Softmax(U4Edge[i]);
        }
        Y = new double[U.Length][];
        for (int i_1 = 0; i_1 < U.Length; i_1++)
        {
            Y[i_1] = ArrayMath.Softmax(U[i_1]);
        }
    }
    // Empirical counts (hat) and expected counts (e) for each weight matrix.
    double[][] What4Edge = EmptyW4Edge();
    double[][] Uhat4Edge = EmptyU4Edge();
    double[][] What = EmptyW();
    double[][] Uhat = EmptyU();
    // the expectations over counts
    // first index is feature index, second index is of possible labeling
    double[][] eW4Edge = EmptyW4Edge();
    double[][] eU4Edge = EmptyU4Edge();
    double[][] eW = EmptyW();
    double[][] eU = EmptyU();
    // iterate over all the documents
    for (int m = 0; m < data.Length; m++)
    {
        int[][][] docData = data[m];
        int[] docLabels = labels[m];
        NonLinearSecondOrderCliquePotentialFunction cliquePotentialFunction = new NonLinearSecondOrderCliquePotentialFunction(W4Edge, U4Edge, W, U, flags);
        // make a clique tree for this document
        CRFCliqueTree<string> cliqueTree = CRFCliqueTree.GetCalibratedCliqueTree(docData, labelIndices, numClasses, classIndex, backgroundSymbol, cliquePotentialFunction, null);
        // compute the log probability of the document given the model with the parameters x
        int[] given = new int[window - 1];
        Arrays.Fill(given, classIndex.IndexOf(backgroundSymbol));
        int[] windowLabels = new int[window];
        Arrays.Fill(windowLabels, classIndex.IndexOf(backgroundSymbol));
        if (docLabels.Length > docData.Length)
        {
            // only true for self-training
            // fill the given array with the extra docLabels
            System.Array.Copy(docLabels, 0, given, 0, given.Length);
            System.Array.Copy(docLabels, 0, windowLabels, 0, windowLabels.Length);
            // shift the docLabels array left
            int[] newDocLabels = new int[docData.Length];
            System.Array.Copy(docLabels, docLabels.Length - newDocLabels.Length, newDocLabels, 0, newDocLabels.Length);
            docLabels = newDocLabels;
        }
        // iterate over the positions in this document
        for (int i = 0; i < docData.Length; i++)
        {
            int label = docLabels[i];
            double p = cliqueTree.CondLogProbGivenPrevious(i, label, given);
            if (Verbose)
            {
                log.Info("P(" + label + "|" + ArrayMath.ToString(given) + ")=" + p);
            }
            prob += p;
            System.Array.Copy(given, 1, given, 0, given.Length - 1);
            given[given.Length - 1] = label;
        }
        // compute the expected counts for this document, which we will need to compute the derivative
        // iterate over the positions in this document
        for (int i_1 = 0; i_1 < docData.Length; i_1++)
        {
            // for each possible clique at this position
            System.Array.Copy(windowLabels, 1, windowLabels, 0, window - 1);
            windowLabels[window - 1] = docLabels[i_1];
            for (int j = 0; j < docData[i_1].Length; j++)
            {
                IIndex<CRFLabel> labelIndex = labelIndices[j];
                // for each possible labeling for that clique
                int[] cliqueFeatures = docData[i_1][j];
                double[] As = null;
                double[] fDeriv = null;
                double[][] yTimesA = null;
                double[] sumOfYTimesA = null;
                int inputSize;
                int outputSize = -1;
                if (j == 0)
                {
                    inputSize = inputLayerSize;
                    outputSize = outputLayerSize;
                    As = cliquePotentialFunction.HiddenLayerOutput(W, cliqueFeatures, flags, null, j + 1);
                }
                else
                {
                    inputSize = inputLayerSize4Edge;
                    outputSize = outputLayerSize4Edge;
                    As = cliquePotentialFunction.HiddenLayerOutput(W4Edge, cliqueFeatures, flags, null, j + 1);
                }
                // Derivative of the hidden-layer activation (sigmoid or tanh).
                fDeriv = new double[inputSize];
                double fD = 0;
                for (int q = 0; q < inputSize; q++)
                {
                    if (useSigmoid)
                    {
                        fD = As[q] * (1 - As[q]);
                    }
                    else
                    {
                        fD = 1 - As[q] * As[q];
                    }
                    fDeriv[q] = fD;
                }
                // calculating yTimesA for softmax
                if (flags.softmaxOutputLayer)
                {
                    double val = 0;
                    yTimesA = new double[outputSize][];
                    for (int ii = 0; ii < outputSize; ii++)
                    {
                        yTimesA[ii] = new double[numHiddenUnits];
                    }
                    sumOfYTimesA = new double[outputSize];
                    for (int k = 0; k < outputSize; k++)
                    {
                        double[] Yk = null;
                        if (flags.tieOutputLayer)
                        {
                            if (j == 0) { Yk = Y[0]; } else { Yk = Y4Edge[0]; }
                        }
                        else
                        {
                            if (j == 0) { Yk = Y[k]; } else { Yk = Y4Edge[k]; }
                        }
                        double sum = 0;
                        for (int q_1 = 0; q_1 < inputSize; q_1++)
                        {
                            if (q_1 % outputSize == k)
                            {
                                int hiddenUnitNo = q_1 / outputSize;
                                val = As[q_1] * Yk[hiddenUnitNo];
                                yTimesA[k][hiddenUnitNo] = val;
                                sum += val;
                            }
                        }
                        sumOfYTimesA[k] = sum;
                    }
                }
                // calculating Uhat What
                int[] cliqueLabel = new int[j + 1];
                System.Array.Copy(windowLabels, window - 1 - j, cliqueLabel, 0, j + 1);
                CRFLabel crfLabel = new CRFLabel(cliqueLabel);
                int givenLabelIndex = labelIndex.IndexOf(crfLabel);
                double[] Uk = null;
                double[] UhatK = null;
                double[] Yk_1 = null;
                double[] yTimesAK = null;
                double sumOfYTimesAK = 0;
                if (flags.tieOutputLayer)
                {
                    if (j == 0) { Uk = U[0]; UhatK = Uhat[0]; } else { Uk = U4Edge[0]; UhatK = Uhat4Edge[0]; }
                    if (flags.softmaxOutputLayer)
                    {
                        if (j == 0) { Yk_1 = Y[0]; } else { Yk_1 = Y4Edge[0]; }
                    }
                }
                else
                {
                    if (j == 0) { Uk = U[givenLabelIndex]; UhatK = Uhat[givenLabelIndex]; } else { Uk = U4Edge[givenLabelIndex]; UhatK = Uhat4Edge[givenLabelIndex]; }
                    if (flags.softmaxOutputLayer)
                    {
                        if (j == 0) { Yk_1 = Y[givenLabelIndex]; } else { Yk_1 = Y4Edge[givenLabelIndex]; }
                    }
                }
                if (flags.softmaxOutputLayer)
                {
                    yTimesAK = yTimesA[givenLabelIndex];
                    sumOfYTimesAK = sumOfYTimesA[givenLabelIndex];
                }
                // Accumulate empirical counts for the observed (gold) labeling.
                for (int k_1 = 0; k_1 < inputSize; k_1++)
                {
                    double deltaK = 1;
                    if (flags.sparseOutputLayer || flags.tieOutputLayer)
                    {
                        if (k_1 % outputSize == givenLabelIndex)
                        {
                            int hiddenUnitNo = k_1 / outputSize;
                            if (flags.softmaxOutputLayer)
                            {
                                UhatK[hiddenUnitNo] += (yTimesAK[hiddenUnitNo] - Yk_1[hiddenUnitNo] * sumOfYTimesAK);
                                deltaK *= Yk_1[hiddenUnitNo];
                            }
                            else
                            {
                                UhatK[hiddenUnitNo] += As[k_1];
                                deltaK *= Uk[hiddenUnitNo];
                            }
                        }
                    }
                    else
                    {
                        UhatK[k_1] += As[k_1];
                        if (useOutputLayer)
                        {
                            deltaK *= Uk[k_1];
                        }
                    }
                    if (useHiddenLayer)
                    {
                        deltaK *= fDeriv[k_1];
                    }
                    if (useOutputLayer)
                    {
                        if (flags.sparseOutputLayer || flags.tieOutputLayer)
                        {
                            if (k_1 % outputSize == givenLabelIndex)
                            {
                                double[] WhatK = null;
                                if (j == 0) { WhatK = What[k_1]; } else { WhatK = What4Edge[k_1]; }
                                foreach (int cliqueFeature in cliqueFeatures)
                                {
                                    WhatK[cliqueFeature] += deltaK;
                                }
                            }
                        }
                        else
                        {
                            double[] WhatK = null;
                            if (j == 0) { WhatK = What[k_1]; } else { WhatK = What4Edge[k_1]; }
                            foreach (int cliqueFeature in cliqueFeatures)
                            {
                                WhatK[cliqueFeature] += deltaK;
                            }
                        }
                    }
                    else
                    {
                        if (k_1 == givenLabelIndex)
                        {
                            double[] WhatK = null;
                            if (j == 0) { WhatK = What[k_1]; } else { WhatK = What4Edge[k_1]; }
                            foreach (int cliqueFeature in cliqueFeatures)
                            {
                                WhatK[cliqueFeature] += deltaK;
                            }
                        }
                    }
                }
                // Accumulate model-expected counts over all possible labelings of the clique.
                for (int k_2 = 0; k_2 < labelIndex.Size(); k_2++)
                {
                    // labelIndex.size() == numClasses
                    int[] label = labelIndex.Get(k_2).GetLabel();
                    // probability of these labels occurring in this clique with these features
                    double p = cliqueTree.Prob(i_1, label);
                    double[] Uk2 = null;
                    double[] eUK = null;
                    double[] Yk2 = null;
                    if (flags.tieOutputLayer)
                    {
                        if (j == 0)
                        {
                            // for node features
                            Uk2 = U[0];
                            eUK = eU[0];
                        }
                        else
                        {
                            Uk2 = U4Edge[0];
                            eUK = eU4Edge[0];
                        }
                        if (flags.softmaxOutputLayer)
                        {
                            if (j == 0) { Yk2 = Y[0]; } else { Yk2 = Y4Edge[0]; }
                        }
                    }
                    else
                    {
                        if (j == 0) { Uk2 = U[k_2]; eUK = eU[k_2]; } else { Uk2 = U4Edge[k_2]; eUK = eU4Edge[k_2]; }
                        if (flags.softmaxOutputLayer)
                        {
                            if (j == 0) { Yk2 = Y[k_2]; } else { Yk2 = Y4Edge[k_2]; }
                        }
                    }
                    if (useOutputLayer)
                    {
                        for (int q_1 = 0; q_1 < inputSize; q_1++)
                        {
                            double deltaQ = 1;
                            if (flags.sparseOutputLayer || flags.tieOutputLayer)
                            {
                                if (q_1 % outputSize == k_2)
                                {
                                    int hiddenUnitNo = q_1 / outputSize;
                                    if (flags.softmaxOutputLayer)
                                    {
                                        eUK[hiddenUnitNo] += (yTimesA[k_2][hiddenUnitNo] - Yk2[hiddenUnitNo] * sumOfYTimesA[k_2]) * p;
                                        deltaQ = Yk2[hiddenUnitNo];
                                    }
                                    else
                                    {
                                        eUK[hiddenUnitNo] += As[q_1] * p;
                                        deltaQ = Uk2[hiddenUnitNo];
                                    }
                                }
                            }
                            else
                            {
                                eUK[q_1] += As[q_1] * p;
                                deltaQ = Uk2[q_1];
                            }
                            if (useHiddenLayer)
                            {
                                deltaQ *= fDeriv[q_1];
                            }
                            if (flags.sparseOutputLayer || flags.tieOutputLayer)
                            {
                                if (q_1 % outputSize == k_2)
                                {
                                    double[] eWq = null;
                                    if (j == 0) { eWq = eW[q_1]; } else { eWq = eW4Edge[q_1]; }
                                    foreach (int cliqueFeature in cliqueFeatures)
                                    {
                                        eWq[cliqueFeature] += deltaQ * p;
                                    }
                                }
                            }
                            else
                            {
                                double[] eWq = null;
                                if (j == 0) { eWq = eW[q_1]; } else { eWq = eW4Edge[q_1]; }
                                foreach (int cliqueFeature in cliqueFeatures)
                                {
                                    eWq[cliqueFeature] += deltaQ * p;
                                }
                            }
                        }
                    }
                    else
                    {
                        double deltaK = 1;
                        if (useHiddenLayer)
                        {
                            deltaK *= fDeriv[k_2];
                        }
                        double[] eWK = null;
                        if (j == 0) { eWK = eW[k_2]; } else { eWK = eW4Edge[k_2]; }
                        foreach (int cliqueFeature in cliqueFeatures)
                        {
                            eWK[cliqueFeature] += deltaK * p;
                        }
                    }
                }
            }
        }
    }
    if (double.IsNaN(prob))
    {
        // shouldn't be the case
        throw new Exception("Got NaN for prob in CRFNonLinearSecondOrderLogConditionalObjectiveFunction.calculate()");
    }
    value = -prob;
    if (Verbose)
    {
        log.Info("value is " + value);
    }
    // compute the partial derivative for each feature by comparing expected counts to empirical counts
    int index = 0;
    for (int i_2 = 0; i_2 < eW4Edge.Length; i_2++)
    {
        for (int j = 0; j < eW4Edge[i_2].Length; j++)
        {
            derivative[index++] = (eW4Edge[i_2][j] - What4Edge[i_2][j]);
            if (Verbose)
            {
                log.Info("inputLayerWeights4Edge deriv(" + i_2 + "," + j + ") = " + eW4Edge[i_2][j] + " - " + What4Edge[i_2][j] + " = " + derivative[index - 1]);
            }
        }
    }
    for (int i_3 = 0; i_3 < eW.Length; i_3++)
    {
        for (int j = 0; j < eW[i_3].Length; j++)
        {
            derivative[index++] = (eW[i_3][j] - What[i_3][j]);
            if (Verbose)
            {
                log.Info("inputLayerWeights deriv(" + i_3 + "," + j + ") = " + eW[i_3][j] + " - " + What[i_3][j] + " = " + derivative[index - 1]);
            }
        }
    }
    if (index != beforeOutputWeights)
    {
        throw new Exception("after W derivative, index(" + index + ") != beforeOutputWeights(" + beforeOutputWeights + ")");
    }
    if (useOutputLayer)
    {
        // FIX: this loop (and the prior loops below) were corrupted by decompilation
        // to "for (int i = 0; i_3 < ...; i_3++)", referencing an out-of-scope
        // variable; restored to a consistent loop variable over the full range.
        for (int i_3 = 0; i_3 < eU4Edge.Length; i_3++)
        {
            for (int j = 0; j < eU4Edge[i_3].Length; j++)
            {
                derivative[index++] = (eU4Edge[i_3][j] - Uhat4Edge[i_3][j]);
                if (Verbose)
                {
                    log.Info("outputLayerWeights4Edge deriv(" + i_3 + "," + j + ") = " + eU4Edge[i_3][j] + " - " + Uhat4Edge[i_3][j] + " = " + derivative[index - 1]);
                }
            }
        }
        for (int i_1 = 0; i_1 < eU.Length; i_1++)
        {
            for (int j = 0; j < eU[i_1].Length; j++)
            {
                derivative[index++] = (eU[i_1][j] - Uhat[i_1][j]);
                if (Verbose)
                {
                    log.Info("outputLayerWeights deriv(" + i_1 + "," + j + ") = " + eU[i_1][j] + " - " + Uhat[i_1][j] + " = " + derivative[index - 1]);
                }
            }
        }
    }
    if (index != x.Length)
    {
        throw new Exception("after W derivative, index(" + index + ") != x.length(" + x.Length + ")");
    }
    int regSize = x.Length;
    if (flags.skipOutputRegularization || flags.softmaxOutputLayer)
    {
        regSize = beforeOutputWeights;
    }
    // incorporate priors
    if (prior == QuadraticPrior)
    {
        double sigmaSq = sigma * sigma;
        for (int i_3 = 0; i_3 < regSize; i_3++)
        {
            double k = 1.0;
            double w = x[i_3];
            value += k * w * w / 2.0 / sigmaSq;
            derivative[i_3] += k * w / sigmaSq;
        }
    }
    else if (prior == HuberPrior)
    {
        double sigmaSq = sigma * sigma;
        for (int i_3 = 0; i_3 < regSize; i_3++)
        {
            double w = x[i_3];
            double wabs = System.Math.Abs(w);
            if (wabs < epsilon)
            {
                value += w * w / 2.0 / epsilon / sigmaSq;
                derivative[i_3] += w / epsilon / sigmaSq;
            }
            else
            {
                value += (wabs - epsilon / 2) / sigmaSq;
                derivative[i_3] += ((w < 0.0) ? -1.0 : 1.0) / sigmaSq;
            }
        }
    }
    else if (prior == QuarticPrior)
    {
        double sigmaQu = sigma * sigma * sigma * sigma;
        for (int i_3 = 0; i_3 < regSize; i_3++)
        {
            double k = 1.0;
            double w = x[i_3];
            value += k * w * w * w * w / 2.0 / sigmaQu;
            derivative[i_3] += k * w / sigmaQu;
        }
    }
}
/// <summary>
/// Attempts to find the quadruple associated with the given third-position key.
/// Delegates to <c>m_index3</c> (presumably a dictionary keyed on the third
/// tuple element — confirm against the index declarations).
/// </summary>
/// <param name="key3">The third-position key to look up.</param>
/// <param name="result">When this method returns true, the matching quadruple; otherwise the default value.</param>
/// <returns>True if an entry for <paramref name="key3"/> exists; otherwise false.</returns>
public bool TryGetValue(TKey3 key3, out Quadruple result) => m_index3.TryGetValue(key3, out result);
/// <summary>
/// Computes the square root of <paramref name="value"/> by dispatching through
/// the shared unary-operation helper with <c>Operation.Sqrt</c>.
/// </summary>
/// <param name="value">The operand.</param>
/// <returns>The result produced by <c>UnaryOp(Operation.Sqrt, value)</c>.</returns>
public Quadruple Sqrt(Quadruple value) => UnaryOp(Operation.Sqrt, value);