/// <summary>
/// Runs the "An-Bacahayt" fixed-point iteration x_{k+1} = (I - tao*C^{-1}*A)*x_k + tao*C^{-1}*b
/// for the linear system A*x = b, printing progress and the measured run time to the console.
/// </summary>
/// <param name="count">Ignored on entry (immediately reset to 0); retained for signature compatibility.</param>
/// <param name="arrayForA">System matrix A.</param>
/// <param name="arrayForB">Right-hand-side vector b.</param>
/// <param name="oldVectorForX">Initial guess x_0; reused as the previous iterate inside the loop.</param>
/// <param name="eye3">Identity matrix of matching dimension.</param>
/// <param name="c">Preconditioning matrix C.</param>
/// <param name="tao">Iteration step parameter tau.</param>
private static void AnBacahaytMEthod(int count, Matrix<double> arrayForA, Vector<double> arrayForB, Vector<double> oldVectorForX, Matrix<double> eye3, Matrix<double> c, double tao)
{
    Console.WriteLine("\n\n******************* An-Bacahayt-Method *******************");
    Stopwatch stopWatch = new Stopwatch();
    stopWatch.Start();
    count = 0; // incoming value is deliberately discarded; iterations are counted from zero

    // Convergence precondition: all eigenvalues of M = 2C - tau*A must have positive real part.
    Matrix<double> arrayForM = (2 * c) - tao * arrayForA;
    Console.WriteLine(arrayForM);
    Evd<double> eigenM = arrayForM.Evd();
    Vector<Complex> eigenVectroM = eigenM.EigenValues;
    if (eigenVectroM.All(item => item.Real > 0))
    {
        Console.WriteLine(eigenVectroM);

        // Hoist loop-invariant work out of the iteration (the original recomputed
        // c.Inverse() and both matrix products on every pass).
        Matrix<double> cInverse = c.Inverse();
        Matrix<double> iterationMatrix = eye3 - tao * cInverse * arrayForA;
        Vector<double> constantTerm = tao * cInverse * arrayForB;

        Vector<double> newVectorForX;
        while (true)
        {
            count++;
            newVectorForX = iterationMatrix * oldVectorForX + constantTerm;

            // NOTE(review): comparing sums is a weak convergence test (sign cancellations
            // can report false convergence); a norm of the difference would be safer.
            if (count == maximalIterationCount || Math.Abs(oldVectorForX.Sum() - newVectorForX.Sum()) < 0.00001)
            {
                Console.WriteLine("Iteration number count = {0} ", count);
                Console.WriteLine(newVectorForX);
                break;
            }

            if (count % 10 == 0)
            {
                Console.WriteLine("Iteration number count = {0} ", count);
                Console.WriteLine(newVectorForX);
            }

            oldVectorForX = newVectorForX;
        }
    }
    else
    {
        Console.WriteLine("M matrix EIG is a NEGATIVE");
    }

    // BUG FIX: the original called stopWatch.Start() a second time here; Stop() is
    // required so that Elapsed reports the completed measurement.
    stopWatch.Stop();
    TimeSpan ts = stopWatch.Elapsed;
    string elapsedTime = String.Format("{0:00}.{1:00}", ts.Seconds, ts.Milliseconds / 10);
    Console.WriteLine("AnBacahayt Method RunTime: {0}", elapsedTime);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="SingleOp"]/message_doc[@name="CharacterAverageConditional(StringDistribution)"]/*'/>
public static DiscreteChar CharacterAverageConditional(StringDistribution str)
{
    Argument.CheckIfNotNull(str, "str");

    // A point mass (a single concrete string) is delegated to the overload for strings.
    if (str.IsPointMass)
    {
        return(CharacterAverageConditional(str.Point));
    }

    // Accumulates, per character code (0..char.MaxValue), the log-probability that the
    // string distribution produces exactly that one-character string. Starts at -inf (= log 0).
    Vector resultLogProb = PiecewiseVector.Constant(char.MaxValue + 1, double.NegativeInfinity);

    // Walk the weighted automaton: take every non-epsilon transition reachable from the
    // start state's epsilon closure, and keep only those whose destination (through its own
    // epsilon closure) can terminate — i.e. paths that accept exactly one character.
    StringAutomaton probFunc = str.GetWorkspaceOrPoint();
    StringAutomaton.EpsilonClosure startEpsilonClosure = new Automaton<string, char, DiscreteChar, StringManipulator, StringAutomaton>.EpsilonClosure(probFunc, probFunc.Start);
    for (int stateIndex = 0; stateIndex < startEpsilonClosure.Size; ++stateIndex)
    {
        StringAutomaton.State state = startEpsilonClosure.GetStateByIndex(stateIndex);
        Weight stateLogWeight = startEpsilonClosure.GetStateWeightByIndex(stateIndex);

        foreach (var transition in state.Transitions)
        {
            if (!transition.IsEpsilon)
            {
                StringAutomaton.State destState = probFunc.States[transition.DestinationStateIndex];
                StringAutomaton.EpsilonClosure destStateClosure = new Automaton<string, char, DiscreteChar, StringManipulator, StringAutomaton>.EpsilonClosure(probFunc, destState);
                if (!destStateClosure.EndWeight.IsZero)
                {
                    // Path weight = (weight to reach the source state) * (transition weight)
                    //             * (ending weight of the destination's closure).
                    Weight weight = Weight.Product(stateLogWeight, transition.Weight, destStateClosure.EndWeight);
                    var logProbs = transition.ElementDistribution.Value.GetProbs();
                    logProbs.SetToFunction(logProbs, Math.Log);
                    // Fold this path's weighted character distribution into the accumulator
                    // in log space (log-sum-exp keeps the accumulation numerically stable).
                    resultLogProb = LogSumExp(resultLogProb, logProbs, weight);
                }
            }
        }
    }

    // If no single-character string has positive probability, the message is undefined.
    if (resultLogProb.All(double.IsNegativeInfinity))
    {
        throw new AllZeroException("An input distribution assigns zero probability to all single character strings.");
    }

    // Normalize in log space before exponentiating to avoid overflow/underflow.
    Vector resultProb = PiecewiseVector.Zero(char.MaxValue + 1);
    double logNormalizer = resultLogProb.LogSumExp();
    resultProb.SetToFunction(resultLogProb, lp => Math.Exp(lp - logNormalizer));
    return(DiscreteChar.FromVector(resultProb));
}
/// <summary>EP message to <c>character</c>.</summary>
/// <param name="str">Incoming message from <c>str</c>.</param>
/// <returns>The outgoing EP message to the <c>character</c> argument.</returns>
/// <remarks>
/// <para>The outgoing message is a distribution matching the moments of <c>character</c> as the random arguments are varied. The formula is <c>proj[p(character) sum_(str) p(str) factor(character,str)]/p(character)</c>.</para>
/// </remarks>
public static DiscreteChar CharacterAverageConditional(StringDistribution str)
{
    Argument.CheckIfNotNull(str, "str");

    // Per character code: log-probability of producing exactly that one-character string.
    Vector resultlogProb = PiecewiseVector.Constant(char.MaxValue + 1, double.NegativeInfinity);

    // Enumerate single-character accepting paths: every non-epsilon transition out of the
    // start state's epsilon closure whose destination can terminate through its own closure.
    StringAutomaton probFunc = str.GetProbabilityFunction();
    StringAutomaton.EpsilonClosure startEpsilonClosure = probFunc.Start.GetEpsilonClosure();
    for (int stateIndex = 0; stateIndex < startEpsilonClosure.Size; ++stateIndex)
    {
        StringAutomaton.State state = startEpsilonClosure.GetStateByIndex(stateIndex);
        double stateLogWeight = startEpsilonClosure.GetStateLogWeightByIndex(stateIndex);

        for (int transitionIndex = 0; transitionIndex < state.Transitions.Count; ++transitionIndex)
        {
            StringAutomaton.Transition transition = state.Transitions[transitionIndex];
            if (!transition.IsEpsilon)
            {
                StringAutomaton.State destState = probFunc.States[transition.DestinationStateIndex];
                StringAutomaton.EpsilonClosure destStateClosure = destState.GetEpsilonClosure();
                if (!double.IsNegativeInfinity(destStateClosure.EndLogWeight))
                {
                    // Path log-weight = reach + transition + ending log-weights.
                    double logWeight = stateLogWeight + transition.LogWeight + destStateClosure.EndLogWeight;
                    resultlogProb = LogSumExp(resultlogProb, transition.ElementDistribution.GetInternalDiscrete().GetLogProbs(), logWeight);
                }
            }
        }
    }

    if (resultlogProb.All(double.IsNegativeInfinity))
    {
        throw new AllZeroException("An input distribution assigns zero probability to all single character strings.");
    }

    // FIX: normalize in log space before exponentiating, matching the sibling overload of
    // this method. The original exponentiated the raw accumulated log-probabilities, which
    // can overflow (total weight >> 1) or underflow to all-zero (total weight << 1).
    Vector resultProb = PiecewiseVector.Zero(char.MaxValue + 1);
    double logNormalizer = resultlogProb.LogSumExp();
    resultProb.SetToFunction(resultlogProb, lp => Math.Exp(lp - logNormalizer));

    return(DiscreteChar.FromVector(resultProb));
}
/// <summary>
/// Uses the KernelOptimiser class to optimise the hypers given the current variational posterior
/// on the function values (which has mean SampleMean and covariance SampleCovariance)
/// </summary>
/// <param name="hypersToOptimise">Indices of the kernel hyperparameters the optimiser may adjust.</param>
/// <param name="Buffer">Buffer holding the kernel and derived quantities; mutated in place and returned.</param>
/// <param name="x">Input locations at which the Gram matrix is evaluated.</param>
/// <param name="SampleMean">Mean of the current variational posterior on the function values.</param>
/// <param name="SampleVariance">Covariance of the current variational posterior.</param>
/// <param name="scaling">Gamma message for the precision scaling, forwarded to the optimisation objective.</param>
/// <returns>The same <c>Buffer</c> instance with <c>Precision</c> (and, on the optimise path,
/// <c>ESamplePrecisionSample</c> and <c>PrecisionMeanLogDet</c>) updated.</returns>
public static GPBuffer BufferHelper(int[] hypersToOptimise, GPBuffer Buffer, Vector[] x, Vector SampleMean, PositiveDefiniteMatrix SampleVariance, Gamma scaling)
{
    // An all-zero mean is treated as "no data seen yet": skip hyperparameter optimisation
    // and just set the precision to the inverse Gram matrix of the current kernel.
    if (SampleMean.All(o => o == 0.0))
    {
        Buffer.Precision = Utils.GramMatrix(Buffer.kernel, x).Inverse();
    }
    else
    {
        //Console.WriteLine(Utils.KernelToArray(Buffer.kernel).Select(o => o.ToString()).Aggregate((p, q) => p + " " + q));
        // Configure the optimiser with the shared 'settings' (file-level field) and the
        // current kernel/inputs, then optimise the selected hypers; Buffer.Precision is
        // updated through the ref parameter as a side effect.
        var helper = new KernelOptimiser(settings);
        helper.kernel = Buffer.kernel;
        helper.xData = x;
        helper.hypersToOptimise = hypersToOptimise;

        helper.Optimise((prec, gradK, gradientVector) =>
            helperFunction(prec, gradK, gradientVector, scaling, SampleMean, SampleVariance), ref Buffer.Precision);

        // Refresh the cached expectations that depend on the new precision.
        Buffer.ESamplePrecisionSample = VectorGaussianScaledPrecisionOp.ESamplePrecisionSample(SampleMean, SampleVariance, Buffer.Precision);
        Buffer.PrecisionMeanLogDet = VectorGaussianScaledPrecisionOp.PrecisionMeanLogDet(Buffer.Precision);
        //Console.WriteLine(Utils.KernelToArray(Buffer.kernel).Select(o => o.ToString()).Aggregate((p, q) => p + " " + q));

        // NOTE(review): stores the optimised kernel in a file-level field — presumably so a
        // later call can reuse it; verify against the callers of rememberKernel.
        rememberKernel = Buffer.kernel;
    }

    return(Buffer);
}
/// <summary>
/// Returns true when every entry of the vector truncates to 0 or 1 when cast to int.
/// NOTE(review): the cast truncates, so fractional values such as 0.5 or 1.9 also pass —
/// confirm whether strict equality to 0.0f/1.0f was intended.
/// </summary>
/// <param name="vector">The vector whose entries are inspected.</param>
private bool CheckVectorValues(Vector<float> vector)
{
    return vector.All(component =>
    {
        int truncated = (int)component;
        return truncated == 0 || truncated == 1;
    });
}
/// <summary>
/// True when every component of this object's <c>c</c> collection lies strictly below
/// <c>Renderer.Epsilon</c> — i.e. the value is effectively black.
/// </summary>
public bool IsBlack() => c.All(channel => channel < Renderer.Epsilon);