public void TestDistribution()
{
    IList<double> values = new List<double>() { 3.2, 4.1, 5.3, 5.1, 3.825, 4.8, 2.7, 3.1, 2.3 };

    double m = values.Average();
    Assert.IsTrue(m > 3.824);
    Assert.IsTrue(m < 3.825);

    double sigma = Sigma.Get(values); // approx 1.02
    Assert.IsTrue(sigma > 1.01);
    Assert.IsTrue(sigma < 1.02);

    SigmaDistributor distributor = new SigmaDistributor(values, 0.5f);
    IDictionary<string, long> distribution = distributor.Distribute();

    Assert.IsTrue(distribution != null);
    Assert.IsTrue(distribution.Count == 6);
    Assert.IsTrue(distribution["-1,5σ"] == 2);
    Assert.IsTrue(distribution["-1σ"] == 2);
    Assert.IsTrue(distribution["-0,5σ"] == 0);
    Assert.IsTrue(distribution["+0,5σ"] == 2);
    Assert.IsTrue(distribution["+1σ"] == 1);
    Assert.IsTrue(distribution["+1,5σ"] == 2);
}
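// For orientation: the asserted mean (34.425 / 9 = 3.825) and the sigma bounds are
// consistent with a population standard deviation of about 1.0184, and the comma
// decimal separators in the keys ("-1,5σ") suggest culture-specific formatting.
// Below is a minimal sketch that reproduces the asserted counts, assuming
// half-open half-sigma bands labelled by their outer boundary and pre-seeded
// across the data range (which is why "-0,5σ" appears with a count of zero).
// All names are illustrative, not SigmaDistributor's actual internals.
// Requires: using System; using System.Collections.Generic;
//           using System.Globalization; using System.Linq;
static IDictionary<string, long> DistributeSketch(IList<double> values, double step)
{
    double mean = values.Average();
    double sigma = Sigma.Get(values); // population standard deviation, per the test bounds
    var comma = new CultureInfo("de-DE"); // comma decimal separator, as in "-1,5σ"
    string Key(string sign, double band) => sign + band.ToString("0.#", comma) + "σ";

    // Pre-seed every band out to the furthest value, so empty bands still appear.
    double maxBand = Math.Ceiling(values.Max(v => Math.Abs(v - mean) / sigma) / step) * step;
    var buckets = new Dictionary<string, long>();
    for (double b = step; b <= maxBand + 1e-12; b += step)
    {
        buckets[Key("-", b)] = 0;
        buckets[Key("+", b)] = 0;
    }

    foreach (double v in values)
    {
        double dev = (v - mean) / sigma;                        // signed distance in sigmas
        double band = Math.Ceiling(Math.Abs(dev) / step) * step;
        if (band == 0) band = step;                             // zero deviation counts as +0,5σ
        buckets[Key(dev < 0 ? "-" : "+", band)]++;
    }
    return buckets;
}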
public static Permutation FromSigma(Sigma sigma)
{
    // Decompose the mapping i -> sigma.Of(i) into disjoint cycles,
    // skipping fixed points (cycles of length 1).
    List<List<int>> cycles = new List<List<int>>();
    var visited = new HashSet<int>();
    for (int i = 1; i <= sigma.Size; i++)
    {
        if (visited.Contains(i))
        {
            continue;
        }
        var cycle = new List<int>();
        var curr = i;
        while (!visited.Contains(curr))
        {
            cycle.Add(curr);
            visited.Add(curr);
            curr = sigma.Of(curr);
        }
        if (cycle.Count > 1)
        {
            cycles.Add(cycle);
        }
    }
    return new Permutation(cycles);
}
public AnalyticStatisticsItemDetailsWindow(AnalyticStatisticsItemViewModel item)
    : this()
{
    if (item == null)
    {
        throw new ArgumentNullException(nameof(item));
    }

    this.item = item;
    sigma = Math.Round(Sigma.Get(item.Model.GetDoubleValues().ToList()), 2);
    expectation = Math.Round(item.Model.GetDoubleValues().Average(), 2);

    controlProperties.textBlockElementType.Text = item.Type;
    controlProperties.textBlockEngineFamily.Text = item.EngineFamily;
    controlProperties.textBlockEngine.Text = item.EngineType;
    controlProperties.textBlockExpectation.Text = expectation.ToString(CultureInfo.InvariantCulture);
    controlProperties.textBlockSigma.Text = sigma.ToString(CultureInfo.InvariantCulture);

    discretion = float.Parse(
        controlDisplaySettings.textBlockDiscretionValue.Text,
        NumberStyles.Float,
        CultureInfo.InvariantCulture);

    IDictionary<string, long> distribution = GetDistribution();
    controlGraphicalRepresentation.DisplayDistribution(distribution);
}
public GaussianNaiveBayes Estimate(Matrix input, Vector output)
{
    // Algorithm used to calculate variance + mean:
    // http://i.stanford.edu/pub/cstr/reports/cs/tr/79/773/CS-TR-79-773.pdf
    var distinctOutput = output.Distinct();
    var nbFeatures = input.NbColumns;
    var nbClasses = distinctOutput.Count();
    var classCount = new int[nbClasses];
    var classPrior = new int[nbClasses];
    // Small additive term that keeps the per-feature variances strictly positive.
    var epsilon = 1e-9 * input.Variance().Max().Key.GetNumber();

    Theta = Matrix.BuildEmptyMatrix(nbClasses, nbFeatures);
    Sigma = Matrix.BuildEmptyMatrix(nbClasses, nbFeatures);
    for (int i = 0; i < nbClasses; i++)
    {
        var ot = distinctOutput.Values[i];
        var indexes = output.FindIndexes(ot);
        Matrix subMatrix = indexes.Select(_ => input.GetRowVector(_).Values).ToArray();
        var kvp = UpdateMeanVariance(subMatrix);
        Theta.SetRow(kvp.Key, i);
        Sigma.SetRow(kvp.Value, i);
        classCount[i] = subMatrix.NbRows;
    }

    Sigma = Sigma.Sum(epsilon);
    CalculateClassPrior(output);
    return this;
}
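// For orientation: per class, Theta holds the per-feature means and Sigma the
// per-feature variances. A minimal, non-incremental sketch of what
// UpdateMeanVariance is presumably computing (the cited Chan/Golub/LeVeque
// report describes numerically stabler incremental updates); the names below
// are illustrative, not the library's API.
static (double[] Mean, double[] Variance) MeanVarianceSketch(double[][] rows)
{
    int n = rows.Length, d = rows[0].Length;
    var mean = new double[d];
    var variance = new double[d];
    foreach (var row in rows)
        for (int j = 0; j < d; j++)
            mean[j] += row[j] / n;        // accumulate the per-feature mean
    foreach (var row in rows)
        for (int j = 0; j < d; j++)
            variance[j] += (row[j] - mean[j]) * (row[j] - mean[j]) / n; // population variance
    return (mean, variance);
}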
public void Verify()
{
    Channel channel;
    Shared shared;
    PicoBuffer returnedExtraData;

    shared = new Shared();
    shared.load_or_generate_keys("testkey.pub", "testkey.priv");
    channel = new Channel();

    Thread prover_td = new Thread(() => prover_main(channel.get_name()));
    prover_td.Start();

    returnedExtraData = new PicoBuffer(0);
    Sigma.verify(shared, channel, null, "123456", returnedExtraData, null);
    returnedExtraData.append(new byte[] { 0x00 });
    Assert.AreEqual(returnedExtraData.to_string(), "Test data");

    prover_td.Join();

    shared.delete();
    channel.delete();
    returnedExtraData.delete();
}
private void TauSigmaInertia()
{
    double.TryParse(textBox1.Text, out a);
    double.TryParse(textBox9.Text, out b);
    double.TryParse(textBox8.Text, out c);
    double.TryParse(textBox7.Text, out d);

    // Validate the inputs before computing, so we never divide by zero below.
    if (a == 0 || b == 0 || c == 0 || d == 0)
    {
        MessageBox.Show("Invalid input");
        return;
    }
    if (d > b / 2)
    {
        MessageBox.Show("d must be less than b/2!");
        return;
    }
    if (c > a / 2)
    {
        MessageBox.Show("c must be less than a/2!");
        return;
    }

    Ix = ((a * a * a * b) - ((b - d) * (b - d) * (b - d) * (a - c - c))) / 12;
    textBox4.Text = "Ix = " + Ix.ToString() + " mm4";
    Tau = T1 / ((a * b) - (a - c - c) * (b - d));
    textBox5.Text = Tau.ToString();
    Sigma = (M1 * a) / (Ix * 2);
    textBox6.Text = Sigma.ToString();
}
public override void NextIteration(Vector x)
{
    if (N > 0)
    {
        Sigma = (Vector)Sigma.Multiply(SigmaMultiplier);
    }
}
public override int GetHashCode()
{
    unchecked
    {
        // ReSharper-style hash combination: multiply by the prime 397, xor in each field.
        var hashCode = Voltage.GetHashCode();
        hashCode = (hashCode * 397) ^ Sigma.GetHashCode();
        hashCode = (hashCode * 397) ^ Deviation.GetHashCode();
        return hashCode;
    }
}
public void Inference()
{
    Teacher?.Clear();
    Sigma?.Clear();
    var reader = InferenceReader ?? LearningReader;
    var buf = reader.GetBuffer();
    InferenceProcess(buf.Input);
}
/// <inheritdoc />
public override void SignalStop()
{
    if (StopSigmaOnClose)
    {
        Sigma.SignalStop();
    }
    Dispose();
}
public void TestSigmaGeneration()
{
    IList<double> data = new List<double>() { 3.2, 4.1, 5.3, 5.1, 3.825, 4.8, 2.7, 3.1, 2.3 };
    double sigma = Sigma.Get(data);
    Assert.IsTrue(sigma > 1.01);
    Assert.IsTrue(sigma < 1.02);
}
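// The asserted bounds match the population standard deviation of this data
// (≈ 1.0184; the sample standard deviation would be ≈ 1.0802 and fail the upper
// bound). A minimal sketch of an implementation consistent with the test,
// assuming Sigma.Get uses the population formula; illustrative only.
// Requires: using System; using System.Collections.Generic; using System.Linq;
public static class SigmaSketch
{
    public static double Get(IList<double> values)
    {
        double mean = values.Average();
        double variance = values.Sum(v => (v - mean) * (v - mean)) / values.Count;
        return Math.Sqrt(variance);
    }
}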
public override void NextIteration(Vector x)
{
    if (N > 0)
    {
        Sigma = (Vector)Sigma.Multiply(SigmaMultiplier);
        for (int i = 0; i < N; ++i)
        {
            Theta[i] -= Math.Min(Constraints[i].Evaluate(x), Theta[i]);
            Theta[i] /= SigmaMultiplier;
        }
    }
}
public static void sigma_test()
//****************************************************************************80
//
//  Purpose:
//
//    SIGMA_TEST tests SIGMA.
//
//  Licensing:
//
//    This code is distributed under the GNU LGPL license.
//
//  Modified:
//
//    02 June 2007
//
//  Author:
//
//    John Burkardt
//
{
    int c = 0;
    int n = 0;

    Console.WriteLine("");
    Console.WriteLine("SIGMA_TEST");
    Console.WriteLine("  SIGMA computes the SIGMA function.");
    Console.WriteLine("");
    Console.WriteLine("     N     Exact   SIGMA(N)");
    Console.WriteLine("");

    int n_data = 0;
    for (;;)
    {
        Burkardt.Values.Sigma.sigma_values(ref n_data, ref n, ref c);
        if (n_data == 0)
        {
            break;
        }
        Console.WriteLine("  " + n.ToString().PadLeft(4)
            + "  " + c.ToString().PadLeft(10)
            + "  " + Sigma.sigma(n).ToString().PadLeft(10) + "");
    }
}
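// SIGMA here is the number-theoretic divisor-sum function sigma(n): the sum of
// all positive divisors of n, e.g. sigma(6) = 1 + 2 + 3 + 6 = 12. A naive
// reference version for illustration; Burkardt's Sigma.sigma is the library
// routine actually exercised by the test above.
static int SigmaNaive(int n)
{
    int sum = 0;
    for (int d = 1; d <= n; d++)
    {
        if (n % d == 0)
        {
            sum += d; // d divides n, so it contributes to the divisor sum
        }
    }
    return sum;
}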
public static void sigma_values_test()
//****************************************************************************80
//
//  Purpose:
//
//    SIGMA_VALUES_TEST tests SIGMA_VALUES.
//
//  Licensing:
//
//    This code is distributed under the GNU LGPL license.
//
//  Modified:
//
//    09 February 2007
//
//  Author:
//
//    John Burkardt
//
{
    int fn = 0;
    int n = 0;

    Console.WriteLine("");
    Console.WriteLine("SIGMA_VALUES_TEST:");
    Console.WriteLine("  SIGMA_VALUES returns values of");
    Console.WriteLine("  the SIGMA function.");
    Console.WriteLine("");
    Console.WriteLine("     N         SIGMA(N)");
    Console.WriteLine("");

    int n_data = 0;
    for (;;)
    {
        Sigma.sigma_values(ref n_data, ref n, ref fn);
        if (n_data == 0)
        {
            break;
        }
        Console.WriteLine("  " + n.ToString().PadLeft(6)
            + "  " + fn.ToString().PadLeft(12) + "");
    }
}
private void ComputeCholeskyDecomp()
{
    var cd = Sigma.Cholesky();
    cd.Solve(Sigma);
    sqrtSigma = cd.Factor;
    detSigma = Sigma.Determinant();
    if (detSigma != 0)
    {
        invSigma = Sigma.Inverse();
        sqrtSigmaInverse = sqrtSigma.Inverse();
    }
    else
    {
        invSigma = null;
        sqrtSigmaInverse = null;
    }
}
public Matrix PredictProbability(Matrix input)
{
    // Per class i, accumulate log P(class i) + log P(x | class i) under a
    // diagonal Gaussian, then normalize across classes with log-sum-exp.
    var jointLogLikelihood = new Vector[ClassPriors.Count()];
    for (int i = 0; i < ClassPriors.Count(); i++)
    {
        var kvp = ClassPriors.ElementAt(i);
        var jointi = System.Math.Log(kvp.Value);
        var nij = Sigma.GetRowVector(i).Multiply(System.Math.PI).Multiply(2.0).Log().Sum().GetNumber() * -0.5;
        var res = input.Substract(Theta.GetRowVector(i)).Pow(2).Div(Sigma.GetRowVector(i)).SumAllRows().Multiply(-0.5).Sum(nij);
        jointLogLikelihood[i] = res.Sum(jointi);
    }

    Matrix jointLogLikelihoodMatrix = jointLogLikelihood;
    jointLogLikelihoodMatrix = jointLogLikelihoodMatrix.Transpose();
    var logProbX = jointLogLikelihoodMatrix.Logsumexp();
    return jointLogLikelihoodMatrix.Substract(logProbX).Exp();
}
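// Logsumexp above normalizes the joint log-likelihoods into posteriors without
// overflowing. A scalar sketch of the standard trick, assuming the library
// computes log(sum(exp(x_k))) per row; illustrative only.
// Requires: using System; using System.Collections.Generic; using System.Linq;
static double LogSumExp(IReadOnlyCollection<double> xs)
{
    double max = xs.Max();                                 // factor out the largest term
    return max + Math.Log(xs.Sum(x => Math.Exp(x - max))); // exp() stays in a safe range
}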
//---------------------------------------------------------------------------------------------
/// <summary>
/// SVD reduction
/// </summary>
/// <param name="Reduction">threshold below which singular values are discarded</param>
public void Reduction_SVD(double Reduction)
{
    // smallest dimension
    int Min_Size = Math.Min(Sigma.M, Sigma.N);

    // find the first singular value below the threshold
    for (int i = 0; i < Min_Size; i++)
    {
        if (Math.Abs(Sigma.Elem[i][i]) < Reduction)
        {
            Min_Size = i;
            break;
        }
    }

    // reduce the matrix dimensions
    Sigma.Size_Reduction(Min_Size, Min_Size);
    U.Size_Reduction(U.M, Min_Size);
    V.Size_Reduction(V.M, Min_Size);

    // reassemble the rank-reduced approximation A ≈ U_k Σ_k V_kᵀ
    RES = new Matrix(U.M, V.M);
    RES = U * Sigma * V.Transpose_Matrix();
}
public override string ToString() => $"TukeyGH({Mu.ToStringInvariant()},{Sigma.ToStringInvariant()},{G.ToStringInvariant()},{H.ToStringInvariant()})";
internal static void NowCastWFPFunc(out string outMethod, out ILArray<double> outTime,
    out ILArray<double> outX, out ILArray<double> outXhmsAll, out int outXhmsAllTimeOffset,
    out int outXhmsLLength, out int outXhmsUOffset, ILArray<double> Data,
    double TPredict = TPredictDef, string Method = MethodDef, int r = rDef, double Ts = TsDef)
{
    #region "Original function comments"
    // Nowcasting with model based on total wind farm power
    //
    // NowCastWFPFunc(Data,TPredict,Method,r,Ts)
    //
    // Data     : Total wind farm power
    // TPredict : Time for starting multi step prediction. If TPredict<1 it
    //            is assumed a fraction of the end time (default 0.5)
    // Method   : 'AR(1)' or 'Persistence', only the first letter counts (default 'a')
    // r        : Decimation with a moving average of order r (default 1)
    // Ts       : Sampling time (default 0.1)
    //
    // External input: None
    // Time-stamp: <2014-10-17 14:09:40 tk>
    // Version 1: Initial version
    // Torben Knudsen
    // Aalborg University, Dept. of Electronic Systems, Section of Automation
    // and Control
    // E-mail: [email protected]
    #endregion

    #region "Used variables declaration"
    int IMT;
    double q0;
    double TauLambdaLRel;
    double Lambda0;
    double TauLambdaInf;
    double TimeScaling;
    //string TitleStr;
    int NS;
    ILArray<double> T;
    ILArray<double> TimePlot;
    double NWT;
    double NomWFP;
    ILArray<double> PWF;
    double MinWFP;
    double TauLambdaL;
    double LambdaL;
    double LambdaInf;
    int TPS;
    ILArray<double> Theta;
    ILArray<double> Sigma;
    ILArray<double> Xh;
    ILArray<double> Lambda;
    ILArray<int> Time;
    ILArray<double> Res;
    int TPSEst;
    double A;
    double B;
    ILArray<double> xhms;
    ILArray<double> covxhms;
    double aux;
    int i;
    ILArray<double> q;
    double xh;
    double xt;
    double dt;
    ILArray<double> sigmaxhms;
    ILArray<double> ConIntAWFP;
    int NS10Min;
    int NSOneHour;
    #endregion

    //% setting up inputs
    //TsDef = 0.1;
    //rDef = 1;
    //MethodDef = "a";
    //TPredictDef = 0.5;
    //if nargin < 5; Ts= []; end;
    //if nargin < 4; r= []; end;
    //if nargin < 3; Method= []; end;
    //if nargin < 2; TPredict= []; end;
    //if nargin < 1; error('Error TK: To few input arguments'); end;
    //if isempty(r); r= rDef; end;
    //if isempty(TPredict); TPredict= TPredictDef; end;
    //if isempty(Ts); Ts= TsDef; end;
    //if isempty(Method); Method= MethodDef; end;

    //% Parameters
    IMT = 1;                   // Include measurement time;
    q0 = 0;
    TauLambdaLRel = 0.1;       // TauLambdaL= 10% of the samples
    Lambda0 = 0.5;             // Initial Lambda
    TauLambdaInf = 600;        // TauLambdaInf= 10min
    TimeScaling = 1.0 / 3600;  // From seconds to hours

    //% Initialization
    // Use Offwind simulation data made by Rasmus 2014-10-14
    // The format is:
    // [time sumPower sumRef sumAvai] as a matrix NS x 4. Power in MW
    // 48 WT are simulated. Data for individual WT are also found e.g. in
    // Power, P_ref, PA, beta etc
    if (strncmpi(Method, "a", 1))
    {
        Method = "AR(1)";
    }
    else
    {
        Method = "Persistence";
    }
    //TitleStr = "Nowcasting with " + Method + " model based on total wind farm " //...
    //    + "power, Offwind simulation";

    Data = Data[_(':')];
    NS = size(Data, 1);          // Number of samples
    NS = max_(find(Data > 0));   // Number of samples;
    Data = Data[_(1, ':', NS)];  // Limit the data
    T = Ts * (_c(1, NS)).T;
    TimePlot = T * TimeScaling;  // Time in hours
    NWT = 48;
    NomWFP = NWT * 5e6 * 1e-6;   // Power in MW
    PWF = Data;                  // Total Power in MW
    if (r > 1)
    {
        DecimateWMA(out PWF, out _ILArray_double, PWF, r);
        NS = size(PWF, 1);           // Number of samples
        Ts = Ts * r;
        T = Ts * (_c(1, NS)).T;
        TimePlot = T * TimeScaling;  // Time in hours
    }
    MinWFP = 0;  // For real WFs

    //% Definitions etc.
    // Calculate Lambda* from TauLambda* and dt
    TauLambdaL = TauLambdaLRel * (T._(end) - T._(1));  // TauLambdaL= 10% of the samples
    LambdaL = exp(-Ts / TauLambdaL);
    LambdaInf = exp(-Ts / TauLambdaInf);

    //% Algorithm
    // Initialization
    // Prediction from time in TPredict
    // if TPredict is a fraction calculate TPredict
    if (TPredict < 1)
    {
        TPredict = round(TPredict * T._(end) / Ts) * Ts;
    }
    if (TPredict < TauLambdaInf)
    {
        warning(__["TK: Prediction time is so small that the estimator/predictor ", //...
            "might not have converged yet"]);
    }
    TPS = min_(find(T >= TPredict));  // Use time from measurements

    // Multi step prediction
    if (strncmpi(Method, "a", 1))
    {
        // ARX1 version;
        // Recursive parameter estimation
        RLSMARX1(out Theta, out Sigma, out Xh, out Lambda, out Time,
            out _ILArray_double, out _ILArray_double, out _ILArray_double, PWF, 1, LambdaInf);
        Res = __[TimePlot[_(Time)], PWF[_(Time)], Xh, Sigma, Lambda];
        Res = __[nan(Time._(1) - 1, size(Res, 2)), ';', Res];
        // Notice that length of Time is shorter than length of T if batch RLS
        // start is used so TPSEst < TPS in that case
        TPSEst = min_(find(_dbl(Time) * Ts >= TPredict));  // Use time from estimates
        // Parameter values must be taken for index TPSEst
        A = Theta._(TPSEst, 1);
        if (abs(A) > 1)
        {
            warning(__["TK: Unstable pole, max(abs(eig)): ", num2str(abs(A))]);
        }
        B = Theta._(TPSEst, 2);
        xhms = zeros(NS - TPS, 1);
        covxhms = zeros(NS - TPS, 1);
        xhms._(1, '=', Xh._(TPSEst + 1));
        covxhms._(1, '=', Sigma._(TPSEst + 1));
        aux = Sigma._(TPSEst + 1);
        for (i = 2; i <= NS - TPS; i++)
        {
            xhms._(i, '=', A * xhms._(i - 1) + B);
            aux = A * aux * A.T();
            covxhms._(i, '=', covxhms._(i - 1) + aux);
        }
        // Prepend xhms with the present measurement so the plot clearly indicates
        // the time for the measurement
    }
    else
    {
        // Persistence version;
        // Initialization
        Lambda = Lambda0;
        q = q0;
        xh = PWF._(1);
        Res = __[T._(1), PWF._(1), xh, q, Lambda0];
        // Recursive estimation of incremental covariance
        for (i = 2; i <= NS; i++)
        {
            xt = PWF._(i) - xh;
            dt = T._(i) - T._(i - 1);
            q = _m(Lambda, '*', q) + (1 - Lambda) * _p(xt, 2) / dt;
            Res = __[Res, ';', __[T._(i), PWF._(i), xh, q, Lambda]];
            Lambda = LambdaL * Lambda + (1 - LambdaL) * LambdaInf;
            xh = PWF._(i);
        }
        Res[_(':'), _(1)] = Res[_(':'), _(1)] * TimeScaling;
        // Persistence version;
        xhms = Res._(TPS + 1, 3) * ones(NS - TPS, 1);
        covxhms = Res._(TPS + 1, 4) * Ts * ((_c(1, NS - TPS)).T);
    }

    if (IMT != 0)
    {
        xhms = __[PWF._(TPS), ';', xhms];
        covxhms = __[0, ';', covxhms];
    }
    sigmaxhms = sqrt(covxhms);
    ConIntAWFP = _m(xhms, '*', __[1, 1, 1]) + _m(sigmaxhms, '*', __[-2, 0, 2]);
    ConIntAWFP = min(max(ConIntAWFP, MinWFP), NomWFP);  // Limit confidence limits

    // Plot results
    // Plot actual as black solid and lower, prediction and upper confidence
    // limits as red, green and blue solid for 10 min, dashed for one hour and
    // dotted for the rest.
    //figure;
    NS10Min = min(NS - TPS - 1, round_(600.0 / Ts));
    NSOneHour = min(NS - TPS - 1, round_(3600.0 / Ts));
    //set(gcf, "defaultaxescolororder", ILMath.eye(3, 3));

    outMethod = Method;
    outTime = TimePlot;
    outX = Res[_(':'), _(2)];
    outXhmsAll = ConIntAWFP;
    outXhmsAllTimeOffset = (TPS - IMT);
    outXhmsLLength = (NS10Min + IMT);
    outXhmsUOffset = (NSOneHour + IMT);

    //plot(
    //    TimePlot, Res[ILMath.full, 2 - 1], 'k',//...
    //    TimePlot[ILMath.r(TPS + 1 - IMT - 1, TPS + NS10Min - 1)], ConIntAWFP[ILMath.r(1 - 1, NS10Min + IMT - 1), ILMath.full],//...
    //    TimePlot[ILMath.r(TPS + NS10Min + 1 - 1, TPS + NSOneHour - 1)], ConIntAWFP[_a(NS10Min + 1, 1, NSOneHour) + IMT - 1, ILMath.full], "--",//...
    //    TimePlot[ILMath.r(TPS + NSOneHour + 1 - 1, ILMath.end)], ConIntAWFP[ILMath.r(NSOneHour + 1 + IMT - 1, ILMath.end), ILMath.full], ':');
    //title(TitleStr);
    //Legend= {'x' 'xhmsL' 'xhms' 'xhmsU'};
    //legend(Legend);
    //grid('on');
}
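// The ARX1 branch above iterates a one-step AR(1) model forward: with
// x(k+1) = A x(k) + B and innovation variance S, the i-step-ahead mean and
// variance follow the recursions sketched here in plain C# (illustrative only,
// detached from the ILArray helpers used above).
static (double[] Mean, double[] Variance) MultiStepAr1(double a, double b, double s, double x0, int steps)
{
    var mean = new double[steps];
    var variance = new double[steps];
    double m = x0, v = 0, aux = s;
    for (int i = 0; i < steps; i++)
    {
        m = a * m + b;      // propagate the conditional mean
        v += aux;           // accumulate the prediction variance
        aux = a * aux * a;  // each step's innovation contribution shrinks by a^2
        mean[i] = m;
        variance[i] = v;
    }
    return (mean, variance);
}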
private void CalculateErrors(ref Iteration iter)
{
    var n = iter.Elements.Count;
    var errors = new DenseVector(n);
    var errorsNorms = new DenseVector(n);
    var eNormV2 = 0.0;
    var uNormV2 = 0.0;
    for (int i = 0; i < n; ++i)
    {
        var m = Math.Pow(iter.Elements[i].H, 3) / Mu.Evaluate(iter.Elements[i].MidPoint);
        var b = F.Evaluate(iter.Elements[i].MidPoint)
            - (Beta.Evaluate(iter.Elements[i].MidPoint) * iter.SolutionCenterDeriv[i])
            - (Sigma.Evaluate(iter.Elements[i].MidPoint) * iter.SolutionCenter[i]);
        var d = 10 + ((iter.Elements[i].H * Beta.Evaluate(iter.Elements[i].MidPoint))
            / Mu.Evaluate(iter.Elements[i].MidPoint)
            * ((iter.Elements[i].H * iter.Elements[i].H * Sigma.Evaluate(iter.Elements[i].MidPoint))
            / Mu.Evaluate(iter.Elements[i].MidPoint)));
        var e_h2 = (5.0 / 6) * m * (b * b / d);
        errorsNorms[i] = Math.Sqrt(Math.Abs(e_h2));
        eNormV2 += Math.Abs(e_h2);
        var q = iter.SolutionCenterDeriv[i];
        uNormV2 += iter.Elements[i].H * q * q;
    }

    iter.UNormV2 = uNormV2;
    iter.ENormV2 = eNormV2;
    iter.ErrorsNormsV = errorsNorms;
    //iter.UNorm = Math.Sqrt(uNormV2);
    iter.UNorm = Math.Sqrt(iter.Solution * GenereteMatrix(iter.Elements) * iter.Solution);
    iter.ENorm = Math.Sqrt(eNormV2);

    for (int i = 0; i < n; ++i)
    {
        errors[i] = (errorsNorms[i] * Math.Sqrt(n) * 100) / Math.Sqrt(uNormV2 + eNormV2);
    }
    iter.Errors = errors;

    if (iter.N == InitialN)
    {
        startEn = iter.ENorm;
    }
    iter.OrderOfConvergence = (iter.N != InitialN)
        ? (Math.Log(startEn) - Math.Log(iter.ENorm)) / (Math.Log(iter.N) - Math.Log(InitialN))
        : 0;
    iter.MaxRelativeError = iter.Errors.Maximum();
}
private Matrix GenereteMatrix(List<Element> elem)
{
    var n = elem.Count;
    var matrix = new DenseMatrix(n + 1, n + 1);

    matrix[0, 0] = Mu.Evaluate(elem[0].MidPoint) / elem[0].H
        - Beta.Evaluate(elem[0].MidPoint) / 2
        + Sigma.Evaluate(elem[0].MidPoint) * elem[0].H / 3
        + Alpha;
    matrix[0, 1] = -Mu.Evaluate(elem[0].MidPoint) / elem[0].H
        + Beta.Evaluate(elem[0].MidPoint) / 2
        + Sigma.Evaluate(elem[0].MidPoint) * elem[0].H / 6;

    for (int i = 1; i < n; ++i)
    {
        matrix[i, i - 1] = -Mu.Evaluate(elem[i - 1].MidPoint) / elem[i - 1].H
            - Beta.Evaluate(elem[i - 1].MidPoint) / 2
            + Sigma.Evaluate(elem[i - 1].MidPoint) * elem[i - 1].H / 6;
        matrix[i, i] = Mu.Evaluate(elem[i - 1].MidPoint) / elem[i - 1].H
            + Beta.Evaluate(elem[i - 1].MidPoint) / 2
            + Sigma.Evaluate(elem[i - 1].MidPoint) * elem[i - 1].H / 3
            + Mu.Evaluate(elem[i].MidPoint) / elem[i].H
            - Beta.Evaluate(elem[i].MidPoint) / 2
            + Sigma.Evaluate(elem[i].MidPoint) * elem[i].H / 3;
        matrix[i, i + 1] = -Mu.Evaluate(elem[i].MidPoint) / elem[i].H
            + Beta.Evaluate(elem[i].MidPoint) / 2
            + Sigma.Evaluate(elem[i].MidPoint) * elem[i].H / 6;
    }

    matrix[n, n - 1] = -Mu.Evaluate(elem[n - 1].MidPoint) / elem[n - 1].H
        - Beta.Evaluate(elem[n - 1].MidPoint) / 2
        + Sigma.Evaluate(elem[n - 1].MidPoint) * elem[n - 1].H / 6;
    matrix[n, n] = Mu.Evaluate(elem[n - 1].MidPoint) / elem[n - 1].H
        + Beta.Evaluate(elem[n - 1].MidPoint) / 2
        + Sigma.Evaluate(elem[n - 1].MidPoint) * elem[n - 1].H / 3
        + Gamma;

    return matrix;
}
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
public override void Dispose()
{
    base.Dispose();
    Sigma.RemoveMonitor(this);
}
public IActionResult Index()
{
    List<int> primes = sieve();
    ViewBag.primes = primes;

    dotnet.Models.ViewModels.IndexViewModel model = new dotnet.Models.ViewModels.IndexViewModel();

    // 20 random values and min + max
    model.values = twentyRandom();
    int max = 0;
    int min = 10000000;
    foreach (int r in model.values)
    {
        if (r > max)
        {
            max = r;
        }
        if (r < min)
        {
            min = r;
        }
    }
    model.Max = max;
    model.Min = min;

    // hash of values from database
    var fiftyValues = _context.Fiftyvalues.ToList();
    model.fifty = new List<FiftyValuesView>();
    foreach (Fiftyvalues v in fiftyValues)
    {
        FiftyValuesView fv = new FiftyValuesView();
        fv.Value = v.Value;
        fv.Id = v.Id;
        fv.Hash = ComputeSha256Hash(v.Value);
        model.fifty.Add(fv);
    }

    // names
    var names = _context.Names.ToList();
    var sorted_names = MergeSort(names);
    model.names = new List<Names>();
    model.names = sorted_names;

    // make sigma for display on page
    Sigma s = new Sigma();
    s.nodes = new List<Nodes>();
    s.edges = new List<Edges>();
    int i = 1;
    foreach (Names n in names)
    {
        s.nodes.Add(new Nodes(n.Id.ToString()));
        string[] friends = n.Friends.Split(",");
        foreach (string f in friends)
        {
            s.edges.Add(new Edges(i, n.Id.ToString(), f));
            i++;
        }
    }
    model.s = s;

    // bfs for shortest path
    var graph = new Graph<int>(s.ToVertices(), s.ToTuples());
    var algorithms = new Algorithms();
    var startVertex = 6;
    var shortestPath = algorithms.ShortestPathFunction(graph, startVertex);
    model.path = string.Join(" => ", shortestPath(77));

    // string pairs
    var hash = "acb80281e4e94213c7452a81fa08f61893eff5ffa62d50876da8d1fed4710d95";
    var strings = new List<Tuple<string, string>>();
    strings.Add(Tuple.Create("ethereal", "front"));
    strings.Add(Tuple.Create("ask", "release"));
    strings.Add(Tuple.Create("bucket", "unique"));
    strings.Add(Tuple.Create("plug", "average"));
    strings.Add(Tuple.Create("trade", "weather"));
    strings.Add(Tuple.Create("card", "wide"));
    strings.Add(Tuple.Create("numberless", "copper"));
    strings.Add(Tuple.Create("fruit", "example"));
    strings.Add(Tuple.Create("slap", "pause"));
    strings.Add(Tuple.Create("jittery", "confused"));
    model.hashPresent = checkForHash(hash, strings);

    // insert statement
    Transactions tx = new Transactions();
    tx.Customer = "Ben Hogan";
    tx.Price = 900;
    tx.Item = "Cleek";
    _context.Transactions.Add(tx);
    _context.SaveChanges();

    return View(model);
}