public NDarray<double> Forward(NDarray<double> X, bool isTraining)
{
    IsTraining = isTraining;

    // Lazily initialize the running statistics on the first call.
    if (runningMean == null)
    {
        runningMean = ND.MeanAxis(X, 0);
        runningVar = ND.VarAxis(X, 0);
    }

    NDarray<double> mean = null, var = null;
    if (isTraining)
    {
        // Use batch statistics and fold them into the running averages.
        mean = ND.MeanAxis(X, 0);
        var = ND.VarAxis(X, 0);
        runningMean = ND.AddNDarray(runningMean, mean, momentum, 1 - momentum);
        runningVar = ND.AddNDarray(runningVar, var, momentum, 1 - momentum);
    }
    else
    {
        // At inference time, normalize with the accumulated running statistics.
        mean = runningMean;
        var = runningVar;
    }

    xCentered = ND.SubNDarray(X, mean);
    stdDevInv = var.ApplyFunc(x => 1.0 / Math.Sqrt(x + 0.01)); // epsilon 0.01 (unusually large; 1e-5 is more common)
    xNorm = ND.MulNDarray(xCentered, stdDevInv);

    var output = ND.MulNDarray(gamma, xNorm);
    output = ND.AddNDarray(output, beta);
    return output;
}
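// A minimal standalone sketch (not part of the layer above) of the same
// normalization on one feature column of plain doubles, to make the formula
// explicit: y = gamma * (x - mean) / sqrt(var + eps) + beta. The helper name
// and the eps default here are illustrative choices, not the library's API.
static double[] BatchNormColumnSketch(double[] x, double gamma, double beta, double eps = 1e-5)
{
    double mean = x.Average();                                           // requires System.Linq
    double variance = x.Select(v => (v - mean) * (v - mean)).Average();
    double stdInv = 1.0 / Math.Sqrt(variance + eps);
    return x.Select(v => gamma * (v - mean) * stdInv + beta).ToArray();
}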
public override void Create()
{
    // Year + month + table name + "-" + reporting-unit code
    this.RWBH = $"{ND}{YD}{CJBBM.Substring(CJBBM.Length >= 5 ? CJBBM.Length - 5 : 0)}-{JGDM}"; // previously: "T" + DateTime.Now.ToString("yyyyMMddHHmmssfff")
    this.CREATOR = OperatorProvider.Provider.Current().UserName;
    this.CREATEAT = DateTime.Now;
}
public CrossEntropyLoss()
{
    Name = "CrossEntropyLoss";
    // Clamp predictions away from 0 and 1 so the logs below stay finite.
    pc = ND.Clamp(p0, 1e-12, 1 - 1e-12);
    lossExpr = (-y0 * ND.Log(pc) - (1 - y0) * ND.Log(1 - pc)).MeanAll();
    gradExpr = -y0 / pc + (1 - y0) / (1 - pc);
}
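// For reference, the expressions above are elementwise binary cross-entropy:
//   loss(y, p) = -y*log(pc) - (1-y)*log(1-pc),  dloss/dp = -y/pc + (1-y)/(1-pc).
// A scalar sketch with the same clamping (hypothetical helper, not the library's API):
static (double loss, double grad) BceSketch(double y, double p)
{
    double pc = Math.Min(Math.Max(p, 1e-12), 1 - 1e-12); // keep log() finite
    double loss = -y * Math.Log(pc) - (1 - y) * Math.Log(1 - pc);
    double grad = -y / pc + (1 - y) / (1 - pc);
    return (loss, grad);
}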
static void Test3()
{
    var a = ND.Uniform(1, 10, 2, 3, 4).CastCopy<double>();
    Console.WriteLine(a);
    Console.WriteLine(a.Sum());
    Console.WriteLine(a.Sum(0));
    Console.WriteLine(a.Sum(1));
    Console.WriteLine(a.Sum(2));
    Console.WriteLine(a.Prod());
    Console.WriteLine(a.Prod(0));
    Console.WriteLine(a.Prod(1));
    Console.WriteLine(a.Prod(2));
    Console.WriteLine(a.Mean());
    Console.WriteLine(a.Mean(0));
    Console.WriteLine(a.Mean(1));
    Console.WriteLine(a.Mean(2));

    //var a = ND.Uniform(1, 10, 4, 2, 2);
    //Console.WriteLine(a);
    //Console.WriteLine(a.Prod(2));
    //Console.WriteLine(a.Prod(2).Prod(1, true));
    //Console.WriteLine(a.Reshape(4, -1).Prod(1, true));
}
public double Func(NDarray<U> y, NDarray<U> p)
{
    // A sample counts as correct only if every rounded component matches.
    var eq1 = ND.ApplyFuncAB(ND.Round(y), ND.Round(p), NDarray<U>.OpsT.Eq);
    var eq2 = ND.ProdAxis(eq1, -1);
    return eq2.Data.Average();
}
static void TestXor<Type>()
{
    Console.WriteLine($"Hello World! Xor MLP. Backend NDarray<{typeof(Type).Name}>");
    Utils.DebugNumpy = Utils.DbgNo;

    var Xdata = ND.CreateNDarray(new double[4, 2] { { 0, 0 }, { 1, 0 }, { 0, 1 }, { 1, 1 } }).CastCopy<Type>();
    var Ydata = ND.CreateNDarray(new double[4, 1] { { 0 }, { 1 }, { 1 }, { 0 } }).CastCopy<Type>();

    var net = new Network<Type>(new SGD<Type>(lr: 0.2), new SquareLoss<Type>(), new RoundAccuracy<Type>());
    net.AddLayer(new DenseLayer<Type>(inputNodes: 2, outputNodes: 8, new TanhActivation<Type>()));
    net.AddLayer(new DenseLayer<Type>(outputNodes: 1, new SigmoidActivation<Type>()));
    net.Summary();

    var sw = Stopwatch.StartNew();
    net.Fit(Xdata, Ydata, epochs: 1000, batchSize: 4, displayEpochs: 50);
    Console.WriteLine($"Time:{sw.ElapsedMilliseconds} ms");

    Console.WriteLine("Prediction");
    NDarray<Type> pred = net.Predict(Xdata).Round(6);
    for (int k = 0; k < Xdata.Shape[0]; ++k)
    {
        Console.WriteLine($"{Xdata[k]} = {Ydata[k]} -> {pred[k]}");
    }
}
public NDarray<double> Backward(NDarray<double> accumGrad)
{
    // Snapshot gamma before the optimizer update so the input gradient
    // is computed with the parameters used during the forward pass.
    var gamma0 = new NDarray<double>(gamma);
    var s0 = ND.SumAxis(accumGrad, 0);
    var s1 = ND.SumAxis(ND.MulNDarray(accumGrad, xCentered), 0);

    if (IsTraining)
    {
        var gGamma = ND.SumAxis(ND.MulNDarray(accumGrad, xNorm), 0);
        var gBeta = s0;
        gOpt.Update(gamma, gGamma);
        bOpt.Update(beta, gBeta);
    }

    double batchSize = accumGrad.Shape[0];
    var m0 = ND.MulNDarray(gamma0, stdDevInv, 1.0 / batchSize, 1);
    var m1 = ND.MulNDarray(xNorm, ND.MulNDarray(stdDevInv, s1));
    var diff = ND.SubNDarray(accumGrad, ND.AddNDarray(s0, m1), batchSize, 1);
    accumGrad = ND.MulNDarray(m0, diff);
    return accumGrad;
}
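// Sanity check on the expressions above: with N = batchSize, dY = accumGrad,
// s0 = sum(dY, axis: 0) and s1 = sum(dY * xCentered, axis: 0), the returned
// gradient matches the standard batch-norm backward formula
//   dX = (gamma * stdDevInv / N) * (N * dY - s0 - xNorm * stdDevInv * s1),
// where xNorm * stdDevInv * s1 equals xNorm * sum(dY * xNorm, axis: 0).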
public void CancelSelection()
{
    switch (curChar)
    {
        case SelectableCharacters.Leslie:
            CD.GetComponent<Collider2D>().isTrigger = true;
            Leslie.GetComponent<Image>().sprite = HoverImages[0];
            break;
        case SelectableCharacters.Jona:
            ND.GetComponent<Collider2D>().isTrigger = true;
            Jona.GetComponent<Image>().sprite = HoverImages[1];
            break;
        case SelectableCharacters.Ashita:
            NED.GetComponent<Collider2D>().isTrigger = true;
            Ashita.GetComponent<Image>().sprite = HoverImages[2];
            break;
        case SelectableCharacters.Catarine:
            ED.GetComponent<Collider2D>().isTrigger = true;
            Catarine.GetComponent<Image>().sprite = HoverImages[3];
            break;
        case SelectableCharacters.Ramsey:
            SED.GetComponent<Collider2D>().isTrigger = true;
            Ramsey.GetComponent<Image>().sprite = HoverImages[4];
            break;
        case SelectableCharacters.Drogas:
            SD.GetComponent<Collider2D>().isTrigger = true;
            Drogas.GetComponent<Image>().sprite = HoverImages[5];
            break;
        case SelectableCharacters.Sylvia:
            SWD.GetComponent<Collider2D>().isTrigger = true;
            Sylvia.GetComponent<Image>().sprite = HoverImages[6];
            break;
        case SelectableCharacters.Bjin:
            WD.GetComponent<Collider2D>().isTrigger = true;
            Bjin.GetComponent<Image>().sprite = HoverImages[7];
            break;
        case SelectableCharacters.Xavier:
            NWD.GetComponent<Collider2D>().isTrigger = true;
            Xavier.GetComponent<Image>().sprite = HoverImages[8];
            break;
        case SelectableCharacters.None:
        default:
            break;
    }

    weaponSelect.SetActive(false);
    weaponSelectActive = false;
    characterChosen = false;
}
public override NDarray<double> Loss(NDarray<U> y, NDarray<U> p)
{
    y0.SetContent(y);
    p0.SetContent(p);
    pc.Evaluate();
    //return lossExpr;
    return (-y0 * ND.Log(pc) - (1 - y0) * ND.Log(1 - pc)).MeanAll();
}
public double Func(NDarray<U> y, NDarray<U> p)
{
    // Class prediction is the arg-max along the last axis.
    var y0 = ND.ArgMax(y, -1);
    var p0 = ND.ArgMax(p, -1);
    NDarray<double> eq = ND.Eq(y0, p0);
    return eq.Data.Average();
}
static void Test4()
{
    var a = ND.Uniform(1, 10, 4, 4);
    var b = ND.Uniform(1, 10, 2, 4, 2);
    Console.WriteLine(a);
    Console.WriteLine(b);
    Console.WriteLine(ND.TensorDot<int>(a, b));
}
static void Test1()
{
    var a = ND.Arange(15).Reshape(5, 3);
    var b = ND.Arange(4, 3);
    var c = a + 2 * b;
    Console.WriteLine(a);
    Console.WriteLine(b);
    Console.WriteLine(c);
}
public NDarray<Type> Update(NDarray<Type> w, NDarray<Type> g)
{
    if (wUpdt == null)
    {
        wUpdt = ND.Zeros<Type>(w.Shape);
    }

    // Exponential moving average of the gradient, then a plain SGD step.
    wUpdt = momentum * wUpdt + (1 - momentum) * g;
    return w - lr * wUpdt;
}
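// Scalar sketch of the rule above (hypothetical helper): the gradient is
// smoothed by an exponential moving average before the learning-rate step,
//   v <- momentum * v + (1 - momentum) * g;  w <- w - lr * v.
static double MomentumSgdSketch()
{
    double v = 0.0, w = 1.0, lr = 0.1, momentum = 0.9;
    foreach (var g in new[] { 0.5, 0.5, 0.5 })
    {
        v = momentum * v + (1 - momentum) * g; // first pass: v = 0.05
        w -= lr * v;                           // first pass: w moves by only lr * v = 0.005
    }
    return w;
}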
public void Initialize(IOptimizer<U> optimizer)
{
    wOpt = optimizer.Clone();
    bOpt = optimizer.Clone();
    double lim = 3.0 / Math.Sqrt(InputShape[0]);
    weight = ND.Uniform(-lim, lim, InputShape[0], OutputShape[0]).Cast<U>();
    biases = new NDarray<double>(1, OutputShape[0]).Cast<U>();
    wTmp = new NDarray<double>(weight.Shape).Cast<U>();
}
public void Initialize(IOptimizer<Type> optimizer)
{
    weightsOptmz = optimizer.Clone();
    double lim = 3.0 / Math.Sqrt(InputShape[0]);
    weights = ND.Uniform(-lim, lim, InputShape[0], OutputShape[0]).Cast<Type>();

    if (useBiases)
    {
        biasesOptmz = optimizer.Clone();
        biases = ND.Zeros<Type>(1, OutputShape[0]);
    }
}
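// The limit above scales the uniform range by 1/sqrt(fan_in), in the spirit
// of LeCun/Glorot fan-in initialization (the constant 3 is this codebase's
// choice). A standalone sketch using System.Random as the uniform source:
static double[] UniformFanInSketch(int fanIn, int fanOut, Random rng)
{
    double lim = 3.0 / Math.Sqrt(fanIn);
    return Enumerable.Range(0, fanIn * fanOut)              // requires System.Linq
                     .Select(_ => rng.NextDouble() * 2 * lim - lim)
                     .ToArray();
}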
public double Func(NDarray<double> y, NDarray<double> p)
{
    var y0 = ND.ArgmaxAxis(y, -1);
    var p0 = ND.ArgmaxAxis(p, -1);
    NDarray<double> eq = new NDarray<double>(y0.Shape);
    for (int i = 0; i < eq.Count; ++i)
    {
        eq.Data[i] = Math.Abs(y0.Data[i] - p0.Data[i]) < 1e-6 ? 1.0 : 0.0;
    }

    return eq.Data.Average();
}
public NDarray<Type> Forward(NDarray<Type> X, bool isTraining = true)
{
    IsTraining = isTraining;
    LayerInput = X.Copy;
    NDarray<Type> X0 = useBiases
        ? ND.TensorDot<Type>(X, weights) + biases
        : ND.TensorDot<Type>(X, weights);
    if (activationLayer == null)
    {
        return X0;
    }

    return activationLayer.Forward(X0);
}
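// Standalone sketch (hypothetical helper on plain arrays) of the affine map
// the forward pass above computes for 2-D inputs:
//   Y[n,k] = sum_f X[n,f] * W[f,k] + b[k].
static double[,] DenseForwardSketch(double[,] X, double[,] W, double[] b)
{
    int n = X.GetLength(0), f = X.GetLength(1), k = W.GetLength(1);
    var Y = new double[n, k];
    for (int i = 0; i < n; ++i)
        for (int j = 0; j < k; ++j)
        {
            double s = b[j];
            for (int m = 0; m < f; ++m)
                s += X[i, m] * W[m, j];
            Y[i, j] = s;
        }
    return Y;
}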
static NDarray<V> DMSE<V>(NDarray<V> x, NDarray<V> y)
{
    if (typeof(V) == typeof(float))
    {
        var xf = x as NDarray<float>;
        var yf = y as NDarray<float>;
        return ND.ApplyFuncAB(xf, yf, (a, b) => b - a) as NDarray<V>;
    }

    if (typeof(V) == typeof(double))
    {
        var xd = x as NDarray<double>;
        var yd = y as NDarray<double>;
        return ND.ApplyFuncAB(xd, yd, (a, b) => b - a) as NDarray<V>;
    }

    throw new ArgumentException($"{typeof(V).Name} is not supported. Only float or double");
}
static void Test1()
{
    Utils.DebugNumpy = Utils.DbgLvlAll;
    var a0 = ND.Uniform(0, 10, 4, 6);
    Console.WriteLine(a0);
    var a1 = ND.Uniform(0, 10, 4, 6);
    Console.WriteLine(a1);

    var a = Variable.CreateNDarray<int>("a");
    var ex = a * 3 + 2;
    Console.WriteLine(ex.Evaluate(("a", a0)));
    Console.WriteLine(ex.Evaluate(("a", a1)));
}
// TODO: maybe move to a separate class
private async Task<List<DBTableInfo>> GetAllDBTableInfos(string database)
{
    List<string> tableList = new List<string>();
#if DEBUG
    tableList = new List<string>(); // Clear, because on Windows the capitalization of tables is different and that currently breaks some SQL queries
#endif
    var queryResult = await SQLHelper.GetQueryResultsPostgreSQL(Context,
        @"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'",
        database, true, 50);

    // Add tables in case they aren't in the list above, in their correct order
    foreach (var item in queryResult.Data)
    {
        tableList.Add(item.ElementAt(0));
    }

    //;$"PRAGMA table_info('{item}')";
    //PRAGMA foreign_key_list('DiscordChannels');
    string text = "";
    string header = "";
    List<DBTableInfo> DbTableInfos = new List<DBTableInfo>();
    List<List<ForeignKeyInfo>> ForeignKeyInfos = new List<List<ForeignKeyInfo>>();
    foreach (var table in tableList)
    {
        string query = $@"SELECT data_type, column_name, is_nullable, is_identity FROM information_schema.columns WHERE table_schema = 'public' AND table_name = '{table}'";
        var data = await SQLHelper.GetQueryResultsPostgreSQL(Context, query, database);
        string getPrimaryKey = $@"SELECT c.column_name, c.data_type, constraint_type FROM information_schema.table_constraints tc JOIN information_schema.constraint_column_usage AS ccu USING (constraint_schema, constraint_name) JOIN information_schema.columns AS c ON c.table_schema = tc.constraint_schema AND tc.table_name = c.table_name AND ccu.column_name = c.column_name WHERE tc.table_name = '{table}' AND constraint_type = 'PRIMARY KEY';";
public NDarray<U> Backward(NDarray<U> accumGrad)
{
    // Keep a copy of the pre-update weights for the input gradient below.
    for (int i = 0; i < wTmp.Count; ++i)
    {
        wTmp.Data[i] = weight.Data[i];
    }

    if (IsTraining)
    {
        var gW = ND.GemmTAB(LastInput, accumGrad);
        var gB = ND.SumAxis(accumGrad, 0, true);
        wOpt.Update(weight, gW);
        bOpt.Update(biases, gB);
    }

    return ND.GemmATB(accumGrad, wTmp);
}
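// Shape check for the pass above (batch N, F inputs, K outputs):
//   LastInput is N x F and accumGrad is N x K, so
//   gW = LastInput^T . accumGrad        -> F x K  (matches weight),
//   gB = sum over the batch axis        -> 1 x K  (matches biases),
//   ND.GemmATB(accumGrad, wTmp) = accumGrad . W^T -> N x F (matches the layer input).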
static void Test2()
{
    var a = ND.Uniform<double>(0, 1, 4, 1);
    var b = ND.Uniform<double>(0, 1, 4, 1);
    Func<NDview<double>, NDview<double>> f = x => -a * ND.Log(x) + (1 - a) * ND.Log(1 - x);
    Func<NDview<double>, double, NDview<double>> df = (x, h) => (f(x + h) - f(x)) / h;
    var c = f(b);
    var d = -a / b - (1 - a) / (1 - b); // analytic derivative of f
    var e = df(b, 1e-12);               // forward-difference approximation
    Console.WriteLine(a);
    Console.WriteLine(b);
    Console.WriteLine(c);
    Console.WriteLine(d);
    Console.WriteLine(e);
    Console.WriteLine(ND.Abs(d - e));   // elementwise error between the two
}
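// Caveat on the step size above: with doubles, a forward difference at
// h = 1e-12 is dominated by floating-point cancellation; h near 1e-8, or a
// central difference with h near 1e-6, is usually much closer to the analytic
// value. A scalar sketch of the central variant (hypothetical helper):
static double CentralDiffSketch(Func<double, double> f, double x, double h = 1e-6)
    => (f(x + h) - f(x - h)) / (2 * h);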
public static (NDarray<U>, NDarray<U>, NDarray<U>, NDarray<U>) IrisDataset<U>(double ratio)
{
    var raw = File.ReadAllLines("datasets/iris.csv").ToArray();
    var data = raw.SelectMany(l => l.Split(',')).Select(double.Parse).ToArray();
    var nDarray = ND.CreateNDarray(data: data, shape: new int[] { -1, 7 }); // 4 features + 3 one-hot label columns

    // Split rows into train/test, then columns into features (4) and labels (3).
    var idx0 = (int)(nDarray.Shape[0] * ratio);
    (var train, var test) = ND.Split(nDarray, axis: 0, idx: idx0);
    (var trainX, var trainY) = ND.Split(train, axis: 1, idx: 4);
    (var testX, var testY) = ND.Split(test, axis: 1, idx: 4);

    // Scale every feature by the per-column maximum over both splits.
    var vmax = ND.Max(ND.MaxAxis(trainX.ToExpr(), axis: 0, keepdims: true), ND.MaxAxis(testX.ToExpr(), axis: 0, keepdims: true));
    trainX = trainX.ToExpr() / vmax;
    testX = testX.ToExpr() / vmax;

    return (trainX.Cast<U>(), trainY.Cast<U>(), testX.Cast<U>(), testY.Cast<U>());
}
static void Test5()
{
    var a = ND.Arange(0, 24).Reshape(2, -1, 3);
    var b = ND.Arange(24, 12).Reshape(2, -1, 3);
    Console.WriteLine(a);
    Console.WriteLine(b);
    Console.WriteLine(ND.Concatene(a, b, 1));

    //var a = ND.Arange(1, 8).Reshape(4, -1).Copy;
    //var b = ND.Arange(1, 4).Reshape(4, -1).Copy;
    //Console.WriteLine(a);
    //Console.WriteLine(b);
    //Utils.DebugNumpy = Utils.DbgNo;
    //var allBatch = ND.BatchIterator(a, b, 2, true);
    //foreach (var batch in allBatch)
    //    Console.WriteLine(ND.HConcat<int>(batch.Item1, batch.Item2));
}
public NDarray<double> Func(NDarray<double> X)
{
    int axis = X.Shape.Length - 1;
    int shapeAxis = X.Shape[axis];

    // Subtract the per-row max before exponentiating, for numerical stability.
    var mx = ND.MaxAxis(X, axis, true);
    double[] data0 = new double[X.Count];
    for (int i = 0; i < X.Count; ++i)
    {
        data0[i] = Math.Exp(X.Data[i] - mx.Data[i / shapeAxis]);
    }

    var ex = new NDarray<double>(data: data0, shape: X.Shape);
    var sx = ND.SumAxis(ex, axis, true);
    for (int i = 0; i < X.Count; ++i)
    {
        ex.Data[i] /= sx.Data[i / shapeAxis];
    }

    return ex;
}
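// Why the max is subtracted above: exp(x - m) / sum(exp(x - m)) is identical
// to exp(x) / sum(exp(x)) for any constant m, and choosing m = max(x) keeps
// exp() from overflowing. A standalone double[] sketch (hypothetical helper):
static double[] SoftmaxSketch(double[] x)
{
    double m = x.Max();                                    // requires System.Linq
    double[] e = x.Select(v => Math.Exp(v - m)).ToArray();
    double s = e.Sum();
    return e.Select(v => v / s).ToArray();
}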
static NDarray<float> Softmaxfloat(NDarray<float> X)
{
    int axis = X.Shape.Length - 1;
    int shapeAxis = X.Shape[axis];
    var mx = ND.MaxAxis(X, axis, true);
    float[] data0 = new float[X.Count];
    for (int i = 0; i < X.Count; ++i)
    {
        data0[i] = (float)Math.Exp(X.Data[i] - mx.Data[i / shapeAxis]);
    }

    var ex = new NDarray<float>(data: data0, shape: X.Shape);
    var sx = ND.SumAxis(ex, axis, true);
    for (int i = 0; i < X.Count; ++i)
    {
        ex.Data[i] /= sx.Data[i / shapeAxis];
    }

    return ex;
}
static void TestXor<U>(bool summary = false)
{
    Console.WriteLine($"Hello World! Xor MLP. Backend NDarray<{typeof(U).Name}>");
    Utils.DebugNumpy = Utils.DbgNo;

    var Xdata = ND.CreateNDarray(new double[4, 2] { { 0, 0 }, { 1, 0 }, { 0, 1 }, { 1, 1 } }).Cast<U>();
    var Ydata = ND.CreateNDarray(new double[4, 1] { { 0 }, { 1 }, { 1 }, { 0 } }).Cast<U>();

    var net = new Network<U>(new SGD<U>(lr: 0.1), new CrossEntropyLoss<U>(), new RoundAccuracy<U>());
    net.AddLayer(new DenseLayer<U>(8, 2));
    net.AddLayer(new TanhLayer<U>());
    net.AddLayer(new DenseLayer<U>(1));
    net.AddLayer(new SigmoidLayer<U>());
    if (summary)
    {
        net.Summary();
    }

    var sw = Stopwatch.StartNew();
    net.Fit(Xdata, Ydata, epochs: 1000, batchSize: 4, displayEpochs: 500);
    Console.WriteLine($"Time:{sw.ElapsedMilliseconds} ms");

    Console.WriteLine("Prediction");
    NDarray<U> pred = ND.Round(net.Predict(Xdata).ToExpr(), 6);
    for (int k = 0; k < Xdata.Shape[0]; ++k)
    {
        Console.WriteLine($"[{Xdata.DataAtIdx(k).Glue()}] = [{Ydata.DataAtIdx(k).Glue()}] -> {pred.DataAtIdx(k).Glue()}");
    }

    Console.WriteLine();
}
public void Fit(NDarray<U> trainX, NDarray<U> trainY, int epochs, int batchSize = 50, int displayEpochs = 1, bool shuffle = true)
{
    var sw = Stopwatch.StartNew();
    for (int k = 0; k <= epochs; ++k) // note: runs epochs + 1 passes, since k ranges over 0..epochs inclusive
    {
        List<double> losses = new List<double>();
        List<double> accs = new List<double>();
        var batch = ND.BatchIterator(trainX, trainY, batchSize, shuffle);
        foreach ((var X, var y) in batch)
        {
            (double vloss, double vacc) = TrainOnBatch(X, y);
            losses.Add(vloss);
            accs.Add(vacc);
        }

        if (k % displayEpochs == 0)
        {
            Console.WriteLine($"Epoch: {k,4}/{epochs}. loss:{losses.Average():0.000000} acc:{accs.Average():0.0000} Time:{sw.ElapsedMilliseconds,10} ms");
        }
    }

    Console.WriteLine($"Time:{sw.ElapsedMilliseconds} ms");
}
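// Hypothetical usage of Fit above, following the Xor examples elsewhere in
// this codebase (the layer, loss, and optimizer names are the ones used
// there; trainX and trainY are assumed NDarray<double> of matching shapes):
//
//   var net = new Network<double>(new SGD<double>(lr: 0.1),
//                                 new CrossEntropyLoss<double>(),
//                                 new RoundAccuracy<double>());
//   net.AddLayer(new DenseLayer<double>(8, 2));
//   net.AddLayer(new TanhLayer<double>());
//   net.AddLayer(new DenseLayer<double>(1));
//   net.AddLayer(new SigmoidLayer<double>());
//   net.Fit(trainX, trainY, epochs: 500, batchSize: 4, displayEpochs: 100);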
public override void Initialize(BaseOptimizer<U> optimizer)
{
    wOpt = optimizer.Clone();
    bOpt = optimizer.Clone();

    double lim = 3.0 / Math.Sqrt(InputShape[0]);
    var w0 = ND.Uniform(-lim, lim, InputShape[0], OutputShape[0]).Cast<U>();
    var b0 = ND.Zeros<U>(1, OutputShape[0]);
    paramsCount = w0.Count + b0.Count;
    weights = Variable.CreateNDarray<U>("w", w0);
    biases = Variable.CreateNDarray<U>("b", b0);
    wT = Variable.CreateNDarray<U>("wT");

    // Symbolic expressions for the forward pass and the gradients.
    fwExpr = ND.Dot(xFw, weights) + biases;
    gwExpr = ND.Dot(layInp.T, agBw);
    gbExpr = agBw.Sum(0, true);
    bwExpr = ND.Dot(agBw, wT);
}
public NDarray<Type> Backward(NDarray<Type> accumGrad)
{
    if (activationLayer != null)
    {
        accumGrad = activationLayer.Backward(accumGrad);
    }

    // Capture W^T before the optimizer step so the returned input gradient
    // uses the weights from the forward pass.
    NDarray<Type> W = weights.T;
    if (IsTraining)
    {
        var gW = ND.TensorDot(LayerInput.T, accumGrad);
        weights = weightsOptmz.Update(weights, gW);
        if (useBiases)
        {
            var gw0 = accumGrad.Sum(0, true);
            biases = biasesOptmz.Update(biases, gw0);
        }
    }

    return ND.TensorDot<Type>(accumGrad, W);
}