/// <summary>Registers <paramref name="c"/> as a follower unless it is already tracked.</summary>
public void AddFollower(Chainer c)
{
    if (HasFollower(c))
    {
        return; // already registered — keep the follower list duplicate-free
    }

    followers.Add(c);
}
/// <summary>
/// Cross-checks KelpNet's AveragePooling2D forward/backward results against
/// Chainer's AveragePooling2D on a randomly sized 4D input.
/// </summary>
public void AVGPoolingRandomTest()
{
    Python.Initialize();
    Chainer.Initialize();

    // Random batch/channel/spatial sizes and pooling hyperparameters.
    int batchCount = Mother.Dice.Next(1, 5);
    int chCount = Mother.Dice.Next(1, 5);
    int wideSize = Mother.Dice.Next(8, 32);
    int heightSize = Mother.Dice.Next(8, 32);
    int kWidth = Mother.Dice.Next(1, 5);
    int kHeight = Mother.Dice.Next(1, 5);
    int strideX = Mother.Dice.Next(1, 5);
    int strideY = Mother.Dice.Next(1, 5);
    int padX = Mother.Dice.Next(0, 5);
    int padY = Mother.Dice.Next(0, 5);

    // Standard pooling output size: floor((in - kernel + 2 * pad) / stride) + 1.
    int outputHeight = (int)Math.Floor((heightSize - kHeight + padY * 2.0) / strideY) + 1;
    int outputWidth = (int)Math.Floor((wideSize - kWidth + padX * 2.0) / strideX) + 1;

    Real[,,,] input = (Real[,,,])Initializer.GetRealNdArray(new[] { batchCount, chCount, heightSize, wideSize });
    Real[,,,] dummyGy = (Real[,,,])Initializer.GetRealNdArray(new[] { batchCount, chCount, outputHeight, outputWidth });

    //Chainer
    // Renamed from cMaxPooling2D: this test exercises *average* pooling.
    NChainer.AveragePooling2D<Real> cAvgPooling2D = new NChainer.AveragePooling2D<Real>(
        new[] { kHeight, kWidth },
        new[] { strideY, strideX },
        new[] { padY, padX });

    Variable<Real> cX = new Variable<Real>(Real.ToBaseNdArray(input));

    Variable<Real> cY = cAvgPooling2D.Forward(cX);
    cY.Grad = Real.ToBaseNdArray(dummyGy);
    cY.Backward();

    //KelpNet
    KelpNet.AveragePooling2D avgPooling2D = new KelpNet.AveragePooling2D(
        new[] { kWidth, kHeight },
        new[] { strideX, strideY },
        new[] { padX, padY });

    NdArray x = new NdArray(Real.ToRealArray(input), new[] { chCount, heightSize, wideSize }, batchCount);

    NdArray y = avgPooling2D.Forward(x)[0];
    y.Grad = Real.ToRealArray(dummyGy);
    y.Backward();

    Real[] cYdata = Real.ToRealArray((Real[,,,])cY.Data.Copy());
    Real[] cXgrad = Real.ToRealArray((Real[,,,])cX.Grad.Copy());

    // Tolerance for float comparisons.
    double delta = 0.00001;

    Assert.AreEqual(cYdata.Length, y.Data.Length);
    Assert.AreEqual(cXgrad.Length, x.Grad.Length);

    //y
    // BUGFIX: the original guard (cYdata[i] < float.Epsilon && cYdata[i] > -float.Epsilon)
    // only asserted elements that were within ±epsilon of zero, so almost no output
    // value was actually verified. Compare every element, as the sibling tests do.
    for (int i = 0; i < y.Data.Length; i++)
    {
        Assert.AreEqual(cYdata[i], y.Data[i], delta);
    }

    //x.grad
    for (int i = 0; i < x.Grad.Length; i++)
    {
        Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
    }
}
// Private no-op constructor; the unused overload flag exists only to give this
// constructor a distinct signature. Chains from no predecessor (base gets null).
private Expression(Chainer prev, bool overload) : base(null) { }
// Wraps an existing chainer as an executable argument, absorbing its
// transferable state via TryTakeAll.
private ExecArgument(Chainer arg) : base(arg) { TryTakeAll(arg); }
/// <summary>
/// Cross-checks KelpNet's LRN (local response normalization) forward/backward
/// against Chainer's LocalResponseNormalization on a random 4D input.
/// </summary>
public void LRNRandomTest()
{
    Python.Initialize();
    Chainer.Initialize();

    // Random LRN hyperparameters (window size n, bias k) and input shape.
    int n = Mother.Dice.Next(2, 7);
    float k = (float)Mother.Dice.NextDouble() * 3;
    int batchCount = Mother.Dice.Next(1, 5);
    int ch = Mother.Dice.Next(1, 5);
    int width = Mother.Dice.Next(1, 16);
    int height = Mother.Dice.Next(1, 16);

    Real[,,,] input = (Real[,,,])Initializer.GetRealNdArray(new[] { batchCount, ch, height, width });
    Real[,,,] dummyGy = (Real[,,,])Initializer.GetRealNdArray(new[] { batchCount, ch, height, width });

    //chainer
    NChainer.LocalResponseNormalization<Real> cLocalResponseNormalization = new NChainer.LocalResponseNormalization<Real>(n, k);

    Variable<Real> cX = new Variable<Real>(Real.ToBaseNdArray(input));

    Variable<Real> cY = cLocalResponseNormalization.Forward(cX);
    cY.Grad = Real.ToBaseNdArray(dummyGy);
    cY.Backward();

    //kelpnet
    KelpNet.LRN lrn = new LRN(n, k);

    // KelpNet shapes exclude the batch dimension; batch count is passed separately.
    NdArray x = new NdArray(Real.ToRealArray(input), new[] { ch, height, width }, batchCount);

    NdArray y = lrn.Forward(x)[0];
    y.Grad = Real.ToRealArray(dummyGy);
    y.Backward();

    Real[] cYdata = Real.ToRealArray((Real[,,,])cY.Data);
    Real[] cXgrad = Real.ToRealArray((Real[,,,])cX.Grad);

    // Tolerance for float comparisons.
    double delta = 0.00001;

    //y
    Assert.AreEqual(cYdata.Length, y.Data.Length);
    for (int i = 0; i < y.Data.Length; i++)
    {
        Assert.AreEqual(cYdata[i], y.Data[i], delta);
    }

    //x.grad
    Assert.AreEqual(cXgrad.Length, x.Grad.Length);
    for (int i = 0; i < x.Grad.Length; i++)
    {
        Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
    }
}
// FULL JOIN node over a table reference; all wiring happens in the base constructor.
internal FullJoinChainer(Chainer prev, Table table) : base(prev, table) { }
// Wraps an existing chainer as a grouping argument, absorbing its
// transferable state via TryTakeAll.
private GroupingArgument(Chainer arg) : base(arg) { TryTakeAll(arg); }
/// <summary>
/// Generic-API variant: cross-checks KelpNet's LRN&lt;Real&gt; forward/backward
/// against Chainer's LocalResponseNormalization on a random 4D input.
/// </summary>
public void LRNRandomTest()
{
    Python.Initialize();
    Chainer.Initialize();

    // Random LRN hyperparameters (window size n, bias k) and input shape.
    int n = Mother.Dice.Next(2, 7);
    float k = (float)Mother.Dice.NextDouble() * 3;
    int batchCount = Mother.Dice.Next(1, 5);
    int ch = Mother.Dice.Next(1, 5);
    int width = Mother.Dice.Next(1, 16);
    int height = Mother.Dice.Next(1, 16);

    Real[,,,] input = Initializer.GetRandomValues<Real[,,,]>(batchCount, ch, height, width);
    Real[,,,] dummyGy = Initializer.GetRandomValues<Real[,,,]>(batchCount, ch, height, width);

    //chainer
    LocalResponseNormalization<Real> cLocalResponseNormalization = new LocalResponseNormalization<Real>(n, k);

    Variable<Real> cX = new Variable<Real>(input);

    Variable<Real> cY = cLocalResponseNormalization.Forward(cX);
    cY.Grad = dummyGy;
    cY.Backward();

    //kelpnet
    LRN<Real> lrn = new LRN<Real>(n, k);

    // asBatch: true — first dimension of input is treated as the batch.
    NdArray<Real> x = new NdArray<Real>(input, asBatch: true);

    NdArray<Real> y = lrn.Forward(x)[0];
    y.Grad = dummyGy.Flatten();
    y.Backward();

    Real[] cYdata = ((Real[,,,])cY.Data).Flatten();
    Real[] cXgrad = ((Real[,,,])cX.Grad).Flatten();

    // Tolerance for float comparisons.
    Real delta = 0.00001f;

    //y
    Assert.AreEqual(cYdata.Length, y.Data.Length);
    for (int i = 0; i < y.Data.Length; i++)
    {
        Assert.AreEqual(cYdata[i], y.Data[i], delta);
    }

    //x.grad
    Assert.AreEqual(cXgrad.Length, x.Grad.Length);
    for (int i = 0; i < x.Grad.Length; i++)
    {
        Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
    }
}
/// <summary>
/// Aliasing node that follows a pivot: validates the alias, then registers this
/// node as the alias holder on the preceding <see cref="PivotChainer"/>.
/// </summary>
internal PivotAsChainer(Chainer prev, string alias) : base(prev, alias)
{
    CheckNullOrEmptyAliasAndThrow(alias);

    var pivot = (PivotChainer)Prev;
    pivot.SetAlias(this);
}
/// <summary>
/// Generic-API variant: cross-checks KelpNet's Swish&lt;Real&gt; forward/backward
/// (including the learned beta gradient) against Chainer's Swish.
/// </summary>
public void SwishRandomTest()
{
    Python.Initialize();
    Chainer.Initialize();

    // Random layer width, batch size, and beta parameter.
    int ioCount = Mother.Dice.Next(1, 50);
    int batchCount = Mother.Dice.Next(1, 5);

    Real[,] input = Initializer.GetRandomValues<Real[,]>(batchCount, ioCount);
    Real[,] dummyGy = Initializer.GetRandomValues<Real[,]>(batchCount, ioCount);

    Real beta = (Real)Mother.Dice.NextDouble();

    //Chainer
    NChainer.Swish<Real> cSwish = new NChainer.Swish<Real>(new[] { ioCount }, beta);

    Variable<Real> cX = new Variable<Real>(input);

    Variable<Real> cY = cSwish.Forward(cX);
    cY.Grad = dummyGy;
    cY.Backward();

    //KelpNet
    Swish<Real> swish = new Swish<Real>(new[] { ioCount }, beta);

    // asBatch: true — first dimension of input is treated as the batch.
    NdArray<Real> x = new NdArray<Real>(input, asBatch: true);

    NdArray<Real> y = swish.Forward(x)[0];
    y.Grad = dummyGy.Flatten();
    y.Backward();

    Real[] cYdata = ((Real[,])cY.Data).Flatten();
    Real[] cXgrad = ((Real[,])cX.Grad).Flatten();
    Real[] cbgrad = (Real[])cSwish.beta.Grad;

    // Tolerance for float comparisons.
    Real delta = 0.00001f;

    //y
    Assert.AreEqual(cYdata.Length, y.Data.Length);
    for (int i = 0; i < y.Data.Length; i++)
    {
        Assert.AreEqual(cYdata[i], y.Data[i], delta);
    }

    //x.grad
    Assert.AreEqual(cXgrad.Length, x.Grad.Length);
    for (int i = 0; i < x.Grad.Length; i++)
    {
        Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
    }

    //b.grad
    Assert.AreEqual(cbgrad.Length, swish.Beta.Grad.Length);
    for (int i = 0; i < swish.Beta.Grad.Length; i++)
    {
        Assert.AreEqual(cbgrad[i], swish.Beta.Grad[i], delta);
    }
}
/// <summary>
/// Aliasing node (numeric alias) that follows a FROM table; registers itself as
/// the alias holder on the preceding <see cref="TableChainer"/>.
/// </summary>
internal FromAsChainer(Chainer prev, int alias) : base(prev, alias)
{
    var table = (TableChainer)prev;
    table.SetAlias(this);
}
/// <summary>
/// Cross-checks KelpNet's BatchNormalization against Chainer's, in the requested
/// train/finetune mode, comparing outputs plus x/gamma/beta gradients.
/// Both sides are seeded with identical gamma/beta/avgMean/avgVar values.
/// </summary>
public void TrainTest(bool isTtrain, bool finetune)
{
    Python.Initialize();
    Chainer.Initialize();

    int batchCount = Mother.Dice.Next(1, 50);
    int ioCount = Mother.Dice.Next(1, 50);

    Real[,] input = (Real[,])Initializer.GetRealNdArray(new[] { batchCount, ioCount });
    Real[,] dummyGy = (Real[,])Initializer.GetRealNdArray(new[] { batchCount, ioCount });

    // Shared initial parameter/statistics values for both frameworks.
    Real[] gamma = Initializer.GetRealArray(ioCount);
    Real[] beta = Initializer.GetRealArray(ioCount);
    Real[] avgMean = Initializer.GetRealArray(ioCount);
    Real[] avgVar = Initializer.GetRealArray(ioCount);

    //Chainer
    // Chainer's train mode is global configuration, not a layer argument.
    Chainer.Config["train"] = isTtrain;

    NChainer.BatchNormalization<Real> cBatchNormalization = new NChainer.BatchNormalization<Real>(ioCount, dtype: Real.Type);
    cBatchNormalization.gamma = new Variable<Real>(Real.ToBaseNdArray(gamma));
    cBatchNormalization.beta = new Variable<Real>(Real.ToBaseNdArray(beta));
    cBatchNormalization.avgMean = Real.ToBaseNdArray(avgMean);
    cBatchNormalization.avgVar = Real.ToBaseNdArray(avgVar);

    Variable<Real> cX = new Variable<Real>(Real.ToBaseNdArray(input));

    Variable<Real> cY = cBatchNormalization.Forward(cX, finetune);
    cY.Grad = Real.ToBaseNdArray(dummyGy);
    cY.Backward();

    //KelpNet
    KelpNet.BatchNormalization batchNormalization = new BatchNormalization(ioCount, train: isTtrain, finetune: finetune);
    batchNormalization.Gamma.Data = Real.ToRealArray(gamma);
    batchNormalization.Beta.Data = Real.ToRealArray(beta);
    batchNormalization.AvgMean.Data = Real.ToRealArray(avgMean);
    batchNormalization.AvgVar.Data = Real.ToRealArray(avgVar);

    NdArray x = new NdArray(Real.ToRealArray(input), new[] { ioCount }, batchCount);

    NdArray y = batchNormalization.Forward(x)[0];
    y.Grad = Real.ToRealArray(dummyGy);
    y.Backward();

    Real[] cYdata = Real.ToRealArray((Real[,])cY.Data);
    Real[] cXgrad = Real.ToRealArray((Real[,])cX.Grad);
    Real[] cGammaGrad = Real.ToRealArray((Real[])cBatchNormalization.gamma.Grad);
    Real[] cBetaGrad = Real.ToRealArray((Real[])cBatchNormalization.beta.Grad);

    // Tolerance for float comparisons.
    double delta = 0.00001;

    //y
    Assert.AreEqual(cYdata.Length, y.Data.Length);
    for (int i = 0; i < y.Data.Length; i++)
    {
        Assert.AreEqual(cYdata[i], y.Data[i], delta);
    }

    //x.grad
    Assert.AreEqual(cXgrad.Length, x.Grad.Length);
    for (int i = 0; i < x.Grad.Length; i++)
    {
        Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
    }

    //gamma.grad
    Assert.AreEqual(cGammaGrad.Length, batchNormalization.Gamma.Grad.Length);
    for (int i = 0; i < batchNormalization.Gamma.Grad.Length; i++)
    {
        Assert.AreEqual(cGammaGrad[i], batchNormalization.Gamma.Grad[i], delta);
    }

    //beta.grad
    Assert.AreEqual(cBetaGrad.Length, batchNormalization.Beta.Grad.Length);
    for (int i = 0; i < batchNormalization.Beta.Grad.Length; i++)
    {
        Assert.AreEqual(cBetaGrad[i], batchNormalization.Beta.Grad[i], delta);
    }
}
/// <summary>
/// Cross-checks KelpNet's Concat forward/backward against Chainer's Concat:
/// concatenates two random 4D arrays along the last axis and compares the
/// joined output plus both input gradients.
/// </summary>
public void ConcatRandomTest()
{
    Python.Initialize();
    Chainer.Initialize();

    int batchCount = Mother.Dice.Next(1, 5);
    int ch = Mother.Dice.Next(1, 5);
    int widthA = Mother.Dice.Next(1, 16);
    int widthB = Mother.Dice.Next(1, 16);
    int height = Mother.Dice.Next(1, 16);
    int axis = 3; // concatenate along the width axis (batch, ch, height, width)

    Real[,,,] inputA = (Real[,,,])Initializer.GetRealNdArray(new[] { batchCount, ch, height, widthA });
    Real[,,,] inputB = (Real[,,,])Initializer.GetRealNdArray(new[] { batchCount, ch, height, widthB });
    Real[,,,] dummyGy = (Real[,,,])Initializer.GetRealNdArray(new[] { batchCount, ch, height, widthA + widthB });

    //chainer
    NChainer.Concat<Real> cConcat = new NChainer.Concat<Real>(axis);

    Variable<Real> cX = new Variable<Real>(Real.ToBaseNdArray(inputA));
    Variable<Real> cY = new Variable<Real>(Real.ToBaseNdArray(inputB));

    Variable<Real> cZ = cConcat.Forward(cX, cY);
    cZ.Grad = Real.ToBaseNdArray(dummyGy);
    cZ.Backward();

    //KelpNet
    // axis - 1: unlike Chainer, KelpNet does not implicitly treat dimension 0 as the batch.
    KelpNet.Concat concat = new Concat(axis - 1);

    NdArray x = new NdArray(Real.ToRealArray(inputA), new[] { ch, height, widthA }, batchCount);
    NdArray y = new NdArray(Real.ToRealArray(inputB), new[] { ch, height, widthB }, batchCount);

    NdArray z = concat.Forward(x, y)[0];
    z.Grad = Real.ToRealArray(dummyGy);
    z.Backward();

    Real[] cZdata = Real.ToRealArray((Real[,,,])cZ.Data);

    // Copy is required here.
    Real[] cXgrad = Real.ToRealArray((Real[,,,])cX.Grad.Copy());
    Real[] cYgrad = Real.ToRealArray((Real[,,,])cY.Grad.Copy());

    // Tolerance for float comparisons.
    double delta = 0.00001;

    //z
    Assert.AreEqual(cZdata.Length, z.Data.Length);
    // BUGFIX: iterate the full concatenated output (z.Data.Length); the original
    // loop bound was y.Data.Length, which only covered part of z.
    for (int i = 0; i < z.Data.Length; i++)
    {
        Assert.AreEqual(cZdata[i], z.Data[i], delta);
    }

    //x.grad
    Assert.AreEqual(cXgrad.Length, x.Grad.Length);
    for (int i = 0; i < x.Grad.Length; i++)
    {
        Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
    }

    //y.grad
    Assert.AreEqual(cYgrad.Length, y.Grad.Length);
    for (int i = 0; i < y.Grad.Length; i++)
    {
        Assert.AreEqual(cYgrad[i], y.Grad[i], delta);
    }
}
// Private no-op constructor; chains from no predecessor (base gets null).
// NOTE(review): the prev parameter is unused here — presumably kept for
// signature compatibility with sibling constructors; confirm before removing.
private Udf(Chainer prev) : base(null) { }
/// <summary>
/// Cross-checks KelpNet's NdArray operator overloads (+, *, -, /) with mixed
/// scalar/array operands against Chainer's Variable operators, comparing the
/// gradients produced by Backward after each expression.
/// NOTE(review): Grad is never reset between the four Backward calls, so the
/// compared gradients appear to accumulate on both sides — confirm both
/// frameworks accumulate identically, otherwise later comparisons are suspect.
/// </summary>
public void RandomTest()
{
    Python.Initialize();
    Chainer.Initialize();

    //Make random value.
    Real[] val = { 1 + Mother.Dice.Next() };

    //Chainer
    Variable<Real> cX = new Variable<Real>(Real.ToBaseArray(val));

    //Add
    Variable<Real> cAdd = 2 + cX + cX + 2;
    cAdd.Backward();
    Real[] pyGadd = (Real[])cX.Grad;

    //Mul
    Variable<Real> cMul = 2 * cX * cX * 3;
    cMul.Backward();
    Real[] pyGmul = (Real[])cX.Grad;

    //Sub
    Variable<Real> cSub = 30 - cX - cX - 2;
    cSub.Backward();
    Real[] pyGsub = (Real[])cX.Grad;

    //Div
    Variable<Real> cDiv = 50 / cX / cX / 2;
    cDiv.Backward();
    Real[] pyGdiv = (Real[])cX.Grad;

    //KelpNet
    NdArray x = new NdArray(val);

    //Add
    NdArray add = 2 + x + x + 2;
    add.Backward();
    Real[] gadd = x.Grad.ToArray(); // this ToArray takes a snapshot copy of the gradient

    //mul
    NdArray mul = 2 * x * x * 3;
    mul.Backward();
    Real[] gmul = x.Grad.ToArray();

    //sub
    NdArray sub = 30 - x - x - 2;
    sub.Backward();
    Real[] gsub = x.Grad.ToArray();

    //div
    NdArray div = 50 / x / x / 2;
    div.Backward();
    Real[] gdiv = x.Grad.ToArray();

    //Check
    CollectionAssert.AreEqual(pyGadd, gadd);
    CollectionAssert.AreEqual(pyGmul, gmul);
    CollectionAssert.AreEqual(pyGsub, gsub);
    CollectionAssert.AreEqual(pyGdiv, gdiv);
}
/// <summary>
/// Aliasing node that follows an APPLY clause: validates the alias, then
/// registers this node on the preceding <see cref="TableChainer"/> as a
/// client-supplied alias.
/// </summary>
internal ApplyAsChainer(Chainer prev, string alias) : base(prev, alias)
{
    CheckNullOrEmptyAliasAndThrow(alias);

    var table = (TableChainer)prev;
    table.SetAliasByClient(this);
}
/// <summary>
/// CASE check-value node: a null check value is normalized to
/// <c>Designer.Null</c> before its database/build state is absorbed.
/// </summary>
internal CaseValueChainer(Chainer prev, ScalarArgument checkValue) : base(prev)
{
    if (checkValue == null)
    {
        checkValue = Designer.Null;
    }

    TryTake(checkValue, TakeProperty.Database, TakeProperty.Build);
}
/// <summary>
/// Terminal node that assembles a procedure: installs the Build delegate that
/// renders the full SQL (optionally wrapped in an embedded transaction), then
/// detects table-valued parameters / inliners and pre-compiles when possible.
/// </summary>
internal Procedure(Chainer prev, bool isInternal = false) : base(prev, ObjectType.Procedure, isInternal)
{
    var root = GetRoot();

    Build = (buildContext, buildArgs) =>
    {
        // Propagate a failure captured on the executable before building anything.
        if (buildArgs.Executable != null && buildArgs.Executable.Exception != null)
        {
            throw buildArgs.Executable.Exception;
        }

        // Reuse previously cached (compiled) SQL.
        if (Sql != null)
        {
            return(Sql);
        }

        StringBuilder sql = new StringBuilder();

        // append transaction begin block
        if (root.IsEmbeddedTransaction)
        {
            BeginEmbeddedTransactionWrapper(root, sql, Text.Reserved.QtTransactionCount, Text.Reserved.QtTransactionSave);
        }

        if (HasTVP)
        {
            InjectTableVariables(sql, buildArgs, false);
        }

        // build
        sql.Append(BuildChain(buildContext, buildArgs));
        sql.TrimEnd();

        if (!(prev is EndChainer))
        {
            sql.TerminateSingle();
        }

        // append transaction end block
        if (root.IsEmbeddedTransaction)
        {
            EndEmbeddedTransactionWrapper(sql, Text.Reserved.QtTransactionCount, Text.Reserved.QtTransactionSave);
        }

        // cache compiled code only, non-compiled should not be cached
        if (buildArgs.Executable == null)
        {
            Sql = sql.ToString();
        }

        CompiledSql = Sql ?? sql.ToString();
        return(CompiledSql);
    };

    CheckAndThrow();

    // detect TVP
    if (root.AllParams.Where(param => param.DT == DT.TableVariable).Any())
    {
        HasTVP = true;
    }

    // compiled property is set to false if a procedure has TVP or inliners
    if (HasTVP || root.AllParams.Where(param => param.DT.IsInliner()).Any())
    {
        compiled = false;
    }

    // Eagerly build & cache when the procedure is fully compilable.
    if (compiled)
    {
        Build(new BuildContext(this), new BuildArgs(null));
    }
}
// FULL JOIN node over an open view; all wiring happens in the base constructor.
internal FullJoinChainer(Chainer prev, IOpenView table) : base(prev, table) { }
// ORDER BY node; delegates processing of the ordering columns to _Body.
internal OrderByChainer(Chainer prev, OrderingArgument[] columns) : base(prev) { _Body(columns); }
/// <summary>
/// Generic-API variant: cross-checks KelpNet's Concat&lt;Real&gt; forward/backward
/// against Chainer's Concat, comparing the joined output and both input gradients.
/// </summary>
public void ConcatRandomTest()
{
    Python.Initialize();
    Chainer.Initialize();

    int batchCount = Mother.Dice.Next(1, 5);
    int ch = Mother.Dice.Next(1, 5);
    int widthA = Mother.Dice.Next(1, 16);
    int widthB = Mother.Dice.Next(1, 16);
    int height = Mother.Dice.Next(1, 16);
    int axis = 3; // concatenate along the width axis (batch, ch, height, width)

    Real[,,,] inputA = Initializer.GetRandomValues<Real[,,,]>(batchCount, ch, height, widthA);
    Real[,,,] inputB = Initializer.GetRandomValues<Real[,,,]>(batchCount, ch, height, widthB);
    Real[,,,] dummyGy = Initializer.GetRandomValues<Real[,,,]>(batchCount, ch, height, widthA + widthB);

    //chainer
    NChainer.Concat<Real> cConcat = new NChainer.Concat<Real>(axis);

    Variable<Real> cX = new Variable<Real>(inputA);
    Variable<Real> cY = new Variable<Real>(inputB);

    Variable<Real> cZ = cConcat.Forward(cX, cY);
    cZ.Grad = dummyGy;
    cZ.Backward();

    //KelpNet
    // axis - 1: unlike Chainer, KelpNet does not implicitly treat dimension 0 as the batch.
    Concat<Real> concat = new Concat<Real>(axis - 1);

    NdArray<Real> x = new NdArray<Real>(inputA, asBatch: true);
    NdArray<Real> y = new NdArray<Real>(inputB, asBatch: true);

    NdArray<Real> z = concat.Forward(x, y)[0];
    z.Grad = dummyGy.Flatten();
    z.Backward();

    Real[] cZdata = ((Real[,,,])cZ.Data).Flatten();

    // Copy is required here.
    Real[] cXgrad = ((Real[,,,])cX.Grad.Copy()).Flatten();
    Real[] cYgrad = ((Real[,,,])cY.Grad.Copy()).Flatten();

    // Tolerance for float comparisons.
    Real delta = 0.00001f;

    //z
    Assert.AreEqual(cZdata.Length, z.Data.Length);
    // BUGFIX: iterate the full concatenated output (z.Data.Length); the original
    // loop bound was y.Data.Length, which only covered part of z.
    for (int i = 0; i < z.Data.Length; i++)
    {
        Assert.AreEqual(cZdata[i], z.Data[i], delta);
    }

    //x.grad
    Assert.AreEqual(cXgrad.Length, x.Grad.Length);
    for (int i = 0; i < x.Grad.Length; i++)
    {
        Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
    }

    //y.grad
    Assert.AreEqual(cYgrad.Length, y.Grad.Length);
    for (int i = 0; i < y.Grad.Length; i++)
    {
        Assert.AreEqual(cYgrad[i], y.Grad[i], delta);
    }
}
/// <summary>
/// Cross-checks KelpNet's Linear layer (optionally GPU-backed) against Chainer's
/// Linear using shared random weights/bias, comparing the output and the
/// x/W/b gradients after one backward pass.
/// </summary>
public void RandomTest(bool gpuEnable)
{
    Python.Initialize();
    Chainer.Initialize();

    int inputCount = Mother.Dice.Next(1, 50);
    int outputCount = Mother.Dice.Next(1, 50);
    int batchCount = Mother.Dice.Next(1, 5);

    Real[,] input = (Real[,])Initializer.GetRealNdArray(new[] { batchCount, inputCount });
    Real[,] dummyGy = (Real[,])Initializer.GetRealNdArray(new[] { batchCount, outputCount });
    Real[,] w = (Real[,])Initializer.GetRealNdArray(new[] { outputCount, inputCount });
    Real[] b = Initializer.GetRealArray(outputCount);

    //Chainer
    NChainer.Linear<Real> cLinear = new NChainer.Linear<Real>(inputCount, outputCount, false, Real.ToBaseNdArray(w), Real.ToBaseArray(b));

    Variable<Real> cX = new Variable<Real>(Real.ToBaseNdArray(input));

    Variable<Real> cY = cLinear.Forward(cX);
    cY.Grad = Real.ToBaseNdArray(dummyGy);
    cY.Backward();

    //KelpNet
    KelpNet.Linear linear = new KelpNet.Linear(inputCount, outputCount, false, w, b, gpuEnable: gpuEnable);

    NdArray x = new NdArray(Real.ToRealArray(input), new[] { inputCount }, batchCount);

    NdArray y = linear.Forward(x)[0];
    y.Grad = Real.ToRealArray(dummyGy);
    y.Backward();

    Real[] cYdata = Real.ToRealArray((Real[,])cY.Data);
    Real[] cXgrad = Real.ToRealArray((Real[,])cX.Grad);
    Real[] cWgrad = Real.ToRealArray((Real[,])cLinear.W.Grad);
    Real[] cbgrad = (Real[])cLinear.b.Grad;

    // Tolerance for float comparisons.
    double delta = 0.00001;

    //y
    Assert.AreEqual(cYdata.Length, y.Data.Length);
    for (int i = 0; i < y.Data.Length; i++)
    {
        Assert.AreEqual(cYdata[i], y.Data[i], delta);
    }

    //x.grad
    Assert.AreEqual(cXgrad.Length, x.Grad.Length);
    for (int i = 0; i < x.Grad.Length; i++)
    {
        Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
    }

    //W.grad
    Assert.AreEqual(cWgrad.Length, linear.Weight.Grad.Length);
    for (int i = 0; i < linear.Weight.Grad.Length; i++)
    {
        Assert.AreEqual(cWgrad[i], linear.Weight.Grad[i], delta);
    }

    //b.grad
    Assert.AreEqual(cbgrad.Length, linear.Bias.Grad.Length);
    for (int i = 0; i < linear.Bias.Grad.Length; i++)
    {
        Assert.AreEqual(cbgrad[i], linear.Bias.Grad[i], delta);
    }
}
/// <summary>
/// Cross-checks KelpNet's SGD optimizer against Chainer's: both sides run one
/// forward/backward/update cycle on identical Linear layers, then the updated
/// weights and biases are compared.
/// </summary>
public void SGDRandomTest()
{
    Python.Initialize();
    Chainer.Initialize();

    int inputCount = Mother.Dice.Next(2, 50);
    int outputCount = Mother.Dice.Next(2, 50);
    int batchCount = Mother.Dice.Next(1, 5);

    Real[,] input = Initializer.GetRandomValues<Real[,]>(batchCount, inputCount);
    Real[,] dummyGy = Initializer.GetRandomValues<Real[,]>(batchCount, outputCount);
    Real[,] w = Initializer.GetRandomValues<Real[,]>(outputCount, inputCount);
    Real[] b = Initializer.GetRandomValues<Real[]>(outputCount);

    //Chainer
    Linear<Real> cLinear = new Linear<Real>(inputCount, outputCount, false, w, b);

    NChainer.SGD<Real> cSgd = new NChainer.SGD<Real>();
    cSgd.Setup(cLinear);

    Variable<Real> cX = new Variable<Real>(input);

    Variable<Real> cY = cLinear.Forward(cX);
    cY.Grad = dummyGy;
    cY.Backward();

    cSgd.Update();

    //KelpNet
    CL.Linear<Real> linear = new CL.Linear<Real>(inputCount, outputCount, false, w, b);

    KelpNet.SGD<Real> sgd = new SGD<Real>();
    sgd.SetUp(linear);

    NdArray<Real> x = new NdArray<Real>(input, asBatch: true);

    NdArray<Real> y = linear.Forward(x)[0];
    y.Grad = dummyGy.Flatten();
    y.Backward();

    sgd.Update();

    // Post-update parameter values from the Chainer side.
    Real[] cW = ((Real[,])cLinear.W.Data).Flatten();
    Real[] cb = (Real[])cLinear.b.Data;

    // Tolerance for float comparisons.
    Real delta = 0.00001f;

    //W.grad
    Assert.AreEqual(cW.Length, linear.Weight.Data.Length);
    for (int i = 0; i < linear.Weight.Data.Length; i++)
    {
        Assert.AreEqual(cW[i], linear.Weight.Data[i], delta);
    }

    //b.grad
    Assert.AreEqual(cb.Length, linear.Bias.Data.Length);
    for (int i = 0; i < linear.Bias.Data.Length; i++)
    {
        Assert.AreEqual(cb[i], linear.Bias.Data[i], delta);
    }
}
/// <summary>
/// Cross-checks KelpNet's Swish forward/backward (including the learned beta
/// gradient) against Chainer's Swish on a random 2D input.
/// </summary>
public void SwishRandomTest()
{
    Python.Initialize();
    Chainer.Initialize();

    int ioCount = Mother.Dice.Next(1, 50);
    int batchCount = Mother.Dice.Next(1, 5);

    Real[,] input = (Real[,])Initializer.GetRealNdArray(new[] { batchCount, ioCount });
    Real[,] dummyGy = (Real[,])Initializer.GetRealNdArray(new[] { batchCount, ioCount });

    // Shared random beta parameter for both frameworks.
    Real beta = Mother.Dice.NextDouble();

    //Chainer
    NChainer.Swish<Real> cSwish = new NChainer.Swish<Real>(new[] { ioCount }, beta);

    Variable<Real> cX = new Variable<Real>(Real.ToBaseNdArray(input));

    Variable<Real> cY = cSwish.Forward(cX);
    cY.Grad = Real.ToBaseNdArray(dummyGy);
    cY.Backward();

    //KelpNet
    KelpNet.Swish swish = new KelpNet.Swish(new[] { ioCount }, beta);

    NdArray x = new NdArray(Real.ToRealArray(input), new[] { ioCount }, batchCount);

    NdArray y = swish.Forward(x)[0];
    y.Grad = Real.ToRealArray(dummyGy);
    y.Backward();

    Real[] cYdata = Real.ToRealArray((Real[,])cY.Data);
    Real[] cXgrad = Real.ToRealArray((Real[,])cX.Grad);
    Real[] cbgrad = (Real[])cSwish.beta.Grad;

    // Tolerance for float comparisons.
    double delta = 0.00001;

    //y
    Assert.AreEqual(cYdata.Length, y.Data.Length);
    for (int i = 0; i < y.Data.Length; i++)
    {
        Assert.AreEqual(cYdata[i], y.Data[i], delta);
    }

    //x.grad
    Assert.AreEqual(cXgrad.Length, x.Grad.Length);
    for (int i = 0; i < x.Grad.Length; i++)
    {
        Assert.AreEqual(cXgrad[i], x.Grad[i], delta);
    }

    //b.grad
    Assert.AreEqual(cbgrad.Length, swish.beta.Grad.Length);
    for (int i = 0; i < swish.beta.Grad.Length; i++)
    {
        Assert.AreEqual(cbgrad[i], swish.beta.Grad[i], delta);
    }
}
// Condition node; stores the predicate group for later SQL generation.
internal ConditionChainer(Chainer prev, PredicateGroup predicateGroup) : base(prev) { PredicateGroup = predicateGroup; }
/// <summary>
/// Builds one enemy wave from its JSON description: computes randomized spawn
/// times inside fixed time slots, shuffles the enemy order, instantiates each
/// enemy prefab (swapping in the specialized component for special enemy
/// types), and positions each enemy on its track around the dial.
/// </summary>
public Wave(Dictionary <string, System.Object> json)
{
    canvas = GameObject.Find("Canvas").transform;
    System.Random rand = new System.Random();
    enemies = new List <GameObject> ();

    //note: ints in MiniJSON come out as longs, so must be cast twice
    levelID = (int)(long)json ["levelID"];
    waveID = (int)(long)json ["waveID"];
    maxTime = (int)(long)json ["maxMilliseconds"];
    interval = (int)(long)json ["minimumInterval"];

    List <System.Object> enemyjson = (List <System.Object>)json ["enemies"];

    //step one: create a randomized list of spawn times
    int slots = maxTime / interval;
    //Debug.Log ("wave "+ waveID + " slots: " + slots);
    int occupants = enemyjson.Count;

    //create bool array to randomize: first 'occupants' slots occupied, rest empty
    List <bool> timeslots = new List <bool>();
    for (int i = 0; i < slots; i++)
    {
        if (i < occupants)
        {
            timeslots.Add(true);
        }
        else
        {
            timeslots.Add(false);
        }
    }

    //randomize this array (fisher-yates shuffle)
    for (int i = slots - 1; i > 0; i--)
    {
        int j = rand.Next(i + 1);
        bool temp = timeslots[i];
        timeslots[i] = timeslots[j];
        timeslots[j] = temp;
    }

    //make sure one enemy spawns immediately: move the first occupied slot to slot 0
    if (!timeslots[0])
    {
        for (int i = 1; i < slots; i++)
        {
            if (timeslots[i])
            {
                timeslots[0] = true;
                timeslots[i] = false;
                break;
            }
        }
    }

    //create corresponding array of random-ish long bonuses to positions
    List <long> timeChaos = new List <long>();
    for (int i = 0; i < timeslots.Count; i++)
    {
        timeChaos.Add(0);
    }
    for (int i = 0; i < timeChaos.Count; i++)
    {
        if (timeslots[i]) //if an enemy should spawn in this timeframe,
        //get the previous bonus to make sure you're the min distance away
        {
            long previous = 0;
            if (i > 0)
            {
                previous = timeChaos[i - 1];
            }
            //create random spawn time within the time slot allotted
            // NOTE(review): rand.Next(min, max) throws ArgumentOutOfRangeException
            // when previous > interval — confirm previous can never exceed interval.
            long chaos = rand.Next((int)previous, (int)interval);
            timeChaos[i] = chaos;
        }
    }

    //check to make sure nothing went wrong and timeslots and timechaos are both of length slots
    if (timeslots.Count != slots)
    {
        Debug.Log("timeslots is wrong length! slots is " + slots + " but timeslots length is " + timeslots.Count);
    }
    if (timeChaos.Count != slots)
    {
        Debug.Log("timechaos is wrong length! slots is " + slots + " but timechaos length is " + timeChaos.Count);
    }

    //finally, create final list of spawn times (slot start + per-slot jitter)
    List <long> spawntimesInMillis = new List <long>();
    for (int i = 0; i < timeslots.Count; i++)
    {
        if (timeslots[i])
        {
            long spawntime = i * interval;
            spawntime += timeChaos[i];
            spawntimesInMillis.Add(spawntime);
        }
    }
    //for(int i = 0; i < spawntimesInMillis.Count; i++){
    //    Debug.Log ("wave "+ waveID + " spawntime "+i +": " + spawntimesInMillis[i]);
    //}

    //check to make sure nothing went wrong and spawntimes is of length occupants
    if (spawntimesInMillis.Count != enemyjson.Count)
    {
        Debug.Log("spawntimes and enemies don't match! (" + spawntimesInMillis.Count + "/" + enemyjson.Count + ")");
    }

    //shuffle the enemy order (fisher-yates)
    for (int i = enemyjson.Count - 1; i > 0; i--)
    {
        int j = rand.Next(i + 1);
        System.Object temp = enemyjson[i];
        enemyjson[i] = enemyjson[j];
        enemyjson[j] = temp;
    }

    for (int i = 0; i < enemyjson.Count; i++)
    {
        System.Object enemy = enemyjson[i];
        Dictionary <string, System.Object> enemydict = (Dictionary <string, System.Object>)enemy;
        //would load from bestiary using (string)enemydict["enemyID"], but no bestiary yet
        //long spawntimeInMillis = (long)enemydict["spawntime"];
        string filename = (string)enemydict["enemyID"];
        int track = (int)(long)enemydict["trackID"];
        int trackpos = 0;
        if (enemydict.ContainsKey("trackpos"))
        {
            trackpos = (int)(long)enemydict["trackpos"];
        }

        //make enemy
        GameObject enemyspawn = GameObject.Instantiate(Resources.Load("Prefabs/MainCanvas/Enemy")) as GameObject;
        //Debug.Log("we're setting it to the spawn layer");
        //Debug.Log (Dial.spawnLayer == null);
        enemyspawn.transform.SetParent(Dial.spawnLayer, false);
        enemyspawn.SetActive(false);
        Enemy ec = enemyspawn.GetComponent <Enemy>();

        // Load the enemy's definition from the bestiary JSON file.
        FileLoader fl = new FileLoader("JSONData" + Path.DirectorySeparatorChar + "Bestiary", filename);
        string actualenemyjson = fl.Read();
        Dictionary <string, System.Object> actualenemydict = Json.Deserialize(actualenemyjson) as Dictionary <string, System.Object>;
        string enemytype = (string)actualenemydict["enemyType"];

        // Specialized enemy types: replace the generic Enemy component with the
        // matching subclass component and configure its type-specific fields.
        if (enemytype.Equals("Chainers"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            Chainer c = enemyobj.AddComponent <Chainer>() as Chainer;
            float chaindelay = (float)(double)actualenemydict["delay"];
            c.delay = chaindelay;
            ec = c;
        }
        else if (enemytype.Equals("TipOfTheSpear"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            TipOfTheSpear tots = enemyobj.AddComponent <TipOfTheSpear>() as TipOfTheSpear;
            float chaindelay = (float)(double)actualenemydict["delay"];
            tots.SetDelay(chaindelay);
            tots.leader = true;
            ec = tots;
        }
        else if (enemytype.Equals("WallOfDoom"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            WallOfDoom wod = enemyobj.AddComponent <WallOfDoom>() as WallOfDoom;
            ec = wod;
        }
        else if (enemytype.Equals("TheDiversion"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            Diversion d = enemyobj.AddComponent <Diversion>() as Diversion;
            float chaindelay = (float)(double)actualenemydict["delay"];
            d.SetDelay(chaindelay);
            ec = d;
        }
        else if (enemytype.Equals("MeatShield"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            MeatShield ms = enemyobj.AddComponent <MeatShield>() as MeatShield;
            float chaindelay = (float)(double)actualenemydict["delay"];
            ms.SetDelay(chaindelay);
            ms.leader = true;
            ec = ms;
        }
        else if (enemytype.Equals("Splitter"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            Splitter s = enemyobj.AddComponent <Splitter>() as Splitter;
            ec = s;
        }
        else if (enemytype.Equals("Blob"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            Blob b = enemyobj.AddComponent <Blob>() as Blob;
            ec = b;
        }
        else if (enemytype.Equals("Megasplit"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            Megasplit ms = enemyobj.AddComponent <Megasplit>() as Megasplit;
            ec = ms;
        }
        else if (enemytype.Equals("Melder"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            Melder m = enemyobj.AddComponent <Melder>() as Melder;
            ec = m;
        }
        else if (enemytype.Equals("BigSplit"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            BigSplit bs = enemyobj.AddComponent <BigSplit>() as BigSplit;
            ec = bs;
        }
        else if (enemytype.Equals("Junior"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            Junior j = enemyobj.AddComponent <Junior>() as Junior;
            ec = j;
        }
        else if (enemytype.Equals("Cheater"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            Cheater ch = enemyobj.AddComponent <Cheater>() as Cheater;
            ec = ch;
        }
        else if (enemytype.Equals("Spite"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            Spite s = enemyobj.AddComponent <Spite>() as Spite;
            ec = s;
        }
        else if (enemytype.Equals("Executor"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            Executor s = enemyobj.AddComponent <Executor>() as Executor;
            ec = s;
        }
        else if (enemytype.Equals("Saboteur"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            Saboteur s = enemyobj.AddComponent <Saboteur>() as Saboteur;
            ec = s;
        }
        else if (enemytype.Equals("Pusher"))
        {
            GameObject enemyobj = ec.gameObject;
            GameObject.Destroy(enemyobj.GetComponent <Enemy>());
            Pusher s = enemyobj.AddComponent <Pusher>() as Pusher;
            ec = s;
        }

        //give enemy a filename to load from
        ec.SetSrcFileName(filename);
        ec.SetTrackID(track);
        ec.SetTrackLane(trackpos);

        //calculate and set position
        float degrees = (track - 1) * 60 + 30; //clockwise of y-axis
        degrees += 15 * trackpos;              //negative trackpos is left side, positive is right side, 0 is middle
        degrees = ((360 - degrees) + 90) % 360; //convert to counterclockwise of x axis
        degrees *= Mathf.Deg2Rad;
        ((RectTransform)enemyspawn.transform).anchoredPosition = new Vector2(Dial.ENEMY_SPAWN_LENGTH * Mathf.Cos(degrees), Dial.ENEMY_SPAWN_LENGTH * Mathf.Sin(degrees));

        //set spawn time
        ec.SetSpawnTime(spawntimesInMillis[i]);
        enemies.Add(enemyspawn);
    }
    /*foreach (System.Object enemy in enemyjson) {
     * //ec.ConfigureEnemy ();
     * }*/
}
/// <summary>
/// Node that injects raw SQL or an inliner's content into the build output.
/// If <paramref name="sqlOrInliner"/> does not resolve to an inliner variable,
/// it is emitted verbatim; otherwise the Build delegate dispatches on the
/// inliner's DT (InSql / InSnippet) at build time.
/// </summary>
internal InjectChainer(Chainer prev, string sqlOrInliner) : base(prev)
{
    var root = GetRoot();
    var inliner = root.TryGetVariable(sqlOrInliner, out chainException, Variable.SearchType.Inliner);
    chainException = null; // reset exception

    // Not an inliner: treat the string as literal SQL and emit it as-is.
    if (inliner == null)
    {
        Build = (buildContext, buildArgs) =>
        {
            return(Text.GenerateSql(200)
                   .NewLine(sqlOrInliner)
                   .TerminateSingle()
                   .ToString());
        };
    }
    // inliner
    else
    {
        Build = (buildContext, buildArgs) =>
        {
            ParameterArgument inlinerArgument = buildArgs.Executable.GetInlinerArgument(inliner.Name);
            if (inlinerArgument == null)
            {
                return(null);
            }

            if (inliner.DT == DT.InSql)
            {
                // Argument type must match the declared inliner type.
                if (inlinerArgument.DT != DT.InSql)
                {
                    TryThrow(inliner.DT.InvalidInlinerException(GetType().Name, inliner.Name, _sqlInliners));
                }

                string sql2 = inliner.Name;
                if (buildArgs.Executable != null)
                {
                    sql2 = (string)inlinerArgument.Value;
                }

                return(Text.GenerateSql(500)
                       .NewLine(sql2)
                       .TerminateSingle()
                       .ToString());
            }
            else if (inliner.DT == DT.InSnippet)
            {
                if (inlinerArgument.DT != DT.InSnippet)
                {
                    buildContext.TryTakeException(inliner.DT.InvalidInlinerException(GetType().Name, inliner.Name, _snippetInliners));
                    return(null);
                }

                string sql = inliner.Name;
                if (buildArgs.Executable != null)
                {
                    var snippet = (Snippet)inlinerArgument.Value;
                    if (snippet != null)
                    {
                        TryTake(snippet);
                        buildContext.ParamRoot = GetRoot();
                        sql = snippet.Build(buildContext, buildArgs);
                    }
                    else
                    {
                        sql = null; // it is allowed that snippet is not passed
                    }
                }

                return(sql);
            }
            else
            {
                // Any other inliner type is invalid in this context.
                chainException = inliner.DT.InvalidInlinerException(GetType().Name, inliner.Name, _inliners);
                chainException.ObjectName = root.Name;
                chainException.Method = Text.Method.Exec;
                throw chainException;
            }
        };
    }
}
// WHERE node built from an expression; registers itself in the query's WHERE list.
internal WhereChainer(Chainer prev, Expression expression, PredicateGroup predicateGroup = null) : base(prev, expression, predicateGroup) { Query.Clause.Wheres.Add(this); }
// Randomized parity test: runs an identically-parameterized fully-connected (Linear) layer
// through Chainer (reference) and KelpNet (under test), then checks that the forward output
// and every gradient (x, W, b) agree element-wise within a small tolerance.
public void RandomTest(bool gpuEnable)
{
    Python.Initialize();
    Chainer.Initialize();

    // Randomized problem dimensions.
    int inputCount = Mother.Dice.Next(1, 50);
    int outputCount = Mother.Dice.Next(1, 50);
    int batchCount = Mother.Dice.Next(1, 5);

    // Shared random data so both frameworks see exactly the same inputs and weights.
    Real[,] input = Initializer.GetRandomValues <Real[, ]>(batchCount, inputCount);
    Real[,] dummyGy = Initializer.GetRandomValues <Real[, ]>(batchCount, outputCount);
    Real[,] w = Initializer.GetRandomValues <Real[, ]>(outputCount, inputCount);
    Real[] b = Initializer.GetRandomValues <Real[]>(outputCount);

    // Reference pass: Chainer forward + backward.
    NChainer.Linear <Real> cLinear = new NChainer.Linear <Real>(inputCount, outputCount, false, w, b);
    Variable <Real> cX = new Variable <Real>(input);
    Variable <Real> cY = cLinear.Forward(cX);
    cY.Grad = dummyGy;
    cY.Backward();

    // Pass under test: KelpNet forward + backward with the same data.
    CL.Linear <Real> linear = new CL.Linear <Real>(inputCount, outputCount, false, w, b, gpuEnable: gpuEnable);
    NdArray <Real> x = new NdArray <Real>(input, asBatch: true);
    NdArray <Real> y = linear.Forward(x)[0];
    y.Grad = dummyGy.Flatten();
    y.Backward();

    // Flattened reference values for element-wise comparison.
    Real[] expectedY = ((Real[, ])cY.Data).Flatten();
    Real[] expectedXGrad = ((Real[, ])cX.Grad).Flatten();
    Real[] expectedWGrad = ((Real[, ])cLinear.W.Grad).Flatten();
    Real[] expectedBGrad = (Real[])cLinear.b.Grad;

    // Allowed numerical tolerance.
    Real delta = 0.00001f;

    // Asserts matching length, then element-wise closeness within delta.
    void AssertClose(Real[] expected, Real[] actual)
    {
        Assert.AreEqual(expected.Length, actual.Length);
        for (int i = 0; i < actual.Length; i++)
        {
            Assert.AreEqual(expected[i], actual[i], delta);
        }
    }

    AssertClose(expectedY, y.Data);            // y
    AssertClose(expectedXGrad, x.Grad);        // x.grad
    AssertClose(expectedWGrad, linear.Weight.Grad); // W.grad
    AssertClose(expectedBGrad, linear.Bias.Grad);   // b.grad
}
// .WhereExists / .WhereNotExists
// WHERE [NOT] EXISTS clause node over a non-select view; registers itself with the
// query's WHERE list and records which of the two chain methods produced it.
internal WhereChainer(Chainer prev, INonSelectView nonSelectView, bool exists, PredicateGroup predicateGroup = null) : base(prev, nonSelectView, exists, predicateGroup)
{
    Query.Clause.Wheres.Add(this);

    if (exists)
    {
        chainMethod = Text.Method.WhereExists;
    }
    else
    {
        chainMethod = Text.Method.WhereNotExists;
    }
}
// True when the given chainer is already registered as a follower of this node.
public bool HasFollower(Chainer c) => followers.Contains(c);
// Randomized parity test for 2D deconvolution (transposed convolution): the same random
// geometry, input, weights and bias are fed through Chainer (reference, NCHW / (H,W) order)
// and KelpNet (under test, (W,H) order), then output and all gradients are compared.
public void RandomTest(bool gpuEnable)
{
    Python.Initialize();
    Chainer.Initialize();

    // Random geometry. Note deconvolution output size grows with stride:
    // out = (in - 1) * stride + kernel - 2 * pad.
    int batchCount = Mother.Dice.Next(1, 5);
    int inChCount = Mother.Dice.Next(1, 5);
    int outChCount = Mother.Dice.Next(1, 5);
    int wideSize = Mother.Dice.Next(8, 32);
    int heightSize = Mother.Dice.Next(8, 32);
    int kWidth = Mother.Dice.Next(1, 5);
    int kHeight = Mother.Dice.Next(1, 5);
    int strideX = Mother.Dice.Next(1, 5);
    int strideY = Mother.Dice.Next(1, 5);
    int padX = Mother.Dice.Next(0, 5);
    int padY = Mother.Dice.Next(0, 5);

    int outputHeight = (heightSize - 1) * strideY + kHeight - padY * 2;
    int outputWidth = (wideSize - 1) * strideX + kWidth - padX * 2;

    // Shared random tensors so both frameworks see identical data.
    Real[,,,] input = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, inChCount, heightSize, wideSize });
    Real[,,,] dummyGy = (Real[, , , ])Initializer.GetRealNdArray(new[] { batchCount, outChCount, outputHeight, outputWidth });
    Real[,,,] w = (Real[, , , ])Initializer.GetRealNdArray(new[] { inChCount, outChCount, kHeight, kWidth });
    Real[] b = Initializer.GetRealArray(outChCount);

    // Reference pass: Chainer (expects (height, width) ordered kernel/stride/pad).
    NChainer.Deconvolution2D <Real> cDeconvolution2D = new NChainer.Deconvolution2D <Real>(
        inChCount, outChCount,
        new[] { kHeight, kWidth },
        new[] { strideY, strideX },
        new[] { padY, padX },
        false,
        new PyObject[] { outputHeight, outputWidth },
        Real.ToBaseNdArray(w),
        Real.ToBaseArray(b));

    Variable <Real> cX = new Variable <Real>(Real.ToBaseNdArray(input));
    Variable <Real> cY = cDeconvolution2D.Forward(cX);
    cY.Grad = Real.ToBaseNdArray(dummyGy);
    cY.Backward();

    // Pass under test: KelpNet (expects (width, height) ordered kernel/stride/pad).
    KelpNet.Deconvolution2D deconvolution2D = new KelpNet.Deconvolution2D(
        inChCount, outChCount,
        new [] { kWidth, kHeight },
        new [] { strideX, strideY },
        new [] { padX, padY },
        false, w, b, gpuEnable: gpuEnable);

    NdArray x = new NdArray(Real.ToRealArray(input), new[] { inChCount, heightSize, wideSize }, batchCount);
    NdArray y = deconvolution2D.Forward(x)[0];
    y.Grad = Real.ToRealArray(dummyGy);
    y.Backward();

    // Flattened reference values for element-wise comparison.
    Real[] expectedY = Real.ToRealArray((Real[, , , ])cY.Data.Copy());
    Real[] expectedXGrad = Real.ToRealArray((Real[, , , ])cX.Grad.Copy());
    Real[] expectedWGrad = Real.ToRealArray((Real[, , , ])cDeconvolution2D.W.Grad);
    Real[] expectedBGrad = (Real[])cDeconvolution2D.b.Grad;

    // Compute the allowed tolerance.
    double delta = 0.00001;

    // Length checks up front, exactly as before, so a shape mismatch fails fast.
    Assert.AreEqual(expectedY.Length, y.Data.Length);
    Assert.AreEqual(expectedXGrad.Length, x.Grad.Length);
    Assert.AreEqual(expectedWGrad.Length, deconvolution2D.Weight.Grad.Length);
    Assert.AreEqual(expectedBGrad.Length, deconvolution2D.Bias.Grad.Length);

    // Element-wise closeness within the given tolerance.
    void AssertClose(Real[] expected, Real[] actual, double tolerance)
    {
        for (int i = 0; i < actual.Length; i++)
        {
            Assert.AreEqual(expected[i], actual[i], tolerance);
        }
    }

    AssertClose(expectedY, y.Data, delta);      // y
    AssertClose(expectedXGrad, x.Grad, delta);  // x.grad

    // Weight/bias gradients accumulate more floating-point error; use a looser tolerance.
    delta = 0.1;
    AssertClose(expectedWGrad, deconvolution2D.Weight.Grad, delta); // W.grad
    AssertClose(expectedBGrad, deconvolution2D.Bias.Grad, delta);   // b.grad
}