public static T Test_Ops<T>(T a, T b, T c)
{
    return Ops.Subtract(Ops.Add(a, b), c);
}
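The Ops type used throughout these snippets is project-specific. As an illustration only, the following self-contained sketch (GenericOps, Demo and TestOps are hypothetical names, not part of any library quoted here) shows one common way such a generic arithmetic facade can be built, by compiling expression trees once per type parameter, and reproduces the (a + b) - c pattern above:

using System;
using System.Linq.Expressions;

public static class GenericOps<T>
{
    // Compile "a + b" and "a - b" once per T via expression trees.
    public static readonly Func<T, T, T> Add = BuildBinary(Expression.Add);
    public static readonly Func<T, T, T> Subtract = BuildBinary(Expression.Subtract);

    private static Func<T, T, T> BuildBinary(Func<Expression, Expression, BinaryExpression> op)
    {
        var a = Expression.Parameter(typeof(T), "a");
        var b = Expression.Parameter(typeof(T), "b");
        return Expression.Lambda<Func<T, T, T>>(op(a, b), a, b).Compile();
    }
}

public static class Demo
{
    // Mirrors Test_Ops above: (a + b) - c, evaluated through the compiled delegates.
    public static T TestOps<T>(T a, T b, T c) => GenericOps<T>.Subtract(GenericOps<T>.Add(a, b), c);

    public static void Main()
    {
        Console.WriteLine(TestOps(1.0, 2.0, 0.5)); // 2.5
        Console.WriteLine(TestOps(7, 5, 3));       // 9
    }
}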
protected override void TrainImplem()
{
    var parametersAndGradients = this.Net.GetParametersAndGradients();

    // initialize lists for accumulators. Will only be done once on first iteration
    if (this.gsum.Count == 0)
    {
        foreach (var t in parametersAndGradients)
        {
            this.gsum.Add(BuilderInstance<T>.Volume.SameAs(t.Volume.Shape));
            this.xsum.Add(BuilderInstance<T>.Volume.SameAs(t.Volume.Shape));
        }
    }

    // perform an update for all sets of weights
    for (var i = 0; i < parametersAndGradients.Count; i++)
    {
        var parametersAndGradient = parametersAndGradients[i];
        var vol = parametersAndGradient.Volume;
        var grad = parametersAndGradient.Gradient;

        grad.Multiply(Ops<T>.Divide(Ops<T>.One, Ops<T>.Cast(this.BatchSize)), grad); // grad *= 1 / BatchSize

        using (var temp1 = BuilderInstance<T>.Volume.SameAs(vol.Shape))
        using (var temp2 = BuilderInstance<T>.Volume.SameAs(vol.Shape))
        using (var gradgrad = BuilderInstance<T>.Volume.SameAs(vol.Shape))
        using (var two = BuilderInstance<T>.Volume.From(new[] { Ops<T>.Cast(2.0) }, new Shape(1)))
        using (var epsilon = BuilderInstance<T>.Volume.From(new[] { this.Eps }, new Shape(1)))
        {
            // momentum update
            // update biased first moment estimate: gsum[i] = gsum[i] * Beta1 + (1 - Beta1) * grad
            this.gsum[i].Multiply(this.Beta1, temp1);                                              // temp1 = this.gsum[i] * this.Beta1
            grad.Multiply(Ops<T>.Add(Ops<T>.One, Ops<T>.Negate(this.Beta1)), this.gsum[i]);        // this.gsum[i] = grad * (1 - Beta1)
            temp1.Add(this.gsum[i]);                                                               // this.gsum[i] += temp1

            grad.Power(two, gradgrad);                                                             // gradgrad = grad * grad

            // update biased second moment estimate: xsum[i] = xsum[i] * Beta2 + (1 - Beta2) * grad * grad
            this.xsum[i].Multiply(this.Beta2, temp1);                                              // temp1 = this.xsum[i] * this.Beta2
            gradgrad.Multiply(Ops<T>.Add(Ops<T>.One, Ops<T>.Negate(this.Beta2)), this.xsum[i]);    // this.xsum[i] = gradgrad * (1 - Beta2)
            temp1.Add(this.xsum[i]);                                                               // this.xsum[i] += temp1

            var biasCorr1 = temp1;
            var biasCorr2 = temp2;

            this.gsum[i].Multiply(Ops<T>.Add(Ops<T>.One, Ops<T>.Negate(Ops<T>.Pow(this.Beta1, Ops<T>.Cast(this.k)))), biasCorr1); // correct bias first moment estimate
            this.xsum[i].Multiply(Ops<T>.Add(Ops<T>.One, Ops<T>.Negate(Ops<T>.Pow(this.Beta2, Ops<T>.Cast(this.k)))), biasCorr2); // correct bias second moment estimate

            biasCorr2.Sqrt(biasCorr2);  // biasCorr2 = sqrt(biasCorr2)
            epsilon.Add(biasCorr2);     // biasCorr2 += epsilon

            var dx = biasCorr1;
            dx.Multiply(this.LearningRate, dx);
            dx.Divide(biasCorr2, dx);

            dx.SubtractFrom(vol, vol);
        }

        grad.Clear(); // zero out gradient so that we can begin accumulating anew

        this.k += this.BatchSize;
    }
}
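For reference, written out as equations, the per-volume update performed above is (with m = gsum[i], v = xsum[i], g = grad / BatchSize, α = LearningRate, ε = Eps and k the step counter):

\[ m \leftarrow \beta_1 m + (1-\beta_1)\,g, \qquad v \leftarrow \beta_2 v + (1-\beta_2)\,g^2 \]
\[ \hat m = m\,(1-\beta_1^{\,k}), \qquad \hat v = v\,(1-\beta_2^{\,k}), \qquad w \leftarrow w - \frac{\alpha\,\hat m}{\sqrt{\hat v} + \varepsilon} \]

This transcribes the code as written; note that textbook Adam divides by the (1 - β^k) factors for bias correction rather than multiplying by them.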
public IWeightTensor Affine(IWeightTensor m1, IWeightTensor m2, IWeightTensor mbias, float alpha = 1.0f)
{
    if (m1 == null)
    {
        throw new ArgumentNullException("m1 tensor is null");
    }

    if (m2 == null)
    {
        throw new ArgumentNullException("m2 tensor is null");
    }

    if (mbias == null)
    {
        throw new ArgumentNullException("mbias tensor is null");
    }

    WeightTensor t1 = m1 as WeightTensor;
    WeightTensor t2 = m2 as WeightTensor;
    WeightTensor t3 = mbias as WeightTensor;

    int n = t1.Rows;
    int d = t2.Columns;

    WeightTensor res = m_weightTensorFactory.CreateWeightTensor(n, d, m_deviceId, name: $"{GetHashString(m1.Name, m2.Name, mbias.Name)}.Affine", graphToBind: this);
    VisualizeNodes(new IWeightTensor[] { m1, m2, mbias }, res);

    using (Tensor t3WExp = t3.TWeight.Expand(n, d))
    {
        Ops.Addmm(res.TWeight, 1.0f, t3WExp, alpha, t1.TWeight, t2.TWeight);
    }

    if (m_needsBackprop)
    {
        Action backward = () =>
        {
            res.ReleaseWeight();

            using (Tensor t3G = t3.TGradient.Expand(n, d))
            {
                Ops.Add(t3G, t3G, res.TGradient);
            }

            using (Tensor tW2 = t2.TWeight.Transpose())
            {
                Ops.Addmm(t1.TGradient, 1.0f, t1.TGradient, alpha, res.TGradient, tW2);
            }

            using (Tensor tW1 = t1.TWeight.Transpose())
            {
                Ops.Addmm(t2.TGradient, 1.0f, t2.TGradient, alpha, tW1, res.TGradient);
            }

            res.Dispose();
        };
        m_backprop.Add(backward);

        t1.UnbindFromComputeGraph();
        t2.UnbindFromComputeGraph();
    }

    return res;
}
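Assuming Ops.Addmm follows the usual GEMM convention result = β·C + α·A·B (an assumption, not confirmed by this snippet alone), the forward pass above computes

\[ \text{res} = \text{mbias} + \alpha\,(m_1 m_2) \]

with mbias broadcast (expanded) to n × d, and the registered backward action accumulates

\[ \nabla_{\text{mbias}} \mathrel{+}= \nabla_{\text{res}}, \qquad \nabla_{m_1} \mathrel{+}= \alpha\,\nabla_{\text{res}}\,m_2^{\top}, \qquad \nabla_{m_2} \mathrel{+}= \alpha\,m_1^{\top}\,\nabla_{\text{res}} \]

where the bias gradient is accumulated through the same expanded (broadcast) view.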
/// <summary>Estimate viability of operation.</summary>
/// <exception cref="FileNotFoundException">If <see cref="SrcPath"/> is not found.</exception>
/// <exception cref="FileSystemExceptionFileExists">If <see cref="Path"/> already exists.</exception>
protected override void InnerEstimate()
{
    PathConverter pathConverter = new PathConverter(SrcPath, Path);
    List<IEntry> queue = new List<IEntry>();

    // Src
    IEntry e = SrcFileSystem.GetEntry(SrcPath, srcOption.OptionIntersection(session.Option));
    // Src not found
    if (e == null)
    {
        // Throw
        if (EffectivePolicy.HasFlag(OperationPolicy.SrcThrow)) throw new FileNotFoundException(SrcPath);
        // Skip
        if (EffectivePolicy.HasFlag(OperationPolicy.SrcSkip)) { SetState(OperationState.Skipped); return; }
        // Fail anyway
        throw new FileNotFoundException(SrcPath);
    }

    queue.Add(e);
    while (queue.Count > 0)
    {
        try
        {
            // Next entry
            int lastIx = queue.Count - 1;
            IEntry entry = queue[lastIx];
            queue.RemoveAt(lastIx);

            // Omit package mounts
            if (session.Policy.HasFlag(OperationPolicy.OmitMountedPackages) && entry.IsPackageMount()) continue;

            // Process directory
            if (entry.IsDirectory())
            {
                // Browse children
                IDirectoryContent content = SrcFileSystem.Browse(entry.Path, srcOption.OptionIntersection(session.Option));
                // Assert children don't refer back to the parent
                foreach (IEntry child in content)
                    if (entry.Path.StartsWith(child.Path)) throw new IOException($"{child.Path} cannot be child of {entry.Path}");
                // Visit children
                for (int i = content.Count - 1; i >= 0; i--) queue.Add(content[i]);
                // Convert path
                string _dstPath;
                if (!pathConverter.ParentToChild(entry.Path, out _dstPath)) throw new Exception("Failed to convert path");
                // Add op
                if (_dstPath != "") Ops.Add(new CreateDirectory(session, FileSystem, _dstPath, Option.OptionIntersection(session.Option), OpPolicy));
            }
            // Process file
            else if (entry.IsFile())
            {
                // Convert path
                string _dstPath;
                if (!pathConverter.ParentToChild(entry.Path, out _dstPath)) throw new Exception("Failed to convert path");
                // Add op
                Ops.Add(new CopyFile(session, SrcFileSystem, entry.Path, FileSystem, _dstPath, srcOption.OptionIntersection(session.Option), Option.OptionIntersection(session.Option), OpPolicy));
            }
        }
        catch (Exception error) when (SetError(error)) { }
    }

    base.InnerEstimate();
}
private CodeFrame(Op op) : this()
{
    Ops.Add(op);
    OpData.Add(0);
    Layouts.Add(new MemoryLayout(0, 0, 0));
}
public static object OperatorConcat(object a, object b)
{
    return Ops.Add(a, b);
}
static void Main(string[] args)
{
    Console.WriteLine(Ops.Add<MyInt, BigInteger, int>(new MyInt(1), new BigInteger(1)));
}
public static object Add(object a, object b)
{
    return Ops.Add(a, b);
}
/// <summary>Estimate viability of operation.</summary>
/// <exception cref="FileNotFoundException">If <see cref="Path"/> is not found.</exception>
/// <exception cref="FileSystemExceptionFileExists">If <see cref="Path"/> already exists.</exception>
protected override void InnerEstimate()
{
    List<Delete> dirDeletes = new List<Delete>();
    try
    {
        List<IEntry> queue = new List<IEntry>();
        IEntry e = FileSystem.GetEntry(Path, Option.OptionIntersection(session.Option));
        if (e == null) throw new FileNotFoundException(Path);
        queue.Add(e);

        while (queue.Count > 0)
        {
            try
            {
                // Next entry
                int lastIx = queue.Count - 1;
                IEntry entry = queue[lastIx];
                queue.RemoveAt(lastIx);

                // Omit package mounts
                if (session.Policy.HasFlag(OperationPolicy.OmitMountedPackages) && entry.IsPackageMount()) continue;

                // Process directory
                if (entry.IsDirectory())
                {
                    // Browse children
                    IDirectoryContent content = FileSystem.Browse(entry.Path, Option.OptionIntersection(session.Option));
                    // Assert children don't refer back to the parent
                    foreach (IEntry child in content)
                        if (entry.Path.StartsWith(child.Path)) throw new IOException($"{child.Path} cannot be child of {entry.Path}");
                    // Visit children
                    for (int i = content.Count - 1; i >= 0; i--) queue.Add(content[i]);
                    // Add op (directory deletes are collected and appended last)
                    dirDeletes.Add(new Delete(session, FileSystem, entry.Path, false));
                }
                // Process file
                else if (entry.IsFile())
                {
                    // Add op
                    Ops.Add(new Delete(session, FileSystem, entry.Path, false, Option.OptionIntersection(session.Option), OpPolicy));
                }
            }
            catch (Exception error) when (SetError(error)) { }
        }
    }
    finally
    {
        // Add directory deletes in reverse discovery order, so children are deleted before their parents
        for (int i = dirDeletes.Count - 1; i >= 0; i--) Ops.Add(dirDeletes[i]);
    }

    // Estimate added ops
    base.InnerEstimate();
}
protected override void TrainImplem()
{
    var parametersAndGradients = this.Net.GetParametersAndGradients();

    // initialize lists for accumulators. Will only be done once on first iteration
    if (this.gsum.Count == 0)
    {
        foreach (var t in parametersAndGradients)
        {
            this.gsum.Add(BuilderInstance<T>.Volume.SameAs(t.Volume.Shape));
            this.xsum.Add(BuilderInstance<T>.Volume.SameAs(t.Volume.Shape));
        }
    }

    var factor = Ops<T>.Divide(Ops<T>.One, Ops<T>.Cast(this.BatchSize));

    // perform an update for all sets of weights
    for (var i = 0; i < parametersAndGradients.Count; i++)
    {
        var parametersAndGradient = parametersAndGradients[i];
        var vol = parametersAndGradient.Volume;
        var grad = parametersAndGradient.Gradient;

        // learning rate for some parameters.
        var l2DecayMul = parametersAndGradient.L2DecayMul ?? Ops<T>.One;
        var l1DecayMul = parametersAndGradient.L1DecayMul ?? Ops<T>.One;
        var l2Decay = Ops<T>.Multiply(this.L2Decay, l2DecayMul);
        var l1Decay = Ops<T>.Multiply(this.L1Decay, l1DecayMul);

        // this.L2DecayLoss += l2Decay * vol.Get(j) * vol.Get(j) / 2; // accumulate weight decay loss
        // this.L1DecayLoss += l1Decay * Math.Abs(vol.Get(j));

        var l1Grad = vol.Clone();
        l1Grad.MapInplace(x => Ops<T>.GreaterThan(x, Ops<T>.Zero) ? Ops<T>.One : Ops<T>.Negate(Ops<T>.One));
        l1Grad = l1Grad * l1Decay;

        var l2Grad = vol * l2Decay;

        var gij = (grad + l2Grad + l1Grad) * factor;

        // momentum update
        this.gsum[i] = this.gsum[i] * this.Beta1 + gij * Ops<T>.Add(Ops<T>.One, Ops<T>.Negate(this.Beta1)); // update biased first moment estimate

        var gijgij = gij.Clone();
        gijgij.MapInplace(x => Ops<T>.Multiply(x, x));
        this.xsum[i] = this.xsum[i] * this.Beta2 + gijgij * Ops<T>.Add(Ops<T>.One, Ops<T>.Negate(this.Beta2)); // update biased second moment estimate

        var biasCorr1 = this.gsum[i] * Ops<T>.Add(Ops<T>.One, Ops<T>.Negate(Ops<T>.Pow(this.Beta1, Ops<T>.Cast(this.k)))); // correct bias first moment estimate
        var biasCorr2 = this.xsum[i] * Ops<T>.Add(Ops<T>.One, Ops<T>.Negate(Ops<T>.Pow(this.Beta2, Ops<T>.Cast(this.k)))); // correct bias second moment estimate

        biasCorr2.MapInplace(x => Ops<T>.Add(Ops<T>.Sqrt(x), this.Eps));

        var dx = biasCorr1 * this.LearningRate;
        dx.MapInplace((l, r) => Ops<T>.Divide(l, r), biasCorr2);

        vol.MapInplace((v, d) => d, vol - dx); // apply corrected gradient

        grad.Clear(); // zero out gradient so that we can begin accumulating anew
    }

    this.k += this.BatchSize;
}
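Written out, the main difference from the in-place trainer above is that L1/L2 weight decay is folded into the gradient before the Adam moments are updated:

\[ g = \frac{1}{\text{BatchSize}}\Bigl(\nabla_w L + \lambda_2\,w + \lambda_1\,\operatorname{sign}(w)\Bigr), \qquad \lambda_2 = \text{L2Decay}\cdot\text{L2DecayMul}, \quad \lambda_1 = \text{L1Decay}\cdot\text{L1DecayMul} \]

where sign(w) is taken as +1 for w > 0 and -1 otherwise, exactly as the MapInplace lambda does; g then goes through the same first/second moment, bias-correction and w ← w - α·m̂/(√v̂ + ε) steps as before.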
public static Var<E> operator +(Var<E> left, Var<E> right) => Ops.Add(left.Value, right.Value);
public static object operator +(PythonBuffer a, PythonBuffer b)
{
    return Ops.Add(Ops.GetIndex(a.@object, a.GetSlice()), Ops.GetIndex(b.@object, b.GetSlice()));
}