/// <summary>
/// Runs a single comparison's values through every layer of the context
/// network, recording the input and each layer's activation in the
/// per-location context cache (<c>sm.LocalContextOutputs[number]</c>).
/// </summary>
/// <param name="comp">Comparison whose values are fed forward.</param>
/// <param name="number">Index of the location slot to record into.</param>
/// <param name="sm">Shared memory holding cached activations.</param>
public void Contextualize(Comparison comp, int number, StonkMem sm)
{
    // Start from a defensive copy so the source comparison is never mutated.
    var activation = comp.Values.Duplicate();
    sm.LocalContextOutputs[number].Add(activation);

    int layerCount = Network.Layers.Count();
    for (int layer = 0; layer < layerCount; layer++)
    {
        // Cache every intermediate activation; the backward pass indexes
        // LocalContextOutputs[number][layer + 1] to retrieve these.
        activation = Network.Layers[layer].Output(activation);
        sm.LocalContextOutputs[number].Add(activation);
    }
}
/// <summary>
/// Forward pass over all comparisons: each location is fed through the
/// stonk network (activations cached per layer) and then contextualized,
/// all in parallel. The combined result comes from <c>sm.Multiply()</c>.
/// </summary>
/// <param name="vals">Comparisons to evaluate, one per location.</param>
/// <param name="ctxt">Context network applied to each comparison.</param>
/// <param name="sm">Shared memory that caches every activation.</param>
/// <returns>The multiplied/aggregated output produced by <paramref name="sm"/>.</returns>
public double[] Forward(List<Comparison> vals, StonkContext ctxt, StonkMem sm)
{
    // Locations are independent of each other, so they can run in parallel;
    // each iteration only writes to its own slot j of the caches.
    Parallel.For(0, vals.Count(), j =>
    {
        // Seed the cache with a copy of the raw comparison values.
        sm.LocationOutputs[j].Add(vals[j].Values.Duplicate());

        // Feed forward layer by layer, caching each activation for backprop.
        for (int layer = 0; layer < Network.Layers.Count(); layer++)
        {
            var previous = sm.LocationOutputs[j].Last();
            sm.LocationOutputs[j].Add(Network.Layers[layer].Output(previous));
        }

        // Run the context network for this location as well.
        ctxt.Contextualize(vals[j], j, sm);
    });

    return sm.Multiply();
}
/// <summary>
/// One training pass: builds comparison sets for each sample, runs the
/// stonk + context + main networks forward, accumulates categorical
/// cross-entropy loss, backpropagates, then applies the accumulated
/// gradients to all three networks and saves them.
/// </summary>
/// <param name="val">Source of the training samples.</param>
/// <param name="write">Sink for progress/loss messages.</param>
public void Propogate(ValueSet val, WriteToCMDLine write)
{
    Stonk stk = new Stonk();
    StonkContext ctxt = new StonkContext(datatype);
    var vals = val.ReadValues(Datatypes.Datatype.AAPL, 24);

    NetworkMem AAPLMem = new NetworkMem(Network);
    NetworkMem StkMem = new NetworkMem(stk.Network);
    NetworkMem CtxtMem = new NetworkMem(ctxt.Network);

    double e = 0;
    // BUG FIX: 'e += ...' was an unsynchronized read-modify-write on a shared
    // double inside Parallel.For — a data race that could drop loss updates.
    object gate = new object();

    Parallel.For(0, vals.Count(), j =>
    {
        try
        {
            List<Comparison> comps = Comparison.GenerateComparisons(vals[j]);
            if (j == 0 || j == 1)
            {
                write("Comparisons : " + comps.Count());
            }

            StonkMem sm = new StonkMem(comps.Count());
            var MktOutput = stk.Forward(comps, ctxt, sm);
            var F = Network.Forward(MktOutput, dropout, write);

            // One-hot target: index 1 = price increase, index 0 = decrease.
            var output = new double[2];
            int opnumb = vals[j].Increase ? 1 : 0;
            output[opnumb] = 1;

            var Error = CategoricalCrossEntropy.Forward(F.Last().GetRank(0), output);
            lock (gate)
            {
                e += Error.Max();
            }

            var D = Network.Backward(F, output, AAPLMem, write);
            stk.Backward(D, ctxt, sm, StkMem, CtxtMem);
        }
        catch (Exception ex)
        {
            // BUG FIX: previously an empty catch that silently swallowed all
            // failures; now at least report which sample failed and why.
            write("Propogate sample " + j + " failed : " + ex.Message);
        }
    });

    write("Samples : " + vals.Count());
    write("Loss : " + e);

    // Apply accumulated gradients (learning rate 1e-4) and persist weights.
    AAPLMem.Update(vals.Count(), 1e-4, Network);
    StkMem.Update(vals.Count(), 1e-4, stk.Network);
    CtxtMem.Update(vals.Count(), 1e-4, ctxt.Network);

    Network.Save();
    stk.Network.Save();
    ctxt.Save();
}
/// <summary>
/// Backward pass for the stonk network. Splits the incoming gradient into
/// per-location and global-context parts, backpropagates the context part
/// through the context network, then walks each location's layers in
/// reverse (in parallel) accumulating weight/bias gradients into
/// <paramref name="mem"/>.
/// </summary>
/// <param name="DValues">Gradient flowing back from the main network.</param>
/// <param name="context">Context network to backpropagate through.</param>
/// <param name="sm">Cached forward activations for every location.</param>
/// <param name="mem">Gradient accumulator for this network.</param>
/// <param name="CtxtMem">Gradient accumulator for the context network.</param>
public void Backward(double[] DValues, StonkContext context, StonkMem sm, NetworkMem mem, NetworkMem CtxtMem)
{
    // Split the gradient: one slice per location, plus the global-context part.
    var locationGrads = sm.DLocation(DValues);
    DValues = sm.DGlobalContext(DValues);
    DValues = Activations.InverseSoftMax(DValues, sm.GlobalOutputs.ToArray());

    // Context network backprop runs first, once, over all locations.
    context.Backward(DValues, sm.LocationOutputs.Count(), sm, CtxtMem);

    // Hoisted loop invariant: the run count used to normalize gradients.
    int runs = sm.GlobalOutputs.Count();
    Parallel.For(0, runs, j =>
    {
        var grad = locationGrads[j];

        // Reverse layer order; LocationOutputs[j][layer + 1] is the layer's
        // cached output and [layer] its cached input from the forward pass.
        for (int layer = Network.Layers.Count() - 1; layer >= 0; layer--)
        {
            grad = mem.Layers[layer].DActivation(grad, sm.LocationOutputs[j][layer + 1]);
            mem.Layers[layer].DBiases(grad, Network.Layers[layer], runs);
            mem.Layers[layer].DWeights(grad, sm.LocationOutputs[j][layer], Network.Layers[layer], runs);
            grad = mem.Layers[layer].DInputs(grad, Network.Layers[layer]);
        }
    });
}
/// <summary>
/// Backward pass for the context network: each of the <paramref name="runs"/>
/// locations receives a single scalar gradient, which is walked back through
/// the layers in reverse, accumulating parameter gradients into
/// <paramref name="mem"/>. Per-layer failures are reported and skipped.
/// </summary>
/// <param name="DValues">One scalar gradient per location.</param>
/// <param name="runs">Number of locations (and the gradient normalizer).</param>
/// <param name="sm">Cached forward activations per location.</param>
/// <param name="mem">Gradient accumulator for this network.</param>
public void Backward(double[] DValues, int runs, StonkMem sm, NetworkMem mem)
{
    Parallel.For(0, runs, j =>
    {
        // Each location contributes a single-element gradient vector.
        double[] grad = { DValues[j] };

        for (int layer = Network.Layers.Count() - 1; layer >= 0; layer--)
        {
            try
            {
                // LocalContextOutputs[j][layer + 1] is the layer's cached
                // output; [layer] is its cached input from the forward pass.
                grad = mem.Layers[layer].DActivation(grad, sm.LocalContextOutputs[j][layer + 1]);
                mem.Layers[layer].DBiases(grad, Network.Layers[layer], runs);
                mem.Layers[layer].DWeights(grad, sm.LocalContextOutputs[j][layer], Network.Layers[layer], runs);
                grad = mem.Layers[layer].DInputs(grad, Network.Layers[layer]);
            }
            catch (Exception e)
            {
                // Best-effort: log the failure and continue with other layers.
                e.OutputError();
            }
        }
    });
}