예제 #1
0
        public int Predict(ValueSet val, WriteToCMDLine write)
        {
            // Build the stock model and its AAPL context, then derive the
            // comparison features this network scores.
            var stock       = new Stonk();
            var context     = new StonkContext(Datatype.AAPL);
            var comparisons = Comparison.GenerateComparisons(val);

            // Run the stock sub-network, feed its output through the outer
            // network, and return the index of the highest-scoring class.
            var scores = Network.Forward(stock.Forward(comparisons, context));
            return scores.ToList().IndexOf(scores.Max());
        }
예제 #2
0
 public double[] Forward(List <Comparison> vals, StonkContext ctxt, StonkMem sm)
 {
     // Score every comparison in parallel. Each index owns its own slot in
     // sm.LocationOutputs, so the loop bodies never write to shared state.
     Parallel.For(0, vals.Count, index =>
     {
         var trace = sm.LocationOutputs[index];
         // Seed the activation trace with a copy of the raw input values,
         // then append each layer's output, feeding the previous entry forward.
         trace.Add(vals[index].Values.Duplicate());
         for (int layer = 0; layer < Network.Layers.Count(); layer++)
         {
             trace.Add(Network.Layers[layer].Output(trace.Last()));
         }
         // Fold this comparison's result into the shared context state.
         ctxt.Contextualize(vals[index], index, sm);
     });
     // Blend the per-location outputs into the final result vector.
     return sm.Multiply();
 }
예제 #3
0
        public void Propogate
            (ValueSet val, WriteToCMDLine write)
        {
            // Trains the outer network and the stock/context sub-networks on
            // AAPL samples read from the value set, then persists all three.
            // NOTE(review): method name is a typo for "Propagate"; kept
            // unchanged because external callers bind to this name.
            Stonk        stk  = new Stonk();
            StonkContext ctxt = new StonkContext(datatype);
            var          vals = val.ReadValues(Datatypes.Datatype.AAPL, 24);

            NetworkMem AAPLMem = new NetworkMem(Network);
            NetworkMem StkMem  = new NetworkMem(stk.Network);
            NetworkMem CtxtMem = new NetworkMem(ctxt.Network);

            double e    = 0;
            object gate = new object(); // guards the shared loss accumulator 'e'

            Parallel.For(0, vals.Count(), j =>
            {
                try
                {
                    List <Comparison> comps = Comparison.GenerateComparisons(vals[j]);
                    if (j == 0 || j == 1)
                    {
                        write("Comparisons : " + comps.Count());
                    }
                    StonkMem sm = new StonkMem(comps.Count());

                    var MktOutput = stk.Forward(comps, ctxt, sm);
                    var F         = Network.Forward(MktOutput, dropout, write);

                    // One-hot target: index 1 = price increase, index 0 = not.
                    var output     = new double[2];
                    int opnumb     = vals[j].Increase ? 1 : 0;
                    output[opnumb] = 1;

                    var Error = CategoricalCrossEntropy.Forward(F.Last().GetRank(0), output);
                    // BUG FIX: 'e += ...' was an unsynchronized read-modify-write on a
                    // shared double across parallel iterations, silently losing loss
                    // contributions; serialize the accumulation.
                    lock (gate)
                    {
                        e += Error.Max();
                    }
                    var D = Network.Backward(F, output, AAPLMem, write);
                    stk.Backward(D, ctxt, sm, StkMem, CtxtMem);
                }
                catch (System.Exception ex)
                {
                    // BUG FIX: was an empty catch that silently discarded every
                    // failure; report the sample index and reason so bad data or
                    // shape mismatches are visible instead of vanishing.
                    write("Sample " + j + " failed : " + ex.Message);
                }
            });
            write("Samples : " + vals.Count());
            write("Loss : " + e);

            // Apply the accumulated gradients (fixed learning rate 1e-4),
            // then persist all three networks.
            AAPLMem.Update(vals.Count(), 1e-4, Network);
            StkMem.Update(vals.Count(), 1e-4, stk.Network);
            CtxtMem.Update(vals.Count(), 1e-4, ctxt.Network);

            Network.Save();
            stk.Network.Save();
            ctxt.Save();
        }
예제 #4
0
        public double[] Forward(List <Comparison> vals, StonkContext context)
        {
            // Per-comparison context weights and per-comparison location rows.
            int count = vals.Count;
            double[]  weights   = new double[count];
            double[,] locations = new double[count, MktSize];

            // Each parallel iteration writes only its own row and weight slot,
            // so no synchronization is required.
            Parallel.For(0, count, row =>
            {
                // Push a copy of the raw values through every layer in order.
                double[] activation = vals[row].Values.Duplicate();
                for (int layer = 0; layer < Network.Layers.Count(); layer++)
                {
                    activation = Network.Layers[layer].Output(activation);
                }
                locations.SetRank(activation, row);
                weights[row] = context.Contextualize(vals[row]);
            });

            // Blend the location rows using a softmax over the context weights.
            return locations.Multiply(Activations.SoftMax(weights));
        }
예제 #5
0
        public void Backward(double[] DValues, StonkContext context, StonkMem sm, NetworkMem mem, NetworkMem CtxtMem)
        {
            // Back-propagates the loss gradient through both paths of the stock
            // model: the global-context path (softmax + context network, into
            // CtxtMem) and the per-location layer stack (into mem). Gradients
            // are only accumulated here; weight updates happen elsewhere.
            // Per-location gradients must be split off BEFORE DValues is
            // overwritten by the context-path transforms below.
            var LocDValues = sm.DLocation(DValues);

            // Context path: gradient of the global blend, then invert the
            // softmax applied on the forward pass, then let the context
            // network accumulate its gradients.
            DValues = sm.DGlobalContext(DValues);
            DValues = Activations.InverseSoftMax(DValues, sm.GlobalOutputs.ToArray());
            context.Backward(DValues, sm.LocationOutputs.Count(), sm, CtxtMem);
            // Location path: walk each comparison's recorded activations
            // backwards through the layers. LocationOutputs[j][i] is layer i's
            // input and [i + 1] its output (slot 0 holds the raw input),
            // mirroring how Forward appends to the trace.
            Parallel.For(0, sm.GlobalOutputs.Count(), j =>
            {
                var ldv = LocDValues[j];
                for (int i = Network.Layers.Count() - 1; i >= 0; i--)
                {
                    // Chain rule per layer: activation derivative first, then
                    // bias/weight gradient accumulation (scaled by the sample
                    // count — presumably averaging; confirm in NetworkMem),
                    // then the gradient w.r.t. this layer's inputs.
                    ldv = mem.Layers[i].DActivation(ldv, sm.LocationOutputs[j][i + 1]);
                    mem.Layers[i].DBiases(ldv, Network.Layers[i], sm.GlobalOutputs.Count());
                    mem.Layers[i].DWeights(ldv, sm.LocationOutputs[j][i], Network.Layers[i], sm.GlobalOutputs.Count());
                    ldv = mem.Layers[i].DInputs(ldv, Network.Layers[i]);
                }
            });
        }